focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Creates and persists a new access token for the given authentication.
 *
 * <p>Validates the requesting client, enforces the PKCE code challenge when one was
 * recorded on the original authorization request, strips reserved system scopes,
 * attaches expiration / refresh token / approved-site references as applicable, and
 * runs the token through the configured enhancer before saving.
 *
 * @param authentication the OAuth2 authentication to issue a token for; must carry an OAuth2Request
 * @return the saved (and enhanced) access token entity
 * @throws InvalidClientException if the client id cannot be resolved
 * @throws InvalidRequestException if the PKCE verifier does not match the challenge
 * @throws AuthenticationCredentialsNotFoundException if no usable authentication was given
 */
@Override @Transactional(value="defaultTransactionManager") public OAuth2AccessTokenEntity createAccessToken(OAuth2Authentication authentication) throws AuthenticationException, InvalidClientException {
    if (authentication != null && authentication.getOAuth2Request() != null) {
        // look up our client
        OAuth2Request request = authentication.getOAuth2Request();
        ClientDetailsEntity client = clientDetailsService.loadClientByClientId(request.getClientId());
        if (client == null) {
            throw new InvalidClientException("Client not found: " + request.getClientId());
        }
        // handle the PKCE code challenge if present
        if (request.getExtensions().containsKey(CODE_CHALLENGE)) {
            String challenge = (String) request.getExtensions().get(CODE_CHALLENGE);
            // NOTE(review): if CODE_CHALLENGE_METHOD is absent this passes null to parse();
            // confirm PKCEAlgorithm.parse(null) cannot NPE before alg.equals below.
            PKCEAlgorithm alg = PKCEAlgorithm.parse((String) request.getExtensions().get(CODE_CHALLENGE_METHOD));
            String verifier = request.getRequestParameters().get(CODE_VERIFIER);
            if (alg.equals(PKCEAlgorithm.plain)) {
                // do a direct string comparison
                if (!challenge.equals(verifier)) {
                    throw new InvalidRequestException("Code challenge and verifier do not match");
                }
            } else if (alg.equals(PKCEAlgorithm.S256)) {
                // hash the verifier (RFC 7636: BASE64URL(SHA256(ASCII(code_verifier))))
                try {
                    MessageDigest digest = MessageDigest.getInstance("SHA-256");
                    String hash = Base64URL.encode(digest.digest(verifier.getBytes(StandardCharsets.US_ASCII))).toString();
                    if (!challenge.equals(hash)) {
                        throw new InvalidRequestException("Code challenge and verifier do not match");
                    }
                } catch (NoSuchAlgorithmException e) {
                    // NOTE(review): swallowing this skips PKCE verification entirely. SHA-256 is
                    // mandated by the JDK so the branch is unreachable in practice, but failing
                    // closed (rethrowing) would be the safer posture — consider changing.
                    logger.error("Unknown algorithm for PKCE digest", e);
                }
            }
        }
        OAuth2AccessTokenEntity token = new OAuth2AccessTokenEntity();//accessTokenFactory.createNewAccessToken();
        // attach the client
        token.setClient(client);
        // inherit the scope from the auth, but make a new set so it is
        //not unmodifiable. Unmodifiables don't play nicely with Eclipselink, which
        //wants to use the clone operation.
        Set<SystemScope> scopes = scopeService.fromStrings(request.getScope());
        // remove any of the special system scopes
        scopes = scopeService.removeReservedScopes(scopes);
        token.setScope(scopeService.toStrings(scopes));
        // make it expire if necessary
        if (client.getAccessTokenValiditySeconds() != null && client.getAccessTokenValiditySeconds() > 0) {
            Date expiration = new Date(System.currentTimeMillis() + (client.getAccessTokenValiditySeconds() * 1000L));
            token.setExpiration(expiration);
        }
        // attach the authorization so that we can look it up later
        AuthenticationHolderEntity authHolder = new AuthenticationHolderEntity();
        authHolder.setAuthentication(authentication);
        authHolder = authenticationHolderRepository.save(authHolder);
        token.setAuthenticationHolder(authHolder);
        // attach a refresh token, if this client is allowed to request them and the user gets the offline scope
        if (client.isAllowRefresh() && token.getScope().contains(SystemScopeService.OFFLINE_ACCESS)) {
            OAuth2RefreshTokenEntity savedRefreshToken = createRefreshToken(client, authHolder);
            token.setRefreshToken(savedRefreshToken);
        }
        //Add approved site reference, if any
        OAuth2Request originalAuthRequest = authHolder.getAuthentication().getOAuth2Request();
        if (originalAuthRequest.getExtensions() != null && originalAuthRequest.getExtensions().containsKey("approved_site")) {
            Long apId = Long.parseLong((String) originalAuthRequest.getExtensions().get("approved_site"));
            ApprovedSite ap = approvedSiteService.getById(apId);
            token.setApprovedSite(ap);
        }
        OAuth2AccessTokenEntity enhancedToken = (OAuth2AccessTokenEntity) tokenEnhancer.enhance(token, authentication);
        OAuth2AccessTokenEntity savedToken = saveAccessToken(enhancedToken);
        if (savedToken.getRefreshToken() != null) {
            tokenRepository.saveRefreshToken(savedToken.getRefreshToken()); // make sure we save any changes that might have been enhanced
        }
        return savedToken;
    }
    throw new AuthenticationCredentialsNotFoundException("No authentication credentials found");
}
// Verifies createAccessToken attaches the requesting client and strips reserved scopes.
@Test public void createAccessToken_checkClient() { OAuth2AccessTokenEntity token = service.createAccessToken(authentication); verify(scopeService, atLeastOnce()).removeReservedScopes(anySet()); assertThat(token.getClient().getClientId(), equalTo(clientId)); }
/**
 * Parses an arbitrary string into a {@link SchemaAndValue}, inferring the schema.
 * Null maps to the shared null sentinel; an empty string is returned as-is with a
 * STRING schema; everything else is handed to the value parser.
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (!value.isEmpty()) {
        // Non-trivial input: delegate to the recursive value parser.
        return new ValueParser(new Parser(value)).parse(false);
    }
    return new SchemaAndValue(Schema.STRING_SCHEMA, value);
}
// Verifies an ISO-8601 timestamp string parses to an INT64 Timestamp logical type with the exact instant.
@Test public void shouldParseTimestampStringAsTimestamp() throws Exception { String str = "2019-08-23T14:34:54.346Z"; SchemaAndValue result = Values.parseString(str); assertEquals(Type.INT64, result.schema().type()); assertEquals(Timestamp.LOGICAL_NAME, result.schema().name()); java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(str); assertEquals(expected, result.value()); }
/**
 * Compiles a Janino expression against the given argument signature.
 *
 * @param expression      source text of the expression
 * @param argumentNames   parameter names visible to the expression
 * @param argumentClasses parameter types, positionally matching {@code argumentNames}
 * @param returnClass     declared result type of the expression
 * @return a cooked evaluator ready for {@code evaluate(...)}
 * @throws InvalidProgramException if the expression does not compile (indicates a bug)
 */
public static ExpressionEvaluator compileExpression(
        String expression,
        List<String> argumentNames,
        List<Class<?>> argumentClasses,
        Class<?> returnClass) {
    final ExpressionEvaluator evaluator = new ExpressionEvaluator();
    evaluator.setParameters(
            argumentNames.toArray(new String[0]), argumentClasses.toArray(new Class[0]));
    evaluator.setExpressionType(returnClass);
    try {
        // "cook" performs the actual compilation of the expression text.
        evaluator.cook(expression);
    } catch (CompileException e) {
        throw new InvalidProgramException(
                "Expression cannot be compiled. This is a bug. Please file an issue.\nExpression: "
                        + expression,
                e);
    }
    return evaluator;
}
// Verifies a numeric equality expression compiles and evaluates to true against a Double parameter.
@Test public void testJaninoNumericCompare() throws InvocationTargetException { String expression = "col1==3.14"; List<String> columnNames = Arrays.asList("col1"); List<Class<?>> paramTypes = Arrays.asList(Double.class); List<Object> params = Arrays.asList(3.14); ExpressionEvaluator expressionEvaluator = JaninoCompiler.compileExpression( expression, columnNames, paramTypes, Boolean.class); Object evaluate = expressionEvaluator.evaluate(params.toArray()); Assert.assertEquals(true, evaluate); }
/**
 * Writes a single byte through to the underlying UFS output stream and updates the
 * written-byte counter. Synchronized so the counter stays consistent with the stream.
 */
@Override public synchronized void write(int b) throws IOException { mUfsOutStream.write(b); mBytesWritten++; }
// Verifies writing a sub-range (offset, length) of a byte array persists only the requested portion.
@Test public void writePartialIncreasingByteArray() throws IOException, AlluxioException { int offset = CHUNK_SIZE / 2; AlluxioURI ufsPath = getUfsPath(); try (FileOutStream outStream = mFileSystem.createFile(ufsPath)) { outStream.write(BufferUtils.getIncreasingByteArray(CHUNK_SIZE), offset, CHUNK_SIZE / 2); } verifyIncreasingBytesWritten(ufsPath, offset, CHUNK_SIZE / 2); }
/**
 * Returns the distinct elements present in both arrays, in the encounter order of
 * the first array. SQL semantics: a NULL input array yields a NULL result.
 */
@Udf
public <T> List<T> intersect(
    @UdfParameter(description = "First array of values") final List<T> left,
    @UdfParameter(description = "Second array of values") final List<T> right) {
  if (left == null || right == null) {
    return null;
  }
  // Membership checks go against a hash set of the second array; the linked set
  // preserves first-array order while collapsing duplicates.
  final Set<T> rightValues = Sets.newHashSet(right);
  final Set<T> ordered = Sets.newLinkedHashSet();
  for (final T value : left) {
    if (rightValues.contains(value)) {
      ordered.add(value);
    }
  }
  return Lists.newArrayList(ordered);
}
// Verifies intersect preserves first-array order and retains a null element shared by both inputs.
@Test public void shouldIntersectArraysBothContainingNulls() { final List<String> input1 = Arrays.asList(null, "foo", "bar"); final List<String> input2 = Arrays.asList("foo", null); final List<String> result = udf.intersect(input1, input2); assertThat(result, contains((String) null, "foo")); }
/**
 * Creates a directory. Buckets (containers) are delegated straight to the parent
 * implementation; regular placeholder folders first get an empty-content checksum
 * attached to the transfer status.
 */
@Override
public Path mkdir(final Path folder, final TransferStatus status) throws BackgroundException {
    if(!containerService.isContainer(folder)) {
        // Placeholder object: checksum of zero bytes is required by the backend.
        status.setChecksum(writer.checksum(folder, status).compute(new NullInputStream(0L), status));
    }
    return super.mkdir(folder, status);
}
// Verifies mkdir can create a bucket (container) and that it is discoverable afterwards; cleans up.
@Test public void testCreateBucket() throws Exception { final SpectraDirectoryFeature feature = new SpectraDirectoryFeature(session, new SpectraWriteFeature(session)); final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)); feature.mkdir(test, new TransferStatus()); assertTrue(new SpectraFindFeature(session).find(test)); new SpectraDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/** Number of members matching the selector; recomputed on every call, not cached. */
@Override public int size() { return count(members, selector); }
// Verifies size() counts only members that are both lite and non-local.
@Test public void testSizeWhenLiteMembersSelectedAndNoLocalMember() { Collection<MemberImpl> collection = new MemberSelectingCollection<>(members, and(LITE_MEMBER_SELECTOR, NON_LOCAL_MEMBER_SELECTOR)); assertEquals(1, collection.size()); }
/**
 * Collector that indexes elements by {@code keyFunction} into an {@code ImmutableListMultimap},
 * keeping the elements themselves as values. Delegates to the two-function overload with the
 * identity value function.
 */
public static <K, E> Collector<E, ImmutableListMultimap.Builder<K, E>, ImmutableListMultimap<K, E>> index(Function<? super E, K> keyFunction) { return index(keyFunction, Function.identity()); }
// Verifies index() rejects a null key function with an NPE carrying a clear message.
@Test public void index_with_valueFunction_fails_if_key_function_is_null() { assertThatThrownBy(() -> index(null, MyObj::getText)) .isInstanceOf(NullPointerException.class) .hasMessage("Key function can't be null"); }
public static InetAddress getLocalhost() { final LinkedHashSet<InetAddress> localAddressList = localAddressList(address -> { // 非loopback地址,指127.*.*.*的地址 return false == address.isLoopbackAddress() // 需为IPV4地址 && address instanceof Inet4Address; }); if (CollUtil.isNotEmpty(localAddressList)) { InetAddress address2 = null; for (InetAddress inetAddress : localAddressList) { if (false == inetAddress.isSiteLocalAddress()) { // 非地区本地地址,指10.0.0.0 ~ 10.255.255.255、172.16.0.0 ~ 172.31.255.255、192.168.0.0 ~ 192.168.255.255 return inetAddress; } else if (null == address2) { address2 = inetAddress; } } if (null != address2) { return address2; } } try { return InetAddress.getLocalHost(); } catch (UnknownHostException e) { // ignore } return null; }
// Smoke test (disabled by default, environment-dependent): getLocalhost should resolve some address.
@Test @Disabled public void getLocalhostTest() { final InetAddress localhost = NetUtil.getLocalhost(); assertNotNull(localhost); }
/**
 * Builds an Avro schema from a schema definition.
 *
 * <p>When the definition supplies both a custom reader and writer they are wired in
 * directly. Otherwise the class loader is resolved — an explicitly configured loader
 * wins, falling back to the POJO's own loader when a POJO class is present.
 */
public static <T> AvroSchema<T> of(SchemaDefinition<T> schemaDefinition) {
    if (schemaDefinition.getSchemaReaderOpt().isPresent()
            && schemaDefinition.getSchemaWriterOpt().isPresent()) {
        return new AvroSchema<>(
                schemaDefinition.getSchemaReaderOpt().get(),
                schemaDefinition.getSchemaWriterOpt().get(),
                parseSchemaInfo(schemaDefinition, SchemaType.AVRO));
    }
    // Explicit class loader takes precedence over the POJO's loader; may remain null.
    ClassLoader pojoClassLoader = schemaDefinition.getClassLoader();
    if (pojoClassLoader == null && schemaDefinition.getPojo() != null) {
        pojoClassLoader = schemaDefinition.getPojo().getClassLoader();
    }
    return new AvroSchema<>(parseSchemaInfo(schemaDefinition, SchemaType.AVRO), pojoClassLoader);
}
// Round-trips two POJOs through the Avro schema (encode then decode) and checks value equality.
@Test public void testAllowNullEncodeAndDecode() { AvroSchema<Foo> avroSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build()); Foo foo1 = new Foo(); foo1.setField1("foo1"); foo1.setField2("bar1"); foo1.setField4(new Bar()); Foo foo2 = new Foo(); foo2.setField1("foo2"); foo2.setField2("bar2"); byte[] bytes1 = avroSchema.encode(foo1); Assert.assertTrue(bytes1.length > 0); byte[] bytes2 = avroSchema.encode(foo2); Assert.assertTrue(bytes2.length > 0); Foo object1 = avroSchema.decode(bytes1); Foo object2 = avroSchema.decode(bytes2); assertEquals(object1, foo1); assertEquals(object2, foo2); }
/**
 * Resolves a {@code "host:port"} target and vets host, port and the resolved IP
 * address against the configured allow-lists.
 *
 * <p>All rejections — including malformed input — are reported through the returned
 * future as a {@code TargetAddressDeniedException}, never as a synchronous unchecked
 * exception.
 *
 * @param hostAndPort target in {@code host:port} form
 * @return a future completing with the resolved address, or failing when any check rejects it
 */
public CompletableFuture<InetSocketAddress> resolveAndCheckTargetAddress(String hostAndPort) {
    // Split on the LAST colon so a host part containing colons doesn't break parsing.
    int pos = hostAndPort.lastIndexOf(':');
    if (pos < 1) {
        // Fix: previously a missing colon (pos == -1) made substring(0, -1) throw
        // StringIndexOutOfBoundsException; reject malformed input via the future instead.
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given target address '" + hostAndPort + "' isn't valid."));
    }
    String host = hostAndPort.substring(0, pos);
    int port;
    try {
        port = Integer.parseInt(hostAndPort.substring(pos + 1));
    } catch (NumberFormatException e) {
        // Fix: a non-numeric port previously escaped as an unchecked NumberFormatException.
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given port in '" + hostAndPort + "' isn't allowed."));
    }
    if (!isPortAllowed(port)) {
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given port in '" + hostAndPort + "' isn't allowed."));
    } else if (!isHostAllowed(host)) {
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given host in '" + hostAndPort + "' isn't allowed."));
    } else {
        // Resolve asynchronously, then vet the concrete IP address that came back.
        return NettyFutureUtil.toCompletableFuture(
                inetSocketAddressResolver.resolve(InetSocketAddress.createUnresolved(host, port)))
                .thenCompose(resolvedAddress -> {
                    CompletableFuture<InetSocketAddress> result = new CompletableFuture<>();
                    if (isIPAddressAllowed(resolvedAddress)) {
                        result.complete(resolvedAddress);
                    } else {
                        result.completeExceptionally(new TargetAddressDeniedException(
                                "The IP address of the given host and port '" + hostAndPort + "' isn't allowed."));
                    }
                    return result;
                });
    }
}
// Verifies that a host resolving to an IP outside the allowed CIDR range fails the returned future.
@Test(expectedExceptions = ExecutionException.class, expectedExceptionsMessageRegExp = ".* The IP address of the given host and port 'myhost:6650' isn't allowed.") public void shouldPreventInvalidIPAddress() throws Exception { BrokerProxyValidator brokerProxyValidator = new BrokerProxyValidator( createMockedAddressResolver("1.2.3.4"), "myhost" , "1.3.0.0/16" , "6650"); brokerProxyValidator.resolveAndCheckTargetAddress("myhost:6650").get(); }
/**
 * Returns the external storage directory path when the storage is mounted
 * (read-write or read-only), otherwise {@code null}.
 */
public String getMountedExternalStorageDirectoryPath() {
    final String state = Environment.getExternalStorageState();
    final boolean mounted = Environment.MEDIA_MOUNTED.equals(state)
            || Environment.MEDIA_MOUNTED_READ_ONLY.equals(state);
    return mounted ? getExternalStorageDirectoryPath() : null;
}
// Verifies an unmounted/unknown storage state yields a null directory path.
@Test public void getMountedExternalStorageDirectoryPathReturnsNullWhenUnknown() { ShadowEnvironment.setExternalStorageState(Environment.MEDIA_UNKNOWN); assertThat(contextUtil.getMountedExternalStorageDirectoryPath(), is(nullValue())); }
/** SQL {@code <=} operator for INTEGER operands (carried as Java longs by the engine). */
@ScalarOperator(LESS_THAN_OR_EQUAL) @SqlType(StandardTypes.BOOLEAN) public static boolean lessThanOrEqual(@SqlType(StandardTypes.INTEGER) long left, @SqlType(StandardTypes.INTEGER) long right) { return left <= right; }
// Exercises <= across equal, greater and smaller INTEGER operand combinations.
@Test public void testLessThanOrEqual() { assertFunction("INTEGER'37' <= INTEGER'37'", BOOLEAN, true); assertFunction("INTEGER'37' <= INTEGER'17'", BOOLEAN, false); assertFunction("INTEGER'17' <= INTEGER'37'", BOOLEAN, true); assertFunction("INTEGER'17' <= INTEGER'17'", BOOLEAN, true); }
/**
 * Imports a container of video albums and videos into Google Photos.
 *
 * <p>Albums are created first so videos can later be attached to them; videos are
 * then uploaded in batches of 49 (the API allows at most 50 creations per call).
 * Returns an OK result carrying the total number of uploaded bytes.
 *
 * @param jobId    transfer job this import belongs to; also keys the cached Photos client
 * @param executor idempotent executor that skips already-imported items and records errors
 * @param authData OAuth tokens used to build a Photos client on first use for this job
 * @param data     albums and videos to import; {@code null} means nothing to do
 */
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor executor, TokensAndUrlAuthData authData, VideosContainerResource data) throws Exception {
    if (data == null) {
        // Nothing to do
        return ImportResult.OK;
    }
    // Reuse the per-job Photos client, creating it lazily on first use.
    // NOTE(review): containsKey/put is not atomic — if importItem can run concurrently
    // for the same jobId this may build two clients; confirm caller threading.
    PhotosLibraryClient client;
    if (clientsMap.containsKey(jobId)) {
        client = clientsMap.get(jobId);
    } else {
        client = buildPhotosLibraryClient(appCredentials, authData);
        clientsMap.put(jobId, client);
    }
    // Create albums first so videos can reference them.
    for (VideoAlbum album : data.getAlbums()) {
        executor.importAndSwallowIOExceptions(album, (a) -> {
            String title = GooglePhotosImportUtils.cleanAlbumTitle(a.getName());
            return ItemImportResult.success(client.createAlbum(title).getId());
        });
    }
    long bytes = 0L;
    // Uploads videos
    final Collection<VideoModel> videos = data.getVideos();
    if (videos != null && videos.size() > 0) {
        Stream<VideoModel> stream = videos.stream()
            .filter(video -> shouldImport(video, executor))
            .map(this::transformVideoName);
        // We partition into groups of 49 as 50 is the maximum number of items that can be created in
        // one call. (We use 49 to avoid potential off by one errors)
        // https://developers.google.com/photos/library/guides/upload-media#creating-media-item
        final UnmodifiableIterator<List<VideoModel>> batches = Iterators.partition(stream.iterator(), 49);
        while (batches.hasNext()) {
            long batchBytes = uploadBatchOfVideos( jobId, batches.next(), dataStore, client, executor, connectionProvider, monitor);
            bytes += batchBytes;
        }
    }
    final ImportResult result = ImportResult.OK;
    // Report the accumulated upload size alongside the OK status.
    return result.copyWithBytes(bytes);
}
// Verifies two videos destined for different albums are uploaded and created in separate
// batchCreateMediaItems calls (with/without album id) and that the returned byte count sums both files.
@Test public void importTwoVideosInDifferentAlbums() throws Exception { String googleAlbumId = "googleId"; Album expected = Album.newBuilder().setId(googleAlbumId).setTitle("albumName").build(); when(client.createAlbum(anyString())).thenReturn(expected); // Mock uploads when(client.uploadMediaItem(any())) .thenReturn( UploadMediaItemResponse.newBuilder().setUploadToken("token1").build(), UploadMediaItemResponse.newBuilder().setUploadToken("token2").build()); // Mock creation response final NewMediaItemResult newMediaItemResult = NewMediaItemResult.newBuilder() .setStatus(Status.newBuilder().setCode(Code.OK_VALUE).build()) .setUploadToken("token1") .build(); final NewMediaItemResult newMediaItemResult2 = NewMediaItemResult.newBuilder() .setStatus(Status.newBuilder().setCode(Code.OK_VALUE).build()) .setUploadToken("token2") .build(); BatchCreateMediaItemsResponse response = BatchCreateMediaItemsResponse.newBuilder() .addNewMediaItemResults(newMediaItemResult) .build(); BatchCreateMediaItemsResponse response2 = BatchCreateMediaItemsResponse.newBuilder() .addNewMediaItemResults(newMediaItemResult2) .build(); NewMediaItem mediaItem = NewMediaItemFactory.createNewMediaItem("token1", VIDEO_DESCRIPTION); NewMediaItem mediaItem2 = NewMediaItemFactory.createNewMediaItem("token2", VIDEO_DESCRIPTION); when(client.batchCreateMediaItems(eq(googleAlbumId), eq(List.of(mediaItem)))) .thenReturn(response); when(client.batchCreateMediaItems(eq(List.of(mediaItem2)))) .thenReturn(response2); InMemoryIdempotentImportExecutor executor = new InMemoryIdempotentImportExecutor(mock(Monitor.class)); long length = googleVideosImporter .importItem( jobId, executor, mock(TokensAndUrlAuthData.class), new VideosContainerResource( List.of(new VideoAlbum(ALBUM_ID, "name", null)), List.of( new VideoModel( VIDEO_TITLE, VIDEO_URI, VIDEO_DESCRIPTION, MP4_MEDIA_TYPE, VIDEO_ID, ALBUM_ID, false, null), new VideoModel( VIDEO_TITLE, VIDEO_URI, VIDEO_DESCRIPTION, MP4_MEDIA_TYPE, "myId2", null, false, null)))) 
.getBytes() .get(); assertEquals(64L, length,"Expected the number of bytes to be the two files of 32L."); assertEquals(0, executor.getErrors().size(),"Expected executor to have no errors."); }
/**
 * Returns a new metadata instance with the given update applied.
 *
 * <p>Offsets, max timestamp, segment size and leader epochs carry over unchanged;
 * broker id, event timestamp, custom metadata and state are taken from the update.
 *
 * @throws IllegalArgumentException if the update targets a different segment id
 */
public RemoteLogSegmentMetadata createWithUpdates(RemoteLogSegmentMetadataUpdate rlsmUpdate) {
    if (!remoteLogSegmentId.equals(rlsmUpdate.remoteLogSegmentId())) {
        throw new IllegalArgumentException("Given rlsmUpdate does not have this instance's remoteLogSegmentId.");
    }
    // Pull the updated fields out by name for readability before rebuilding.
    final int updatedBrokerId = rlsmUpdate.brokerId();
    final long updatedEventTimestampMs = rlsmUpdate.eventTimestampMs();
    return new RemoteLogSegmentMetadata(
            remoteLogSegmentId,
            startOffset,
            endOffset,
            maxTimestampMs,
            updatedBrokerId,
            updatedEventTimestampMs,
            segmentSizeInBytes,
            rlsmUpdate.customMetadata(),
            rlsmUpdate.state(),
            segmentLeaderEpochs);
}
// Verifies createWithUpdates merges the update's fields into a new instance and leaves the original untouched.
@Test void createWithUpdates() { int brokerId = 0; int eventTimestamp = 0; int brokerIdFinished = 1; int timestampFinished = 1; long startOffset = 0L; long endOffset = 100L; int segmentSize = 123; long maxTimestamp = -1L; Map<Integer, Long> segmentLeaderEpochs = new HashMap<>(); segmentLeaderEpochs.put(0, 0L); RemoteLogSegmentId segmentId = new RemoteLogSegmentId(TP0, Uuid.randomUuid()); RemoteLogSegmentMetadata segmentMetadata = new RemoteLogSegmentMetadata(segmentId, startOffset, endOffset, maxTimestamp, brokerId, eventTimestamp, segmentSize, segmentLeaderEpochs); CustomMetadata customMetadata = new CustomMetadata(new byte[]{0, 1, 2, 3}); RemoteLogSegmentMetadataUpdate segmentMetadataUpdate = new RemoteLogSegmentMetadataUpdate( segmentId, timestampFinished, Optional.of(customMetadata), RemoteLogSegmentState.COPY_SEGMENT_FINISHED, brokerIdFinished); RemoteLogSegmentMetadata updatedMetadata = segmentMetadata.createWithUpdates(segmentMetadataUpdate); RemoteLogSegmentMetadata expectedUpdatedMetadata = new RemoteLogSegmentMetadata( segmentId, startOffset, endOffset, maxTimestamp, brokerIdFinished, timestampFinished, segmentSize, Optional.of(customMetadata), RemoteLogSegmentState.COPY_SEGMENT_FINISHED, segmentLeaderEpochs ); assertEquals(expectedUpdatedMetadata, updatedMetadata); // Check that the original metadata have not changed. assertEquals(segmentId, segmentMetadata.remoteLogSegmentId()); assertEquals(startOffset, segmentMetadata.startOffset()); assertEquals(endOffset, segmentMetadata.endOffset()); assertEquals(maxTimestamp, segmentMetadata.maxTimestampMs()); assertEquals(brokerId, segmentMetadata.brokerId()); assertEquals(eventTimestamp, segmentMetadata.eventTimestampMs()); assertEquals(segmentSize, segmentMetadata.segmentSizeInBytes()); assertEquals(segmentLeaderEpochs, segmentMetadata.segmentLeaderEpochs()); }
/** Drops all block references by resetting to the shared empty array (never null). */
public void clearBlocks() { this.blocks = BlockInfo.EMPTY_ARRAY; }
// Verifies clearBlocks() empties the block list of a file that had one block.
@Test
public void testClearBlocks() {
    INodeFile toBeCleared = createINodeFiles(1, "toBeCleared")[0];
    assertEquals(1, toBeCleared.getBlocks().length);
    toBeCleared.clearBlocks();
    // assertEquals reports the actual length on failure, unlike assertTrue(x == 0).
    assertEquals(0, toBeCleared.getBlocks().length);
}
/**
 * Builds a factory that, given a KSQL config, instantiates the UDF class, configures
 * it (inside the extension sandbox), and wraps it with metrics when a metrics
 * registry is available.
 *
 * @param method                   the annotated UDF method (its declaring class is instantiated)
 * @param udfDescriptionAnnotation supplies the function's registered name
 * @param functionName             key used to look up function-specific config properties
 * @param invoker                  reflective invoker bound to {@code method}
 * @param sensorName               metrics sensor to record invocations under, when metrics exist
 */
private Function<KsqlConfig, Kudf> getUdfFactory( final Method method, final UdfDescription udfDescriptionAnnotation, final String functionName, final FunctionInvoker invoker, final String sensorName ) {
    return ksqlConfig -> {
        final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance( method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (actualUdf instanceof Configurable) {
            // configure() runs user code: enter the UDF sandbox so the security
            // manager restricts what the extension may touch.
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) actualUdf) .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName));
            } finally {
                // Always pop, even when configure() throws, to keep the sandbox stack balanced.
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf);
        // Wrap with a metric-recording decorator only when metrics are enabled.
        return metrics.<Kudf>map(m -> new UdfMetricProducer( m.getSensor(sensorName), theUdf, Time.SYSTEM )).orElse(theUdf);
    };
}
// Verifies a UDF whose actual return type conflicts with its declared return type fails at resolution time.
@Test public void shouldThrowOnReturnTypeMismatch() { // Given: final UdfFactory returnIncompatible = FUNC_REG .getUdfFactory(of("returnincompatible")); final SqlDecimal decimal = decimal(2, 1); final List<SqlArgument> args = singletonList(SqlArgument.of(decimal)); final KsqlScalarFunction function = returnIncompatible.getFunction(args); // When: final Exception e = assertThrows( KsqlException.class, () -> function.getReturnType(args) ); // Then: assertThat(e.getMessage(), containsString( "Return type DECIMAL(2, 1) of UDF RETURNINCOMPATIBLE does not " + "match the declared return type STRING.")); }
@JsonIgnore public long deriveInstanceStepConcurrency() { Long stepConcurrency = runProperties.getStepConcurrency(); // if both are set, should return the smaller one if (instanceStepConcurrency != null && stepConcurrency != null) { return Math.min(instanceStepConcurrency, stepConcurrency); } return ObjectHelper.valueOrDefault( ObjectHelper.valueOrDefault(instanceStepConcurrency, stepConcurrency), Defaults.DEFAULT_STEP_CONCURRENCY); }
// Exercises default, property-only, instance-only and min-of-both concurrency derivations.
@Test public void testDeriveInstanceStepConcurrency() throws Exception { WorkflowSummary summary = loadObject("fixtures/parameters/sample-wf-summary-params.json", WorkflowSummary.class); // use default if no concurrences are set assertEquals(Defaults.DEFAULT_STEP_CONCURRENCY, summary.deriveInstanceStepConcurrency()); // use properties' step concurrency summary.getRunProperties().setStepConcurrency(10L); assertEquals(10L, summary.deriveInstanceStepConcurrency()); // use workflow instance step concurrency summary.getRunProperties().setStepConcurrency(null); summary.setInstanceStepConcurrency(20L); assertEquals(20L, summary.deriveInstanceStepConcurrency()); // use the min of both concurrences summary.getRunProperties().setStepConcurrency(10L); summary.setInstanceStepConcurrency(20L); assertEquals(10L, summary.deriveInstanceStepConcurrency()); }
/**
 * Builds the trust managers for a connection.
 *
 * <p>Without client auth, all certificates are trusted. With auth, the trust chain is
 * built from {@code trustCertPath} ({@code null} path yields {@code null}); if building
 * fails the method degrades to trusting all certificates rather than failing.
 */
public static TrustManager[] trustManager(boolean needAuth, String trustCertPath) {
    if (!needAuth) {
        // Authentication disabled: accept every certificate.
        return trustAll;
    }
    if (trustCertPath == null) {
        return null;
    }
    try {
        return buildSecureTrustManager(trustCertPath);
    } catch (SSLException e) {
        // Keep the connection usable by degrading to the trust-all manager.
        LOGGER.warn("degrade trust manager as build failed, " + "will trust all certs.");
        return trustAll;
    }
}
// Verifies a non-existent cert path degrades to the permissive trust-all X509TrustManager.
@Test void testTrustManagerNonExist() throws CertificateException { TrustManager[] actual = SelfTrustManager.trustManager(true, "non-exist-cert.pem"); assertNotNull(actual); assertEquals(1, actual.length); assertTrue(actual[0] instanceof X509TrustManager); assertTrue(actual[0].getClass().isAnonymousClass()); X509TrustManager x509TrustManager = (X509TrustManager) actual[0]; assertNull(x509TrustManager.getAcceptedIssuers()); x509TrustManager.checkClientTrusted(null, "a"); x509TrustManager.checkServerTrusted(null, "b"); }
/**
 * Computes the total heap memory (MB) a worker slot is assigned for the given topology
 * configuration.
 *
 * <p>Worker heap is taken from the first configured source, in priority order:
 * topology GC childopts, daemon GC childopts, topology worker childopts, daemon worker
 * childopts, then the explicit {@code WORKER_HEAP_MEMORY_MB} setting (defaulting to
 * 768 MB). The log writer's heap, when configured, is added on top since it runs in
 * the same slot.
 *
 * @param topConf topology configuration map
 * @return assigned memory for the slot in megabytes
 */
public static double getAssignedMemoryForSlot(final Map<String, Object> topConf) {
    // Fallback heap size (MB) when nothing specifies a heap explicitly.
    // (Fix: was a boxed Integer; also removed a dead `= null` store before first use.)
    final int topologyWorkerDefaultMemoryAllocation = 768;

    double totalWorkerMemory = 0.0;

    // GC childopts take precedence: topology-level first, then daemon-level.
    Double memGcChildopts = Utils.parseJvmHeapMemByChildOpts(
            ConfigUtils.getValueAsList(Config.TOPOLOGY_WORKER_GC_CHILDOPTS, topConf), null);
    if (memGcChildopts == null) {
        memGcChildopts = Utils.parseJvmHeapMemByChildOpts(
                ConfigUtils.getValueAsList(Config.WORKER_GC_CHILDOPTS, topConf), null);
    }

    Double memTopologyWorkerChildopts = Utils.parseJvmHeapMemByChildOpts(
            ConfigUtils.getValueAsList(Config.TOPOLOGY_WORKER_CHILDOPTS, topConf), null);
    Double memWorkerChildopts = Utils.parseJvmHeapMemByChildOpts(
            ConfigUtils.getValueAsList(Config.WORKER_CHILDOPTS, topConf), null);

    if (memGcChildopts != null) {
        totalWorkerMemory += memGcChildopts;
    } else if (memTopologyWorkerChildopts != null) {
        totalWorkerMemory += memTopologyWorkerChildopts;
    } else if (memWorkerChildopts != null) {
        totalWorkerMemory += memWorkerChildopts;
    } else {
        // No childopts anywhere: fall back to the explicit heap setting or the default.
        Object workerHeapMemoryMb = topConf.get(Config.WORKER_HEAP_MEMORY_MB);
        totalWorkerMemory += ObjectReader.getInt(
                workerHeapMemoryMb, topologyWorkerDefaultMemoryAllocation);
    }

    // The log writer runs in the same slot; add its heap when configured.
    List<String> topoWorkerLwChildopts = ConfigUtils.getValueAsList(
            Config.TOPOLOGY_WORKER_LOGWRITER_CHILDOPTS, topConf);
    if (topoWorkerLwChildopts != null) {
        totalWorkerMemory += Utils.parseJvmHeapMemByChildOpts(topoWorkerLwChildopts, 0.0);
    }
    return totalWorkerMemory;
}
// Verifies the default worker memory allocation is used when no childopts or heap settings exist.
@Test public void getAssignedMemoryForSlot_allNull() { Map<String, Object> topConf = getEmptyConfig(); assertEquals(TOPOLOGY_WORKER_DEFAULT_MEMORY_ALLOCATION, Cluster.getAssignedMemoryForSlot(topConf), 0); }
/**
 * Pages through rules visible to the current user, optionally filtered by selector id
 * and a keyword derived from the rule name.
 *
 * @param selectorId  selector to filter by (may be null)
 * @param name        keyword matched against rule names (may be null)
 * @param currentPage 1-based page index, required
 * @param pageSize    page size, required
 */
@GetMapping("") public AdminResult<CommonPager<RuleVO>> queryRules(final String selectorId, final String name, @RequestParam @NotNull final Integer currentPage, @RequestParam @NotNull final Integer pageSize) {
    final RuleQueryCondition condition = new RuleQueryCondition();
    // Scope results to the user bound to the current session.
    condition.setUserId(SessionUtil.visitor().getUserId());
    condition.setSelectors(ListUtil.of(selectorId));
    // The name parameter is treated as a keyword search, not an exact match.
    condition.setKeyword(name);
    return searchAdaptor(new PageCondition<>(currentPage, pageSize, condition));
}
// Verifies the paged rule query endpoint returns the mocked pager payload with a success message.
@Test public void testQueryRules() throws Exception { given(this.ruleService.searchByPageToPager(any())).willReturn(commonPager); String urlTemplate = "/rule?selectorId={selectorId}&name={name}&currentPage={currentPage}&pageSize={pageSize}"; this.mockMvc.perform(MockMvcRequestBuilders.get(urlTemplate, "168", "/http/test/**", 1, 12)) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.QUERY_SUCCESS))) .andExpect(jsonPath("$.data.dataList[0].id", is(ruleVO.getId()))) .andReturn(); }
/**
 * Null-safe equality check: true when both are the same reference (including both null)
 * or {@code first.equals(second)}. Equivalent to {@code Objects.equals} for platforms
 * where that API is unavailable.
 */
public static boolean objectEquals(@Nullable Object first, @Nullable Object second) { //noinspection EqualsReplaceableByObjectsCall
    return (first == second) || (first != null && first.equals(second)); }
// Exercises objectEquals across null/null, null/non-null, and value-equal vs. value-different
// String instances (new String(...) is deliberate: forces distinct references).
@Test public void testObjectsEqual() { Assert.assertTrue(CompatUtils.objectEquals(null, null)); Assert.assertFalse(CompatUtils.objectEquals(null, new Object())); Assert.assertFalse(CompatUtils.objectEquals(new Object(), null)); Assert.assertTrue(CompatUtils.objectEquals(new String("test"), new String("test"))); Assert.assertFalse(CompatUtils.objectEquals(new String("test"), new String("test1"))); }
/**
 * Copies a file or folder to the target path via the EUE REST API.
 *
 * <p>If the target already exists it is trashed first. The copy request is issued
 * against the alias endpoint for ROOT/TRASH parents and the regular children endpoint
 * otherwise, always with the "rename" conflict policy. Because "rename" keeps the
 * source name, a follow-up PATCH renames the copy when source and target names differ.
 *
 * @return the target path
 * @throws BackgroundException mapped from any API failure
 */
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
    try {
        final EueApiClient client = new EueApiClient(session);
        if(status.isExists()) {
            if(log.isWarnEnabled()) {
                log.warn(String.format("Trash file %s to be replaced with %s", target, file));
            }
            new EueTrashFeature(session, fileid).delete(Collections.singletonMap(target, status), callback, new Delete.DisabledCallback());
        }
        final String resourceId = fileid.getFileId(file);
        final String parentResourceId = fileid.getFileId(target.getParent());
        // NOTE(review): targetResourceId is never read — dead local, candidate for removal.
        String targetResourceId = null;
        final ResourceCopyResponseEntries resourceCopyResponseEntries;
        switch(parentResourceId) {
            case EueResourceIdProvider.ROOT:
            case EueResourceIdProvider.TRASH:
                // ROOT and TRASH are addressed by alias, not by resource id.
                resourceCopyResponseEntries = new CopyChildrenForAliasApiApi(client) .resourceAliasAliasChildrenCopyPost(parentResourceId, Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null, "rename", null);
                break;
            default:
                resourceCopyResponseEntries = new CopyChildrenApi(client).resourceResourceIdChildrenCopyPost(parentResourceId, Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null, "rename", null);
        }
        if(null == resourceCopyResponseEntries) {
            // Copy of single file will return 200 status code with empty response body
        }
        else {
            for(ResourceCopyResponseEntry resourceCopyResponseEntry : resourceCopyResponseEntries.values()) {
                switch(resourceCopyResponseEntry.getStatusCode()) {
                    case HttpStatus.SC_CREATED:
                        // Cache the new resource id parsed from the Location header.
                        fileid.cache(target, EueResourceIdProvider.getResourceIdFromResourceUri(resourceCopyResponseEntry.getHeaders().getLocation()));
                        break;
                    default:
                        log.warn(String.format("Failure %s copying file %s", resourceCopyResponseEntries, file));
                        throw new EueExceptionMappingService().map(new ApiException(resourceCopyResponseEntry.getReason(), null, resourceCopyResponseEntry.getStatusCode(), client.getResponseHeaders()));
                }
            }
        }
        listener.sent(status.getLength());
        if(!StringUtils.equals(file.getName(), target.getName())) {
            // The copy kept the source name ("rename" conflict mode); patch it to the target name.
            final ResourceUpdateModel resourceUpdateModel = new ResourceUpdateModel();
            final ResourceUpdateModelUpdate resourceUpdateModelUpdate = new ResourceUpdateModelUpdate();
            final Uifs uifs = new Uifs();
            uifs.setName(target.getName());
            resourceUpdateModelUpdate.setUifs(uifs);
            resourceUpdateModel.setUpdate(resourceUpdateModelUpdate);
            final ResourceMoveResponseEntries resourceMoveResponseEntries = new UpdateResourceApi(client).resourceResourceIdPatch(fileid.getFileId(target), resourceUpdateModel, null, null, null);
            if(null == resourceMoveResponseEntries) {
                // Move of single file will return 200 status code with empty response body
            }
            else {
                for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) {
                    switch(resourceMoveResponseEntry.getStatusCode()) {
                        case HttpStatus.SC_CREATED:
                            break;
                        default:
                            log.warn(String.format("Failure %s renaming file %s", resourceMoveResponseEntry, file));
                            throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(), null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                    }
                }
            }
        }
        return target;
    }
    catch(ApiException e) {
        throw new EueExceptionMappingService().map("Cannot copy {0}", e, file);
    }
}
// Verifies recursive copy duplicates a folder and its file under a new id while leaving the source intact.
@Test public void testCopyRecursive() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path testFolder = new EueDirectoryFeature(session, fileid).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir( new Path(testFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path sourceFile = new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); createFile(fileid, sourceFile, RandomUtils.nextBytes(541)); final Path targetFolder = new Path(testFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); assertNull(targetFolder.attributes().getFileId()); final EueCopyFeature feature = new EueCopyFeature(session, fileid); feature.copy(sourceFolder, targetFolder, new TransferStatus(), new DisabledLoginCallback(), new DisabledStreamListener()); assertNotEquals(sourceFolder.attributes().getFileId(), targetFolder.attributes().getFileId()); assertTrue(new EueFindFeature(session, fileid).find(targetFolder)); assertTrue(new EueFindFeature(session, fileid).find(new Path(targetFolder, sourceFile.getName(), sourceFile.getType()))); assertTrue(new DefaultFindFeature(session).find(new Path(targetFolder, sourceFile.getName(), sourceFile.getType()))); assertTrue(new EueFindFeature(session, fileid).find(sourceFolder)); assertTrue(new EueFindFeature(session, fileid).find(sourceFile)); assertTrue(new DefaultFindFeature(session).find(sourceFile)); new EueDeleteFeature(session, fileid).delete(Arrays.asList(sourceFolder, targetFolder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Returns the namespace this client operates under.
 */
@Override
public String getNamespace() {
    // Delegate to the namespace holder maintained by this instance.
    final String current = namespace.getNamespace();
    return current;
}
// Verifies namespace resolution: a plain client reports "", a builder-configured
// client reports its fixed namespace, and usingNamespace() derives facades with
// their own namespace without affecting the parent client.
@Test
public void testGetNamespace() throws Exception {
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    // client2 is intentionally never started: getNamespace() must work
    // without an established connection.
    CuratorFramework client2 = CuratorFrameworkFactory.builder()
        .namespace("snafu")
        .retryPolicy(new RetryOneTime(1))
        .connectString("foo")
        .build();
    try {
        client.start();
        CuratorFramework fooClient = client.usingNamespace("foo");
        CuratorFramework barClient = client.usingNamespace("bar");
        assertEquals(client.getNamespace(), "");
        assertEquals(client2.getNamespace(), "snafu");
        assertEquals(fooClient.getNamespace(), "foo");
        assertEquals(barClient.getNamespace(), "bar");
    } finally {
        CloseableUtils.closeQuietly(client2);
        CloseableUtils.closeQuietly(client);
    }
}
/**
 * Substitutes ${...} variable references in {@code val} using properties
 * from {@code pc1}. Convenience overload that passes no secondary
 * property container.
 */
public static String substVars(String val, PropertyContainer pc1) {
    return substVars(val, pc1, null);
}
// A -> B -> C -> A forms a cycle; substitution must fail fast with a message
// spelling out the reference chain. The timeout guards against the
// implementation spinning forever instead of detecting the cycle.
@Test(timeout = 1000)
public void detectCircularReferences4() {
    context.putProperty("A", "${B}");
    context.putProperty("B", "${C}");
    context.putProperty("C", "${A}");
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Circular variable reference detected while parsing input [${C} --> ${A} --> ${B} --> ${C}]");
    OptionHelper.substVars("${C} and ${A}", context);
}
/**
 * Resolves the candidate datacenters for a geographic location, merging v4
 * and v6 results (duplicates removed, first occurrence wins). When a
 * subdivision is given but has no mapping, the country-level entry is
 * appended as a fallback for the corresponding protocol.
 */
public List<String> getDatacentersByGeo(
        String continent, String country, Optional<String> subdivision) {
    // Primary lookups use the requested subdivision (if any).
    final List<String> v4Options = datacentersFor(continent, country, subdivision, Protocol.v4);
    final List<String> v6Options = datacentersFor(continent, country, subdivision, Protocol.v6);
    // Country-level fallback, only when a subdivision was requested but
    // produced no match for that protocol.
    final List<String> v4Fallback = countryFallback(v4Options, continent, country, subdivision, Protocol.v4);
    final List<String> v6Fallback = countryFallback(v6Options, continent, country, subdivision, Protocol.v6);
    // Order matters: exact matches first, fallbacks last; distinct() keeps
    // the first occurrence.
    return Stream.of(v4Options, v6Options, v4Fallback, v6Fallback)
        .flatMap(List::stream)
        .distinct()
        .toList();
}

// Looks up the datacenter list for one (geo, protocol) key; empty if unmapped.
private List<String> datacentersFor(
        String continent, String country, Optional<String> subdivision, Protocol protocol) {
    return this.geoToDatacenter.getOrDefault(
        new GeoKey(continent, country, subdivision, protocol), Collections.emptyList());
}

// Returns the country-level entry when the primary subdivision lookup was
// empty; otherwise an empty list (no fallback needed or possible).
private List<String> countryFallback(
        List<String> primary, String continent, String country,
        Optional<String> subdivision, Protocol protocol) {
    if (!primary.isEmpty() || !subdivision.isPresent()) {
        return Collections.emptyList();
    }
    return datacentersFor(continent, country, Optional.empty(), protocol);
}
// An exact subdivision match (VA) must return its own mapping; an unmapped
// subdivision (MD) must fall back to the country-level entry.
@Test
void testGetFastestDataCentersByGeoFallback() {
    var actualExactMatch = basicTable.getDatacentersByGeo("NA", "US", Optional.of("VA"));
    assertThat(actualExactMatch).isEqualTo(List.of("datacenter-2", "datacenter-1"));
    var actualApproximateMatch = basicTable.getDatacentersByGeo("NA", "US", Optional.of("MD"));
    assertThat(actualApproximateMatch).isEqualTo(List.of("datacenter-3", "datacenter-4"));
}
// Opens a connection to the MongoDB instance at the default ServerAddress
// (localhost:27017) using credentials from class-level constants, and caches
// both the client and the database handle for later use.
// NOTE(review): MONGO_USER/MONGO_PASSWORD come from constants not visible
// here — confirm they are test fixtures, not hard-coded production secrets.
@Override
public void connect() {
    MongoCredential mongoCredential = MongoCredential.createCredential(MONGO_USER, DATABASE_NAME, MONGO_PASSWORD.toCharArray());
    MongoClientOptions options = MongoClientOptions.builder().build();
    client = new MongoClient(new ServerAddress(), mongoCredential, options);
    db = client.getDatabase(DATABASE_NAME);
}
// Smoke test: connecting must not throw (requires a reachable MongoDB).
@Test
void connect() {
    assertDoesNotThrow(() -> mongoDb.connect());
}
// Converts a Java object into its FEEL expression string via the FEEL "code"
// function (e.g. "Test" -> "\"Test\"", null -> "null"). A failed conversion
// is surfaced as an IllegalArgumentException wrapping the FEEL event's cause.
@Override
public String fromObjectToExpression(Object value) {
    FEELFnResult<String> invoke = codeFunction.invoke(value);
    return invoke.getOrElseThrow(feelEvent -> new IllegalArgumentException("This should not happen", feelEvent.getSourceException()));
}
// Covers the FEEL rendering of the main scalar kinds: strings are quoted,
// booleans and numbers are literal, dates use the date(...) constructor,
// and null renders as the literal "null".
@Test
public void fromObjectToExpression() {
    assertThat(expressionEvaluator.fromObjectToExpression("Test")).isEqualTo("\"Test\"");
    assertThat(expressionEvaluator.fromObjectToExpression(false)).isEqualTo("false");
    assertThat(expressionEvaluator.fromObjectToExpression(BigDecimal.valueOf(1))).isEqualTo("1");
    assertThat(expressionEvaluator.fromObjectToExpression(LocalDate.of(2019, 5, 13))).isEqualTo("date( \"2019-05-13\" )");
    assertThat(expressionEvaluator.fromObjectToExpression(null)).isEqualTo("null");
}
// Extracts the PRETTY_NAME value from an os-release style file, accepting only
// names that match patternStr. If several lines match, the last one wins.
// The most recently read file's lines are cached in lastDataFile/lastLines so
// repeated queries of the same file avoid disk I/O (stale reads are accepted
// by design). Returns "" when the file is missing or unreadable.
@NonNull
String readOperatingSystemName(File dataFile, @NonNull String patternStr) {
    if (dataFile == null || !dataFile.exists()) {
        return "";
    }
    Pattern pattern = Pattern.compile("^PRETTY_NAME=[\"](" + patternStr + ")[\"]");
    String name = "";
    try {
        // Serve from the cache when the same file is queried again.
        List<String> lines = dataFile.equals(lastDataFile) ? lastLines : Files.readAllLines(dataFile.toPath());
        for (String line : lines) {
            Matcher matcher = pattern.matcher(line);
            if (matcher.matches()) {
                name = matcher.group(1);
            }
        }
        if (!dataFile.equals(lastDataFile)) {
            // Copy defensively so later mutation of `lines` cannot leak in.
            lastDataFile = dataFile;
            lastLines = new ArrayList<>(lines);
        }
    } catch (IOException ioe) {
        LOGGER.log(Level.SEVERE, "File read exception", ioe);
    }
    return name;
}
// A nonexistent file must yield the empty string, not an exception.
@Test
public void testReadOperatingSystemNameMissingFile() {
    assertThat(monitor.readOperatingSystemName(new File("/this/file/does/not/exist"), ".*"), is(""));
}
/**
 * Hashes each feature name into one of {@code dimension} buckets and emits a
 * renamed feature {@code tag + "-hash=" + bucket}.
 *
 * <p>If {@code preserveValue} is set the original feature value is kept;
 * otherwise the value is a sign (+1/-1) derived from a second hash of the
 * name, as in signed feature hashing.
 *
 * @param tag prefix for the generated feature names
 * @param features input features; never mutated
 * @return one hashed feature per input feature, in input order
 */
@Override
public List<Feature> map(String tag, List<Feature> features) {
    // Presize: the output has exactly one entry per input feature.
    List<Feature> hashedFeatures = new ArrayList<>(features.size());
    for (Feature feature : features) {
        int hash = MurmurHash3.murmurhash3_x86_32(feature.getName(), 0, feature.getName().length(), hashSeed);
        // Clear the sign bit so the modulus below yields a non-negative bucket.
        hash = hash >>> 1;
        int code = hash % dimension;
        double value;
        if (preserveValue) {
            value = feature.getValue();
        } else {
            // Independent hash (valueHashSeed) decides the sign.
            int bit = MurmurHash3.murmurhash3_x86_32(feature.getName(), 0, feature.getName().length(), valueHashSeed) & 1;
            value = bit == 1 ? 1 : -1;
        }
        hashedFeatures.add(new Feature(tag + "-hash=" + code, value));
    }
    return hashedFeatures;
}
// With preserveValue the original values survive hashing; without it the
// values collapse to +1/-1 depending on the name's sign hash.
@Test
public void negativeValuesTest() {
    List<Feature> input = new ArrayList<>();
    Feature posValue = new Feature("Testing", 2.0);
    input.add(posValue);
    Feature negValue = new Feature("Test", 2.0);
    input.add(negValue);
    FeatureHasher preserving = new FeatureHasher(10, true);
    FeatureHasher notPreserving = new FeatureHasher(10, false);
    List<Feature> preservingOutput = preserving.map("test", input);
    List<Feature> notPreservingOutput = notPreserving.map("test", input);
    assertEquals(2.0, preservingOutput.get(0).getValue());
    assertEquals(2.0, preservingOutput.get(1).getValue());
    assertEquals(1.0, notPreservingOutput.get(0).getValue());
    assertEquals(-1.0, notPreservingOutput.get(1).getValue());
}
// Returns the current ConfigData snapshot for each requested group key,
// keyed by the original string. Note: an unrecognised group key makes
// ConfigGroupEnum.valueOf throw IllegalArgumentException before any result
// is assembled.
@GetMapping("/fetch")
public ShenyuAdminResult fetchConfigs(@NotNull final String[] groupKeys) {
    Map<String, ConfigData<?>> result = Maps.newHashMap();
    for (String groupKey : groupKeys) {
        ConfigData<?> data = longPollingListener.fetchConfig(ConfigGroupEnum.valueOf(groupKey));
        result.put(groupKey, data);
    }
    return ShenyuAdminResult.success(ShenyuResultMessage.SUCCESS, result);
}
// Stubs the long-polling listener for APP_AUTH and asserts the endpoint
// echoes the group's ConfigData (checked via its md5) with a success message.
@Test
public void testFetchConfigs() throws Exception {
    // Configure HttpLongPollingDataChangedListener.fetchConfig(...).
    final ConfigData<?> configData = new ConfigData<>("md5-value1", 0L, Collections.emptyList());
    doReturn(configData).when(mockLongPollingListener).fetchConfig(ConfigGroupEnum.APP_AUTH);
    // Run the test
    final MockHttpServletResponse response = mockMvc.perform(get("/configs/fetch")
            .param("groupKeys", new String[]{ConfigGroupEnum.APP_AUTH.toString()})
            .accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.message", is(ShenyuResultMessage.SUCCESS)))
        .andExpect(jsonPath("$.data['APP_AUTH'].md5", is("md5-value1")))
        .andReturn().getResponse();
    // Verify the results
    assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
}
// Rolls the index set over to its next write index: creates index number
// N+1, waits for it to become healthy, registers an index range for it,
// re-points the write alias (marking the previous index read-only when one
// exists), and records activity and audit entries. No-op for non-writable
// index sets.
@Override
public void cycle() {
    if (!getConfig().isWritable()) {
        LOG.debug("Not cycling non-writable index set <{}> ({})", getConfig().id(), getConfig().title());
        return;
    }
    int oldTargetNumber;
    try {
        oldTargetNumber = getNewestIndexNumber();
    } catch (NoTargetIndexException ex) {
        // No index exists yet; -1 makes the first target number 0 below.
        oldTargetNumber = -1;
    }
    final int newTargetNumber = oldTargetNumber + 1;
    final String newTarget = buildIndexName(newTargetNumber);
    final String oldTarget = buildIndexName(oldTargetNumber);
    if (oldTargetNumber == -1) {
        LOG.info("Cycling from <none> to <{}>.", newTarget);
    } else {
        LOG.info("Cycling from <{}> to <{}>.", oldTarget, newTarget);
    }
    // Create new index.
    LOG.info("Creating target index <{}>.", newTarget);
    if (!indices.create(newTarget, this)) {
        throw new RuntimeException("Could not create new target index <" + newTarget + ">.");
    }
    LOG.info("Waiting for allocation of index <{}>.", newTarget);
    final HealthStatus healthStatus = indices.waitForRecovery(newTarget);
    checkIfHealthy(healthStatus, (status) -> new RuntimeException("New target index did not become healthy (target index: <" + newTarget + ">)"));
    LOG.debug("Health status of index <{}>: {}", newTarget, healthStatus);
    addDeflectorIndexRange(newTarget);
    LOG.info("Index <{}> has been successfully allocated.", newTarget);
    // Point deflector to new index.
    final String indexAlias = getWriteIndexAlias();
    LOG.info("Pointing index alias <{}> to new index <{}>.", indexAlias, newTarget);
    final Activity activity = new Activity(IndexSet.class);
    if (oldTargetNumber == -1) {
        // Only pointing, not cycling.
        pointTo(newTarget);
        activity.setMessage("Cycled index alias <" + indexAlias + "> from <none> to <" + newTarget + ">.");
    } else {
        // Re-pointing from existing old index to the new one.
        LOG.debug("Switching over index alias <{}>.", indexAlias);
        pointTo(newTarget, oldTarget);
        // Freeze the previous write index and compute its final range.
        setIndexReadOnlyAndCalculateRange(oldTarget);
        activity.setMessage("Cycled index alias <" + indexAlias + "> from <" + oldTarget + "> to <" + newTarget + ">.");
    }
    LOG.info("Successfully pointed index alias <{}> to index <{}>.", indexAlias, newTarget);
    activityWriter.write(activity);
    auditEventSender.success(AuditActor.system(nodeId), ES_WRITE_INDEX_UPDATE, ImmutableMap.of("indexName", newTarget));
}
// When index creation reports failure (indices.create returns false, the
// Mockito default here), cycle() must abort with a RuntimeException naming
// the index it could not create.
@Test
public void cycleThrowsRuntimeExceptionIfIndexCreationFailed() {
    final Map<String, Set<String>> indexNameAliases = ImmutableMap.of();
    when(indices.getIndexNamesAndAliases(anyString())).thenReturn(indexNameAliases);
    when(indices.create("graylog_0", mongoIndexSet)).thenReturn(false);
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("Could not create new target index <graylog_0>.");
    final MongoIndexSet mongoIndexSet = createIndexSet(config);
    mongoIndexSet.cycle();
}
/**
 * Rewrites the given AST node by running it through the underlying
 * visitor-based rewriter with the supplied context.
 */
public AstNode rewrite(final AstNode node, final C context) {
    final AstNode rewritten = rewriter.process(node, context);
    return rewritten;
}
// A Join carrying a WITHIN window expression must come back with the window
// expression rewritten via the plugin rewriter, alongside the already
// rewritten left/right relations.
@Test
public void shouldRewriteJoinWithWindowExpression() {
    // Given:
    final WithinExpression withinExpression = mock(WithinExpression.class);
    final WithinExpression rewrittenWithinExpression = mock(WithinExpression.class);
    final Join join = givenJoin(Optional.of(withinExpression));
    when(mockRewriter.apply(withinExpression, context)).thenReturn(rewrittenWithinExpression);
    // When:
    final AstNode rewritten = rewriter.rewrite(join, context);
    // Then:
    assertThat(
        rewritten,
        equalTo(
            new Join(
                location,
                rewrittenRelation,
                ImmutableList.of(new JoinedSource(
                    Optional.empty(),
                    rewrittenRightRelation,
                    Type.LEFT,
                    joinCriteria,
                    Optional.of(rewrittenWithinExpression)))))
    );
}
/**
 * Returns the next parsed record, skipping inputs the parent parser rejects
 * (signalled by a null return) until a record is produced or the input is
 * exhausted.
 */
@Override
public OUT nextRecord(OUT record) throws IOException {
    OUT parsed = super.nextRecord(record);
    while (parsed == null && !reachedEnd()) {
        parsed = super.nextRecord(record);
    }
    return parsed;
}
// Parses three rows with "|-" as field delimiter, including rows with empty
// fields and a fully empty row, then confirms end-of-input yields null.
@Test
void readStringFieldsWithTrailingDelimiters() {
    try {
        final String fileContent = "abc|-def|-ghijk\nabc|-|-hhg\n|-|-|-\n";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple3<String, String, String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class, String.class);
        final CsvInputFormat<Tuple3<String, String, String>> format = new TupleCsvInputFormat<>(PATH, typeInfo);
        format.setFieldDelimiter("|-");
        format.configure(new Configuration());
        format.open(split);
        Tuple3<String, String, String> result = new Tuple3<>();
        result = format.nextRecord(result);
        assertThat(result.f0).isEqualTo("abc");
        assertThat(result.f1).isEqualTo("def");
        assertThat(result.f2).isEqualTo("ghijk");
        result = format.nextRecord(result);
        assertThat(result.f0).isEqualTo("abc");
        assertThat(result.f1).isEmpty();
        assertThat(result.f2).isEqualTo("hhg");
        result = format.nextRecord(result);
        assertThat(result.f0).isEmpty();
        assertThat(result.f1).isEmpty();
        assertThat(result.f2).isEmpty();
        // No fourth row: the format must report null and end-of-input.
        result = format.nextRecord(result);
        assertThat(result).isNull();
        assertThat(format.reachedEnd()).isTrue();
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
/**
 * Static factory for a rule-description section context.
 */
public static RuleDescriptionSectionContextDto of(String key, String displayName) {
    final RuleDescriptionSectionContextDto context =
        new RuleDescriptionSectionContextDto(key, displayName);
    return context;
}
// An empty display name must be rejected at construction with the
// dedicated error message.
@Test
void check_of_with_display_name_is_empty() {
    assertThatThrownBy(() -> RuleDescriptionSectionContextDto.of(CONTEXT_KEY, ""))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage(DISPLAY_NAME_MUST_BE_SET_ERROR);
}
// Asks the remote producer to resume sending data for the given channel by
// enqueueing a ResumeConsumptionMessage; delivery is asynchronous via
// sendToChannel.
@Override
public void resumeConsumption(RemoteInputChannel inputChannel) {
    sendToChannel(new ResumeConsumptionMessage(inputChannel));
}
// After requesting subpartitions and resuming consumption, the outbound
// channel must carry a PartitionRequest followed by a ResumeConsumption
// message addressed to the input channel, and nothing else.
@TestTemplate
void testResumeConsumption() throws Exception {
    final CreditBasedPartitionRequestClientHandler handler = new CreditBasedPartitionRequestClientHandler();
    final EmbeddedChannel channel = new EmbeddedChannel(handler);
    final PartitionRequestClient client = createPartitionRequestClient(channel, handler, connectionReuseEnabled);
    final NetworkBufferPool networkBufferPool = new NetworkBufferPool(10, 32);
    final SingleInputGate inputGate = createSingleInputGate(1, networkBufferPool);
    final RemoteInputChannel inputChannel = createRemoteInputChannel(inputGate, client);
    try {
        final BufferPool bufferPool = networkBufferPool.createBufferPool(6, 6);
        inputGate.setBufferPool(bufferPool);
        inputGate.setupChannels();
        inputChannel.requestSubpartitions();
        inputChannel.resumeConsumption();
        channel.runPendingTasks();
        Object readFromOutbound = channel.readOutbound();
        assertThat(readFromOutbound).isInstanceOf(PartitionRequest.class);
        readFromOutbound = channel.readOutbound();
        assertThat(readFromOutbound).isInstanceOf(ResumeConsumption.class);
        assertThat(((ResumeConsumption) readFromOutbound).receiverId)
            .isEqualTo(inputChannel.getInputChannelId());
        assertThat((Object) channel.readOutbound()).isNull();
    } finally {
        // Release all the buffer resources
        inputGate.close();
        networkBufferPool.destroyAllBufferPools();
        networkBufferPool.destroy();
    }
}
// Assembles the set of plugins to load at startup:
//   1. bundled (built-in) plugins — duplicate keys are a fatal error;
//   2. external plugins — a clash with a bundled key, or a duplicate among
//      externals, is a fatal error;
//   3. downloaded plugins — promoted into the external plugin directory,
//      replacing any previously installed jar with the same key;
// then unloads any plugin whose requirements are not satisfied.
public Collection<ServerPluginInfo> loadPlugins() {
    Map<String, ServerPluginInfo> bundledPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo bundled : getBundledPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, bundled,
            plugin -> MessageException.of(format("Found two versions of the plugin %s [%s] in the directory %s. Please remove one of %s or %s.",
                bundled.getName(), bundled.getKey(), getRelativeDir(fs.getInstalledBundledPluginsDir()),
                bundled.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        bundledPluginsByKey.put(bundled.getKey(), bundled);
    }
    Map<String, ServerPluginInfo> externalPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo external : getExternalPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, external,
            plugin -> MessageException.of(format("Found a plugin '%s' in the directory '%s' with the same key [%s] as a built-in feature '%s'. Please remove '%s'.",
                external.getName(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getKey(), plugin.getName(),
                new File(getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName()))));
        failIfContains(externalPluginsByKey, external,
            plugin -> MessageException.of(format("Found two versions of the plugin '%s' [%s] in the directory '%s'. Please remove %s or %s.",
                external.getName(), external.getKey(), getRelativeDir(fs.getInstalledExternalPluginsDir()),
                external.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        externalPluginsByKey.put(external.getKey(), external);
    }
    for (PluginInfo downloaded : getDownloadedPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, downloaded,
            plugin -> MessageException.of(format("Fail to update plugin: %s. Built-in feature with same key already exists: %s. Move or delete plugin from %s directory",
                plugin.getName(), plugin.getKey(), getRelativeDir(fs.getDownloadedPluginsDir()))));
        ServerPluginInfo installedPlugin;
        if (externalPluginsByKey.containsKey(downloaded.getKey())) {
            // Updating: delete the superseded jar before moving the new one in.
            deleteQuietly(externalPluginsByKey.get(downloaded.getKey()).getNonNullJarFile());
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] updated to version {}", installedPlugin.getName(), installedPlugin.getKey(), installedPlugin.getVersion());
        } else {
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] installed", installedPlugin.getName(), installedPlugin.getKey());
        }
        externalPluginsByKey.put(downloaded.getKey(), installedPlugin);
    }
    Map<String, ServerPluginInfo> plugins = new HashMap<>(externalPluginsByKey.size() + bundledPluginsByKey.size());
    plugins.putAll(externalPluginsByKey);
    plugins.putAll(bundledPluginsByKey);
    // Drop plugins incompatible with this server's requirements.
    PluginRequirementsValidator.unloadIncompatiblePlugins(plugins);
    return plugins.values();
}
// A base plugin and a plugin that extends it, both in the external plugin
// directory, must both be loaded.
@Test
public void install_plugin_and_its_extension_plugins_at_startup() throws Exception {
    copyTestPluginTo("test-base-plugin", fs.getInstalledExternalPluginsDir());
    copyTestPluginTo("test-extend-plugin", fs.getInstalledExternalPluginsDir());
    // both plugins are installed
    assertThat(underTest.loadPlugins()).extracting(PluginInfo::getKey).containsOnly("testbase", "testextend");
}
// Parses the source held by javaInput with javac (string folding disabled so
// literals keep their written form, preview features enabled), fails fast on
// any error diagnostic, then walks the AST with a JDK-version-appropriate
// visitor to build formatting ops and writes the result into javaOutput.
static void format(final JavaInput javaInput, JavaOutput javaOutput, JavaFormatterOptions options) throws FormatterException {
    Context context = new Context();
    DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
    context.put(DiagnosticListener.class, diagnostics);
    Options.instance(context).put("allowStringFolding", "false");
    Options.instance(context).put("--enable-preview", "true");
    JCCompilationUnit unit;
    JavacFileManager fileManager = new JavacFileManager(context, true, UTF_8);
    try {
        // Empty platform classpath: the formatter only needs to parse.
        fileManager.setLocation(StandardLocation.PLATFORM_CLASS_PATH, ImmutableList.of());
    } catch (IOException e) {
        // impossible
        throw new IOError(e);
    }
    SimpleJavaFileObject source = new SimpleJavaFileObject(URI.create("source"), JavaFileObject.Kind.SOURCE) {
        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
            return javaInput.getText();
        }
    };
    Log.instance(context).useSource(source);
    ParserFactory parserFactory = ParserFactory.instance(context);
    JavacParser parser = parserFactory.newParser(
        javaInput.getText(),
        /* keepDocComments= */ true,
        /* keepEndPos= */ true,
        /* keepLineMap= */ true);
    unit = parser.parseCompilationUnit();
    unit.sourcefile = source;
    javaInput.setCompilationUnit(unit);
    Iterable<Diagnostic<? extends JavaFileObject>> errorDiagnostics = Iterables.filter(diagnostics.getDiagnostics(), Formatter::errorDiagnostic);
    if (!Iterables.isEmpty(errorDiagnostics)) {
        throw FormatterException.fromJavacDiagnostics(errorDiagnostics);
    }
    OpsBuilder builder = new OpsBuilder(javaInput, javaOutput);
    // Output the compilation unit.
    JavaInputAstVisitor visitor;
    if (Runtime.version().feature() >= 21) {
        visitor = createVisitor("com.google.googlejavaformat.java.java21.Java21InputAstVisitor", builder, options);
    } else if (Runtime.version().feature() >= 17) {
        visitor = createVisitor("com.google.googlejavaformat.java.java17.Java17InputAstVisitor", builder, options);
    } else {
        visitor = new JavaInputAstVisitor(builder, options.indentationMultiplier());
    }
    visitor.scan(unit, null);
    builder.sync(javaInput.getText().length());
    builder.drain();
    Doc doc = new DocBuilder().withOps(builder.build()).build();
    doc.computeBreaks(javaOutput.getCommentsHelper(), MAX_LINE_LENGTH, new Doc.State(+0, 0));
    doc.write(javaOutput);
    javaOutput.flush();
}
// Passing "-" as the file argument must make the CLI read the source from
// stdin and write the formatted result to stdout with exit code 0.
@Test
public void testFormatStdinStdoutWithDashFlag() throws Exception {
    String input = "class Foo{\n" + "void f\n" + "() {\n" + "}\n" + "}\n";
    String expectedOutput = "class Foo {\n" + "  void f() {}\n" + "}\n";
    InputStream in = new ByteArrayInputStream(input.getBytes(UTF_8));
    StringWriter out = new StringWriter();
    StringWriter err = new StringWriter();
    InputStream oldIn = System.in;
    System.setIn(in);
    Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
    assertThat(main.format("-")).isEqualTo(0);
    assertThat(out.toString()).isEqualTo(expectedOutput);
    // Restore the real stdin for subsequent tests.
    System.setIn(oldIn);
}
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final long timestamp = clock.getTime() / 1000; // oh it'd be lovely to use Java 7 here try { graphite.connect(); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMetered(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), timestamp); } graphite.flush(); } catch (IOException e) { LOGGER.warn("Unable to report to Graphite", graphite, e); } finally { try { graphite.close(); } catch (IOException e1) { LOGGER.warn("Error closing Graphite", graphite, e1); } } }
// A float gauge must be sent as a two-decimal string under the configured
// prefix, framed by connect/flush/close in that exact order.
@Test
public void reportsFloatGaugeValues() throws Exception {
    reporter.report(map("gauge", gauge(1.1f)), map(), map(), map(), map());
    final InOrder inOrder = inOrder(graphite);
    inOrder.verify(graphite).connect();
    inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
    inOrder.verify(graphite).flush();
    inOrder.verify(graphite).close();
    verifyNoMoreInteractions(graphite);
}
// Updates an article category after verifying that it exists.
@Override
public void updateArticleCategory(ArticleCategoryUpdateReqVO updateReqVO) {
    // Validate that the category exists (throws if not).
    validateArticleCategoryExists(updateReqVO.getId());
    // Convert the request VO and persist the update.
    ArticleCategoryDO updateObj = ArticleCategoryConvert.INSTANCE.convert(updateReqVO);
    articleCategoryMapper.updateById(updateObj);
}
// Updating a random (hence nonexistent) category id must raise the
// ARTICLE_CATEGORY_NOT_EXISTS service exception.
@Test
public void testUpdateArticleCategory_notExists() {
    // Prepare a request with a random, nonexistent id.
    ArticleCategoryUpdateReqVO reqVO = randomPojo(ArticleCategoryUpdateReqVO.class);
    // Invoke and assert the expected service exception.
    assertServiceException(() -> articleCategoryService.updateArticleCategory(reqVO), ARTICLE_CATEGORY_NOT_EXISTS);
}
// Lists the transfer items beneath a remote directory, pairing each remote
// child with its local counterpart. Symbolic links that resolve inside the
// transfer roots are not descended into (the link itself is transferred).
// Directory listings are cached per directory; the returned list is a
// filtered and sorted copy so the cache entry is never mutated.
@Override
public List<TransferItem> list(final Session<?> session, final Path directory, final Local local, final ListProgressListener listener) throws BackgroundException {
    if (log.isDebugEnabled()) {
        log.debug(String.format("List children for %s", directory));
    }
    if (directory.isSymbolicLink() && new DownloadSymlinkResolver(roots).resolve(directory)) {
        if (log.isDebugEnabled()) {
            log.debug(String.format("Do not list children for symbolic link %s", directory));
        }
        return Collections.emptyList();
    } else {
        final AttributedList<Path> list;
        if (cache.isCached(directory)) {
            list = cache.get(directory);
        } else {
            list = session.getFeature(ListService.class).list(directory, listener);
            cache.put(directory, list);
        }
        final List<TransferItem> children = new ArrayList<>();
        // Return copy with filtered result only
        for (Path f : new AttributedList<>(list.filter(comparator, filter))) {
            children.add(new TransferItem(f, LocalFactory.get(local, f.getName())));
        }
        return children;
    }
}
// The priority comparator must control the ordering of listed items:
// with ".*\.html" prioritized the .html file comes first; with the default
// comparator the original listing order is preserved.
@Test
public void testListSorted() throws Exception {
    final Path root = new Path("/t", EnumSet.of(Path.Type.directory));
    final NullSession session = new NullSession(new Host(new TestProtocol())) {
        @Override
        public AttributedList<Path> list(final Path file, final ListProgressListener listener) {
            final AttributedList<Path> children = new AttributedList<>();
            children.add(new Path("/t/c", EnumSet.of(Path.Type.file)));
            children.add(new Path("/t/c.html", EnumSet.of(Path.Type.file)));
            return children;
        }
    };
    {
        // Prioritizing .html files puts c.html first.
        Transfer t = new DownloadTransfer(new Host(new TestProtocol()), Collections.singletonList(new TransferItem(root, new NullLocal("l"))),
            new DownloadRegexFilter(), new DownloadRegexPriorityComparator(".*\\.html"));
        final List<TransferItem> list = t.list(session, root, new NullLocal("t") {
            @Override
            public boolean exists() {
                return true;
            }
        }, new DisabledListProgressListener());
        assertEquals(new Path("/t/c.html", EnumSet.of(Path.Type.file)), list.get(0).remote);
        assertEquals(new Path("/t/c", EnumSet.of(Path.Type.file)), list.get(1).remote);
    }
    {
        // Default comparator keeps the session's listing order.
        Transfer t = new DownloadTransfer(new Host(new TestProtocol()), Collections.singletonList(new TransferItem(root, new NullLocal("l"))),
            new DownloadRegexFilter(), new DownloadRegexPriorityComparator());
        final List<TransferItem> list = t.list(session, root, new NullLocal("t") {
            @Override
            public boolean exists() {
                return true;
            }
        }, new DisabledListProgressListener());
        assertEquals(new Path("/t/c.html", EnumSet.of(Path.Type.file)), list.get(1).remote);
        assertEquals(new Path("/t/c", EnumSet.of(Path.Type.file)), list.get(0).remote);
    }
}
/**
 * Parses a decimal fiat amount string into a {@code Fiat} value scaled to
 * the smallest currency unit. A value that cannot be represented exactly at
 * that scale (ArithmeticException) is reported as an
 * IllegalArgumentException carrying the original cause.
 */
public static Fiat parseFiat(final String currencyCode, final String str) {
    try {
        final BigDecimal scaled = new BigDecimal(str).movePointRight(SMALLEST_UNIT_EXPONENT);
        return Fiat.valueOf(currencyCode, scaled.longValueExact());
    } catch (ArithmeticException e) {
        throw new IllegalArgumentException(e);
    }
}
// "0.0001" and its trailing-zero variant "0.00010" must both parse to one
// smallest unit.
@Test
public void testParseFiat() {
    assertEquals(1, Fiat.parseFiat("EUR", "0.0001").value);
    assertEquals(1, Fiat.parseFiat("EUR", "0.00010").value);
}
// Populates the scorecard model template's constructor from the compilation
// DTO by delegating to the shared KiePMMLModelFactoryUtils initializer.
static void setConstructor(final DroolsCompilationDTO<Scorecard> compilationDTO, final ClassOrInterfaceDeclaration modelTemplate) {
    KiePMMLModelFactoryUtils.init(compilationDTO, modelTemplate);
}
// After setConstructor(), the template's default constructor must carry the
// expected super() arguments (file and model name) and the expected field
// assignments (targetField, miningFunction, pmmlMODEL).
@Test
void setConstructor() {
    final String targetField = "overallScore";
    final ClassOrInterfaceDeclaration modelTemplate = classOrInterfaceDeclaration.clone();
    final CommonCompilationDTO<Scorecard> compilationDTO =
        CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, scorecardModel,
            new PMMLCompilationContextMock(), "FILENAME");
    final DroolsCompilationDTO<Scorecard> droolsCompilationDTO =
        DroolsCompilationDTO.fromCompilationDTO(compilationDTO, new HashMap<>());
    KiePMMLScorecardModelFactory.setConstructor(droolsCompilationDTO, modelTemplate);
    Map<Integer, Expression> superInvocationExpressionsMap = new HashMap<>();
    superInvocationExpressionsMap.put(0, new NameExpr(String.format("\"%s\"", "FILENAME")));
    superInvocationExpressionsMap.put(1, new NameExpr(String.format("\"%s\"", scorecardModel.getModelName())));
    MINING_FUNCTION miningFunction = MINING_FUNCTION.byName(scorecardModel.getMiningFunction().value());
    PMML_MODEL pmmlModel = PMML_MODEL.byName(scorecardModel.getClass().getSimpleName());
    Map<String, Expression> assignExpressionMap = new HashMap<>();
    assignExpressionMap.put("targetField", new StringLiteralExpr(targetField));
    assignExpressionMap.put("miningFunction", new NameExpr(miningFunction.getClass().getName() + "." + miningFunction.name()));
    assignExpressionMap.put("pmmlMODEL", new NameExpr(pmmlModel.getClass().getName() + "." + pmmlModel.name()));
    ConstructorDeclaration constructorDeclaration = modelTemplate.getDefaultConstructor().get();
    assertThat(commonEvaluateConstructor(constructorDeclaration, getSanitizedClassName(scorecardModel.getModelName()),
        superInvocationExpressionsMap, assignExpressionMap)).isTrue();
}
// HTTP/2 frame decoding state machine. Alternates between reading a frame
// header (readingHeaders) and waiting for the complete payload before
// dispatching it to the listener; a zero-length payload falls straight
// through after the header. A connection-level error latches readError so
// all subsequent input is discarded; stream-level Http2Exceptions do not
// poison the reader.
@Override
public void readFrame(ChannelHandlerContext ctx, ByteBuf input, Http2FrameListener listener) throws Http2Exception {
    if (readError) {
        input.skipBytes(input.readableBytes());
        return;
    }
    try {
        do {
            if (readingHeaders && !preProcessFrame(input)) {
                return;
            }
            // The header is complete, fall into the next case to process the payload.
            // This is to ensure the proper handling of zero-length payloads. In this
            // case, we don't want to loop around because there may be no more data
            // available, causing us to exit the loop. Instead, we just want to perform
            // the first pass at payload processing now.
            // Wait until the entire payload has been read.
            if (input.readableBytes() < payloadLength) {
                return;
            }
            // Slice to work only on the frame being read
            ByteBuf framePayload = input.readSlice(payloadLength);
            // We have consumed the data for this frame, next time we read,
            // we will be expecting to read a new frame header.
            readingHeaders = true;
            verifyFrameState();
            processPayloadState(ctx, framePayload, listener);
        } while (input.isReadable());
    } catch (Http2Exception e) {
        // Only connection errors (non-stream) poison the reader.
        readError = !Http2Exception.isStreamError(e);
        throw e;
    } catch (RuntimeException e) {
        readError = true;
        throw e;
    } catch (Throwable cause) {
        readError = true;
        PlatformDependent.throwException(cause);
    }
}
// A CONTINUATION frame arriving without a preceding HEADERS frame must be
// rejected with a connection-level (non-stream) Http2Exception.
@Test
public void failedWhenContinuationFrameNotFollowHeaderFrame() throws Http2Exception {
    final ByteBuf input = Unpooled.buffer();
    try {
        writeContinuationFrame(input, 1, new DefaultHttp2Headers().add("foo", "bar"),
            new Http2Flags().endOfHeaders(true));
        Http2Exception ex = assertThrows(Http2Exception.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                frameReader.readFrame(ctx, input, listener);
            }
        });
        assertFalse(ex instanceof Http2Exception.StreamException);
    } finally {
        input.release();
    }
}
/**
 * Parses the given string into a Joda {@code DateTime} using the configured
 * pattern and locale, defaulting the year to the current one. The configured
 * time zone is applied only when the pattern itself does not carry zone
 * information. Null or empty input yields {@code null}.
 */
@Override
@Nullable
public Object convert(@Nullable String value) {
    if (isNullOrEmpty(value)) {
        return null;
    }
    LOG.debug("Trying to parse date <{}> with pattern <{}>, locale <{}>, and timezone <{}>.", value, dateFormat, locale, timeZone);
    DateTimeFormatter formatter = DateTimeFormat
        .forPattern(dateFormat)
        .withDefaultYear(YearMonth.now(timeZone).getYear())
        .withLocale(locale);
    if (!containsTimeZone) {
        // Only force the zone when the pattern supplies none of its own.
        formatter = formatter.withZone(timeZone);
    }
    return DateTime.parse(value, formatter);
}
// An empty date-format pattern must be rejected at construction time with a
// ConfigurationException; the convert() call should never be reached.
@Test(expected = ConfigurationException.class)
public void testWithEmptyDateFormat() throws Exception {
    final DateConverter converter = new DateConverter(config("", null, null));
    assertThat((DateTime) converter.convert("foo")).isNull();
}
/**
 * Returns a transform that views a {@code PCollection<KV<K, V>>} as a
 * multimap side input.
 * NOTE(review): the meaning of the {@code false} flag is defined by the
 * AsMultimap constructor — confirm there before relying on it.
 */
public static <K, V> AsMultimap<K, V> asMultimap() {
    return new AsMultimap<>(false);
}
// An empty multimap side input must materialize as an empty map: isEmpty()
// true, empty entrySet, and an iterator with no elements.
@Test
@Category(ValidatesRunner.class)
public void testEmptyMultimapSideInput() throws Exception {
    final PCollectionView<Map<String, Iterable<Integer>>> view =
        pipeline
            .apply("CreateEmptyView", Create.empty(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())))
            .apply(View.asMultimap());
    PCollection<Integer> results =
        pipeline
            .apply("Create1", Create.of(1))
            .apply(
                "OutputSideInputs",
                ParDo.of(
                        new DoFn<Integer, Integer>() {
                            @ProcessElement
                            public void processElement(ProcessContext c) {
                                assertTrue(c.sideInput(view).isEmpty());
                                assertTrue(c.sideInput(view).entrySet().isEmpty());
                                assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
                                c.output(c.element());
                            }
                        })
                    .withSideInputs(view));
    // Pass at least one value through to guarantee that DoFn executes.
    PAssert.that(results).containsInAnyOrder(1);
    pipeline.run();
}
// Reads the IronMQ message id from the inbound message header, or null when
// the header is absent.
@Override
protected String getMessageId(Exchange exchange) {
    return exchange.getIn().getHeader(CAMEL_IRON_MQ_MESSAGE_ID, String.class);
}
// The decorator must surface the message id stored in the IronMQ header.
@Test
public void testGetMessageId() {
    String messageId = "abcd";
    Exchange exchange = Mockito.mock(Exchange.class);
    Message message = Mockito.mock(Message.class);
    Mockito.when(exchange.getIn()).thenReturn(message);
    Mockito.when(message.getHeader(IronmqSpanDecorator.CAMEL_IRON_MQ_MESSAGE_ID, String.class)).thenReturn(messageId);
    AbstractMessagingSpanDecorator decorator = new IronmqSpanDecorator();
    assertEquals(messageId, decorator.getMessageId(exchange));
}
// Instantiates one CredentialProvider per URI listed under
// CREDENTIAL_PROVIDER_PATH, asking each registered factory in turn. The
// ServiceLoader iteration is synchronized (the iterator is not thread-safe)
// and guarded by SERVICE_LOADER_LOCKED to detect recursive loads, e.g. a
// JCEKS provider whose filesystem connector tries to load the same file.
// Fails with IOException when no factory accepts a URI or a path is not a
// valid URI.
public static List<CredentialProvider> getProviders(Configuration conf
                                                    ) throws IOException {
    List<CredentialProvider> result = new ArrayList<>();
    for (String path : conf.getStringCollection(CREDENTIAL_PROVIDER_PATH)) {
        try {
            URI uri = new URI(path);
            boolean found = false;
            // Iterate serviceLoader in a synchronized block since
            // serviceLoader iterator is not thread-safe.
            synchronized (serviceLoader) {
                try {
                    if (SERVICE_LOADER_LOCKED.getAndSet(true)) {
                        throw new PathIOException(path,
                            "Recursive load of credential provider; " +
                            "if loading a JCEKS file, this means that the filesystem connector is " +
                            "trying to load the same file");
                    }
                    for (CredentialProviderFactory factory : serviceLoader) {
                        CredentialProvider kp = factory.createProvider(uri, conf);
                        if (kp != null) {
                            result.add(kp);
                            found = true;
                            break;
                        }
                    }
                } finally {
                    // Always release the re-entrancy guard.
                    SERVICE_LOADER_LOCKED.set(false);
                }
            }
            if (!found) {
                throw new IOException("No CredentialProviderFactory for " + uri + " in " + CREDENTIAL_PROVIDER_PATH);
            }
        } catch (URISyntaxException error) {
            throw new IOException("Bad configuration of " + CREDENTIAL_PROVIDER_PATH + " at " + path, error);
        }
    }
    return result;
}
@Test
public void testFactoryErrors() throws Exception {
  // An unrecognized provider scheme must surface as an IOException with a clear message.
  Configuration conf = new Configuration();
  conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, "unknown:///");
  try {
    List<CredentialProvider> providers = CredentialProviderFactory.getProviders(conf);
    assertTrue("should throw!", false);
  } catch (IOException e) {
    assertEquals("No CredentialProviderFactory for unknown:/// in " +
        CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
        e.getMessage());
  }
}
/**
 * REST endpoint that runs a Grok pattern against caller-supplied sample data.
 * Returns the captured fields on success, or a 400 with the matcher's error
 * message if the pattern is invalid.
 */
@POST
@Timed
@Path("/test")
@NoAuditEvent("Only used to test pattern.")
@ApiOperation(value = "Test pattern with sample data")
public Response testPattern(@ApiParam(name = "pattern", required = true) GrokPatternTestRequest request) {
  Map<String, Object> result;
  try {
    result = grokPatternService.match(request.grokPattern(), request.sampleData());
  } catch (GrokException | IllegalArgumentException e) {
    // Surface the parse/match failure to the caller instead of a 500.
    Map<String, String> error = ImmutableMap.of("message", e.getMessage());
    throw new BadRequestException(Response.status(Response.Status.BAD_REQUEST).entity(error).build());
  }
  return Response.ok(result).build();
}
@Test
public void testPatternWithSampleData() throws Exception {
  // A matching pattern should return the captured groups keyed by pattern name.
  final String sampleData = "1.2.3.4";
  final GrokPattern grokPattern = GrokPattern.create("IP", "\\d.\\d.\\d.\\d");
  grokPatternService.save(grokPattern);
  final GrokPatternTestRequest grokPatternTestRequest = GrokPatternTestRequest.create(grokPattern, sampleData);
  final Map<String, Object> expectedReturn = Collections.singletonMap("IP", "1.2.3.4");
  final Response response = grokResource.testPattern(grokPatternTestRequest);
  assertThat(response.hasEntity()).isTrue();
  assertThat(response.getEntity()).isEqualTo(expectedReturn);
}
/**
 * Deserializes {@code input} into an instance of {@code cls}.
 *
 * @param input raw textual representation to parse
 * @param cls   target class used both for construction and type resolution
 * @return the parsed instance
 */
public <T> T parse(String input, Class<T> cls) {
  return readFlow(input, cls, type(cls));
}
@Test
void parse() {
  // Full-featured flow fixture: checks ids, task count and optional task attributes.
  Flow flow = parse("flows/valids/full.yaml");
  assertThat(flow.getId(), is("full"));
  assertThat(flow.getTasks().size(), is(5));
  // third with all optionals
  Task optionals = flow.getTasks().get(2);
  assertThat(optionals.getTimeout(), is(Duration.ofMinutes(60)));
  assertThat(optionals.getRetry().getType(), is("constant"));
  assertThat(optionals.getRetry().getMaxAttempt(), is(5));
  assertThat(((Constant) optionals.getRetry()).getInterval().getSeconds(), is(900L));
}
/**
 * Resolves queue options for a query: when queueing is disabled for this job
 * (either globally or because the query does not need to queue), a disabled
 * options object is returned; otherwise the environment defaults apply.
 */
public static QueryQueueOptions createFromEnvAndQuery(DefaultCoordinator coord) {
    final boolean queueApplies =
            coord.getJobSpec().isEnableQueue() && coord.getJobSpec().isNeedQueued();
    if (queueApplies) {
        return createFromEnv();
    }
    return new QueryQueueOptions(false, V2.DEFAULT);
}
@Test
public void testCreateFromEnvAndQuery() throws Exception {
  Config.enable_query_queue_v2 = true;
  {
    // Global select-queue switch off: session hint alone must not enable v2.
    GlobalVariable.setEnableQueryQueueSelect(false);
    DefaultCoordinator coordinator = getScheduler("SELECT /*+SET_VAR(enable_query_queue=true)*/ * FROM lineitem");
    QueryQueueOptions opts = QueryQueueOptions.createFromEnvAndQuery(coordinator);
    assertThat(opts.isEnableQueryQueueV2()).isFalse();
  }
  {
    // Both global switch and session hint on: v2 enabled.
    GlobalVariable.setEnableQueryQueueSelect(true);
    DefaultCoordinator coordinator = getScheduler("SELECT /*+SET_VAR(enable_query_queue=true)*/ * FROM lineitem");
    QueryQueueOptions opts = QueryQueueOptions.createFromEnvAndQuery(coordinator);
    assertThat(opts.isEnableQueryQueueV2()).isTrue();
  }
  {
    // Session hint explicitly off overrides the global switch.
    GlobalVariable.setEnableQueryQueueSelect(true);
    DefaultCoordinator coordinator = getScheduler("SELECT /*+SET_VAR(enable_query_queue=false)*/ * FROM lineitem");
    QueryQueueOptions opts = QueryQueueOptions.createFromEnvAndQuery(coordinator);
    assertThat(opts.isEnableQueryQueueV2()).isFalse();
  }
  {
    // information_schema queries are never queued, regardless of switches.
    GlobalVariable.setEnableQueryQueueSelect(true);
    DefaultCoordinator coordinator = getScheduler(
        "SELECT /*+SET_VAR(enable_query_queue=true)*/ * FROM information_schema.columns");
    QueryQueueOptions opts = QueryQueueOptions.createFromEnvAndQuery(coordinator);
    assertThat(opts.isEnableQueryQueueV2()).isFalse();
  }
}
/**
 * Validates a {@code @BugPattern} annotation: the annotation must be present,
 * the name must be whitespace-free, and the link element must agree with
 * the declared link type.
 *
 * @throws ValidationException describing the first violated constraint
 */
public static void validate(BugPattern pattern) throws ValidationException {
  if (pattern == null) {
    throw new ValidationException("No @BugPattern provided");
  }
  // name must not contain spaces
  if (CharMatcher.whitespace().matchesAnyOf(pattern.name())) {
    throw new ValidationException("Name must not contain whitespace: " + pattern.name());
  }
  // linkType must be consistent with link element.
  switch (pattern.linkType()) {
    case CUSTOM:
      // CUSTOM requires an explicit link...
      if (pattern.link().isEmpty()) {
        throw new ValidationException("Expected a custom link but none was provided");
      }
      break;
    case AUTOGENERATED:
    case NONE:
      // ...while the other types forbid one.
      if (!pattern.link().isEmpty()) {
        throw new ValidationException("Expected no custom link but found: " + pattern.link());
      }
      break;
  }
}
@Test
public void unsuppressible() throws Exception {
  // An unsuppressible, non-disableable pattern must still pass validation.
  @BugPattern(
      name = "Unsuppressible",
      summary = "An unsuppressible BugPattern",
      explanation = "An unsuppressible BugPattern",
      severity = SeverityLevel.ERROR,
      suppressionAnnotations = {},
      disableable = false)
  final class BugPatternTestClass {}
  BugPattern annotation = BugPatternTestClass.class.getAnnotation(BugPattern.class);
  BugPatternValidator.validate(annotation);
}
// Private constructor: prevents instantiation of this utility class.
private Main() {
  // Utility Class.
}
@Test
public void runsApacheAgainstRelease() throws Exception {
  // Smoke test: CLI entry point accepts a released distribution version,
  // a test case name and a scratch working directory without throwing.
  final File pwd = temp.newFolder();
  Main.main(
      String.format("--%s=5.5.0", UserInput.DISTRIBUTION_VERSION_PARAM),
      String.format("--%s=apache", UserInput.TEST_CASE_PARAM),
      String.format("--workdir=%s", pwd.getAbsolutePath())
  );
}
/**
 * Returns a {@link FieldScope} that ignores the given field descriptors
 * during proto comparison.
 *
 * @param firstFieldDescriptor a field to ignore (at least one is required)
 * @param rest                 further fields to ignore
 */
public static FieldScope ignoringFieldDescriptors(
    FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  return FieldScopeImpl.createIgnoringFieldDescriptors(asList(firstFieldDescriptor, rest));
}
@Test
public void testIgnoringFieldOfAnyMessage() throws Exception {
  // Ignoring a field descriptor of a message packed inside an Any must apply
  // to the unpacked payload, but not hide differences in other fields.
  String typeUrl =
      isProto3()
          ? "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage3"
          : "type.googleapis.com/com.google.common.truth.extensions.proto.SubTestMessage2";
  Message message =
      parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
  Message diffMessage1 =
      parse("o_int: 2 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"foo\" } }");
  Message diffMessage2 =
      parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 2 r_string: \"bar\" } }");
  Message eqMessage =
      parse("o_int: 1 o_any_message { [" + typeUrl + "]: { o_int: 3 r_string: \"foo\" } }");
  FieldDescriptor fieldDescriptor =
      getTypeRegistry().getDescriptorForTypeUrl(typeUrl).findFieldByName("o_int");
  FieldScope partialScope = FieldScopes.ignoringFieldDescriptors(fieldDescriptor);
  // Differences outside the ignored field are still detected.
  expectThat(diffMessage1)
      .unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
      .withPartialScope(partialScope)
      .isNotEqualTo(message);
  expectThat(diffMessage2)
      .unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
      .withPartialScope(partialScope)
      .isNotEqualTo(message);
  // A difference only in the ignored (packed) field is invisible.
  expectThat(eqMessage)
      .unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
      .withPartialScope(partialScope)
      .isEqualTo(message);
  // Failure messages must point at the actual differing field.
  expectFailureWhenTesting()
      .that(diffMessage1)
      .unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
      .withPartialScope(partialScope)
      .isEqualTo(message);
  expectIsEqualToFailed();
  expectThatFailure().hasMessageThat().contains("modified: o_int: 1 -> 2");
  expectFailureWhenTesting()
      .that(diffMessage2)
      .unpackingAnyUsing(getTypeRegistry(), getExtensionRegistry())
      .withPartialScope(partialScope)
      .isEqualTo(message);
  expectIsEqualToFailed();
  expectThatFailure()
      .hasMessageThat()
      .contains("modified: o_any_message.value.r_string[0]: \"foo\" -> \"bar\"");
}
/**
 * Exchanges a refresh token for a new access token.
 *
 * <p>Validates the refresh token (existence, expiry, owning client, client
 * permissions), enforces that any requested scope is a subset of the scope
 * originally granted to the refresh token, then issues and persists a new
 * access token — optionally rotating the refresh token if the client is not
 * configured to reuse them.
 *
 * @param refreshTokenValue the presented refresh token value
 * @param authRequest       the token request carrying client id and requested scope
 * @return the newly issued, persisted access token
 * @throws InvalidTokenException  if the refresh token is missing, unknown or expired
 * @throws InvalidClientException if the requesting client does not own the token
 *                                or is not allowed to refresh
 * @throws InvalidScopeException  if the request attempts to up-scope
 */
@Override
@Transactional(value="defaultTransactionManager")
public OAuth2AccessTokenEntity refreshAccessToken(String refreshTokenValue, TokenRequest authRequest) throws AuthenticationException {

	if (Strings.isNullOrEmpty(refreshTokenValue)) {
		// throw an invalid token exception if there's no refresh token value at all
		throw new InvalidTokenException("Invalid refresh token: " + refreshTokenValue);
	}

	// clearExpiredRefreshToken returns null for tokens it purged
	OAuth2RefreshTokenEntity refreshToken = clearExpiredRefreshToken(tokenRepository.getRefreshTokenByValue(refreshTokenValue));

	if (refreshToken == null) {
		// throw an invalid token exception if we couldn't find the token
		throw new InvalidTokenException("Invalid refresh token: " + refreshTokenValue);
	}

	ClientDetailsEntity client = refreshToken.getClient();

	AuthenticationHolderEntity authHolder = refreshToken.getAuthenticationHolder();

	// make sure that the client requesting the token is the one who owns the refresh token
	ClientDetailsEntity requestingClient = clientDetailsService.loadClientByClientId(authRequest.getClientId());
	if (!client.getClientId().equals(requestingClient.getClientId())) {
		// a mismatched client is treated as a compromise: revoke the token
		tokenRepository.removeRefreshToken(refreshToken);
		throw new InvalidClientException("Client does not own the presented refresh token");
	}

	//Make sure this client allows access token refreshing
	if (!client.isAllowRefresh()) {
		throw new InvalidClientException("Client does not allow refreshing access token!");
	}

	// clear out any access tokens
	if (client.isClearAccessTokensOnRefresh()) {
		tokenRepository.clearAccessTokensForRefreshToken(refreshToken);
	}

	if (refreshToken.isExpired()) {
		tokenRepository.removeRefreshToken(refreshToken);
		throw new InvalidTokenException("Expired refresh token: " + refreshTokenValue);
	}

	OAuth2AccessTokenEntity token = new OAuth2AccessTokenEntity();

	// get the stored scopes from the authentication holder's authorization request; these are the scopes associated with the refresh token
	Set<String> refreshScopesRequested = new HashSet<>(refreshToken.getAuthenticationHolder().getAuthentication().getOAuth2Request().getScope());
	Set<SystemScope> refreshScopes = scopeService.fromStrings(refreshScopesRequested);
	// remove any of the special system scopes
	refreshScopes = scopeService.removeReservedScopes(refreshScopes);

	Set<String> scopeRequested = authRequest.getScope() == null ? new HashSet<String>() : new HashSet<>(authRequest.getScope());
	Set<SystemScope> scope = scopeService.fromStrings(scopeRequested);
	// remove any of the special system scopes
	scope = scopeService.removeReservedScopes(scope);

	if (scope != null && !scope.isEmpty()) {
		// ensure a proper subset of scopes
		if (refreshScopes != null && refreshScopes.containsAll(scope)) {
			// set the scope of the new access token if requested
			token.setScope(scopeService.toStrings(scope));
		} else {
			String errorMsg = "Up-scoping is not allowed.";
			logger.error(errorMsg);
			throw new InvalidScopeException(errorMsg);
		}
	} else {
		// otherwise inherit the scope of the refresh token (if it's there -- this can return a null scope set)
		token.setScope(scopeService.toStrings(refreshScopes));
	}

	token.setClient(client);

	if (client.getAccessTokenValiditySeconds() != null) {
		Date expiration = new Date(System.currentTimeMillis() + (client.getAccessTokenValiditySeconds() * 1000L));
		token.setExpiration(expiration);
	}

	if (client.isReuseRefreshToken()) {
		// if the client re-uses refresh tokens, do that
		token.setRefreshToken(refreshToken);
	} else {
		// otherwise, make a new refresh token
		OAuth2RefreshTokenEntity newRefresh = createRefreshToken(client, authHolder);
		token.setRefreshToken(newRefresh);

		// clean up the old refresh token
		tokenRepository.removeRefreshToken(refreshToken);
	}

	token.setAuthenticationHolder(authHolder);

	// let enhancers (e.g. id-token builders) decorate before persisting
	tokenEnhancer.enhance(token, authHolder.getAuthentication());

	tokenRepository.saveAccessToken(token);

	return token;
}
@Test
public void refreshAccessToken_requestingNullScope() {
  // A null requested scope must inherit the scope stored on the refresh token.
  tokenRequest.setScope(null);
  OAuth2AccessTokenEntity token = service.refreshAccessToken(refreshTokenValue, tokenRequest);
  verify(scopeService, atLeastOnce()).removeReservedScopes(anySet());
  assertThat(token.getScope(), equalTo(storedScope));
}
/**
 * Returns an iterator over all chunks in sequence. {@code remove()} is
 * supported and keeps the enclosing list's {@code size} field in sync.
 */
@Override
public Iterator<T> iterator() {
  final Iterator<T> it = Iterables.concat(chunks).iterator();

  return new Iterator<T>() {
    @Override
    public boolean hasNext() {
      return it.hasNext();
    }

    @Override
    public T next() {
      return it.next();
    }

    @Override
    public void remove() {
      // Delegate to the underlying chunk iterator, then account for the
      // removed element in the list's cached size.
      it.remove();
      size--;
    }
  };
}
@Test
public void testIterator() {
  // Iteration order must match insertion order across chunk boundaries.
  ChunkedArrayList<Integer> l = new ChunkedArrayList<Integer>();
  for (int i = 0; i < 30000; i++) {
    l.add(i);
  }
  int i = 0;
  for (int fromList : l) {
    assertEquals(i, fromList);
    i++;
  }
}
/**
 * Obtains a shard iterator for resuming from this checkpoint.
 *
 * <p>When the checkpoint is in the middle of an aggregated user record the
 * iterator is forced to AT_SEQUENCE_NUMBER (with no timestamp) so the
 * partially-consumed record is re-read; otherwise the stored iterator type,
 * sequence number and timestamp are used as-is.
 */
public String getShardIterator(SimplifiedKinesisClient kinesisClient)
    throws TransientKinesisException {
  if (checkpointIsInTheMiddleOfAUserRecord()) {
    return kinesisClient.getShardIterator(
        streamName, shardId, AT_SEQUENCE_NUMBER, sequenceNumber, null);
  }
  return kinesisClient.getShardIterator(
      streamName, shardId, shardIteratorType, sequenceNumber, timestamp);
}
@Test
public void testProvidingShardIterator() throws IOException, TransientKinesisException {
  // Without a sub-sequence offset, the stored iterator type passes through.
  assertThat(checkpoint(AT_SEQUENCE_NUMBER, "100", null).getShardIterator(client))
      .isEqualTo(AT_SEQUENCE_SHARD_IT);
  assertThat(checkpoint(AFTER_SEQUENCE_NUMBER, "100", null).getShardIterator(client))
      .isEqualTo(AFTER_SEQUENCE_SHARD_IT);
  // With a sub-sequence offset (mid user-record), AT_SEQUENCE is forced.
  assertThat(checkpoint(AT_SEQUENCE_NUMBER, "100", 10L).getShardIterator(client))
      .isEqualTo(AT_SEQUENCE_SHARD_IT);
  assertThat(checkpoint(AFTER_SEQUENCE_NUMBER, "100", 10L).getShardIterator(client))
      .isEqualTo(AT_SEQUENCE_SHARD_IT);
}
/**
 * Builds the application's Logback context: resets the root context, wires
 * JUL change propagation, configures console and file appenders, then applies
 * the log-level configuration from application settings (with com.hazelcast
 * pinned to WARN regardless of the configured root level).
 *
 * @return the configured logger context
 */
public LoggerContext configure() {
  LoggerContext ctx = helper.getRootContext();
  ctx.reset();
  helper.enableJulChangePropagation(ctx);
  configureConsole(ctx);
  configureWithLogbackWritingToFile(ctx);
  helper.apply(
      LogLevelConfig.newBuilder(helper.getRootLoggerName())
          .rootLevelFor(ProcessId.APP)
          // Hazelcast is chatty; keep it at WARN independent of the root level.
          .immutableLevel("com.hazelcast", Level.toLevel("WARN"))
          .build(),
      appSettings.getProps());
  return ctx;
}
@Test
public void root_logger_level_changes_with_app_property_and_is_case_insensitive() {
  // Lowercase "debug" must be accepted and applied to the root logger.
  settings.getProps().set("sonar.log.level.app", "debug");
  LoggerContext ctx = underTest.configure();
  verifyRootLogLevel(ctx, Level.DEBUG);
}
/**
 * Merges {@code paramsToMerge} into {@code params} in place.
 *
 * <p>Literal MAP params are merged recursively (with the parent's mode
 * propagated into the nested merge context); literal STRING_MAP params are
 * merged via shallow putAll with the incoming entries winning; all other
 * params simply overwrite, subject to the mode/permission rules enforced by
 * {@code buildMergedParamDefinition}.
 *
 * @param params        base map, mutated to hold the merged result
 * @param paramsToMerge overriding definitions; {@code null} is a no-op
 * @param context       merge context carrying source and permission info
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
  if (paramsToMerge == null) {
    return;
  }
  // Iterate the union of key sets so keys present only in the base map are
  // still visited (they short-circuit below when there is nothing to merge).
  Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
      .forEach(
          name -> {
            ParamDefinition paramToMerge = paramsToMerge.get(name);
            if (paramToMerge == null) {
              return;
            }
            if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
              // Literal maps merge key-by-key, recursively.
              Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
              Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
              mergeParams(
                  baseMap,
                  toMergeMap,
                  MergeContext.copyWithParentMode(
                      context, params.getOrDefault(name, paramToMerge).getMode()));
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else if (paramToMerge.getType() == ParamType.STRING_MAP
                && paramToMerge.isLiteral()) {
              // String maps merge shallowly; incoming entries overwrite.
              Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
              Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
              baseMap.putAll(toMergeMap);
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else {
              // Scalars and non-literal params: the incoming value wins.
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
            }
          });
}
@Test
public void testMergeBothOverwrite() throws JsonProcessingException {
  // Two MUTABLE string params with the same name: the merged-in value wins.
  Map<String, ParamDefinition> allParams =
      parseParamDefMap("{'tomerge': {'type': 'STRING','value': 'hello', 'mode': 'MUTABLE'}}");
  Map<String, ParamDefinition> paramsToMerge =
      parseParamDefMap("{'tomerge': {'type': 'STRING', 'value': 'goodbye', 'mode': 'MUTABLE'}}");
  ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext);
  assertEquals(1, allParams.size());
  StringParamDefinition tomerge = allParams.get("tomerge").asStringParamDef();
  assertEquals("goodbye", tomerge.getValue());
  assertEquals(ParamMode.MUTABLE, tomerge.getMode());
  assertEquals(ParamSource.DEFINITION, tomerge.getSource());
}
/**
 * Sorts the given PDF file list in place according to the selected option:
 * by date (newest first), by name (alphabetical), or by size (ascending or
 * descending). Unrecognized options leave the list untouched.
 *
 * @param option one of the *_INDEX sort option constants
 * @param pdf    files to sort in place
 */
public void performSortOperation(int option, List<File> pdf) {
    if (option == DATE_INDEX) {
        sortFilesByDateNewestToOldest(pdf);
    } else if (option == NAME_INDEX) {
        sortByNameAlphabetical(pdf);
    } else if (option == SIZE_INCREASING_ORDER_INDEX) {
        sortFilesBySizeIncreasingOrder(pdf);
    } else if (option == SIZE_DECREASING_ORDER_INDEX) {
        sortFilesBySizeDecreasingOrder(pdf);
    }
}
@Test
public void shouldReturnArraySortedByDescendingSize() {
  // given: mock files reporting fixed lengths
  long[] sizes = {10000, 1000, 100, 50, 2000, 2500};
  for (int i = 0; i < sizes.length; i++)
    when(mFiles.get(i).length()).thenReturn(sizes[i]);
  File[] expected = new File[]{mFiles.get(0), mFiles.get(5), mFiles.get(4),
      mFiles.get(1), mFiles.get(2), mFiles.get(3)};
  // when
  mInstance.performSortOperation(mInstance.SIZE_DECREASING_ORDER_INDEX, mFiles);
  // then
  Assert.assertEquals(asList(expected), mFiles);
}
/**
 * Not supported by this adapter; always throws.
 *
 * @throws MethodNotAvailableException unconditionally
 */
@Override
@MethodNotAvailable
public Map<K, Object> executeOnKeys(Set<K> keys, com.hazelcast.map.EntryProcessor entryProcessor) {
  throw new MethodNotAvailableException();
}
// The adapter advertises executeOnKeys as unavailable; calling it must throw.
@Test(expected = MethodNotAvailableException.class)
public void testExecuteOnKeys() {
  Set<Integer> keys = new HashSet<>(singleton(23));
  adapter.executeOnKeys(keys, new IMapReplaceEntryProcessor("value", "newValue"));
}
/**
 * Decides whether a non-subscribed feed may be deleted.
 *
 * <p>A feed is only deletable when it is in the NOT_SUBSCRIBED state, its
 * item list is loaded, none of its items is a favorite, queued, or
 * downloaded, and its last refresh attempt is older than TIME_TO_KEEP.
 */
public static boolean shouldDelete(Feed feed) {
    // Guard clauses: wrong state or items not loaded means "keep".
    if (feed.getState() != Feed.STATE_NOT_SUBSCRIBED || feed.getItems() == null) {
        return false;
    }
    // Any item the user still cares about blocks deletion.
    for (FeedItem candidate : feed.getItems()) {
        boolean inUse = candidate.isTagged(FeedItem.TAG_FAVORITE)
                || candidate.isTagged(FeedItem.TAG_QUEUE)
                || candidate.isDownloaded();
        if (inUse) {
            return false;
        }
    }
    final long cutoff = System.currentTimeMillis() - TIME_TO_KEEP;
    return feed.getLastRefreshAttempt() < cutoff;
}
@Test
public void testSubscribed() {
  // A stale feed is only deletable once it is in the NOT_SUBSCRIBED state.
  Feed feed = createFeed();
  feed.setLastRefreshAttempt(System.currentTimeMillis()
      - TimeUnit.MILLISECONDS.convert(200, TimeUnit.DAYS));
  assertFalse(NonSubscribedFeedsCleaner.shouldDelete(feed));
  feed.setState(Feed.STATE_NOT_SUBSCRIBED);
  assertTrue(NonSubscribedFeedsCleaner.shouldDelete(feed));
}
/**
 * Resolves the effective invocation timeout, in priority order:
 * dynamic per-method config (when a dynamic alias is set), then the
 * per-request timeout, then consumer-side config (method level before
 * interface level), then the provider's advertised timeout, and finally
 * the framework default.
 */
private int resolveTimeout(SofaRequest request, ConsumerConfig consumerConfig, ProviderInfo providerInfo) {
    // Dynamic configuration takes precedence over everything else.
    final String dynamicAlias = consumerConfig.getParameter(DynamicConfigKeys.DYNAMIC_ALIAS);
    if (StringUtils.isNotBlank(dynamicAlias)) {
        String dynamicTimeout = null;
        DynamicConfigManager dynamicConfigManager = DynamicConfigManagerFactory.getDynamicManager(
            consumerConfig.getAppName(), dynamicAlias);
        if (dynamicConfigManager != null) {
            dynamicTimeout = dynamicConfigManager.getConsumerMethodProperty(request.getInterfaceName(),
                request.getMethodName(),
                "timeout");
        }
        // Only honor it when it is a real, non-default value.
        if (DynamicHelper.isNotDefault(dynamicTimeout) && StringUtils.isNotBlank(dynamicTimeout)) {
            return Integer.parseInt(dynamicTimeout);
        }
    }
    // Next: the timeout set on this specific invocation.
    Integer timeout = request.getTimeout();
    if (timeout == null || timeout <= 0) {
        // Consumer-side configuration (method level first, then interface level).
        timeout = consumerConfig.getMethodTimeout(request.getMethodName());
        if (timeout == null || timeout <= 0) {
            // Then the provider's advertised timeout attribute.
            timeout = StringUtils.parseInteger(providerInfo.getAttr(ATTR_TIMEOUT));
            if (timeout == null || timeout <= 0) {
                // Finally, the framework default.
                timeout = getIntValue(CONSUMER_INVOKE_TIMEOUT);
            }
        }
    }
    return timeout;
}
@Test
public void testResolveTimeout() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
  // Exercises the private fallback chain via reflection:
  // default -> provider attr -> consumer config -> per-request.
  Method resolveTimeoutMethod = AbstractCluster.class.getDeclaredMethod("resolveTimeout", SofaRequest.class,
      ConsumerConfig.class, ProviderInfo.class);
  resolveTimeoutMethod.setAccessible(true);

  SofaRequest sofaRequest = new SofaRequest();
  ConsumerConfig consumerConfig = new ConsumerConfig();
  ProviderInfo providerInfo = new ProviderInfo();
  Integer defaultTimeout = (Integer) resolveTimeoutMethod.invoke(abstractCluster, sofaRequest, consumerConfig,
      providerInfo);
  Assert.assertTrue(defaultTimeout == 3000);

  providerInfo.setStaticAttr(ProviderInfoAttrs.ATTR_TIMEOUT, "5000");
  Integer providerTimeout = (Integer) resolveTimeoutMethod.invoke(abstractCluster, sofaRequest, consumerConfig,
      providerInfo);
  Assert.assertTrue(providerTimeout == 5000);

  consumerConfig.setTimeout(2000);
  Integer consumerTimeout = (Integer) resolveTimeoutMethod.invoke(abstractCluster, sofaRequest, consumerConfig,
      providerInfo);
  Assert.assertTrue(consumerTimeout == 2000);

  sofaRequest.setTimeout(1000);
  Integer invokeTimeout = (Integer) resolveTimeoutMethod.invoke(abstractCluster, sofaRequest, consumerConfig,
      providerInfo);
  Assert.assertTrue(invokeTimeout == 1000);
}
/**
 * Returns a {@link FieldScope} that ignores the given top-level field numbers
 * during proto comparison.
 *
 * @param firstFieldNumber a field number to ignore (at least one is required)
 * @param rest             further field numbers to ignore
 */
public static FieldScope ignoringFields(int firstFieldNumber, int... rest) {
  return FieldScopeImpl.createIgnoringFields(asList(firstFieldNumber, rest));
}
@Test
public void testInvalidFieldNumber() {
  // Ignoring a field number the message type doesn't have must fail with a
  // descriptive error somewhere in the cause chain.
  Message message1 = parse("o_int: 44");
  Message message2 = parse("o_int: 33");

  try {
    assertThat(message1).ignoringFields(999).isEqualTo(message2);
    fail("Expected failure.");
  } catch (Exception expected) {
    // TODO(user): Use hasTransitiveCauseThat() if/when it becomes available.
    // Walk the cause chain manually looking for the expected message.
    Throwable cause = expected;
    while (cause != null) {
      if (cause
          .getMessage()
          .contains("Message type " + fullMessageName() + " has no field with number 999.")) {
        break;
      } else {
        cause = cause.getCause();
      }
    }
    if (cause == null) {
      fail("No cause with field number error message.");
    }
  }
}
/**
 * Encodes the cookie tokens into a single cookie value: each token is
 * URL-encoded, tokens are joined with {@code DELIMITER}, and the result is
 * Base64-encoded with trailing {@code '='} padding removed.
 *
 * @param cookieTokens tokens to encode (must not be null)
 * @return the unpadded Base64 cookie value
 */
protected String encodeCookie(String[] cookieTokens) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < cookieTokens.length; i++) {
        sb.append(URLEncoder.encode(cookieTokens[i], StandardCharsets.UTF_8));
        if (i < cookieTokens.length - 1) {
            sb.append(DELIMITER);
        }
    }
    // Fix: getBytes() without a charset uses the platform default encoding,
    // which can vary between JVMs — encode explicitly as UTF-8.
    // withoutPadding() yields the same result as the previous manual loop
    // that stripped trailing '=' characters.
    return Base64.getEncoder()
            .withoutPadding()
            .encodeToString(sb.toString().getBytes(StandardCharsets.UTF_8));
}
@Test
void encodeCookieTest() {
  // Known token set must produce a stable, unpadded Base64 cookie value.
  var expireMs = 1716435187323L;
  var cookieTokens = new String[] {"fake-user", Long.toString(expireMs),
      TokenBasedRememberMeServices.DEFAULT_ALGORITHM,
      "29f1c7ccbb489741392d27ba5c30f30d05c79ee66289b6d6da5b431bba99a0c7"};
  var encode = tokenBasedRememberMeServices.encodeCookie(cookieTokens);
  assertThat(encode)
      .isEqualTo("ZmFrZS11c2VyOjE3MTY0MzUxODczMjM6U0hBLTI1NjoyOWYxYzdjY2JiNDg5NzQxMz" +
          "kyZDI3YmE1YzMwZjMwZDA1Yzc5ZWU2NjI4OWI2ZDZkYTViNDMxYmJhOTlhMGM3");
}
/**
 * Validates the given object's constrained fields using the default
 * error-message prefix.
 *
 * @param data the object whose annotated fields are validated
 */
public static void validateFields(Object data) {
  validateFields(data, "Validation error: ");
}
@Test
void validateFields() {
  // A key containing markup must be rejected; a plain key must pass.
  StringDataEntry stringDataEntryValid = new StringDataEntry("key", "value");
  StringDataEntry stringDataEntryInvalid1 = new StringDataEntry("<object type=\"text/html\"><script>alert(document)</script></object>", "value");
  Assert.assertThrows(DataValidationException.class, () -> ConstraintValidator.validateFields(stringDataEntryInvalid1));
  ConstraintValidator.validateFields(stringDataEntryValid);
}
/**
 * Computes the CRC-32 checksum of the given binary value.
 *
 * @param slice the input bytes
 * @return the checksum as an unsigned 32-bit value widened to long
 */
@Description("compute CRC-32")
@ScalarFunction
@SqlType(StandardTypes.BIGINT)
public static long crc32(@SqlType(StandardTypes.VARBINARY) Slice slice) {
  CRC32 crc32 = new CRC32();
  crc32.update(slice.toByteBuffer());
  return crc32.getValue();
}
@Test
public void testCrc32() {
  // Fixed inputs with known CRC-32 values, including one routed through a CAST.
  assertFunction("crc32(to_utf8('CRC me!'))", BIGINT, 38028046L);
  assertFunction("crc32(to_utf8('1234567890'))", BIGINT, 639479525L);
  assertFunction("crc32(to_utf8(CAST(1234567890 AS VARCHAR)))", BIGINT, 639479525L);
  assertFunction("crc32(to_utf8('ABCDEFGHIJK'))", BIGINT, 1129618807L);
  assertFunction("crc32(to_utf8('ABCDEFGHIJKLM'))", BIGINT, 4223167559L);
}
public static <E extends LINK> E setLinkHref(E e, String href) { if (href.endsWith(".css")) { e.$rel("stylesheet"); // required in html5 } e.$href(href); return e; }
@Test
void testSetLinkHref() {
  // Non-CSS href: only $href; CSS href: stylesheet rel then $href.
  LINK link = mock(LINK.class);
  HamletImpl.setLinkHref(link, "uri");
  HamletImpl.setLinkHref(link, "style.css");
  verify(link).$href("uri");
  verify(link).$rel("stylesheet");
  verify(link).$href("style.css");
  verifyNoMoreInteractions(link);
}
/**
 * Looks up (or lazily creates) the state cell for the given namespace and
 * address within this work item, using a null state context.
 */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
  return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test
public void testOrderedListIsEmptyFalse() throws Exception {
  // isEmpty().readLater() must issue the backing read eagerly, and resolve to
  // false once the (non-empty) future completes.
  StateTag<OrderedListState<String>> addr =
      StateTags.orderedList("orderedList", StringUtf8Coder.of());
  OrderedListState<String> orderedList = underTest.state(NAMESPACE, addr);
  SettableFuture<Iterable<TimestampedValue<String>>> future = SettableFuture.create();
  when(mockReader.orderedListFuture(
          FULL_ORDERED_LIST_RANGE, key(NAMESPACE, "orderedList"), STATE_FAMILY, StringUtf8Coder.of()))
      .thenReturn(future);
  ReadableState<Boolean> result = orderedList.isEmpty().readLater();
  Mockito.verify(mockReader)
      .orderedListFuture(
          FULL_ORDERED_LIST_RANGE, key(NAMESPACE, "orderedList"), STATE_FAMILY, StringUtf8Coder.of());
  // Complete the future asynchronously to mimic a real backend response.
  waitAndSet(future, Collections.singletonList(TimestampedValue.of("world", Instant.EPOCH)), 200);
  assertThat(result.read(), Matchers.is(false));
}
/**
 * Asynchronously stores a transactional half message: the message is
 * rewritten onto the internal half-message topic before being persisted.
 */
public CompletableFuture<PutMessageResult> asyncPutHalfMessage(MessageExtBrokerInner messageInner) {
  return store.asyncPutMessage(parseHalfMessageInner(messageInner));
}
@Test
public void testAsyncPutHalfMessage() throws Exception {
  // The bridge must propagate the store's async PUT_OK result unchanged.
  when(messageStore.asyncPutMessage(any(MessageExtBrokerInner.class)))
      .thenReturn(CompletableFuture.completedFuture(new PutMessageResult(PutMessageStatus.PUT_OK,
          new AppendMessageResult(AppendMessageStatus.PUT_OK))));
  CompletableFuture<PutMessageResult> result =
      transactionBridge.asyncPutHalfMessage(createMessageBrokerInner());
  assertThat(result.get().getPutMessageStatus()).isEqualTo(PutMessageStatus.PUT_OK);
}
/**
 * Creates an unbounded MQTT read with no time limit and no record cap
 * (effectively read-forever until the pipeline stops).
 */
public static Read read() {
  return new AutoValue_MqttIO_Read.Builder()
      .setMaxReadTime(null)
      .setMaxNumRecords(Long.MAX_VALUE)
      .build();
}
@Test(timeout = 60 * 1000)
@Ignore("https://github.com/apache/beam/issues/18723 Test timeout failure.")
public void testReadNoClientId() throws Exception {
  // Read without an explicit MQTT client id: the connector must generate one
  // and still receive all published messages.
  final String topicName = "READ_TOPIC_NO_CLIENT_ID";
  Read mqttReader =
      MqttIO.read()
          .withConnectionConfiguration(
              MqttIO.ConnectionConfiguration.create("tcp://localhost:" + port, topicName))
          .withMaxNumRecords(10);
  PCollection<byte[]> output = pipeline.apply(mqttReader);
  PAssert.that(output)
      .containsInAnyOrder(
          "This is test 0".getBytes(StandardCharsets.UTF_8),
          "This is test 1".getBytes(StandardCharsets.UTF_8),
          "This is test 2".getBytes(StandardCharsets.UTF_8),
          "This is test 3".getBytes(StandardCharsets.UTF_8),
          "This is test 4".getBytes(StandardCharsets.UTF_8),
          "This is test 5".getBytes(StandardCharsets.UTF_8),
          "This is test 6".getBytes(StandardCharsets.UTF_8),
          "This is test 7".getBytes(StandardCharsets.UTF_8),
          "This is test 8".getBytes(StandardCharsets.UTF_8),
          "This is test 9".getBytes(StandardCharsets.UTF_8));

  // produce messages on the brokerService in another thread
  // This thread prevents to block the pipeline waiting for new messages
  MQTT client = new MQTT();
  client.setHost("tcp://localhost:" + port);
  final BlockingConnection publishConnection = client.blockingConnection();
  publishConnection.connect();
  Thread publisherThread =
      new Thread(
          () -> {
            try {
              LOG.info(
                  "Waiting pipeline connected to the MQTT broker before sending "
                      + "messages ...");
              // Poll broker client list until the pipeline's reader shows up,
              // so the published messages are not lost before subscription.
              boolean pipelineConnected = false;
              while (!pipelineConnected) {
                Thread.sleep(1000);
                for (Connection connection : brokerService.getBroker().getClients()) {
                  if (!connection.getConnectionId().isEmpty()) {
                    pipelineConnected = true;
                  }
                }
              }
              for (int i = 0; i < 10; i++) {
                publishConnection.publish(
                    topicName,
                    ("This is test " + i).getBytes(StandardCharsets.UTF_8),
                    QoS.EXACTLY_ONCE,
                    false);
              }
            } catch (Exception e) {
              // nothing to do
            }
          });
  publisherThread.start();
  pipeline.run();
  publishConnection.disconnect();
  publisherThread.join();
}
/**
 * Masks the password portion of any user-info segment in the given path
 * (e.g. {@code user:secret@host} becomes {@code user:xxxxxx@host}).
 *
 * @param path the path to sanitize; may be {@code null}
 * @return the sanitized path, or {@code null} when the input was {@code null}
 */
public static String sanitizePath(String path) {
    if (path == null) {
        return null;
    }
    return PATH_USERINFO_PASSWORD.matcher(path).replaceFirst("$1xxxxxx$3");
}
@Test
public void testSanitizePathWithUserInfo() {
  // The password between ':' and '@' must be replaced with xxxxxx.
  String path = "GEORGE:HARRISON@LIVERPOOL/QSYS.LIB/BEATLES.LIB/PENNYLANE.PGM";
  String expected = "GEORGE:xxxxxx@LIVERPOOL/QSYS.LIB/BEATLES.LIB/PENNYLANE.PGM";
  assertEquals(expected, URISupport.sanitizePath(path));
}
@Override // mappedStatementId 参数,暂时没有用。以后,可以基于 mappedStatementId + DataPermission 进行缓存 public List<DataPermissionRule> getDataPermissionRule(String mappedStatementId) { // 1. 无数据权限 if (CollUtil.isEmpty(rules)) { return Collections.emptyList(); } // 2. 未配置,则默认开启 DataPermission dataPermission = DataPermissionContextHolder.get(); if (dataPermission == null) { return rules; } // 3. 已配置,但禁用 if (!dataPermission.enable()) { return Collections.emptyList(); } // 4. 已配置,只选择部分规则 if (ArrayUtil.isNotEmpty(dataPermission.includeRules())) { return rules.stream().filter(rule -> ArrayUtil.contains(dataPermission.includeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 5. 已配置,只排除部分规则 if (ArrayUtil.isNotEmpty(dataPermission.excludeRules())) { return rules.stream().filter(rule -> !ArrayUtil.contains(dataPermission.excludeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 6. 已配置,全部规则 return rules; }
@Test
public void testGetDataPermissionRule_06() {
  // prepare arguments
  String mappedStatementId = randomString();
  // mock: annotation present, enabled, with no include/exclude lists
  DataPermissionContextHolder.add(AnnotationUtils.findAnnotation(TestClass06.class, DataPermission.class));
  // call
  List<DataPermissionRule> result = dataPermissionRuleFactory.getDataPermissionRule(mappedStatementId);
  // assert: the full (same) rules list is returned
  assertSame(rules, result);
}
/**
 * Returns the segment fetcher configured for the given protocol, falling back
 * to the HTTP fetcher for http/https and the PinotFS fetcher for everything
 * else when no explicit configuration exists.
 */
public static SegmentFetcher getSegmentFetcher(String protocol) {
  SegmentFetcher segmentFetcher = SEGMENT_FETCHER_MAP.get(protocol);
  if (segmentFetcher != null) {
    return segmentFetcher;
  } else {
    LOGGER.info("Segment fetcher is not configured for protocol: {}, using default", protocol);
    switch (protocol) {
      case CommonConstants.HTTP_PROTOCOL:
      case CommonConstants.HTTPS_PROTOCOL:
        return HTTP_SEGMENT_FETCHER;
      default:
        // Unknown protocols are assumed to be PinotFS-backed.
        return PINOT_FS_SEGMENT_FETCHER;
    }
  }
}
@Test
public void testDefaultSegmentFetcherFactory() {
  // Without explicit configuration: http/https use the HTTP fetcher,
  // anything else (including unknown protocols) uses the PinotFS fetcher.
  assertEquals(SegmentFetcherFactory.getSegmentFetcher(HTTP_PROTOCOL).getClass(), HttpSegmentFetcher.class);
  assertEquals(SegmentFetcherFactory.getSegmentFetcher(HTTPS_PROTOCOL).getClass(), HttpSegmentFetcher.class);
  assertEquals(SegmentFetcherFactory.getSegmentFetcher(FILE_PROTOCOL).getClass(), PinotFSSegmentFetcher.class);
  assertEquals(SegmentFetcherFactory.getSegmentFetcher("foo").getClass(), PinotFSSegmentFetcher.class);
}
/**
 * Renders this bitmap according to its internal representation:
 * {@code {}} when empty, {@code {v}} for a single value, the underlying
 * bitmap's own format for BITMAP_VALUE, and a braced element list for sets.
 */
@Override
public String toString() {
    switch (bitmapType) {
        case SINGLE_VALUE:
            return String.format("{%s}", singleValue);
        case BITMAP_VALUE:
            return this.bitmap.toString();
        case SET_VALUE:
            return String.format("{%s}", setToString());
        case EMPTY:
        default:
            // EMPTY (and any future unhandled type) renders as an empty set.
            return "{}";
    }
}
@Test
public void testToString() {
  // Covers all four representations: empty, single value, set-backed, bitmap-backed.
  Assert.assertEquals(emptyBitmap.toString(), "{}");
  Assert.assertEquals(singleBitmap.toString(), "{1}");
  Assert.assertEquals(mediumBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9}");
  Assert.assertEquals(largeBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22," +
      "23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39}");
}
/**
 * Returns the next randomized backoff interval in milliseconds and advances
 * the current interval for the following call.
 *
 * <p>NOTE(review): when the elapsed time exceeds {@code maxElapsedTimeMillis}
 * this returns {@code maxElapsedTimeMillis} itself rather than a sentinel
 * such as {@code STOP} — callers may keep retrying with a very large wait.
 * Looks intentional here, but worth confirming against the class contract.
 */
@Override
public long nextBackOffMillis() {
  // Make sure we have not gone over the maximum elapsed time.
  if (getElapsedTimeMillis() > maxElapsedTimeMillis) {
    return maxElapsedTimeMillis;
  }
  // Randomize around the current interval, then grow it for next time.
  int randomizedInterval =
      getRandomValueFromInterval(randomizationFactor, Math.random(), currentIntervalMillis);
  incrementCurrentInterval();
  return randomizedInterval;
}
@Test
public void testBackOffOverflow() {
  // Multiplying near Integer.MAX_VALUE would overflow; the implementation
  // must clamp the current interval to the configured maximum instead.
  int testInitialInterval = Integer.MAX_VALUE / 2;
  double testMultiplier = 2.1;
  int testMaxInterval = Integer.MAX_VALUE;
  ExponentialBackOff backOffPolicy =
      new ExponentialBackOff.Builder()
          .setInitialIntervalMillis(testInitialInterval)
          .setMultiplier(testMultiplier)
          .setMaxIntervalMillis(testMaxInterval)
          .build();
  backOffPolicy.nextBackOffMillis();
  // Assert that when an overflow is possible the current interval is set to the max interval.
  assertEquals(testMaxInterval, backOffPolicy.getCurrentIntervalMillis());
}
/**
 * Creates a {@code UTypeCast} AST template node for {@code (type) expression}.
 */
public static UTypeCast create(UTree<?> type, UExpression expression) {
  return new AutoValue_UTypeCast(type, expression);
}
@Test
public void serialization() {
  // A cast node must survive a serialize/deserialize round trip unchanged.
  SerializableTester.reserializeAndAssert(
      UTypeCast.create(UPrimitiveTypeTree.BYTE, ULiteral.intLit(100)));
}
/**
 * Parses a {@code ScanMetricsResult} from its JSON representation.
 *
 * @param json the JSON document to parse
 * @return the parsed metrics result
 */
static ScanMetricsResult fromJson(String json) {
  return JsonUtil.parse(json, ScanMetricsResultParser::fromJson);
}
/**
 * Parsing must silently ignore unknown JSON fields ("extra", "extra2") and still
 * yield a result equal to one built from the equivalent ScanMetrics.
 */
@Test
public void extraFields() {
    ScanMetrics scanMetrics = ScanMetrics.of(new DefaultMetricsContext());
    scanMetrics.totalPlanningDuration().record(10, TimeUnit.MINUTES);
    scanMetrics.resultDataFiles().increment(5L);
    scanMetrics.resultDeleteFiles().increment(5L);
    scanMetrics.scannedDataManifests().increment(5L);
    scanMetrics.skippedDataManifests().increment(5L);
    scanMetrics.totalFileSizeInBytes().increment(1024L);
    scanMetrics.totalDataManifests().increment(5L);
    // totalFileSizeInBytes is incremented twice: 1024 + 45 = 1069 in the JSON below.
    scanMetrics.totalFileSizeInBytes().increment(45L);
    scanMetrics.totalDeleteFileSizeInBytes().increment(23L);
    scanMetrics.skippedDataFiles().increment(3L);
    scanMetrics.skippedDeleteFiles().increment(3L);
    scanMetrics.scannedDeleteManifests().increment(3L);
    scanMetrics.skippedDeleteManifests().increment(3L);
    scanMetrics.indexedDeleteFiles().increment(10L);
    scanMetrics.positionalDeleteFiles().increment(6L);
    scanMetrics.equalityDeleteFiles().increment(4L);
    ScanMetricsResult scanMetricsResult = ScanMetricsResult.fromScanMetrics(scanMetrics);
    assertThat(
        ScanMetricsResultParser.fromJson(
            "{\"total-planning-duration\":{\"count\":1,\"time-unit\":\"nanoseconds\",\"total-duration\":600000000000},"
                + "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
                + "\"result-delete-files\":{\"unit\":\"count\",\"value\":5},"
                + "\"total-data-manifests\":{\"unit\":\"count\",\"value\":5},"
                + "\"total-delete-manifests\":{\"unit\":\"count\",\"value\":0},"
                + "\"scanned-data-manifests\":{\"unit\":\"count\",\"value\":5},"
                + "\"skipped-data-manifests\":{\"unit\":\"count\",\"value\":5},"
                + "\"total-file-size-in-bytes\":{\"unit\":\"bytes\",\"value\":1069},"
                + "\"total-delete-file-size-in-bytes\":{\"unit\":\"bytes\",\"value\":23},"
                + "\"skipped-data-files\":{\"unit\":\"count\",\"value\":3},"
                + "\"skipped-delete-files\":{\"unit\":\"count\",\"value\":3},"
                + "\"scanned-delete-manifests\":{\"unit\":\"count\",\"value\":3},"
                + "\"skipped-delete-manifests\":{\"unit\":\"count\",\"value\":3},"
                + "\"indexed-delete-files\":{\"unit\":\"count\",\"value\":10},"
                + "\"equality-delete-files\":{\"unit\":\"count\",\"value\":4},"
                + "\"positional-delete-files\":{\"unit\":\"count\",\"value\":6},"
                // Unknown fields below must be ignored by the parser.
                + "\"extra\": \"value\",\"extra2\":23}"))
        .isEqualTo(scanMetricsResult);
}
/** Wraps the inner windowed table so reads go through this materialization's filtering/projection. */
@Override
public MaterializedWindowedTable windowed() {
    return new KsqlMaterializedWindowedTable(inner.windowed());
}
/** A row rejected by the filter (Optional.empty) must not appear in the windowed result. */
@Test
public void shouldFilterWindowed() {
    // Given:
    final MaterializedWindowedTable table = materialization.windowed();
    givenNoopProject();
    when(filter.apply(any(), any(), any())).thenReturn(Optional.empty());
    // When:
    final Iterator<WindowedRow> result = table.get(aKey, partition, windowStartBounds, windowEndBounds);
    // Then:
    assertThat(result.hasNext(), is(false));
}
/**
 * Creates a {@code WithTimestamps} transform that assigns each element the timestamp
 * produced by {@code fn}, with no allowed timestamp skew ({@code Duration.ZERO}).
 */
public static <T> WithTimestamps<T> of(SerializableFunction<T, Instant> fn) {
    return new WithTimestamps<>(fn, Duration.ZERO);
}
/**
 * End-to-end check that a lambda timestamp function stamps each element with the
 * Instant parsed from its own string value (elements pass through unchanged).
 */
@Test
@Category(ValidatesRunner.class)
public void withTimestampsLambdaShouldApplyTimestamps() {
    final String yearTwoThousand = "946684800000";  // 2000-01-01T00:00:00Z in epoch millis
    PCollection<String> timestamped =
        p.apply(Create.of("1234", "0", Integer.toString(Integer.MAX_VALUE), yearTwoThousand))
            .apply(WithTimestamps.of((String input) -> new Instant(Long.valueOf(input))));
    // Pair each element with the timestamp the runner actually assigned to it.
    PCollection<KV<String, Instant>> timestampedVals =
        timestamped.apply(
            ParDo.of(
                new DoFn<String, KV<String, Instant>>() {
                    @ProcessElement
                    public void processElement(ProcessContext c) throws Exception {
                        c.output(KV.of(c.element(), c.timestamp()));
                    }
                }));
    PAssert.that(timestamped)
        .containsInAnyOrder(yearTwoThousand, "0", "1234", Integer.toString(Integer.MAX_VALUE));
    PAssert.that(timestampedVals)
        .containsInAnyOrder(
            KV.of("0", new Instant(0)),
            KV.of("1234", new Instant(Long.valueOf("1234"))),
            KV.of(Integer.toString(Integer.MAX_VALUE), new Instant(Integer.MAX_VALUE)),
            KV.of(yearTwoThousand, new Instant(Long.valueOf(yearTwoThousand))));
    p.run();
}
/**
 * Maps a supported administrative SQL statement to its executor;
 * returns empty when the statement is not handled here.
 */
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
    final SQLStatement statement = sqlStatementContext.getSqlStatement();
    if (statement instanceof ShowFunctionStatusStatement) {
        return Optional.of(new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) statement));
    } else if (statement instanceof ShowProcedureStatusStatement) {
        return Optional.of(new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) statement));
    } else if (statement instanceof ShowTablesStatement) {
        return Optional.of(new ShowTablesExecutor((ShowTablesStatement) statement, sqlStatementContext.getDatabaseType()));
    }
    return Optional.empty();
}
// A USE statement (via the extended create overload) must produce a UseDatabaseExecutor.
@Test
void assertCreateWithUse() {
    when(sqlStatementContext.getSqlStatement()).thenReturn(new MySQLUseStatement());
    Optional<DatabaseAdminExecutor> actual =
        new MySQLAdminExecutorCreator().create(sqlStatementContext, "use db", "", Collections.emptyList());
    assertTrue(actual.isPresent());
    assertThat(actual.get(), instanceOf(UseDatabaseExecutor.class));
}
/**
 * Generates the Java source block declaring a KiePMMLMapValues variable from the
 * PMML {@code MapValues} element, by cloning a code template and patching its AST:
 * nested FieldColumnPair declarations, an inline-table declaration, and the
 * builder-chain arguments (name, output column, default value, mapMissingTo, data type).
 *
 * @throws UnsupportedOperationException when a TableLocator is used (not supported)
 * @throws KiePMMLException when the template does not have the expected shape
 */
static BlockStmt getMapValuesVariableDeclaration(final String variableName, final MapValues mapValues) {
    if (mapValues.getInlineTable() == null && mapValues.getTableLocator() != null) {
        throw new UnsupportedOperationException("TableLocator not supported, yet");
    }
    // Clone the template method so the shared template AST is never mutated.
    final MethodDeclaration methodDeclaration =
            MAPVALUES_TEMPLATE.getMethodsByName(GETKIEPMMLMAPVALUES).get(0).clone();
    final BlockStmt mapValuesBody =
            methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final VariableDeclarator variableDeclarator =
            getVariableDeclarator(mapValuesBody, MAPVALUES).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, MAPVALUES, mapValuesBody)));
    variableDeclarator.setName(variableName);
    final BlockStmt toReturn = new BlockStmt();
    int counter = 0;
    final NodeList<Expression> arguments = new NodeList<>();
    // Emit one nested variable declaration per FieldColumnPair; collect their names
    // to pass later as the asList(...) arguments of the builder chain.
    if (mapValues.hasFieldColumnPairs()) {
        for (FieldColumnPair fieldColumnPair : mapValues.getFieldColumnPairs()) {
            String nestedVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, counter);
            arguments.add(new NameExpr(nestedVariableName));
            BlockStmt toAdd = getFieldColumnPairVariableDeclaration(nestedVariableName, fieldColumnPair);
            toAdd.getStatements().forEach(toReturn::addStatement);
            counter++;
        }
    }
    // Declare the inline table before the statements that reference it.
    String inlineTableVariableName = String.format("%s_InlineTable", variableName);
    BlockStmt toAdd = getInlineTableVariableDeclaration(inlineTableVariableName, mapValues.getInlineTable());
    toAdd.getStatements().forEach(toReturn::addStatement);
    final MethodCallExpr initializer = variableDeclarator.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, MAPVALUES, toReturn)))
            .asMethodCallExpr();
    // Patch the builder(...) call: argument 0 = name, argument 2 = output column.
    final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
    final StringLiteralExpr nameExpr = new StringLiteralExpr(variableName);
    final StringLiteralExpr outputColumnExpr = new StringLiteralExpr(mapValues.getOutputColumn());
    builder.setArgument(0, nameExpr);
    builder.setArgument(2, outputColumnExpr);
    final Expression dataTypeExpression = getExpressionForDataType(mapValues.getDataType());
    getChainedMethodCallExprFrom("withDefaultValue", initializer).setArgument(0, getExpressionForObject(mapValues.getDefaultValue()));
    getChainedMethodCallExprFrom("withMapMissingTo", initializer).setArgument(0, getExpressionForObject(mapValues.getMapMissingTo()));
    getChainedMethodCallExprFrom("withDataType", initializer).setArgument(0, dataTypeExpression);
    getChainedMethodCallExprFrom("withKiePMMLInlineTable", initializer).setArgument(0, new NameExpr(inlineTableVariableName));
    getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments);
    // Append the (patched) template body after the nested declarations.
    mapValuesBody.getStatements().forEach(toReturn::addStatement);
    return toReturn;
}
// Generated block must match the expected source file and compile with the listed imports.
@Test
void getMapValuesVariableDeclaration() throws IOException {
    String variableName = "variableName";
    BlockStmt retrieved = KiePMMLMapValuesFactory.getMapValuesVariableDeclaration(variableName, MAPVALUES);
    String text = getFileContent(TEST_01_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Arrays.class, Collections.class, Collectors.class,
        KiePMMLFieldColumnPair.class, KiePMMLInlineTable.class, KiePMMLMapValues.class,
        KiePMMLRow.class, Map.class, Stream.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/** Convenience overload delegating with the boolean flag defaulted to false. */
@Override
public SuspensionReasons verifyGroupGoingDownIsFine(ClusterApi clusterApi) throws HostStateChangeDeniedException {
    return verifyGroupGoingDownIsFine(clusterApi, false);
}
// Failure scenario driven entirely by the parameterized helper (13 hosts, not allowed down).
@Test
public void verifyGroupGoingDownIsFine_fails() throws HostStateChangeDeniedException {
    verifyGroupGoingDownIsFine(false, Optional.empty(), 13, false);
}
/**
 * Serializes {@code obj} into typed-bytes form and stores the resulting
 * raw bytes in this writable.
 *
 * @throws RuntimeException wrapping any IOException from serialization
 */
public void setValue(Object obj) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try {
        TypedBytesOutput.get(new DataOutputStream(buffer)).write(obj);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    byte[] serialized = buffer.toByteArray();
    set(serialized, 0, serialized.length);
}
// Round-trips a TypedBytesWritable through write/readFields and checks equality.
@Test
public void testIO() throws IOException {
    TypedBytesWritable tbw = new TypedBytesWritable();
    tbw.setValue(12345);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput dout = new DataOutputStream(baos);
    tbw.write(dout);
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInput din = new DataInputStream(bais);
    TypedBytesWritable readTbw = new TypedBytesWritable();
    readTbw.readFields(din);
    assertEquals(tbw, readTbw);
}
/**
 * Adds an element: if it exists in the backing origin list, mark its bit as
 * present; otherwise append it to the lazily-created tail list.
 * Always returns true (List.add contract).
 */
@Override
public synchronized boolean add(E e) {
    int index = originList.indexOf(e);
    if (index > -1) {
        // Element belongs to the original list — just re-enable its bit.
        rootSet.set(index);
        return true;
    } else {
        // Unknown element — track it in the overflow tail list.
        if (tailList == null) {
            tailList = new LinkedList<>();
        }
        return tailList.add(e);
    }
}
// Re-adding a removed origin element restores its bit; adding a new element goes to the tail list.
@Test
void testAdd() {
    List<String> list = Arrays.asList("A", "B", "C");
    BitList<String> bitList = new BitList<>(list);
    bitList.remove("A");
    Assertions.assertEquals(2, bitList.size());
    bitList.addAll(Collections.singletonList("A"));
    Assertions.assertEquals(3, bitList.size());
    bitList.addAll(Collections.singletonList("D"));
    Assertions.assertEquals(4, bitList.size());
    Assertions.assertEquals("D", bitList.get(3));
    Assertions.assertTrue(bitList.hasMoreElementInTailList());
    Assertions.assertEquals(Collections.singletonList("D"), bitList.getTailList());
    bitList.clear();
    bitList.addAll(Collections.singletonList("A"));
    Assertions.assertEquals(1, bitList.size());
    Assertions.assertEquals("A", bitList.get(0));
}
/** Delegates lookup by id to the wrapped collection; returns null when not found. */
public T findOneById(K objectId) {
    return delegate.findOneById(objectId);
}
// Finds an existing document by id and returns null for an unknown id.
@Test
void findOneById() {
    final var collection = jacksonCollection("simple", Simple.class);
    final List<Simple> items = List.of(
        new Simple("000000000000000000000001", "foo"),
        new Simple("000000000000000000000002", "bar")
    );
    collection.insert(items);
    assertThat(collection.findOneById(items.get(1).id())).isEqualTo(items.get(1));
    assertThat(collection.findOneById(new org.bson.types.ObjectId().toHexString())).isNull();
}
/**
 * Inserts or replaces the field-type record keyed by (index name, index set id).
 * Returns the freshly inserted record, or empty when an existing record was
 * updated in place (no upserted id is produced in that case).
 */
public Optional<IndexFieldTypesDTO> upsert(IndexFieldTypesDTO dto) {
    // Retried to work around transient upsert races in MongoDB.
    final WriteResult<IndexFieldTypesDTO, ObjectId> update = MongoDBUpsertRetryer.run(() -> db.update(
        DBQuery.and(
            DBQuery.is(FIELD_INDEX_NAME, dto.indexName()),
            DBQuery.is(FIELD_INDEX_SET_ID, dto.indexSetId())
        ),
        dto,
        true,
        false
    ));
    // Driver may report the upserted id as either ObjectId or String.
    final Object upsertedId = update.getUpsertedId();
    if (upsertedId instanceof ObjectId) {
        return get(((ObjectId) upsertedId).toHexString());
    } else if (upsertedId instanceof String) {
        return get((String) upsertedId);
    }
    return Optional.empty();
}
// First upsert of each DTO inserts (returns the record); a repeated upsert updates and returns empty.
@Test
public void upsert() {
    final IndexFieldTypesDTO newDto1 = createDto("graylog_0", Collections.emptySet());
    final IndexFieldTypesDTO newDto2 = createDto("graylog_1", Collections.emptySet());
    assertThat(dbService.findAll().size()).isEqualTo(0);
    final IndexFieldTypesDTO upsertedDto1 = dbService.upsert(newDto1).orElse(null);
    final IndexFieldTypesDTO upsertedDto2 = dbService.upsert(newDto2).orElse(null);
    assertThat(upsertedDto1).isNotNull();
    assertThat(upsertedDto2).isNotNull();
    assertThat(upsertedDto1.indexName()).isEqualTo("graylog_0");
    assertThat(upsertedDto2.indexName()).isEqualTo("graylog_1");
    assertThat(dbService.findAll().size()).isEqualTo(2);
    assertThat(dbService.upsert(newDto1)).isNotPresent();
    assertThat(dbService.upsert(newDto2)).isNotPresent();
    assertThat(dbService.findAll().size()).isEqualTo(2);
}
public JsonNode toJson(Object object) { // NOTE: jayway json path 2.4.0 seems to have issues with '@.name' so we'll do this manually // as determined by a cursory and purely subjective investigation by alex // "$..[?(@.name =~ /password$/i || @.name =~ /secret$/i || @.name =~ /secret[\\s_-]*key$/i || @.name =~ /keytab$/i // || @.name =~ /token$/i)]" try { JsonNode node; if (object instanceof JsonNode) { node = (JsonNode) object; } else if (object instanceof String) { node = JsonUtils.stringToJsonNode(String.valueOf(object)); } else { node = JsonUtils.objectToJsonNode(object); } return toJsonRecursive(node); } catch (IOException e) { throw new RuntimeException(e); } }
// A null input must serialize to the JSON literal "null", not throw.
@Test
public void testNull() {
    Assert.assertEquals(String.valueOf(_obfuscator.toJson(null)), "null");
}
/**
 * Builds and attaches the request entity (body) for a POST/PUT-style request —
 * multipart/form-data, a raw file body, raw parameter values, or
 * application/x-www-form-urlencoded — and returns a string rendering of the
 * posted body for the sample result (file contents are replaced by a placeholder).
 *
 * @param entityEnclosingRequest the request to attach the entity and headers to
 * @return printable representation of the posted body
 * @throws IOException on entity construction failure
 */
protected String setupHttpEntityEnclosingRequestData(HttpEntityEnclosingRequestBase entityEnclosingRequest)
        throws IOException {
    // Buffer to hold the post body, except file content
    StringBuilder postedBody = new StringBuilder(1000);
    HTTPFileArg[] files = getHTTPFiles();
    final String contentEncoding = getContentEncoding();
    Charset charset = Charset.forName(contentEncoding);
    // NOTE(review): constant true — presumably a refactoring leftover kept for the
    // debug log below; confirm before removing.
    final boolean haveContentEncoding = true;
    // Check if we should do a multipart/form-data or an
    // application/x-www-form-urlencoded post request
    if(getUseMultipart()) {
        // A pre-set Content-Type would conflict with the multipart boundary header.
        if (entityEnclosingRequest.getHeaders(HTTPConstants.HEADER_CONTENT_TYPE).length > 0) {
            log.info(
                "Content-Header is set already on the request! Will be replaced by a Multipart-Header. Old headers: {}",
                Arrays.asList(entityEnclosingRequest.getHeaders(HTTPConstants.HEADER_CONTENT_TYPE)));
            entityEnclosingRequest.removeHeaders(HTTPConstants.HEADER_CONTENT_TYPE);
        }
        // doBrowserCompatibleMultipart means "use charset for encoding MIME headers",
        // while RFC6532 means "use UTF-8 for encoding MIME headers"
        boolean doBrowserCompatibleMultipart = getDoBrowserCompatibleMultipart();
        if(log.isDebugEnabled()) {
            log.debug("Building multipart with:getDoBrowserCompatibleMultipart(): {}, with charset:{}, haveContentEncoding:{}",
                doBrowserCompatibleMultipart, charset, haveContentEncoding);
        }
        // Write the request to our own stream
        MultipartEntityBuilder multipartEntityBuilder = MultipartEntityBuilder.create();
        multipartEntityBuilder.setCharset(charset);
        if (doBrowserCompatibleMultipart) {
            multipartEntityBuilder.setLaxMode();
        } else {
            // Use UTF-8 for encoding header names and values
            multipartEntityBuilder.setMode(HttpMultipartMode.RFC6532);
        }
        // Create the parts
        // Add any parameters
        for (JMeterProperty jMeterProperty : getArguments()) {
            HTTPArgument arg = (HTTPArgument) jMeterProperty.getObjectValue();
            String parameterName = arg.getName();
            if (arg.isSkippable(parameterName)) {
                continue;
            }
            ContentType contentType;
            if (arg.getContentType().indexOf(';') >= 0) {
                // assume, that the content type contains charset info
                // don't add another charset and use parse to cope with the semicolon
                contentType = ContentType.parse(arg.getContentType());
            } else {
                contentType = ContentType.create(arg.getContentType(), charset);
            }
            StringBody stringBody = new StringBody(arg.getValue(), contentType);
            FormBodyPart formPart = FormBodyPartBuilder.create(parameterName, stringBody).build();
            multipartEntityBuilder.addPart(formPart);
        }
        // Add any files
        // Cannot retrieve parts once added to the MultiPartEntity, so have to save them here.
        ViewableFileBody[] fileBodies = new ViewableFileBody[files.length];
        for (int i=0; i < files.length; i++) {
            HTTPFileArg file = files[i];
            File reservedFile = FileServer.getFileServer().getResolvedFile(file.getPath());
            Charset filenameCharset = doBrowserCompatibleMultipart ? charset : StandardCharsets.UTF_8;
            fileBodies[i] = new ViewableFileBody(reservedFile, ContentType.parse(file.getMimeType()), filenameCharset);
            multipartEntityBuilder.addPart(file.getParamName(), fileBodies[i] );
        }
        HttpEntity entity = multipartEntityBuilder.build();
        entityEnclosingRequest.setEntity(entity);
        writeEntityToSB(postedBody, entity, fileBodies, contentEncoding);
    } else { // not multipart
        // Check if the header manager had a content type header
        // This allows the user to specify their own content-type for a POST request
        Header contentTypeHeader = entityEnclosingRequest.getFirstHeader(HTTPConstants.HEADER_CONTENT_TYPE);
        boolean hasContentTypeHeader = contentTypeHeader != null
                && contentTypeHeader.getValue() != null
                && contentTypeHeader.getValue().length() > 0;
        // If there are no arguments, we can send a file as the body of the request
        // TODO: needs a multiple file upload scenario
        if(!hasArguments() && getSendFileAsPostBody()) {
            // If getSendFileAsPostBody returned true, it's sure that file is not null
            HTTPFileArg file = files[0];
            if(!hasContentTypeHeader) {
                // Allow the mimetype of the file to control the content type
                if(file.getMimeType() != null && file.getMimeType().length() > 0) {
                    entityEnclosingRequest.setHeader(HTTPConstants.HEADER_CONTENT_TYPE, file.getMimeType());
                } else if(ADD_CONTENT_TYPE_TO_POST_IF_MISSING) {
                    entityEnclosingRequest.setHeader(HTTPConstants.HEADER_CONTENT_TYPE, HTTPConstants.APPLICATION_X_WWW_FORM_URLENCODED);
                }
            }
            FileEntity fileRequestEntity = new FileEntity(FileServer.getFileServer().getResolvedFile(file.getPath()), (ContentType) null);
            entityEnclosingRequest.setEntity(fileRequestEntity);
            // We just add placeholder text for file content
            postedBody.append("<actual file content, not shown here>");
        } else {
            // In a post request which is not multipart, we only support
            // parameters, no file upload is allowed
            // If none of the arguments have a name specified, we
            // just send all the values as the post body
            if(getSendParameterValuesAsPostBody()) {
                // Allow the mimetype of the file to control the content type
                // This is not obvious in GUI if you are not uploading any files,
                // but just sending the content of nameless parameters
                // TODO: needs a multiple file upload scenario
                if(!hasContentTypeHeader) {
                    HTTPFileArg file = files.length > 0? files[0] : null;
                    if(file != null && file.getMimeType() != null && file.getMimeType().length() > 0) {
                        entityEnclosingRequest.setHeader(HTTPConstants.HEADER_CONTENT_TYPE, file.getMimeType());
                    } else if(ADD_CONTENT_TYPE_TO_POST_IF_MISSING) {
                        entityEnclosingRequest.setHeader(HTTPConstants.HEADER_CONTENT_TYPE, HTTPConstants.APPLICATION_X_WWW_FORM_URLENCODED);
                    }
                }
                // Just append all the parameter values, and use that as the post body
                StringBuilder postBody = new StringBuilder();
                for (JMeterProperty jMeterProperty : getArguments()) {
                    HTTPArgument arg = (HTTPArgument) jMeterProperty.getObjectValue();
                    postBody.append(arg.getEncodedValue(contentEncoding));
                }
                // Let StringEntity perform the encoding
                StringEntity requestEntity = new StringEntity(postBody.toString(), contentEncoding);
                entityEnclosingRequest.setEntity(requestEntity);
                postedBody.append(postBody.toString());
            } else {
                // It is a normal post request, with parameter names and values
                // Set the content type
                if(!hasContentTypeHeader && ADD_CONTENT_TYPE_TO_POST_IF_MISSING) {
                    entityEnclosingRequest.setHeader(HTTPConstants.HEADER_CONTENT_TYPE, HTTPConstants.APPLICATION_X_WWW_FORM_URLENCODED);
                }
                UrlEncodedFormEntity entity = createUrlEncodedFormEntity(contentEncoding);
                entityEnclosingRequest.setEntity(entity);
                writeEntityToSB(postedBody, entity, EMPTY_FILE_BODIES, contentEncoding);
            }
        }
    }
    return postedBody.toString();
}
// A pre-set Content-Type header must be replaced by the multipart header;
// the file part's charset must survive into the request data.
@Test
void testParameterWithMultipartAndExplicitHeader() throws Exception {
    HTTPSamplerBase sampler = (HTTPSamplerBase) new HttpTestSampleGui().createTestElement();
    sampler.setThreadContext(jmctx);
    sampler.setDoMultipart(true);
    sampler.setDoBrowserCompatibleMultipart(true);
    HttpEntityEnclosingRequestBase post = new HttpPost();
    post.addHeader(HTTPConstants.HEADER_CONTENT_TYPE, "application/json");
    sampler.setHTTPFiles(new HTTPFileArg[] {new HTTPFileArg("filename", "file", "application/octect; charset=utf-8")});
    HTTPHC4Impl hc = new HTTPHC4Impl(sampler);
    String requestData = hc.setupHttpEntityEnclosingRequestData(post);
    assertEquals(0, post.getHeaders(HTTPConstants.HEADER_CONTENT_TYPE).length);
    Assertions.assertTrue(requestData.contains("charset=utf-8"));
}
/** Fetches the user by id and converts it to its REST representation. */
@Override
public UserRestResponse fetchUser(String id) {
    return usersSearchResponseGenerator.toRestUser(userService.fetchUser(id));
}
// GET on the user endpoint must return the REST view of the fetched user.
@Test
public void fetchUser_whenUserExists_shouldReturnUser() throws Exception {
    UserInformation user = generateUserSearchResult("user1", true, true, false, 2, 3);
    UserRestResponseForAdmins restUserForAdmins = toRestUser(user);
    when(userService.fetchUser("userLogin")).thenReturn(user);
    when(responseGenerator.toRestUser(user)).thenReturn(restUserForAdmins);
    MvcResult mvcResult = mockMvc.perform(get(USER_ENDPOINT + "/userLogin"))
        .andExpect(status().isOk())
        .andReturn();
    UserRestResponseForAdmins responseUser =
        gson.fromJson(mvcResult.getResponse().getContentAsString(), UserRestResponseForAdmins.class);
    assertThat(responseUser).isEqualTo(restUserForAdmins);
}
/** Snapshots the current event listeners into an immutable dispatch wrapper. */
ProducerListeners listeners() {
    return new ProducerListeners(eventListeners.toArray(new HollowProducerEventListener[0]));
}
// A listener that throws must not prevent fireCycleStart from invoking it (and others).
@Test
public void fireCycleStartDontStopWhenOneFails() {
    long version = 31337;
    HollowProducer.ReadState readState = Mockito.mock(HollowProducer.ReadState.class);
    Mockito.when(readState.getVersion()).thenReturn(version);
    Mockito.doThrow(RuntimeException.class).when(listener).onCycleStart(version);
    listenerSupport.listeners().fireCycleStart(version);
    Mockito.verify(listener).onCycleStart(version);
}
/**
 * Visits every unit in the map; visit() appends each unit to the shared
 * result list in dependency order, yielding a topological ordering.
 */
public List<String> sort() {
    for (String unitName : map.keySet()) {
        visit(unitName);
    }
    return result;
}
// Linear dependency chain a <- b <- c must sort to [a, b, c].
@Test
public void simple() {
    ImportUnit a = create("a");
    ImportUnit b = create("b", "a");
    ImportUnit c = create("c", "b");
    Map<String, ImportUnit> importUnits = new HashMap<>();
    importUnits.put("a", a);
    importUnits.put("b", b);
    importUnits.put("c", c);
    ImportUnitSorter sorter = new ImportUnitSorter(importUnits);
    List<String> sorted = sorter.sort();
    assertEquals(importUnits.size(), sorted.size());
    assertEquals(List.of("a", "b", "c"), sorted);
}
/** Allocates a heap buffer with the allocator's default initial and maximum capacities. */
@Override
public ByteBuf heapBuffer() {
    return heapBuffer(DEFAULT_INITIAL_CAPACITY, DEFAULT_MAX_CAPACITY);
}
// Heap-memory metric must track allocation, capacity growth, and release.
@Test
public void testUsedHeapMemory() {
    T allocator = newAllocator(true);
    ByteBufAllocatorMetric metric = ((ByteBufAllocatorMetricProvider) allocator).metric();
    assertEquals(0, metric.usedHeapMemory());
    ByteBuf buffer = allocator.heapBuffer(1024, 4096);
    int capacity = buffer.capacity();
    assertEquals(expectedUsedMemory(allocator, capacity), metric.usedHeapMemory());
    // Double the size of the buffer
    buffer.capacity(capacity << 1);
    capacity = buffer.capacity();
    assertEquals(expectedUsedMemory(allocator, capacity), metric.usedHeapMemory());
    buffer.release();
    assertEquals(expectedUsedMemoryAfterRelease(allocator, capacity), metric.usedHeapMemory());
}
/**
 * Ensures that {@code object} is not an instance of the given {@code type}.
 *
 * @param type         the forbidden type (must not be null)
 * @param object       the object to check (may be null; null is never an instance)
 * @param errorMessage message for the exception when the check fails
 * @return {@code object}, unchanged, when it is not an instance of {@code type}
 * @throws IllegalArgumentException when {@code type} is null or {@code object} is an instance of it
 */
public static <E> E checkNotInstanceOf(Class<?> type, E object, String errorMessage) {
    // Fix: use the wildcard-parameterized Class<?> instead of the raw type
    // (source-compatible for all existing callers, removes the raw-type warning).
    isNotNull(type, "type");
    if (type.isInstance(object)) {
        throw new IllegalArgumentException(errorMessage);
    }
    return object;
}
// A null type argument must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void test_checkNotInstanceOf_withNullType() {
    checkNotInstanceOf(null, BigInteger.ONE, "argumentName");
}
/**
 * Returns a ListIterator positioned before {@code initialIndex}, backed by the
 * forward iterator() factory. Forward traversal reuses a cached iterator; moving
 * backwards invalidates it, so each previous() re-creates an iterator at the
 * target position (O(index) per backward step).
 *
 * @throws IndexOutOfBoundsException when initialIndex is out of range
 */
@Override
@Nonnull
public ListIterator<T> listIterator(final int initialIndex) {
    final Iterator<T> initialIterator;
    try {
        initialIterator = iterator(initialIndex);
    } catch (NoSuchElementException ex) {
        // Translate per the ListIterator contract for an invalid start index.
        throw new IndexOutOfBoundsException();
    }
    return new AbstractListIterator<T>() {
        // Index of the element last returned by next(); starts one before initialIndex.
        private int index = initialIndex - 1;
        // Cached forward iterator; null after a backward move invalidates it.
        @Nullable private Iterator<T> forwardIterator = initialIterator;

        @Nonnull
        private Iterator<T> getForwardIterator() {
            if (forwardIterator == null) {
                try {
                    // Re-create the forward iterator at the current cursor position.
                    forwardIterator = iterator(index+1);
                } catch (IndexOutOfBoundsException ex) {
                    throw new NoSuchElementException();
                }
            }
            return forwardIterator;
        }

        @Override
        public boolean hasNext() {
            return getForwardIterator().hasNext();
        }

        @Override
        public boolean hasPrevious() {
            return index >= 0;
        }

        @Override
        public T next() {
            T ret = getForwardIterator().next();
            index++;
            return ret;
        }

        @Override
        public int nextIndex() {
            return index+1;
        }

        @Override
        public T previous() {
            // Backward move: drop the cached iterator and fetch the element at index.
            forwardIterator = null;
            try {
                return iterator(index--).next();
            } catch (IndexOutOfBoundsException ex) {
                throw new NoSuchElementException();
            }
        }

        @Override
        public int previousIndex() {
            return index;
        }
    };
}
// Walking backwards past the start must throw NoSuchElementException exactly on
// the 101st previous() call, not earlier.
@Test
public void testReverseIterationException() {
    // note: no "expected = NoSuchElementException", because we want to make sure the exception occurs only during
    // the last call to previous()
    ListIterator<Integer> iter = list.listIterator(100);
    for (int i=0; i<100; i++) {
        iter.previous();
    }
    try {
        iter.previous();
    } catch (NoSuchElementException ex) {
        return;
    }
    Assert.fail();
}
@Override public void start( final KsqlModuleType moduleType, final Properties ksqlProperties) { final BaseSupportConfig ksqlVersionCheckerConfig = new PhoneHomeConfig(ksqlProperties, "ksql"); if (!ksqlVersionCheckerConfig.isProactiveSupportEnabled()) { log.warn(legalDisclaimerProactiveSupportDisabled()); return; } try { final KsqlVersionChecker ksqlVersionChecker = versionCheckerFactory.create( ksqlVersionCheckerConfig, moduleType, enableSettlingTime, this::isActive ); ksqlVersionChecker.init(); ksqlVersionChecker.setUncaughtExceptionHandler((t, e) -> log.error("Uncaught exception in thread '{}':", t.getName(), e)); ksqlVersionChecker.start(); final long reportIntervalMs = ksqlVersionCheckerConfig.getReportIntervalMs(); final long reportIntervalHours = reportIntervalMs / (60 * 60 * 1000); // We log at WARN level to increase the visibility of this information. log.warn(legalDisclaimerProactiveSupportEnabled(reportIntervalHours)); } catch (final Exception e) { // We catch any exceptions to prevent collateral damage to the more important broker // threads that are running in the same JVM. log.error("Failed to start KsqlVersionCheckerAgent: {}", e.getMessage()); } }
// The agent must init, install the exception handler, and start the checker — in that order.
@Test
public void shouldStartTheAgentCorrectly() {
    // When:
    ksqlVersionCheckerAgent.start(KsqlModuleType.SERVER, properties);
    // Then:
    final InOrder inOrder = Mockito.inOrder(ksqlVersionChecker);
    inOrder.verify(ksqlVersionChecker).init();
    inOrder.verify(ksqlVersionChecker).setUncaughtExceptionHandler(any());
    inOrder.verify(ksqlVersionChecker).start();
}
/**
 * Builds a fixed-size thread pool from URL parameters:
 * thread name (parameter, then attribute, then default), thread count, and
 * queue capacity (0 = direct hand-off, negative = memory-safe unbounded queue,
 * positive = bounded queue).
 */
@Override
public Executor getExecutor(URL url) {
    String name = url.getParameter(THREAD_NAME_KEY, (String) url.getAttribute(THREAD_NAME_KEY, DEFAULT_THREAD_NAME));
    int threads = url.getParameter(THREADS_KEY, DEFAULT_THREADS);
    int queues = url.getParameter(QUEUES_KEY, DEFAULT_QUEUES);
    final BlockingQueue<Runnable> workQueue;
    if (queues == 0) {
        workQueue = new SynchronousQueue<>();
    } else if (queues < 0) {
        workQueue = new MemorySafeLinkedBlockingQueue<>();
    } else {
        workQueue = new LinkedBlockingQueue<>(queues);
    }
    return new ThreadPoolExecutor(
            threads,
            threads,
            0,
            TimeUnit.MILLISECONDS,
            workQueue,
            new NamedInternalThreadFactory(name, true),
            new AbortPolicyWithReport(name, url));
}
// A positive queues parameter must produce a bounded LinkedBlockingQueue.
@Test
void getExecutor2() throws Exception {
    URL url = URL.valueOf("dubbo://10.20.130.230:20880/context/path?" + QUEUES_KEY + "=1");
    ThreadPool threadPool = new FixedThreadPool();
    ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.getExecutor(url);
    assertThat(executor.getQueue(), Matchers.<BlockingQueue<Runnable>>instanceOf(LinkedBlockingQueue.class));
}
/**
 * Rejects persistent instances: batch registration only accepts ephemeral ones.
 *
 * @throws NacosException (INVALID_PARAM / INSTANCE_ERROR) when the instance is persistent
 */
public static void checkInstanceIsEphemeral(Instance instance) throws NacosException {
    if (!instance.isEphemeral()) {
        throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.INSTANCE_ERROR,
                String.format("Batch registration does not allow persistent instance registration , Instance:%s", instance));
    }
}
/**
 * An ephemeral instance passes the check; a persistent instance must be
 * rejected with INVALID_PARAM.
 */
@Test
void testCheckInstanceIsEphemeral() throws NacosException {
    Instance instance = new Instance();
    instance.setIp("127.0.0.1");
    instance.setPort(9089);
    instance.setEphemeral(true);
    NamingUtils.checkInstanceIsEphemeral(instance);
    try {
        instance = new Instance();
        instance.setIp("127.0.0.1");
        instance.setPort(9089);
        instance.setEphemeral(false);
        NamingUtils.checkInstanceIsEphemeral(instance);
        // Fix: previously the test silently passed when no exception was thrown.
        // AssertionError is an Error, so it is not swallowed by the catch below.
        throw new AssertionError("Expected NacosException for persistent instance");
    } catch (NacosException e) {
        assertEquals(NacosException.INVALID_PARAM, e.getErrCode());
    }
}
/**
 * Parses a CGM Specific Ops Control Point response packet.
 * Layout: Op Code (UINT8) + operand (size depends on the op code) +
 * optional CRC16 (UINT16, little-endian, MCRF4XX over the preceding bytes).
 * Malformed packets go to onInvalidDataReceived; CRC mismatches go to
 * onCGMSpecificOpsResponseReceivedWithCrcError.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Minimum packet: op code + at least one operand byte.
    if (data.size() < 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Read the Op Code
    final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);

    // Estimate the expected operand size based on the Op Code
    int expectedOperandSize;
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE ->
            // UINT8
            expectedOperandSize = 1;
        case OP_CODE_CALIBRATION_VALUE_RESPONSE ->
            // Calibration Value
            expectedOperandSize = 10;
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE,
             OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPO_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPER_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
            // SFLOAT
            expectedOperandSize = 2;
        case OP_CODE_RESPONSE_CODE ->
            // Request Op Code (UINT8), Response Code Value (UINT8)
            expectedOperandSize = 2;
        default -> {
            onInvalidDataReceived(device, data);
            return;
        }
    }

    // Verify packet length: exact operand size, with or without the 2-byte CRC.
    if (data.size() != 1 + expectedOperandSize && data.size() != 1 + expectedOperandSize + 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Verify CRC if present
    final boolean crcPresent = data.size() == 1 + expectedOperandSize + 2; // opCode + expected operand + CRC
    if (crcPresent) {
        final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 1 + expectedOperandSize);
        final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 1 + expectedOperandSize);
        if (expectedCrc != actualCrc) {
            onCGMSpecificOpsResponseReceivedWithCrcError(device, data);
            return;
        }
    }

    // Structured responses (interval / calibration / generic response code).
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> {
            final int interval = data.getIntValue(Data.FORMAT_UINT8, 1);
            onContinuousGlucoseCommunicationIntervalReceived(device, interval, crcPresent);
            return;
        }
        case OP_CODE_CALIBRATION_VALUE_RESPONSE -> {
            // Operand layout: SFLOAT concentration (1-2), UINT16 time (3-4),
            // UINT8 type/location nibbles (5), UINT16 next time (6-7),
            // UINT16 record number (8-9), UINT8 status (10).
            final float glucoseConcentrationOfCalibration = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
            final int calibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 3);
            final int calibrationTypeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 5);
            @SuppressLint("WrongConstant")
            final int calibrationType = calibrationTypeAndSampleLocation & 0x0F;
            final int calibrationSampleLocation = calibrationTypeAndSampleLocation >> 4;
            final int nextCalibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 6);
            final int calibrationDataRecordNumber = data.getIntValue(Data.FORMAT_UINT16_LE, 8);
            final int calibrationStatus = data.getIntValue(Data.FORMAT_UINT8, 10);
            onContinuousGlucoseCalibrationValueReceived(device, glucoseConcentrationOfCalibration,
                    calibrationTime, nextCalibrationTime, calibrationType, calibrationSampleLocation,
                    calibrationDataRecordNumber, new CGMCalibrationStatus(calibrationStatus), crcPresent);
            return;
        }
        case OP_CODE_RESPONSE_CODE -> {
            final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1); // ignore
            final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 2);
            if (responseCode == CGM_RESPONSE_SUCCESS) {
                onCGMSpecificOpsOperationCompleted(device, requestCode, crcPresent);
            } else {
                onCGMSpecificOpsOperationError(device, requestCode, responseCode, crcPresent);
            }
            return;
        }
    }

    // Remaining op codes all carry a single SFLOAT alert level.
    // Read SFLOAT value
    final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
    switch (opCode) {
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucosePatientHighAlertReceived(device, value, crcPresent);
        case OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucosePatientLowAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPO_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseHypoAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPER_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseHyperAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseRateOfDecreaseAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseRateOfIncreaseAlertReceived(device, value, crcPresent);
    }
}
// Packet {28, 2, 2}: op code 28 = response code, request 2, response 2 (error, no CRC).
@Test
public void onCGMSpecificOpsOperationError() {
    final Data data = new Data(new byte[] { 28, 2, 2});
    callback.onDataReceived(null, data);
    assertEquals(error, 2);
    assertFalse(secured);
    assertEquals(2, requestCode);
}
/**
 * Similarity score in [0, 100] based on the Levenshtein distance normalized
 * by the longer sequence's length; two empty sequences score 0.
 */
@Override
public <T> int score(List<T> left, List<T> right) {
    if (left.isEmpty() && right.isEmpty()) {
        return 0;
    }
    int distance = levenshteinDistance(left, right);
    double normalized = ((double) distance) / max(left.size(), right.size());
    return (int) (100 * (1.0 - normalized));
}
// Completely disjoint sequences must score zero similarity.
@Test
public void zero_if_fully_different() {
    List<String> left = asList("a", "b", "c");
    List<String> right = asList("d", "e");
    assertThat(underTest.score(left, right)).isZero();
}