focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Looks up an AppNamespace by application id and namespace name; thin delegation to the repository (null if absent — confirm repository contract).
public AppNamespace findByAppIdAndName(String appId, String namespaceName) { return appNamespaceRepository.findByAppIdAndName(appId, namespaceName); }
// Seeds app-namespace rows via SQL fixture, then verifies findByAppIdAndName returns a row for an existing (appId, name) pair and null for a name owned by a different app.
@Test @Sql(scripts = "/sql/appnamespaceservice/init-appnamespace.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD) @Sql(scripts = "/sql/cleanup.sql", executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD) public void testFindPublicAppNamespaceByAppAndName() { Assert.assertNotNull(appNamespaceService.findByAppIdAndName("100003173", "datasourcexml")); Assert.assertNull(appNamespaceService.findByAppIdAndName("100003173", "TFF.song0711-02")); }
/**
 * Adds or replaces the given documents in the Lucene index.
 *
 * <p>Converts each {@code HaloDocument} to a Lucene {@code Document} and collects the id terms
 * so any previously indexed versions are atomically deleted and replaced via
 * {@code IndexWriter.updateDocuments}. The searcher is refreshed and the writer committed so
 * the new documents become visible.
 *
 * @param haloDocs documents to add or update; each must have a non-null id
 * @throws RuntimeException wrapping any {@link IOException} from the index writer
 */
@Override
public void addOrUpdate(Iterable<HaloDocument> haloDocs) {
    var docs = new LinkedList<Document>();
    var terms = new LinkedList<BytesRef>();
    haloDocs.forEach(haloDoc -> {
        var doc = this.haloDocumentConverter.convert(haloDoc);
        terms.add(new BytesRef(haloDoc.getId()));
        docs.add(doc);
    });
    // Matches every existing document whose id is among the incoming ones.
    var deleteQuery = new TermInSetQuery("id", terms);
    try {
        this.indexWriter.updateDocuments(deleteQuery, docs);
        this.searcherManager.maybeRefreshBlocking();
        this.indexWriter.commit();
    } catch (IOException e) {
        // Fix: add context to the wrapped exception instead of rethrowing bare;
        // the cause is preserved for callers.
        throw new RuntimeException("Failed to add or update documents in the search index", e);
    }
}
// Verifies addOrUpdate converts the doc (id preserved), passes it with a delete query to IndexWriter.updateDocuments, then refreshes the searcher and commits.
@Test void shouldAddOrUpdateDocument() throws IOException { var haloDoc = createFakeHaloDoc(); searchEngine.addOrUpdate(List.of(haloDoc)); verify(this.indexWriter).updateDocuments(any(Query.class), assertArg(docs -> { var docList = Streams.stream(docs).toList(); assertEquals(1, docList.size()); var doc = docList.get(0); assertInstanceOf(Document.class, doc); var document = (Document) doc; assertEquals("fake-id", document.get("id")); })); verify(this.searcherManager).maybeRefreshBlocking(); verify(this.indexWriter).commit(); }
/**
 * Parses a string into a {@code SchemaAndValue}.
 *
 * @param value the raw string; may be null
 * @return the shared null pair for null input, a STRING-typed pair for the empty
 *     string, otherwise the result of running the value parser in non-embedded mode
 */
public static SchemaAndValue parseString(String value) {
    // Guard: null maps to the canonical null schema/value pair.
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    // Guard: the empty string is a valid STRING value; nothing to parse.
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    // Everything else goes through the full value parser.
    return new ValueParser(new Parser(value)).parse(false);
}
// Verifies parseString maps the extreme double magnitudes (+/-Double.MAX_VALUE) to FLOAT64 schema with an exact Double value round-trip.
@Test public void shouldParseDoubleAsFloat64() { Double value = Double.MAX_VALUE; SchemaAndValue schemaAndValue = Values.parseString( String.valueOf(value) ); assertEquals(Schema.FLOAT64_SCHEMA, schemaAndValue.schema()); assertInstanceOf(Double.class, schemaAndValue.value()); assertEquals(value, (Double) schemaAndValue.value(), 0); value = -Double.MAX_VALUE; schemaAndValue = Values.parseString( String.valueOf(value) ); assertEquals(Schema.FLOAT64_SCHEMA, schemaAndValue.schema()); assertInstanceOf(Double.class, schemaAndValue.value()); assertEquals(value, (Double) schemaAndValue.value(), 0); }
/**
 * Checks whether the given URI exists in the store.
 *
 * <p>A directory exists as such; a delimiter-terminated path that is not a
 * directory cannot name a file and so does not exist; anything else is checked
 * as a plain file. A missing key is reported as non-existent rather than an error.
 *
 * @param fileUri the URI to probe
 * @return true if the URI refers to an existing directory or file
 * @throws IOException on underlying store errors other than a missing key
 */
@Override
public boolean exists(URI fileUri) throws IOException {
    try {
        return isDirectory(fileUri)
            || (!isPathTerminatedByDelimiter(fileUri) && existsFile(fileUri));
    } catch (NoSuchKeyException e) {
        return false;
    }
}
// Creates files under my-files-dir/my-files-dir-child and checks exists() for the bucket root, both directory levels, a present file, and an absent file.
@Test public void testExists() throws Exception { String[] originalFiles = new String[]{"a-ex.txt", "b-ex.txt", "c-ex.txt"}; String folder = "my-files-dir"; String childFolder = "my-files-dir-child"; for (String fileName : originalFiles) { String folderName = folder + DELIMITER + childFolder; createEmptyFile(folderName, fileName); } boolean bucketExists = _s3PinotFS.exists(URI.create(String.format(DIR_FORMAT, SCHEME, BUCKET))); boolean dirExists = _s3PinotFS.exists(URI.create(String.format(FILE_FORMAT, SCHEME, BUCKET, folder))); boolean childDirExists = _s3PinotFS.exists(URI.create(String.format(FILE_FORMAT, SCHEME, BUCKET, folder + DELIMITER + childFolder))); boolean fileExists = _s3PinotFS.exists(URI.create( String.format(FILE_FORMAT, SCHEME, BUCKET, folder + DELIMITER + childFolder + DELIMITER + "a-ex.txt"))); boolean fileNotExists = _s3PinotFS.exists(URI.create( String.format(FILE_FORMAT, SCHEME, BUCKET, folder + DELIMITER + childFolder + DELIMITER + "d-ex.txt"))); Assert.assertTrue(bucketExists); Assert.assertTrue(dirExists); Assert.assertTrue(childDirExists); Assert.assertTrue(fileExists); Assert.assertFalse(fileNotExists); }
// Static factory: returns a fresh HoodieWriteConfig.Builder.
public static HoodieWriteConfig.Builder newBuilder() { return new Builder(); }
// With a consistent-hashing bucket index configured, attaching a clustering config using an incompatible plan/execution strategy must fail validation with IllegalArgumentException.
@Test public void testConsistentBucketIndexInvalidClusteringConfig() { Properties props = new Properties(); props.setProperty(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid"); TypedProperties consistentBucketIndexProps = HoodieIndexConfig.newBuilder().fromProperties(props).withIndexType(HoodieIndex.IndexType.BUCKET) .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.CONSISTENT_HASHING).build().getProps(); HoodieWriteConfig.Builder writeConfigBuilder = HoodieWriteConfig.newBuilder().withPath("/tmp"); assertThrows(IllegalArgumentException.class, () -> writeConfigBuilder.withClusteringConfig(HoodieClusteringConfig.newBuilder() .fromProperties(consistentBucketIndexProps) .withClusteringPlanStrategyClass(HoodieClusteringConfig.JAVA_SIZED_BASED_CLUSTERING_PLAN_STRATEGY).build())); assertThrows(IllegalArgumentException.class, () -> writeConfigBuilder.withClusteringConfig(HoodieClusteringConfig.newBuilder() .fromProperties(consistentBucketIndexProps) .withClusteringExecutionStrategyClass(HoodieClusteringConfig.SPARK_SORT_AND_SIZE_EXECUTION_STRATEGY).build())); }
@Override public void importData(JsonReader reader) throws IOException { logger.info("Reading configuration for 1.1"); // this *HAS* to start as an object reader.beginObject(); while (reader.hasNext()) { JsonToken tok = reader.peek(); switch (tok) { case NAME: String name = reader.nextName(); // find out which member it is if (name.equals(CLIENTS)) { readClients(reader); } else if (name.equals(GRANTS)) { readGrants(reader); } else if (name.equals(WHITELISTEDSITES)) { readWhitelistedSites(reader); } else if (name.equals(BLACKLISTEDSITES)) { readBlacklistedSites(reader); } else if (name.equals(AUTHENTICATIONHOLDERS)) { readAuthenticationHolders(reader); } else if (name.equals(ACCESSTOKENS)) { readAccessTokens(reader); } else if (name.equals(REFRESHTOKENS)) { readRefreshTokens(reader); } else if (name.equals(SYSTEMSCOPES)) { readSystemScopes(reader); } else { for (MITREidDataServiceExtension extension : extensions) { if (extension.supportsVersion(THIS_VERSION)) { if (extension.supportsVersion(THIS_VERSION)) { extension.importExtensionData(name, reader); break; } } } // unknown token, skip it reader.skipValue(); } break; case END_OBJECT: // the object ended, we're done here reader.endObject(); continue; default: logger.debug("Found unexpected entry"); reader.skipValue(); continue; } } fixObjectReferences(); for (MITREidDataServiceExtension extension : extensions) { if (extension.supportsVersion(THIS_VERSION)) { extension.fixExtensionObjectReferences(maps); break; } } maps.clearAll(); }
// Builds two expected refresh tokens, feeds a JSON config containing matching token entries to
// dataService.importData, and verifies — via a fake in-memory repository backed by mocks — that
// both tokens are saved with the expected client id, expiration, and JWT value. saveRefreshToken
// is expected 6 times: once per token plus follow-up saves for client and auth-holder wiring.
@Test public void testImportRefreshTokens() throws IOException, ParseException { String expiration1 = "2014-09-10T22:49:44.090+00:00"; Date expirationDate1 = formatter.parse(expiration1, Locale.ENGLISH); ClientDetailsEntity mockedClient1 = mock(ClientDetailsEntity.class); when(mockedClient1.getClientId()).thenReturn("mocked_client_1"); AuthenticationHolderEntity mockedAuthHolder1 = mock(AuthenticationHolderEntity.class); when(mockedAuthHolder1.getId()).thenReturn(1L); OAuth2RefreshTokenEntity token1 = new OAuth2RefreshTokenEntity(); token1.setId(1L); token1.setClient(mockedClient1); token1.setExpiration(expirationDate1); token1.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ.")); token1.setAuthenticationHolder(mockedAuthHolder1); String expiration2 = "2015-01-07T18:31:50.079+00:00"; Date expirationDate2 = formatter.parse(expiration2, Locale.ENGLISH); ClientDetailsEntity mockedClient2 = mock(ClientDetailsEntity.class); when(mockedClient2.getClientId()).thenReturn("mocked_client_2"); AuthenticationHolderEntity mockedAuthHolder2 = mock(AuthenticationHolderEntity.class); when(mockedAuthHolder2.getId()).thenReturn(2L); OAuth2RefreshTokenEntity token2 = new OAuth2RefreshTokenEntity(); token2.setId(2L); token2.setClient(mockedClient2); token2.setExpiration(expirationDate2); token2.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ.")); token2.setAuthenticationHolder(mockedAuthHolder2); String configJson = "{" + "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " + "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " + "\"" + MITREidDataService.CLIENTS + "\": [], " + "\"" + MITREidDataService.GRANTS + "\": [], " + "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " + "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " + "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [], " + "\"" + MITREidDataService.REFRESHTOKENS + "\": [" +
"{\"id\":1,\"clientId\":\"mocked_client_1\",\"expiration\":\"2014-09-10T22:49:44.090+00:00\"," + "\"authenticationHolderId\":1,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ.\"}," + "{\"id\":2,\"clientId\":\"mocked_client_2\",\"expiration\":\"2015-01-07T18:31:50.079+00:00\"," + "\"authenticationHolderId\":2,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ.\"}" + " ]" + "}"; System.err.println(configJson); JsonReader reader = new JsonReader(new StringReader(configJson)); final Map<Long, OAuth2RefreshTokenEntity> fakeDb = new HashMap<>(); when(tokenRepository.saveRefreshToken(isA(OAuth2RefreshTokenEntity.class))).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() { Long id = 332L; @Override public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable { OAuth2RefreshTokenEntity _token = (OAuth2RefreshTokenEntity) invocation.getArguments()[0]; if(_token.getId() == null) { _token.setId(id++); } fakeDb.put(_token.getId(), _token); return _token; } }); when(tokenRepository.getRefreshTokenById(anyLong())).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() { @Override public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable { Long _id = (Long) invocation.getArguments()[0]; return fakeDb.get(_id); } }); when(clientRepository.getClientByClientId(anyString())).thenAnswer(new Answer<ClientDetailsEntity>() { @Override public ClientDetailsEntity answer(InvocationOnMock invocation) throws Throwable { String _clientId = (String) invocation.getArguments()[0]; ClientDetailsEntity _client = mock(ClientDetailsEntity.class); when(_client.getClientId()).thenReturn(_clientId); return _client; } }); when(authHolderRepository.getById(isNull(Long.class))).thenAnswer(new Answer<AuthenticationHolderEntity>() { Long id = 131L; @Override public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable { AuthenticationHolderEntity
_auth = mock(AuthenticationHolderEntity.class); when(_auth.getId()).thenReturn(id); id++; return _auth; } }); dataService.importData(reader); //2 times for token, 2 times to update client, 2 times to update authHolder verify(tokenRepository, times(6)).saveRefreshToken(capturedRefreshTokens.capture()); List<OAuth2RefreshTokenEntity> savedRefreshTokens = new ArrayList(fakeDb.values()); //capturedRefreshTokens.getAllValues(); Collections.sort(savedRefreshTokens, new refreshTokenIdComparator()); assertThat(savedRefreshTokens.size(), is(2)); assertThat(savedRefreshTokens.get(0).getClient().getClientId(), equalTo(token1.getClient().getClientId())); assertThat(savedRefreshTokens.get(0).getExpiration(), equalTo(token1.getExpiration())); assertThat(savedRefreshTokens.get(0).getValue(), equalTo(token1.getValue())); assertThat(savedRefreshTokens.get(1).getClient().getClientId(), equalTo(token2.getClient().getClientId())); assertThat(savedRefreshTokens.get(1).getExpiration(), equalTo(token2.getExpiration())); assertThat(savedRefreshTokens.get(1).getValue(), equalTo(token2.getValue())); }
/**
 * Returns the element at the given position in the collection, or null when the
 * position is out of range.
 *
 * <p>Lists are accessed directly via {@code get}; any other collection is walked
 * with its iterator, advancing {@code position} times.
 *
 * @param collection the source collection
 * @param position zero-based index of the wanted element
 * @return the element at {@code position}, or null if out of bounds
 */
public static <T> T getItemAtPositionOrNull(Collection<T> collection, int position) {
    // Out-of-range positions (negative or beyond the end) yield null.
    if (position < 0 || position >= collection.size()) {
        return null;
    }
    // Fast path: random access for lists.
    if (collection instanceof List) {
        return ((List<T>) collection).get(position);
    }
    // Fallback: advance an iterator to the requested position.
    Iterator<T> it = collection.iterator();
    for (int skipped = 0; skipped < position; skipped++) {
        it.next();
    }
    return it.next();
}
// Verifies the List fast path: a mocked List is accessed via get(0) and its iterator is never touched.
@Test @SuppressWarnings("unchecked") public void testGetItemAsPositionOrNull_whenInputImplementsList_thenDoNotUserIterator() { Object obj = new Object(); List<Object> src = mock(List.class); when(src.size()).thenReturn(1); when(src.get(0)).thenReturn(obj); Object result = getItemAtPositionOrNull(src, 0); assertSame(obj, result); verify(src, never()).iterator(); }
// Greedily fuses an executable stage rooted at a gRPC port read: starting from the initial
// consumer transforms of inputPCollection (all sharing one environment), it walks output
// PCollections breadth-first, fusing each candidate's per-element consumers into the stage when
// compatible (FUSE) or marking the collection as a stage output (MATERIALIZE). Side inputs,
// user state, and timers are accumulated along the way. Already-visited candidates (reachable
// via multiple paths, e.g. through a Flatten) are skipped.
public static ExecutableStage forGrpcPortRead( QueryablePipeline pipeline, PipelineNode.PCollectionNode inputPCollection, Set<PipelineNode.PTransformNode> initialNodes) { checkArgument( !initialNodes.isEmpty(), "%s must contain at least one %s.", GreedyStageFuser.class.getSimpleName(), PipelineNode.PTransformNode.class.getSimpleName()); // Choose the environment from an arbitrary node. The initial nodes may not be empty for this // subgraph to make any sense, there has to be at least one processor node // (otherwise the stage is gRPC Read -> gRPC Write, which doesn't do anything). Environment environment = getStageEnvironment(pipeline, initialNodes); ImmutableSet.Builder<PipelineNode.PTransformNode> fusedTransforms = ImmutableSet.builder(); fusedTransforms.addAll(initialNodes); Set<SideInputReference> sideInputs = new LinkedHashSet<>(); Set<UserStateReference> userStates = new LinkedHashSet<>(); Set<TimerReference> timers = new LinkedHashSet<>(); Set<PipelineNode.PCollectionNode> fusedCollections = new LinkedHashSet<>(); Set<PipelineNode.PCollectionNode> materializedPCollections = new LinkedHashSet<>(); Queue<PipelineNode.PCollectionNode> fusionCandidates = new ArrayDeque<>(); for (PipelineNode.PTransformNode initialConsumer : initialNodes) { fusionCandidates.addAll(pipeline.getOutputPCollections(initialConsumer)); sideInputs.addAll(pipeline.getSideInputs(initialConsumer)); userStates.addAll(pipeline.getUserStates(initialConsumer)); timers.addAll(pipeline.getTimers(initialConsumer)); } while (!fusionCandidates.isEmpty()) { PipelineNode.PCollectionNode candidate = fusionCandidates.poll(); if (fusedCollections.contains(candidate) || materializedPCollections.contains(candidate)) { // This should generally mean we get to a Flatten via multiple paths through the graph and // we've already determined what to do with the output. LOG.debug( "Skipping fusion candidate {} because it is {} in this {}", candidate, fusedCollections.contains(candidate) ?
"fused" : "materialized", ExecutableStage.class.getSimpleName()); continue; } PCollectionFusibility fusibility = canFuse(pipeline, candidate, environment, fusedCollections); switch (fusibility) { case MATERIALIZE: materializedPCollections.add(candidate); break; case FUSE: // All of the consumers of the candidate PCollection can be fused into this stage. Do so. fusedCollections.add(candidate); fusedTransforms.addAll(pipeline.getPerElementConsumers(candidate)); for (PipelineNode.PTransformNode consumer : pipeline.getPerElementConsumers(candidate)) { // The outputs of every transform fused into this stage must be either materialized or // themselves fused away, so add them to the set of candidates. fusionCandidates.addAll(pipeline.getOutputPCollections(consumer)); sideInputs.addAll(pipeline.getSideInputs(consumer)); } break; default: throw new IllegalStateException( String.format( "Unknown type of %s %s", PCollectionFusibility.class.getSimpleName(), fusibility)); } } return ImmutableExecutableStage.ofFullComponents( pipeline.getComponents(), environment, inputPCollection, sideInputs, userStates, timers, fusedTransforms.build(), materializedPCollections, ExecutableStage.DEFAULT_WIRE_CODER_SETTINGS); }
// Fusion test: a Flatten fed by transforms in two different environments (py and go) must be
// duplicated into each producer's stage, with flatten.out materialized as the stage output so
// downstream consumers (pyParDo, goWindow) read it from separate stages rather than fusing in.
@Test public void flattenWithHeterogeneousInputsAndOutputs() { // (impulse.out) -> pyRead -> pyRead.out \ -> pyParDo -> pyParDo.out // (impulse.out) -> -> flatten -> flatten.out | // (impulse.out) -> goRead -> goRead.out / -> goWindow -> goWindow.out // fuses into // (impulse.out) -> pyRead -> pyRead.out -> flatten -> (flatten.out) // (impulse.out) -> goRead -> goRead.out -> flatten -> (flatten.out) // (flatten.out) -> pyParDo -> pyParDo.out // (flatten.out) -> goWindow -> goWindow.out PTransform pyRead = PTransform.newBuilder() .putInputs("input", "impulse.out") .putOutputs("output", "pyRead.out") .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .setPayload( ParDoPayload.newBuilder() .setDoFn(FunctionSpec.newBuilder()) .build() .toByteString()) .build()) .setEnvironmentId("py") .build(); PTransform goRead = PTransform.newBuilder() .putInputs("input", "impulse.out") .putOutputs("output", "goRead.out") .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .setPayload( ParDoPayload.newBuilder() .setDoFn(FunctionSpec.newBuilder()) .build() .toByteString()) .build()) .setEnvironmentId("go") .build(); PTransform pyParDo = PTransform.newBuilder() .putInputs("input", "flatten.out") .putOutputs("output", "pyParDo.out") .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .setPayload( ParDoPayload.newBuilder() .setDoFn(FunctionSpec.newBuilder()) .build() .toByteString()) .build()) .setEnvironmentId("py") .build(); PTransform goWindow = PTransform.newBuilder() .putInputs("input", "flatten.out") .putOutputs("output", "goWindow.out") .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.ASSIGN_WINDOWS_TRANSFORM_URN) .setPayload( WindowIntoPayload.newBuilder() .setWindowFn(FunctionSpec.newBuilder()) .build() .toByteString()) .build()) .setEnvironmentId("go") .build(); PCollection flattenPc = PCollection.newBuilder().setUniqueName("flatten.out").build();
Components components = partialComponents .toBuilder() .putTransforms("pyRead", pyRead) .putPcollections( "pyRead.out", PCollection.newBuilder().setUniqueName("pyRead.out").build()) .putTransforms("goRead", goRead) .putPcollections( "goRead.out", PCollection.newBuilder().setUniqueName("goRead.out").build()) .putTransforms( "flatten", PTransform.newBuilder() .putInputs("py_input", "pyRead.out") .putInputs("go_input", "goRead.out") .putOutputs("output", "flatten.out") .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN) .build()) .build()) .putPcollections("flatten.out", flattenPc) .putTransforms("pyParDo", pyParDo) .putPcollections( "pyParDo.out", PCollection.newBuilder().setUniqueName("pyParDo.out").build()) .putTransforms("goWindow", goWindow) .putPcollections( "goWindow.out", PCollection.newBuilder().setUniqueName("goWindow.out").build()) .putEnvironments("go", Environments.createDockerEnvironment("go")) .putEnvironments("py", Environments.createDockerEnvironment("py")) .build(); QueryablePipeline p = QueryablePipeline.forPrimitivesIn(components); ExecutableStage readFromPy = GreedyStageFuser.forGrpcPortRead( p, impulseOutputNode, ImmutableSet.of(PipelineNode.pTransform("pyRead", pyRead))); ExecutableStage readFromGo = GreedyStageFuser.forGrpcPortRead( p, impulseOutputNode, ImmutableSet.of(PipelineNode.pTransform("goRead", goRead))); assertThat( readFromPy.getOutputPCollections(), contains(PipelineNode.pCollection("flatten.out", flattenPc))); // The stage must materialize the flatten, so the `go` stage can read it; this means that this // parDo can't be in the stage, as it'll be a reader of that materialized PCollection. The same // is true for the go window.
assertThat( readFromPy.getTransforms(), not(hasItem(PipelineNode.pTransform("pyParDo", pyParDo)))); assertThat( readFromGo.getOutputPCollections(), contains(PipelineNode.pCollection("flatten.out", flattenPc))); assertThat( readFromGo.getTransforms(), not(hasItem(PipelineNode.pTransform("goWindow", goWindow)))); }
// Builder setter: records whether this provider is the default; returns the builder (via getThis()) for chaining.
public ProviderBuilder isDefault(Boolean isDefault) { this.isDefault = isDefault; return getThis(); }
// Verifies the isDefault(true) builder setting is carried through to the built provider.
@Test void isDefault() { ProviderBuilder builder = ProviderBuilder.newBuilder(); builder.isDefault(true); Assertions.assertTrue(builder.build().isDefault()); }
// Applies the pre-configured owner/group attributes (mSetAttributeOptions) to the given URI; checked failures are rethrown as AlluxioRuntimeException.
@Override public void setUserGroupIfNeeded(AlluxioURI uri) { try { mFileSystem.setAttribute(uri, mSetAttributeOptions); } catch (IOException | AlluxioException e) { throw AlluxioRuntimeException.from(e); } }
// Verifies setUserGroupIfNeeded applies the configured USER/GROUP to the path's status.
@Test public void setUserGroupIfNeed() throws Exception { AlluxioURI uri = new AlluxioURI("/SetUserGroupIfNeed"); mAuthPolicy.setUserGroupIfNeeded(uri); URIStatus status = mFileSystem.getStatus(uri); Assert.assertEquals(USER, status.getOwner()); Assert.assertEquals(GROUP, status.getGroup()); }
public static String getGcloudCancelCommand(DataflowPipelineOptions options, String jobId) { // If using a different Dataflow API than default, prefix command with an API override. String dataflowApiOverridePrefix = ""; String apiUrl = options.getDataflowClient().getBaseUrl(); if (!apiUrl.equals(Dataflow.DEFAULT_BASE_URL)) { dataflowApiOverridePrefix = String.format("%s=%s ", ENDPOINT_OVERRIDE_ENV_VAR, apiUrl); } // Assemble cancel command from optional prefix and project/job parameters. return String.format( "%s%s jobs --project=%s cancel --region=%s %s", dataflowApiOverridePrefix, GCLOUD_DATAFLOW_PREFIX, options.getProject(), options.getRegion(), jobId); }
// With the default Dataflow API base URL, the cancel command must carry no endpoint-override prefix.
@Test public void testDontOverrideEndpointWithDefaultApi() { DataflowPipelineOptions options = PipelineOptionsFactory.create().as(DataflowPipelineOptions.class); options.setProject(PROJECT_ID); options.setRegion(REGION_ID); options.setGcpCredential(new TestCredential()); String cancelCommand = MonitoringUtil.getGcloudCancelCommand(options, JOB_ID); assertEquals( "gcloud dataflow jobs --project=someProject cancel --region=thatRegion 1234", cancelCommand); }
// Returns the dispatcher's handler collection; this is the backing field itself, not a defensive copy.
public Collection<ChannelHandler> getChannelHandlers() { return channelHandlers; }
// Verifies the dispatcher constructors tolerate nulls: null arguments/elements are dropped so getChannelHandlers() reflects only the non-null handlers.
@Test void constructorNullObjectTest() { ChannelHandlerDispatcher channelHandlerDispatcher = new ChannelHandlerDispatcher(null, null); Assertions.assertEquals(0, channelHandlerDispatcher.getChannelHandlers().size()); ChannelHandlerDispatcher channelHandlerDispatcher1 = new ChannelHandlerDispatcher((MockChannelHandler) null); Assertions.assertEquals( 0, channelHandlerDispatcher1.getChannelHandlers().size()); ChannelHandlerDispatcher channelHandlerDispatcher2 = new ChannelHandlerDispatcher(null, new MockChannelHandler()); Assertions.assertEquals( 1, channelHandlerDispatcher2.getChannelHandlers().size()); ChannelHandlerDispatcher channelHandlerDispatcher3 = new ChannelHandlerDispatcher(Collections.singleton(new MockChannelHandler())); Assertions.assertEquals( 1, channelHandlerDispatcher3.getChannelHandlers().size()); Collection<ChannelHandler> mockChannelHandlers = new HashSet<>(); mockChannelHandlers.add(new MockChannelHandler()); mockChannelHandlers.add(null); ChannelHandlerDispatcher channelHandlerDispatcher4 = new ChannelHandlerDispatcher(mockChannelHandlers); Assertions.assertEquals( 1, channelHandlerDispatcher4.getChannelHandlers().size()); }
// Whether more records can currently be acquired from this share partition.
// Fast path: when the next fetch offset is not exactly endOffset + 1 there is a gap of
// records to (re-)acquire, so acquisition is always allowed. Otherwise the number of
// in-flight records is computed under the read lock as endOffset - startOffset + 1
// (0 for an empty cache) and acquisition is allowed while below maxInFlightMessages.
// NOTE(review): nextFetchOffset()/endOffset() are invoked before taking the read lock —
// presumably they synchronize internally; confirm.
boolean canAcquireRecords() { if (nextFetchOffset() != endOffset() + 1) { return true; } lock.readLock().lock(); long numRecords; try { if (cachedState.isEmpty()) { numRecords = 0; } else { numRecords = this.endOffset - this.startOffset + 1; } } finally { lock.readLock().unlock(); } return numRecords < maxInFlightMessages; }
// With an empty cache and max in-flight of 1, acquisition must be allowed.
@Test public void testCanAcquireRecordsWithEmptyCache() { SharePartition sharePartition = SharePartitionBuilder.builder().withMaxInflightMessages(1).build(); assertTrue(sharePartition.canAcquireRecords()); }
// Deserializes a JSON payload (plain JSON, or schema-registry framed when isJsonSchema) from
// the topic into the target type. Null bytes map to null (tombstone). MAPPER is used directly —
// rather than the JsonSchemaConverter — because it enables USE_BIG_DECIMAL_FOR_FLOATS, which the
// standard converters do not. The parsed tree is coerced to the declared schema starting at
// path "$", then cast to the target type. On any failure the JsonParseException location is
// cleared first so raw payload data is not leaked into logs, and the error is rethrown as a
// SerializationException with the original cause preserved.
@Override public T deserialize(final String topic, final byte[] bytes) { try { if (bytes == null) { return null; } // don't use the JsonSchemaConverter to read this data because // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS, // which is not currently available in the standard converters final JsonNode value = isJsonSchema ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class) : MAPPER.readTree(bytes); final Object coerced = enforceFieldType( "$", new JsonValueContext(value, schema) ); if (LOG.isTraceEnabled()) { LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced); } return SerdeUtils.castToTargetType(coerced, targetType); } catch (final Exception e) { // Clear location in order to avoid logging data, for security reasons if (e instanceof JsonParseException) { ((JsonParseException) e).clearLocation(); } throw new SerializationException( "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e); } }
// When the target field is declared VARCHAR but the JSON value is an object, the deserializer must stringify the object into compact JSON for that field.
@Test public void shouldCreateJsonStringForStructIfDefinedAsVarchar() { // Given: final KsqlJsonDeserializer<Struct> deserializer = givenDeserializerForSchema( SchemaBuilder.struct() .field("ITEMID", Schema.OPTIONAL_STRING_SCHEMA) .build(), Struct.class ); final byte[] bytes = ("{" + "\"itemid\": {" + " \"CATEGORY\": {" + " \"ID\":2," + " \"NAME\":\"Food\"" + " }," + " \"ITEMID\":6," + " \"NAME\":\"Item_6\"" + " }" + "}").getBytes(StandardCharsets.UTF_8); // When: final Struct result = deserializer.deserialize(SOME_TOPIC, addMagic(bytes)); // Then: assertThat(result.get(ITEMID), is("{\"CATEGORY\":{\"ID\":2,\"NAME\":\"Food\"},\"ITEMID\":6,\"NAME\":\"Item_6\"}")); }
/**
 * Returns the smallest power of two that is greater than or equal to {@code value}.
 *
 * <p>All values {@code <= 1} (including every negative) map to 1. For
 * {@code value > 2^30} the true next power of two is not representable in an
 * {@code int} and the result overflows to a negative number, matching the
 * original shift-based behavior.
 *
 * @param value the value to round up
 * @return the next power of two, at least 1
 */
@SuppressWarnings("checkstyle:MagicNumber")
public static int nextPowerOfTwo(final int value) {
    // Fix: the original computed 1 << (32 - numberOfLeadingZeros(value - 1)),
    // which relied on shift-distance masking (1 << 32 == 1) for value <= 0 and
    // overflowed to Integer.MIN_VALUE for value == Integer.MIN_VALUE because
    // MIN_VALUE - 1 wraps to MAX_VALUE. An explicit guard handles all of these.
    if (value <= 1) {
        return 1;
    }
    // Round up: highest one bit of (value - 1), doubled.
    return Integer.highestOneBit(value - 1) << 1;
}
// Pins nextPowerOfTwo behavior: all non-positive inputs (and 1) yield 1; powers of two are fixed points; others round up to the next power of two.
@Test public void testNextPowerOfTwo() { assertEquals(1, nextPowerOfTwo(-9999999)); assertEquals(1, nextPowerOfTwo(-1)); assertEquals(1, nextPowerOfTwo(0)); assertEquals(1, nextPowerOfTwo(1)); assertEquals(2, nextPowerOfTwo(2)); assertEquals(1024, nextPowerOfTwo(999)); assertEquals(1 << 23, nextPowerOfTwo((1 << 23) - 1)); assertEquals(1 << 23, nextPowerOfTwo(1 << 23)); }
// Mock-server request dispatcher (synchronized: one request at a time). Handles CORS preflight
// OPTIONS directly when enabled; strips the configured path prefix; snapshots any ScenarioEngine
// already bound to this thread (http-client in the same JVM) so it can be restored before
// returning. For each mock feature, the first scenario whose expression matches the request is
// executed; its response/responseStatus/responseHeaders/responseDelay variables are turned into
// the HTTP response (failures become a 500 carrying the error message), engine variables are
// merged back into the shared globals, and an optional interceptor sees the request/response
// pair. If no scenario in any feature matches, a 404 is returned.
@Override public synchronized Response handle(Request req) { // note the [synchronized] if (corsEnabled && "OPTIONS".equals(req.getMethod())) { Response response = new Response(200); response.setHeader("Allow", ALLOWED_METHODS); response.setHeader("Access-Control-Allow-Origin", "*"); response.setHeader("Access-Control-Allow-Methods", ALLOWED_METHODS); List<String> requestHeaders = req.getHeaderValues("Access-Control-Request-Headers"); if (requestHeaders != null) { response.setHeader("Access-Control-Allow-Headers", requestHeaders); } return response; } if (prefix != null && req.getPath().startsWith(prefix)) { req.setPath(req.getPath().substring(prefix.length())); } // rare case when http-client is active within same jvm // snapshot existing thread-local to restore ScenarioEngine prevEngine = ScenarioEngine.get(); for (Map.Entry<Feature, ScenarioRuntime> entry : scenarioRuntimes.entrySet()) { Feature feature = entry.getKey(); ScenarioRuntime runtime = entry.getValue(); // important for graal to work properly Thread.currentThread().setContextClassLoader(runtime.featureRuntime.suite.classLoader); LOCAL_REQUEST.set(req); req.processBody(); ScenarioEngine engine = initEngine(runtime, globals, req); for (FeatureSection fs : feature.getSections()) { if (fs.isOutline()) { runtime.logger.warn("skipping scenario outline - {}:{}", feature, fs.getScenarioOutline().getLine()); break; } Scenario scenario = fs.getScenario(); if (isMatchingScenario(scenario, engine)) { Map<String, Object> configureHeaders; Variable response, responseStatus, responseHeaders, responseDelay; ScenarioActions actions = new ScenarioActions(engine); Result result = executeScenarioSteps(feature, runtime, scenario, actions); engine.mockAfterScenario(); configureHeaders = engine.mockConfigureHeaders(); response = engine.vars.remove(ScenarioEngine.RESPONSE); responseStatus = engine.vars.remove(ScenarioEngine.RESPONSE_STATUS); responseHeaders = engine.vars.remove(ScenarioEngine.RESPONSE_HEADERS); responseDelay
= engine.vars.remove(RESPONSE_DELAY); globals.putAll(engine.shallowCloneVariables()); Response res = new Response(200); if (result.isFailed()) { response = new Variable(result.getError().getMessage()); responseStatus = new Variable(500); } else { if (corsEnabled) { res.setHeader("Access-Control-Allow-Origin", "*"); } res.setHeaders(configureHeaders); if (responseHeaders != null && responseHeaders.isMap()) { res.setHeaders(responseHeaders.getValue()); } if (responseDelay != null) { res.setDelay(responseDelay.getAsInt()); } } if (response != null && !response.isNull()) { res.setBody(response.getAsByteArray()); if (res.getContentType() == null) { ResourceType rt = ResourceType.fromObject(response.getValue()); if (rt != null) { res.setContentType(rt.contentType); } } } if (responseStatus != null) { res.setStatus(responseStatus.getAsInt()); } if (prevEngine != null) { ScenarioEngine.set(prevEngine); } if (mockInterceptor != null) { mockInterceptor.intercept(req, res, scenario); } return res; } } } logger.warn("no scenarios matched, returning 404: {}", req); // NOTE: not logging with engine.logger if (prevEngine != null) { ScenarioEngine.set(prevEngine); } return new Response(404); }
// Verifies a matched mock scenario can read query parameters via paramValue() and echo them in the response body.
@Test void testQueryParams() { background().scenario( "pathMatches('/hello')", "def response = 'hello ' + paramValue('foo')" ); request.path("/hello").param("foo", "world"); handle(); match(response.getBodyAsString(), "hello world"); }
/**
 * Renders the identity-provider selection page.
 *
 * @param identityProviders the providers to offer; rendered sorted by display name
 * @param locale the locale to render the template in
 * @return the rendered HTML of the select-idp page
 */
public String selectIdpForm(List<IdpEntry> identityProviders, Locale locale) {
    // Sort into a local instead of reassigning the parameter (clearer, and the
    // caller's list reference is visibly untouched).
    List<IdpEntry> sortedProviders =
        identityProviders.stream().sorted(Comparator.comparing(IdpEntry::name)).toList();
    return renderer.render(
        "select-idp.html.mustache", Map.of("identityProviders", sortedProviders), locale);
}
// Golden-file test: renders the select-idp form for four providers (supplied unsorted) and compares against the stored fixture output.
@Test void selectIdpForm_withFixture() { var sut = new Pages(renderer); var rendered = sut.selectIdpForm( List.of( new IdpEntry("https://a.example.com", "AoK Tesfalen", null), new IdpEntry("https://b.example.com", "Siemens", null), new IdpEntry("https://c.example.com", "Zuse", null), new IdpEntry("https://d.example.com", "Barmer", null)), Locale.US); assertEquals(Fixtures.getUtf8String("pages_golden_idp-select-form.bin"), rendered); }
/**
 * Writes an int as its unsigned decimal representation.
 *
 * <p>Non-negative values print identically signed or unsigned and take the int
 * path; negative values are reinterpreted as their unsigned 32-bit magnitude and
 * written via the long path.
 *
 * @param value the 32-bit value to write, treated as unsigned
 * @throws IOException if the underlying writer fails
 */
public void writeUnsignedIntAsDec(int value) throws IOException {
    if (value >= 0) {
        writeSignedIntAsDec(value);
    } else {
        // Equivalent to (value & 0xFFFFFFFFL): the unsigned 32-bit magnitude.
        writeSignedLongAsDec(Integer.toUnsignedLong(value));
    }
}
// Pins unsigned rendering: -1 -> 4294967295, MIN_VALUE -> 2147483648, and non-negative values pass through unchanged.
@Test public void testWriteUnsignedIntAsDec() throws IOException { Assert.assertEquals("4294967295", performWriteUnsignedIntAsDec(-1)); Assert.assertEquals("2147483647", performWriteUnsignedIntAsDec(Integer.MAX_VALUE)); Assert.assertEquals("2147483648", performWriteUnsignedIntAsDec(Integer.MIN_VALUE)); Assert.assertEquals("0", performWriteUnsignedIntAsDec(0)); Assert.assertEquals("1", performWriteUnsignedIntAsDec(1)); }
// Returns the topology description, delegating to the internal builder; synchronized to serialize access with other builder mutations.
public synchronized TopologyDescription describe() { return internalTopologyBuilder.describe(); }
// Verifies that a stream-stream outer join configured with StreamJoined.withStoreName("custom-name") produces a topology whose window/join store names all carry the custom prefix, matched against the full describe() output.
@Test public void streamStreamOuterJoinTopologyWithCustomStoresNames() { final StreamsBuilder builder = new StreamsBuilder(); final KStream<Integer, String> stream1; final KStream<Integer, String> stream2; stream1 = builder.stream("input-topic1"); stream2 = builder.stream("input-topic2"); stream1.outerJoin( stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()) .withStoreName("custom-name")); final TopologyDescription describe = builder.build().describe(); assertEquals( "Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [input-topic1])\n" + "      --> KSTREAM-WINDOWED-0000000002\n" + "    Source: KSTREAM-SOURCE-0000000001 (topics: [input-topic2])\n" + "      --> KSTREAM-WINDOWED-0000000003\n" + "    Processor: KSTREAM-WINDOWED-0000000002 (stores: [custom-name-outer-this-join-store])\n" + "      --> KSTREAM-OUTERTHIS-0000000004\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: KSTREAM-WINDOWED-0000000003 (stores: [custom-name-outer-other-join-store])\n" + "      --> KSTREAM-OUTEROTHER-0000000005\n" + "      <-- KSTREAM-SOURCE-0000000001\n" + "    Processor: KSTREAM-OUTEROTHER-0000000005 (stores: [custom-name-outer-this-join-store, custom-name-outer-shared-join-store])\n" + "      --> KSTREAM-MERGE-0000000006\n" + "      <-- KSTREAM-WINDOWED-0000000003\n" + "    Processor: KSTREAM-OUTERTHIS-0000000004 (stores: [custom-name-outer-other-join-store, custom-name-outer-shared-join-store])\n" + "      --> KSTREAM-MERGE-0000000006\n" + "      <-- KSTREAM-WINDOWED-0000000002\n" + "    Processor: KSTREAM-MERGE-0000000006 (stores: [])\n" + "      --> none\n" + "      <-- KSTREAM-OUTERTHIS-0000000004, KSTREAM-OUTEROTHER-0000000005\n\n", describe.toString()); }
public static final boolean delimiterNext(byte[] bytes, int startPos, byte[] delim) { for (int pos = 0; pos < delim.length; pos++) { // check each position if (delim[pos] != bytes[startPos + pos]) { return false; } } return true; }
@Test void testDelimiterNext() { byte[] bytes = "aaabc".getBytes(); byte[] delim = "aa".getBytes(); assertThat(FieldParser.delimiterNext(bytes, 0, delim)).isTrue(); assertThat(FieldParser.delimiterNext(bytes, 1, delim)).isTrue(); assertThat(FieldParser.delimiterNext(bytes, 2, delim)).isFalse(); }
@Override public boolean isIndexed(QueryContext queryContext) { Index index = queryContext.matchIndex(attributeName, QueryContext.IndexMatchHint.PREFER_ORDERED); return index != null && index.isOrdered() && expressionCanBeUsedAsIndexPrefix(); }
@Test public void likePredicateIsNotIndexed_whenPercentWildcardIsUsedAtTheBeginning() { QueryContext queryContext = mock(QueryContext.class); when(queryContext.matchIndex("this", QueryContext.IndexMatchHint.PREFER_ORDERED)).thenReturn(createIndex(IndexType.SORTED)); assertFalse(new LikePredicate("this", "%string").isIndexed(queryContext)); }
public Repository getRepo(String serverUrl, String token, String project, String repoSlug) { HttpUrl url = buildUrl(serverUrl, format("/rest/api/1.0/projects/%s/repos/%s", project, repoSlug)); return doGet(token, url, body -> buildGson().fromJson(body, Repository.class)); }
@Test @UseDataProvider("expectedErrorMessageFromHttpNoJsonBody") public void fail_response_when_http_no_json_body(int responseCode, String body, String headerContent, String expectedErrorMessage) { server.enqueue(new MockResponse() .setHeader("Content-Type", headerContent) .setResponseCode(responseCode) .setBody(body)); String serverUrl = server.url("/").toString(); assertThatThrownBy(() -> underTest.getRepo(serverUrl, "token", "", "")) .isInstanceOf(IllegalArgumentException.class) .hasMessage(expectedErrorMessage); }
public ListenableFuture<BufferResult> getPages(long sequenceId, DataSize maxSize) { return getPages(sequenceId, maxSize, Optional.empty()); }
@Test public void testInvalidTokenFails() { ClientBuffer buffer = new ClientBuffer(TASK_INSTANCE_ID, BUFFER_ID, NOOP_RELEASE_LISTENER); addPage(buffer, createPage(0)); addPage(buffer, createPage(1)); buffer.getPages(1, sizeOfPages(10)).cancel(true); assertBufferInfo(buffer, 1, 1); // request negative token assertInvalidSequenceId(buffer, -1); assertBufferInfo(buffer, 1, 1); // request token off end of buffer assertInvalidSequenceId(buffer, 10); assertBufferInfo(buffer, 1, 1); }
RpcReply(int xid, ReplyState state, Verifier verifier) { super(xid, RpcMessage.Type.RPC_REPLY); this.replyState = state; this.verifier = verifier; }
@Test public void testRpcReply() { RpcReply reply = new RpcReply(0, ReplyState.MSG_ACCEPTED, new VerifierNone()) { @Override public XDR write(XDR xdr) { return null; } }; Assert.assertEquals(0, reply.getXid()); Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType()); Assert.assertEquals(ReplyState.MSG_ACCEPTED, reply.getState()); }
public ParsedQuery parse(final String query) throws ParseException { final TokenCollectingQueryParser parser = new TokenCollectingQueryParser(ParsedTerm.DEFAULT_FIELD, ANALYZER); parser.setSplitOnWhitespace(true); parser.setAllowLeadingWildcard(allowLeadingWildcard); final Query parsed = parser.parse(query); final ParsedQuery.Builder builder = ParsedQuery.builder().query(query); builder.tokensBuilder().addAll(parser.getTokens()); final TermCollectingQueryVisitor visitor = new TermCollectingQueryVisitor(ANALYZER, parser.getTokenLookup()); parsed.visit(visitor); builder.termsBuilder().addAll(visitor.getParsedTerms()); return builder.build(); }
@Test void testGtQuery() throws ParseException { final ParsedQuery query = parser.parse("http_response_code:>400"); assertThat(query.terms()).extracting(ParsedTerm::value).contains(">400"); }
@Override public BitcoinSerializer withProtocolVersion(int protocolVersion) { return protocolVersion == this.protocolVersion ? this : new BitcoinSerializer(network, protocolVersion); }
@Test public void testEquals() { assertTrue(MAINNET.getDefaultSerializer().equals(MAINNET.getDefaultSerializer())); assertFalse(MAINNET.getDefaultSerializer().equals(TestNet3Params.get().getDefaultSerializer())); assertFalse(MAINNET.getDefaultSerializer().equals(MAINNET.getDefaultSerializer().withProtocolVersion(0))); }
public static <K, E> Collector<E, ImmutableListMultimap.Builder<K, E>, ImmutableListMultimap<K, E>> index(Function<? super E, K> keyFunction) { return index(keyFunction, Function.identity()); }
@Test public void index_with_valueFunction_fails_if_value_function_is_null() { assertThatThrownBy(() -> index(MyObj::getId, null)) .isInstanceOf(NullPointerException.class) .hasMessage("Value function can't be null"); }
@Override public void start(final PluginConfiguration pluginConfig, final boolean isEnhancedForProxy) { PluginContext.getInstance().setEnhancedForProxy(isEnhancedForProxy); PluginConfigurationValidator.validatePort(getType(), pluginConfig); startServer(pluginConfig, isEnhancedForProxy); }
@Test void assertStart() throws IOException { ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); pluginLifecycleService.start(new PluginConfiguration("localhost", 8090, "", PropertiesBuilder.build(new Property("JVM_INFORMATION_COLLECTOR_ENABLED", Boolean.TRUE.toString()))), true); try (Socket socket = new Socket()) { assertDoesNotThrow(() -> socket.connect(new InetSocketAddress("localhost", 8090))); } }
@Override public Integer doCall() throws Exception { String jv = VersionHelper.getJBangVersion(); if (jv != null) { printer().println("JBang version: " + jv); } CamelCatalog catalog = new DefaultCamelCatalog(); String v = catalog.getCatalogVersion(); printer().println("Camel JBang version: " + v); CommandLineHelper.loadProperties(properties -> { String uv = properties.getProperty("camel-version"); String kv = properties.getProperty("kamelets-version"); String repos = properties.getProperty("repos"); String runtime = properties.getProperty("runtime"); if (uv != null || repos != null || runtime != null) { printer().println("User configuration:"); if (uv != null) { printer().println(" camel-version = " + uv); } if (kv != null) { printer().println(" kamelets-version = " + kv); } if (runtime != null) { printer().println(" runtime = " + runtime); } if (repos != null) { printer().println(" repos = " + repos); } } }); return 0; }
@Test public void shouldPrintVersions() throws Exception { UserConfigHelper.createUserConfig(""); createJBangVersionFile("0.100"); VersionGet command = createCommand(); command.doCall(); List<String> lines = printer.getLines(); Assertions.assertTrue(lines.get(0).startsWith("JBang version:")); Assertions.assertTrue(lines.get(1).startsWith("Camel JBang version:")); }
@Override public void onStateElection(Job job, JobState newState) { if (isNotFailed(newState) || isJobNotFoundException(newState) || isProblematicExceptionAndMustNotRetry(newState) || maxAmountOfRetriesReached(job)) return; job.scheduleAt(now().plusSeconds(getSecondsToAdd(job)), String.format("Retry %d of %d", getFailureCount(job), getMaxNumberOfRetries(job))); }
@Test void retryFilterUsesValueOfRetriesOnJobAnnotationIfProvided() { retryFilter = new RetryFilter(0); // GIVEN FIRST FAILURE, NOT YET RETRIED Job job = aJob() .<TestService>withJobDetails(ts -> ts.doWorkThatFails()) .withState(new FailedState("a message", new RuntimeException("boom"))) .build(); applyDefaultJobFilter(job); int beforeVersion = job.getJobStates().size(); // WHEN retryFilter.onStateElection(job, job.getJobState()); // THEN int afterVersion = job.getJobStates().size(); assertThat(afterVersion).isEqualTo(beforeVersion + 1); assertThat(job.getState()).isEqualTo(SCHEDULED); // GIVEN SECOND FAILURE, ALREADY RETRIED job = aCopyOf(job) .withState(new FailedState("a message", new RuntimeException("boom"))) .build(); beforeVersion = job.getJobStates().size(); // WHEN retryFilter.onStateElection(job, job.getJobState()); // THEN afterVersion = job.getJobStates().size(); assertThat(afterVersion).isEqualTo(beforeVersion); assertThat(job.getState()).isEqualTo(FAILED); }
public static <K, InputT, AccumT> ParDoFn create( PipelineOptions options, KvCoder<K, ?> inputElementCoder, @Nullable CloudObject cloudUserFn, @Nullable List<SideInputInfo> sideInputInfos, List<Receiver> receivers, DataflowExecutionContext<?> executionContext, DataflowOperationContext operationContext) throws Exception { AppliedCombineFn<K, InputT, AccumT, ?> combineFn; SideInputReader sideInputReader; StepContext stepContext; if (cloudUserFn == null) { combineFn = null; sideInputReader = NullSideInputReader.empty(); stepContext = null; } else { Object deserializedFn = SerializableUtils.deserializeFromByteArray( getBytes(cloudUserFn, PropertyNames.SERIALIZED_FN), "serialized combine fn"); @SuppressWarnings("unchecked") AppliedCombineFn<K, InputT, AccumT, ?> combineFnUnchecked = ((AppliedCombineFn<K, InputT, AccumT, ?>) deserializedFn); combineFn = combineFnUnchecked; sideInputReader = executionContext.getSideInputReader( sideInputInfos, combineFn.getSideInputViews(), operationContext); stepContext = executionContext.getStepContext(operationContext); } return create( options, inputElementCoder, combineFn, sideInputReader, receivers.get(0), stepContext); }
@Test public void testCreateWithCombinerAndBatchSideInputs() throws Exception { PipelineOptions options = PipelineOptionsFactory.create(); Coder keyCoder = StringUtf8Coder.of(); Coder valueCoder = BigEndianIntegerCoder.of(); KvCoder<String, Integer> kvCoder = KvCoder.of(keyCoder, valueCoder); TestOutputReceiver receiver = new TestOutputReceiver( new ElementByteSizeObservableCoder(WindowedValue.getValueOnlyCoder(kvCoder)), counterSet, NameContextsForTests.nameContextForTest()); StepContext stepContext = BatchModeExecutionContext.forTesting(options, "testStage") .getStepContext(TestOperationContext.create(counterSet)); when(mockSideInputReader.isEmpty()).thenReturn(false); ParDoFn pgbk = PartialGroupByKeyParDoFns.create( options, kvCoder, AppliedCombineFn.withInputCoder( Sum.ofIntegers(), CoderRegistry.createDefault(), kvCoder, ImmutableList.<PCollectionView<?>>of(), WindowingStrategy.globalDefault()), mockSideInputReader, receiver, stepContext); assertTrue(pgbk instanceof BatchSideInputPGBKParDoFn); }
@Override public int getNumPartitions() { return 1; }
@Test public void testGetNumPartitions() { HoodieData<Integer> listData = HoodieListData.eager( IntStream.rangeClosed(0, 100).boxed().collect(Collectors.toList())); assertEquals(1, listData.getNumPartitions()); }
public boolean checkURL(String url) { return url.contains("?"); }
@Test public void testcheckURL() throws Exception { assertFalse(tclp.checkURL(URL1), "URL does not have a query"); assertTrue(tclp.checkURL(URL2), "URL is a query"); }
public static Number getExactlyNumber(final String value, final int radix) { try { return getBigInteger(value, radix); } catch (final NumberFormatException ex) { return new BigDecimal(value); } }
@Test void assertGetExactlyNumberForBigInteger() { assertThat(SQLUtils.getExactlyNumber("10000000000000000000", 10), is(new BigInteger("10000000000000000000"))); assertThat(SQLUtils.getExactlyNumber("10000000000000000000", 16), is(new BigInteger("75557863725914323419136"))); assertThat(SQLUtils.getExactlyNumber(String.valueOf(Long.MIN_VALUE + 1L), 10), is(Long.MIN_VALUE + 1L)); assertThat(SQLUtils.getExactlyNumber(String.valueOf(Long.MAX_VALUE - 1L), 10), is(Long.MAX_VALUE - 1L)); }
public static ExternalSorter create(Options options) { return options.getSorterType() == Options.SorterType.HADOOP ? HadoopExternalSorter.create(options) : NativeExternalSorter.create(options); }
@Test public void testSingleElement() throws Exception { SorterTestUtils.testSingleElement( ExternalSorter.create( new ExternalSorter.Options() .setTempLocation(getTmpLocation().toString()) .setSorterType(sorterType))); }
@Override public WsResponse call(WsRequest wsRequest) { DefaultLocalRequest localRequest = new DefaultLocalRequest(wsRequest); LocalConnector.LocalResponse localResponse = localConnector.call(localRequest); return new ByteArrayResponse(wsRequest.getPath(), localResponse); }
@Test public void call_request_with_defaults() throws Exception { // no parameters, no media type WsRequest wsRequest = new GetRequest("api/issues/search"); answer(new DumbLocalResponse(200, MediaTypes.JSON, "".getBytes(UTF_8), Collections.<String>emptyList())); WsResponse wsResponse = underTest.call(wsRequest); verifyRequested("GET", "api/issues/search", MediaTypes.JSON, Collections.<String, String>emptyMap()); assertThat(wsResponse.code()).isEqualTo(200); assertThat(wsResponse.content()).isEmpty(); assertThat(IOUtils.toString(wsResponse.contentReader())).isEmpty(); assertThat(IOUtils.toString(wsResponse.contentStream())).isEmpty(); assertThat(wsResponse.contentType()).isEqualTo(MediaTypes.JSON); }
static BlockStmt getFieldColumnPairVariableDeclaration(final String variableName, final FieldColumnPair fieldColumnPair) { final MethodDeclaration methodDeclaration = FIELDCOLUMNPAIR_TEMPLATE.getMethodsByName(GETKIEPMMLFIELDCOLUMNPAIR).get(0).clone(); final BlockStmt toReturn = methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration))); final VariableDeclarator variableDeclarator = getVariableDeclarator(toReturn, FIELDCOLUMNPAIR) .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, FIELDCOLUMNPAIR, toReturn))); variableDeclarator.setName(variableName); final ObjectCreationExpr objectCreationExpr = variableDeclarator.getInitializer() .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, FIELDCOLUMNPAIR, toReturn))) .asObjectCreationExpr(); objectCreationExpr.getArguments().set(0, new StringLiteralExpr(fieldColumnPair.getField())); objectCreationExpr.getArguments().set(2, new StringLiteralExpr(fieldColumnPair.getColumn())); return toReturn; }
@Test void getRowVariableDeclaration() throws IOException { String variableName = "variableName"; String fieldName = "fieldName"; String column = "column"; FieldColumnPair fieldColumnPair = new FieldColumnPair(); fieldColumnPair.setField(fieldName); fieldColumnPair.setColumn(column); BlockStmt retrieved = KiePMMLFieldColumnPairFactory.getFieldColumnPairVariableDeclaration(variableName, fieldColumnPair); String text = getFileContent(TEST_01_SOURCE); Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName, fieldName, column)); assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue(); List<Class<?>> imports = Arrays.asList(Collections.class, KiePMMLFieldColumnPair.class); commonValidateCompilationWithImports(retrieved, imports); }
public void close() { close(Long.MAX_VALUE, false); }
@Test public void shouldNotBlockInCloseWithCloseOptionLeaveGroupTrueForZeroDuration() throws Exception { prepareStreams(); prepareStreamThread(streamThreadOne, 1); prepareStreamThread(streamThreadTwo, 2); prepareTerminableThread(streamThreadOne); final MockClientSupplier mockClientSupplier = spy(MockClientSupplier.class); when(mockClientSupplier.getAdmin(any())).thenReturn(adminClient); final KafkaStreams.CloseOptions closeOptions = new KafkaStreams.CloseOptions(); closeOptions.timeout(Duration.ZERO); closeOptions.leaveGroup(true); try (final KafkaStreams streams = new KafkaStreams(getBuilderWithSource().build(), props, mockClientSupplier)) { assertFalse(streams.close(closeOptions)); } }
public ProtocolBuilder charset(String charset) { this.charset = charset; return getThis(); }
@Test void charset() { ProtocolBuilder builder = new ProtocolBuilder(); builder.charset("utf-8"); Assertions.assertEquals("utf-8", builder.build().getCharset()); }
public ColumnConstraints getConstraints() { return constraints; }
@Test public void shouldReturnPrimaryKey() { // Given: final TableElement valueElement = new TableElement(NAME, new Type(SqlTypes.STRING), PRIMARY_KEY_CONSTRAINT); // Then: assertThat(valueElement.getConstraints(), is(PRIMARY_KEY_CONSTRAINT)); }
public static boolean isExpected(final Class<?> exceptionClass) { return EXCEPTIONS.stream().anyMatch(each -> each.isAssignableFrom(exceptionClass)); }
@Test void assertIsExpected() { assertTrue(ExpectedExceptions.isExpected(ShardingSphereServerException.class)); assertTrue(ExpectedExceptions.isExpected(SQLDialectException.class)); assertTrue(ExpectedExceptions.isExpected(ShardingSphereSQLException.class)); }
public static Map<String, Object> merge(Map<String, Object> orig, Map<String, Object> update) { final Map<String, Object> merged = new HashMap<>(orig); update.forEach((k, v) -> { if (orig.get(k) instanceof EncryptedValue origValue && v instanceof EncryptedValue newValue) { merged.put(k, mergeEncryptedValues(origValue, newValue)); } else { merged.put(k, v); } }); return merged; }
@Test void testMerge_replaceValue() { final Map<String, Object> orig = Map.of( "unencrypted", "old unencrypted", "encrypted", encryptedValueService.encrypt("old encrypted") ); final Map<String, Object> update = Map.of( "unencrypted", "new unencrypted", "encrypted", encryptedValueService.encrypt("new encrypted") ); assertThat(EncryptedInputConfigs.merge(orig, update)).isEqualTo(update); }
public static CodecFactory fromHadoopString(String hadoopCodecClass) { CodecFactory o = null; try { String avroCodec = HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass); if (avroCodec != null) { o = CodecFactory.fromString(avroCodec); } } catch (Exception e) { throw new AvroRuntimeException("Unrecognized hadoop codec: " + hadoopCodecClass, e); } return o; }
@Test void hadoopCodecFactoryFail() { CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.FooCodec"); assertNull(hadoopSnappyCodec); }
@Deprecated @Override public void init(final ProcessorContext context, final StateStore root) { this.context = asInternalProcessorContext(context); super.init(context, root); }
@SuppressWarnings("deprecation") @Test public void shouldDelegateDeprecatedInit() { store.init((ProcessorContext) context, store); verify(inner).init((ProcessorContext) context, store); }
@Override public void onMetadataUpdate( MetadataDelta delta, MetadataImage newImage, LoaderManifest manifest ) { boolean checkBrokerRegistration = false; if (delta.featuresDelta() != null) { if (delta.metadataVersionChanged().isPresent()) { if (log.isTraceEnabled()) { log.trace("Metadata version change is present: {}", delta.metadataVersionChanged()); } checkBrokerRegistration = true; } } if (delta.clusterDelta() != null) { if (delta.clusterDelta().changedBrokers().get(id) != null) { if (log.isTraceEnabled()) { log.trace("Broker change is present: {}", delta.clusterDelta().changedBrokers().get(id)); } checkBrokerRegistration = true; } } if (checkBrokerRegistration) { if (brokerRegistrationNeedsRefresh(newImage.features().metadataVersion(), delta.clusterDelta().broker(id))) { refreshRegistrationCallback.run(); } } }
@Test public void testMetadataVersionUpdateWithoutRegistrationDoesNothing() { BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext(); MetadataDelta delta = ctx.newDelta(); delta.replay(new FeatureLevelRecord(). setName(MetadataVersion.FEATURE_NAME). setFeatureLevel(MetadataVersion.IBP_3_7_IV2.featureLevel())); ctx.onMetadataUpdate(delta); assertEquals(0, ctx.numCalls.get()); }
public synchronized boolean maybeUpdatePushRequestTimestamp(long currentTime) { /* Immediate push request after get subscriptions fetch can be accepted outside push interval time as client applies a jitter to the push interval, which might result in a request being sent between 0.5 * pushIntervalMs and 1.5 * pushIntervalMs. */ boolean canAccept = lastGetRequestTimestamp > lastPushRequestTimestamp; if (!canAccept) { long timeElapsedSinceLastMsg = currentTime - lastPushRequestTimestamp; canAccept = timeElapsedSinceLastMsg >= pushIntervalMs; } // Update the timestamp only if the request can be accepted. if (canAccept) { lastPushRequestTimestamp = currentTime; } return canAccept; }
@Test public void testMaybeUpdatePushRequestWithImmediateRetryFail() { assertTrue(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis())); // Second request should be rejected as time since last request is less than the push interval. assertFalse(clientInstance.maybeUpdatePushRequestTimestamp(System.currentTimeMillis())); }
public CsvData read() throws IORuntimeException { return read(this.reader, false); }
@Test public void readBeanListTest() { final CsvReader reader = CsvUtil.getReader(); final List<TestBean> result = reader.read( ResourceUtil.getUtf8Reader("test_bean.csv"), TestBean.class); assertEquals("张三", result.get(0).getName()); assertEquals("男", result.get(0).getGender()); assertEquals("无", result.get(0).getFocus()); assertEquals(Integer.valueOf(33), result.get(0).getAge()); assertEquals("李四", result.get(1).getName()); assertEquals("男", result.get(1).getGender()); assertEquals("好对象", result.get(1).getFocus()); assertEquals(Integer.valueOf(23), result.get(1).getAge()); assertEquals("王妹妹", result.get(2).getName()); assertEquals("女", result.get(2).getGender()); assertEquals("特别关注", result.get(2).getFocus()); assertEquals(Integer.valueOf(22), result.get(2).getAge()); }
@Override public boolean isOpen() { return internal.isOpen(); }
@Test public void shouldDelegateIsOpen() { // `isOpen = true` case when(inner.isOpen()).thenReturn(true); assertThat(store.isOpen(), is(true)); // `isOpen = false` case when(inner.isOpen()).thenReturn(false); assertThat(store.isOpen(), is(false)); }
public static Serializable decode(final ByteBuf byteBuf) { int valueType = byteBuf.readUnsignedByte() & 0xff; StringBuilder result = new StringBuilder(); decodeValue(valueType, 1, byteBuf, result); return result.toString(); }
@Test void assertDecodeLargeJsonObjectWithSubJson() { List<JsonEntry> subJsons = Collections.singletonList(new JsonEntry(JsonValueTypes.INT32, "key1", 111)); ByteBuf payload = mockJsonObjectByteBuf(Collections.singletonList(new JsonEntry(JsonValueTypes.SMALL_JSON_OBJECT, "subJson", subJsons)), false); String actual = (String) MySQLJsonValueDecoder.decode(payload); assertThat(actual, is("{\"subJson\":{\"key1\":111}}")); }
public static boolean isFloatingNumber(String text) { final int startPos = findStartPosition(text); if (startPos < 0) { return false; } boolean dots = false; for (int i = startPos; i < text.length(); i++) { char ch = text.charAt(i); if (!Character.isDigit(ch)) { if (ch == '.') { if (dots) { return false; } dots = true; } else { return false; } } } return true; }
@Test @DisplayName("Tests that isFloatingNumber returns false for valid floats using comma instead of dot") void isFloatingNumberFloatsWithComma() { assertFalse(ObjectHelper.isFloatingNumber("12,34")); assertFalse(ObjectHelper.isFloatingNumber("-12,34")); assertFalse(ObjectHelper.isFloatingNumber("1,0")); assertFalse(ObjectHelper.isFloatingNumber("0,0")); }
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) { return signatureCache.computeIfAbsent(fn, DoFnSignatures::parseSignature); }
@Test public void testUsageOfStateDeclaredInSuperclass() throws Exception { DoFnDeclaringState fn = new DoFnDeclaringState() { @ProcessElement public void process( ProcessContext context, @StateId(DoFnDeclaringState.STATE_ID) ValueState<Integer> state) {} }; thrown.expect(IllegalArgumentException.class); thrown.expectMessage("process"); thrown.expectMessage("declared in a different class"); thrown.expectMessage(DoFnDeclaringState.STATE_ID); thrown.expectMessage(fn.getClass().getSimpleName()); DoFnSignatures.getSignature(fn.getClass()); }
@Override @Transactional(rollbackFor = Exception.class) public Long createJob(JobSaveReqVO createReqVO) throws SchedulerException { validateCronExpression(createReqVO.getCronExpression()); // 1.1 校验唯一性 if (jobMapper.selectByHandlerName(createReqVO.getHandlerName()) != null) { throw exception(JOB_HANDLER_EXISTS); } // 1.2 校验 JobHandler 是否存在 validateJobHandlerExists(createReqVO.getHandlerName()); // 2. 插入 JobDO JobDO job = BeanUtils.toBean(createReqVO, JobDO.class); job.setStatus(JobStatusEnum.INIT.getStatus()); fillJobMonitorTimeoutEmpty(job); jobMapper.insert(job); // 3.1 添加 Job 到 Quartz 中 schedulerManager.addJob(job.getId(), job.getHandlerName(), job.getHandlerParam(), job.getCronExpression(), createReqVO.getRetryCount(), createReqVO.getRetryInterval()); // 3.2 更新 JobDO JobDO updateObj = JobDO.builder().id(job.getId()).status(JobStatusEnum.NORMAL.getStatus()).build(); jobMapper.updateById(updateObj); return job.getId(); }
@Test public void testCreateJob_jobHandlerExists() throws SchedulerException { // 准备参数 指定 Cron 表达式 JobSaveReqVO reqVO = randomPojo(JobSaveReqVO.class, o -> o.setCronExpression("0 0/1 * * * ? *")); try (MockedStatic<SpringUtil> springUtilMockedStatic = mockStatic(SpringUtil.class)) { springUtilMockedStatic.when(() -> SpringUtil.getBean(eq(reqVO.getHandlerName()))) .thenReturn(jobLogCleanJob); // 调用 jobService.createJob(reqVO); // 调用,并断言异常 assertServiceException(() -> jobService.createJob(reqVO), JOB_HANDLER_EXISTS); } }
public static Schema schemaFor(Table table, long snapshotId) { Snapshot snapshot = table.snapshot(snapshotId); Preconditions.checkArgument(snapshot != null, "Cannot find snapshot with ID %s", snapshotId); Integer schemaId = snapshot.schemaId(); // schemaId could be null, if snapshot was created before Iceberg added schema id to snapshot if (schemaId != null) { Schema schema = table.schemas().get(schemaId); Preconditions.checkState(schema != null, "Cannot find schema with schema id %s", schemaId); return schema; } // TODO: recover the schema by reading previous metadata files return table.schema(); }
@Test public void schemaForBranch() { Schema initialSchema = new Schema( required(1, "id", Types.IntegerType.get()), required(2, "data", Types.StringType.get())); assertThat(table.schema().asStruct()).isEqualTo(initialSchema.asStruct()); String branch = "branch"; table.manageSnapshots().createBranch(branch).commit(); assertThat(SnapshotUtil.schemaFor(table, branch).asStruct()) .isEqualTo(initialSchema.asStruct()); table.updateSchema().addColumn("zip", Types.IntegerType.get()).commit(); Schema expected = new Schema( required(1, "id", Types.IntegerType.get()), required(2, "data", Types.StringType.get()), optional(3, "zip", Types.IntegerType.get())); assertThat(table.schema().asStruct()).isEqualTo(expected.asStruct()); assertThat(SnapshotUtil.schemaFor(table, branch).asStruct()).isEqualTo(expected.asStruct()); }
public RingbufferConfig setTimeToLiveSeconds(int timeToLiveSeconds) { this.timeToLiveSeconds = checkNotNegative(timeToLiveSeconds, "timeToLiveSeconds can't be smaller than 0"); return this; }
@Test public void setTimeToLiveSeconds() { RingbufferConfig config = new RingbufferConfig(NAME); RingbufferConfig returned = config.setTimeToLiveSeconds(10); assertSame(returned, config); assertEquals(10, config.getTimeToLiveSeconds()); }
@Override public List<Service> getServiceDefinitions() throws MockRepositoryImportException { List<Service> results = new ArrayList<>(); // Start checking version and comment. String version = spec.path("log").path("version").asText(); if (!VALID_VERSIONS.contains(version)) { throw new MockRepositoryImportException( "HAR version is not supported. Currently supporting: " + VALID_VERSIONS); } String comment = spec.path("log").path("comment").asText(); if (comment == null || comment.length() == 0) { throw new MockRepositoryImportException( "Expecting a comment in HAR log to specify Microcks service identifier"); } // We can start building something. Service service = new Service(); service.setType(ServiceType.REST); // 1st thing: look for comments to get service and version identifiers. String[] commentLines = comment.split("\\r?\\n|\\r"); for (String commentLine : commentLines) { if (commentLine.trim().startsWith(MICROCKS_ID_STARTER)) { String identifiers = commentLine.trim().substring(MICROCKS_ID_STARTER.length()); if (identifiers.indexOf(":") != -1) { String[] serviceAndVersion = identifiers.split(":"); service.setName(serviceAndVersion[0].trim()); service.setVersion(serviceAndVersion[1].trim()); } else { log.error("microcksId comment is malformed. Expecting \'microcksId: <API_name>:<API_version>\'"); throw new MockRepositoryImportException( "microcksId comment is malformed. 
Expecting \'microcksId: <API_name>:<API_version>\'"); } } else if (commentLine.trim().startsWith(API_PREFIX_STARTER)) { apiPrefix = commentLine.trim().substring(API_PREFIX_STARTER.length()).trim(); log.info("Found an API prefix to use for shortening URLs: {}", apiPrefix); } } if (service.getName() == null || service.getVersion() == null) { log.error("No microcksId: comment found into GraphQL schema to get API name and version"); throw new MockRepositoryImportException( "No microcksId: comment found into GraphQL schema to get API name and version"); } // Inspect requests content to determine the most probable service type. Map<ServiceType, Integer> requestsCounters = countRequestsByServiceType( spec.path("log").path("entries").elements()); if ((requestsCounters.get(ServiceType.GRAPHQL) > requestsCounters.get(ServiceType.REST)) && (requestsCounters.get(ServiceType.GRAPHQL) > requestsCounters.get(ServiceType.SOAP_HTTP))) { service.setType(ServiceType.GRAPHQL); } else if ((requestsCounters.get(ServiceType.SOAP_HTTP) > requestsCounters.get(ServiceType.REST)) && (requestsCounters.get(ServiceType.SOAP_HTTP) > requestsCounters.get(ServiceType.GRAPHQL))) { service.setType(ServiceType.SOAP_HTTP); } // Extract service operations. service.setOperations(extractOperations(service.getType(), spec.path("log").path("entries").elements())); results.add(service); return results; }
@Test void testMissingCommentHARImport() { HARImporter importer = null; try { importer = new HARImporter("target/test-classes/io/github/microcks/util/har/microcks.har"); } catch (IOException ioe) { fail("Exception should not be thrown"); } // Check that basic service properties import fail because of missing comment. boolean failure = false; List<Service> services = null; try { services = importer.getServiceDefinitions(); } catch (MockRepositoryImportException e) { failure = true; assertNotEquals(-1, e.getMessage().indexOf("Expecting a comment in HAR")); } assertTrue(failure); }
public static Map<String, Range<PartitionKey>> getPartitionKeyRange(Table table, Column partitionColumn, Expr partitionExpr) throws UserException { return ConnectorPartitionTraits.build(table).getPartitionKeyRange(partitionColumn, partitionExpr); }
@Test public void testGetPartitionRange(@Mocked HiveTable table) throws UserException { Column partitionColumn = new Column("date", Type.DATE); List<String> partitionNames = ImmutableList.of("date=2022-08-02", "date=2022-08-19", "date=2022-08-21", "date=2022-09-01", "date=2022-10-01", "date=2022-12-02"); new MockUp<PartitionUtil>() { @Mock public List<Column> getPartitionColumns(Table table) { return ImmutableList.of(partitionColumn); } }; new MockUp<MetadataMgr>() { @Mock public List<String> listPartitionNames(String catalogName, String dbName, String tableName) { return partitionNames; } }; new Expectations() { { table.getType(); result = Table.TableType.HIVE; minTimes = 0; table.isHiveTable(); result = true; minTimes = 0; } }; Map<String, Range<PartitionKey>> partitionMap = PartitionUtil.getPartitionKeyRange(table, partitionColumn, null); Assert.assertEquals(partitionMap.size(), partitionNames.size()); Assert.assertTrue(partitionMap.containsKey("p20221202")); PartitionKey upperBound = new PartitionKey(); upperBound.pushColumn(new DateLiteral(2022, 12, 03), PrimitiveType.DATE); Assert.assertTrue(partitionMap.get("p20221202").upperEndpoint().equals(upperBound)); }
@Override public void update(Component component, Metric metric, Measure measure) { requireNonNull(component); checkValueTypeConsistency(metric, measure); Optional<Measure> existingMeasure = find(component, metric); if (!existingMeasure.isPresent()) { throw new UnsupportedOperationException( format( "a measure can be updated only if one already exists for a specific Component (key=%s), Metric (key=%s). Use add method", component.getKey(), metric.getKey())); } add(component, metric, measure, OverridePolicy.OVERRIDE); }
@Test
public void update_throws_NPE_if_Component_argument_is_null() {
    // A null component must be rejected up front with a NullPointerException.
    assertThatThrownBy(() -> underTest.update(null, metric1, SOME_MEASURE))
        .isInstanceOf(NullPointerException.class);
}
/**
 * Merges per-sub-cluster queue-user-ACL responses into a single response,
 * dropping null/empty responses and de-duplicating identical ACL entries.
 */
public static GetQueueUserAclsInfoResponse mergeQueueUserAcls(
    Collection<GetQueueUserAclsInfoResponse> responses) {
    // A set de-duplicates ACL entries repeated across sub-clusters.
    Set<QueueUserACLInfo> mergedAcls = new HashSet<>();
    for (GetQueueUserAclsInfoResponse response : responses) {
        if (response == null) {
            continue;
        }
        List<QueueUserACLInfo> infos = response.getUserAclsInfoList();
        if (infos != null) {
            mergedAcls.addAll(infos);
        }
    }
    GetQueueUserAclsInfoResponse aclsInfoResponse = Records.newRecord(
        GetQueueUserAclsInfoResponse.class);
    aclsInfoResponse.setUserAclsInfoList(new ArrayList<>(mergedAcls));
    return aclsInfoResponse;
}
@Test
public void testMergeQueueUserAclsResponse() {
    // Three ACL permutations used to build distinct QueueUserACLInfo entries.
    List<QueueACL> submitOnlyAcl = new ArrayList<>();
    submitOnlyAcl.add(QueueACL.SUBMIT_APPLICATIONS);
    List<QueueACL> administerOnlyAcl = new ArrayList<>();
    administerOnlyAcl.add(QueueACL.ADMINISTER_QUEUE);
    List<QueueACL> submitAndAdministerAcl = new ArrayList<>();
    submitAndAdministerAcl.add(QueueACL.ADMINISTER_QUEUE);
    submitAndAdministerAcl.add(QueueACL.SUBMIT_APPLICATIONS);
    // Info 1 and 3 are equal, so the merge must collapse them into one entry.
    QueueUserACLInfo queueUserACLInfo1 = QueueUserACLInfo.newInstance(
        "root", submitAndAdministerAcl);
    QueueUserACLInfo queueUserACLInfo2 = QueueUserACLInfo.newInstance(
        "default", submitOnlyAcl);
    QueueUserACLInfo queueUserACLInfo3 = QueueUserACLInfo.newInstance(
        "root", submitAndAdministerAcl);
    QueueUserACLInfo queueUserACLInfo4 = QueueUserACLInfo.newInstance(
        "yarn", administerOnlyAcl);
    List<QueueUserACLInfo> queueUserACLInfoList1 = new ArrayList<>();
    List<QueueUserACLInfo> queueUserACLInfoList2 = new ArrayList<>();
    queueUserACLInfoList1.add(queueUserACLInfo1);
    queueUserACLInfoList1.add(queueUserACLInfo2);
    queueUserACLInfoList2.add(queueUserACLInfo3);
    queueUserACLInfoList2.add(queueUserACLInfo4);
    // normal response
    GetQueueUserAclsInfoResponse response1 = Records.newRecord(
        GetQueueUserAclsInfoResponse.class);
    response1.setUserAclsInfoList(queueUserACLInfoList1);
    GetQueueUserAclsInfoResponse response2 = Records.newRecord(
        GetQueueUserAclsInfoResponse.class);
    response2.setUserAclsInfoList(queueUserACLInfoList2);
    // empty response
    GetQueueUserAclsInfoResponse response3 = Records.newRecord(
        GetQueueUserAclsInfoResponse.class);
    // null response
    GetQueueUserAclsInfoResponse response4 = null;
    List<GetQueueUserAclsInfoResponse> responses = new ArrayList<>();
    responses.add(response1);
    responses.add(response2);
    responses.add(response3);
    responses.add(response4);
    // expected user acls: duplicates collapsed, empty/null responses ignored
    List<QueueUserACLInfo> expectedOutput = new ArrayList<>();
    expectedOutput.add(queueUserACLInfo1);
    expectedOutput.add(queueUserACLInfo2);
    expectedOutput.add(queueUserACLInfo4);
    GetQueueUserAclsInfoResponse response = RouterYarnClientUtils.mergeQueueUserAcls(responses);
    Assert.assertTrue(CollectionUtils.isEqualCollection(expectedOutput,
        response.getUserAclsInfoList()));
}
/**
 * Deregisters an instance from a naming service (HTTP DELETE endpoint).
 *
 * @param instanceForm form describing the instance to remove; validated first
 * @return success result carrying "ok"
 * @throws NacosException when validation or removal fails
 */
@CanDistro
@DeleteMapping
@TpsControl(pointName = "NamingInstanceDeregister", name = "HttpNamingInstanceDeregister")
@Secured(action = ActionTypes.WRITE)
public Result<String> deregister(InstanceForm instanceForm) throws NacosException {
    // check param
    instanceForm.validate();
    checkWeight(instanceForm.getWeight());
    // build instance
    Instance instance = buildInstance(instanceForm);
    instanceServiceV2.removeInstance(instanceForm.getNamespaceId(), buildCompositeServiceName(instanceForm),
        instance);
    // Publish a trace event so the deregistration is visible to auditing/monitoring.
    NotifyCenter.publishEvent(
        new DeregisterInstanceTraceEvent(System.currentTimeMillis(), NamingRequestUtil.getSourceIp(), false,
            DeregisterInstanceReason.REQUEST, instanceForm.getNamespaceId(), instanceForm.getGroupName(),
            instanceForm.getServiceName(), instance.getIp(), instance.getPort()));
    return Result.success("ok");
}
@Test
void deregisterInstance() throws Exception {
    // Build a fully-populated form for an ephemeral instance.
    InstanceForm instanceForm = new InstanceForm();
    instanceForm.setNamespaceId(TEST_NAMESPACE);
    instanceForm.setGroupName("DEFAULT_GROUP");
    instanceForm.setServiceName("test-service");
    instanceForm.setIp(TEST_IP);
    instanceForm.setClusterName(TEST_CLUSTER_NAME);
    instanceForm.setPort(9999);
    instanceForm.setHealthy(true);
    instanceForm.setWeight(1.0);
    instanceForm.setEnabled(true);
    instanceForm.setMetadata(TEST_METADATA);
    instanceForm.setEphemeral(true);
    Result<String> result = instanceControllerV2.deregister(instanceForm);
    // The service must be asked to remove the instance under the composite service name.
    verify(instanceServiceV2).removeInstance(eq(TEST_NAMESPACE), eq(TEST_SERVICE_NAME), any());
    assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
    assertEquals("ok", result.getData());
}
protected boolean inList(String includeMethods, String excludeMethods, String methodName) { //判断是否在白名单中 if (!StringUtils.ALL.equals(includeMethods)) { if (!inMethodConfigs(includeMethods, methodName)) { return false; } } //判断是否在黑白单中 if (inMethodConfigs(excludeMethods, methodName)) { return false; } //默认还是要发布 return true; }
@Test
public void includeListTest() {
    // "hello1" is on the include list; the exclude entry "hello2" does not match it,
    // so the method must be published.
    ProviderConfig providerConfig = new ProviderConfig();
    DefaultProviderBootstrap bootstrap = new DefaultProviderBootstrap(providerConfig);
    boolean published = bootstrap.inList("hello1", "hello2", "hello1");
    Assert.assertTrue(published);
}
/**
 * Resolves the client address from X-Forwarded-For, trusting at most
 * {@code maxTrustedIndex} hops; falls back to the remote socket address
 * when no forwarded values are available.
 */
@Override
public InetSocketAddress resolve(ServerWebExchange exchange) {
    List<String> forwardedValues = extractXForwardedValues(exchange);
    if (forwardedValues.isEmpty()) {
        // No usable X-Forwarded-For data: use the remote socket address.
        return defaultRemoteIpResolver.resolve(exchange);
    }
    // Pick the right-most trusted entry; clamp so the index never goes negative
    // when fewer hops than maxTrustedIndex are present.
    int trustedIndex = Math.max(0, forwardedValues.size() - maxTrustedIndex);
    return new InetSocketAddress(forwardedValues.get(trustedIndex), 0);
}
@Test
public void trustAllFallsBackOnMultipleHeaders() {
    // With two X-Forwarded-For headers the resolver is expected to fall back
    // to the remote socket address (0.0.0.0) instead of trusting either value.
    ServerWebExchange exchange = buildExchange(
        remoteAddressOnlyBuilder()
            .header("X-Forwarded-For", "0.0.0.1")
            .header("X-Forwarded-For", "0.0.0.2"));
    InetSocketAddress resolved = trustAll.resolve(exchange);
    assertThat(resolved.getHostName()).isEqualTo("0.0.0.0");
}
/**
 * Decodes an untethered-subscription state-change event from the log buffer
 * and appends its human-readable form to the builder.
 */
static void dissectUntetheredSubscriptionStateChange(
    final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) {
    int readOffset = offset;
    // The common log header comes first and reports how many bytes it consumed.
    readOffset += dissectLogHeader(
        CONTEXT, UNTETHERED_SUBSCRIPTION_STATE_CHANGE, buffer, readOffset, builder);

    // Fixed-size fields are laid out as: long subscriptionId, int streamId, int sessionId.
    final long subscriptionId = buffer.getLong(readOffset, LITTLE_ENDIAN);
    readOffset += SIZE_OF_LONG;
    final int streamId = buffer.getInt(readOffset, LITTLE_ENDIAN);
    readOffset += SIZE_OF_INT;
    final int sessionId = buffer.getInt(readOffset, LITTLE_ENDIAN);
    readOffset += SIZE_OF_INT;

    builder.append(": subscriptionId=").append(subscriptionId)
        .append(" streamId=").append(streamId)
        .append(" sessionId=").append(sessionId)
        .append(' ');
    // The state description is a length-prefixed ASCII string at the tail.
    buffer.getStringAscii(readOffset, builder);
}
@Test
void dissectUntetheredSubscriptionStateChange() {
    final int offset = 12;
    // Encode the log header followed by the payload fields
    // (subscriptionId, streamId, sessionId, state string) exactly as the driver would.
    internalEncodeLogHeader(buffer, offset, 22, 88, () -> 1_500_000_000L);
    buffer.putLong(offset + LOG_HEADER_LENGTH, 88, LITTLE_ENDIAN);
    buffer.putInt(offset + LOG_HEADER_LENGTH + SIZE_OF_LONG, 123, LITTLE_ENDIAN);
    buffer.putInt(offset + LOG_HEADER_LENGTH + SIZE_OF_LONG + SIZE_OF_INT, 777, LITTLE_ENDIAN);
    buffer.putStringAscii(offset + LOG_HEADER_LENGTH + SIZE_OF_LONG + 2 * SIZE_OF_INT, "state changed");
    DriverEventDissector.dissectUntetheredSubscriptionStateChange(buffer, offset, builder);
    // Expect the full rendered event line, header included.
    assertEquals("[1.500000000] " + CONTEXT + ": " + UNTETHERED_SUBSCRIPTION_STATE_CHANGE.name()
        + " [22/88]: subscriptionId=88 streamId=123 sessionId=777 state changed", builder.toString());
}
/**
 * Creates an unconfigured {@code Read} transform. Callers are expected to
 * configure it (plugin, plugin config, key/value classes) before applying —
 * see the builder-style {@code with*} methods used by the tests.
 */
public static <K, V> Read<K, V> read() {
    return new AutoValue_CdapIO_Read.Builder<K, V>().build();
}
@Test
public void testReadFromCdapBatchPlugin() {
    EmployeeConfig pluginConfig =
        new ConfigWrapper<>(EmployeeConfig.class).withParams(TEST_EMPLOYEE_PARAMS_MAP).build();
    // Configure a batch read against the test Employee plugin.
    CdapIO.Read<String, String> read = CdapIO.<String, String>read()
        .withCdapPlugin(
            Plugin.createBatch(
                EmployeeBatchSource.class,
                EmployeeInputFormat.class,
                EmployeeInputFormatProvider.class))
        .withPluginConfig(pluginConfig)
        .withKeyClass(String.class)
        .withValueClass(String.class);
    // The test input format emits (i, prefix + i) records for i starting at 1.
    List<KV<String, String>> expected = new ArrayList<>();
    for (int i = 1; i < EmployeeInputFormat.NUM_OF_TEST_EMPLOYEE_RECORDS; i++) {
        expected.add(KV.of(String.valueOf(i), EmployeeInputFormat.EMPLOYEE_NAME_PREFIX + i));
    }
    PCollection<KV<String, String>> actual = p.apply("ReadBatchTest", read);
    PAssert.that(actual).containsInAnyOrder(expected);
    p.run();
}
public static Set<ConfigKey<?>> getAllConfigsProduced(Class<? extends ConfigProducer> producerClass, String configId) { // TypeToken is @Beta in guava, so consider implementing a simple recursive method instead. TypeToken<? extends ConfigProducer>.TypeSet interfaces = TypeToken.of(producerClass).getTypes().interfaces(); return interfaces.rawTypes().stream() .filter(ReflectionUtil::isConcreteProducer) .map(i -> createConfigKeyFromInstance(i.getEnclosingClass(), configId)) .collect(Collectors.toCollection(() -> new LinkedHashSet<>())); }
@Test
void getAllConfigsProduced_includes_configs_produced_by_implemented_interface() {
    Set<ConfigKey<?>> configs = getAllConfigsProduced(InterfaceImplementingProducer.class, "foo");
    // Both the directly produced config and the one contributed by the
    // implemented producer interface must be present.
    assertEquals(2, configs.size());
    assertTrue(configs.contains(new ConfigKey<>(SimpletypesConfig.CONFIG_DEF_NAME, "foo", SimpletypesConfig.CONFIG_DEF_NAMESPACE)));
    assertTrue(configs.contains(new ConfigKey<>(ArraytypesConfig.CONFIG_DEF_NAME, "foo", ArraytypesConfig.CONFIG_DEF_NAMESPACE)));
}
/**
 * Reads a little-endian unsigned 16-bit value from the buffer's current
 * position, advancing the position by two bytes.
 *
 * <p>Unlike the previous implementation, this does NOT call
 * {@code buf.order(LITTLE_ENDIAN)}, which permanently mutated the caller's
 * buffer byte order as a hidden side effect.
 *
 * @param buf the buffer to read from
 * @return the value as an int in [0, 65535]
 * @throws BufferUnderflowException if fewer than two bytes remain
 */
public static int readUint16(ByteBuffer buf) throws BufferUnderflowException {
    // Read the two bytes explicitly (low byte first) so the buffer's
    // configured byte order is neither consulted nor modified.
    int low = Byte.toUnsignedInt(buf.get());
    int high = Byte.toUnsignedInt(buf.get());
    return low | (high << 8);
}
@Test
public void testReadUint16() {
    // Bytes {2, 1} little-endian decode to 0x0102 == 258; trailing bytes and a
    // non-zero offset must not affect the decoded value.
    assertEquals(258L, ByteUtils.readUint16(new byte[]{2, 1}, 0));
    assertEquals(258L, ByteUtils.readUint16(new byte[]{2, 1, 3, 4}, 0));
    assertEquals(772L, ByteUtils.readUint16(new byte[]{1, 2, 4, 3}, 2));
}
@Override // mappedStatementId 参数,暂时没有用。以后,可以基于 mappedStatementId + DataPermission 进行缓存 public List<DataPermissionRule> getDataPermissionRule(String mappedStatementId) { // 1. 无数据权限 if (CollUtil.isEmpty(rules)) { return Collections.emptyList(); } // 2. 未配置,则默认开启 DataPermission dataPermission = DataPermissionContextHolder.get(); if (dataPermission == null) { return rules; } // 3. 已配置,但禁用 if (!dataPermission.enable()) { return Collections.emptyList(); } // 4. 已配置,只选择部分规则 if (ArrayUtil.isNotEmpty(dataPermission.includeRules())) { return rules.stream().filter(rule -> ArrayUtil.contains(dataPermission.includeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 5. 已配置,只排除部分规则 if (ArrayUtil.isNotEmpty(dataPermission.excludeRules())) { return rules.stream().filter(rule -> !ArrayUtil.contains(dataPermission.excludeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 6. 已配置,全部规则 return rules; }
@Test
public void testGetDataPermissionRule_05() {
    // Prepare an (unused) statement id.
    String mappedStatementId = randomString();
    // Bind the @DataPermission annotation declared on TestClass05 to the context.
    DataPermissionContextHolder.add(AnnotationUtils.findAnnotation(TestClass05.class, DataPermission.class));
    // Invoke.
    List<DataPermissionRule> result = dataPermissionRuleFactory.getDataPermissionRule(mappedStatementId);
    // Only rule 02 should survive the annotation's filtering.
    assertEquals(1, result.size());
    assertEquals(DataPermissionRule02.class, result.get(0).getClass());
}
/**
 * Builds the JVM options for the container localizer: admin-configured
 * options first, then user options (so user options can override them),
 * with empty tokens removed.
 */
public static List<String> getJavaOpts(Configuration conf) {
    String adminOpts = conf.get(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_DEFAULT);
    String userOpts = conf.get(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_DEFAULT);

    // On JDK 17+ optionally append the extra module-access options.
    boolean addJdk17PlusOptions = conf.getBoolean(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_DEFAULT);
    if (Shell.isJavaVersionAtLeast(17) && addJdk17PlusOptions) {
        userOpts = userOpts.trim() + " " + ADDITIONAL_JDK17_PLUS_OPTIONS;
    }

    List<String> adminOptionList = Arrays.asList(adminOpts.split("\\s+"));
    List<String> userOptionList = Arrays.asList(userOpts.split("\\s+"));
    return Stream.concat(adminOptionList.stream(), userOptionList.stream())
        .filter(opt -> !opt.isEmpty())
        .collect(Collectors.toList());
}
@Test
public void testAdminOptionsPrecedeDefaultUserOptions() throws Exception {
    ContainerLocalizerWrapper wrapper = new ContainerLocalizerWrapper();
    ContainerLocalizer localizer = wrapper.setupContainerLocalizerForTest();
    Configuration conf = new Configuration();
    // Only admin opts are configured; the default user opts ("-Xmx256m")
    // must follow them in the resulting list.
    conf.setStrings(YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_KEY,
        "adminOption1 adminOption2");
    List<String> javaOpts = localizer.getJavaOpts(conf);
    Assert.assertEquals(3, javaOpts.size());
    Assert.assertTrue(javaOpts.get(0).equals("adminOption1"));
    Assert.assertTrue(javaOpts.get(1).equals("adminOption2"));
    Assert.assertTrue(javaOpts.get(2).equals("-Xmx256m"));
}
/**
 * Substitutes ${...} variable references in {@code val}, resolving against
 * {@code pc1} only. Convenience overload with no secondary property container.
 */
public static String substVars(String val, PropertyContainer pc1) {
    return substVars(val, pc1, null);
}
@Test
public void trailingColon_LOGBACK_1140() {
    // Regression for LOGBACK-1140: a variable value ending in ':' must pass
    // through substitution unmangled.
    String expectedPrefix = "c:";
    String suffix = "/tmp";
    context.putProperty("var", expectedPrefix);
    String substituted = OptionHelper.substVars("${var}" + suffix, context);
    assertEquals(expectedPrefix + suffix, substituted);
}
// Parses the textual representation of a segment; thin delegate to parseFromString.
static Segment fromString(String segmentString) {
    return parseFromString(segmentString);
}
@Test
public void fromString_allExcludedTokens_returnsNullSegment() {
    // A string made up solely of excluded tokens ("gg.N/A") parses to the NULL segment.
    assertThat(Segment.fromString("gg.N/A")).isEqualTo(Segment.NULL);
}
/**
 * Parses a string into a schema-and-value pair, inferring the schema from the
 * string's content.
 *
 * @param value the string to parse; may be null
 * @return the canonical null pair for null input, a STRING pair for the empty
 *         string, otherwise the parser's inferred schema and value
 */
public static SchemaAndValue parseString(String value) {
    // Null input maps to the shared null schema/value pair.
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    // Empty strings are returned as-is with a STRING schema.
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    // Otherwise tokenize and let the value parser infer the schema
    // (e.g. numbers, timestamps — see the parser tests).
    ValueParser parser = new ValueParser(new Parser(value));
    return parser.parse(false);
}
@Test
public void shouldParseTimestampStringWithEscapedColonsAsTimestamp() throws Exception {
    // Colons escaped with backslashes must still be recognized as an ISO-8601 timestamp.
    String str = "2019-08-23T14\\:34\\:54.346Z";
    SchemaAndValue result = Values.parseString(str);
    assertEquals(Type.INT64, result.schema().type());
    assertEquals(Timestamp.LOGICAL_NAME, result.schema().name());
    // The parsed value equals the unescaped timestamp.
    String expectedStr = "2019-08-23T14:34:54.346Z";
    java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(expectedStr);
    assertEquals(expected, result.value());
}
public static String findAddress(List<NodeAddress> addresses, NodeAddressType preferredAddressType) { if (addresses == null) { return null; } Map<String, String> addressMap = addresses.stream() .collect(Collectors.toMap(NodeAddress::getType, NodeAddress::getAddress, (address1, address2) -> { LOGGER.warnOp("Found multiple addresses with the same type. Only the first address '{}' will be used.", address1); return address1; })); // If user set preferred address type, we should check it first if (preferredAddressType != null && addressMap.containsKey(preferredAddressType.toValue())) { return addressMap.get(preferredAddressType.toValue()); } if (addressMap.containsKey("ExternalDNS")) { return addressMap.get("ExternalDNS"); } else if (addressMap.containsKey("ExternalIP")) { return addressMap.get("ExternalIP"); } else if (addressMap.containsKey("InternalDNS")) { return addressMap.get("InternalDNS"); } else if (addressMap.containsKey("InternalIP")) { return addressMap.get("InternalIP"); } else if (addressMap.containsKey("Hostname")) { return addressMap.get("Hostname"); } return null; }
@Test
public void testFindAddressWithMultipleAddressesOfSameType() {
    List<NodeAddress> addresses = new ArrayList<>(3);
    // Duplicate types on purpose: only the first address of each type may be used.
    addresses.add(new NodeAddressBuilder().withType("ExternalDNS").withAddress("my.external.address").build());
    addresses.add(new NodeAddressBuilder().withType("ExternalDNS").withAddress("my.external.address2").build());
    addresses.add(new NodeAddressBuilder().withType("InternalDNS").withAddress("my.internal.address").build());
    addresses.add(new NodeAddressBuilder().withType("InternalDNS").withAddress("my.internal.address2").build());
    addresses.add(new NodeAddressBuilder().withType("InternalIP").withAddress("192.168.2.94").build());
    // With no preferred type, ExternalDNS wins and its first duplicate is kept.
    String address = NodeUtils.findAddress(addresses, null);
    assertThat(address, is("my.external.address"));
}
@Override
public void delete(EventDefinitionDto nativeEntity) {
    // NOTE(review): uses deleteImmutable — presumably so facade-driven cleanup
    // can also remove system/immutable event definitions; confirm that is intended.
    eventDefinitionHandler.deleteImmutable(nativeEntity.id());
}
@Test
@MongoDBFixtures("EventDefinitionFacadeTest.json")
public void delete() {
    // The fixture seeds exactly one event definition.
    long countBefore = eventDefinitionService.streamAll().count();
    assertThat(countBefore).isEqualTo(1);
    final Optional<EventDefinitionDto> eventDefinitionDto = eventDefinitionService.get(
        "5d4032513d2746703d1467f6");
    assertThat(eventDefinitionDto).isPresent();
    // Deleting through the facade must remove it from the service.
    facade.delete(eventDefinitionDto.get());
    long countAfter = eventDefinitionService.streamAll().count();
    assertThat(countAfter).isEqualTo(0);
}
/**
 * Looks up a group by UUID and converts it to {@code GroupInformation}.
 *
 * @return empty when no group with the given UUID exists
 */
public Optional<GroupInformation> findGroupByUuid(DbSession dbSession, String groupUuid) {
    return Optional.ofNullable(dbClient.groupDao().selectByUuid(dbSession, groupUuid))
        .map(group -> groupDtoToGroupInformation(group, dbSession));
}
@Test
public void findGroupByUuid_whenGroupDoesntExist_returnsEmptyOptional() {
    // The DAO returns null for unknown UUIDs; the service must translate
    // that into an empty Optional.
    when(dbClient.groupDao().selectByUuid(dbSession, GROUP_UUID))
        .thenReturn(null);
    assertThat(groupService.findGroupByUuid(dbSession, GROUP_UUID)).isEmpty();
}
// Plain accessor for the query's structural GUID.
public String getStructuralGuid() {
    return this.structuralGuid;
}
@Test
public void queriesWithDifferentAnonFormShouldGetSameStructurallySimilarId() {
    // Given: two queries that differ in a column type (DOUBLE vs INT), so
    // their structural forms are not the same.
    final String anonQuery1 = "CREATE STREAM stream1 (column1 VARCHAR, column2 DOUBLE) WITH "
        + "(kafka_topic=['string'], value_format=['string'], partitions='0';";
    final String anonQuery2 = "CREATE STREAM stream1 (column1 VARCHAR, column2 INT) WITH "
        + "(kafka_topic=['string'], value_format=['string'], partitions='0';";
    // When:
    final String id1 = new QueryGuid(TEST_NAMESPACE, "TEST", anonQuery1)
        .getStructuralGuid();
    final String id2 = new QueryGuid(TEST_NAMESPACE, "TEST", anonQuery2)
        .getStructuralGuid();
    // Then: structurally different queries must get different GUIDs.
    Assert.assertNotEquals(id1, id2);
}
/**
 * Parses an address string into a {@link URL}, filling in missing parts
 * (protocol, username, password, port, path and parameters) from
 * {@code defaults}. A comma-separated address list becomes a single URL whose
 * extra addresses are carried in the {@code backup} parameter.
 *
 * @param address  the address to parse; must not be empty
 * @param defaults default values keyed by PROTOCOL_KEY, USERNAME_KEY,
 *                 PASSWORD_KEY, PORT_KEY, PATH_KEY plus arbitrary parameters;
 *                 may be null
 * @return the parsed URL with defaults applied
 * @throws IllegalArgumentException when {@code address} is empty
 */
public static URL parseURL(String address, Map<String, String> defaults) {
    if (StringUtils.isEmpty(address)) {
        throw new IllegalArgumentException("Address is not allowed to be empty, please re-enter.");
    }
    String url;
    if (address.contains("://") || address.contains(URL_PARAM_STARTING_SYMBOL)) {
        // Already a full URL (or one carrying parameters): use verbatim.
        url = address;
    } else {
        // "host1,host2,host3" -> "host1?backup=host2,host3"
        String[] addresses = COMMA_SPLIT_PATTERN.split(address);
        url = addresses[0];
        if (addresses.length > 1) {
            StringBuilder backup = new StringBuilder(addresses[1]);
            for (int i = 2; i < addresses.length; i++) {
                backup.append(',').append(addresses[i]);
            }
            url += URL_PARAM_STARTING_SYMBOL + RemotingConstants.BACKUP_KEY + "=" + backup.toString();
        }
    }
    String defaultProtocol = defaults == null ? null : defaults.get(PROTOCOL_KEY);
    if (StringUtils.isEmpty(defaultProtocol)) {
        defaultProtocol = DUBBO_PROTOCOL;
    }
    String defaultUsername = defaults == null ? null : defaults.get(USERNAME_KEY);
    String defaultPassword = defaults == null ? null : defaults.get(PASSWORD_KEY);
    int defaultPort = StringUtils.parseInteger(defaults == null ? null : defaults.get(PORT_KEY));
    String defaultPath = defaults == null ? null : defaults.get(PATH_KEY);
    Map<String, String> defaultParameters = defaults == null ? null : new HashMap<>(defaults);
    if (defaultParameters != null) {
        // The well-known keys are applied individually below, never as URL parameters.
        defaultParameters.remove(PROTOCOL_KEY);
        defaultParameters.remove(USERNAME_KEY);
        defaultParameters.remove(PASSWORD_KEY);
        defaultParameters.remove(HOST_KEY);
        defaultParameters.remove(PORT_KEY);
        defaultParameters.remove(PATH_KEY);
    }
    URL u = URL.cacheableValueOf(url);
    // Track whether any default was applied; only then is a new URL built.
    boolean changed = false;
    String protocol = u.getProtocol();
    String username = u.getUsername();
    String password = u.getPassword();
    String host = u.getHost();
    int port = u.getPort();
    String path = u.getPath();
    Map<String, String> parameters = new HashMap<>(u.getParameters());
    if (StringUtils.isEmpty(protocol)) {
        changed = true;
        protocol = defaultProtocol;
    }
    if (StringUtils.isEmpty(username) && StringUtils.isNotEmpty(defaultUsername)) {
        changed = true;
        username = defaultUsername;
    }
    if (StringUtils.isEmpty(password) && StringUtils.isNotEmpty(defaultPassword)) {
        changed = true;
        password = defaultPassword;
    }
    if (port <= 0) {
        // No usable port in the address: fall back to the configured default, else 9090.
        changed = true;
        port = defaultPort > 0 ? defaultPort : 9090;
    }
    if (StringUtils.isEmpty(path) && StringUtils.isNotEmpty(defaultPath)) {
        changed = true;
        path = defaultPath;
    }
    if (defaultParameters != null && !defaultParameters.isEmpty()) {
        // Default parameters never override values present in the address itself.
        for (Map.Entry<String, String> entry : defaultParameters.entrySet()) {
            String key = entry.getKey();
            String defaultValue = entry.getValue();
            if (StringUtils.isNotEmpty(defaultValue) && StringUtils.isEmpty(parameters.get(key))) {
                changed = true;
                parameters.put(key, defaultValue);
            }
        }
    }
    if (changed) {
        u = new ServiceConfigURL(protocol, username, password, host, port, path, parameters);
    }
    return u;
}
@Test
void testAddressNull() {
    String exceptionMessage = "Address is not allowed to be empty, please re-enter.";
    // Capture the message (stays null when nothing is thrown) so the test FAILS
    // if parseURL ever stops rejecting a null address. The previous try/catch
    // silently passed when no exception was raised.
    String actualMessage = null;
    try {
        UrlUtils.parseURL(null, null);
    } catch (IllegalArgumentException illegalArgumentException) {
        actualMessage = illegalArgumentException.getMessage();
    }
    assertEquals(exceptionMessage, actualMessage);
}
/**
 * Replaces the content of the user context with the given entries.
 * The backing map instance is kept; it is cleared and repopulated.
 *
 * @param newContext the new context entries; must not be null
 * @return this config, for chaining
 */
public ClientConfig setUserContext(ConcurrentMap<String, Object> newContext) {
    // Reject null explicitly instead of silently wiping the existing context.
    isNotNull(newContext, "userContext");
    this.userContext.clear();
    this.userContext.putAll(newContext);
    return this;
}
// JUnit 4 style: the 'expected' attribute asserts that a null context
// is rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testUserContext_throwExceptionWhenContextNull() {
    ClientConfig clientConfig = new ClientConfig();
    clientConfig.setUserContext(null);
}
/**
 * Fetches all windowed rows for a key whose window start/end times fall
 * inside the given bounds, via a Kafka Streams IQv2 window-range query.
 *
 * @param key         the key to look up
 * @param partition   the partition to query
 * @param windowStart accepted range for window start times
 * @param windowEnd   accepted range for window end times
 * @param position    optional position bound the store must have reached
 * @throws MaterializationException when the query fails
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
    try {
        // Issue a window-range query over all windows of the given key.
        final WindowRangeQuery<GenericKey, GenericRow> query = WindowRangeQuery.withKey(key);
        StateQueryRequest<KeyValueIterator<Windowed<GenericKey>, GenericRow>> request =
            inStore(stateStore.getStateStoreName()).withQuery(query);
        if (position.isPresent()) {
            // Bound the query so it only answers once the store caught up to this position.
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final StateQueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> result =
            stateStore.getKafkaStreams().query(request);
        final QueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> queryResult =
            result.getPartitionResults().get(partition);
        if (queryResult.isFailure()) {
            throw failedQueryException(queryResult);
        }
        // Iterate all windows, keeping those whose start AND end fall in the requested bounds.
        // try-with-resources closes the store iterator even on failure.
        try (KeyValueIterator<Windowed<GenericKey>, GenericRow> it = queryResult.getResult()) {
            final Builder<WindowedRow> builder = ImmutableList.builder();
            while (it.hasNext()) {
                final KeyValue<Windowed<GenericKey>, GenericRow> next = it.next();
                final Window wnd = next.key.window();
                if (!windowStart.contains(wnd.startTime())) {
                    continue;
                }
                if (!windowEnd.contains(wnd.endTime())) {
                    continue;
                }
                // Each row is stamped with its window's end time.
                final long rowTime = wnd.end();
                final WindowedRow row = WindowedRow.of(
                    stateStore.schema(),
                    next.key,
                    next.value,
                    rowTime
                );
                builder.add(row);
            }
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                builder.build().iterator(), queryResult.getPosition());
        }
    } catch (final NotUpToBoundException | MaterializationException e) {
        // Already meaningful to callers: rethrow unchanged.
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
@Test
@SuppressWarnings("unchecked")
public void shouldThrowIfQueryFails() {
    // Given: the state store reports a failed result for the queried partition.
    final StateQueryResult<?> partitionResult = new StateQueryResult<>();
    partitionResult.addResult(PARTITION, QueryResult.forFailure(FailureReason.STORE_EXCEPTION, "Boom"));
    when(kafkaStreams.query(any(StateQueryRequest.class))).thenReturn(partitionResult);
    // When:
    final Exception e = assertThrows(
        MaterializationException.class,
        () -> table.get(A_KEY, PARTITION, WINDOW_START_BOUNDS, WINDOW_END_BOUNDS)
    );
    // Then: the store's failure message is propagated to the caller.
    assertThat(e.getMessage(), containsString("Boom"));
    assertThat(e, (instanceOf(MaterializationException.class)));
}
/**
 * Parses the command line and runs the selected command.
 *
 * @param arguments raw command-line arguments
 * @return an empty Optional on success (including help/version output),
 *         otherwise the failure cause
 */
public Optional<Throwable> run(String... arguments) {
    try {
        if (isFlag(HELP, arguments)) {
            parser.printHelp(stdOut);
        } else if (isFlag(VERSION, arguments)) {
            parser.printVersion(stdOut);
        } else {
            final Namespace namespace = parser.parseArgs(arguments);
            final Command command = requireNonNull(commands.get(namespace.getString(COMMAND_NAME_ATTR)),
                "Command is not found");
            try {
                command.run(bootstrap, namespace);
            } catch (Throwable e) {
                // The command failed to run, and the command knows
                // best how to cleanup / debug exception
                command.onError(this, namespace, e);
                return Optional.of(e);
            }
        }
        return Optional.empty();
    } catch (HelpScreenException ignored) {
        // This exception is triggered when the user passes in a help flag.
        // Return an empty Optional to signal that the process executed normally.
        return Optional.empty();
    } catch (ArgumentParserException e) {
        // Bad arguments: print the error plus usage to stderr and report the failure.
        stdErr.println(e.getMessage());
        e.getParser().printHelp(stdErr);
        return Optional.of(e);
    }
}
@Test
void rejectsBadSubcommandFlags() throws Exception {
    // An unknown flag on a subcommand must surface an UnrecognizedArgumentException
    // and print the subcommand usage to stderr only (stdout stays empty).
    assertThat(cli.run("check", "--yes"))
        .hasValueSatisfying(t -> assertThat(t).isExactlyInstanceOf(UnrecognizedArgumentException.class));
    assertThat(stdOut.toString())
        .isEmpty();
    assertThat(stdErr)
        .hasToString(String.format(
            "unrecognized arguments: '--yes'%n"
                + "usage: java -jar dw-thing.jar check [-h] [file]%n"
                + "%n"
                + "Parses and validates the configuration file%n"
                + "%n"
                + "positional arguments:%n"
                + " file application configuration file%n"
                + "%n"
                + "named arguments:%n"
                + " -h, --help show this help message and exit%n"
        ));
}
@Override
public Optional<String> nodeIdToName(String nodeId) {
    // Resolve the node document for the id, then extract its "name" field.
    // NOTE(review): assumes every node document carries a "name" field — a
    // missing field would make get("name") return null and NPE on asText();
    // confirm against the nodes API response shape.
    return nodeById(nodeId)
        .map(jsonNode -> jsonNode.get("name").asText());
}
@Test
void returnsNameForNodeId() throws Exception {
    mockNodesResponse();
    // The adapter must resolve the mocked node id to its node name.
    assertThat(this.clusterAdapter.nodeIdToName(nodeId)).isNotEmpty()
        .contains("es02");
}
/**
 * Walks the FireFTP profiles folder and reads bookmarks from every
 * fireFTPsites.dat file found in a profile directory.
 */
@Override
protected void parse(final ProtocolFactory protocols, final Local folder) throws AccessDeniedException {
    // FireFTP keeps one settings directory per profile under the profiles folder.
    for (Local settings : folder.list().filter(new NullFilter<Local>() {
        @Override
        public boolean accept(Local file) {
            return file.isDirectory();
        }
    })) {
        // A profile directory may contain a fireFTPsites.dat bookmark file.
        for (Local child : settings.list().filter(new NullFilter<Local>() {
            @Override
            public boolean accept(Local file) {
                if (file.isFile()) {
                    return "fireFTPsites.dat".equals(file.getName());
                }
                return false;
            }
        })) {
            this.read(protocols, child);
        }
    }
}
@Test
public void testParse() throws Exception {
    FireFtpBookmarkCollection c = new FireFtpBookmarkCollection();
    assertEquals(0, c.size());
    // The fixture folder contains a FireFTP profile yielding exactly one bookmark.
    c.parse(new ProtocolFactory(new HashSet<>(Arrays.asList(new TestProtocol(Scheme.ftp),
        new TestProtocol(Scheme.ftps), new TestProtocol(Scheme.sftp)))),
        new Local("src/test/resources/org.mozdev.fireftp"));
    assertEquals(1, c.size());
}
/**
 * Asynchronously downloads, parses and processes SAML metadata for a
 * connection, recording the outcome as a {@code SamlMetadataProcessResult}.
 * Metadata already processed (matched by signature hash) is skipped.
 *
 * @param con the connection whose metadata is collected
 * @param map mutable status map; "status" is set to "failed" on errors
 * @return the process result (errors recorded per entry on failure)
 */
@Async
@Transactional
public SamlMetadataProcessResult startCollectMetadata(Connection con, Map<String, String> map) {
    SamlMetadataProcessResult result = new SamlMetadataProcessResult(con.getId());
    EntitiesDescriptor descriptor;
    try {
        String metadataXML = getMetadataFromConnection(con);
        descriptor = convertMetadataXMLtoEntitiesDescriptor(metadataXML);
        // Skip reprocessing when this exact metadata (by signature hash) was handled before.
        // NOTE(review): this early return skips saveAndFlush — presumably fine
        // because nothing changed; confirm that is intended.
        String hash = getSignatureValue(descriptor.getSignature());
        Optional<SamlMetadataProcessResult> process =
            samlMetadataProcessResultRepository.findByConnectionIdAndHash(con.getId(), hash);
        if (process.isPresent())
            return result;
        updateMetadata(descriptor, con, map, result);
        result.setMetadata(metadataXML);
        // Only record the hash when every entry processed cleanly, so partial
        // failures get retried on the next run.
        if (result.allEntriesSuccessful()) {
            result.setHash(hash);
        }
    } catch (InitializationException | ComponentInitializationException | UnmarshallingException | IOException
        | MetadataParseException e) {
        map.put("status", "failed");
        LOGGER.error("Failed to collect/parse metadata: {}", e.getMessage());
        result.addProcessError(e.getMessage(), "");
    }
    samlMetadataProcessResultRepository.saveAndFlush(result);
    return result;
}
@Test
public void startCollectMetadataWithUnknownConnectionEntityIDTest() throws IOException {
    Map<String, String> map = new HashMap<>();
    // Connection whose entityId does not appear in the (valid) metadata fixture.
    Connection connection = new Connection();
    connection.setMetadataUrl("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS");
    connection.setSamlMetadata("samlmetadata");
    connection.setEntityId("entity id");
    // Serve the fixture metadata through the mocked HTTP client.
    when(httpClientMock.execute(any(HttpGet.class))).thenReturn(httpResponseMock);
    when(httpResponseMock.getEntity()).thenReturn(httpEntityMock);
    when(httpEntityMock.getContent()).thenReturn(getClass().getClassLoader().getResourceAsStream("metadata/valid-metadata.xml"));
    SamlMetadataProcessResult result = metadataProcessorServiceMock.startCollectMetadata(connection, map);
    // Processing yields a single "entity id not found" error and no updates.
    assertEquals(0, result.getTotalUpdated());
    assertEquals(1, result.getTotalErrors());
    assertEquals(0, result.getTotalProcessed());
    assertEquals(1, result.getSamlMetadataProcessErrors().size());
    assertEquals("failed", map.get("status"));
    assertEquals("EntityID aansluiting niet gevonden", result.getSamlMetadataProcessErrors().get(0).getErrorReason());
    assertNotNull(result.getSamlMetadataProcessErrors().get(0).getService());
    assertNotNull(result.getMetadata());
    assertEquals(0, connection.getCertificates().size());
}
/**
 * Returns the value mapped to {@code k}, or the configured default when the
 * key is absent. A key explicitly mapped to null still returns null.
 */
@Override
public V get(Object k) {
    // getOrDefault performs a single lookup instead of the previous
    // containsKey + get pair, with identical semantics (present-but-null
    // mappings still yield null). Calling super (assumed to be HashMap,
    // whose getOrDefault uses its internal node lookup) avoids re-entering
    // this override.
    return super.getOrDefault(k, defaultValue);
}
@Test
public void defaultToOmega() {
    // Populate the map, then verify present keys return their values and
    // absent keys fall back to the OMEGA default.
    chartis = new DefaultHashMap<>(OMEGA);
    fortioCharti();
    assertEquals("missing 1", ALPHA, chartis.get(ONE));
    assertEquals("missing 2", BETA, chartis.get(TWO));
    assertEquals("three?", OMEGA, chartis.get(THREE));
    assertEquals("four?", OMEGA, chartis.get(FOUR));
}
/**
 * Returns the configured DNS search domains, or {@code null} when none were set.
 */
@Nullable
public Iterable<String> searchDomains() {
    return searchDomains;
}
@Test
void searchDomainsBadValues() {
    // A null search-domain list must be rejected with a NullPointerException.
    assertThatExceptionOfType(NullPointerException.class)
        .isThrownBy(() -> builder.searchDomains(null));
}
/**
 * Sorts an array in the requested direction, placing null elements last.
 *
 * @param input     the array to sort; null yields null
 * @param direction a direction token accepted by SORT_DIRECTION_ASC or
 *                  SORT_DIRECTION_DESC (upper-cased before the check);
 *                  any other value yields null
 * @return the sorted array, or null for null/unrecognized arguments
 */
@Udf
public <T extends Comparable<? super T>> List<T> arraySortWithDirection(@UdfParameter(
    description = "The array to sort") final List<T> input, @UdfParameter(
    // Fixed: this description previously read "Marks the end of the series
    // (inclusive)" — a copy/paste from a range-style UDF.
    description = "The direction to sort by, ascending or descending") final String direction) {
    if (input == null || direction == null) {
        return null;
    }
    // Normalize once instead of calling toUpperCase() for each membership check.
    // NOTE(review): toUpperCase() uses the default locale; consider Locale.ROOT
    // so e.g. "asc" is still recognized under the Turkish locale.
    final String normalizedDirection = direction.toUpperCase();
    if (SORT_DIRECTION_ASC.contains(normalizedDirection)) {
        input.sort(nullsLast(naturalOrder()));
    } else if (SORT_DIRECTION_DESC.contains(normalizedDirection)) {
        input.sort(nullsLast(Collections.reverseOrder()));
    } else {
        // Unrecognized direction token: signal the bad argument with null.
        return null;
    }
    // NOTE(review): sorts the caller's list in place and returns the same
    // instance; an immutable input would throw UnsupportedOperationException.
    return input;
}
@Test
public void shouldReturnNullWithBadSortDirection() {
    // "ASCDESC" matches neither direction set, so the UDF signals the error with null.
    final List<Integer> input = Arrays.asList(1, 3, -2);
    final List<Integer> output = udf.arraySortWithDirection(input, "ASCDESC");
    assertThat(output, is(nullValue()));
}
/**
 * Invokes the Tars service resolved from the request's metadata and writes the
 * RPC result into the exchange attributes for downstream response rendering.
 * Fails fast with a 500 response when metadata is invalid, when a declared
 * parameter list has no body, or when the reflective invocation throws.
 */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    // Request body produced by the param-transform stage; may be blank.
    String body = exchange.getAttribute(Constants.PARAM_TRANSFORM);
    ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT);
    assert shenyuContext != null;
    MetaData metaData = exchange.getAttribute(Constants.META_DATA);
    if (!checkMetaData(metaData)) {
        assert metaData != null;
        LOG.error(" path is :{}, meta data have error.... {}", shenyuContext.getPath(), metaData);
        exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.META_DATA_ERROR);
        return WebFluxResultUtils.result(exchange, error);
    }
    // A declared parameter list with no body cannot be invoked.
    if (StringUtils.isNoneBlank(metaData.getParameterTypes()) && StringUtils.isBlank(body)) {
        exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.TARS_HAVE_BODY_PARAM);
        return WebFluxResultUtils.result(exchange, error);
    }
    // Pick one cached proxy at random (simple load balancing across instances).
    TarsInvokePrxList tarsInvokePrxList = ApplicationConfigCache.getInstance().get(metaData.getPath());
    int index = ThreadLocalRandom.current().nextInt(tarsInvokePrxList.getTarsInvokePrxList().size());
    Object prx = tarsInvokePrxList.getTarsInvokePrxList().get(index).getInvokePrx();
    Method method = tarsInvokePrxList.getMethod();
    CompletableFuture future;
    try {
        future = (CompletableFuture) method
                .invoke(prx, PrxInfoUtil.getParamArray(tarsInvokePrxList.getParamTypes(), tarsInvokePrxList.getParamNames(), body));
    } catch (Exception e) {
        LOG.error("Invoke tars error", e);
        exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.TARS_INVOKE);
        return WebFluxResultUtils.result(exchange, error);
    }
    return Mono.fromFuture(future.thenApply(ret -> {
        // Normalize a null RPC result so downstream response writing has a value.
        if (Objects.isNull(ret)) {
            ret = Constants.TARS_RPC_RESULT_EMPTY;
        }
        exchange.getAttributes().put(Constants.RPC_RESULT, ret);
        exchange.getAttributes().put(Constants.CLIENT_RESPONSE_RESULT_TYPE, ResultEnum.SUCCESS.getName());
        return ret;
    })).onErrorMap(m -> new ShenyuException("failed to invoke tars")).then(chain.execute(exchange));
}
// A body whose parameters do not match the target method signature is expected
// to surface as an IllegalArgumentException during the reactive invocation.
@Test
public void testTarsPluginWithArgumentTypeMissMatch() {
    ShenyuContext context = mock(ShenyuContext.class);
    exchange.getAttributes().put(Constants.CONTEXT, context);
    exchange.getAttributes().put(Constants.META_DATA, metaData);
    exchange.getAttributes().put(Constants.PARAM_TRANSFORM, "{\"param1\":1,\"param2\":2}");
    when(chain.execute(exchange)).thenReturn(Mono.empty());
    RuleData data = mock(RuleData.class);
    SelectorData selectorData = mock(SelectorData.class);
    assertThrows(IllegalArgumentException.class, () -> StepVerifier.create(tarsPluginUnderTest.doExecute(exchange, chain, selectorData, data)).expectSubscription().verifyComplete());
}
public static void copyProperties(Object src, Object dst, String... ignoreFields) { Class srcClazz = src.getClass(); Class distClazz = dst.getClass(); Method[] methods = distClazz.getMethods(); List<String> ignoreFiledList = Arrays.asList(ignoreFields); for (Method dstMethod : methods) { // 遍历目标对象的方法 if (Modifier.isStatic(dstMethod.getModifiers()) || !ReflectUtils.isBeanPropertyReadMethod(dstMethod)) { // 不是static方法, 是getter方法 continue; } String propertyName = ReflectUtils.getPropertyNameFromBeanReadMethod(dstMethod); if (ignoreFiledList.contains(propertyName)) { // 忽略字段 continue; } Class dstReturnType = dstMethod.getReturnType(); try { // 同时目标字段还需要有set方法 Method dstSetterMethod = ReflectUtils.getPropertySetterMethod(distClazz, propertyName, dstReturnType); if (dstSetterMethod != null) { // 再检查原始对象方法 Method srcGetterMethod = ReflectUtils.getPropertyGetterMethod(srcClazz, propertyName); // 原始字段有getter方法 Class srcReturnType = srcGetterMethod.getReturnType(); if (srcReturnType.equals(dstReturnType)) { // 原始字段和目标字段返回类型一样 Object val = srcGetterMethod.invoke(src); // 从原始对象读取值 if (val != null) { dstSetterMethod.invoke(dst, val); // 设置到目标对象 } } } } catch (Exception ignore) { // ignore 下一循环 } } }
// Copies all bean properties except "alias", which is explicitly excluded.
// NOTE(review): method name contains a typo ("Propterties") — kept as-is.
@Test
public void testCopyPropterties() throws Exception {
    TestBean bean = new TestBean();
    bean.setAlias("aaa:1.0.0");
    List<TestSubBean> subBeans = new ArrayList<TestSubBean>();
    TestSubBean sub = new TestSubBean();
    sub.setName("xxxxxx");
    sub.setParameter("maaaaak", "maaaav");
    subBeans.add(sub);
    bean.setSubBeans(subBeans);
    TestOtherBean otherBean = new TestOtherBean();
    BeanUtils.copyProperties(bean, otherBean, "alias");
    Assert.assertEquals(bean.getHeartbeat(), otherBean.getHeartbeat());
    Assert.assertFalse(bean.getAlias().equals(otherBean.getAlias()));
    Assert.assertEquals(bean.getSubBeans(), otherBean.getSubBeans());
    Assert.assertEquals(bean.isRegister(), otherBean.isRegister());
}
/**
 * Translates a MessagesRequest into an export command, pinning the request's
 * time range to absolute bounds and applying optional attributes only when present.
 */
public ExportMessagesCommand buildFromRequest(MessagesRequest request) {
    ExportMessagesCommand.Builder commandBuilder = ExportMessagesCommand.builder()
            .timeRange(toAbsolute(request.timeRange()))
            .queryString(request.queryString())
            .streams(request.streams())
            .fieldsInOrder(request.fieldsInOrder())
            .chunkSize(request.chunkSize());
    // Optional request attributes: skipped entirely when absent.
    request.timeZone().ifPresent(zone -> commandBuilder.timeZone(zone));
    request.limit().ifPresent(limit -> commandBuilder.limit(limit));
    return commandBuilder.build();
}
// A relative time range in the request must arrive as an AbsoluteRange in the command.
@Test
void convertsTimeRangeToAbsolute() {
    RelativeRange relative = relativeRange(100);
    MessagesRequest request = MessagesRequest.builder().timeRange(relative).build();
    ExportMessagesCommand command = sut.buildFromRequest(request);
    assertThat(command.timeRange()).isInstanceOf(AbsoluteRange.class);
}
/**
 * Converts the value to an epoch-based Long: nanoseconds since epoch when the
 * conversion mode is NANOSECONDS, milliseconds otherwise. The result is shifted
 * by the offset difference between the configured format time zone and the JVM
 * default time zone. Returns null for a null timestamp.
 */
@Override
public Long getInteger( Object object ) throws KettleValueException {
    Timestamp timestamp = getTimestamp( object );
    if ( timestamp == null ) {
        return null;
    }
    TimeZone defaultTimeZone = TimeZone.getDefault();
    TimeZone currentZone = getDateFormatTimeZone();
    long milliseconds = timestamp.getTime();
    // Offset delta between the configured zone and the JVM default at this instant.
    int timezoneDifference = currentZone.getOffset(milliseconds) - defaultTimeZone.getOffset(milliseconds);
    if ( Const.KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS.equalsIgnoreCase( conversionMode ) ) {
        // Nanosecond mode: whole epoch seconds scaled to nanos, plus the
        // timestamp's sub-second nanos, plus the zone delta in nanos.
        long seconds = TimeUnit.SECONDS.convert( milliseconds, TimeUnit.MILLISECONDS );
        long nanos = timestamp.getNanos();
        nanos += TimeUnit.NANOSECONDS.convert( timezoneDifference, TimeUnit.MILLISECONDS );
        return seconds * 1000000000L + nanos;
    } else {
        // Default (millisecond) mode.
        return milliseconds + timezoneDifference;
    }
}
// In millisecond conversion mode, getInteger returns epoch milliseconds
// (sub-millisecond nanos are dropped by Timestamp.getTime()).
@Test
public void testConvertTimestampToInteger_Milliseconds() throws KettleValueException {
    System.setProperty( Const.KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE, Const.KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS );
    ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp();
    long result = valueMetaTimestamp.getInteger( TIMESTAMP_WITH_NANOSECONDS );
    assertEquals( 1567308896123L, result );
}
/**
 * Fetches a page of permissions, filtered by role when one is supplied;
 * a blank role matches every row. Never returns null — a missing page is
 * normalized into an empty result.
 */
@Override
public Page<PermissionInfo> getPermissions(String role, int pageNo, int pageSize) {
    AuthPaginationHelper<PermissionInfo> paginationHelper = createPaginationHelper();
    String sqlCountRows = "SELECT count(*) FROM permissions WHERE ";
    String sqlFetchRows = "SELECT role,resource,action FROM permissions WHERE ";
    String where;
    List<String> params;
    if (StringUtils.isNotBlank(role)) {
        where = " role= ? ";
        params = Collections.singletonList(role);
    } else {
        // No role filter: match everything with a tautology.
        where = " 1=1 ";
        params = new ArrayList<>();
    }
    Page<PermissionInfo> pageInfo = paginationHelper.fetchPage(sqlCountRows + where, sqlFetchRows + where,
            params.toArray(), pageNo, pageSize, PERMISSION_ROW_MAPPER);
    if (pageInfo == null) {
        pageInfo = new Page<>();
        pageInfo.setTotalCount(0);
        pageInfo.setPageItems(new ArrayList<>());
    }
    return pageInfo;
}
// Smoke test: fetching permissions for a role must return a non-null page.
@Test
void testGetPermissions() {
    String role = "role";
    Page<PermissionInfo> permissions = embeddedPermissionPersistService.getPermissions(role, 1, 10);
    assertNotNull(permissions);
}
/**
 * Builds Vert.x JKS key store options from the given SSL properties. Returns
 * empty when no key store location is configured. With a non-empty alias the
 * key store is loaded so that only that entry is used; otherwise the options
 * point directly at the key store path.
 */
public static Optional<JksOptions> buildJksKeyStoreOptions(
    final Map<String, String> props,
    final Optional<String> alias
) {
  final String location = getKeyStoreLocation(props);
  final String keyStorePassword = getKeyStorePassword(props);
  final String keyPassword = getKeyPassword(props);
  if (Strings.isNullOrEmpty(location)) {
    // No key store configured at all.
    return Optional.empty();
  }
  // Treat an empty alias the same as an absent one.
  final Optional<String> usableAlias = alias.filter(a -> !a.isEmpty());
  final JksOptions jksOptions;
  if (usableAlias.isPresent()) {
    jksOptions = buildJksOptions(
        loadJksKeyStore(location, keyStorePassword, keyPassword, usableAlias.get()),
        keyStorePassword
    );
  } else {
    jksOptions = buildJksOptions(location, keyStorePassword);
  }
  return Optional.of(jksOptions);
}
// When an alias plus a key password are supplied, the resulting options carry
// an in-memory key store value (not a path) protected by the store password.
@Test
public void shouldBuildKeyStoreJksOptionsWithKeyPasswordWhenAliasIsPassed() {
    // When
    final Optional<JksOptions> jksOptions = VertxSslOptionsFactory.buildJksKeyStoreOptions(
        ImmutableMap.of(
            SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,
            SERVER_KEY_STORE.keyStoreProps().get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG),
            SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,
            SERVER_KEY_STORE.keyStoreProps().get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG),
            SslConfigs.SSL_KEY_PASSWORD_CONFIG,
            SERVER_KEY_STORE.keyStoreProps().get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)
        ),
        Optional.of("localhost")
    );
    // Then
    assertThat(jksOptions.get().getValue(), not(nullValue()));
    assertThat(jksOptions.get().getPassword(), is(SERVER_KEY_STORE.keyStoreProps().get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)));
    // When a key password is set, the Vert.x options are built without a keyStore location
    assertThat(jksOptions.get().getPath(), is(nullValue()));
}
/**
 * Removes every occurrence of the escape character from the identifier,
 * e.g. {@code `record`} becomes {@code record}.
 */
public static String unescapeIdentifier(String identifier) {
    return ESCAPE_CHAR_PATTERN.matcher(identifier).replaceAll("");
}
// Backtick-escaped identifiers are unescaped; plain identifiers pass through unchanged.
@Test
public void testUnescapeIdentifier() {
    assertEquals(PdlParseUtils.unescapeIdentifier("`record`"), "record");
    assertEquals(PdlParseUtils.unescapeIdentifier("notEscaped"), "notEscaped");
}
/**
 * Connects to the given ZooKeeper ensemble with a 30-second session timeout,
 * using a ZKWatcher to receive connection events.
 *
 * @param string ZooKeeper connect string (host:port list)
 * @throws IOException if the network connection cannot be established
 */
public ZKClient(String string) throws IOException {
    zkClient = new ZooKeeper(string, 30000, new ZKWatcher());
}
// Exercises the ZK client against the "/some/test" path.
// NOTE(review): behavior depends entirely on the test(...) helper defined elsewhere — confirm what it asserts.
@Test
public void testzkClient() throws Exception {
    test("/some/test");
}
/**
 * Reads a private key from the given (optionally password-protected) key file.
 * Delegates to the three-argument overload with its boolean flag set to true.
 */
protected static PrivateKey toPrivateKey(File keyFile, String keyPassword) throws NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeySpecException, InvalidAlgorithmParameterException, KeyException, IOException {
    // NOTE(review): the flag's exact meaning is defined by the 3-arg overload — see its declaration.
    return toPrivateKey(keyFile, keyPassword, true);
}
// An unencrypted PKCS#1 DSA key combined with an empty password must fail with IOException.
@Test
public void testPkcs1UnencryptedDsaEmptyPassword() throws Exception {
    assertThrows(IOException.class, new Executable() {
        @Override
        public void execute() throws Throwable {
            PrivateKey key = SslContext.toPrivateKey(
                new File(getClass().getResource("dsa_pkcs1_unencrypted.key").getFile()), "");
        }
    });
}
/**
 * Returns a lazy stream of blocks: each raw buffer is deserialized into a
 * Block only as the stream is consumed.
 */
public Stream<Block> stream() {
    return streamBuffers().map(buffer -> serializer.makeBlock(buffer));
}
// Streams the first-100k-blocks fixture and counts transactions across all blocks.
@Test
public void streamFirst100kCountTransactions() {
    File blockFile = new File(getClass().getResource("../core/first-100k-blocks.dat").getFile());
    BlockFileLoader loader = new BlockFileLoader(BitcoinNetwork.MAINNET, Collections.singletonList(blockFile));
    long transactionCount = loader.stream()
        .map(Block::getTransactions)
        .filter(Objects::nonNull)
        .mapToLong(Collection::size)
        .sum();
    assertEquals(446, transactionCount);
}
@Override
public long get(long key1, long key2) {
    // Delegates to the base implementation for the composite 16-byte key.
    return super.get0(key1, key2);
}
// Inserting past the initial capacity forces growth via the auxiliary
// allocator; all entries must stay retrievable and the auxiliary manager must
// end with zero used memory (i.e. everything it allocated was released).
@Test
public void testAuxAllocator() {
    final HeapMemoryManager mgr2 = new HeapMemoryManager(memMgr);
    final int initialCapacity = 4;
    final int factor = 13;
    hsa = new HashSlotArray16byteKeyImpl(0, memMgr, mgr2.getAllocator(), VALUE_LENGTH, initialCapacity, DEFAULT_LOAD_FACTOR);
    hsa.gotoNew();
    for (int i = 1; i <= 2 * initialCapacity; i++) {
        insert(i, factor * i);
    }
    for (int i = 1; i <= 2 * initialCapacity; i++) {
        verifyValue(i, factor * i, hsa.get(i, factor * i));
    }
    assertEquals(0, mgr2.getUsedMemory());
}
/**
 * Opens an upload stream for the file: writes are buffered into part-sized
 * segments which the multipart stream uploads; getStatus() exposes the
 * response entity of the completed upload.
 */
@Override
public HttpResponseOutputStream<FileEntity> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final MultipartOutputStream proxy = new MultipartOutputStream(file, status);
    return new HttpResponseOutputStream<FileEntity>(new MemorySegementingOutputStream(proxy, partsize), new BrickAttributesFinderFeature(session), status) {
        @Override
        public FileEntity getStatus() {
            return proxy.getResponse();
        }
    };
}
// Writing an empty body (unknown length, -1) must still create the remote
// file; it must be findable, zero-sized, readable and deletable afterwards.
@Test
public void testWriteZeroLength() throws Exception {
    final BrickMultipartWriteFeature feature = new BrickMultipartWriteFeature(session);
    final Path container = new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final byte[] content = RandomUtils.nextBytes(0);
    final TransferStatus status = new TransferStatus();
    status.setLength(-1L);
    final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final HttpResponseOutputStream<FileEntity> out = feature.write(file, status, new DisabledConnectionCallback());
    final ByteArrayInputStream in = new ByteArrayInputStream(content);
    assertEquals(content.length, IOUtils.copyLarge(in, out));
    in.close();
    out.close();
    assertTrue(new DefaultFindFeature(session).find(file));
    final PathAttributes attributes = new BrickAttributesFinderFeature(session).find(file);
    assertEquals(content.length, attributes.getSize());
    final byte[] compare = new byte[content.length];
    final InputStream stream = new BrickReadFeature(session).read(file, new TransferStatus().withLength(content.length), new DisabledConnectionCallback());
    IOUtils.readFully(stream, compare);
    stream.close();
    assertArrayEquals(content, compare);
    new BrickDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override
public V takeFirst() {
    // Blocking variant: waits on the async takeFirst and unwraps its result.
    return get(takeFirstAsync());
}
// takeFirst() must block until the delayed producer adds an element (~3s),
// then return it; the elapsed-time assertion proves the call actually blocked.
// NOTE(review): the scheduled executor is never shut down — leaks a thread per run.
@Test
public void testTakeFirst() {
    final RScoredSortedSet<Integer> queue1 = redisson.getScoredSortedSet("queue:pollany");
    Executors.newSingleThreadScheduledExecutor().schedule(() -> {
        RScoredSortedSet<Integer> queue2 = redisson.getScoredSortedSet("queue:pollany1");
        RScoredSortedSet<Integer> queue3 = redisson.getScoredSortedSet("queue:pollany2");
        queue1.add(0.1, 1);
    }, 3, TimeUnit.SECONDS);
    long s = System.currentTimeMillis();
    int l = queue1.takeFirst();
    Assertions.assertEquals(1, l);
    Assertions.assertTrue(System.currentTimeMillis() - s > 2000);
}
public void begin(InterpretationContext ec, String localName, Attributes attributes) { if ("substitutionProperty".equals(localName)) { addWarn("[substitutionProperty] element has been deprecated. Please use the [property] element instead."); } String name = attributes.getValue(NAME_ATTRIBUTE); String value = attributes.getValue(VALUE_ATTRIBUTE); String scopeStr = attributes.getValue(SCOPE_ATTRIBUTE); Scope scope = ActionUtil.stringToScope(scopeStr); if (checkFileAttributeSanity(attributes)) { String file = attributes.getValue(FILE_ATTRIBUTE); file = ec.subst(file); try { FileInputStream istream = new FileInputStream(file); loadAndSetProperties(ec, istream, scope); } catch (FileNotFoundException e) { addError("Could not find properties file [" + file + "].", e); } catch (IOException e1) { addError("Could not read properties file [" + file + "].", e1); } } else if (checkResourceAttributeSanity(attributes)) { String resource = attributes.getValue(RESOURCE_ATTRIBUTE); resource = ec.subst(resource); URL resourceURL = Loader.getResourceBySelfClassLoader(resource); if (resourceURL == null) { addError("Could not find resource [" + resource + "]."); } else { try { InputStream istream = resourceURL.openStream(); loadAndSetProperties(ec, istream, scope); } catch (IOException e) { addError("Could not read resource file [" + resource + "].", e); } } } else if (checkValueNameAttributesSanity(attributes)) { value = RegularEscapeUtil.basicEscape(value); // now remove both leading and trailing spaces value = value.trim(); value = ec.subst(value); ActionUtil.setProperty(ec, name, value, scope); } else { addError(INVALID_ATTRIBUTES); } }
// A property element with a name but no value is invalid: exactly one status
// entry must be recorded and it must be an error.
@Test
public void noValue() {
    atts.setValue("name", "v1");
    propertyAction.begin(ec, null, atts);
    assertEquals(1, context.getStatusManager().getCount());
    assertTrue(checkError());
}
/**
 * Infers the field list of a Mongo collection: preferring the collection's
 * JSON-schema validator when one exists, otherwise sampling a single document.
 * For change-stream reads ({@code stream=true}) field keys are prefixed with
 * {@code fullDocument.} and the standard change-event metadata columns are appended.
 *
 * @throws IllegalArgumentException if the collection does not exist
 * @throws IllegalStateException    if sampling is needed but the collection is empty
 */
Map<String, DocumentField> readFields(String[] externalNames, String dataConnectionName, Map<String, String> options, boolean stream) {
    // externalNames is either [collection] or [database, collection].
    String collectionName = externalNames.length == 2 ? externalNames[1] : externalNames[0];
    String databaseName = Options.getDatabaseName(nodeEngine, externalNames, dataConnectionName);
    Map<String, DocumentField> fields = new LinkedHashMap<>();
    try (MongoClient client = connect(dataConnectionName, options)) {
        requireNonNull(client);
        ResourceChecks resourceChecks = readExistenceChecksFlag(options);
        if (resourceChecks.isEverPerformed()) {
            checkDatabaseAndCollectionExists(client, databaseName, collectionName);
        }
        MongoDatabase database = client.getDatabase(databaseName);
        List<Document> collections = database.listCollections()
                .filter(eq("name", collectionName))
                .into(new ArrayList<>());
        if (collections.isEmpty()) {
            // Include the available names in the error to help spot typos.
            ArrayList<String> list = database.listCollectionNames().into(new ArrayList<>());
            throw new IllegalArgumentException("collection " + collectionName + " was not found, maybe you mean: " + list);
        }
        Document collectionInfo = collections.get(0);
        // Prefer the JSON-schema validator attached to the collection, if any.
        Document properties = getIgnoringNulls(collectionInfo, "options", "validator", "$jsonSchema", "properties");
        if (properties != null) {
            for (Entry<String, Object> property : properties.entrySet()) {
                Document props = (Document) property.getValue();
                BsonType bsonType = getBsonType(props);
                String key = property.getKey();
                if (stream) {
                    key = "fullDocument." + key;
                }
                fields.put(key, new DocumentField(bsonType, key));
            }
        } else {
            // fall back to sampling
            ArrayList<Document> samples = database.getCollection(collectionName).find().limit(1).into(new ArrayList<>());
            if (samples.isEmpty()) {
                throw new IllegalStateException("Cannot infer schema of collection " + collectionName + ", no documents found");
            }
            Document sample = samples.get(0);
            for (Entry<String, Object> entry : sample.entrySet()) {
                // Null values carry no type information; skip them.
                if (entry.getValue() == null) {
                    continue;
                }
                String key = entry.getKey();
                if (stream) {
                    key = "fullDocument." + key;
                }
                DocumentField field = new DocumentField(resolveTypeFromJava(entry.getValue()), key);
                fields.put(key, field);
            }
        }
        if (stream) {
            // Change-stream reads also expose the standard change-event metadata columns.
            fields.put("operationType", new DocumentField(BsonType.STRING, "operationType"));
            fields.put("resumeToken", new DocumentField(BsonType.STRING, "resumeToken"));
            fields.put("wallTime", new DocumentField(BsonType.DATE_TIME, "wallTime"));
            fields.put("ts", new DocumentField(BsonType.TIMESTAMP, "ts"));
            fields.put("clusterTime", new DocumentField(BsonType.TIMESTAMP, "clusterTime"));
        }
    }
    return fields;
}
// Creates a collection with a JSON-schema validator and checks that the field
// resolver reads types from the schema: plain bsonType entries map directly,
// string-only enums map to STRING, and mixed/union types fall back to DOCUMENT.
// (The text-block schema literal below is kept byte-for-byte — its whitespace
// is part of the parsed string.)
@Test public void testResolvesFieldsViaSchema() { try (MongoClient client = MongoClients.create(mongoContainer.getConnectionString())) { String databaseName = "testDatabase"; String collectionName = "people"; MongoDatabase testDatabase = client.getDatabase(databaseName); CreateCollectionOptions options = new CreateCollectionOptions(); ValidationOptions validationOptions = new ValidationOptions(); validationOptions.validator(BsonDocument.parse( """ { $jsonSchema: { bsonType: "object", title: "Person Object Validation", required: [ "firstName", "lastName", "birthYear" ], properties: {\ "firstName": { "bsonType": "string" } "lastName": { "bsonType": "string" } "birthYear": { "bsonType": "int" } "title": { "enum": [ "Bsc", "Msc", "PhD" ] } "intOrString": { "enum": [ "String", 1 ] } "unionType": { "bsonType": [ 'int', 'string' ] } } } } """ )); options.validationOptions(validationOptions); testDatabase.createCollection(collectionName, options); FieldResolver resolver = new FieldResolver(null); Map<String, String> readOpts = new HashMap<>(); readOpts.put("connectionString", mongoContainer.getConnectionString()); readOpts.put("database", databaseName); Map<String, DocumentField> fields = resolver.readFields(new String[]{databaseName, collectionName}, null, readOpts, false); assertThat(fields) .containsOnlyKeys("firstName", "lastName", "birthYear", "title", "unionType", "intOrString"); assertThat(fields.get("lastName").columnType).isEqualTo(BsonType.STRING); assertThat(fields.get("birthYear").columnType).isEqualTo(BsonType.INT32); assertThat(fields.get("title").columnType).isEqualTo(BsonType.STRING); assertThat(fields.get("intOrString").columnType).isEqualTo(BsonType.DOCUMENT); assertThat(fields.get("unionType").columnType).isEqualTo(BsonType.DOCUMENT); } }
/**
 * Builds key/value metadata for a JSON mapping: each resolved mapping field
 * becomes a table field keyed by its query path, a default OBJECT field is
 * appended for the whole key or value when needed, and JSON query/upsert
 * target descriptors are attached.
 */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
protected KvMetadata resolveMetadataInternal() { throw new UnsupportedOperationException(); }
// For both key ("__key") and value ("this") mappings, metadata must contain the
// explicit field plus a hidden default OBJECT field for the whole key/value,
// with the JSON query/upsert target descriptors attached.
@Test
@Parameters({
    "true, __key",
    "false, this"
})
public void test_resolveMetadata(boolean key, String prefix) {
    KvMetadata metadata = INSTANCE.resolveMetadata(
        key,
        singletonList(field("field", QueryDataType.INT, prefix + ".field")),
        emptyMap(),
        null
    );
    assertThat(metadata.getFields()).containsExactly(
        new MapTableField("field", QueryDataType.INT, false, QueryPath.create(prefix + ".field")),
        new MapTableField(prefix, QueryDataType.OBJECT, true, QueryPath.create(prefix))
    );
    assertThat(metadata.getQueryTargetDescriptor()).isEqualTo(HazelcastJsonQueryTargetDescriptor.INSTANCE);
    assertThat(metadata.getUpsertTargetDescriptor()).isEqualTo(HazelcastJsonUpsertTargetDescriptor.INSTANCE);
}