Columns: focal_method — string, lengths 13 to 60.9k; test_case — string, lengths 25 to 109k.
public static AssertionResult getResult(SMIMEAssertionTestElement testElement, SampleResult response, String name) { checkForBouncycastle(); AssertionResult res = new AssertionResult(name); try { MimeMessage msg; final int msgPos = testElement.getSpecificMessagePositionAsInt(); if (msgPos < 0){ // means counting from end SampleResult[] subResults = response.getSubResults(); final int pos = subResults.length + msgPos; log.debug("Getting message number: {} of {}", pos, subResults.length); msg = getMessageFromResponse(response,pos); } else { log.debug("Getting message number: {}", msgPos); msg = getMessageFromResponse(response, msgPos); } SMIMESignedParser signedParser = null; if(log.isDebugEnabled()) { log.debug("Content-type: {}", msg.getContentType()); } if (msg.isMimeType("multipart/signed")) { // $NON-NLS-1$ MimeMultipart multipart = (MimeMultipart) msg.getContent(); signedParser = new SMIMESignedParser(new BcDigestCalculatorProvider(), multipart); } else if (msg.isMimeType("application/pkcs7-mime") // $NON-NLS-1$ || msg.isMimeType("application/x-pkcs7-mime")) { // $NON-NLS-1$ signedParser = new SMIMESignedParser(new BcDigestCalculatorProvider(), msg); } if (null != signedParser) { log.debug("Found signature"); if (testElement.isNotSigned()) { res.setFailure(true); res.setFailureMessage("Mime message is signed"); } else if (testElement.isVerifySignature() || !testElement.isSignerNoCheck()) { res = verifySignature(testElement, signedParser, name); } } else { log.debug("Did not find signature"); if (!testElement.isNotSigned()) { res.setFailure(true); res.setFailureMessage("Mime message is not signed"); } } } catch (MessagingException e) { String msg = "Cannot parse mime msg: " + e.getMessage(); log.warn(msg, e); res.setFailure(true); res.setFailureMessage(msg); } catch (CMSException e) { res.setFailure(true); res.setFailureMessage("Error reading the signature: " + e.getMessage()); } catch (SMIMEException e) { res.setFailure(true); res.setFailureMessage("Cannot extract signed body part from signature: " + e.getMessage()); } catch (IOException e) { // should never happen log.error("Cannot read mime message content: {}", e.getMessage(), e); res.setError(true); res.setFailureMessage(e.getMessage()); } return res; }
@Test public void testSignerSignerDN() { SMIMEAssertionTestElement testElement = new SMIMEAssertionTestElement(); testElement.setSignerCheckConstraints(true); String signerDn = "C=AU, L=Wherever, O=Example Ltd, E=alice@a.example.com, CN=alice example"; testElement .setSignerDn(signerDn); AssertionResult result = SMIMEAssertion.getResult(testElement, parent, "Test"); assertFalse(result.isError(), "Result should not be an error"); assertFalse(result.isFailure(), "Result should not fail: " + result.getFailureMessage()); }
@PostMapping("") public ShenyuAdminResult createTag(@Valid @RequestBody final TagDTO tagDTO) { Integer createCount = tagService.create(tagDTO); return ShenyuAdminResult.success(ShenyuResultMessage.CREATE_SUCCESS, createCount); }
@Test public void testCreateTag() throws Exception { TagDTO tagDTO = buildTagDTO(); given(tagService.create(tagDTO)).willReturn(1); this.mockMvc.perform(MockMvcRequestBuilders.post("/tag") .contentType(MediaType.APPLICATION_JSON) .content(GsonUtils.getInstance().toJson(tagDTO))) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.CREATE_SUCCESS))) .andReturn(); }
@Override public ClientDetailsEntity saveNewClient(ClientDetailsEntity client) { if (client.getId() != null) { // if it's not null, it's already been saved, this is an error throw new IllegalArgumentException("Tried to save a new client with an existing ID: " + client.getId()); } if (client.getRegisteredRedirectUri() != null) { for (String uri : client.getRegisteredRedirectUri()) { if (blacklistedSiteService.isBlacklisted(uri)) { throw new IllegalArgumentException("Client URI is blacklisted: " + uri); } } } // assign a random clientid if it's empty // NOTE: don't assign a random client secret without asking, since public clients have no secret if (Strings.isNullOrEmpty(client.getClientId())) { client = generateClientId(client); } // make sure that clients with the "refresh_token" grant type have the "offline_access" scope, and vice versa ensureRefreshTokenConsistency(client); // make sure we don't have both a JWKS and a JWKS URI ensureKeyConsistency(client); // check consistency when using HEART mode checkHeartMode(client); // timestamp this to right now client.setCreatedAt(new Date()); // check the sector URI checkSectorIdentifierUri(client); ensureNoReservedScopes(client); ClientDetailsEntity c = clientRepository.saveClient(client); statsService.resetCache(); return c; }
@Test(expected = IllegalArgumentException.class) public void heartMode_noJwks() { Mockito.when(config.isHeartMode()).thenReturn(true); ClientDetailsEntity client = new ClientDetailsEntity(); Set<String> grantTypes = new LinkedHashSet<>(); grantTypes.add("authorization_code"); client.setGrantTypes(grantTypes); client.setTokenEndpointAuthMethod(AuthMethod.PRIVATE_KEY); client.setRedirectUris(Sets.newHashSet("https://foo.bar/")); client.setJwks(null); client.setJwksUri(null); service.saveNewClient(client); }
@Override public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { throw new ReadOnlyBufferException(); }
@Test public void shouldRejectSetBytes4() { assertThrows(UnsupportedOperationException.class, new Executable() { @Override public void execute() { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (ByteBuf) null, 0, 0); } }); }
@Override public void applySchemaChange(SchemaChangeEvent schemaChangeEvent) throws SchemaEvolveException { if (!isOpened) { isOpened = true; catalog.open(); } SchemaChangeEventVisitor.visit( schemaChangeEvent, addColumnEvent -> { applyAddColumn(addColumnEvent); return null; }, alterColumnTypeEvent -> { applyAlterColumnType(alterColumnTypeEvent); return null; }, createTableEvent -> { applyCreateTable(createTableEvent); return null; }, dropColumnEvent -> { applyDropColumn(dropColumnEvent); return null; }, dropTableEvent -> { throw new UnsupportedSchemaChangeEventException(dropTableEvent); }, renameColumnEvent -> { applyRenameColumn(renameColumnEvent); return null; }, truncateTableEvent -> { throw new UnsupportedSchemaChangeEventException(truncateTableEvent); }); }
@Test public void testAddColumn() throws Exception { TableId tableId = TableId.parse("test.tbl2"); Schema schema = Schema.newBuilder() .physicalColumn("col1", new IntType()) .primaryKey("col1") .build(); CreateTableEvent createTableEvent = new CreateTableEvent(tableId, schema); metadataApplier.applySchemaChange(createTableEvent); AddColumnEvent addColumnEvent = new AddColumnEvent( tableId, Arrays.asList( new AddColumnEvent.ColumnWithPosition( Column.physicalColumn("col2", new DecimalType(20, 5))), new AddColumnEvent.ColumnWithPosition( Column.physicalColumn("col3", new SmallIntType())))); metadataApplier.applySchemaChange(addColumnEvent); StarRocksTable actualTable = catalog.getTable(tableId.getSchemaName(), tableId.getTableName()).orElse(null); assertNotNull(actualTable); List<StarRocksColumn> columns = new ArrayList<>(); columns.add( new StarRocksColumn.Builder() .setColumnName("col1") .setOrdinalPosition(0) .setDataType("int") .setNullable(true) .build()); columns.add( new StarRocksColumn.Builder() .setColumnName("col2") .setOrdinalPosition(1) .setDataType("decimal") .setColumnSize(20) .setDecimalDigits(5) .setNullable(true) .build()); columns.add( new StarRocksColumn.Builder() .setColumnName("col3") .setOrdinalPosition(2) .setDataType("smallint") .setNullable(true) .build()); StarRocksTable expectTable = new StarRocksTable.Builder() .setDatabaseName(tableId.getSchemaName()) .setTableName(tableId.getTableName()) .setTableType(StarRocksTable.TableType.PRIMARY_KEY) .setColumns(columns) .setTableKeys(schema.primaryKeys()) .setDistributionKeys(schema.primaryKeys()) .setNumBuckets(10) .setTableProperties(Collections.singletonMap("replication_num", "5")) .build(); assertEquals(expectTable, actualTable); }
@Override public void releaseAllResources() throws IOException { synchronized (lock) { for (ResultSubpartitionView view : allViews.values()) { view.releaseAllResources(); } allViews.clear(); for (ResultSubpartitionView view : unregisteredAvailableViews) { view.releaseAllResources(); } unregisteredAvailableViews.clear(); for (Tuple2<ResultSubpartition.BufferAndBacklog, Integer> tuple2 : cachedBuffers) { tuple2.f0.buffer().recycleBuffer(); } cachedBuffers.clear(); isReleased = true; } }
@Test void testReleaseAllResources() throws IOException { assertThat(view.isReleased()).isFalse(); assertThat(view0.isReleased()).isFalse(); assertThat(view1.isReleased()).isFalse(); assertThat(buffers0).allMatch(x -> !x.isRecycled()); assertThat(buffers1).allMatch(x -> !x.isRecycled()); // Verifies that cached buffers are also recycled. view0.notifyDataAvailable(); view.releaseAllResources(); assertThat(view.isReleased()).isTrue(); assertThat(view0.isReleased()).isTrue(); assertThat(view1.isReleased()).isTrue(); assertThat(buffers0).allMatch(Buffer::isRecycled); assertThat(buffers1).allMatch(Buffer::isRecycled); }
public static SegmentAssignmentStrategy getSegmentAssignmentStrategy(HelixManager helixManager, TableConfig tableConfig, String assignmentType, InstancePartitions instancePartitions) { String assignmentStrategy = null; TableType currentTableType = tableConfig.getTableType(); // TODO: Handle segment assignment strategy in future for CONSUMING segments in follow up PR // See https://github.com/apache/pinot/issues/9047 // Accommodate new changes for assignment strategy Map<String, SegmentAssignmentConfig> segmentAssignmentConfigMap = tableConfig.getSegmentAssignmentConfigMap(); if (tableConfig.isDimTable()) { // Segment Assignment Strategy for DIM tables Preconditions.checkState(currentTableType == TableType.OFFLINE, "All Servers Segment assignment Strategy is only applicable to Dim OfflineTables"); SegmentAssignmentStrategy segmentAssignmentStrategy = new AllServersSegmentAssignmentStrategy(); segmentAssignmentStrategy.init(helixManager, tableConfig); return segmentAssignmentStrategy; } else { // Try to determine segment assignment strategy from table config if (segmentAssignmentConfigMap != null) { SegmentAssignmentConfig segmentAssignmentConfig; // Use the pre defined segment assignment strategy segmentAssignmentConfig = segmentAssignmentConfigMap.get(assignmentType.toUpperCase()); // Segment assignment config is only applicable to offline tables and completed segments of real time tables if (segmentAssignmentConfig != null) { assignmentStrategy = segmentAssignmentConfig.getAssignmentStrategy().toLowerCase(); } } } // Use the existing information to determine segment assignment strategy SegmentAssignmentStrategy segmentAssignmentStrategy; if (assignmentStrategy == null) { // Calculate numReplicaGroups and numPartitions to determine segment assignment strategy Preconditions .checkState(instancePartitions != null, "Failed to find instance partitions for segment assignment strategy"); int numReplicaGroups = instancePartitions.getNumReplicaGroups(); int numPartitions = instancePartitions.getNumPartitions(); if (numReplicaGroups == 1 && numPartitions == 1) { segmentAssignmentStrategy = new BalancedNumSegmentAssignmentStrategy(); } else { segmentAssignmentStrategy = new ReplicaGroupSegmentAssignmentStrategy(); } } else { // Set segment assignment strategy depending on strategy set in table config switch (assignmentStrategy) { case AssignmentStrategy.REPLICA_GROUP_SEGMENT_ASSIGNMENT_STRATEGY: segmentAssignmentStrategy = new ReplicaGroupSegmentAssignmentStrategy(); break; case AssignmentStrategy.BALANCE_NUM_SEGMENT_ASSIGNMENT_STRATEGY: default: segmentAssignmentStrategy = new BalancedNumSegmentAssignmentStrategy(); break; } } segmentAssignmentStrategy.init(helixManager, tableConfig); return segmentAssignmentStrategy; }
@Test public void testBalancedNumSegmentAssignmentStrategyForRealtimeTables() { Map<String, String> streamConfigs = FakeStreamConfigUtils.getDefaultLowLevelStreamConfigs().getStreamConfigsMap(); TableConfig tableConfig = new TableConfigBuilder(TableType.REALTIME).setTableName(RAW_TABLE_NAME).setStreamConfigs(streamConfigs).build(); InstancePartitions instancePartitions = new InstancePartitions(INSTANCE_PARTITIONS_NAME); instancePartitions.setInstances(0, 0, INSTANCES); SegmentAssignmentStrategy segmentAssignmentStrategy = SegmentAssignmentStrategyFactory.getSegmentAssignmentStrategy(null, tableConfig, InstancePartitionsType.COMPLETED.toString(), instancePartitions); Assert.assertNotNull(segmentAssignmentStrategy); Assert.assertTrue(segmentAssignmentStrategy instanceof BalancedNumSegmentAssignmentStrategy); }
public static NamingSelector newIpSelector(String regex) { if (regex == null) { throw new IllegalArgumentException("The parameter 'regex' cannot be null."); } return new DefaultNamingSelector(instance -> Pattern.matches(regex, instance.getIp())); }
@Test public void testNewIpSelector() { Instance ins1 = new Instance(); ins1.setIp("172.18.137.120"); Instance ins2 = new Instance(); ins2.setIp("172.18.137.121"); Instance ins3 = new Instance(); ins3.setIp("172.18.136.111"); NamingContext namingContext = mock(NamingContext.class); when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3)); NamingSelector ipSelector = NamingSelectorFactory.newIpSelector("^172\\.18\\.137.*"); NamingResult result = ipSelector.select(namingContext); List<Instance> list = result.getResult(); assertEquals(2, list.size()); assertEquals(ins1.getIp(), list.get(0).getIp()); assertEquals(ins2.getIp(), list.get(1).getIp()); }
public void updateTaskConfig(Map<String, String> taskConfig) { try { taskLifecycleLock.lock(); if (!Objects.equals(this.taskConfigReference, taskConfig)) { logger.info("Updating task '" + name + "' configuration"); taskConfigReference = taskConfig; reconfigurationNeeded = true; } } finally { taskLifecycleLock.unlock(); } }
@Test public void should_not_poll_data_without_task_config() { taskRunner.updateTaskConfig(null); assertPolledRecordsSize(0); }
@SuppressWarnings("IntLongMath") public long getExpirationDelay() { for (int i = 0; i < SHIFT.length; i++) { Node<K, V>[] timerWheel = wheel[i]; long ticks = (nanos >>> SHIFT[i]); long spanMask = SPANS[i] - 1; int start = (int) (ticks & spanMask); int end = start + timerWheel.length; int mask = timerWheel.length - 1; for (int j = start; j < end; j++) { Node<K, V> sentinel = timerWheel[(j & mask)]; Node<K, V> next = sentinel.getNextInVariableOrder(); if (next == sentinel) { continue; } long buckets = (j - start); long delay = (buckets << SHIFT[i]) - (nanos & spanMask); delay = (delay > 0) ? delay : SPANS[i]; for (int k = i + 1; k < SHIFT.length; k++) { long nextDelay = peekAhead(k); delay = Math.min(delay, nextDelay); } return delay; } } return Long.MAX_VALUE; }
@Test(dataProvider = "clock") public void getExpirationDelay_empty(long clock) { when(cache.evictEntry(any(), any(), anyLong())).thenReturn(true); timerWheel.nanos = clock; assertThat(timerWheel.getExpirationDelay()).isEqualTo(Long.MAX_VALUE); }
boolean isPublicAndStatic() { int modifiers = mainMethod.getModifiers(); return isPublic(modifiers) && isStatic(modifiers); }
@Test public void testPublicAndStaticForSelf() throws NoSuchMethodException { Method method = MainMethodFinderTest.class.getDeclaredMethod("testPublicAndStaticForSelf"); MainMethodFinder mainMethodFinder = new MainMethodFinder(); mainMethodFinder.mainMethod = method; boolean publicAndStatic = mainMethodFinder.isPublicAndStatic(); assertFalse(publicAndStatic); }
public void translate(Pipeline pipeline) { this.flinkBatchEnv = null; this.flinkStreamEnv = null; final boolean hasUnboundedOutput = PipelineTranslationModeOptimizer.hasUnboundedOutput(pipeline); if (hasUnboundedOutput) { LOG.info("Found unbounded PCollection. Switching to streaming execution."); options.setStreaming(true); } // Staged files need to be set before initializing the execution environments prepareFilesToStageForRemoteClusterExecution(options); FlinkPipelineTranslator translator; if (options.isStreaming() || options.getUseDataStreamForBatch()) { this.flinkStreamEnv = FlinkExecutionEnvironments.createStreamExecutionEnvironment(options); if (hasUnboundedOutput && !flinkStreamEnv.getCheckpointConfig().isCheckpointingEnabled()) { LOG.warn( "UnboundedSources present which rely on checkpointing, but checkpointing is disabled."); } translator = new FlinkStreamingPipelineTranslator(flinkStreamEnv, options, options.isStreaming()); if (!options.isStreaming()) { flinkStreamEnv.setRuntimeMode(RuntimeExecutionMode.BATCH); } } else { this.flinkBatchEnv = FlinkExecutionEnvironments.createBatchExecutionEnvironment(options); translator = new FlinkBatchPipelineTranslator(flinkBatchEnv, options); } // Transform replacements need to receive the finalized PipelineOptions // including execution mode (batch/streaming) and parallelism. pipeline.replaceAll(FlinkTransformOverrides.getDefaultOverrides(options)); translator.translate(pipeline); }
@Test public void shouldProvideParallelismToTransformOverrides() { FlinkPipelineOptions options = getDefaultPipelineOptions(); options.setStreaming(true); options.setRunner(FlinkRunner.class); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline p = Pipeline.create(options); // Create a transform applicable for PTransformMatchers.writeWithRunnerDeterminedSharding() // which requires parallelism p.apply(Create.of("test")).apply(TextIO.write().to("/tmp")); p = Mockito.spy(p); // If this succeeds we're ok flinkEnv.translate(p); // Verify we were using desired replacement transform ArgumentCaptor<ImmutableList> captor = ArgumentCaptor.forClass(ImmutableList.class); Mockito.verify(p).replaceAll(captor.capture()); ImmutableList<PTransformOverride> overridesList = captor.getValue(); assertThat( overridesList, hasItem( new BaseMatcher<PTransformOverride>() { @Override public void describeTo(Description description) {} @Override public boolean matches(Object actual) { if (actual instanceof PTransformOverride) { PTransformOverrideFactory overrideFactory = ((PTransformOverride) actual).getOverrideFactory(); if (overrideFactory instanceof FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory) { FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory factory = (FlinkStreamingPipelineTranslator.StreamingShardedWriteFactory) overrideFactory; return factory.options.getParallelism() > 0; } } return false; } })); }
public WorkflowDefinition addWorkflowDefinition( WorkflowDefinition workflowDef, Properties changes) { LOG.info("Adding a new workflow definition with an id [{}]", workflowDef.getWorkflow().getId()); final Workflow workflow = workflowDef.getWorkflow(); final Metadata metadata = workflowDef.getMetadata(); return withMetricLogError( () -> withRetryableTransaction( conn -> { WorkflowInfo workflowInfo = getWorkflowInfoForUpdate(conn, workflow.getId()); final long nextVersionId = workflowInfo.getLatestVersionId() + 1; // update the metadata with version info and then metadata is complete. metadata.setWorkflowVersionId(nextVersionId); TriggerUuids triggerUuids = insertMaestroWorkflowVersion(conn, metadata, workflow); PropertiesSnapshot snapshot = updateWorkflowProps( conn, workflow.getId(), metadata.getVersionAuthor(), metadata.getCreateTime(), workflowInfo.getPrevPropertiesSnapshot(), changes, new PropertiesUpdate(Type.ADD_WORKFLOW_DEFINITION)); // add new snapshot to workflowDef if (snapshot != null) { workflowDef.setPropertiesSnapshot(snapshot); } else { workflowDef.setPropertiesSnapshot(workflowInfo.getPrevPropertiesSnapshot()); } final long[] upsertRes = upsertMaestroWorkflow(conn, workflowDef); Checks.notNull( upsertRes, "the upsert result should not be null for workflow [%s]", workflow.getId()); workflowDef.setIsLatest(true); // a new version will always be latest // add default flag and modified_time and then workflowDef is complete workflowDef.setIsDefault( workflowInfo.getPrevActiveVersionId() == Constants.INACTIVE_VERSION_ID || workflowDef.getIsActive()); workflowDef.setModifyTime(upsertRes[0]); workflowDef.setInternalId(upsertRes[1]); if (workflowDef.getIsActive()) { workflowInfo.setNextActiveWorkflow( MaestroWorkflowVersion.builder() .definition(workflow) .triggerUuids(triggerUuids) .metadata(metadata) .build(), workflowDef.getPropertiesSnapshot()); } else if (workflowInfo.getPrevActiveVersionId() != Constants.INACTIVE_VERSION_ID) { // getting an inactive new version but having an active old version updateWorkflowInfoForNextActiveWorkflow( conn, workflow.getId(), workflowInfo.getPrevActiveVersionId(), workflowInfo, workflowDef.getPropertiesSnapshot()); } if (workflowInfo.withWorkflow()) { addWorkflowTriggersIfNeeded(conn, workflowInfo); } MaestroJobEvent jobEvent = logToTimeline( conn, workflowDef, snapshot, workflowInfo.getPrevActiveVersionId()); publisher.publishOrThrow( jobEvent, "Failed to publish maestro definition change job event."); return workflowDef; }), "addWorkflowDefinition", "Failed creating a new workflow definition {}", workflow.getId()); }
@Test public void testAddWorkflowDefinitionWithOutputSignals() throws Exception { WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID7); WorkflowDefinition definition = workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties()); assertEquals(wfd, definition); assertNotNull(wfd.getInternalId()); List<MapParamDefinition> step1Signals = definition .getWorkflow() .getSteps() .get(0) .getOutputs() .get(StepOutputsDefinition.StepOutputType.SIGNAL) .asSignalOutputsDefinition() .getDefinitions(); assertEquals(2, step1Signals.size()); assertEquals("dummy/test/signal1", step1Signals.get(0).getValue().get("name").getValue()); assertEquals( 1, step1Signals.get(0).getValue().get("p1").asLongParamDef().getValue().longValue()); assertEquals("aaa", step1Signals.get(1).getValue().get("name").getExpression()); assertEquals("auu+1", step1Signals.get(1).getValue().get("p2").getExpression()); verify(publisher, times(1)).publishOrThrow(any(), any()); }
public String getContextPath() { return contextPath; }
@Test public void sanitize_context_path_from_settings() { settings.setProperty(CONTEXT_PROPERTY, "/my_path///"); assertThat(underTest().getContextPath()).isEqualTo("/my_path"); }
public void runExtractor(Message msg) { try(final Timer.Context ignored = completeTimer.time()) { final String field; try (final Timer.Context ignored2 = conditionTimer.time()) { // We can only work on Strings. if (!(msg.getField(sourceField) instanceof String)) { conditionMissesCounter.inc(); return; } field = (String) msg.getField(sourceField); // Decide if to extract at all. if (conditionType.equals(ConditionType.STRING)) { if (field.contains(conditionValue)) { conditionHitsCounter.inc(); } else { conditionMissesCounter.inc(); return; } } else if (conditionType.equals(ConditionType.REGEX)) { if (regexConditionPattern.matcher(field).find()) { conditionHitsCounter.inc(); } else { conditionMissesCounter.inc(); return; } } } try (final Timer.Context ignored2 = executionTimer.time()) { Result[] results; try { results = run(field); } catch (ExtractorException e) { final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>"; msg.addProcessingError(new Message.ProcessingError( ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e))); return; } if (results == null || results.length == 0 || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) { return; } else if (results.length == 1 && results[0].target == null) { // results[0].target is null if this extractor cannot produce multiple fields use targetField in that case msg.addField(targetField, results[0].getValue()); } else { for (final Result result : results) { msg.addField(result.getTarget(), result.getValue()); } } // Remove original from message? if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField) && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) { final StringBuilder sb = new StringBuilder(field); final List<Result> reverseList = Arrays.stream(results) .sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed()) .collect(Collectors.toList()); // remove all from reverse so that the indices still match for (final Result result : reverseList) { sb.delete(result.getBeginIndex(), result.getEndIndex()); } final String builtString = sb.toString(); final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString; msg.removeField(sourceField); // TODO don't add an empty field back, or rather don't add fullyCutByExtractor msg.addField(sourceField, finalResult); } runConverters(msg); } } }
@Test public void testCursorStrategyCopyWithMultipleResults() throws Exception { final TestExtractor extractor = new TestExtractor.Builder() .cursorStrategy(COPY) .sourceField("msg") .callback(new Callable<Result[]>() { @Override public Result[] call() throws Exception { return new Result[]{ new Result("the", "one", 0, 3), new Result("hello", "two", 10, 15), }; } }) .build(); final Message msg = createMessage("message"); msg.addField("msg", "the great hello"); extractor.runExtractor(msg); // With the copy strategy, the source field will not be modified. assertThat(msg.getField("msg")).isEqualTo("the great hello"); }
@Override public void failover(NamedNode master) { connection.sync(RedisCommands.SENTINEL_FAILOVER, master.getName()); }
@Test public void testFailover() throws InterruptedException { Collection<RedisServer> masters = connection.masters(); connection.failover(masters.iterator().next()); Thread.sleep(10000); RedisServer newMaster = connection.masters().iterator().next(); assertThat(masters.iterator().next().getPort()).isNotEqualTo(newMaster.getPort()); }
public static Ip6Prefix valueOf(byte[] address, int prefixLength) { return new Ip6Prefix(Ip6Address.valueOf(address), prefixLength); }
@Test public void testValueOfByteArrayIPv6() { Ip6Prefix ipPrefix; byte[] value; value = new byte[] {0x11, 0x11, 0x22, 0x22, 0x33, 0x33, 0x44, 0x44, 0x55, 0x55, 0x66, 0x66, 0x77, 0x77, (byte) 0x88, (byte) 0x88}; ipPrefix = Ip6Prefix.valueOf(value, 120); assertThat(ipPrefix.toString(), is("1111:2222:3333:4444:5555:6666:7777:8800/120")); ipPrefix = Ip6Prefix.valueOf(value, 128); assertThat(ipPrefix.toString(), is("1111:2222:3333:4444:5555:6666:7777:8888/128")); value = new byte[] {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; ipPrefix = Ip6Prefix.valueOf(value, 0); assertThat(ipPrefix.toString(), is("::/0")); ipPrefix = Ip6Prefix.valueOf(value, 128); assertThat(ipPrefix.toString(), is("::/128")); value = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff}; ipPrefix = Ip6Prefix.valueOf(value, 0); assertThat(ipPrefix.toString(), is("::/0")); ipPrefix = Ip6Prefix.valueOf(value, 64); assertThat(ipPrefix.toString(), is("ffff:ffff:ffff:ffff::/64")); ipPrefix = Ip6Prefix.valueOf(value, 128); assertThat(ipPrefix.toString(), is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")); }
public DirectGraph getGraph() { checkState(finalized, "Can't get a graph before the Pipeline has been completely traversed"); return DirectGraph.create( producers, viewWriters, perElementConsumers, rootTransforms, stepNames); }
@Test public void getViewsReturnsViews() { PCollectionView<List<String>> listView = p.apply("listCreate", Create.of("foo", "bar")) .apply( ParDo.of( new DoFn<String, String>() { @ProcessElement public void processElement(DoFn<String, String>.ProcessContext c) throws Exception { c.output(Integer.toString(c.element().length())); } })) .apply(View.asList()); PCollectionView<Object> singletonView = p.apply("singletonCreate", Create.<Object>of(1, 2, 3)).apply(View.asSingleton()); // Views are not materialized unless they are consumed p.apply(Create.of(1, 2, 3)) .apply( ParDo.of( new DoFn<Integer, Void>() { @ProcessElement public void process() {} }) .withSideInputs(listView, singletonView)); DirectRunner.fromOptions(TestPipeline.testingPipelineOptions()).performRewrites(p); p.traverseTopologically(visitor); assertThat(visitor.getGraph().getViews(), Matchers.containsInAnyOrder(listView, singletonView)); }
public Future<KafkaVersionChange> reconcile() { return getVersionFromController() .compose(i -> getPods()) .compose(this::detectToAndFromVersions) .compose(i -> prepareVersionChange()); }
@Test public void testUpgradeWithMixedPodsAndNewSpsWhenUpgradingKafka(VertxTestContext context) { String oldKafkaVersion = KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION; String oldInterBrokerProtocolVersion = KafkaVersionTestUtils.PREVIOUS_PROTOCOL_VERSION; String oldLogMessageFormatVersion = KafkaVersionTestUtils.PREVIOUS_FORMAT_VERSION; String kafkaVersion = VERSIONS.defaultVersion().version(); String interBrokerProtocolVersion = VERSIONS.defaultVersion().protocolVersion(); String logMessageFormatVersion = VERSIONS.defaultVersion().messageVersion(); VersionChangeCreator vcc = mockVersionChangeCreator( mockKafka(kafkaVersion, interBrokerProtocolVersion, logMessageFormatVersion), mockNewCluster( null, mockSps(kafkaVersion), mockMixedPods(oldKafkaVersion, oldInterBrokerProtocolVersion, oldLogMessageFormatVersion, kafkaVersion, oldInterBrokerProtocolVersion, oldLogMessageFormatVersion) ) ); Checkpoint async = context.checkpoint(); vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> { assertThat(c.from(), is(VERSIONS.version(oldKafkaVersion))); assertThat(c.to(), is(VERSIONS.defaultVersion())); assertThat(c.interBrokerProtocolVersion(), is(oldInterBrokerProtocolVersion)); assertThat(c.logMessageFormatVersion(), is(oldLogMessageFormatVersion)); async.flag(); }))); }
@Override public String getTaskType() { return "killallchildprocess"; }
@Test public void shouldKnowItsType() { assertThat(new KillAllChildProcessTask().getTaskType(), is("killallchildprocess")); }
@Override public Batch toBatch() { return new SparkBatch( sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode()); }
@TestTemplate public void testPartitionedBucketLong() throws Exception { createPartitionedTable(spark, tableName, "bucket(5, id)"); SparkScanBuilder builder = scanBuilder(); BucketFunction.BucketLong function = new BucketFunction.BucketLong(DataTypes.LongType); UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(5), fieldRef("id"))); Predicate predicate = new Predicate(">=", expressions(udf, intLit(2))); pushFilters(builder, predicate); Batch scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(6); // NOT GTEQ builder = scanBuilder(); predicate = new Not(predicate); pushFilters(builder, predicate); scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(4); }
static ConfigNode propsToNode(Map<String, String> properties) { String rootNode = findRootNode(properties); ConfigNode root = new ConfigNode(rootNode); for (Map.Entry<String, String> e : properties.entrySet()) { parseEntry(e.getKey().replaceFirst(rootNode + ".", ""), e.getValue(), root); } return root; }
@Test public void shouldParse() { Map<String, String> m = new HashMap<>(); m.put("foo.bar1", "1"); m.put("foo.bar2", "2"); m.put("foo.bar3.bar4", "4"); ConfigNode configNode = PropertiesToNodeConverter.propsToNode(m); assertNull(configNode.getValue()); assertEquals("foo", configNode.getName()); assertEquals(3, configNode.getChildren().size()); assertEquals("1", configNode.getChildren().get("bar1").getValue()); assertEquals("2", configNode.getChildren().get("bar2").getValue()); assertEquals("4", configNode.getChildren().get("bar3").getChildren().get("bar4").getValue()); }
public Optional<YamlRuleConfiguration> swapToYamlRuleConfiguration(final Collection<RepositoryTuple> repositoryTuples, final Class<? extends YamlRuleConfiguration> toBeSwappedType) { RepositoryTupleEntity tupleEntity = toBeSwappedType.getAnnotation(RepositoryTupleEntity.class); if (null == tupleEntity) { return Optional.empty(); } return tupleEntity.leaf() ? swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, tupleEntity) : swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, getFields(toBeSwappedType)); }
@Test void assertSwapToYamlRuleConfigurationWithoutRepositoryTupleEntityAnnotation() { assertFalse(new RepositoryTupleSwapperEngine().swapToYamlRuleConfiguration(Collections.emptyList(), NoneYamlRuleConfiguration.class).isPresent()); }
@Override public ICardinality merge(ICardinality... estimators) throws CardinalityMergeException { HyperLogLog merged = new HyperLogLog(log2m, new RegisterSet(this.registerSet.count)); merged.addAll(this); if (estimators == null) { return merged; } for (ICardinality estimator : estimators) { if (!(estimator instanceof HyperLogLog)) { throw new HyperLogLogMergeException("Cannot merge estimators of different class"); } HyperLogLog hll = (HyperLogLog) estimator; merged.addAll(hll); } return merged; }
@Test public void testMerge() throws CardinalityMergeException { int numToMerge = 5; int bits = 16; int cardinality = 1000000; HyperLogLog[] hyperLogLogs = new HyperLogLog[numToMerge]; HyperLogLog baseline = new HyperLogLog(bits); for (int i = 0; i < numToMerge; i++) { hyperLogLogs[i] = new HyperLogLog(bits); for (int j = 0; j < cardinality; j++) { double val = Math.random(); hyperLogLogs[i].offer(val); baseline.offer(val); } } long expectedCardinality = numToMerge * cardinality; HyperLogLog hll = hyperLogLogs[0]; hyperLogLogs = Arrays.asList(hyperLogLogs).subList(1, hyperLogLogs.length).toArray(new HyperLogLog[0]); long mergedEstimate = hll.merge(hyperLogLogs).cardinality(); long baselineEstimate = baseline.cardinality(); double se = expectedCardinality * (1.04 / Math.sqrt(Math.pow(2, bits))); System.out.println("Baseline estimate: " + baselineEstimate); System.out.println("Expect estimate: " + mergedEstimate + " is between " + (expectedCardinality - (3 * se)) + " and " + (expectedCardinality + (3 * se))); assertTrue(mergedEstimate >= expectedCardinality - (3 * se)); assertTrue(mergedEstimate <= expectedCardinality + (3 * se)); assertEquals(mergedEstimate, baselineEstimate); }
public static PostgreSQLCommandPacket newInstance(final CommandPacketType commandPacketType, final PostgreSQLPacketPayload payload) { if (!OpenGaussCommandPacketType.isExtendedProtocolPacketType(commandPacketType)) { payload.getByteBuf().skipBytes(1); return getCommandPacket(commandPacketType, payload); } List<PostgreSQLCommandPacket> result = new ArrayList<>(); while (payload.hasCompletePacket()) { CommandPacketType type = OpenGaussCommandPacketType.valueOf(payload.readInt1()); int length = payload.getByteBuf().getInt(payload.getByteBuf().readerIndex()); PostgreSQLPacketPayload slicedPayload = new PostgreSQLPacketPayload(payload.getByteBuf().readSlice(length), payload.getCharset()); result.add(getCommandPacket(type, slicedPayload)); } return new PostgreSQLAggregatedCommandPacket(result); }
@Test void assertNewPostgreSQLPacket() { assertThat(OpenGaussCommandPacketFactory.newInstance(mock(PostgreSQLCommandPacketType.class), payload), instanceOf(PostgreSQLCommandPacket.class)); }
@Override public boolean isImmutableType() { return false; }
@Test void arrayTypeIsMutable() { StringArraySerializer serializer = (StringArraySerializer) createSerializer(); assertThat(serializer.isImmutableType()).isFalse(); }
private List<Object> getTargetInstancesByRules(String targetName, List<Object> instances, String path, Map<String, List<String>> header) { RouterConfiguration configuration = ConfigCache.getLabel(RouterConstant.SPRING_CACHE_NAME); if (RouterConfiguration.isInValid(configuration, RouterConstant.FLOW_MATCH_KIND)) { return instances; } List<Rule> rules = FlowRuleUtils.getFlowRules(configuration, targetName, path, AppCache.INSTANCE.getAppName()); if (CollectionUtils.isEmpty(rules)) { return instances; } Optional<Rule> ruleOptional = getRule(rules, header); if (ruleOptional.isPresent()) { return RuleStrategyHandler.INSTANCE.getFlowMatchInstances(targetName, instances, ruleOptional.get()); } return RuleStrategyHandler.INSTANCE .getMismatchInstances(targetName, instances, RuleUtils.getTags(rules), true); }
@Test public void testGetTargetInstancesByRules() { RuleInitializationUtils.initFlowMatchRule(); List<Object> instances = new ArrayList<>(); ServiceInstance instance1 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.0"); instances.add(instance1); ServiceInstance instance2 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1"); instances.add(instance2); Map<String, List<String>> header = new HashMap<>(); header.put("bar", Collections.singletonList("bar1")); List<Object> targetInstances = flowRouteHandler.handle("foo", instances, new RequestData(header, null, null)); Assert.assertEquals(1, targetInstances.size()); Assert.assertEquals(instance2, targetInstances.get(0)); ConfigCache.getLabel(RouterConstant.SPRING_CACHE_NAME).resetRouteRule(Collections.emptyMap()); }
public void transmit(final int msgTypeId, final DirectBuffer srcBuffer, final int srcIndex, final int length) { checkTypeId(msgTypeId); checkMessageLength(length); final AtomicBuffer buffer = this.buffer; long currentTail = buffer.getLong(tailCounterIndex); int recordOffset = (int)currentTail & (capacity - 1); final int recordLength = HEADER_LENGTH + length; final int recordLengthAligned = BitUtil.align(recordLength, RECORD_ALIGNMENT); final long newTail = currentTail + recordLengthAligned; final int toEndOfBuffer = capacity - recordOffset; if (toEndOfBuffer < recordLengthAligned) { signalTailIntent(buffer, newTail + toEndOfBuffer); insertPaddingRecord(buffer, recordOffset, toEndOfBuffer); currentTail += toEndOfBuffer; recordOffset = 0; } else { signalTailIntent(buffer, newTail); } buffer.putInt(lengthOffset(recordOffset), recordLength); buffer.putInt(typeOffset(recordOffset), msgTypeId); buffer.putBytes(msgOffset(recordOffset), srcBuffer, srcIndex, length); buffer.putLongOrdered(latestCounterIndex, currentTail); buffer.putLongOrdered(tailCounterIndex, currentTail + recordLengthAligned); }
@Test void shouldTransmitIntoEndOfBuffer() { final int length = 8; final int recordLength = length + HEADER_LENGTH; final int recordLengthAligned = align(recordLength, RECORD_ALIGNMENT); final long tail = CAPACITY - recordLengthAligned; final int recordOffset = (int)tail; when(buffer.getLong(TAIL_COUNTER_INDEX)).thenReturn(tail); final UnsafeBuffer srcBuffer = new UnsafeBuffer(new byte[1024]); final int srcIndex = 0; broadcastTransmitter.transmit(MSG_TYPE_ID, srcBuffer, srcIndex, length); final InOrder inOrder = inOrder(buffer); inOrder.verify(buffer).getLong(TAIL_COUNTER_INDEX); inOrder.verify(buffer).putLongOrdered(TAIL_INTENT_COUNTER_OFFSET, tail + recordLengthAligned); inOrder.verify(buffer).putInt(lengthOffset(recordOffset), recordLength); inOrder.verify(buffer).putInt(typeOffset(recordOffset), MSG_TYPE_ID); inOrder.verify(buffer).putBytes(msgOffset(recordOffset), srcBuffer, srcIndex, length); inOrder.verify(buffer).putLongOrdered(LATEST_COUNTER_INDEX, tail); inOrder.verify(buffer).putLongOrdered(TAIL_COUNTER_INDEX, tail + recordLengthAligned); }
@Override protected void propagationField(String keyName, String value) { if (keyName == null) throw new NullPointerException("keyName == null"); if (value == null) throw new NullPointerException("value == null"); Key<String> key = nameToKey.get(keyName); if (key == null) { assert false : "We currently don't support setting headers except propagation fields"; return; } headers.removeAll(key); headers.put(key, value); }
@Test void propagationField_replace() { headers.put(b3Key, "0"); request.propagationField("b3", "1"); assertThat(request.headers.get(b3Key)).isEqualTo("1"); }
@Udf(description = "Converts a number of milliseconds since 1970-01-01 00:00:00 UTC/GMT into the" + " string representation of the timestamp in the given format. Single quotes in the" + " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'." + " The system default time zone is used when no time zone is explicitly provided." + " The format pattern should be in the format expected" + " by java.time.format.DateTimeFormatter") public String timestampToString( @UdfParameter( description = "Milliseconds since" + " January 1, 1970, 00:00:00 UTC/GMT.") final long epochMilli, @UdfParameter( description = "The format pattern should be in the format expected by" + " java.time.format.DateTimeFormatter.") final String formatPattern) { if (formatPattern == null) { return null; } try { final Timestamp timestamp = new Timestamp(epochMilli); final DateTimeFormatter formatter = formatters.get(formatPattern); return timestamp.toInstant() .atZone(ZoneId.systemDefault()) .format(formatter); } catch (final ExecutionException | RuntimeException e) { throw new KsqlFunctionException("Failed to format timestamp " + epochMilli + " with formatter '" + formatPattern + "': " + e.getMessage(), e); } }
@Test public void shouldThrowIfFormatInvalid() { // When: final KsqlException e = assertThrows( KsqlFunctionException.class, () -> udf.timestampToString(1638360611123L, "invalid") ); // Then: assertThat(e.getMessage(), containsString("Unknown pattern letter: i")); }
public static boolean isValidCard18(String idcard) { return isValidCard18(idcard, true); }
@Test public void isValidCard18Test(){ boolean isValidCard18 = IdcardUtil.isValidCard18("3301022011022000D6"); assertFalse(isValidCard18); // When case is not ignored, the check digit X must be uppercase under strict validation isValidCard18 = IdcardUtil.isValidCard18("33010219200403064x", false); assertFalse(isValidCard18); isValidCard18 = IdcardUtil.isValidCard18("33010219200403064X", false); assertTrue(isValidCard18); // Under non-strict validation, either case is accepted isValidCard18 = IdcardUtil.isValidCard18("33010219200403064x"); assertTrue(isValidCard18); isValidCard18 = IdcardUtil.isValidCard18("33010219200403064X"); assertTrue(isValidCard18); // Mainland ID card for Hong Kong residents isValidCard18 = IdcardUtil.isValidCard18("81000019980902013X"); assertTrue(isValidCard18); // Mainland ID card for Macau residents isValidCard18 = IdcardUtil.isValidCard18("820000200009100032"); assertTrue(isValidCard18); // Mainland ID card for Taiwan residents isValidCard18 = IdcardUtil.isValidCard18("830000200209060065"); assertTrue(isValidCard18); // New-style foreign permanent resident ID card isValidCard18 = IdcardUtil.isValidCard18("932682198501010017"); assertTrue(isValidCard18); }
@Override public void run() { serverMonitoringMetrics.setNumberOfConnectedSonarLintClients(sonarLintClientsRegistry.countConnectedClients()); }
@Test public void run_when5ConnectedClients_updateWith5() { when(sonarLintClientsRegistry.countConnectedClients()).thenReturn(5L); underTest.run(); verify(metrics).setNumberOfConnectedSonarLintClients(5L); }
public Map<String, StepRuntimeState> getStepStates( String workflowId, long workflowInstanceId, long workflowRunId, List<String> stepIds) { return withMetricLogError( () -> withRetryableTransaction( conn -> { try (PreparedStatement stmt = conn.prepareStatement(GET_STEP_LAST_ATTEMPT_STATE_QUERY)) { int idx = 0; stmt.setString(++idx, workflowId); stmt.setLong(++idx, workflowInstanceId); stmt.setLong(++idx, workflowRunId); stmt.setArray( ++idx, conn.createArrayOf(ARRAY_TYPE_NAME, stepIds.toArray(new String[0]))); try (ResultSet result = stmt.executeQuery()) { return getStringStepRuntimeStateMap(result); } } }), "getStepStates", "Failed to get steps [{}] latest attempt stats for [{}][{}][{}]", stepIds, workflowId, workflowInstanceId, workflowRunId); }
@Test public void testGetStepStates() { Map<String, StepRuntimeState> stats = stepDao.getStepStates(TEST_WORKFLOW_ID, 1, 1, Arrays.asList("job1", "job2")); assertEquals(singletonMap(si.getStepId(), si.getRuntimeState()), stats); }
@JsonCreator public static AuditEventType create(@JsonProperty(FIELD_NAMESPACE) String namespace, @JsonProperty(FIELD_OBJECT) String object, @JsonProperty(FIELD_ACTION) String action) { return new AutoValue_AuditEventType(namespace, object, action); }
@Test public void testInvalid3() throws Exception { expectedException.expect(IllegalArgumentException.class); AuditEventType.create(null); }
@Override public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException { if (dbExists(dbName)) { throw new AlreadyExistsException("Database Already Exists"); } icebergCatalog.createDb(dbName, properties); }
@Test(expected = IllegalArgumentException.class) public void testCreateDbWithErrorConfig() throws AlreadyExistsException { IcebergHiveCatalog hiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), new HashMap<>()); IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, hiveCatalog, Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), null); new Expectations(hiveCatalog) { { hiveCatalog.listAllDatabases(); result = Lists.newArrayList(); minTimes = 0; } }; metadata.createDb("iceberg_db", ImmutableMap.of("error_key", "error_value")); }
public void handleAssignment(final Map<TaskId, Set<TopicPartition>> activeTasks, final Map<TaskId, Set<TopicPartition>> standbyTasks) { log.info("Handle new assignment with:\n" + "\tNew active tasks: {}\n" + "\tNew standby tasks: {}\n" + "\tExisting active tasks: {}\n" + "\tExisting standby tasks: {}", activeTasks.keySet(), standbyTasks.keySet(), activeTaskIds(), standbyTaskIds()); topologyMetadata.addSubscribedTopicsFromAssignment( activeTasks.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()), logPrefix ); final Map<TaskId, Set<TopicPartition>> activeTasksToCreate = new HashMap<>(activeTasks); final Map<TaskId, Set<TopicPartition>> standbyTasksToCreate = new HashMap<>(standbyTasks); final Map<Task, Set<TopicPartition>> tasksToRecycle = new HashMap<>(); final Set<Task> tasksToCloseClean = new TreeSet<>(Comparator.comparing(Task::id)); final Set<TaskId> tasksToLock = tasks.allTaskIds().stream() .filter(x -> activeTasksToCreate.containsKey(x) || standbyTasksToCreate.containsKey(x)) .collect(Collectors.toSet()); maybeLockTasks(tasksToLock); // first put aside those unrecognized tasks because of unknown named-topologies tasks.clearPendingTasksToCreate(); tasks.addPendingActiveTasksToCreate(pendingTasksToCreate(activeTasksToCreate)); tasks.addPendingStandbyTasksToCreate(pendingTasksToCreate(standbyTasksToCreate)); // first rectify all existing tasks: // 1. for tasks that are already owned, just update input partitions / resume and skip re-creating them // 2. for tasks that have changed active/standby status, just recycle and skip re-creating them // 3. otherwise, close them since they are no longer owned final Map<TaskId, RuntimeException> failedTasks = new LinkedHashMap<>(); if (stateUpdater == null) { handleTasksWithoutStateUpdater(activeTasksToCreate, standbyTasksToCreate, tasksToRecycle, tasksToCloseClean); } else { handleTasksWithStateUpdater( activeTasksToCreate, standbyTasksToCreate, tasksToRecycle, tasksToCloseClean, failedTasks ); failedTasks.putAll(collectExceptionsAndFailedTasksFromStateUpdater()); } final Map<TaskId, RuntimeException> taskCloseExceptions = closeAndRecycleTasks(tasksToRecycle, tasksToCloseClean); maybeUnlockTasks(tasksToLock); failedTasks.putAll(taskCloseExceptions); maybeThrowTaskExceptions(failedTasks); createNewTasks(activeTasksToCreate, standbyTasksToCreate); }
@Test public void shouldRemoveUnusedFailedActiveTaskFromStateUpdaterAndCloseDirty() { final StreamTask activeTaskToClose = statefulTask(taskId03, taskId03ChangelogPartitions) .inState(State.RESTORING) .withInputPartitions(taskId03Partitions).build(); final TasksRegistry tasks = mock(TasksRegistry.class); final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true); when(stateUpdater.getTasks()).thenReturn(mkSet(activeTaskToClose)); final CompletableFuture<StateUpdater.RemovedTaskResult> future = new CompletableFuture<>(); when(stateUpdater.remove(activeTaskToClose.id())).thenReturn(future); future.complete(new StateUpdater.RemovedTaskResult(activeTaskToClose, new RuntimeException("KABOOM!"))); taskManager.handleAssignment(Collections.emptyMap(), Collections.emptyMap()); verify(activeTaskToClose).prepareCommit(); verify(activeTaskToClose).suspend(); verify(activeTaskToClose).closeDirty(); verify(activeTaskCreator).closeAndRemoveTaskProducerIfNeeded(activeTaskToClose.id()); verify(activeTaskCreator).createTasks(consumer, Collections.emptyMap()); verify(standbyTaskCreator).createTasks(Collections.emptyMap()); }
public long timeout() { long timer = this.timer.timeout(); long ticket = this.ticket.timeout(); if (timer < 0 || ticket < 0) { return Math.max(timer, ticket); } else { return Math.min(timer, ticket); } }
@Test public void testNoTicket() { assertThat(ticker.timeout(), is(-1L)); }
@Override public int remainingCapacity() { return createSemaphore(null).availablePermits(); }
@Test public void testRemainingCapacity() throws InterruptedException { RBoundedBlockingQueue<Integer> queue1 = redisson.getBoundedBlockingQueue("bounded-queue:testRemainingCapacity"); assertThat(queue1.trySetCapacity(3)).isTrue(); assertThat(queue1.remainingCapacity()).isEqualTo(3); assertThat(queue1.add(1)).isTrue(); assertThat(queue1.remainingCapacity()).isEqualTo(2); assertThat(queue1.add(2)).isTrue(); assertThat(queue1.remainingCapacity()).isEqualTo(1); assertThat(queue1.add(3)).isTrue(); assertThat(queue1.remainingCapacity()).isEqualTo(0); RBoundedBlockingQueue<Integer> queue2 = redisson.getBoundedBlockingQueue("bounded-queue:testRemainingCapacityEmpty"); assertThat(queue2.trySetCapacity(3)).isTrue(); for (int i = 0; i < 5; i++) { queue2.poll(1, TimeUnit.SECONDS); assertThat(queue2.remainingCapacity()).isEqualTo(3); } }
@Override @MethodNotAvailable public void removeAll() { throw new MethodNotAvailableException(); }
@Test(expected = MethodNotAvailableException.class) public void testRemoveAll() { adapter.removeAll(); }
public static String calculateTypeName(CompilationUnit compilationUnit, FullyQualifiedJavaType fqjt) { if (fqjt.isArray()) { // if array, then calculate the name of the base (non-array) type // then add the array indicators back in String fqn = fqjt.getFullyQualifiedName(); String typeName = calculateTypeName(compilationUnit, new FullyQualifiedJavaType(fqn.substring(0, fqn.indexOf('[')))); return typeName + fqn.substring(fqn.indexOf('[')); } if (!fqjt.getTypeArguments().isEmpty()) { return calculateParameterizedTypeName(compilationUnit, fqjt); } if (compilationUnit == null || typeDoesNotRequireImport(fqjt) || typeIsInSamePackage(compilationUnit, fqjt) || typeIsAlreadyImported(compilationUnit, fqjt)) { return fqjt.getShortName(); } else { return fqjt.getFullyQualifiedName(); } }
@Test void testGenericTypeNothingImported() { Interface interfaze = new Interface(new FullyQualifiedJavaType("com.foo.UserMapper")); FullyQualifiedJavaType fqjt = new FullyQualifiedJavaType("java.util.Map<java.math.BigDecimal, java.util.List<com.beeant.dto.User>>"); assertEquals("java.util.Map<java.math.BigDecimal, java.util.List<com.beeant.dto.User>>", JavaDomUtils.calculateTypeName(interfaze, fqjt)); }
public synchronized boolean getHasError() { return hasError; }
@Test public void shouldDefaultToFalseForHasError() { // Given: final ProcessingQueue queue = new ProcessingQueue(new QueryId("a")); // Then: assertThat(queue.getHasError(), is(false)); }
@Override public Set<String> getWorkerUUIDs() { Set<UUID> connectedWorkerUUIDs = hazelcastMember.getMemberUuids(); return getClusteredWorkerUUIDs().entrySet().stream() .filter(e -> connectedWorkerUUIDs.contains(e.getKey())) .map(Map.Entry::getValue) .flatMap(Set::stream) .collect(Collectors.toSet()); }
@Test public void getWorkerUUIDs_must_filter_absent_client() { when(hzClientWrapper.getUuid()).thenReturn(clientUUID1); when(hzClientWrapper.getMemberUuids()).thenReturn(ImmutableSet.of(clientUUID1, clientUUID2)); when(hzClientWrapper.<UUID, Set<String>>getReplicatedMap(WORKER_UUIDS)).thenReturn(workerMap); CeDistributedInformation ceDistributedInformation = new CeDistributedInformationImpl(hzClientWrapper, mock(CeWorkerFactory.class)); assertThat(ceDistributedInformation.getWorkerUUIDs()).containsExactlyInAnyOrder(w1, w2, w3); }
@Override public SCMPropertyConfiguration responseMessageForSCMConfiguration(String responseBody) { try { SCMPropertyConfiguration scmConfiguration = new SCMPropertyConfiguration(); Map<String, Map> configurations; try { configurations = parseResponseToMap(responseBody); } catch (Exception e) { throw new RuntimeException("SCM configuration should be returned as a map"); } if (configurations == null || configurations.isEmpty()) { throw new RuntimeException("Empty response body"); } for (String key : configurations.keySet()) { if (isEmpty(key)) { throw new RuntimeException("SCM configuration key cannot be empty"); } if (!(configurations.get(key) instanceof Map)) { throw new RuntimeException(format("SCM configuration properties for key '%s' should be represented as a Map", key)); } scmConfiguration.add(toSCMProperty(key, configurations.get(key))); } return scmConfiguration; } catch (Exception e) { throw new RuntimeException(format("Unable to de-serialize json response. %s", e.getMessage())); } }
@Test public void shouldBuildSCMConfigurationFromResponseBody() throws Exception { String responseBody = "{" + "\"key-one\":{}," + "\"key-two\":{\"default-value\":\"two\",\"part-of-identity\":true,\"secure\":true,\"required\":true,\"display-name\":\"display-two\",\"display-order\":\"1\"}," + "\"key-three\":{\"default-value\":\"three\",\"part-of-identity\":false,\"secure\":false,\"required\":false,\"display-name\":\"display-three\",\"display-order\":\"2\"}" + "}"; SCMPropertyConfiguration scmConfiguration = messageHandler.responseMessageForSCMConfiguration(responseBody); assertPropertyConfiguration((SCMProperty) scmConfiguration.get("key-one"), "key-one", "", true, true, false, "", 0); assertPropertyConfiguration((SCMProperty) scmConfiguration.get("key-two"), "key-two", "two", true, true, true, "display-two", 1); assertPropertyConfiguration((SCMProperty) scmConfiguration.get("key-three"), "key-three", "three", false, false, false, "display-three", 2); }
@PostMapping("create-review") public Mono<String> createReview(@ModelAttribute("product") Mono<Product> productMono, NewProductReviewPayload payload, Model model, ServerHttpResponse response) { return productMono.flatMap(product -> this.productReviewsClient.createProductReview(product.id(), payload.rating(), payload.review()) .thenReturn("redirect:/customer/products/%d".formatted(product.id())) .onErrorResume(ClientBadRequestException.class, exception -> { model.addAttribute("inFavourite", false); model.addAttribute("payload", payload); model.addAttribute("errors", exception.getErrors()); response.setStatusCode(HttpStatus.BAD_REQUEST); return this.favouriteProductsClient.findFavouriteProductByProductId(product.id()) .doOnNext(favouriteProduct -> model.addAttribute("inFavourite", true)) .thenReturn("customer/products/product"); })); }
@Test void createReview_RequestIsInvalid_ReturnsProductPageWithPayloadAndErrors() { // given var model = new ConcurrentModel(); var response = new MockServerHttpResponse(); var favouriteProduct = new FavouriteProduct(UUID.fromString("af5f9496-cbaa-11ee-a407-27b46917819e"), 1); doReturn(Mono.just(favouriteProduct)).when(this.favouriteProductsClient).findFavouriteProductByProductId(1); doReturn(Mono.error(new ClientBadRequestException("Some error occurred", null, List.of("Error 1", "Error 2")))) .when(this.productReviewsClient).createProductReview(1, null, "A very long review"); // when StepVerifier.create(this.controller.createReview( Mono.just(new Product(1, "Product #1", "Product #1 description")), new NewProductReviewPayload(null, "A very long review"), model, response)) // then .expectNext("customer/products/product") .verifyComplete(); assertEquals(HttpStatus.BAD_REQUEST, response.getStatusCode()); assertEquals(true, model.getAttribute("inFavourite")); assertEquals(new NewProductReviewPayload(null, "A very long review"), model.getAttribute("payload")); assertEquals(List.of("Error 1", "Error 2"), model.getAttribute("errors")); verify(this.productReviewsClient).createProductReview(1, null, "A very long review"); verify(this.favouriteProductsClient).findFavouriteProductByProductId(1); verifyNoMoreInteractions(this.productReviewsClient, this.favouriteProductsClient); verifyNoInteractions(this.productsClient); }
public static boolean isSentinelResource(HasMetadata resource) { Map<String, String> labels = resource.getMetadata().getLabels(); if (labels == null) { return false; } String namespace = resource.getMetadata().getNamespace(); return shouldSentinelWatchGivenNamespace(namespace) && Boolean.TRUE .toString() .equalsIgnoreCase( labels.getOrDefault(Constants.LABEL_SENTINEL_RESOURCE, Boolean.FALSE.toString())); }
@Test @Order(1) void testIsSentinelResource() { SparkApplication sparkApplication = new SparkApplication(); Map<String, String> labelMap = sparkApplication.getMetadata().getLabels(); labelMap.put(Constants.LABEL_SENTINEL_RESOURCE, "true"); Set<String> namespaces = new HashSet<>(); sparkApplication.getMetadata().setNamespace("spark-test"); namespaces.add("spark-test"); try (MockedStatic<Utils> mockUtils = mockStatic(Utils.class)) { mockUtils.when(Utils::getWatchedNamespaces).thenReturn(namespaces); assertTrue(SentinelManager.isSentinelResource(sparkApplication)); } }
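For contrast, a hedged negative-path sketch for isSentinelResource above, reusing the same fixture style; the test name, and the assumption that a fresh SparkApplication carries a non-null but label-free metadata map, are mine:

@Test
@Order(2)
void testIsNotSentinelResourceWithoutLabel() {
    // Hypothetical companion test: no LABEL_SENTINEL_RESOURCE label is set,
    // so getOrDefault falls back to "false" and the check must fail.
    SparkApplication sparkApplication = new SparkApplication();
    sparkApplication.getMetadata().setNamespace("spark-test");
    Set<String> namespaces = new HashSet<>();
    namespaces.add("spark-test");
    try (MockedStatic<Utils> mockUtils = mockStatic(Utils.class)) {
        mockUtils.when(Utils::getWatchedNamespaces).thenReturn(namespaces);
        assertFalse(SentinelManager.isSentinelResource(sparkApplication));
    }
}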
@Override public void replay( long offset, long producerId, short producerEpoch, CoordinatorRecord record ) throws RuntimeException { ApiMessageAndVersion key = record.key(); ApiMessageAndVersion value = record.value(); switch (key.version()) { case 0: case 1: offsetMetadataManager.replay( offset, producerId, (OffsetCommitKey) key.message(), (OffsetCommitValue) Utils.messageOrNull(value) ); break; case 2: groupMetadataManager.replay( (GroupMetadataKey) key.message(), (GroupMetadataValue) Utils.messageOrNull(value) ); break; case 3: groupMetadataManager.replay( (ConsumerGroupMetadataKey) key.message(), (ConsumerGroupMetadataValue) Utils.messageOrNull(value) ); break; case 4: groupMetadataManager.replay( (ConsumerGroupPartitionMetadataKey) key.message(), (ConsumerGroupPartitionMetadataValue) Utils.messageOrNull(value) ); break; case 5: groupMetadataManager.replay( (ConsumerGroupMemberMetadataKey) key.message(), (ConsumerGroupMemberMetadataValue) Utils.messageOrNull(value) ); break; case 6: groupMetadataManager.replay( (ConsumerGroupTargetAssignmentMetadataKey) key.message(), (ConsumerGroupTargetAssignmentMetadataValue) Utils.messageOrNull(value) ); break; case 7: groupMetadataManager.replay( (ConsumerGroupTargetAssignmentMemberKey) key.message(), (ConsumerGroupTargetAssignmentMemberValue) Utils.messageOrNull(value) ); break; case 8: groupMetadataManager.replay( (ConsumerGroupCurrentMemberAssignmentKey) key.message(), (ConsumerGroupCurrentMemberAssignmentValue) Utils.messageOrNull(value) ); break; case 9: groupMetadataManager.replay( (ShareGroupPartitionMetadataKey) key.message(), (ShareGroupPartitionMetadataValue) Utils.messageOrNull(value) ); break; case 10: groupMetadataManager.replay( (ShareGroupMemberMetadataKey) key.message(), (ShareGroupMemberMetadataValue) Utils.messageOrNull(value) ); break; case 11: groupMetadataManager.replay( (ShareGroupMetadataKey) key.message(), (ShareGroupMetadataValue) Utils.messageOrNull(value) ); break; case 12: groupMetadataManager.replay( (ShareGroupTargetAssignmentMetadataKey) key.message(), (ShareGroupTargetAssignmentMetadataValue) Utils.messageOrNull(value) ); break; case 13: groupMetadataManager.replay( (ShareGroupTargetAssignmentMemberKey) key.message(), (ShareGroupTargetAssignmentMemberValue) Utils.messageOrNull(value) ); break; case 14: groupMetadataManager.replay( (ShareGroupCurrentMemberAssignmentKey) key.message(), (ShareGroupCurrentMemberAssignmentValue) Utils.messageOrNull(value) ); break; default: throw new IllegalStateException("Received an unknown record type " + key.version() + " in " + record); } }
@Test public void testReplayConsumerGroupTargetAssignmentMember() { GroupMetadataManager groupMetadataManager = mock(GroupMetadataManager.class); OffsetMetadataManager offsetMetadataManager = mock(OffsetMetadataManager.class); CoordinatorMetrics coordinatorMetrics = mock(CoordinatorMetrics.class); CoordinatorMetricsShard metricsShard = mock(CoordinatorMetricsShard.class); GroupCoordinatorShard coordinator = new GroupCoordinatorShard( new LogContext(), groupMetadataManager, offsetMetadataManager, Time.SYSTEM, new MockCoordinatorTimer<>(Time.SYSTEM), mock(GroupCoordinatorConfig.class), coordinatorMetrics, metricsShard ); ConsumerGroupTargetAssignmentMemberKey key = new ConsumerGroupTargetAssignmentMemberKey(); ConsumerGroupTargetAssignmentMemberValue value = new ConsumerGroupTargetAssignmentMemberValue(); coordinator.replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, new CoordinatorRecord( new ApiMessageAndVersion(key, (short) 7), new ApiMessageAndVersion(value, (short) 0) )); verify(groupMetadataManager, times(1)).replay(key, value); }
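The default branch of replay() above rejects unknown key versions; a hedged sketch of that path, mirroring the fixture of the previous test (the chosen version 15 and the reuse of ConsumerGroupMetadataKey/Value as stand-in messages are assumptions):

@Test
public void testReplayWithUnknownKeyVersionThrows() {
    GroupCoordinatorShard coordinator = new GroupCoordinatorShard(
        new LogContext(),
        mock(GroupMetadataManager.class),
        mock(OffsetMetadataManager.class),
        Time.SYSTEM,
        new MockCoordinatorTimer<>(Time.SYSTEM),
        mock(GroupCoordinatorConfig.class),
        mock(CoordinatorMetrics.class),
        mock(CoordinatorMetricsShard.class)
    );
    // Version 15 is outside the 0-14 range handled by the switch.
    ApiMessageAndVersion key = new ApiMessageAndVersion(new ConsumerGroupMetadataKey(), (short) 15);
    ApiMessageAndVersion value = new ApiMessageAndVersion(new ConsumerGroupMetadataValue(), (short) 0);
    assertThrows(IllegalStateException.class, () -> coordinator.replay(
        0L,
        RecordBatch.NO_PRODUCER_ID,
        RecordBatch.NO_PRODUCER_EPOCH,
        new CoordinatorRecord(key, value)
    ));
}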
@Override public ObjectNode encode(Criterion criterion, CodecContext context) { EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context); return encoder.encode(); }
@Test public void matchTcpSrcTest() { Criterion criterion = Criteria.matchTcpSrc(tpPort); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
@Override public void commit() { throw new UnsupportedOperationException("StateStores can't access commit."); }
@Test public void shouldThrowOnCommit() { assertThrows(UnsupportedOperationException.class, () -> context.commit()); }
@Override public String getRawSourceHash(Component file) { checkComponentArgument(file); if (rawSourceHashesByKey.containsKey(file.getKey())) { return checkSourceHash(file.getKey(), rawSourceHashesByKey.get(file.getKey())); } else { String newSourceHash = computeRawSourceHash(file); rawSourceHashesByKey.put(file.getKey(), newSourceHash); return checkSourceHash(file.getKey(), newSourceHash); } }
@Test void getRawSourceHash_reads_lines_from_SourceLinesRepository_only_the_first_time() { when(mockedSourceLinesRepository.readLines(FILE_COMPONENT)).thenReturn(CloseableIterator.from(Arrays.asList(SOME_LINES).iterator())); String rawSourceHash = mockedUnderTest.getRawSourceHash(FILE_COMPONENT); String rawSourceHash1 = mockedUnderTest.getRawSourceHash(FILE_COMPONENT); assertThat(rawSourceHash).isSameAs(rawSourceHash1); verify(mockedSourceLinesRepository, times(1)).readLines(FILE_COMPONENT); }
@Override public void received(Channel channel, Object message) throws RemotingException { if (message instanceof Decodeable) { decode(message); } if (message instanceof Request) { decode(((Request) message).getData()); } if (message instanceof Response) { decode(((Response) message).getResult()); } handler.received(channel, message); }
@Test void test() throws Exception { ChannelHandler handler = Mockito.mock(ChannelHandler.class); Channel channel = Mockito.mock(Channel.class); DecodeHandler decodeHandler = new DecodeHandler(handler); MockData mockData = new MockData(); decodeHandler.received(channel, mockData); Assertions.assertTrue(mockData.isDecoded()); MockData mockRequestData = new MockData(); Request request = new Request(1); request.setData(mockRequestData); decodeHandler.received(channel, request); Assertions.assertTrue(mockRequestData.isDecoded()); MockData mockResponseData = new MockData(); Response response = new Response(1); response.setResult(mockResponseData); decodeHandler.received(channel, response); Assertions.assertTrue(mockResponseData.isDecoded()); mockData.setThrowEx(true); decodeHandler.received(channel, mockData); }
public static ViewMetadata fromJson(String metadataLocation, String json) { return JsonUtil.parse(json, node -> ViewMetadataParser.fromJson(metadataLocation, node)); }
@Test public void failReadingViewMetadataInvalidSchemaId() throws Exception { String json = readViewMetadataInputFile("org/apache/iceberg/view/ViewMetadataInvalidCurrentSchema.json"); ViewMetadata metadata = ViewMetadataParser.fromJson(json); assertThatThrownBy(metadata::currentSchemaId) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot find current schema with id 1234 in schemas: [1]"); }
static void readFromGetEndpointExample(List<String> urls, Pipeline pipeline) { // Pipeline pipeline = Pipeline.create(); // List<String> urls = ImmutableList.of( // "https://storage.googleapis.com/generativeai-downloads/images/cake.jpg", // "https://storage.googleapis.com/generativeai-downloads/images/chocolate.png", // "https://storage.googleapis.com/generativeai-downloads/images/croissant.jpg", // "https://storage.googleapis.com/generativeai-downloads/images/dog_form.jpg", // "https://storage.googleapis.com/generativeai-downloads/images/factory.png", // "https://storage.googleapis.com/generativeai-downloads/images/scones.jpg" // ); // Step 1: Convert the list of URLs to a PCollection of ImageRequests. PCollection<KV<String, ImageRequest>> requests = Images.requestsOf(urls, pipeline); // Step 2: RequestResponseIO requires a Coder as its second parameter. KvCoder<String, ImageResponse> responseCoder = KvCoder.of(StringUtf8Coder.of(), ImageResponseCoder.of()); // Step 3: Process ImageRequests using RequestResponseIO instantiated from the Caller // implementation and the expected PCollection response Coder. Result<KV<String, ImageResponse>> result = requests.apply( ImageResponse.class.getSimpleName(), RequestResponseIO.of(HttpImageClient.of(), responseCoder)); // Step 4: Log any failures to stderr. result.getFailures().apply("logErrors", Log.errorOf()); // Step 5: Log output to stdout. Images.displayOf(result.getResponses()).apply("logResponses", Log.infoOf()); }
@Test public void testReadFromGetEndpointExample() { Pipeline pipeline = Pipeline.create(); List<String> urls = ImmutableList.of( "https://storage.googleapis.com/generativeai-downloads/images/cake.jpg", "https://storage.googleapis.com/generativeai-downloads/images/chocolate.png", "https://storage.googleapis.com/generativeai-downloads/images/croissant.jpg", "https://storage.googleapis.com/generativeai-downloads/images/dog_form.jpg", "https://storage.googleapis.com/generativeai-downloads/images/factory.png", "https://storage.googleapis.com/generativeai-downloads/images/scones.jpg"); UsingHttpClientExample.readFromGetEndpointExample(urls, pipeline); pipeline.run(); }
public static CharSequence unescapeCsv(CharSequence value) { int length = checkNotNull(value, "value").length(); if (length == 0) { return value; } int last = length - 1; boolean quoted = isDoubleQuote(value.charAt(0)) && isDoubleQuote(value.charAt(last)) && length != 1; if (!quoted) { validateCsvFormat(value); return value; } StringBuilder unescaped = InternalThreadLocalMap.get().stringBuilder(); for (int i = 1; i < last; i++) { char current = value.charAt(i); if (current == DOUBLE_QUOTE) { if (isDoubleQuote(value.charAt(i + 1)) && (i + 1) != last) { // Followed by a double-quote but not the last character // Just skip the next double-quote i++; } else { // Not followed by a double-quote or the following double-quote is the last character throw newInvalidEscapedCsvFieldException(value, i); } } unescaped.append(current); } return unescaped.toString(); }
@Test public void unescapeCsvWithLFAndWithoutQuote() { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { unescapeCsv("\n"); } }); }
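The quoted branch of unescapeCsv above is not exercised by that test; a minimal hedged sketch, assuming the same static imports (unescapeCsv, assertEquals) as the surrounding tests:

@Test
public void unescapeCsvWithEscapedQuote() {
    // A quoted field "a""b" collapses the doubled quote to a single one.
    assertEquals("a\"b", unescapeCsv("\"a\"\"b\"").toString());
}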
protected void resolveProvider( FileDialogOperation op ) { if ( op.getProvider() == null ) { if ( isVfsPath( op.getPath() ) ) { op.setProvider( VFSFileProvider.TYPE ); } else if ( spoonSupplier.get().getRepository() != null ) { op.setProvider( RepositoryFileProvider.TYPE ); } else { op.setProvider( LocalFileProvider.TYPE ); } } }
@Test public void testResolveProvider_AlreadySet() throws Exception { // SETUP FileOpenSaveExtensionPoint testInstance = new FileOpenSaveExtensionPoint( null, null ); FileDialogOperation opAlreadySet = new FileDialogOperation( FILE_OP_DUMMY_COMMAND ); String providerNonProductionValue = "DontChangeMe"; // NON production value, just want to ensure it doesn't get overwritten opAlreadySet.setProvider( providerNonProductionValue ); opAlreadySet.setPath( "/tmp/someRandomPath" ); // EXECUTE testInstance.resolveProvider( opAlreadySet ); // VERIFY assertEquals( providerNonProductionValue, opAlreadySet.getProvider() ); }
@Override public BufferedSink writeUtf8(String string) throws IOException { if (closed) throw new IllegalStateException("closed"); buffer.writeUtf8(string); return emitCompleteSegments(); }
@Test public void incompleteSegmentsNotEmitted() throws Exception { Buffer sink = new Buffer(); BufferedSink bufferedSink = new RealBufferedSink(sink); bufferedSink.writeUtf8(repeat('a', Segment.SIZE * 3 - 1)); assertEquals(Segment.SIZE * 2, sink.size()); }
public static <T> byte[] encodeToByteArray(Coder<T> coder, T value) throws CoderException { return encodeToByteArray(coder, value, Coder.Context.OUTER); }
@Test public void testClosingCoderFailsWhenEncodingToByteArray() throws Exception { expectedException.expect(UnsupportedOperationException.class); expectedException.expectMessage("Caller does not own the underlying"); CoderUtils.encodeToByteArray(new ClosingCoder(), "test-value"); }
public <T> void resolve(T resolvable) { ParamResolver resolver = this; if (ParamScope.class.isAssignableFrom(resolvable.getClass())) { ParamScope newScope = (ParamScope) resolvable; resolver = newScope.applyOver(resolver); } resolveStringLeaves(resolvable, resolver); resolveNonStringLeaves(resolvable, resolver); resolveNodes(resolvable, resolver); }
@Test public void shouldResolve_ConfigValue_MappedAsObject() { SecurityConfig securityConfig = new SecurityConfig(); securityConfig.adminsConfig().add(new AdminUser(new CaseInsensitiveString("lo#{foo}"))); securityConfig.addRole(new RoleConfig(new CaseInsensitiveString("boo#{bar}"), new RoleUser(new CaseInsensitiveString("choo#{foo}")))); new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "ser"), param("bar", "zer"))), fieldCache).resolve(securityConfig); assertThat(CaseInsensitiveString.str(securityConfig.adminsConfig().get(0).getName()), is("loser")); assertThat(CaseInsensitiveString.str(securityConfig.getRoles().get(0).getName()), is("boozer")); assertThat(CaseInsensitiveString.str(securityConfig.getRoles().get(0).getUsers().get(0).getName()), is("chooser")); }
@Override public ExecuteContext before(ExecuteContext context) { Object object = context.getObject(); String serviceId = getServiceId(object).orElse(null); if (StringUtils.isBlank(serviceId)) { return context; } Object obj = context.getMemberFieldValue("serviceInstances"); if (obj instanceof Flux<?>) { List<Object> instances = getInstances((Flux<Object>) obj, object); if (CollectionUtils.isEmpty(instances)) { return context; } RequestData requestData = ThreadLocalUtils.getRequestData(); List<Object> targetInstances = loadBalancerService.getTargetInstances(serviceId, instances, requestData); context.skip(Flux.just(targetInstances)); } return context; }
@Test public void testBeforeWhenInvalid() { interceptor.before(context); ServiceInstanceListSupplier supplier = (ServiceInstanceListSupplier) context.getObject(); List<ServiceInstance> instances = supplier.get().blockFirst(); Assert.assertNotNull(instances); Assert.assertEquals(2, instances.size()); }
@VisibleForTesting void handleResponse(DiscoveryResponseData response) { ResourceType resourceType = response.getResourceType(); switch (resourceType) { case NODE: handleD2NodeResponse(response); break; case D2_URI_MAP: handleD2URIMapResponse(response); break; case D2_URI: handleD2URICollectionResponse(response); break; default: throw new AssertionError("Missing case in enum switch: " + resourceType); } }
@Test public void testHandleD2NodeResponseWithRemoval() { XdsClientImplFixture fixture = new XdsClientImplFixture(); fixture._nodeSubscriber.setData(NODE_UPDATE1); fixture._xdsClientImpl.handleResponse(DISCOVERY_RESPONSE_NODE_DATA_WITH_REMOVAL); fixture.verifyAckSent(1); verify(fixture._resourceWatcher).onChanged(eq(NODE_UPDATE1)); verify(fixture._nodeSubscriber).onRemoval(); XdsClient.NodeUpdate actualData = (XdsClient.NodeUpdate) fixture._nodeSubscriber.getData(); // removed resource will not overwrite the original valid data Assert.assertEquals(Objects.requireNonNull(actualData).getNodeData(), NODE_UPDATE1.getNodeData()); }
public String getExpectedValue() { return getPropertyAsString(EXPECTEDVALUE); }
@Test void testGetExpectedValue() { JSONPathAssertion instance = new JSONPathAssertion(); String expResult = ""; String result = instance.getExpectedValue(); assertEquals(expResult, result); }
@Override public ConsumeMessageDirectlyResult consumeMessageDirectly(MessageExt msg, String brokerName) { ConsumeMessageDirectlyResult result = new ConsumeMessageDirectlyResult(); result.setOrder(true); List<MessageExt> msgs = new ArrayList<>(); msgs.add(msg); MessageQueue mq = new MessageQueue(); mq.setBrokerName(brokerName); mq.setTopic(msg.getTopic()); mq.setQueueId(msg.getQueueId()); ConsumeOrderlyContext context = new ConsumeOrderlyContext(mq); this.defaultMQPushConsumerImpl.resetRetryAndNamespace(msgs, this.consumerGroup); final long beginTime = System.currentTimeMillis(); log.info("consumeMessageDirectly receive new message: {}", msg); try { ConsumeOrderlyStatus status = this.messageListener.consumeMessage(msgs, context); if (status != null) { switch (status) { case COMMIT: result.setConsumeResult(CMResult.CR_COMMIT); break; case ROLLBACK: result.setConsumeResult(CMResult.CR_ROLLBACK); break; case SUCCESS: result.setConsumeResult(CMResult.CR_SUCCESS); break; case SUSPEND_CURRENT_QUEUE_A_MOMENT: result.setConsumeResult(CMResult.CR_LATER); break; default: break; } } else { result.setConsumeResult(CMResult.CR_RETURN_NULL); } } catch (Throwable e) { result.setConsumeResult(CMResult.CR_THROW_EXCEPTION); result.setRemark(UtilAll.exceptionSimpleDesc(e)); log.warn("consumeMessageDirectly exception: {} Group: {} Msgs: {} MQ: {}", UtilAll.exceptionSimpleDesc(e), ConsumeMessagePopOrderlyService.this.consumerGroup, msgs, mq, e); } result.setAutoCommit(context.isAutoCommit()); result.setSpentTimeMills(System.currentTimeMillis() - beginTime); log.info("consumeMessageDirectly Result: {}", result); return result; }
@Test public void testConsumeMessageDirectlyWithCrThrowException() { when(messageListener.consumeMessage(any(), any(ConsumeOrderlyContext.class))).thenThrow(new RuntimeException("exception")); ConsumeMessageDirectlyResult actual = popService.consumeMessageDirectly(createMessageExt(), defaultBroker); assertEquals(CMResult.CR_THROW_EXCEPTION, actual.getConsumeResult()); }
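A hedged happy-path companion to the test above, reusing its mocks and helpers; it pins the SUCCESS-to-CR_SUCCESS mapping in consumeMessageDirectly:

@Test
public void testConsumeMessageDirectlyWithCrSuccess() {
    when(messageListener.consumeMessage(any(), any(ConsumeOrderlyContext.class)))
            .thenReturn(ConsumeOrderlyStatus.SUCCESS);
    ConsumeMessageDirectlyResult actual = popService.consumeMessageDirectly(createMessageExt(), defaultBroker);
    assertEquals(CMResult.CR_SUCCESS, actual.getConsumeResult());
}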
@CanIgnoreReturnValue public final Ordered containsAtLeast( @Nullable Object firstExpected, @Nullable Object secondExpected, @Nullable Object @Nullable ... restOfExpected) { return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected)); }
@Test public void iterableContainsAtLeastFailsWithSameToStringAndHeterogeneousListWithDuplicates() { expectFailureWhenTestingThat(asList(1, 2, 2L, 3L, 3L)).containsAtLeast(2L, 2L, 3, 3); assertFailureValue("missing (3)", "2 (java.lang.Long), 3 (java.lang.Integer) [2 copies]"); assertFailureValue( "though it did contain (3)", "2 (java.lang.Integer), 3 (java.lang.Long) [2 copies]"); }
@Override public RelativeRange apply(final Period period) { if (period != null) { return RelativeRange.Builder.builder() .from(period.withYears(0).withMonths(0).plusDays(period.getYears() * 365).plusDays(period.getMonths() * 30).toStandardSeconds().getSeconds()) .build(); } else { return null; } }
@Test void testDayConversion() { final RelativeRange result = converter.apply(Period.days(2)); verifyResult(result, 172800); }
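The converter above flattens months to 30 days and years to 365 days before converting to seconds; a hedged sketch making that approximation explicit, assuming the same verifyResult helper as the test above:

@Test
void testMonthAndYearApproximation() {
    // 1 month -> 30 days -> 2,592,000 seconds.
    verifyResult(converter.apply(Period.months(1)), 2592000);
    // 1 year -> 365 days -> 31,536,000 seconds.
    verifyResult(converter.apply(Period.years(1)), 31536000);
}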
public static Getter newFieldGetter(Object object, Getter parent, Field field, String modifier) throws Exception { return newGetter(object, parent, modifier, field.getType(), field::get, (t, et) -> new FieldGetter(parent, field, modifier, t, et)); }
@Test public void newFieldGetter_whenExtractingFromNonEmpty_Collection_FieldAndParentIsNonEmptyMultiResult_nullValueFirst_thenInferReturnType() throws Exception { OuterObject object = new OuterObject("name", new InnerObject("inner", null, 0, 1, 2, 3)); Getter parentGetter = GetterFactory.newFieldGetter(object, null, innersCollectionField, "[any]"); Getter innerObjectNameGetter = GetterFactory.newFieldGetter(object, parentGetter, innerAttributesCollectionField, "[any]"); Class<?> returnType = innerObjectNameGetter.getReturnType(); assertEquals(Integer.class, returnType); }
public static boolean analyzeRebalance(final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable) { return true; }
@Test public void testAnalyzeRebalance() { boolean result = ConsumerRunningInfo.analyzeRebalance(criTable); assertThat(result).isTrue(); }
@Override public List<FileEntriesLayer> createLayers() throws IOException { // Clear the exploded-artifact root first if (Files.exists(targetExplodedJarRoot)) { MoreFiles.deleteRecursively(targetExplodedJarRoot, RecursiveDeleteOption.ALLOW_INSECURE); } try (JarFile jarFile = new JarFile(jarPath.toFile())) { ZipUtil.unzip(jarPath, targetExplodedJarRoot, true); ZipEntry layerIndex = jarFile.getEntry(BOOT_INF + "/layers.idx"); if (layerIndex != null) { return createLayersForLayeredSpringBootJar(targetExplodedJarRoot); } Predicate<Path> isFile = Files::isRegularFile; // Non-snapshot layer Predicate<Path> isInBootInfLib = path -> path.startsWith(targetExplodedJarRoot.resolve(BOOT_INF).resolve("lib")); Predicate<Path> isSnapshot = path -> path.getFileName().toString().contains("SNAPSHOT"); Predicate<Path> isInBootInfLibAndIsNotSnapshot = isInBootInfLib.and(isSnapshot.negate()); Predicate<Path> nonSnapshotPredicate = isFile.and(isInBootInfLibAndIsNotSnapshot); FileEntriesLayer nonSnapshotLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.DEPENDENCIES, targetExplodedJarRoot, nonSnapshotPredicate, JarLayers.APP_ROOT); // Snapshot layer Predicate<Path> isInBootInfLibAndIsSnapshot = isInBootInfLib.and(isSnapshot); Predicate<Path> snapshotPredicate = isFile.and(isInBootInfLibAndIsSnapshot); FileEntriesLayer snapshotLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.SNAPSHOT_DEPENDENCIES, targetExplodedJarRoot, snapshotPredicate, JarLayers.APP_ROOT); // Spring-boot-loader layer. Predicate<Path> isLoader = path -> path.startsWith(targetExplodedJarRoot.resolve("org")); Predicate<Path> loaderPredicate = isFile.and(isLoader); FileEntriesLayer loaderLayer = ArtifactLayers.getDirectoryContentsAsLayer( "spring-boot-loader", targetExplodedJarRoot, loaderPredicate, JarLayers.APP_ROOT); // Classes layer. Predicate<Path> isClass = path -> path.getFileName().toString().endsWith(".class"); Predicate<Path> isInBootInfClasses = path -> path.startsWith(targetExplodedJarRoot.resolve(BOOT_INF).resolve("classes")); Predicate<Path> classesPredicate = isInBootInfClasses.and(isClass); FileEntriesLayer classesLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.CLASSES, targetExplodedJarRoot, classesPredicate, JarLayers.APP_ROOT); // Resources layer. Predicate<Path> isInMetaInf = path -> path.startsWith(targetExplodedJarRoot.resolve("META-INF")); Predicate<Path> isResource = isInMetaInf.or(isInBootInfClasses.and(isClass.negate())); Predicate<Path> resourcesPredicate = isFile.and(isResource); FileEntriesLayer resourcesLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.RESOURCES, targetExplodedJarRoot, resourcesPredicate, JarLayers.APP_ROOT); return Arrays.asList( nonSnapshotLayer, loaderLayer, snapshotLayer, resourcesLayer, classesLayer); } }
@Test public void testCreateLayers_layered_singleEmptyLayerListed() throws IOException, URISyntaxException { // BOOT-INF/layers.idx for this spring-boot jar is as follows: // - "dependencies": // - "BOOT-INF/lib/dependency1.jar" // - "BOOT-INF/lib/dependency2.jar" // - "spring-boot-loader": // - "org/" // - "snapshot-dependencies": // - "application": // - "BOOT-INF/classes/" // - "META-INF/" Path springBootJar = Paths.get(Resources.getResource(SPRING_BOOT_LAYERED_WITH_EMPTY_LAYER).toURI()); Path destDir = temporaryFolder.newFolder().toPath(); SpringBootExplodedProcessor springBootExplodedModeProcessor = new SpringBootExplodedProcessor(springBootJar, destDir, JAR_JAVA_VERSION); List<FileEntriesLayer> layers = springBootExplodedModeProcessor.createLayers(); assertThat(layers.size()).isEqualTo(3); FileEntriesLayer nonSnapshotLayer = layers.get(0); FileEntriesLayer loaderLayer = layers.get(1); FileEntriesLayer applicationLayer = layers.get(2); assertThat(nonSnapshotLayer.getName()).isEqualTo("dependencies"); assertThat( nonSnapshotLayer.getEntries().stream() .map(FileEntry::getExtractionPath) .collect(Collectors.toList())) .containsExactly( AbsoluteUnixPath.get("/app/BOOT-INF/lib/dependency1.jar"), AbsoluteUnixPath.get("/app/BOOT-INF/lib/dependency2.jar")); assertThat(loaderLayer.getName()).isEqualTo("spring-boot-loader"); assertThat( loaderLayer.getEntries().stream() .map(FileEntry::getExtractionPath) .collect(Collectors.toList())) .containsExactly( AbsoluteUnixPath.get("/app/org/springframework/boot/loader/data/data1.class"), AbsoluteUnixPath.get("/app/org/springframework/boot/loader/launcher1.class")); assertThat(applicationLayer.getName()).isEqualTo("application"); assertThat( applicationLayer.getEntries().stream() .map(FileEntry::getExtractionPath) .collect(Collectors.toList())) .containsExactly( AbsoluteUnixPath.get("/app/BOOT-INF/classes/class1.class"), AbsoluteUnixPath.get("/app/BOOT-INF/classes/classDirectory/class2.class"), AbsoluteUnixPath.get("/app/META-INF/MANIFEST.MF")); }
public Set<Integer> nodesThatShouldBeDown(ClusterState state) { return calculate(state).nodesThatShouldBeDown(); }
@Test void implicitly_downed_node_at_state_end_is_counted_as_explicitly_down() { GroupAvailabilityCalculator calc = calcForHierarchicCluster( DistributionBuilder.withGroups(3).eachWithNodeCount(2), 0.99); assertThat(calc.nodesThatShouldBeDown(clusterState( "distributor:6 storage:5")), equalTo(indices(4))); }
public static Future<Integer> authTlsHash(SecretOperator secretOperations, String namespace, KafkaClientAuthentication auth, List<CertSecretSource> certSecretSources) { Future<Integer> tlsFuture; if (certSecretSources == null || certSecretSources.isEmpty()) { tlsFuture = Future.succeededFuture(0); } else { // get all TLS trusted certs, compute hash from each of them, sum hashes tlsFuture = Future.join(certSecretSources.stream().map(certSecretSource -> getCertificateAsync(secretOperations, namespace, certSecretSource) .compose(cert -> Future.succeededFuture(cert.hashCode()))).collect(Collectors.toList())) .compose(hashes -> Future.succeededFuture(hashes.list().stream().mapToInt(e -> (int) e).sum())); } if (auth == null) { return tlsFuture; } else { // compute hash from Auth if (auth instanceof KafkaClientAuthenticationScram) { // only passwordSecret can be changed return tlsFuture.compose(tlsHash -> getPasswordAsync(secretOperations, namespace, auth) .compose(password -> Future.succeededFuture(password.hashCode() + tlsHash))); } else if (auth instanceof KafkaClientAuthenticationPlain) { // only passwordSecret can be changed return tlsFuture.compose(tlsHash -> getPasswordAsync(secretOperations, namespace, auth) .compose(password -> Future.succeededFuture(password.hashCode() + tlsHash))); } else if (auth instanceof KafkaClientAuthenticationTls) { // custom cert can be used (and changed) return ((KafkaClientAuthenticationTls) auth).getCertificateAndKey() == null ? tlsFuture : tlsFuture.compose(tlsHash -> getCertificateAndKeyAsync(secretOperations, namespace, (KafkaClientAuthenticationTls) auth) .compose(crtAndKey -> Future.succeededFuture(crtAndKey.certAsBase64String().hashCode() + crtAndKey.keyAsBase64String().hashCode() + tlsHash))); } else if (auth instanceof KafkaClientAuthenticationOAuth) { List<Future<Integer>> futureList = ((KafkaClientAuthenticationOAuth) auth).getTlsTrustedCertificates() == null ? new ArrayList<>() : ((KafkaClientAuthenticationOAuth) auth).getTlsTrustedCertificates().stream().map(certSecretSource -> getCertificateAsync(secretOperations, namespace, certSecretSource) .compose(cert -> Future.succeededFuture(cert.hashCode()))).collect(Collectors.toList()); futureList.add(tlsFuture); futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getAccessToken())); futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getClientSecret())); futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getRefreshToken())); return Future.join(futureList) .compose(hashes -> Future.succeededFuture(hashes.list().stream().mapToInt(e -> (int) e).sum())); } else { // unknown Auth type return tlsFuture; } } }
@Test void testAuthTlsPlainSecretAndPasswordFound() { SecretOperator secretOperator = mock(SecretOperator.class); Map<String, String> data = new HashMap<>(); data.put("passwordKey", "my-password"); Secret secret = new Secret(); secret.setData(data); CompletionStage<Secret> cf = CompletableFuture.supplyAsync(() -> secret); when(secretOperator.getAsync(anyString(), anyString())).thenReturn(Future.fromCompletionStage(cf)); KafkaClientAuthenticationPlain auth = new KafkaClientAuthenticationPlain(); PasswordSecretSource passwordSecretSource = new PasswordSecretSource(); passwordSecretSource.setSecretName("my-secret"); passwordSecretSource.setPassword("passwordKey"); auth.setPasswordSecret(passwordSecretSource); Future<Integer> result = VertxUtil.authTlsHash(secretOperator, "anyNamespace", auth, List.of()); result.onComplete(handler -> { assertTrue(handler.succeeded()); assertEquals("my-password".hashCode(), handler.result()); }); }
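The fast path of authTlsHash above (no trusted certs, no authentication) resolves to a hash of 0 without touching any secrets; a hedged sketch in the style of the test above, assuming Mockito's verify/never are in scope:

@Test
void testAuthTlsHashNullAuthAndNoCerts() {
    SecretOperator secretOperator = mock(SecretOperator.class);
    Future<Integer> result = VertxUtil.authTlsHash(secretOperator, "anyNamespace", null, List.of());
    result.onComplete(handler -> {
        assertTrue(handler.succeeded());
        assertEquals(0, handler.result());
    });
    // No secret lookups should have happened on this path.
    verify(secretOperator, never()).getAsync(anyString(), anyString());
}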
public void poll(RequestFuture<?> future) { while (!future.isDone()) poll(time.timer(Long.MAX_VALUE), future); }
@Test public void testInvalidTopicExceptionPropagatedFromMetadata() { MetadataResponse metadataResponse = RequestTestUtils.metadataUpdateWith("clusterId", 1, Collections.singletonMap("topic", Errors.INVALID_TOPIC_EXCEPTION), Collections.emptyMap()); metadata.updateWithCurrentRequestVersion(metadataResponse, false, time.milliseconds()); assertThrows(InvalidTopicException.class, () -> consumerClient.poll(time.timer(Duration.ZERO))); }
public T run() throws Exception { try { return execute(); } catch(Exception e) { if (e.getClass().equals(retryExceptionType)){ tries++; if (MAX_RETRIES == tries) { throw e; } else { return run(); } } else { throw e; } } }
@Test public void testRetryFailure() { Retry<Void> retriable = new Retry<Void>(NullPointerException.class) { @Override public Void execute() { throw new RuntimeException(); } }; try { retriable.run(); Assert.fail(); } catch (Exception e) { Assert.assertEquals(RuntimeException.class, e.getClass()); } }
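A hedged success-path counterpart for Retry.run() above: when execute() fails with the configured retryable type fewer than MAX_RETRIES times, run() recurses and eventually returns. Assumes MAX_RETRIES is at least 3 and that java.util.concurrent.atomic.AtomicInteger is available:

@Test
public void testRetrySuccessAfterTransientFailures() throws Exception {
    final AtomicInteger attempts = new AtomicInteger();
    Retry<String> retriable = new Retry<String>(IllegalStateException.class) {
        @Override
        public String execute() {
            // Fail twice with the retryable type, then succeed.
            if (attempts.incrementAndGet() < 3) {
                throw new IllegalStateException("transient");
            }
            return "ok";
        }
    };
    Assert.assertEquals("ok", retriable.run());
    Assert.assertEquals(3, attempts.get());
}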
public Optional<Throwable> run(String... arguments) { try { if (isFlag(HELP, arguments)) { parser.printHelp(stdOut); } else if (isFlag(VERSION, arguments)) { parser.printVersion(stdOut); } else { final Namespace namespace = parser.parseArgs(arguments); final Command command = requireNonNull(commands.get(namespace.getString(COMMAND_NAME_ATTR)), "Command is not found"); try { command.run(bootstrap, namespace); } catch (Throwable e) { // The command failed to run, and the command knows // best how to cleanup / debug exception command.onError(this, namespace, e); return Optional.of(e); } } return Optional.empty(); } catch (HelpScreenException ignored) { // This exception is triggered when the user passes in a help flag. // Return true to signal that the process executed normally. return Optional.empty(); } catch (ArgumentParserException e) { stdErr.println(e.getMessage()); e.getParser().printHelp(stdErr); return Optional.of(e); } }
@Test void handlesLongHelpSubcommands() throws Exception { assertThat(cli.run("check", "--help")) .isEmpty(); assertThat(stdOut) .hasToString(String.format( "usage: java -jar dw-thing.jar check [-h] [file]%n" + "%n" + "Parses and validates the configuration file%n" + "%n" + "positional arguments:%n" + " file application configuration file%n" + "%n" + "named arguments:%n" + " -h, --help show this help message and exit%n" )); assertThat(stdErr.toString()) .isEmpty(); verify(command, never()).run(any(), any(Namespace.class), any(Configuration.class)); }
@Override public AdminUserDO authenticate(String username, String password) { final LoginLogTypeEnum logTypeEnum = LoginLogTypeEnum.LOGIN_USERNAME; // Check whether the account exists AdminUserDO user = userService.getUserByUsername(username); if (user == null) { createLoginLog(null, username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS); } if (!userService.isPasswordMatch(password, user.getPassword())) { createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS); } // Check whether the user is disabled if (CommonStatusEnum.isDisable(user.getStatus())) { createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.USER_DISABLED); throw exception(AUTH_LOGIN_USER_DISABLED); } return user; }
@Test public void testAuthenticate_success() { // Prepare parameters String username = randomString(); String password = randomString(); // Mock user data AdminUserDO user = randomPojo(AdminUserDO.class, o -> o.setUsername(username) .setPassword(password).setStatus(CommonStatusEnum.ENABLE.getStatus())); when(userService.getUserByUsername(eq(username))).thenReturn(user); // Mock password match when(userService.isPasswordMatch(eq(password), eq(user.getPassword()))).thenReturn(true); // Invoke AdminUserDO loginUser = authService.authenticate(username, password); // Verify assertPojoEquals(user, loginUser); }
public SerializableFunction<Row, T> getFromRowFunction() { return fromRowFunction; }
@Test public void testNestedRowToProto() throws InvalidProtocolBufferException { ProtoDynamicMessageSchema schemaProvider = schemaFromDescriptor(Nested.getDescriptor()); SerializableFunction<Row, DynamicMessage> fromRow = schemaProvider.getFromRowFunction(); Nested proto = parseFrom(fromRow.apply(NESTED_ROW).toString(), Nested.newBuilder()).build(); assertEquals(NESTED_PROTO, proto); }
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .precision(column.getColumnLength()) .length(column.getColumnLength()) .nullable(column.isNullable()) .comment(column.getComment()) .scale(column.getScale()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case NULL: builder.columnType(IRIS_NULL); builder.dataType(IRIS_NULL); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(String.format("%s(%s)", IRIS_VARCHAR, MAX_VARCHAR_LENGTH)); builder.dataType(IRIS_VARCHAR); } else if (column.getColumnLength() < MAX_VARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", IRIS_VARCHAR, column.getColumnLength())); builder.dataType(IRIS_VARCHAR); } else { builder.columnType(IRIS_LONG_VARCHAR); builder.dataType(IRIS_LONG_VARCHAR); } break; case BOOLEAN: builder.columnType(IRIS_BIT); builder.dataType(IRIS_BIT); break; case TINYINT: builder.columnType(IRIS_TINYINT); builder.dataType(IRIS_TINYINT); break; case SMALLINT: builder.columnType(IRIS_SMALLINT); builder.dataType(IRIS_SMALLINT); break; case INT: builder.columnType(IRIS_INTEGER); builder.dataType(IRIS_INTEGER); break; case BIGINT: builder.columnType(IRIS_BIGINT); builder.dataType(IRIS_BIGINT); break; case FLOAT: builder.columnType(IRIS_FLOAT); builder.dataType(IRIS_FLOAT); break; case DOUBLE: builder.columnType(IRIS_DOUBLE); builder.dataType(IRIS_DOUBLE); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } if (precision < scale) { precision = scale; } if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = MAX_SCALE; precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } builder.columnType(String.format("%s(%s,%s)", IRIS_DECIMAL, precision, scale)); builder.dataType(IRIS_DECIMAL); builder.precision(precision); builder.scale(scale); break; case BYTES: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(IRIS_LONG_BINARY); builder.dataType(IRIS_LONG_BINARY); } else if (column.getColumnLength() < MAX_BINARY_LENGTH) { builder.dataType(IRIS_BINARY); builder.columnType( String.format("%s(%s)", IRIS_BINARY, column.getColumnLength())); } else { builder.columnType(IRIS_LONG_BINARY); 
builder.dataType(IRIS_LONG_BINARY); } break; case DATE: builder.columnType(IRIS_DATE); builder.dataType(IRIS_DATE); break; case TIME: builder.dataType(IRIS_TIME); if (Objects.nonNull(column.getScale()) && column.getScale() > 0) { Integer timeScale = column.getScale(); if (timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to time({})", column.getName(), column.getScale(), MAX_TIME_SCALE, timeScale); } builder.columnType(String.format("%s(%s)", IRIS_TIME, timeScale)); builder.scale(timeScale); } else { builder.columnType(IRIS_TIME); } break; case TIMESTAMP: builder.columnType(IRIS_TIMESTAMP2); builder.dataType(IRIS_TIMESTAMP2); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.IRIS, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
@Test public void testReconvertInt() { Column column = PhysicalColumn.builder().name("test").dataType(BasicType.INT_TYPE).build(); BasicTypeDefine typeDefine = IrisTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(IrisTypeConverter.IRIS_INTEGER, typeDefine.getColumnType()); Assertions.assertEquals(IrisTypeConverter.IRIS_INTEGER, typeDefine.getDataType()); }
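A hedged sketch of the no-length STRING branch of reconvert above; it assumes IRIS_VARCHAR is exposed on the converter the way IRIS_INTEGER is in the previous test, and asserts only the data type since the value of MAX_VARCHAR_LENGTH is not shown here:

@Test
public void testReconvertStringWithoutLength() {
    Column column =
            PhysicalColumn.builder().name("test").dataType(BasicType.STRING_TYPE).build();
    BasicTypeDefine typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
    // With no column length, the converter falls back to VARCHAR(MAX_VARCHAR_LENGTH).
    Assertions.assertEquals(IrisTypeConverter.IRIS_VARCHAR, typeDefine.getDataType());
}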
public static Builder custom() { return new Builder(); }
@Test(expected = IllegalArgumentException.class) public void zeroFailureRateThresholdShouldFail() { custom().failureRateThreshold(0).build(); }
public static void refreshSuperUserGroupsConfiguration() { //load server side configuration; refreshSuperUserGroupsConfiguration(new Configuration()); }
@Test public void testNetgroups () throws IOException{ if(!NativeCodeLoader.isNativeCodeLoaded()) { LOG.info("Not testing netgroups, " + "this test only runs when native code is compiled"); return; } String groupMappingClassName = System.getProperty("TestProxyUsersGroupMapping"); if(groupMappingClassName == null) { LOG.info("Not testing netgroups, no group mapping class specified, " + "use -DTestProxyUsersGroupMapping=$className to specify " + "group mapping class (must implement GroupMappingServiceProvider " + "interface and support netgroups)"); return; } LOG.info("Testing netgroups using: " + groupMappingClassName); Configuration conf = new Configuration(); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING, groupMappingClassName); conf.set( DefaultImpersonationProvider.getTestProvider(). getProxySuperuserGroupConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(NETGROUP_NAMES))); conf.set( DefaultImpersonationProvider.getTestProvider(). getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); Groups groups = Groups.getUserToGroupsMappingService(conf); // try proxying a group that's allowed UserGroupInformation realUserUgi = UserGroupInformation .createRemoteUser(REAL_USER_NAME); UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting( PROXY_USER_NAME, realUserUgi, groups.getGroups(PROXY_USER_NAME).toArray( new String[groups.getGroups(PROXY_USER_NAME).size()])); assertAuthorized(proxyUserUgi, PROXY_IP); }
@Override public MaterialPollResult responseMessageForLatestRevisionsSince(String responseBody) { if (isEmpty(responseBody)) return new MaterialPollResult(); Map responseBodyMap = getResponseMap(responseBody); return new MaterialPollResult(toMaterialDataMap(responseBodyMap), toSCMRevisions(responseBodyMap)); }
@Test public void shouldBuildSCMDataFromLatestRevisionsSinceResponse() throws Exception { String responseBodyWithSCMData = "{\"revisions\":[],\"scm-data\":{\"key-one\":\"value-one\"}}"; MaterialPollResult pollResult = messageHandler.responseMessageForLatestRevisionsSince(responseBodyWithSCMData); Map<String, String> scmData = new HashMap<>(); scmData.put("key-one", "value-one"); assertThat(pollResult.getMaterialData(), is(scmData)); assertThat(pollResult.getRevisions().isEmpty(), is(true)); }
public Node parse() throws ScanException { if (tokenList == null || tokenList.isEmpty()) return null; return E(); }
@Test public void literalWithAccolade0() throws ScanException { Tokenizer tokenizer = new Tokenizer("{}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); Node witness = new Node(Node.Type.LITERAL, "{"); witness.next = new Node(Node.Type.LITERAL, "}"); assertEquals(witness, node); }
@Override public void setApplicationContext(@NonNull final ApplicationContext applicationContext) throws BeansException { SpringBeanUtils.getInstance().setApplicationContext(applicationContext); ShenyuConfig shenyuConfig = SpringBeanUtils.getInstance().getBean(ShenyuConfig.class); Singleton.INST.single(ShenyuConfig.class, shenyuConfig); }
@Test public void applicationContextAwareTest() { ConfigurableApplicationContext applicationContext = mock(ConfigurableApplicationContext.class); SpringBeanUtils.getInstance().setApplicationContext(applicationContext); when(SpringBeanUtils.getInstance().getBean(ShenyuConfig.class)) .thenReturn(new ShenyuConfig()); }
public static String hashpw(String password, String salt) throws IllegalArgumentException { BCrypt B; String real_salt; byte passwordb[], saltb[], hashed[]; char minor = (char) 0; int rounds, off = 0; StringBuilder rs = new StringBuilder(); if (salt == null) { throw new IllegalArgumentException("salt cannot be null"); } int saltLength = salt.length(); if (saltLength < 28) { throw new IllegalArgumentException("Invalid salt"); } if (salt.charAt(0) != '$' || salt.charAt(1) != '2') { throw new IllegalArgumentException("Invalid salt version"); } if (salt.charAt(2) == '$') { off = 3; } else { minor = salt.charAt(2); if (minor != 'a' || salt.charAt(3) != '$') { throw new IllegalArgumentException("Invalid salt revision"); } off = 4; } if (saltLength - off < 25) { throw new IllegalArgumentException("Invalid salt"); } // Extract number of rounds if (salt.charAt(off + 2) > '$') { throw new IllegalArgumentException("Missing salt rounds"); } rounds = Integer.parseInt(salt.substring(off, off + 2)); real_salt = salt.substring(off + 3, off + 25); try { passwordb = (password + (minor >= 'a' ? "\000" : "")).getBytes("UTF-8"); } catch (UnsupportedEncodingException uee) { throw new AssertionError("UTF-8 is not supported"); } saltb = decode_base64(real_salt, BCRYPT_SALT_LEN); B = new BCrypt(); hashed = B.crypt_raw(passwordb, saltb, rounds); rs.append("$2"); if (minor >= 'a') { rs.append(minor); } rs.append("$"); if (rounds < 10) { rs.append("0"); } rs.append(rounds); rs.append("$"); encode_base64(saltb, saltb.length, rs); encode_base64(hashed, bf_crypt_ciphertext.length * 4 - 1, rs); return rs.toString(); }
@Test public void testHashpwInvalidSaltRevision2() throws IllegalArgumentException { thrown.expect(IllegalArgumentException.class); BCrypt.hashpw("foo", "$2a+10$....................."); }
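For balance, a hedged valid-salt sketch for hashpw above; it assumes a jBCrypt-style BCrypt.gensalt(int) helper exists alongside hashpw, which is not shown in the source:

@Test
public void testHashpwValidSaltEmbedsRounds() {
    String salt = BCrypt.gensalt(10); // hypothetical helper yielding a "$2a$10$..." salt
    String hashed = BCrypt.hashpw("foo", salt);
    // The hash re-embeds the version marker and the zero-padded round count.
    Assert.assertTrue(hashed.startsWith("$2a$10$"));
}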
public static boolean isCompressData(byte[] bytes) { if (bytes != null && bytes.length > 2) { int header = ((bytes[0] & 0xff)) | (bytes[1] & 0xff) << 8; return GZIPInputStream.GZIP_MAGIC == header; } return false; }
@Test public void testIsCompressData() { Assertions.assertFalse(CompressUtil.isCompressData(null)); Assertions.assertFalse(CompressUtil.isCompressData(new byte[0])); Assertions.assertFalse(CompressUtil.isCompressData(new byte[]{31, 11})); Assertions.assertFalse( CompressUtil.isCompressData(new byte[]{31, 11, 0})); Assertions.assertTrue( CompressUtil.isCompressData(new byte[]{31, -117, 0})); }
public static short translateBucketAcl(GSAccessControlList acl, String userId) { short mode = (short) 0; for (GrantAndPermission gp : acl.getGrantAndPermissions()) { Permission perm = gp.getPermission(); GranteeInterface grantee = gp.getGrantee(); if (perm.equals(Permission.PERMISSION_READ)) { if (isUserIdInGrantee(grantee, userId)) { // If the bucket is readable by the user, add r and x to the owner mode. mode |= (short) 0500; } } else if (perm.equals(Permission.PERMISSION_WRITE)) { if (isUserIdInGrantee(grantee, userId)) { // If the bucket is writable by the user, +w to the owner mode. mode |= (short) 0200; } } else if (perm.equals(Permission.PERMISSION_FULL_CONTROL)) { if (isUserIdInGrantee(grantee, userId)) { // If the user has full control to the bucket, +rwx to the owner mode. mode |= (short) 0700; } } } return mode; }
@Test public void translateEveryoneFullPermission() { GroupGrantee allUsersGrantee = GroupGrantee.ALL_USERS; mAcl.grantPermission(allUsersGrantee, Permission.PERMISSION_FULL_CONTROL); assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, ID)); assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, OTHER_ID)); }
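A hedged companion for translateBucketAcl above covering the READ-only branch, reusing the fixture of the previous test:

@Test
public void translateEveryoneReadPermission() {
    GroupGrantee allUsersGrantee = GroupGrantee.ALL_USERS;
    mAcl.grantPermission(allUsersGrantee, Permission.PERMISSION_READ);
    // READ maps to r-x (0500) on the owner bits, for any user id.
    assertEquals((short) 0500, GCSUtils.translateBucketAcl(mAcl, ID));
    assertEquals((short) 0500, GCSUtils.translateBucketAcl(mAcl, OTHER_ID));
}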
@Override public void setResult(CeTaskResult taskResult) { requireNonNull(taskResult, "taskResult can not be null"); checkState(this.result == null, "CeTaskResult has already been set in the holder"); this.result = taskResult; }
@Test public void setResult_throws_NPE_if_CeTaskResult_argument_is_null() { assertThatThrownBy(() -> underTest.setResult(null)) .isInstanceOf(NullPointerException.class) .hasMessage("taskResult can not be null"); }
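The "already set" guard in setResult above also deserves a pin; a hedged sketch, assuming a Mockito mock of CeTaskResult is acceptable in this test class:

@Test
public void setResult_throws_ISE_if_result_is_already_set() {
    CeTaskResult taskResult = mock(CeTaskResult.class);
    underTest.setResult(taskResult);
    assertThatThrownBy(() -> underTest.setResult(taskResult))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("CeTaskResult has already been set in the holder");
}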
@SuppressWarnings("MethodLength") public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header) { final MessageHeaderDecoder headerDecoder = decoders.header; headerDecoder.wrap(buffer, offset); final int schemaId = headerDecoder.schemaId(); if (schemaId != MessageHeaderDecoder.SCHEMA_ID) { throw new ArchiveException("expected schemaId=" + MessageHeaderDecoder.SCHEMA_ID + ", actual=" + schemaId); } final int templateId = headerDecoder.templateId(); switch (templateId) { case ConnectRequestDecoder.TEMPLATE_ID: { final ConnectRequestDecoder decoder = decoders.connectRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final ControlSession session = conductor.newControlSession( image.correlationId(), decoder.correlationId(), decoder.responseStreamId(), decoder.version(), decoder.responseChannel(), ArrayUtil.EMPTY_BYTE_ARRAY, this); controlSessionByIdMap.put(session.sessionId(), session); break; } case CloseSessionRequestDecoder.TEMPLATE_ID: { final CloseSessionRequestDecoder decoder = decoders.closeSessionRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final ControlSession session = controlSessionByIdMap.get(controlSessionId); if (null != session) { session.abort(); } break; } case StartRecordingRequestDecoder.TEMPLATE_ID: { final StartRecordingRequestDecoder decoder = decoders.startRecordingRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId); if (null != controlSession) { controlSession.onStartRecording( correlationId, decoder.streamId(), decoder.sourceLocation(), false, decoder.channel()); } break; } case StopRecordingRequestDecoder.TEMPLATE_ID: { final StopRecordingRequestDecoder decoder = decoders.stopRecordingRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId); if (null != controlSession) { controlSession.onStopRecording(correlationId, decoder.streamId(), decoder.channel()); } break; } case ReplayRequestDecoder.TEMPLATE_ID: { final ReplayRequestDecoder decoder = decoders.replayRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ? decoder.fileIoMaxLength() : Aeron.NULL_VALUE; final long recordingId = decoder.recordingId(); final long position = decoder.position(); final long replayLength = decoder.length(); final int replayStreamId = decoder.replayStreamId(); final long replayToken = REPLAY_TOKEN_VERSION <= headerDecoder.version() ? 
decoder.replayToken() : Aeron.NULL_VALUE; final String replayChannel = decoder.replayChannel(); final ChannelUri channelUri = ChannelUri.parse(replayChannel); final ControlSession controlSession = setupSessionAndChannelForReplay( channelUri, replayToken, recordingId, correlationId, controlSessionId, templateId); if (null != controlSession) { controlSession.onStartReplay( correlationId, recordingId, position, replayLength, fileIoMaxLength, replayStreamId, channelUri.toString()); } break; } case StopReplayRequestDecoder.TEMPLATE_ID: { final StopReplayRequestDecoder decoder = decoders.stopReplayRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId); if (null != controlSession) { controlSession.onStopReplay(correlationId, decoder.replaySessionId()); } break; } case ListRecordingsRequestDecoder.TEMPLATE_ID: { final ListRecordingsRequestDecoder decoder = decoders.listRecordingsRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId); if (null != controlSession) { controlSession.onListRecordings(correlationId, decoder.fromRecordingId(), decoder.recordCount()); } break; } case ListRecordingsForUriRequestDecoder.TEMPLATE_ID: { final ListRecordingsForUriRequestDecoder decoder = decoders.listRecordingsForUriRequest; decoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, headerDecoder.blockLength(), headerDecoder.version()); final long controlSessionId = decoder.controlSessionId(); final long correlationId = decoder.correlationId(); final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId); if (null != controlSession) { final int channelLength = decoder.channelLength(); final byte[] bytes = 0 == channelLength ? 
                    ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
                decoder.getChannel(bytes, 0, channelLength);

                controlSession.onListRecordingsForUri(
                    correlationId,
                    decoder.fromRecordingId(),
                    decoder.recordCount(),
                    decoder.streamId(),
                    bytes);
            }
            break;
        }

        case ListRecordingRequestDecoder.TEMPLATE_ID: {
            final ListRecordingRequestDecoder decoder = decoders.listRecordingRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onListRecording(correlationId, decoder.recordingId());
            }
            break;
        }

        case ExtendRecordingRequestDecoder.TEMPLATE_ID: {
            final ExtendRecordingRequestDecoder decoder = decoders.extendRecordingRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onExtendRecording(
                    correlationId,
                    decoder.recordingId(),
                    decoder.streamId(),
                    decoder.sourceLocation(),
                    false,
                    decoder.channel());
            }
            break;
        }

        case RecordingPositionRequestDecoder.TEMPLATE_ID: {
            final RecordingPositionRequestDecoder decoder = decoders.recordingPositionRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onGetRecordingPosition(correlationId, decoder.recordingId());
            }
            break;
        }

        case TruncateRecordingRequestDecoder.TEMPLATE_ID: {
            final TruncateRecordingRequestDecoder decoder = decoders.truncateRecordingRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onTruncateRecording(correlationId, decoder.recordingId(), decoder.position());
            }
            break;
        }

        case StopRecordingSubscriptionRequestDecoder.TEMPLATE_ID: {
            final StopRecordingSubscriptionRequestDecoder decoder = decoders.stopRecordingSubscriptionRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStopRecordingSubscription(correlationId, decoder.subscriptionId());
            }
            break;
        }

        case StopPositionRequestDecoder.TEMPLATE_ID: {
            final StopPositionRequestDecoder decoder = decoders.stopPositionRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onGetStopPosition(correlationId, decoder.recordingId());
            }
            break;
        }

        case FindLastMatchingRecordingRequestDecoder.TEMPLATE_ID: {
            final FindLastMatchingRecordingRequestDecoder decoder = decoders.findLastMatchingRecordingRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                final int channelLength = decoder.channelLength();
                final byte[] bytes = 0 == channelLength ?
                    ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
                decoder.getChannel(bytes, 0, channelLength);

                controlSession.onFindLastMatchingRecording(
                    correlationId,
                    decoder.minRecordingId(),
                    decoder.sessionId(),
                    decoder.streamId(),
                    bytes);
            }
            break;
        }

        case ListRecordingSubscriptionsRequestDecoder.TEMPLATE_ID: {
            final ListRecordingSubscriptionsRequestDecoder decoder = decoders.listRecordingSubscriptionsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onListRecordingSubscriptions(
                    correlationId,
                    decoder.pseudoIndex(),
                    decoder.subscriptionCount(),
                    decoder.applyStreamId() == BooleanType.TRUE,
                    decoder.streamId(),
                    decoder.channel());
            }
            break;
        }

        case BoundedReplayRequestDecoder.TEMPLATE_ID: {
            final BoundedReplayRequestDecoder decoder = decoders.boundedReplayRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final long position = decoder.position();
            final long replayLength = decoder.length();
            final long recordingId = decoder.recordingId();
            final int limitCounterId = decoder.limitCounterId();
            final int replayStreamId = decoder.replayStreamId();
            final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ?
                decoder.fileIoMaxLength() : Aeron.NULL_VALUE;
            final long replayToken = REPLAY_TOKEN_VERSION <= headerDecoder.version() ?
                decoder.replayToken() : Aeron.NULL_VALUE;
            final String replayChannel = decoder.replayChannel();
            final ChannelUri channelUri = ChannelUri.parse(replayChannel);

            final ControlSession controlSession = setupSessionAndChannelForReplay(
                channelUri, replayToken, recordingId, correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStartBoundedReplay(
                    correlationId,
                    recordingId,
                    position,
                    replayLength,
                    limitCounterId,
                    fileIoMaxLength,
                    replayStreamId,
                    channelUri.toString());
            }
            break;
        }

        case StopAllReplaysRequestDecoder.TEMPLATE_ID: {
            final StopAllReplaysRequestDecoder decoder = decoders.stopAllReplaysRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStopAllReplays(correlationId, decoder.recordingId());
            }
            break;
        }

        case ReplicateRequestDecoder.TEMPLATE_ID: {
            final ReplicateRequestDecoder decoder = decoders.replicateRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onReplicate(
                    correlationId,
                    decoder.srcRecordingId(),
                    decoder.dstRecordingId(),
                    AeronArchive.NULL_POSITION,
                    Aeron.NULL_VALUE,
                    Aeron.NULL_VALUE,
                    decoder.srcControlStreamId(),
                    Aeron.NULL_VALUE,
                    Aeron.NULL_VALUE,
                    decoder.srcControlChannel(),
                    decoder.liveDestination(),
                    "",
                    NullCredentialsSupplier.NULL_CREDENTIAL,
                    "");
            }
            break;
        }

        case StopReplicationRequestDecoder.TEMPLATE_ID: {
            final StopReplicationRequestDecoder decoder = decoders.stopReplicationRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStopReplication(correlationId, decoder.replicationId());
            }
            break;
        }

        case StartPositionRequestDecoder.TEMPLATE_ID: {
            final StartPositionRequestDecoder decoder = decoders.startPositionRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onGetStartPosition(correlationId, decoder.recordingId());
            }
            break;
        }

        case DetachSegmentsRequestDecoder.TEMPLATE_ID: {
            final DetachSegmentsRequestDecoder decoder = decoders.detachSegmentsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onDetachSegments(correlationId, decoder.recordingId(), decoder.newStartPosition());
            }
            break;
        }

        case DeleteDetachedSegmentsRequestDecoder.TEMPLATE_ID: {
            final DeleteDetachedSegmentsRequestDecoder decoder = decoders.deleteDetachedSegmentsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onDeleteDetachedSegments(correlationId, decoder.recordingId());
            }
            break;
        }

        case PurgeSegmentsRequestDecoder.TEMPLATE_ID: {
            final PurgeSegmentsRequestDecoder decoder = decoders.purgeSegmentsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onPurgeSegments(correlationId, decoder.recordingId(), decoder.newStartPosition());
            }
            break;
        }

        case AttachSegmentsRequestDecoder.TEMPLATE_ID: {
            final AttachSegmentsRequestDecoder decoder = decoders.attachSegmentsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onAttachSegments(correlationId, decoder.recordingId());
            }
            break;
        }

        case MigrateSegmentsRequestDecoder.TEMPLATE_ID: {
            final MigrateSegmentsRequestDecoder decoder = decoders.migrateSegmentsRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onMigrateSegments(correlationId, decoder.srcRecordingId(), decoder.dstRecordingId());
            }
            break;
        }

        case AuthConnectRequestDecoder.TEMPLATE_ID: {
            final AuthConnectRequestDecoder decoder = decoders.authConnectRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final String responseChannel = decoder.responseChannel();
            final int credentialsLength = decoder.encodedCredentialsLength();
            final byte[] credentials;
            if (credentialsLength > 0) {
                credentials = new byte[credentialsLength];
                decoder.getEncodedCredentials(credentials, 0, credentialsLength);
            } else {
                credentials = ArrayUtil.EMPTY_BYTE_ARRAY;
            }

            final ControlSession session = conductor.newControlSession(
                image.correlationId(),
                decoder.correlationId(),
                decoder.responseStreamId(),
                decoder.version(),
                responseChannel,
                credentials,
                this);
            controlSessionByIdMap.put(session.sessionId(), session);
            break;
        }

        case ChallengeResponseDecoder.TEMPLATE_ID: {
            final ChallengeResponseDecoder decoder = decoders.challengeResponse;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final ControlSession session = controlSessionByIdMap.get(controlSessionId);
            if (null != session) {
                final int credentialsLength = decoder.encodedCredentialsLength();
                final byte[] credentials;
                if (credentialsLength > 0) {
                    credentials = new byte[credentialsLength];
                    decoder.getEncodedCredentials(credentials, 0, credentialsLength);
                } else {
                    credentials = ArrayUtil.EMPTY_BYTE_ARRAY;
                }

                session.onChallengeResponse(decoder.correlationId(), credentials);
            }
            break;
        }

        case KeepAliveRequestDecoder.TEMPLATE_ID: {
            final KeepAliveRequestDecoder decoder = decoders.keepAliveRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onKeepAlive(correlationId);
            }
            break;
        }

        case TaggedReplicateRequestDecoder.TEMPLATE_ID: {
            final TaggedReplicateRequestDecoder decoder = decoders.taggedReplicateRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onReplicate(
                    correlationId,
                    decoder.srcRecordingId(),
                    decoder.dstRecordingId(),
                    AeronArchive.NULL_POSITION,
                    decoder.channelTagId(),
                    decoder.subscriptionTagId(),
                    decoder.srcControlStreamId(),
                    Aeron.NULL_VALUE,
                    Aeron.NULL_VALUE,
                    decoder.srcControlChannel(),
                    decoder.liveDestination(),
                    "",
                    NullCredentialsSupplier.NULL_CREDENTIAL,
                    "");
            }
            break;
        }

        case StartRecordingRequest2Decoder.TEMPLATE_ID: {
            final StartRecordingRequest2Decoder decoder = decoders.startRecordingRequest2;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStartRecording(
                    correlationId,
                    decoder.streamId(),
                    decoder.sourceLocation(),
                    decoder.autoStop() == BooleanType.TRUE,
                    decoder.channel());
            }
            break;
        }

        case ExtendRecordingRequest2Decoder.TEMPLATE_ID: {
            final ExtendRecordingRequest2Decoder decoder = decoders.extendRecordingRequest2;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onExtendRecording(
                    correlationId,
                    decoder.recordingId(),
                    decoder.streamId(),
                    decoder.sourceLocation(),
                    decoder.autoStop() == BooleanType.TRUE,
                    decoder.channel());
            }
            break;
        }

        case StopRecordingByIdentityRequestDecoder.TEMPLATE_ID: {
            final StopRecordingByIdentityRequestDecoder decoder = decoders.stopRecordingByIdentityRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onStopRecordingByIdentity(correlationId, decoder.recordingId());
            }
            break;
        }

        case ReplicateRequest2Decoder.TEMPLATE_ID: {
            final ReplicateRequest2Decoder decoder = decoders.replicateRequest2;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version() ?
                decoder.fileIoMaxLength() : Aeron.NULL_VALUE;
            final int sessionId = SESSION_ID_VERSION <= headerDecoder.version() ?
                decoder.replicationSessionId() : Aeron.NULL_VALUE;
            final String srcControlChannel = decoder.srcControlChannel();
            final String liveDestination = decoder.liveDestination();
            final String replicationChannel = decoder.replicationChannel();

            final byte[] encodedCredentials;
            if (ENCODED_CREDENTIALS_VERSION <= headerDecoder.version()) {
                encodedCredentials = new byte[decoder.encodedCredentialsLength()];
                decoder.getEncodedCredentials(encodedCredentials, 0, decoder.encodedCredentialsLength());
            } else {
                encodedCredentials = NullCredentialsSupplier.NULL_CREDENTIAL;
            }
            final String srcResponseChannel = decoder.srcResponseChannel();

            if (null != controlSession) {
                controlSession.onReplicate(
                    correlationId,
                    decoder.srcRecordingId(),
                    decoder.dstRecordingId(),
                    decoder.stopPosition(),
                    decoder.channelTagId(),
                    decoder.subscriptionTagId(),
                    decoder.srcControlStreamId(),
                    fileIoMaxLength,
                    sessionId,
                    srcControlChannel,
                    liveDestination,
                    replicationChannel,
                    encodedCredentials,
                    srcResponseChannel);
            }
            break;
        }

        case PurgeRecordingRequestDecoder.TEMPLATE_ID: {
            final PurgeRecordingRequestDecoder decoder = decoders.purgeRecordingRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onPurgeRecording(correlationId, decoder.recordingId());
            }
            break;
        }

        case MaxRecordedPositionRequestDecoder.TEMPLATE_ID: {
            final MaxRecordedPositionRequestDecoder decoder = decoders.maxRecordedPositionRequest;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onGetMaxRecordedPosition(correlationId, decoder.recordingId());
            }
            break;
        }

        case ArchiveIdRequestDecoder.TEMPLATE_ID: {
            final ArchiveIdRequestDecoder decoder = decoders.archiveIdRequestDecoder;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                controlSession.onArchiveId(correlationId);
            }
            break;
        }

        case ReplayTokenRequestDecoder.TEMPLATE_ID: {
            final ReplayTokenRequestDecoder decoder = decoders.replayTokenRequestDecoder;
            decoder.wrap(buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH,
                headerDecoder.blockLength(), headerDecoder.version());

            final long controlSessionId = decoder.controlSessionId();
            final long correlationId = decoder.correlationId();
            final long recordingId = decoder.recordingId();
            final ControlSession controlSession = getControlSession(correlationId, controlSessionId, templateId);
            if (null != controlSession) {
                final long replayToken = conductor.generateReplayToken(controlSession, recordingId);
                controlSession.sendResponse(
                    correlationId, replayToken, ControlResponseCode.OK, "", conductor.controlResponseProxy());
            }
        }
    }
}
@Test
void shouldHandleBoundedReplayRequest() {
    final ControlSessionDemuxer controlSessionDemuxer = new ControlSessionDemuxer(
        new ControlRequestDecoders(), mockImage, mockConductor, mockAuthorisationService);
    setupControlSession(controlSessionDemuxer, CONTROL_SESSION_ID);

    final ExpandableArrayBuffer buffer = new ExpandableArrayBuffer();
    final MessageHeaderEncoder headerEncoder = new MessageHeaderEncoder();
    final BoundedReplayRequestEncoder replayRequestEncoder = new BoundedReplayRequestEncoder();
    replayRequestEncoder.wrapAndApplyHeader(buffer, 0, headerEncoder);

    replayRequestEncoder
        .controlSessionId(928374L)
        .correlationId(9382475L)
        .recordingId(9827345897L)
        .position(982374L)
        .limitCounterId(92734)
        .replayStreamId(9832475)
        .fileIoMaxLength(4096)
        .replayChannel("aeron:ipc?alias=replay");

    final int replicateRequestLength = replayRequestEncoder.encodedLength();
    controlSessionDemuxer.onFragment(buffer, 0, replicateRequestLength, mockHeader);

    final BoundedReplayRequestDecoder expected = new BoundedReplayRequestDecoder()
        .wrapAndApplyHeader(buffer, 0, new MessageHeaderDecoder());

    verify(mockSession).onStartBoundedReplay(
        expected.correlationId(),
        expected.recordingId(),
        expected.position(),
        expected.length(),
        expected.limitCounterId(),
        expected.fileIoMaxLength(),
        expected.replayStreamId(),
        expected.replayChannel());
}
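One recurring idiom in the handler above is worth calling out: fields added in newer SBE schema versions (fileIoMaxLength, replayToken, replicationSessionId, encodedCredentials) are read only when the sender's message header advertises a high enough version, with a null sentinel substituted otherwise. A minimal sketch of the idiom, reusing only names that appear in the code above:

// Version-gating sketch: read a field only when the sender's schema version has it.
// FILE_IO_MAX_LENGTH_VERSION, headerDecoder, decoder and Aeron.NULL_VALUE are all
// taken from the handler above; nothing new is assumed.
final int fileIoMaxLength = FILE_IO_MAX_LENGTH_VERSION <= headerDecoder.version()
    ? decoder.fileIoMaxLength() // field exists in the sender's schema
    : Aeron.NULL_VALUE;         // older sender: field absent, fall back to the sentinel

This is what lets old clients keep talking to a new archive: the decoder never walks past the block length the sender actually wrote.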
public void toPdf() throws IOException {
    try {
        document.open();
        // it would be possible to open Adobe Reader's Print dialog here:
        // if (writer instanceof PdfWriter) {
        //     ((PdfWriter) writer).addJavaScript("this.print(true);", false);
        // }
        pdfCoreReport.toPdf();
    } catch (final DocumentException e) {
        throw createIOException(e);
    }
    document.close();
}
@Test
public void testEmptyPdfCounterRequestContext() throws IOException, DocumentException {
    final ByteArrayOutputStream output = new ByteArrayOutputStream();
    final PdfDocumentFactory pdfDocumentFactory = new PdfDocumentFactory(TEST_APP, null, output);
    final Document document = pdfDocumentFactory.createDocument();
    document.open();
    final PdfCounterRequestContextReport report = new PdfCounterRequestContextReport(
        Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
        true, pdfDocumentFactory, document);
    report.toPdf();
    report.setTimeOfSnapshot(System.currentTimeMillis());
    report.writeContextDetails();
    // the document cannot be closed because normally nothing has been written to it
    assertNotNull("PdfCounterRequestContextReport", report);
}
@Override
public String execute(SampleResult previousResult, Sampler currentSampler)
        throws InvalidVariableException {
    String originalString = values[0].execute();
    String mode = null; // default
    if (values.length > 1) {
        mode = values[1].execute();
    }
    if (StringUtils.isEmpty(mode)) {
        mode = ChangeCaseMode.UPPER.getName(); // default
    }
    String targetString = changeCase(originalString, mode);
    addVariableValue(targetString, values, 2);
    return targetString;
}
@Test
public void testChangeCaseLower() throws Exception {
    String returnValue = execute("myUpperTest", "LOWER");
    assertEquals("myuppertest", returnValue);
}
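The mode argument is optional: per the StringUtils.isEmpty(mode) branch above, an absent or empty mode falls back to upper-casing. A hedged companion test for that default branch, assuming the test fixture's execute(...) helper also accepts a single argument:

@Test
public void testChangeCaseDefaultsToUpper() throws Exception {
    // Hypothetical companion test: with no mode supplied, the function should
    // fall back to ChangeCaseMode.UPPER per the isEmpty(mode) branch above.
    String returnValue = execute("myUpperTest");
    assertEquals("MYUPPERTEST", returnValue);
}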
@Override
public void applyToConfiguration(Configuration configuration) {
    super.applyToConfiguration(configuration);
    merge(configuration, pythonConfiguration);
}
@Test
void testCreateProgramOptionsWithPythonCommandLine() throws CliArgsException {
    String[] parameters = {
        "-py", "test.py",
        "-pym", "test",
        "-pyfs", "test1.py,test2.zip,test3.egg,test4_dir",
        "-pyreq", "a.txt#b_dir",
        "-pyarch", "c.zip#venv,d.zip",
        "-pyexec", "bin/python",
        "-pypath", "bin/python/lib/:bin/python/lib64",
        "userarg1", "userarg2"
    };
    CommandLine line = CliFrontendParser.parse(options, parameters, false);
    PythonProgramOptions programOptions = (PythonProgramOptions) ProgramOptions.create(line);

    Configuration config = new Configuration();
    programOptions.applyToConfiguration(config);
    assertThat(config.get(PythonOptions.PYTHON_FILES))
        .isEqualTo("test1.py,test2.zip,test3.egg,test4_dir");
    assertThat(config.get(PYTHON_REQUIREMENTS)).isEqualTo("a.txt#b_dir");
    assertThat(config.get(PythonOptions.PYTHON_ARCHIVES)).isEqualTo("c.zip#venv,d.zip");
    assertThat(config.get(PYTHON_EXECUTABLE)).isEqualTo("bin/python");
    assertThat(config.get(PythonOptions.PYTHON_PATH))
        .isEqualTo("bin/python/lib/:bin/python/lib64");
    assertThat(programOptions.getProgramArgs())
        .containsExactly("--python", "test.py", "--pyModule", "test", "userarg1", "userarg2");
}
@Override
public String toString() {
    StringBuilder stringBuilder = new StringBuilder();
    stringBuilder.append("minLatitude=");
    stringBuilder.append(this.minLatitude);
    stringBuilder.append(", minLongitude=");
    stringBuilder.append(this.minLongitude);
    stringBuilder.append(", maxLatitude=");
    stringBuilder.append(this.maxLatitude);
    stringBuilder.append(", maxLongitude=");
    stringBuilder.append(this.maxLongitude);
    return stringBuilder.toString();
}
@Test
public void toStringTest() {
    BoundingBox boundingBox = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    Assert.assertEquals(BOUNDING_BOX_TO_STRING, boundingBox.toString());
}
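Given the builder logic above, the expected constant presumably expands to the four fields in declaration order. A hedged sketch of what BOUNDING_BOX_TO_STRING would look like; the coordinate values here are illustrative, not the test's actual constants:

// Illustrative only: for minLatitude=1.0, minLongitude=2.0, maxLatitude=3.0,
// maxLongitude=4.0, the toString() above produces exactly this string.
String expected = "minLatitude=1.0, minLongitude=2.0, maxLatitude=3.0, maxLongitude=4.0";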
@Override
public AppResponse process(Flow flow, MultipleSessionsRequest request)
        throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    var authAppSession = appSessionService.getSession(request.getAuthSessionId());
    if (!isAppSessionAuthenticated(authAppSession)) {
        return new NokResponse();
    }
    appAuthenticator = appAuthenticatorService.findByUserAppId(authAppSession.getUserAppId());
    if (!isAppAuthenticatorActivated(appAuthenticator)) {
        return new NokResponse();
    }
    var response = checkEidasUIT();
    return response.orElseGet(() -> addDetailsToResponse(new WebSessionInformationResponse()));
}
@Test
public void processSessionInformationReceivedActivateResponseOk()
        throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    // given
    Map<String, String> activateResponse =
        Map.of("status", "OK", "faultReason", "NotUnique", "pip", "testpip");
    when(appAuthenticatorService.findByUserAppId(authAppSession.getUserAppId()))
        .thenReturn(mockedAppAuthenticator);
    when(dwsClient.bsnkActivate(response.get("bsn"))).thenReturn(activateResponse);

    // when
    AppResponse appResponse = sessionInformationReceived.process(mockedFlow, multipleSessionsRequest);

    // then
    assertEquals("testpip", mockedAppAuthenticator.getPip());
}
protected static String assureRoot(String queueName) {
    if (queueName != null && !queueName.isEmpty()) {
        if (!queueName.startsWith(ROOT_QUEUE + DOT) && !queueName.equals(ROOT_QUEUE)) {
            queueName = ROOT_QUEUE + DOT + queueName;
        }
    } else {
        LOG.warn("AssureRoot: queueName is empty or null.");
    }
    return queueName;
}
@Test
public void testAssureRoot() {
    // permutations of rooted queue names
    final String queueName = "base";
    final String rootOnly = "root";
    final String rootNoDot = "rootbase";
    final String alreadyRoot = "root.base";

    String rooted = assureRoot(queueName);
    assertTrue("Queue should have root prefix (base)", rooted.startsWith(ROOT_QUEUE + DOT));

    rooted = assureRoot(rootOnly);
    assertEquals("'root' queue should not have root prefix (root)", rootOnly, rooted);

    rooted = assureRoot(rootNoDot);
    assertTrue("Queue should have root prefix (rootbase)", rooted.startsWith(ROOT_QUEUE + DOT));
    assertEquals("'root' queue base was replaced and not prefixed", 5, rooted.lastIndexOf(ROOT_QUEUE));

    rooted = assureRoot(alreadyRoot);
    assertEquals("Root prefixed queue changed and it should not (root.base)", rooted, alreadyRoot);

    assertNull("Null queue did not return null queue", assureRoot(null));
    assertEquals("Empty queue did not return empty name", "", assureRoot(""));
}
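One property follows directly from the startsWith/equals guards above: the method is idempotent, so double-prefixing can never occur. A short sketch using only assureRoot itself:

// Idempotency sketch: once a name is rooted, assureRoot leaves it untouched,
// because "root.base".startsWith(ROOT_QUEUE + DOT) short-circuits the prefixing.
String once = assureRoot("base"); // "root.base"
String twice = assureRoot(once);  // still "root.base"
assert once.equals(twice);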
@Override
public void startAsync() {
    if (!shouldCheckPreconditions()) {
        LOG.info("All preconditions passed, skipping precondition server start");
        return;
    }
    LOG.info("Some preconditions not passed, starting precondition server");
    server.start();
}
@Test
public void shouldNotStartServerIfPreconditionsPass() {
    // When:
    checker.startAsync();

    // Then:
    verifyNoInteractions(server);
}
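The test above covers the skip branch. The complementary branch, where a precondition fails and the server must start, would look roughly like this; it assumes the fixture can make shouldCheckPreconditions() return true for this checker instance, which is a detail of the test class not shown here:

@Test
public void shouldStartServerIfPreconditionsFail() {
    // Hypothetical companion test: assumes the fixture stubs
    // shouldCheckPreconditions() to true for this checker.
    // When:
    checker.startAsync();

    // Then:
    verify(server).start();
}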
@Override
public Mono<UserDetails> findByUsername(String username) {
    return userService.getUser(username)
        .onErrorMap(UserNotFoundException.class,
            e -> new BadCredentialsException("Invalid Credentials"))
        .flatMap(user -> {
            var name = user.getMetadata().getName();
            var userBuilder = User.withUsername(name)
                .password(user.getSpec().getPassword())
                .disabled(requireNonNullElse(user.getSpec().getDisabled(), false));
            var setAuthorities = roleService.getRolesByUsername(name)
                // every authenticated user should have authenticated and anonymous roles.
                .concatWithValues(AUTHENTICATED_ROLE_NAME, ANONYMOUS_ROLE_NAME)
                .map(roleName -> new SimpleGrantedAuthority(ROLE_PREFIX + roleName))
                .distinct()
                .collectList()
                .doOnNext(userBuilder::authorities);
            return setAuthorities.then(Mono.fromSupplier(() -> {
                var twoFactorAuthSettings = TwoFactorUtils.getTwoFactorAuthSettings(user);
                return new HaloUser.Builder(userBuilder.build())
                    .twoFactorAuthEnabled(
                        (!twoFactorAuthDisabled) && twoFactorAuthSettings.isAvailable())
                    .totpEncryptedSecret(user.getSpec().getTotpEncryptedSecret())
                    .build();
            }));
        });
}
@Test
void shouldNotFindUserDetailsByNonExistingUsername() {
    when(userService.getUser("non-existing-user")).thenReturn(
        Mono.error(() -> new UserNotFoundException("non-existing-user")));
    var userDetailsMono = userDetailService.findByUsername("non-existing-user");
    StepVerifier.create(userDetailsMono)
        .expectError(AuthenticationException.class)
        .verify();
}
public static void recursivelyRegisterType(
        TypeInformation<?> typeInfo, SerializerConfig config, Set<Class<?>> alreadySeen) {
    if (typeInfo instanceof GenericTypeInfo) {
        GenericTypeInfo<?> genericTypeInfo = (GenericTypeInfo<?>) typeInfo;
        Serializers.recursivelyRegisterType(genericTypeInfo.getTypeClass(), config, alreadySeen);
    } else if (typeInfo instanceof CompositeType) {
        List<GenericTypeInfo<?>> genericTypesInComposite = new ArrayList<>();
        getContainedGenericTypes((CompositeType<?>) typeInfo, genericTypesInComposite);
        for (GenericTypeInfo<?> gt : genericTypesInComposite) {
            Serializers.recursivelyRegisterType(gt.getTypeClass(), config, alreadySeen);
        }
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        ObjectArrayTypeInfo<?, ?> objectArrayTypeInfo = (ObjectArrayTypeInfo<?, ?>) typeInfo;
        recursivelyRegisterType(objectArrayTypeInfo.getComponentInfo(), config, alreadySeen);
    }
}
@Test
void testTypeRegistrationFromTypeInfo() {
    SerializerConfigImpl conf = new SerializerConfigImpl();
    Serializers.recursivelyRegisterType(
        new GenericTypeInfo<>(ClassWithNested.class), conf, new HashSet<Class<?>>());

    // we create Kryo from another type.
    KryoSerializer<String> kryo = new KryoSerializer<>(String.class, conf);

    assertThat(kryo.getKryo().getRegistration(FromNested.class).getId()).isPositive();
    assertThat(kryo.getKryo().getRegistration(ClassWithNested.class).getId()).isPositive();
    assertThat(kryo.getKryo().getRegistration(Path.class).getId()).isPositive();

    // check that the generic type from one field is also registered (it's very likely
    // that generic types are also used as fields somewhere).
    assertThat(kryo.getKryo().getRegistration(FromGeneric1.class).getId()).isPositive();
    assertThat(kryo.getKryo().getRegistration(FromGeneric2.class).getId()).isPositive();
    assertThat(kryo.getKryo().getRegistration(Node.class).getId()).isPositive();
}
public static Long getLongOrNull(String property, JsonNode node) {
    if (!node.hasNonNull(property)) {
        return null;
    }
    return getLong(property, node);
}
@Test
public void getLongOrNull() throws JsonProcessingException {
    assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
    assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
        .isEqualTo(23);
    assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}"))).isNull();

    assertThatThrownBy(
            () -> JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse to a long value: x: \"23\"");

    assertThatThrownBy(
            () -> JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse to a long value: x: 23.0");
}
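A small usage sketch built only from calls the test above already exercises; the key point is that hasNonNull() folds two different JSON shapes into one Java null:

// Usage sketch: an absent key and an explicit JSON null both map to a Java null,
// while present values still go through getLong's type validation.
// (readTree throws JsonProcessingException, handled by the caller.)
JsonNode node = JsonUtil.mapper().readTree("{\"ts\": 1700000000000, \"end\": null}");
Long ts = JsonUtil.getLongOrNull("ts", node);        // 1700000000000L
Long end = JsonUtil.getLongOrNull("end", node);      // null (explicit JSON null)
Long missing = JsonUtil.getLongOrNull("nope", node); // null (key absent)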
@Override
public CommandLineImpl parse(final List<String> originalArgs, final Logger logger) {
    return CommandLineImpl.of(originalArgs, logger);
}
@Test
public void testRun() throws Exception {
    final CommandLineParserImpl parser = new CommandLineParserImpl();
    final CommandLineImpl commandLine = parse(
        parser,
        "-R--dev",
        "--log-level", "warn",
        "--log-path", "/var/log/some",
        "-Xjruby=file:///some/jruby.jar",
        "-X", "bar=baz",
        "run",
        "-L", "test",
        "-R", "bar",
        "file3.yml",
        "-I", "example");

    assertEquals(Command.RUN, commandLine.getCommand());
    assertEquals(Arrays.asList("file3.yml"), commandLine.getArguments());

    final Properties actualProperties = commandLine.getCommandLineProperties();
    assertEquals(6, actualProperties.size());
    assertEquals(
        "test" + File.separator + "lib" + File.pathSeparator + "example",
        actualProperties.getProperty("jruby_load_path"));
    assertEquals("warn", actualProperties.getProperty("log_level"));
    assertEquals("/var/log/some", actualProperties.getProperty("log_path"));
    assertEquals("--dev,bar", actualProperties.getProperty("jruby_command_line_options"));
    assertEquals("baz", actualProperties.getProperty("bar"));
    assertEquals("file:///some/jruby.jar", actualProperties.getProperty("jruby"));

    assertEquals("", commandLine.getStdOut());
    assertEquals("", commandLine.getStdErr());
}
@Override
public int size() {
    return get(sizeAsync());
}
@Test
public void testSize() {
    RSetCache<Integer> set = redisson.getSetCache("set");
    set.add(1);
    set.add(2);
    set.add(3);
    set.add(3);
    set.add(4);
    set.add(5);
    set.add(5);
    Assertions.assertEquals(5, set.size());
    set.destroy();
}
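What distinguishes RSetCache from a plain set is per-entry TTL; a hedged sketch of that aspect, assuming Redisson's add(value, ttl, unit) overload (the overload name is an assumption here, not confirmed by the code above):

// Hedged TTL sketch: RSetCache is assumed to expose add(V, long, TimeUnit) for
// per-entry expiry; once the TTL elapses, the entry no longer counts toward size().
RSetCache<Integer> cache = redisson.getSetCache("ttlSet");
cache.add(1, 1, TimeUnit.SECONDS); // expires one second after insertion
cache.add(2);                      // no TTL, stays until removed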
@Override
public void close() {
    close(Duration.ofMillis(0));
}
@Test
public void shouldThrowOnCommitTransactionIfProducerIsClosed() {
    buildMockProducer(true);
    producer.close();
    assertThrows(IllegalStateException.class, producer::commitTransaction);
}
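Since close() above is just sugar for close(Duration.ofMillis(0)), a zero-timeout close still transitions the producer into the closed state. A hedged companion check, assuming MockProducer exposes a closed() accessor as in Kafka's test utilities (an assumption, not shown in the code above):

@Test
public void shouldReportClosedAfterZeroTimeoutClose() {
    buildMockProducer(true);
    producer.close(); // delegates to close(Duration.ofMillis(0))
    assertTrue(producer.closed()); // closed() is assumed from MockProducer's API
}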
public static PredicateTreeAnnotations createPredicateTreeAnnotations(Predicate predicate) {
    PredicateTreeAnalyzerResult analyzerResult = PredicateTreeAnalyzer.analyzePredicateTree(predicate);
    // The tree size is used as the interval range.
    int intervalEnd = analyzerResult.treeSize;
    AnnotatorContext context = new AnnotatorContext(intervalEnd, analyzerResult.sizeMap);
    assignIntervalLabels(predicate, Interval.INTERVAL_BEGIN, intervalEnd, false, context);
    return new PredicateTreeAnnotations(
        analyzerResult.minFeature,
        intervalEnd,
        context.intervals,
        context.intervalsWithBounds,
        context.featureConjunctions);
}
@Test
void require_that_ands_below_ors_get_different_intervals() {
    Predicate p = or(
        and(
            feature("key1").inSet("value1"),
            feature("key1").inSet("value1"),
            feature("key1").inSet("value1")),
        and(
            feature("key2").inSet("value2"),
            feature("key2").inSet("value2"),
            feature("key2").inSet("value2")));
    PredicateTreeAnnotations r = PredicateTreeAnnotator.createPredicateTreeAnnotations(p);
    assertEquals(1, r.minFeature);
    assertEquals(6, r.intervalEnd);
    assertEquals(2, r.intervalMap.size());
    assertIntervalContains(r, "key1=value1", 0x00010001, 0x00020002, 0x00030006);
    assertIntervalContains(r, "key2=value2", 0x00010004, 0x00050005, 0x00060006);
}
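The hex constants in the assertions decode naturally once you see the packing: each interval appears to pack its begin position into the upper 16 bits and its end position into the lower 16, so 0x00030006 is the interval [3, 6] over a tree of size 6. A one-line sketch of that packing, inferred from the asserted constants rather than from Interval's actual implementation:

// Packing sketch (inferred from the asserted constants, not from Interval's code):
static int packInterval(int begin, int end) {
    return (begin << 16) | end; // 0x00030006 == packInterval(3, 6)
}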
public String queryParam(String queryParamName) {
    return optionalQueryParam(queryParamName).orElse(null);
}
@Test
void testRequestUrlQueryParamWhichIsNotPresentUsingClass() {
    RequestUrl requestUrl = new MatchUrl("/api/jobs").toRequestUrl("/api/jobs");
    assertThat(requestUrl.queryParam("state", StateName.class, ENQUEUED)).isEqualTo(ENQUEUED);
}
@Override
public CompletableFuture<JoinGroupResponseData> joinGroup(
    RequestContext context,
    JoinGroupRequestData request,
    BufferSupplier bufferSupplier
) {
    if (!isActive.get()) {
        return CompletableFuture.completedFuture(new JoinGroupResponseData()
            .setMemberId(request.memberId())
            .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
        );
    }

    if (!isGroupIdNotEmpty(request.groupId())) {
        return CompletableFuture.completedFuture(new JoinGroupResponseData()
            .setMemberId(request.memberId())
            .setErrorCode(Errors.INVALID_GROUP_ID.code())
        );
    }

    if (request.sessionTimeoutMs() < config.classicGroupMinSessionTimeoutMs()
        || request.sessionTimeoutMs() > config.classicGroupMaxSessionTimeoutMs()) {
        return CompletableFuture.completedFuture(new JoinGroupResponseData()
            .setMemberId(request.memberId())
            .setErrorCode(Errors.INVALID_SESSION_TIMEOUT.code())
        );
    }

    CompletableFuture<JoinGroupResponseData> responseFuture = new CompletableFuture<>();

    runtime.scheduleWriteOperation(
        "classic-group-join",
        topicPartitionFor(request.groupId()),
        Duration.ofMillis(config.offsetCommitTimeoutMs()),
        coordinator -> coordinator.classicGroupJoin(context, request, responseFuture)
    ).exceptionally(exception -> {
        if (!responseFuture.isDone()) {
            responseFuture.complete(handleOperationException(
                "classic-group-join",
                request,
                exception,
                (error, __) -> new JoinGroupResponseData().setErrorCode(error.code())
            ));
        }
        return null;
    });

    return responseFuture;
}
@Test
public void testJoinGroup() {
    CoordinatorRuntime<GroupCoordinatorShard, CoordinatorRecord> runtime = mockRuntime();
    GroupCoordinatorService service = new GroupCoordinatorService(
        new LogContext(),
        createConfig(),
        runtime,
        new GroupCoordinatorMetrics(),
        createConfigManager()
    );

    JoinGroupRequestData request = new JoinGroupRequestData()
        .setGroupId("foo")
        .setSessionTimeoutMs(1000);

    service.startup(() -> 1);

    when(runtime.scheduleWriteOperation(
        ArgumentMatchers.eq("classic-group-join"),
        ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
        ArgumentMatchers.eq(Duration.ofMillis(5000)),
        ArgumentMatchers.any()
    )).thenReturn(CompletableFuture.completedFuture(new JoinGroupResponseData()));

    CompletableFuture<JoinGroupResponseData> responseFuture = service.joinGroup(
        requestContext(ApiKeys.JOIN_GROUP),
        request,
        BufferSupplier.NO_CACHING
    );

    assertFalse(responseFuture.isDone());
}
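The happy path above never resolves because the response future is completed by the coordinator callback, not by the scheduled write's own future; the validation branches, by contrast, complete immediately. A hedged sketch of the session-timeout rejection, reusing this test class's helpers (createConfig, mockRuntime, requestContext) and assuming the value 1 falls below the minimum that createConfig() configures:

@Test
public void testJoinGroupInvalidSessionTimeout() throws Exception {
    CoordinatorRuntime<GroupCoordinatorShard, CoordinatorRecord> runtime = mockRuntime();
    GroupCoordinatorService service = new GroupCoordinatorService(
        new LogContext(), createConfig(), runtime,
        new GroupCoordinatorMetrics(), createConfigManager());
    service.startup(() -> 1);

    // A timeout below classicGroupMinSessionTimeoutMs is rejected up front,
    // without ever touching the runtime (1 is assumed to be below the minimum).
    JoinGroupRequestData request = new JoinGroupRequestData()
        .setGroupId("foo")
        .setSessionTimeoutMs(1);

    JoinGroupResponseData response = service.joinGroup(
        requestContext(ApiKeys.JOIN_GROUP), request, BufferSupplier.NO_CACHING
    ).get();
    assertEquals(Errors.INVALID_SESSION_TIMEOUT.code(), response.errorCode());
}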