Dataset columns:
  focal_method: string, length 13 to 60.9k characters
  test_case:    string, length 25 to 109k characters
/**
 * Marks the end of a washing cycle: flips the machine back to {@code ENABLED}
 * and logs completion tagged with the current thread id. Synchronized so the
 * state transition is safely published to other threads using this machine.
 */
public synchronized void endOfWashing() { washingMachineState = WashingMachineState.ENABLED; LOGGER.info("{}: Washing completed.", Thread.currentThread().getId()); }
// Verifies the machine ends up ENABLED after a wash cycle.
// NOTE(review): asserts immediately after wash(); assumes wash() completes
// synchronously (or uses a test delay provider) before returning — confirm,
// otherwise this assertion is timing-dependent.
@Test void endOfWashing() { var washingMachine = new WashingMachine(); washingMachine.wash(); assertEquals(WashingMachineState.ENABLED, washingMachine.getWashingMachineState()); }
/**
 * Static factory returning a {@link PCollections} transform that flattens a
 * list of collections of {@code T} into a single collection.
 */
public static <T> PCollections<T> pCollections() { return new PCollections<>(); }
// Flattens several input PCollections (including empty ones) and asserts the
// output contains exactly the union of all input elements, in any order.
@Test @Category(ValidatesRunner.class) public void testFlattenPCollections() { List<List<String>> inputs = Arrays.asList(LINES, NO_LINES, LINES2, NO_LINES, LINES, NO_LINES); PCollection<String> output = makePCollectionListOfStrings(p, inputs).apply(Flatten.pCollections()); PAssert.that(output).containsInAnyOrder(flattenLists(inputs)); p.run(); }
/** Returns the configured name. */
public String getName() { return name; }
// Builds a CacheConfig from a CacheSimpleConfig carrying loader/writer class
// names and verifies the derived factories instantiate the expected types.
@Test public void testCacheConfigLoaderWriter() throws Exception { CacheSimpleConfig simpleConfig = new CacheSimpleConfig(); simpleConfig.setCacheLoader(MyCacheLoader.class.getName()); simpleConfig.setCacheWriter(EmptyCacheWriter.class.getName()); CacheConfig cacheConfig = new CacheConfig(simpleConfig); CacheLoader loader = (CacheLoader) cacheConfig.getCacheLoaderFactory().create(); CacheWriter writer = (CacheWriter) cacheConfig.getCacheWriterFactory().create(); assertTrue(loader instanceof MyCacheLoader); assertTrue(writer instanceof EmptyCacheWriter); }
/** Returns this registration's numeric id. */
public int id() { return id; }
@Test
public void testValues() {
    // The first three registrations carry sequential ids 0..2.
    for (int expectedId = 0; expectedId < 3; expectedId++) {
        assertEquals(expectedId, REGISTRATIONS.get(expectedId).id());
    }
}
/**
 * Builds one response cell per column by pairing each value from the merged
 * result with the type metadata recorded in the matching query header.
 *
 * @return the assembled response row
 * @throws SQLException if reading a value from the merged result fails
 */
@Override
public QueryResponseRow getRowData() throws SQLException {
    int columnCount = queryHeaders.size();
    List<QueryResponseCell> cells = new ArrayList<>(columnCount);
    // Headers are zero-indexed while the merged result is one-indexed.
    for (int i = 0; i < columnCount; i++) {
        Object data = mergedResult.getValue(i + 1, Object.class);
        cells.add(new QueryResponseCell(queryHeaders.get(i).getColumnType(), data, queryHeaders.get(i).getColumnTypeName()));
    }
    return new QueryResponseRow(cells);
}
// Exercises DatabaseConnector.getRowData() under a binary-protocol query header:
// injects a fixture QueryHeaderBuilder via a mocked static SPI loader, swaps the
// connector's private queryHeaders/mergedResult fields through reflection, and
// asserts that reading a row never escapes with an IndexOutOfBoundsException.
@Test void assertBinaryProtocolQueryHeader() throws SQLException, NoSuchFieldException, IllegalAccessException { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); when(sqlStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); DatabaseConnector engine = DatabaseConnectorFactory.getInstance().newInstance(createQueryContext(sqlStatementContext), databaseConnectionManager, true); assertNotNull(engine); assertThat(engine, instanceOf(DatabaseConnector.class)); Field queryHeadersField = DatabaseConnector.class.getDeclaredField("queryHeaders"); ShardingSphereDatabase database = createDatabaseMetaData(); MemberAccessor accessor = Plugins.getMemberAccessor(); try (MockedStatic<DatabaseTypedSPILoader> spiLoader = mockStatic(DatabaseTypedSPILoader.class)) { spiLoader.when(() -> DatabaseTypedSPILoader.getService(QueryHeaderBuilder.class, TypedSPILoader.getService(DatabaseType.class, "MySQL"))).thenReturn(new QueryHeaderBuilderFixture()); accessor.set(queryHeadersField, engine, Collections.singletonList(new QueryHeaderBuilderEngine(TypedSPILoader.getService(DatabaseType.class, "MySQL")).build(createQueryResultMetaData(), database, 1))); Field mergedResultField = DatabaseConnector.class.getDeclaredField("mergedResult"); accessor.set(mergedResultField, engine, new MemoryMergedResult<ShardingSphereRule>(null, null, null, Collections.emptyList()) { @Override protected List<MemoryQueryResultRow> init(final ShardingSphereRule rule, final ShardingSphereSchema schema, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) { return Collections.singletonList(mock(MemoryQueryResultRow.class)); } }); Exception ex = null; try { engine.getRowData(); } catch (final SQLException | IndexOutOfBoundsException exception) { ex = exception; } finally { assertFalse(ex instanceof IndexOutOfBoundsException); } } }
/** Flushes any buffered bytes through to the underlying data output stream. */
@Override public void flush() throws IOException { dataOut.flush(); }
// Verifies flush() is delegated to the wrapped output stream exactly once.
@Test public void testFlush() throws Exception { dataOutputStream.flush(); verify(mockOutputStream).flush(); }
/**
 * Returns this configuration when it is locally defined; remote-origin
 * configurations expose no local part and yield null.
 */
@Override
public PipelineConfigs getLocal() {
    return this.isLocal() ? this : null;
}
// An empty, null-origin config counts as local, so getLocal() returns itself.
// NOTE(review): getLocal() can return null for non-local origins; calling
// size() before assertSame relies on createEmpty() being local-origin — confirm.
@Test public void shouldReturnSelfForGetLocalWhenOriginIsNull() { PipelineConfigs pipelineConfigs = createEmpty(); assertThat(pipelineConfigs.getLocal().size(), is(0)); assertSame(pipelineConfigs, pipelineConfigs.getLocal()); }
/**
 * Drains the pending queue, scheduling task runs until the concurrency limit
 * (Config.task_runs_concurrency) is reached or no schedulable run remains.
 * For each polled run the supplied action is applied first, then the run is
 * recorded in runningTaskRunMap keyed by task id.
 */
public void scheduledPendingTaskRun(Consumer<TaskRun> action) { int currentRunning = runningTaskRunMap.size(); if (currentRunning >= Config.task_runs_concurrency) { return; } while (!pendingTaskRunQueue.isEmpty()) { if (currentRunning >= Config.task_runs_concurrency) { break; } TaskRun taskRun = pendingTaskRunQueue.poll(this::canTaskRunBeScheduled); if (taskRun == null) { break; } // do schedule action action.accept(taskRun); // put it into running task run map runningTaskRunMap.put(taskRun.getTaskId(), taskRun); currentRunning += 1; } }
// Queues N pending runs, schedules up to the concurrency limit, and checks:
// every scheduled run is among the first task_runs_concurrency submissions,
// the running/pending counts split accordingly, and getRunnableTaskRun still
// resolves every run (running and still-pending) by task id.
@Test public void testScheduledPendingTaskRun() { Task task = new Task("test"); task.setDefinition("select 1"); List<TaskRun> taskRuns = Lists.newArrayList(); TaskRunScheduler scheduler = new TaskRunScheduler(); for (int i = 0; i < N; i++) { TaskRun taskRun = makeTaskRun(i, task, makeExecuteOption(true, false, 1), i); taskRuns.add(taskRun); scheduler.addPendingTaskRun(taskRun); } Set<TaskRun> runningTaskRuns = Sets.newHashSet(taskRuns.subList(0, Config.task_runs_concurrency)); scheduler.scheduledPendingTaskRun(taskRun -> { Assert.assertTrue(runningTaskRuns.contains(taskRun)); }); Assert.assertTrue(scheduler.getRunningTaskCount() == Config.task_runs_concurrency); Assert.assertTrue(scheduler.getPendingQueueCount() == N - Config.task_runs_concurrency); for (int i = 0; i < Config.task_runs_concurrency; i++) { Assert.assertTrue(scheduler.getRunnableTaskRun(i).equals(taskRuns.get(i))); } for (int i = Config.task_runs_concurrency; i < N; i++) { Assert.assertTrue(scheduler.getRunnableTaskRun(i).equals(taskRuns.get(i))); } }
/**
 * Routes a block to this exchange's sending mailboxes.
 * Error blocks fan out to every mailbox so the error propagates. Successful
 * end-of-stream blocks send the metadata-carrying block to one randomly chosen
 * mailbox and plain EOS blocks to the rest. Data blocks are routed normally
 * unless every mailbox has already early-terminated, in which case the block
 * is dropped.
 *
 * @return true only when all mailboxes are early-terminated and the caller
 *         should stop sending; false for error/EOS blocks and routed data
 */
public boolean send(TransferableBlock block) throws Exception { if (block.isErrorBlock()) { // Send error block to all mailboxes to propagate the error for (SendingMailbox sendingMailbox : _sendingMailboxes) { sendBlock(sendingMailbox, block); } return false; } if (block.isSuccessfulEndOfStreamBlock()) { // Send metadata to only one randomly picked mailbox, and empty EOS block to other mailboxes int numMailboxes = _sendingMailboxes.size(); int mailboxIdToSendMetadata = ThreadLocalRandom.current().nextInt(numMailboxes); assert block.getQueryStats() != null; for (int i = 0; i < numMailboxes; i++) { SendingMailbox sendingMailbox = _sendingMailboxes.get(i); TransferableBlock blockToSend = i == mailboxIdToSendMetadata ? block : TransferableBlockUtils.getEndOfStreamTransferableBlock(); sendBlock(sendingMailbox, blockToSend); } return false; } assert block.isDataBlock(); boolean isEarlyTerminated = true; for (SendingMailbox sendingMailbox : _sendingMailboxes) { if (!sendingMailbox.isEarlyTerminated()) { isEarlyTerminated = false; break; } } if (!isEarlyTerminated) { route(_sendingMailboxes, block); } return isEarlyTerminated; }
// A data block routed through an exchange whose destination list contains only
// mailbox1 must reach mailbox1 exactly once and never touch mailbox2.
@Test public void shouldSendDataBlocksOnlyToTargetDestination() throws Exception { // Given: List<SendingMailbox> destinations = ImmutableList.of(_mailbox1); BlockExchange exchange = new TestBlockExchange(destinations); TransferableBlock block = new TransferableBlock(ImmutableList.of(new Object[]{"val"}), new DataSchema(new String[]{"foo"}, new ColumnDataType[]{ColumnDataType.STRING}), DataBlock.Type.ROW); // When: exchange.send(block); // Then: ArgumentCaptor<TransferableBlock> captor = ArgumentCaptor.forClass(TransferableBlock.class); Mockito.verify(_mailbox1, Mockito.times(1)).send(captor.capture()); Assert.assertEquals(captor.getValue().getContainer(), block.getContainer()); Mockito.verify(_mailbox2, Mockito.never()).send(Mockito.any()); }
/**
 * Runs the action only while the scheduler is still in the expected state;
 * otherwise the scheduled action is logged and ignored. Any throwable escaping
 * the action is treated as fatal.
 */
@Override
public void runIfState(State expectedState, Runnable action) {
    if (!isState(expectedState)) {
        LOG.debug(
                "Ignoring scheduled action because expected state {} is not the actual state {}.",
                expectedState,
                state);
        return;
    }
    try {
        action.run();
    } catch (Throwable t) {
        fatalErrorHandler.onFatalError(t);
    }
}
// When the expected state matches the scheduler's current state, the action runs.
@Test void testRunIfState() throws Exception { final AdaptiveScheduler scheduler = new AdaptiveSchedulerBuilder( createJobGraph(), mainThreadExecutor, EXECUTOR_RESOURCE.getExecutor()) .build(); AtomicBoolean ran = new AtomicBoolean(false); scheduler.runIfState(scheduler.getState(), () -> ran.set(true)); assertThat(ran.get()).isTrue(); }
/**
 * Reads a 16-bit big-endian value starting at {@code offset}. The high byte is
 * sign-extended (so two's-complement short values come back as the equivalent
 * negative int) while the low byte contributes its unsigned value.
 */
static int getShort(byte[] buffer, int offset) {
    int high = buffer[offset];
    int low = buffer[offset + 1] & 0xff;
    // high << 8 has its low 8 bits clear, so '+' is equivalent to '|' here.
    return (high << 8) + low;
}
@Test
public void getShortTest() {
    // Table of (high byte, low byte) inputs and the expected big-endian value,
    // covering zero, positive low bytes, sign-bit low bytes and a set high byte.
    byte[][] inputs = {
            {0, 0}, {0, 1}, {0, 127}, {0, -128}, {0, -127}, {0, -1}, {1, 0}, {1, 1}
    };
    int[] expected = {0, 1, 127, 128, 129, 255, 256, 257};
    for (int i = 0; i < inputs.length; i++) {
        Assert.assertEquals(expected[i], Deserializer.getShort(inputs[i], 0));
    }
}
/** Returns the total configured memory budget, in bytes. */
long getTotalMemorySize() { return totalMemorySize; }
// The helper creates a budget of 100 bytes; the getter must report exactly that.
@Test void testGetTotalMemory() { UnsafeMemoryBudget budget = createUnsafeMemoryBudget(); assertThat(budget.getTotalMemorySize()).isEqualTo(100L); }
/**
 * Applies the given {@link OpenAPISpecFilter} to the specification and returns a
 * filtered copy: top-level metadata is carried over, paths and webhooks are run
 * through the path-item filter, tags referenced only by filtered-out operations
 * are dropped, and components are copied with schemas filtered.
 *
 * @return the filtered clone, or null when the filter rejects the whole spec
 */
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params, Map<String, String> cookies, Map<String, List<String>> headers) {
    OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers);
    if (filteredOpenAPI == null) {
        return filteredOpenAPI;
    }
    OpenAPI clone = new OpenAPI();
    clone.info(filteredOpenAPI.getInfo());
    clone.openapi(filteredOpenAPI.getOpenapi());
    clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect());
    clone.setSpecVersion(filteredOpenAPI.getSpecVersion());
    clone.setExtensions(filteredOpenAPI.getExtensions());
    clone.setExternalDocs(filteredOpenAPI.getExternalDocs());
    clone.setSecurity(filteredOpenAPI.getSecurity());
    clone.setServers(filteredOpenAPI.getServers());
    // Fix: copy tags from the filtered spec; previously this null-checked
    // filteredOpenAPI.getTags() but copied from the unfiltered openAPI.
    clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(filteredOpenAPI.getTags()));
    final Set<String> allowedTags = new HashSet<>();
    final Set<String> filteredTags = new HashSet<>();
    Paths clonedPaths = new Paths();
    if (filteredOpenAPI.getPaths() != null) {
        for (String resourcePath : filteredOpenAPI.getPaths().keySet()) {
            PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                // Keep only path items that still expose at least one operation.
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clonedPaths.addPathItem(resourcePath, clonedPathItem);
                }
            }
        }
        clone.paths(clonedPaths);
    }
    // Drop tags that are only referenced by operations the filter removed.
    filteredTags.removeAll(allowedTags);
    final List<Tag> tags = clone.getTags();
    if (tags != null && !filteredTags.isEmpty()) {
        tags.removeIf(tag -> filteredTags.contains(tag.getName()));
        if (clone.getTags().isEmpty()) {
            clone.setTags(null);
        }
    }
    if (filteredOpenAPI.getWebhooks() != null) {
        for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) {
            // Fix: webhook path items must be looked up in getWebhooks();
            // previously this read getPaths(), which holds no webhook keys and
            // yielded null (or NPE when the spec has webhooks but no paths).
            PathItem pathItem = filteredOpenAPI.getWebhooks().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clone.addWebhooks(resourcePath, clonedPathItem);
                }
            }
        }
    }
    if (filteredOpenAPI.getComponents() != null) {
        clone.components(new Components());
        clone.getComponents().setSchemas(filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers));
        clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes());
        clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks());
        clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples());
        clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions());
        clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders());
        clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks());
        clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters());
        clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies());
        clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses());
        clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems());
    }
    if (filter.isRemovingUnreferencedDefinitions()) {
        clone = removeBrokenReferenceDefinitions(clone);
    }
    return clone;
}
// Runs the spec through a filter that removes non-query parameters and checks
// every remaining operation's parameters via validateParameters.
@Test(description = "it should filter any query parameter") public void filterAwayQueryParameters() throws IOException { final OpenAPI openAPI = getOpenAPI(RESOURCE_PATH); final OpenAPI filtered = new SpecFilter().filter(openAPI, new NoParametersWithoutQueryInFilter(), null, null, null); if (filtered.getPaths() != null) { for (Map.Entry<String, PathItem> entry : filtered.getPaths().entrySet()) { validateParameters(entry.getValue().getGet()); validateParameters(entry.getValue().getPost()); validateParameters(entry.getValue().getPut()); validateParameters(entry.getValue().getPatch()); validateParameters(entry.getValue().getHead()); validateParameters(entry.getValue().getDelete()); validateParameters(entry.getValue().getOptions()); } } }
/** Exposes the local near-cache contents as a map view, delegating to the local cache view. */
@Override public Map<K, V> getCachedMap() { return localCacheView.getCachedMap(); }
// remove(key, value) must evict the entry from both the Redis map and the local
// cache view. NOTE(review): the 50ms sleep waits for cache invalidation to
// propagate — timing-based and potentially flaky; confirm it is sufficient.
@Test public void testRemoveValue() throws InterruptedException { RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap(LocalCachedMapOptions.name("test")); Map<SimpleKey, SimpleValue> cache = map.getCachedMap(); map.put(new SimpleKey("1"), new SimpleValue("2")); boolean res = map.remove(new SimpleKey("1"), new SimpleValue("2")); Assertions.assertTrue(res); Thread.sleep(50); SimpleValue val1 = map.get(new SimpleKey("1")); Assertions.assertNull(val1); Assertions.assertEquals(0, map.size()); assertThat(cache.size()).isEqualTo(0); }
/**
 * gRPC handler that clears the authenticated account's username link.
 * Flow: rate-limit by account id, load the account (UNAUTHENTICATED when
 * missing), persist null link details, then return an empty response.
 */
@Override public Mono<DeleteUsernameLinkResponse> deleteUsernameLink(final DeleteUsernameLinkRequest request) { final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedDevice(); return rateLimiters.getUsernameLinkOperationLimiter().validateReactive(authenticatedDevice.accountIdentifier()) .then(Mono.fromFuture(() -> accountsManager.getByAccountIdentifierAsync(authenticatedDevice.accountIdentifier()))) .map(maybeAccount -> maybeAccount.orElseThrow(Status.UNAUTHENTICATED::asRuntimeException)) .flatMap(account -> Mono.fromFuture(() -> accountsManager.updateAsync(account, a -> a.setUsernameLinkDetails(null, null)))) .thenReturn(DeleteUsernameLinkResponse.newBuilder().build()); }
// When the rate limiter rejects the call, the RPC must surface a
// rate-limit-exceeded status carrying the retry-after duration, and the
// accounts manager must remain untouched.
@Test void deleteUsernameLinkRateLimited() { final Duration retryAfter = Duration.ofSeconds(11); when(rateLimiter.validateReactive(any(UUID.class))) .thenReturn(Mono.error(new RateLimitExceededException(retryAfter))); //noinspection ResultOfMethodCallIgnored GrpcTestUtils.assertRateLimitExceeded(retryAfter, () -> authenticatedServiceStub().deleteUsernameLink(DeleteUsernameLinkRequest.newBuilder().build()), accountsManager); }
@Override protected void setDefaults() { super.setDefaults(); try { final Process echo = Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", "echo ~"}); this.setDefault("local.user.home", StringUtils.strip(IOUtils.toString(echo.getInputStream(), Charset.defaultCharset()))); } catch(IOException e) { log.warn("Failure determining user home with `echo ~`"); } this.setDefault("ssh.authentication.agent.enable", String.valueOf(false)); // Lowercase folder names to use when looking for profiles and bookmarks in user support directory this.setDefault("bookmarks.folder.name", "bookmarks"); this.setDefault("profiles.folder.name", "profiles"); this.setDefault("connection.ssl.securerandom.algorithm", "NativePRNGNonBlocking"); }
// After loading defaults, the Linux preferences must expose the non-blocking
// SecureRandom algorithm and the unsecure host password store factory.
@Test public void setDefaults() { final LinuxTerminalPreferences prefs = new LinuxTerminalPreferences(); prefs.load(); prefs.setLogging("debug"); prefs.setFactories(); prefs.setDefaults(); assertEquals("NativePRNGNonBlocking", prefs.getProperty("connection.ssl.securerandom.algorithm")); assertEquals(UnsecureHostPasswordStore.class.getName(), prefs.getProperty("factory.passwordstore.class")); }
/**
 * Static factory for a schema-change AlterReplicaTask targeting a local (disk)
 * tablet: fixes the job type to SCHEMA_CHANGE and the tablet type to
 * TABLET_TYPE_DISK, forwarding all ids/hashes/version to the constructor.
 * NOTE(review): the trailing 0 and null arguments map to constructor
 * parameters not visible here — confirm against the AlterReplicaTask ctor.
 */
public static AlterReplicaTask alterLocalTablet(long backendId, long dbId, long tableId, long partitionId, long rollupIndexId, long rollupTabletId, long baseTabletId, long newReplicaId, int newSchemaHash, int baseSchemaHash, long version, long jobId, TAlterTabletMaterializedColumnReq generatedColumnReq, List<Column> baseSchemaColumns) { return new AlterReplicaTask(backendId, dbId, tableId, partitionId, rollupIndexId, rollupTabletId, baseTabletId, newReplicaId, newSchemaHash, baseSchemaHash, version, jobId, AlterJobV2.JobType.SCHEMA_CHANGE, TTabletType.TABLET_TYPE_DISK, 0, generatedColumnReq, baseSchemaColumns, null); }
// Builds a task with distinct values per argument slot, then checks every getter
// and the thrift conversion reflect the same values (note base/new swap in thrift).
@Test public void testAlterLocalTablet() { AlterReplicaTask task = AlterReplicaTask.alterLocalTablet(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, null, Collections.emptyList()); Assert.assertEquals(1, task.getBackendId()); Assert.assertEquals(2, task.getDbId()); Assert.assertEquals(3, task.getTableId()); Assert.assertEquals(4, task.getPartitionId()); Assert.assertEquals(5, task.getIndexId()); Assert.assertEquals(6, task.getTabletId()); Assert.assertEquals(7, task.getBaseTabletId()); Assert.assertEquals(8, task.getNewReplicaId()); Assert.assertEquals(9, task.getNewSchemaHash()); Assert.assertEquals(10, task.getBaseSchemaHash()); Assert.assertEquals(11, task.getVersion()); Assert.assertEquals(12, task.getJobId()); Assert.assertEquals(AlterJobV2.JobType.SCHEMA_CHANGE, task.getJobType()); TAlterTabletReqV2 request = task.toThrift(); Assert.assertEquals(7, request.base_tablet_id); Assert.assertEquals(6, request.new_tablet_id); Assert.assertEquals(10, request.base_schema_hash); Assert.assertEquals(9, request.new_schema_hash); Assert.assertEquals(11, request.alter_version); Assert.assertFalse(request.isSetMaterialized_view_params()); Assert.assertEquals(TTabletType.TABLET_TYPE_DISK, request.tablet_type); }
/**
 * Fetches and prints broker configuration. With -b, queries the single broker
 * address; with -c, resolves the cluster's masters and their slaves and prints
 * each in turn (-b takes precedence when both are given). Failures are wrapped
 * in SubCommandException; the admin client is always shut down.
 */
@Override public void execute(final CommandLine commandLine, final Options options, final RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { if (commandLine.hasOption('b')) { String brokerAddr = commandLine.getOptionValue('b').trim(); defaultMQAdminExt.start(); getAndPrint(defaultMQAdminExt, String.format("============%s============\n", brokerAddr), brokerAddr); } else if (commandLine.hasOption('c')) { String clusterName = commandLine.getOptionValue('c').trim(); defaultMQAdminExt.start(); Map<String, List<String>> masterAndSlaveMap = CommandUtil.fetchMasterAndSlaveDistinguish(defaultMQAdminExt, clusterName); for (String masterAddr : masterAndSlaveMap.keySet()) { if (masterAddr == null) { continue; } getAndPrint( defaultMQAdminExt, String.format("============Master: %s============\n", masterAddr), masterAddr ); for (String slaveAddr : masterAndSlaveMap.get(masterAddr)) { if (slaveAddr == null) { continue; } getAndPrint( defaultMQAdminExt, String.format("============My Master: %s=====Slave: %s============\n", masterAddr, slaveAddr), slaveAddr ); } } } } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } }
// Smoke test: parses both -b and -c options and runs the command.
// NOTE(review): execute() checks -b before -c, so only the broker branch runs here.
@Test public void testExecute() throws SubCommandException { GetBrokerConfigCommand cmd = new GetBrokerConfigCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[] {"-b 127.0.0.1:" + listenPort(), "-c default-cluster"}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); cmd.execute(commandLine, options, null); }
/**
 * Persists a detached live object with PERSIST cascade semantics. A fresh
 * identity map is passed down so cyclic object graphs are persisted once
 * instead of recursing forever.
 */
@Override
public <T> T persist(T detachedObject) {
    return persist(detachedObject, new HashMap<Object, Object>(), RCascadeType.PERSIST);
}
// Persisting two indexed live objects in one call against a cluster must not throw.
@Test public void testPersistInCluster() { testInCluster(redisson -> { RLiveObjectService liveObjectService = redisson.getLiveObjectService(); TestIndexed item1 = new TestIndexed("1"); item1.setName1("name1"); item1.setName2("name2"); item1.setNum1(123); TestIndexed item2 = new TestIndexed("2"); liveObjectService.persist(item1, item2); }); }
/** Shorthand for {@code get(Group.Alphabetic, c)}: reads the component from the alphabetic group. */
public String get(Component c) { return get(Group.Alphabetic, c); }
// Parses a DICOM person name with alphabetic, ideographic and phonetic groups
// ('=' separates groups, '^' separates components) and checks each component.
@Test public void testValueOf2() { PersonName pn = new PersonName("Hong^Gildong=洪^吉洞=홍^길동"); assertEquals("Hong", pn.get(PersonName.Group.Alphabetic, PersonName.Component.FamilyName)); assertEquals("Gildong", pn.get(PersonName.Group.Alphabetic, PersonName.Component.GivenName)); assertEquals("洪", pn.get(PersonName.Group.Ideographic, PersonName.Component.FamilyName)); assertEquals("吉洞", pn.get(PersonName.Group.Ideographic, PersonName.Component.GivenName)); assertEquals("홍", pn.get(PersonName.Group.Phonetic, PersonName.Component.FamilyName)); assertEquals("길동", pn.get(PersonName.Group.Phonetic, PersonName.Component.GivenName)); }
/** Returns the client tags configured for this client state. */
public Map<String, String> clientTags() { return clientTags; }
// The tags passed into the constructor must be returned unchanged.
@Test public void shouldReturnClientTags() { final Map<String, String> clientTags = mkMap(mkEntry("k1", "v1")); assertEquals(clientTags, new ClientState(null, 0, clientTags).clientTags()); }
/**
 * Deregisters the management service only when registration is enabled;
 * otherwise the call is a no-op.
 */
@Override
protected void deregisterManagement() {
    if (this.registration.isRegisterEnabled()) {
        super.deregisterManagement();
    }
}
// Both the disabled (skip) and enabled (delegate) paths must complete without throwing.
@Test public void testDeregisterManagement() { doReturn(false).when(registration).isRegisterEnabled(); assertThatCode(() -> { polarisAutoServiceRegistration.registerManagement(); }).doesNotThrowAnyException(); doReturn(true).when(registration).isRegisterEnabled(); assertThatCode(() -> { polarisAutoServiceRegistration.deregisterManagement(); }).doesNotThrowAnyException(); }
/** This register service contributes no selector handler payload; always returns the empty string. */
@Override protected String selectorHandler(final MetaDataRegisterDTO metaDataDTO) { return ""; }
// selectorHandler must return the empty string regardless of the DTO contents.
@Test public void testSelectorHandler() { MetaDataRegisterDTO metaDataRegisterDTO = MetaDataRegisterDTO.builder().build(); assertEquals(StringUtils.EMPTY, shenyuClientRegisterDubboService.selectorHandler(metaDataRegisterDTO)); }
public void updateSubscription(String subscriptionName, Properties properties) { // Validate the subscription properties. ClientMetricsConfigs.validate(subscriptionName, properties); // IncrementalAlterConfigs API will send empty configs when all the configs are deleted // for respective subscription. In that case, we need to remove the subscription from the map. if (properties.isEmpty()) { // Remove the subscription from the map if it exists, else ignore the config update. if (subscriptionMap.containsKey(subscriptionName)) { log.info("Removing subscription [{}] from the subscription map", subscriptionName); subscriptionMap.remove(subscriptionName); subscriptionUpdateVersion.incrementAndGet(); } return; } updateClientSubscription(subscriptionName, new ClientMetricsConfigs(properties)); /* Increment subscription update version to indicate that there is a change in the subscription. This will be used to determine if the next telemetry request needs to re-evaluate the subscription id as per the changed subscriptions. */ subscriptionUpdateVersion.incrementAndGet(); }
// Registers one subscription and verifies: the stored metrics set, the push
// interval, the compiled client-match patterns, the bumped update version, and
// the manager's registered metric count (no instances while only updating).
@Test public void testUpdateSubscription() throws Exception { assertTrue(clientMetricsManager.subscriptions().isEmpty()); assertEquals(0, clientMetricsManager.subscriptionUpdateVersion()); clientMetricsManager.updateSubscription("sub-1", ClientMetricsTestUtils.defaultProperties()); assertEquals(1, clientMetricsManager.subscriptions().size()); assertNotNull(clientMetricsManager.subscriptionInfo("sub-1")); ClientMetricsManager.SubscriptionInfo subscriptionInfo = clientMetricsManager.subscriptionInfo("sub-1"); Set<String> metrics = subscriptionInfo.metrics(); // Validate metrics. assertEquals(ClientMetricsTestUtils.DEFAULT_METRICS.split(",").length, metrics.size()); Arrays.stream(ClientMetricsTestUtils.DEFAULT_METRICS.split(",")).forEach(metric -> assertTrue(metrics.contains(metric))); // Validate push interval. assertEquals(ClientMetricsTestUtils.defaultProperties().getProperty(ClientMetricsConfigs.PUSH_INTERVAL_MS), String.valueOf(subscriptionInfo.intervalMs())); // Validate match patterns. assertEquals(ClientMetricsTestUtils.DEFAULT_CLIENT_MATCH_PATTERNS.size(), subscriptionInfo.matchPattern().size()); ClientMetricsTestUtils.DEFAULT_CLIENT_MATCH_PATTERNS.forEach(pattern -> { String[] split = pattern.split("="); assertTrue(subscriptionInfo.matchPattern().containsKey(split[0])); assertEquals(split[1], subscriptionInfo.matchPattern().get(split[0]).pattern()); }); assertEquals(1, clientMetricsManager.subscriptionUpdateVersion()); // Validate metrics should have instance count metric, 2 unknown subscription count metrics // and kafka metrics count registered i.e. 4 metrics. assertEquals(4, kafkaMetrics.metrics().size()); // Metrics should not have any instance while updating the subscriptions. assertEquals((double) 0, getMetric(ClientMetricsManager.ClientMetricsStats.INSTANCE_COUNT).metricValue()); }
/**
 * Returns the cached entry-set view. The assertion documents the precondition
 * that the backing base directory has been initialized before any access.
 */
@Override public Set<Entry<Integer, R>> entrySet() { assert baseDirInitialized(); return entrySet; }
// Regression for JENKINS-18065: every entry iterated from entrySet() must also
// satisfy entrySet().contains(entry) on a fresh view.
@Issue("JENKINS-18065") @Test public void entrySetContains() { for (Map.Entry<Integer, Build> e : a.entrySet()) { assertTrue(a.entrySet().contains(e)); } }
@Override public ProxyInvocationHandler parserInterfaceToProxy(Object target, String objectName) { // eliminate the bean without two phase annotation. Set<String> methodsToProxy = this.tccProxyTargetMethod(target); if (methodsToProxy.isEmpty()) { return null; } // register resource and enhance with interceptor DefaultResourceRegisterParser.get().registerResource(target, objectName); return new TccActionInterceptorHandler(target, methodsToProxy); }
// Nested TCC with REQUIRES_NEW inner transaction: the outer transaction rolls
// back (prepare returns false) while both the nested action and the inner
// REQUIRES_NEW action must still end up committed.
@Test public void testNestTcc_required_new_should_rollback_commit() throws Exception { TccActionImpl tccAction = new TccActionImpl(); TccAction tccActionProxy = ProxyUtil.createProxy(tccAction, "oldtccAction"); Assertions.assertNotNull(tccActionProxy); NestTccActionImpl nestTccAction = new NestTccActionImpl(); nestTccAction.setTccAction(tccActionProxy); //when ProxyInvocationHandler proxyInvocationHandler = DefaultInterfaceParser.get().parserInterfaceToProxy(nestTccAction, nestTccAction.getClass().getName()); //then Assertions.assertNotNull(proxyInvocationHandler); //when NestTccAction nestTccActionProxy = ProxyUtil.createProxy(nestTccAction, "oldtccActionProxy"); //then Assertions.assertNotNull(nestTccActionProxy); // transaction commit test GlobalTransaction tx = GlobalTransactionContext.getCurrentOrCreate(); try { tx.begin(60000, "testBiz"); boolean result = nestTccActionProxy.prepareNestRequiredNew(null, 1); Assertions.assertFalse(result); if (result) { tx.commit(); } else { tx.rollback(); } } catch (Exception exx) { tx.rollback(); throw exx; } Assertions.assertTrue(nestTccAction.isCommit()); Assertions.assertTrue(tccAction.isCommit()); }
/**
 * Entry point invoked on unauthenticated access: writes a 401 response with a
 * JSON body built from CustomError carrying the AUTH_ERROR header.
 * NOTE(review): a fresh DateFormat is obtained per call for the writer, and the
 * output stream is never explicitly flushed — presumably the container flushes
 * it on completion; confirm.
 */
@Override public void commence(final HttpServletRequest httpServletRequest, final HttpServletResponse httpServletResponse, final AuthenticationException authenticationException) throws IOException { httpServletResponse.setContentType(MediaType.APPLICATION_JSON_VALUE); httpServletResponse.setStatus(HttpStatus.UNAUTHORIZED.value()); final CustomError customError = CustomError.builder() .header(CustomError.Header.AUTH_ERROR.getName()) .httpStatus(HttpStatus.UNAUTHORIZED) .isSuccess(false) .build(); final String responseBody = OBJECT_MAPPER .writer(DateFormat.getDateInstance()) .writeValueAsString(customError); httpServletResponse.getOutputStream() .write(responseBody.getBytes()); }
// Captures the JSON written by commence() through a stubbed ServletOutputStream,
// verifies status/content-type, and compares the body to an expected CustomError
// serialization — timestamps are truncated to the minute to avoid flakiness.
@Test public void testCommence() throws IOException { // Mock objects HttpServletRequest httpServletRequest = mock(HttpServletRequest.class); HttpServletResponse httpServletResponse = mock(HttpServletResponse.class); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ServletOutputStream servletOutputStream = new ServletOutputStream() { @Override public void write(int b) throws IOException { byteArrayOutputStream.write(b); } @Override public boolean isReady() { return true; } @Override public void setWriteListener(WriteListener writeListener) { // No-op } }; // Set up the mocks when(httpServletResponse.getOutputStream()).thenReturn(servletOutputStream); // Call the method to test customAuthenticationEntryPoint.commence(httpServletRequest, httpServletResponse, new AuthenticationException("Test") {}); // Verify that the response status was set verify(httpServletResponse).setStatus(HttpStatus.UNAUTHORIZED.value()); verify(httpServletResponse).setContentType(MediaType.APPLICATION_JSON_VALUE); // Convert the response to a string and verify the content String responseBody = byteArrayOutputStream.toString(); // Use ByteArrayOutputStream ObjectMapper objectMapper = new ObjectMapper(); objectMapper.registerModule(new JavaTimeModule()); CustomError expectedCustomError = CustomError.builder() .header(CustomError.Header.AUTH_ERROR.getName()) .httpStatus(HttpStatus.UNAUTHORIZED) .isSuccess(false) .build(); String expectedResponseBody = objectMapper.writeValueAsString(expectedCustomError); // Parse the JSON response and expected response JsonNode responseNode = objectMapper.readTree(responseBody); JsonNode expectedNode = objectMapper.readTree(expectedResponseBody); // Extract and format the 'time' fields String responseTime = responseNode.get("time").asText(); JsonNode expectedTimeNode = expectedNode.get("time"); // Define a DateTimeFormatter to compare up to minutes DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; // Parse the time 
strings into LocalDateTime objects LocalDateTime responseDateTime = LocalDateTime.parse(responseTime, formatter); LocalDateTime expectedDateTime = convertArrayToLocalDateTime(expectedTimeNode); // Truncate to minutes for comparison responseDateTime = responseDateTime.truncatedTo(ChronoUnit.MINUTES); expectedDateTime = expectedDateTime.truncatedTo(ChronoUnit.MINUTES); // Compare only the date and time up to minutes assertEquals(expectedDateTime, responseDateTime); }
/**
 * Asserts the subject contains at least one of the given elements. The varargs
 * overload simply accumulates the fixed and variadic arguments and delegates
 * to {@code containsAnyIn}.
 */
public final void containsAnyOf( @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) { containsAnyIn(accumulate(first, second, rest)); }
// When none of the candidates are present, the failure must report the
// expected candidates and the actual iterable.
@Test public void iterableContainsAnyOfFailure() { expectFailureWhenTestingThat(asList(1, 2, 3)).containsAnyOf(5, 6, 0); assertFailureKeys("expected to contain any of", "but was"); assertFailureValue("expected to contain any of", "[5, 6, 0]"); }
/**
 * Resolves a child metric name against this one. A null/empty suffix returns a
 * name with this key unchanged; an empty base key adopts the suffix as-is;
 * otherwise the two are joined with the separator. Tags are carried over.
 */
public MetricName resolve(String p) {
    final String resolved;
    if (p == null || p.isEmpty()) {
        resolved = this.key;
    } else if (key == null || key.isEmpty()) {
        resolved = p;
    } else {
        resolved = key + SEPARATOR + p;
    }
    return new MetricName(resolved, tags);
}
// Resolving "bar" against "foo" must join the segments with the separator.
@Test public void testResolve() throws Exception { final MetricName name = new MetricName("foo"); assertThat(name.resolve("bar")).isEqualTo(new MetricName("foo.bar")); }
/**
 * Augmented TF-IDF weight: (a + (1 - a) * tf / maxtf) * log(N / n).
 * Returns 0 when the term does not occur in the document.
 *
 * @param tf    term frequency in the document
 * @param maxtf maximum term frequency in the document
 * @param N     total number of documents in the corpus
 * @param n     number of documents containing the term
 */
public double rank(int tf, int maxtf, long N, long n) {
    if (tf == 0) {
        return 0.0;
    }
    // (1 - a) is a double, so the tf / maxtf division is carried out in double.
    double augmentedTf = a + (1 - a) * tf / maxtf;
    return augmentedTf * Math.log((double) N / n);
}
// Verifies the augmented TF-IDF weight against a precomputed reference value
// for representative corpus statistics.
@Test
public void testRank() {
    // Removed the stray System.out.println debug noise from the test output.
    int freq = 3;
    int maxFreq = 10;
    int N = 10000000;
    int n = 1000;
    TFIDF instance = new TFIDF();
    double expResult = 5.341997;
    double result = instance.rank(freq, maxFreq, N, n);
    assertEquals(expResult, result, 1E-6);
}
/**
 * Rewrites an uncorrelated IN-predicate subquery into an inner join followed by a DISTINCT
 * aggregation and a projection that emits TRUE for the IN-predicate output variable.
 * Bails out (empty result) when the apply node may participate in an anti-join, when there is
 * more than one subquery assignment, or when the single assignment is not an InSubqueryExpression.
 */
@Override
public Result apply(ApplyNode applyNode, Captures captures, Context context) {
    if (applyNode.getMayParticipateInAntiJoin()) {
        return Result.empty();
    }
    Assignments subqueryAssignments = applyNode.getSubqueryAssignments();
    // Only the single-assignment shape is handled by this rule.
    if (subqueryAssignments.size() != 1) {
        return Result.empty();
    }
    RowExpression expression = getOnlyElement(subqueryAssignments.getExpressions());
    if (!(expression instanceof InSubqueryExpression)) {
        return Result.empty();
    }
    InSubqueryExpression inPredicate = (InSubqueryExpression) expression;
    VariableReferenceExpression inPredicateOutputVariable = getOnlyElement(subqueryAssignments.getVariables());
    PlanNode leftInput = applyNode.getInput();
    // Add unique id column if the set of columns do not form a unique key already
    if (!((GroupReference) leftInput).getLogicalProperties().isPresent()
            || !((GroupReference) leftInput).getLogicalProperties().get().isDistinct(ImmutableSet.copyOf(leftInput.getOutputVariables()))) {
        VariableReferenceExpression uniqueKeyVariable = context.getVariableAllocator().newVariable("unique", BIGINT);
        leftInput = new AssignUniqueId(
                applyNode.getSourceLocation(),
                context.getIdAllocator().getNextId(),
                leftInput,
                uniqueKeyVariable);
    }
    VariableReferenceExpression leftVariableReference = inPredicate.getValue();
    VariableReferenceExpression rightVariableReference = inPredicate.getSubquery();
    // Join the (now unique-keyed) input against the subquery on value = subquery output.
    JoinNode innerJoin = new JoinNode(
            applyNode.getSourceLocation(),
            context.getIdAllocator().getNextId(),
            JoinType.INNER,
            leftInput,
            applyNode.getSubquery(),
            ImmutableList.of(new EquiJoinClause(
                leftVariableReference,
                rightVariableReference)),
            ImmutableList.<VariableReferenceExpression>builder()
                    .addAll(leftInput.getOutputVariables())
                    .build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            ImmutableMap.of());
    // Deduplicate join output: the join may multiply rows when the subquery has duplicates.
    AggregationNode distinctNode = new AggregationNode(
            innerJoin.getSourceLocation(),
            context.getIdAllocator().getNextId(),
            innerJoin,
            ImmutableMap.of(),
            singleGroupingSet(ImmutableList.<VariableReferenceExpression>builder()
                    .addAll(innerJoin.getOutputVariables())
                    .build()),
            ImmutableList.of(),
            SINGLE,
            Optional.empty(),
            Optional.empty(),
            Optional.empty());
    ImmutableList<VariableReferenceExpression> referencedOutputs = ImmutableList.<VariableReferenceExpression>builder()
            .addAll(applyNode.getInput().getOutputVariables())
            .add(inPredicateOutputVariable)
            .build();
    // Rows surviving the inner join satisfy the IN predicate, so project TRUE for its output.
    // NOTE(review): local name contains a typo ("Ndde" for "Node").
    ProjectNode finalProjectNdde = new ProjectNode(
            context.getIdAllocator().getNextId(),
            distinctNode,
            Assignments.builder()
                    .putAll(identityAssignments(distinctNode.getOutputVariables()))
                    .put(inPredicateOutputVariable, TRUE_CONSTANT)
                    .build()
                    .filter(referencedOutputs));
    return Result.ofPlanNode(finalProjectNdde);
}
/** The rule only handles uncorrelated subqueries, so a correlated apply must not fire. */
@Test
public void testDoesNotFireOnCorrelation() {
    tester().assertThat(new TransformUncorrelatedInPredicateSubqueryToDistinctInnerJoin())
        .on(p -> p.apply(
            assignment(
                p.variable("x"),
                inSubquery(p.variable("y"), p.variable("z"))),
            ImmutableList.of(p.variable("y")), // non-empty correlation list => rule must bail out
            p.values(p.variable("y")),
            p.values()))
        .doesNotFire();
}
/**
 * Registers the load balancer state with JMX and subscribes a listener that keeps JMX beans
 * (strategies, cluster info, service properties) in sync as the state changes. Each update path
 * installs a dual-read-mode watcher and performs an immediate registration with a null mode.
 */
public void setSimpleLoadBalancerState(SimpleLoadBalancerState state) {
    _watcherManager.updateWatcher(state, this::doRegisterLoadBalancerState);
    doRegisterLoadBalancerState(state, null);
    state.register(new SimpleLoadBalancerStateListener() {
        @Override
        public void onStrategyAdded(String serviceName, String scheme, LoadBalancerStrategy strategy) {
            // Watch for dual-read mode changes, then register the strategy bean right away.
            _watcherManager.updateWatcher(serviceName, scheme, strategy,
                (item, mode) -> doRegisterLoadBalancerStrategy(serviceName, scheme, item, mode));
            doRegisterLoadBalancerStrategy(serviceName, scheme, strategy, null);
        }

        @Override
        public void onStrategyRemoved(String serviceName, String scheme, LoadBalancerStrategy strategy) {
            _watcherManager.removeWatcherForLoadBalancerStrategy(serviceName, scheme);
            _jmxManager.unregister(getLoadBalancerStrategyJmxName(serviceName, scheme, null));
        }

        @Override
        public void onClientAdded(String clusterName, TrackerClient client) {
            // We currently think we can make this no-op as the info provided is not helpful
            // _jmxManager.checkReg(new DegraderControl((DegraderImpl) client.getDegrader(DefaultPartitionAccessor.DEFAULT_PARTITION_ID)),
            //     _prefix + "-" + clusterName + "-" + client.getUri().toString().replace("://", "-") + "-TrackerClient-Degrader");
        }

        @Override
        public void onClientRemoved(String clusterName, TrackerClient client) {
            // We currently think we can make this no-op as the info provided is not helpful
            // _jmxManager.unregister(_prefix + "-" + clusterName + "-" + client.getUri().toString().replace("://", "-") + "-TrackerClient-Degrader");
        }

        @Override
        public void onClusterInfoUpdate(ClusterInfoItem clusterInfoItem) {
            // Guard: items without resolvable cluster properties are ignored.
            if (clusterInfoItem != null && clusterInfoItem.getClusterPropertiesItem() != null
                && clusterInfoItem.getClusterPropertiesItem().getProperty() != null) {
                String clusterName = clusterInfoItem.getClusterPropertiesItem().getProperty().getClusterName();
                _watcherManager.updateWatcher(clusterName, clusterInfoItem,
                    (item, mode) -> doRegisterClusterInfo(clusterName, item, mode));
                doRegisterClusterInfo(clusterName, clusterInfoItem, null);
            }
        }

        @Override
        public void onClusterInfoRemoval(ClusterInfoItem clusterInfoItem) {
            // Guard: items without resolvable cluster properties are ignored.
            if (clusterInfoItem != null && clusterInfoItem.getClusterPropertiesItem() != null
                && clusterInfoItem.getClusterPropertiesItem().getProperty() != null) {
                String clusterName = clusterInfoItem.getClusterPropertiesItem().getProperty().getClusterName();
                _watcherManager.removeWatcherForClusterInfoItem(clusterName);
                _jmxManager.unregister(getClusterInfoJmxName(clusterName, null));
            }
        }

        @Override
        public void onServicePropertiesUpdate(LoadBalancerStateItem<ServiceProperties> serviceProperties) {
            if (serviceProperties != null && serviceProperties.getProperty() != null) {
                String serviceName = serviceProperties.getProperty().getServiceName();
                _watcherManager.updateWatcher(serviceName, serviceProperties,
                    (item, mode) -> doRegisterServiceProperties(serviceName, item, mode));
                doRegisterServiceProperties(serviceName, serviceProperties, null);
            }
        }

        @Override
        public void onServicePropertiesRemoval(LoadBalancerStateItem<ServiceProperties> serviceProperties) {
            if (serviceProperties != null && serviceProperties.getProperty() != null) {
                String serviceName = serviceProperties.getProperty().getServiceName();
                _watcherManager.removeWatcherForServiceProperties(serviceName);
                _jmxManager.unregister(getServicePropertiesJmxName(serviceName, null));
            }
        }

        // Registers the strategy bean under a mode-dependent JMX name.
        private void doRegisterLoadBalancerStrategy(String serviceName, String scheme, LoadBalancerStrategy strategy,
            @Nullable DualReadModeProvider.DualReadMode mode) {
            String jmxName = getLoadBalancerStrategyJmxName(serviceName, scheme, mode);
            _jmxManager.registerLoadBalancerStrategy(jmxName, strategy);
        }

        // Registers the cluster info bean under a mode-dependent JMX name.
        private void doRegisterClusterInfo(String clusterName, ClusterInfoItem clusterInfoItem,
            @Nullable DualReadModeProvider.DualReadMode mode) {
            String jmxName = getClusterInfoJmxName(clusterName, mode);
            _jmxManager.registerClusterInfo(jmxName, clusterInfoItem);
        }

        // Registers the service properties bean under a mode-dependent JMX name.
        private void doRegisterServiceProperties(String serviceName, LoadBalancerStateItem<ServiceProperties> serviceProperties,
            @Nullable DualReadModeProvider.DualReadMode mode) {
            _jmxManager.registerServiceProperties(getServicePropertiesJmxName(serviceName, mode), serviceProperties);
        }

        private String getClusterInfoJmxName(String clusterName, @Nullable DualReadModeProvider.DualReadMode mode) {
            return String.format("%s%s-ClusterInfo", getClusterPrefixForLBPropertyJmxNames(clusterName, mode), clusterName);
        }

        private String getServicePropertiesJmxName(String serviceName, @Nullable DualReadModeProvider.DualReadMode mode) {
            return String.format("%s%s-ServiceProperties", getServicePrefixForLBPropertyJmxNames(serviceName, mode), serviceName);
        }

        private String getLoadBalancerStrategyJmxName(String serviceName, String scheme, @Nullable DualReadModeProvider.DualReadMode mode) {
            return String.format("%s%s-%s-LoadBalancerStrategy", getServicePrefixForLBPropertyJmxNames(serviceName, mode), serviceName, scheme);
        }
    });
}
/**
 * onClusterInfoRemoval must ignore null or property-less items and unregister the
 * "&lt;cluster&gt;-ClusterInfo" JMX bean for a valid item.
 */
@Test(dataProvider = "nonDualReadD2ClientJmxManagers")
public void testSetSimpleLBStateListenerRemoveClusterInfo(String prefix, D2ClientJmxManager.DiscoverySourceType sourceType, Boolean isDualReadLB) {
    D2ClientJmxManagerFixture fixture = new D2ClientJmxManagerFixture();
    D2ClientJmxManager d2ClientJmxManager = fixture.getD2ClientJmxManager(prefix, sourceType, isDualReadLB);
    d2ClientJmxManager.setSimpleLoadBalancerState(fixture._simpleLoadBalancerState);
    Assert.assertEquals(fixture._simpleLoadBalancerStateNameCaptor.getValue(), "Foo-LoadBalancerState");
    Assert.assertEquals(fixture._simpleLoadBalancerStateCaptor.getValue(), fixture._simpleLoadBalancerState);
    // A null item must be ignored: no unregistration.
    fixture._simpleLoadBalancerStateListenerCaptor.getValue().onClusterInfoRemoval(null);
    Mockito.verify(fixture._jmxManager, never()).unregister(anyString());
    // An item without cluster properties must also be ignored.
    fixture._simpleLoadBalancerStateListenerCaptor.getValue().onClusterInfoRemoval(fixture._noPropertyClusterInfoItem);
    Mockito.verify(fixture._jmxManager, never()).unregister(anyString());
    // A valid item unregisters the cluster info bean.
    fixture._simpleLoadBalancerStateListenerCaptor.getValue().onClusterInfoRemoval(fixture._clusterInfoItem);
    Assert.assertEquals(
        fixture._unregisteredObjectNameCaptor.getValue(),
        fixture._clusterInfoItem.getClusterPropertiesItem().getProperty().getClusterName() + "-ClusterInfo");
}
/**
 * Computes the maximum drawdown for a single position.
 *
 * <p>Returns zero for a null or still-open position (missing entry or exit); otherwise
 * derives the position's cash flow and delegates to the drawdown computation.
 *
 * @param series   the bar series
 * @param position the (closed) position to evaluate
 * @return the maximum drawdown, or the series' zero for null/open positions
 */
@Override
public Num calculate(BarSeries series, Position position) {
    boolean isClosedPosition = position != null
            && position.getEntry() != null
            && position.getExit() != null;
    if (!isClosedPosition) {
        // No completed trade => no drawdown to measure.
        return series.zero();
    }
    return calculateMaximumDrawdown(series, null, new CashFlow(series, position));
}
/**
 * An empty series with an empty trading record should yield a drawdown of 0.
 * NOTE(review): the name says "null series" but an empty series is used — presumably historical.
 */
@Test
public void calculateWithNullSeriesSizeShouldReturn0() {
    MockBarSeries series = new MockBarSeries(numFunction, new double[] {});
    AnalysisCriterion mdd = getCriterion();
    assertNumEquals(0d, mdd.calculate(series, new BaseTradingRecord()));
}
/**
 * Returns a builder of a custom batch source.
 *
 * @param name     a descriptive name for the source
 * @param createFn function that creates the source's per-processor context object
 * @param <C>      type of the context object
 */
@Nonnull
public static <C> SourceBuilder<C>.Batch<Void> batch(
    @Nonnull String name,
    @Nonnull FunctionEx<? super Processor.Context, ? extends C> createFn
) {
    return new SourceBuilder<C>(name, createFn).new Batch<>();
}
/**
 * A distributed batch source reads the file on every parallel processor, so each line is
 * expected localParallelism * memberCount times in the sink.
 */
@Test
public void batch_fileSource_distributed() throws Exception {
    // Given
    File textFile = createTestFile();
    // When
    BatchSource<String> fileSource = SourceBuilder
        .batch("distributed-file-source", ctx -> fileReader(textFile))
        .<String>fillBufferFn((in, buf) -> {
            String line = in.readLine();
            if (line != null) {
                buf.add(line);
            } else {
                // End of file: close the buffer to signal source completion.
                buf.close();
            }
        })
        .destroyFn(BufferedReader::close)
        .distributed(PREFERRED_LOCAL_PARALLELISM)
        .build();
    // Then
    Pipeline p = Pipeline.create();
    p.readFrom(fileSource)
        .writeTo(sinkList());
    hz().getJet().newJob(p).join();
    Map<String, Integer> actual = sinkToBag();
    Map<String, Integer> expected = IntStream.range(0, itemCount)
        .boxed()
        .collect(Collectors.toMap(i -> "line" + i, i -> PREFERRED_LOCAL_PARALLELISM * MEMBER_COUNT));
    assertEquals(expected, actual);
}
/**
 * Runs one iteration of the sender loop: services transactional state (if any), then drains
 * accumulated producer data and polls the network client.
 */
void runOnce() {
    if (transactionManager != null) {
        try {
            transactionManager.maybeResolveSequences();
            RuntimeException lastError = transactionManager.lastError();
            // do not continue sending if the transaction manager is in a failed state
            if (transactionManager.hasFatalError()) {
                if (lastError != null)
                    maybeAbortBatches(lastError);
                client.poll(retryBackoffMs, time.milliseconds());
                return;
            }
            if (transactionManager.hasAbortableError() && shouldHandleAuthorizationError(lastError)) {
                return;
            }
            // Check whether we need a new producerId. If so, we will enqueue an InitProducerId
            // request which will be sent below
            transactionManager.bumpIdempotentEpochAndResetIdIfNeeded();
            // If a transactional request was sent/polled, skip the regular produce path this turn.
            if (maybeSendAndPollTransactionalRequest()) {
                return;
            }
        } catch (AuthenticationException e) {
            // This is already logged as error, but propagated here to perform any clean ups.
            log.trace("Authentication exception while processing transactional request", e);
            transactionManager.authenticationFailed(e);
        }
    }
    long currentTimeMs = time.milliseconds();
    long pollTimeout = sendProducerData(currentTimeMs);
    client.poll(pollTimeout, currentTimeMs);
}
/**
 * When a batch fails after the producer epoch is bumped, the sequence number of that partition
 * must be reset for any subsequent batches sent: the epoch bump is applied lazily per partition
 * once its in-flight batches complete.
 */
@Test
public void testEpochBumpOnOutOfOrderSequenceForNextBatchWhenBatchInFlightFails() throws Exception {
    // When a batch failed after the producer epoch is bumped, the sequence number of
    // that partition must be reset for any subsequent batches sent.
    final long producerId = 343434L;
    TransactionManager transactionManager = createTransactionManager();
    // Retries once
    setupWithTransactionState(transactionManager, false, null, true, 1, 0);
    // Init producer id/epoch
    prepareAndReceiveInitProducerId(producerId, Errors.NONE);
    assertEquals(producerId, transactionManager.producerIdAndEpoch().producerId);
    assertEquals(0, transactionManager.producerIdAndEpoch().epoch);
    // Partition 0 - Send first batch
    appendToAccumulator(tp0);
    sender.runOnce();
    // Partition 0 - State is lazily initialized
    assertPartitionState(transactionManager, tp0, producerId, (short) 0, 1, OptionalInt.empty());
    // Partition 0 - Successful response
    sendIdempotentProducerResponse(0, 0, tp0, Errors.NONE, 0, -1);
    sender.runOnce();
    // Partition 0 - Last ack is updated
    assertPartitionState(transactionManager, tp0, producerId, (short) 0, 1, OptionalInt.of(0));
    // Partition 1 - Send first batch
    appendToAccumulator(tp1);
    sender.runOnce();
    // Partition 1 - State is lazily initialized
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 1, OptionalInt.empty());
    // Partition 1 - Successful response
    sendIdempotentProducerResponse(0, 0, tp1, Errors.NONE, 0, -1);
    sender.runOnce();
    // Partition 1 - Last ack is updated
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 1, OptionalInt.of(0));
    // Partition 0 - Send second batch
    appendToAccumulator(tp0);
    sender.runOnce();
    // Partition 0 - Sequence is incremented
    assertPartitionState(transactionManager, tp0, producerId, (short) 0, 2, OptionalInt.of(0));
    // Partition 1 - Send second batch
    appendToAccumulator(tp1);
    sender.runOnce();
    // Partition 1 - Sequence is incremented
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 2, OptionalInt.of(0));
    // Partition 0 - Failed response with OUT_OF_ORDER_SEQUENCE_NUMBER
    sendIdempotentProducerResponse(0, 1, tp0, Errors.OUT_OF_ORDER_SEQUENCE_NUMBER, -1, -1);
    sender.runOnce(); // Receive
    sender.runOnce(); // Bump epoch & Retry
    // Producer epoch is bumped
    assertEquals(1, transactionManager.producerIdAndEpoch().epoch);
    // Partition 0 - State is reset to current producer epoch
    assertPartitionState(transactionManager, tp0, producerId, (short) 1, 1, OptionalInt.empty());
    // Partition 1 - State is not changed. The epoch will be lazily bumped when all in-flight
    // batches are completed
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 2, OptionalInt.of(0));
    assertTrue(transactionManager.hasStaleProducerIdAndEpoch(tp1));
    // Partition 1 - Failed response with NOT_LEADER_OR_FOLLOWER
    sendIdempotentProducerResponse(0, 1, tp1, Errors.NOT_LEADER_OR_FOLLOWER, -1, -1);
    sender.runOnce(); // Receive & Retry
    // Partition 1 - State is not changed.
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 2, OptionalInt.of(0));
    assertTrue(transactionManager.hasStaleProducerIdAndEpoch(tp1));
    // Partition 0 - Successful Response
    sendIdempotentProducerResponse(1, 0, tp0, Errors.NONE, 1, -1);
    sender.runOnce();
    // Partition 0 - Last ack is updated
    assertPartitionState(transactionManager, tp0, producerId, (short) 1, 1, OptionalInt.of(0));
    // Partition 1 - Failed response with NOT_LEADER_OR_FOLLOWER
    sendIdempotentProducerResponse(0, 1, tp1, Errors.NOT_LEADER_OR_FOLLOWER, -1, -1);
    sender.runOnce(); // Receive & Fail the batch (retries exhausted)
    // Partition 1 - State is not changed. It will be lazily updated when the next batch is sent.
    assertPartitionState(transactionManager, tp1, producerId, (short) 0, 2, OptionalInt.of(0));
    assertTrue(transactionManager.hasStaleProducerIdAndEpoch(tp1));
    // Partition 1 - Send third batch
    appendToAccumulator(tp1);
    sender.runOnce();
    // Partition 1 - Epoch is bumped, sequence is reset
    assertPartitionState(transactionManager, tp1, producerId, (short) 1, 1, OptionalInt.empty());
    assertFalse(transactionManager.hasStaleProducerIdAndEpoch(tp1));
    // Partition 1 - Successful Response
    sendIdempotentProducerResponse(1, 0, tp1, Errors.NONE, 0, -1);
    sender.runOnce();
    // Partition 1 - Last ack is updated
    assertPartitionState(transactionManager, tp1, producerId, (short) 1, 1, OptionalInt.of(0));
    // Partition 0 - Send third batch
    appendToAccumulator(tp0);
    sender.runOnce();
    // Partition 0 - Sequence is incremented
    assertPartitionState(transactionManager, tp0, producerId, (short) 1, 2, OptionalInt.of(0));
    // Partition 0 - Successful Response
    sendIdempotentProducerResponse(1, 1, tp0, Errors.NONE, 0, -1);
    sender.runOnce();
    // Partition 0 - Last ack is updated
    assertPartitionState(transactionManager, tp0, producerId, (short) 1, 2, OptionalInt.of(1));
}
/**
 * Containment queries are not supported by this lazy view.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public boolean containsAll(Collection<?> c) {
    throw new UnsupportedOperationException("LazySet does not support contains requests");
}
/** containsAll is unsupported on the lazy set and must throw, even for an empty argument. */
@Test(expected = UnsupportedOperationException.class)
public void testContainsAll_throwsException() {
    set.containsAll(Collections.emptyList());
}
/**
 * Deserializes a JSON object into a map whose values are typed lists.
 *
 * @param json  the JSON string (a null input yields null, per Gson semantics)
 * @param clazz the element type of each list value
 * @param <T>   the list element type
 * @return a {@code Map<String, List<T>>} parsed from the JSON
 */
public <T> Map<String, List<T>> toObjectMapList(final String json, final Class<T> clazz) {
    // Build the reflective List<T> type first, then the enclosing Map<String, List<T>> type.
    final java.lang.reflect.Type listOfClazz = TypeToken.getParameterized(List.class, clazz).getType();
    final java.lang.reflect.Type mapOfLists = TypeToken.getParameterized(Map.class, String.class, listOfClazz).getType();
    return GSON.fromJson(json, mapOfLists);
}
/** Round-trips a map of string lists from JSON and checks null input maps to null. */
@Test
public void testToObjectMapList() {
    List<String> listFirst = ImmutableList.of("111", "222");
    List<String> listSecond = ImmutableList.of("333", "555");
    Map<String, List<String>> map = ImmutableMap.of("data1", listFirst, "data2", listSecond);
    String json = "{\"data1\":[\"111\",\"222\"],\"data2\":[\"333\",\"555\"]}";
    Map<String, List<String>> parseMap = GsonUtils.getInstance().toObjectMapList(json, String.class);
    map.forEach((key, value) -> {
        assertTrue(parseMap.containsKey(key));
        assertEquals(value, parseMap.get(key));
    });
    // Gson returns null for null input JSON.
    assertNull(GsonUtils.getInstance().toObjectMapList(null, String.class));
}
/**
 * Reads all remaining bytes from the buffer into a new array.
 *
 * <p>Advances the buffer's position to its limit as a side effect.
 *
 * @param buffer the source buffer
 * @return a new array holding the bytes between position and limit
 */
public static byte[] readBytes(ByteBuffer buffer) {
    byte[] result = new byte[buffer.remaining()];
    buffer.get(result);
    return result;
}
/** readBytes(buffer, length) should read only the first {@code length} bytes. */
@Test
public void readBytes2Test() {
    byte[] bytes = "AAABBB".getBytes();
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    byte[] bs = BufferUtil.readBytes(buffer, 5);
    assertEquals("AAABB", StrUtil.utf8Str(bs));
}
@Override public String getPath() { var fullPath = request.getRequestURI(); // it shouldn't be null, but in case it is, it's better to return empty string if (fullPath == null) { return Pac4jConstants.EMPTY_STRING; } // very strange use case if (fullPath.startsWith("//")) { fullPath = fullPath.substring(1); } val context = request.getContextPath(); // this one shouldn't be null either, but in case it is, then let's consider it is empty if (context != null) { return fullPath.substring(context.length()); } return fullPath; }
/** A null request URI must be mapped to the empty string rather than throwing an NPE. */
@Test
public void testGetPathNullFullPath() {
    when(request.getRequestURI()).thenReturn(null);
    WebContext context = new JEEContext(request, response);
    assertEquals(Pac4jConstants.EMPTY_STRING, context.getPath());
}
/**
 * Verifies a username/password pair against the configured credentials.
 *
 * <p>Nulls never authenticate. Results are cached in separate valid/invalid caches so the
 * expensive PBKDF2 derivation is only performed on a cache miss. The hash is only computed
 * when the username matches (short-circuit), mirroring the stored-hash comparison.
 *
 * @param username the supplied username (may be null)
 * @param password the supplied password (may be null)
 * @return true when the pair matches the configured username and password hash
 */
@Override
public boolean checkCredentials(String username, String password) {
    if (username == null || password == null) {
        return false;
    }
    Credentials candidate = new Credentials(username, password);
    // Fast path: previously verified credentials.
    if (validCredentialsCache.contains(candidate)) {
        return true;
    }
    // Fast path: previously rejected credentials.
    if (invalidCredentialsCache.contains(candidate)) {
        return false;
    }
    // Slow path: derive the hash (only when the username matches, via short-circuit) and compare.
    boolean isValid = this.username.equals(username)
            && this.passwordHash.equals(
                generatePasswordHash(algorithm, salt, iterations, keyLength, password));
    if (isValid) {
        validCredentialsCache.add(candidate);
    } else {
        invalidCredentialsCache.add(candidate);
    }
    return isValid;
}
/**
 * The stored hash should be accepted in upper-case, colon-free form; only the valid
 * username/password pair may authenticate across the test matrix.
 */
@Test
public void testPBKDF2WithHmacSHA1_upperCaseWithoutColon() throws Exception {
    String algorithm = "PBKDF2WithHmacSHA1";
    int iterations = 1000;
    int keyLength = 128;
    String hash = "17:87:CA:B9:14:73:60:36:8B:20:82:87:92:58:43:B8:A3:85:66:BC:C1:6D:C3:31:6C:1D:47:48:C7:F2:E4:1D:96"
        + ":00:11:F8:4D:94:63:2F:F2:7A:F0:3B:72:63:16:5D:EF:5C:97:CC:EC:59:CB:18:4A:AA:F5:23:63:0B:6E:3B:65"
        + ":E0:72:6E:69:7D:EB:83:05:05:E5:D6:F2:19:99:49:3F:89:DA:DE:83:D7:2B:5B:7D:C9:56:B4:F2:F6:A5:61:29"
        + ":29:ED:DF:4C:4E:8D:EA:DF:47:A2:B0:89:11:86:D4:77:A1:02:E9:0C:26:A4:1E:2A:C1:A8:71:E0:93:8F:A4";
    // Exercise the alternate accepted hash format: upper case, no colon separators.
    hash = hash.toUpperCase().replace(":", "");
    PBKDF2Authenticator PBKDF2Authenticator = new PBKDF2Authenticator(
        "/", VALID_USERNAME, hash, algorithm, SALT, iterations, keyLength);
    for (String username : TEST_USERNAMES) {
        for (String password : TEST_PASSWORDS) {
            // Only the exact valid pair should authenticate.
            boolean expectedIsAuthenticated = VALID_USERNAME.equals(username) && VALID_PASSWORD.equals(password);
            boolean actualIsAuthenticated = PBKDF2Authenticator.checkCredentials(username, password);
            assertEquals(expectedIsAuthenticated, actualIsAuthenticated);
        }
    }
}
/**
 * Removes a trailing BigQuery partition decorator ("$partition") from a table spec.
 *
 * <p>Only the portion after the last {@code '$'} is stripped; a spec with no decorator
 * is returned unchanged.
 *
 * @param tableSpec the table spec, e.g. {@code "project:dataset.table$20171127"}
 * @return the spec without the partition decorator
 */
public static String stripPartitionDecorator(String tableSpec) {
    int decoratorStart = tableSpec.lastIndexOf('$');
    if (decoratorStart < 0) {
        // No decorator present.
        return tableSpec;
    }
    return tableSpec.substring(0, decoratorStart);
}
/** stripPartitionDecorator removes a trailing "$partition" and leaves undecorated specs untouched. */
@Test
public void testTableDecoratorStripping() {
    assertEquals(
        "project:dataset.table",
        BigQueryHelpers.stripPartitionDecorator("project:dataset.table$20171127"));
    assertEquals(
        "project:dataset.table",
        BigQueryHelpers.stripPartitionDecorator("project:dataset.table"));
}
/**
 * Reconciles the system setting ConfigMap identified by the request.
 *
 * <p>Non-system-setting names are ignored. For a present system ConfigMap the finalizer is
 * ensured, route rules are reconciled, and system customization is applied. Never requeues.
 *
 * @param request the reconcile request carrying the resource name
 * @return a non-requeueing result
 */
@Override
public Result reconcile(Request request) {
    final String name = request.name();
    if (isSystemSetting(name)) {
        client.fetch(ConfigMap.class, name)
            .ifPresent(configMap -> {
                addFinalizerIfNecessary(configMap);
                routeRuleReconciler.reconcile(name);
                customizeSystem(name);
            });
    }
    // Same terminal result on every path: no requeue.
    return new Result(false, null);
}
/**
 * Updating the post route rule should persist the new pattern, record the old rule annotation,
 * and publish a change event.
 */
@Test
void reconcilePostRule() {
    ConfigMap configMap = systemConfigMapForRouteRule(rules -> {
        rules.setPost("/post-new/{slug}");
        return rules;
    });
    when(environmentFetcher.getConfigMapBlocking()).thenReturn(Optional.of(configMap));
    when(client.fetch(eq(ConfigMap.class), eq(SystemSetting.SYSTEM_CONFIG)))
        .thenReturn(Optional.of(configMap));
    systemSettingReconciler.reconcile(new Reconciler.Request(SystemSetting.SYSTEM_CONFIG));
    ArgumentCaptor<ConfigMap> captor = ArgumentCaptor.forClass(ConfigMap.class);
    verify(client, times(1)).update(captor.capture());
    ConfigMap updatedConfigMap = captor.getValue();
    assertThat(rulesFrom(updatedConfigMap).getPost()).isEqualTo("/post-new/{slug}");
    assertThat(oldRulesFromAnno(updatedConfigMap).getPost()).isEqualTo("/post-new/{slug}");
    verify(applicationContext, times(1)).publishEvent(any());
}
/**
 * Builds gauges for the JVM's direct and mapped buffer pools.
 *
 * <p>For each pool, the {@code java.nio:type=BufferPool} MBean is probed once; when it exists,
 * one {@link JmxAttributeGauge} per attribute is registered. When it does not (e.g. very old
 * JVMs), the pool is skipped silently at debug level.
 *
 * @return an unmodifiable map of gauge name to gauge
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();
    for (String pool : POOLS) {
        try {
            // The ObjectName and the existence probe are per-pool, not per-attribute:
            // construct and probe once instead of once per attribute.
            final ObjectName on = new ObjectName("java.nio:type=BufferPool,name=" + pool);
            mBeanServer.getMBeanInfo(on);
            for (int i = 0; i < ATTRIBUTES.length; i++) {
                gauges.put(name(pool, NAMES[i]), new JmxAttributeGauge(mBeanServer, on, ATTRIBUTES[i]));
            }
        } catch (JMException ignored) {
            LOGGER.debug("Unable to load buffer pool MBeans, possibly running on Java 6");
        }
    }
    return Collections.unmodifiableMap(gauges);
}
/** The metric set must expose a "direct.count" gauge backed by the MBean's Count attribute. */
@Test
public void includesAGaugeForDirectCount() throws Exception {
    final Gauge gauge = (Gauge) buffers.getMetrics().get("direct.count");
    when(mBeanServer.getAttribute(direct, "Count")).thenReturn(100);
    assertThat(gauge.getValue())
        .isEqualTo(100);
}
/**
 * Records a resource entry of type OUT with a batch count of 1 and no call arguments.
 *
 * @param name the unique resource name
 * @throws BlockException if the entry is blocked by a rule
 */
public static Entry entry(String name) throws BlockException {
    return Env.sph.entry(name, EntryType.OUT, 1, OBJECTS0);
}
/** Entering by Method with an explicit EntryType should carry that type on the resource wrapper. */
@Test
public void testMethodEntryCountType() throws BlockException, NoSuchMethodException, SecurityException {
    Method method = SphUTest.class.getMethod("testMethodEntryNormal");
    Entry e = SphU.entry(method, EntryType.IN, 2);
    assertSame(e.resourceWrapper.getEntryType(), EntryType.IN);
    e.exit();
}
/**
 * Returns the IP of the upstream server that handled this request, or {@code null} if unset.
 */
public String getUpstreamIp() {
    return upstreamIp;
}
/** The upstream IP set on the log entry must be returned verbatim by the getter. */
@Test
public void testGetUpstreamIp() {
    shenyuRequestLog.setUpstreamIp("0.0.0.0");
    // JUnit 5 convention: expected value first, actual second — otherwise a failure
    // message would report the values swapped.
    Assertions.assertEquals("0.0.0.0", shenyuRequestLog.getUpstreamIp());
}
/**
 * Loads a YAML file into the given configuration class.
 *
 * <p>An empty document (SnakeYAML returns null) is replaced by a freshly constructed
 * instance of the target class via its no-arg constructor.
 *
 * @param yamlFile  the YAML file to read
 * @param classType the target configuration class
 * @param <T>       the configuration type
 * @return the loaded configuration, never null
 * @throws IOException if the file cannot be read
 */
@SneakyThrows(ReflectiveOperationException.class)
public static <T extends YamlConfiguration> T unmarshal(final File yamlFile, final Class<T> classType) throws IOException {
    try (BufferedReader reader = Files.newBufferedReader(Paths.get(yamlFile.toURI()))) {
        Yaml yaml = new Yaml(new ShardingSphereYamlConstructor(classType));
        T loaded = yaml.loadAs(reader, classType);
        if (null != loaded) {
            return loaded;
        }
        // Empty YAML document: fall back to a default-constructed configuration.
        return classType.getConstructor().newInstance();
    }
}
/** Unmarshalling a fixture YAML file should populate the configuration's fields. */
@Test
void assertUnmarshalWithFile() throws IOException {
    URL url = getClass().getClassLoader().getResource("yaml/shortcuts-fixture.yaml");
    assertNotNull(url);
    YamlShortcutsConfigurationFixture actual = YamlEngine.unmarshal(new File(url.getFile()), YamlShortcutsConfigurationFixture.class);
    assertThat(actual.getName(), is("test"));
}
/**
 * Loads a JDBC driver by class name using this class's own class loader.
 *
 * @param className the fully qualified driver class name
 * @throws DriverLoadException if the driver cannot be loaded or registered
 */
public static Driver load(String className) throws DriverLoadException {
    final ClassLoader loader = DriverLoader.class.getClassLoader();
    return load(className, loader);
}
/**
 * Loading a driver with multiple path entries (pathSeparator-joined) should succeed;
 * the driver is deregistered afterwards to keep DriverManager state clean.
 */
@Test
public void testLoad_String_String_multiple_paths() {
    final String className = "com.mysql.jdbc.Driver";
    //we know this is in target/test-classes
    //final File testClassPath = (new File(this.getClass().getClassLoader().getResource("org.mortbay.jetty.jar").getPath())).getParentFile();
    final File testClassPath = BaseTest.getResourceAsFile(this, "org.mortbay.jetty.jar").getParentFile();
    final File dir1 = new File(testClassPath, "../../src/test/");
    final File dir2 = new File(testClassPath, "../../src/test/resources/");
    final String paths = String.format("%s" + File.pathSeparator + "%s", dir1.getAbsolutePath(), dir2.getAbsolutePath());
    Driver d = null;
    try {
        d = DriverLoader.load(className, paths);
    } catch (DriverLoadException ex) {
        fail(ex.getMessage());
    } finally {
        // Always deregister so a loaded driver does not leak into other tests.
        if (d != null) {
            try {
                DriverManager.deregisterDriver(d);
            } catch (SQLException ex) {
                fail(ex.getMessage());
            }
        }
    }
}
/**
 * Registers a remote service implementation with a single worker; delegates to the
 * overload that takes an explicit worker count.
 */
@Override
public <T> void register(Class<T> remoteInterface, T object) {
    register(remoteInterface, object, 1);
}
/**
 * A remote call exceeding the 1-second ack timeout must raise RemoteServiceTimeoutException;
 * both Redisson instances are shut down regardless of the outcome.
 */
@Test
public void testTimeout() {
    Assertions.assertThrows(RemoteServiceTimeoutException.class, () -> {
        RedissonClient r1 = createInstance();
        r1.getRemoteService().register(RemoteInterface.class, new RemoteImpl());
        RedissonClient r2 = createInstance();
        RemoteInterface ri = r2.getRemoteService().get(RemoteInterface.class, 1, TimeUnit.SECONDS);
        try {
            ri.timeoutMethod();
        } finally {
            r1.shutdown();
            r2.shutdown();
        }
    });
}
/**
 * Converts a byte array into an IPv6 address.
 *
 * @param value the address bytes — assumed to be 16 bytes; validation presumably happens
 *              in the {@code Ip6Address} constructor (TODO confirm)
 */
public static Ip6Address valueOf(byte[] value) {
    return new Ip6Address(value);
}
/** Passing a null String to valueOf must raise NullPointerException. */
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullString() {
    Ip6Address ipAddress;
    String fromString = null;
    ipAddress = Ip6Address.valueOf(fromString);
}
/**
 * Derives the field name used to null a Salesforce field.
 *
 * <p>Without external-id handling the input field is returned unchanged. With it, the part
 * after the external-id separator is used: custom relationship fields ("..._r") are rewritten
 * to the custom-object form ("..._c"), all other lookup fields get an "Id" suffix. Fields that
 * do not match the expected external-key syntax are returned as-is with a debug warning.
 *
 * @param log        channel used for debug-level diagnostics
 * @param field      the incoming field name (possibly with external-id syntax)
 * @param isUseExtId whether external-id handling is enabled
 * @return the field name to use when nulling the value
 */
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
    String fieldToNullName = field;
    if ( isUseExtId ) {
      // verify if the field has correct syntax
      if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
        if ( log.isDebug() ) {
          log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) );
        }
        // Malformed external key: fall back to the raw field name.
        return fieldToNullName;
      }
      String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
      // working with custom objects and relationship
      // cut off _r and then add _c in the end of the name
      if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
        fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX;
        if ( log.isDebug() ) {
          log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
        }
        return fieldToNullName;
      }
      // Standard relationship lookup: the nullable column is "<lookup>Id".
      fieldToNullName = lookupField + "Id";
    }
    if ( log.isDebug() ) {
      log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
    }
    return fieldToNullName;
}
/** In debug mode, the computed null-field name for a standard object must be logged. */
@Test
public void testFinalNullFieldNameIsLoggedInDebugMode_StandartObject() {
    when( logMock.isDebug() ).thenReturn( true );
    inputFieldName = "Account:ExtID_AccountId__c/Account";
    // Sanity check: nothing logged before the call under test.
    verify( logMock, never() ).logDebug( anyString() );
    SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true );
    verify( logMock ).logDebug( "fieldToNullName=AccountId" );
}
/**
 * Returns the canonical class names of all conversions the schema uses.
 *
 * <p>Duplicates are collapsed because the names are collected into a set.
 *
 * @param schema the schema to inspect
 * @return a set-backed collection of conversion class names
 */
public Collection<String> getUsedConversionClasses(Schema schema) {
    Collection<String> conversionClassNames = new HashSet<>();
    getUsedConversions(schema)
        .forEach(conversion -> conversionClassNames.add(conversion.getClass().getCanonicalName()));
    return conversionClassNames;
}
/**
 * A nullable date logical type nested inside a union-of-records must surface exactly one
 * conversion class: the DateConversion.
 */
@Test
void getUsedConversionClassesForNullableLogicalTypesInUnionOfRecords() throws Exception {
    SpecificCompiler compiler = createCompiler();
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NestedLogicalTypesUnion\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"unionOfRecords\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"RecordInUnion\",\"fields\":[{\"name\":\"nullableDateField\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}]}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
}
/**
 * Returns the element at the given index from a container-like object.
 *
 * <p>Supported containers, checked in order: {@code Map} (returns the index-th entry),
 * {@code List}, {@code Object[]}, {@code Iterator}, other {@code Collection},
 * {@code Enumeration}, and finally any array type via reflection.
 *
 * @param object the container (must not be null)
 * @param index  the zero-based element index (must be non-negative)
 * @return the element at {@code index}
 * @throws IndexOutOfBoundsException if the index is negative or past the end
 * @throws IllegalArgumentException  if the object is null or not a supported container
 */
public static Object get(Object object, int index) {
    if (index < 0) {
        throw new IndexOutOfBoundsException("Index cannot be negative: " + index);
    }
    if (object instanceof Map) {
        // Delegate to the Iterator branch over the entry set.
        return get(((Map) object).entrySet().iterator(), index);
    }
    if (object instanceof List) {
        return ((List) object).get(index);
    }
    if (object instanceof Object[]) {
        return ((Object[]) object)[index];
    }
    if (object instanceof Iterator) {
        Iterator it = (Iterator) object;
        int remaining = index;
        while (it.hasNext()) {
            Object candidate = it.next();
            if (remaining == 0) {
                return candidate;
            }
            remaining--;
        }
        // Message carries the leftover offset, matching the historical behavior.
        throw new IndexOutOfBoundsException("Entry does not exist: " + remaining);
    }
    if (object instanceof Collection) {
        return get(((Collection) object).iterator(), index);
    }
    if (object instanceof Enumeration) {
        Enumeration en = (Enumeration) object;
        int remaining = index;
        while (en.hasMoreElements()) {
            Object candidate = en.nextElement();
            if (remaining == 0) {
                return candidate;
            }
            remaining--;
        }
        throw new IndexOutOfBoundsException("Entry does not exist: " + remaining);
    }
    if (object == null) {
        throw new IllegalArgumentException("Unsupported object type: null");
    }
    // Last resort: primitive or other array types via reflection.
    try {
        return Array.get(object, index);
    } catch (IllegalArgumentException ex) {
        throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName());
    }
}
/** get() must reject a negative index for maps with IndexOutOfBoundsException. */
@Test
void testGetMap1() {
    assertThrows(IndexOutOfBoundsException.class, () -> {
        Map<String, String> map = new HashMap<>();
        map.put("key1", "value1");
        CollectionUtils.get(map, -1);
    });
}
/**
 * Escapes a string using Java string-literal rules (the {@code false} flag means
 * single quotes are left unescaped).
 */
public static String escapeJava(String str) {
    return escapeJavaStyleString(str, false);
}
/** Backslashes and double quotes are escaped; single quotes and slashes are left alone. */
@Test
void testJavaEscaping() {
    assertThat(EncodingUtils.escapeJava("\\hello\"world'space/"))
        .isEqualTo("\\\\hello\\\"world'space/");
}
/**
 * Factory for the Integer sample-standard-deviation UDAF. The aggregate struct tracks the
 * running SUM (int), COUNT (bigint) and M2 (double) fields; the lambdas supply the
 * type-specific pieces consumed by the shared implementation.
 *
 * NOTE(review): lambda contracts are defined by {@code getStdDevImplementation} (not visible
 * here) — presumably: add-to-sum, incremental M2 term, mean difference for merging, merged
 * sum, and sum after undo. The mean-difference lambda divides int by long (integer division);
 * confirm against the shared implementation that this truncation is intended.
 */
@UdafFactory(description = "Compute sample standard deviation of column with type Integer.",
    aggregateSchema = "STRUCT<SUM integer, COUNT bigint, M2 double>")
public static TableUdaf<Integer, Struct, Double> stdDevInt() {
    return getStdDevImplementation(
        0,
        STRUCT_INT,
        (agg, newValue) -> newValue + agg.getInt32(SUM),
        (agg, newValue) -> Double.valueOf(newValue * (agg.getInt64(COUNT) + 1) - (agg.getInt32(SUM) + newValue)),
        (agg1, agg2) -> Double.valueOf(
            agg1.getInt32(SUM) / agg1.getInt64(COUNT) - agg2.getInt32(SUM) / agg2.getInt64(COUNT)),
        (agg1, agg2) -> agg1.getInt32(SUM) + agg2.getInt32(SUM),
        (agg, valueToRemove) -> agg.getInt32(SUM) - valueToRemove);
}
/**
 * Merging two partial aggregates must combine COUNT, SUM and M2 correctly and yield the
 * sample variance/stddev of the union of both value sets.
 */
@Test
public void shouldMergeInts() {
    final TableUdaf<Integer, Struct, Double> udaf = stdDevInt();
    Struct left = udaf.initialize();
    final Integer[] leftValues = new Integer[] {5, 8, 10};
    for (final Integer thisValue : leftValues) {
        left = udaf.aggregate(thisValue, left);
    }
    Struct right = udaf.initialize();
    final Integer[] rightValues = new Integer[] {6, 7, 9};
    for (final Integer thisValue : rightValues) {
        right = udaf.aggregate(thisValue, right);
    }
    final Struct merged = udaf.merge(left, right);
    assertThat(merged.getInt64(COUNT), equalTo(6L));
    assertThat(merged.getInt32(SUM), equalTo(45));
    assertThat(merged.getFloat64(M2), equalTo(17.333333333333332));
    final double standardDev = udaf.map(merged);
    assertThat(standardDev, equalTo(3.4666666666666663));
}
/**
 * Delivers the given notifications by email through every handler registered
 * for their concrete type and returns the total number of deliveries.
 * Returns 0 when no handlers exist or the concrete type cannot be determined.
 * Rejects collections typed as the raw {@code Notification} base class.
 */
public <T extends Notification> int deliverEmails(Collection<T> notifications) {
    if (handlers.isEmpty()) {
        return 0;
    }
    Class<T> notificationType = typeClassOf(notifications);
    if (notificationType == null) {
        return 0;
    }
    checkArgument(notificationType != Notification.class,
        "Type of notification objects must be a subtype of " + Notification.class.getSimpleName());
    // Accumulate across all handlers whose declared class matches exactly.
    int delivered = 0;
    for (NotificationHandler<?> candidate : handlers) {
        if (candidate.getNotificationClass() == notificationType) {
            @SuppressWarnings("unchecked")
            NotificationHandler<T> handler = (NotificationHandler<T>) candidate;
            delivered += handler.deliver(notifications);
        }
    }
    return delivered;
}
@Test
public void deliverEmails_collection_has_no_effect_if_no_handler_nor_dispatcher() {
    // Ten notifications of the base type, but no handlers registered at all.
    List<Notification> notifications = IntStream.range(0, 10)
        .mapToObj(i -> mock(Notification.class))
        .toList();
    NotificationService underTest = new NotificationService(dbClient);
    // With no handlers the service reports zero deliveries and never touches the db.
    assertThat(underTest.deliverEmails(notifications)).isZero();
    verifyNoInteractions(dbClient);
}
/**
 * Creates the main {@link IndexMainType} for the given index and type name.
 * Argument validation (e.g. rejecting null/empty type names, per the tests)
 * is performed by the {@code IndexMainType} constructor.
 */
public static IndexMainType main(Index index, String type) {
    return new IndexMainType(index, type);
}
@Test
@UseDataProvider("nullOrEmpty")
public void main_fails_with_IAE_if_index_name_is_null_or_empty(String nullOrEmpty) {
    Index index = Index.simple("foo");
    // Both null and "" type names must be rejected with the exact message.
    assertThatThrownBy(() -> IndexType.main(index, nullOrEmpty))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("type name can't be null nor empty");
}
/**
 * Loads a class, instrumenting it for the sandbox when the config claims it,
 * and delegating to the parent loader otherwise.
 *
 * @throws ClassNotFoundException if this loader has been closed or loading fails
 */
@Override
public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    // Per-class-name lock (parallel-capable classloader idiom) rather than locking "this".
    synchronized (getClassLoadingLock(name)) {
        // Reuse a class this loader has already defined.
        Class<?> loadedClass = findLoadedClass(name);
        if (loadedClass != null) {
            return loadedClass;
        }
        if (isClosed) {
            throw new ClassNotFoundException("This ClassLoader is closed");
        }
        if (config.shouldAcquire(name)) {
            // Sandbox-owned classes are (possibly) instrumented here; the load is timed.
            loadedClass = PerfStatsCollector.getInstance()
                .measure("load sandboxed class", () -> maybeInstrumentClass(name));
        } else {
            // Everything else is delegated to the parent loader untouched.
            loadedClass = getParent().loadClass(name);
        }
        if (resolve) {
            resolveClass(loadedClass);
        }
        return loadedClass;
    }
}
@Test
public void shouldFixTypesInFieldAccess() throws Exception {
    // Load a class through a sandbox loader configured to remap "forgettable" types.
    setClassLoader(new SandboxClassLoader(createRemappingConfig()));
    Class<?> theClass = loadClass(AClassThatRefersToAForgettableClassInItsConstructor.class);
    Object instance = theClass.getDeclaredConstructor().newInstance();
    // Invoke the instrumented (direct) constructor body; it must not throw.
    Method method = theClass.getDeclaredMethod(
        shadow.directMethodName(theClass.getName(), ShadowConstants.CONSTRUCTOR_METHOD_NAME));
    method.setAccessible(true);
    method.invoke(instance);
}
/**
 * Starts the appender: superclass start-up first, then stack-trace layout setup.
 */
public void start() {
    super.start();
    // Layout is configured after the base start-up; presumably it depends on
    // state initialised there — confirm before reordering.
    setupStackTraceLayout();
}
@Test
public void unknownHostShouldNotCauseStopToFail() {
    // See LOGBACK-960
    sa.setSyslogHost("unknown.host");
    sa.setFacility("MAIL");
    sa.start();
    // stop() must complete even though the host never resolved.
    sa.stop();
}
/**
 * Applies the SetSchemaMetadata transform: rebuilds the record's schema with
 * the configured name and/or version overrides while keeping every other
 * schema attribute (type, optionality, default, doc, parameters, fields,
 * key/value schemas) intact.
 */
@Override
public R apply(R record) {
    final Object value = operatingValue(record);
    final Schema schema = operatingSchema(record);
    // Records with neither value nor schema pass through untouched.
    if (value == null && schema == null) {
        return record;
    }
    requireSchema(schema, "updating schema metadata");
    final boolean isArray = schema.type() == Schema.Type.ARRAY;
    final boolean isMap = schema.type() == Schema.Type.MAP;
    final Schema updatedSchema = new ConnectSchema(
        schema.type(),
        schema.isOptional(),
        schema.defaultValue(),
        // Overrides win; otherwise the original name/version is preserved.
        schemaName != null ? schemaName : schema.name(),
        schemaVersion != null ? schemaVersion : schema.version(),
        schema.doc(),
        schema.parameters(),
        schema.fields(),
        // Key schema only applies to maps; value schema to maps and arrays.
        isMap ? schema.keySchema() : null,
        isMap || isArray ? schema.valueSchema() : null
    );
    log.trace("Applying SetSchemaMetadata SMT. Original schema: {}, updated schema: {}", schema, updatedSchema);
    return newRecord(record, updatedSchema);
}
@Test
public void ignoreRecordWithNullValue() {
    // A record with neither value nor schemas must pass through the SMT untouched.
    final SinkRecord record = new SinkRecord("", 0, null, null, null, null, 0);
    final SinkRecord updatedRecord = xform.apply(record);
    assertNull(updatedRecord.key());
    assertNull(updatedRecord.keySchema());
    assertNull(updatedRecord.value());
    assertNull(updatedRecord.valueSchema());
}
/**
 * Returns the step's properties map, lazily creating and attaching an empty
 * map when the step has none yet.
 */
private static Map<String, Object> getProperties(Step step) {
    Map<String, Object> existing = step.getProperties();
    if (existing != null) {
        return existing;
    }
    Map<String, Object> created = new HashMap<>();
    step.setProperties(created);
    return created;
}
@Test
public void testBatchGroupIntoBatchesTranslation() throws Exception {
    JobSpecification jobSpec = runBatchGroupIntoBatchesAndGetJobSpec(false, Collections.emptyList());
    List<Step> steps = jobSpec.getJob().getSteps();
    // The sharded-state step is the last one in the generated job.
    Step shardedStateStep = steps.get(steps.size() - 1);
    Map<String, Object> properties = shardedStateStep.getProperties();
    // GroupIntoBatches must mark that it preserves keys.
    assertTrue(properties.containsKey(PropertyNames.PRESERVES_KEYS));
    assertEquals("true", getString(properties, PropertyNames.PRESERVES_KEYS));
}
/**
 * Returns the value unchanged, unless the key matches one of the configured
 * sensitive-key patterns, in which case the masked placeholder is returned.
 */
@Nullable
public Object sanitize(String key, @Nullable Object value) {
    boolean sensitive = false;
    for (Pattern keyPattern : sanitizeKeysPatterns) {
        if (keyPattern.matcher(key).matches()) {
            sensitive = true;
            break;
        }
    }
    return sensitive ? SANITIZED_VALUE : value;
}
@Test
void obfuscateCredentials() {
    final var sanitizer = new KafkaConfigSanitizer(true, List.of());
    // Default patterns must mask JAAS/password/auth keys at every prefix level.
    assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("main.consumer.sasl.jaas.config", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("basic.auth.user.info", "secret")).isEqualTo("******");
    //AWS var sanitizing
    assertThat(sanitizer.sanitize("aws.access.key.id", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("aws.accessKeyId", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("aws.secret.access.key", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("aws.secretAccessKey", "secret")).isEqualTo("******");
    assertThat(sanitizer.sanitize("aws.sessionToken", "secret")).isEqualTo("******");
    //Mongo var sanitizing
    assertThat(sanitizer.sanitize("connection.uri", "secret")).isEqualTo("******");
}
/**
 * Applies a ConfigRecord from the metadata log to the in-memory timeline state.
 * A null value deletes the named config; a resource whose config map becomes
 * empty is pruned entirely. Sensitive values are masked in the replay log line.
 */
public void replay(ConfigRecord record) {
    Type type = Type.forId(record.resourceType());
    ConfigResource configResource = new ConfigResource(type, record.resourceName());
    TimelineHashMap<String, String> configs = configData.get(configResource);
    if (configs == null) {
        // First config seen for this resource: create its timeline map.
        configs = new TimelineHashMap<>(snapshotRegistry, 0);
        configData.put(configResource, configs);
    }
    if (record.value() == null) {
        // Null value means deletion of the named config.
        configs.remove(record.name());
    } else {
        configs.put(record.name(), record.value());
    }
    if (configs.isEmpty()) {
        configData.remove(configResource);
    }
    if (configSchema.isSensitive(record)) {
        // Never log sensitive values in clear text.
        log.info("Replayed ConfigRecord for {} which set configuration {} to {}",
            configResource, record.name(), Password.HIDDEN);
    } else {
        log.info("Replayed ConfigRecord for {} which set configuration {} to {}",
            configResource, record.name(), record.value());
    }
}
@Test
public void testReplay() throws Exception {
    ConfigurationControlManager manager = new ConfigurationControlManager.Builder().
        setKafkaConfigSchema(SCHEMA).
        build();
    assertEquals(Collections.emptyMap(), manager.getConfigs(BROKER0));
    // Setting a broker config makes it visible...
    manager.replay(new ConfigRecord().
        setResourceType(BROKER.id()).setResourceName("0").
        setName("foo.bar").setValue("1,2"));
    assertEquals(Collections.singletonMap("foo.bar", "1,2"), manager.getConfigs(BROKER0));
    // ...and replaying a null value deletes it again (empty resources are pruned).
    manager.replay(new ConfigRecord().
        setResourceType(BROKER.id()).setResourceName("0").
        setName("foo.bar").setValue(null));
    assertEquals(Collections.emptyMap(), manager.getConfigs(BROKER0));
    // Topic configs accumulate independently per resource.
    manager.replay(new ConfigRecord().
        setResourceType(TOPIC.id()).setResourceName("mytopic").
        setName("abc").setValue("x,y,z"));
    manager.replay(new ConfigRecord().
        setResourceType(TOPIC.id()).setResourceName("mytopic").
        setName("def").setValue("blah"));
    assertEquals(toMap(entry("abc", "x,y,z"), entry("def", "blah")),
        manager.getConfigs(MYTOPIC));
    assertEquals("x,y,z", manager.getTopicConfig(MYTOPIC.name(), "abc"));
    assertNull(manager.getTopicConfig(MYTOPIC.name(), "none-exists"));
}
/**
 * Builds a Jetty server-side {@link SslContextFactory.Server} backed by the
 * given Pulsar SSL factory, restricted to the supplied ciphers and protocols.
 */
public static SslContextFactory.Server createSslContextFactory(String sslProviderString,
    PulsarSslFactory pulsarSslFactory, boolean requireTrustedClientCertOnConnect,
    Set<String> ciphers, Set<String> protocols) {
    return new JettySslContextFactory.Server(sslProviderString, pulsarSslFactory,
        requireTrustedClientCertOnConnect, ciphers, protocols);
}
@Test(expectedExceptions = SSLHandshakeException.class)
public void testJettyTlsServerInvalidCipher() throws Exception {
    @Cleanup("stop")
    Server server = new Server();
    List<ServerConnector> connectors = new ArrayList<>();
    // Server side: TLSv1.3 with a single allowed cipher and mandatory client certs.
    PulsarSslConfiguration sslConfiguration = PulsarSslConfiguration.builder()
        .tlsCiphers(new HashSet<String>() {
            {
                this.add("TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
            }
        })
        .tlsProtocols(new HashSet<String>() {
            {
                this.add("TLSv1.3");
            }
        })
        .tlsTrustCertsFilePath(Resources.getResource("ssl/my-ca/ca.pem").getPath())
        .tlsCertificateFilePath(Resources.getResource("ssl/my-ca/server-ca.pem").getPath())
        .tlsKeyFilePath(Resources.getResource("ssl/my-ca/server-key.pem").getPath())
        .allowInsecureConnection(false)
        .requireTrustedClientCertOnConnect(true)
        .isHttps(true)
        .tlsEnabledWithKeystore(false)
        .build();
    PulsarSslFactory sslFactory = new DefaultPulsarSslFactory();
    sslFactory.initialize(sslConfiguration);
    sslFactory.createInternalSslContext();
    SslContextFactory factory = JettySslContextFactory.createSslContextFactory(null,
        sslFactory, true,
        new HashSet<String>() {
            {
                this.add("TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
            }
        },
        new HashSet<String>() {
            {
                this.add("TLSv1.3");
            }
        });
    factory.setHostnameVerifier((s, sslSession) -> true);
    ServerConnector connector = new ServerConnector(server, factory);
    connector.setPort(0);
    connectors.add(connector);
    server.setConnectors(connectors.toArray(new ServerConnector[0]));
    server.start();
    // client connect
    // Client side: deliberately mismatched protocol (TLSv1.2) and cipher, so the
    // handshake must fail with SSLHandshakeException.
    HttpClientBuilder httpClientBuilder = HttpClients.custom();
    RegistryBuilder<ConnectionSocketFactory> registryBuilder = RegistryBuilder.create();
    registryBuilder.register("https", new SSLConnectionSocketFactory(getClientSslContext(),
        new String[]{"TLSv1.2"}, new String[]{"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384"},
        new NoopHostnameVerifier()));
    PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(registryBuilder.build());
    httpClientBuilder.setConnectionManager(cm);
    @Cleanup
    CloseableHttpClient httpClient = httpClientBuilder.build();
    HttpGet httpGet = new HttpGet("https://localhost:" + connector.getLocalPort());
    httpClient.execute(httpGet);
}
/** Returns true when the SYNC bit of the LACP state byte is set. */
public boolean isSync() {
    final int syncBit = state & MASK_SYNC;
    return syncBit != 0;
}
@Test
public void isSync() {
    // State byte 0x8 is expected to carry the SYNC bit.
    LacpState state = new LacpState((byte) 0x8);
    assertTrue(state.isSync());
}
/**
 * Determines the Kafka version change for this reconciliation by chaining:
 * controller version lookup -> pod listing -> from/to version detection ->
 * preparation of the resulting version change. Each stage runs only after the
 * previous future completes successfully.
 */
public Future<KafkaVersionChange> reconcile() {
    return getVersionFromController()
        .compose(i -> getPods())
        .compose(this::detectToAndFromVersions)
        .compose(i -> prepareVersionChange());
}
@Test
public void testNoopWithOldProtocolVersion(VertxTestContext context) {
    // Kafka version is unchanged; only the protocol/format versions are old.
    String kafkaVersion = VERSIONS.defaultVersion().version();
    String interBrokerProtocolVersion = "2.8";
    String logMessageFormatVersion = "2.7";
    VersionChangeCreator vcc = mockVersionChangeCreator(
        mockKafka(kafkaVersion, interBrokerProtocolVersion, logMessageFormatVersion),
        mockNewCluster(
            null,
            mockSps(kafkaVersion),
            mockUniformPods(kafkaVersion, interBrokerProtocolVersion, logMessageFormatVersion)
        )
    );
    Checkpoint async = context.checkpoint();
    vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> {
        // No upgrade/downgrade: from == to, and no protocol/format overrides result.
        assertThat(c.from(), is(VERSIONS.defaultVersion()));
        assertThat(c.to(), is(VERSIONS.defaultVersion()));
        assertThat(c.interBrokerProtocolVersion(), nullValue());
        assertThat(c.logMessageFormatVersion(), nullValue());
        assertThat(c.metadataVersion(), is(VERSIONS.defaultVersion().metadataVersion()));
        async.flag();
    })));
}
public static void checkNullOrNonNullNonEmptyEntries( @Nullable Collection<String> values, String propertyName) { if (values == null) { // pass return; } for (String value : values) { Preconditions.checkNotNull( value, "Property '" + propertyName + "' cannot contain null entries"); Preconditions.checkArgument( !value.trim().isEmpty(), "Property '" + propertyName + "' cannot contain empty strings"); } }
@Test
public void testCheckNullOrNonNullNonEmptyEntries_emptyValueFail() {
    try {
        // " " is blank after trimming, so validation must fail.
        Validator.checkNullOrNonNullNonEmptyEntries(ImmutableList.of("first", " "), "test");
        Assert.fail();
    } catch (IllegalArgumentException iae) {
        Assert.assertEquals("Property 'test' cannot contain empty strings", iae.getMessage());
    }
}
/**
 * Builds the command-line arguments string: an optional -f "&lt;build file&gt;"
 * (path separators normalised to unix style) followed by the optional target.
 */
@Override
public String arguments() {
    ArrayList<String> parts = new ArrayList<>();
    if (buildFile != null) {
        parts.add("-f \"" + FilenameUtils.separatorsToUnix(buildFile) + "\"");
    }
    if (target != null) {
        parts.add(target);
    }
    return String.join(" ", parts);
}
@Test
public void shouldGiveArgumentsForRakeTask() {
    RakeTask rakeTask = new RakeTask();
    rakeTask.setBuildFile("myrakefile.rb");
    rakeTask.setTarget("db:migrate VERSION=0");
    // Build file is quoted and prefixed with -f; the target is appended verbatim.
    assertThat(rakeTask.arguments(), is("-f \"myrakefile.rb\" db:migrate VERSION=0"));
}
/**
 * Builds the configuration for the restore consumer (the consumer that replays
 * changelog topics to rebuild state stores): starts from the common consumer
 * configs, layers restore-prefixed overrides on top, strips group-membership
 * settings (a restore consumer never joins a group), and pins the client id
 * and an auto.offset.reset of "none".
 *
 * @param clientId client id to assign to the restore consumer
 * @return the restore consumer configuration map
 */
@SuppressWarnings("WeakerAccess")
public Map<String, Object> getRestoreConsumerConfigs(final String clientId) {
    final Map<String, Object> baseConsumerProps = getCommonConsumerConfigs();
    // Get restore consumer override configs
    final Map<String, Object> restoreConsumerProps = originalsWithPrefix(RESTORE_CONSUMER_PREFIX);
    baseConsumerProps.putAll(restoreConsumerProps);
    // no need to set group id for a restore consumer
    baseConsumerProps.remove(ConsumerConfig.GROUP_ID_CONFIG);
    // no need to set instance id for a restore consumer
    baseConsumerProps.remove(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG);
    // add client id with stream client id prefix
    baseConsumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
    // "none" makes a missing committed offset an error rather than a silent reset.
    baseConsumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
    return baseConsumerProps;
}
@Test
public void testGetRestoreConsumerConfigs() {
    final Map<String, Object> returnedProps = streamsConfig.getRestoreConsumerConfigs(clientId);
    // The restore consumer keeps the client id but must not carry a group id.
    assertEquals(returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG), clientId);
    assertNull(returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG));
}
/**
 * FEEL sort(list, precedes): sorts the list using the user-supplied two-argument
 * "precedes" function; with a null function it delegates to the natural-ordering
 * overload.
 *
 * @param ctx      evaluation context used to invoke the precedes function
 * @param list     the elements to sort
 * @param function two-argument FEEL function returning true when its first
 *                 argument should precede the second; may be null
 */
public FEELFnResult<List<Object>> invoke(@ParameterName( "ctx" ) EvaluationContext ctx, @ParameterName("list") List list, @ParameterName("precedes") FEELFunction function) {
    if ( function == null ) {
        return invoke( list );
    } else {
        return invoke(list, (a, b) -> {
            final Object result = function.invokeReflectively(ctx, new Object[]{a, b});
            // NOTE(review): any non-Boolean result (including null/errors) is treated
            // the same as TRUE, i.e. "a precedes b" — confirm this is intended.
            if (!(result instanceof Boolean) || ((Boolean) result)) {
                return -1;
            } else {
                return 1;
            }
        } );
    }
}
@Test
void invokeWithSortFunctionNull() {
    // A null "precedes" function falls back to natural ordering.
    FunctionTestUtil.assertResultList(
        sortFunction.invoke(null, Arrays.asList(10, 4, 5, 12), null),
        Arrays.asList(4, 5, 10, 12));
}
/**
 * Converts a Connect value to raw bytes. Only BYTES schemas and
 * byte[]/ByteBuffer values are accepted; a null value passes through as null.
 *
 * @throws DataException for non-BYTES schemas or unsupported value types
 */
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
    if (schema != null && schema.type() != Schema.Type.BYTES) {
        throw new DataException("Invalid schema type for ByteArrayConverter: " + schema.type().toString());
    }
    if (value == null) {
        return null;
    }
    if (value instanceof ByteBuffer) {
        return getBytesFromByteBuffer((ByteBuffer) value);
    }
    if (value instanceof byte[]) {
        return (byte[]) value;
    }
    throw new DataException("ByteArrayConverter is not compatible with objects of type " + value.getClass());
}
@Test
public void testFromConnect() {
    // A plain byte[] with a BYTES schema is returned as-is.
    assertArrayEquals(
        SAMPLE_BYTES,
        converter.fromConnectData(TOPIC, Schema.BYTES_SCHEMA, SAMPLE_BYTES)
    );
}
// Creates a new tenant or updates an existing one (SYS_ADMIN only); the id in
// the request body decides between create and update. Permission and entity
// validation runs before delegating to the tenant service.
@ApiOperation(value = "Create Or update Tenant (saveTenant)", notes = "Create or update the Tenant. When creating tenant, platform generates Tenant Id as " + UUID_WIKI_LINK + "Default Rule Chain and Device profile are also generated for the new tenants automatically. " + "The newly created Tenant Id will be present in the response. " + "Specify existing Tenant Id id to update the Tenant. " + "Referencing non-existing Tenant Id will cause 'Not Found' error." + "Remove 'id', 'tenantId' from the request body example (below) to create new Tenant entity." + SYSTEM_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('SYS_ADMIN')")
@RequestMapping(value = "/tenant", method = RequestMethod.POST)
@ResponseBody
public Tenant saveTenant(@Parameter(description = "A JSON value representing the tenant.") @RequestBody Tenant tenant) throws Exception {
    checkEntity(tenant.getId(), tenant, Resource.TENANT);
    return tbTenantService.save(tenant);
}
@Test
public void testFindTenantsByTitle() throws Exception {
    log.debug("login sys admin");
    loginSysAdmin();
    log.debug("test started");
    // Create 134 tenants sharing the prefix "Tenant title 1" (alternating case).
    String title1 = "Tenant title 1";
    List<ListenableFuture<Tenant>> createFutures = new ArrayList<>(134);
    for (int i = 0; i < 134; i++) {
        Tenant tenant = new Tenant();
        String suffix = StringUtils.randomAlphanumeric((int) (5 + Math.random() * 10));
        String title = title1 + suffix;
        title = i % 2 == 0 ? title.toLowerCase() : title.toUpperCase();
        tenant.setTitle(title);
        createFutures.add(executor.submit(() -> saveTenant(tenant)));
    }
    List<Tenant> tenantsTitle1 = Futures.allAsList(createFutures).get(TIMEOUT, TimeUnit.SECONDS);
    log.debug("saved '{}', qty {}", title1, tenantsTitle1.size());
    // Create 127 tenants with the prefix "Tenant title 2".
    String title2 = "Tenant title 2";
    createFutures = new ArrayList<>(127);
    for (int i = 0; i < 127; i++) {
        Tenant tenant = new Tenant();
        String suffix = StringUtils.randomAlphanumeric((int) (5 + Math.random() * 10));
        String title = title2 + suffix;
        title = i % 2 == 0 ? title.toLowerCase() : title.toUpperCase();
        tenant.setTitle(title);
        createFutures.add(executor.submit(() -> saveTenant(tenant)));
    }
    List<Tenant> tenantsTitle2 = Futures.allAsList(createFutures).get(TIMEOUT, TimeUnit.SECONDS);
    log.debug("saved '{}', qty {}", title2, tenantsTitle2.size());
    // Page through the title1 search (page size 15) and compare with what was created.
    List<Tenant> loadedTenantsTitle1 = new ArrayList<>(134);
    PageLink pageLink = new PageLink(15, 0, title1);
    PageData<Tenant> pageData = null;
    do {
        pageData = doGetTypedWithPageLink("/api/tenants?", PAGE_DATA_TENANT_TYPE_REF, pageLink);
        loadedTenantsTitle1.addAll(pageData.getData());
        if (pageData.hasNext()) {
            pageLink = pageLink.nextPageLink();
        }
    } while (pageData.hasNext());
    log.debug("found by name '{}', step 15 {}", title1, loadedTenantsTitle1.size());
    assertThat(tenantsTitle1).as(title1).containsExactlyInAnyOrderElementsOf(loadedTenantsTitle1);
    log.debug("asserted");
    // Page through the title2 search with a smaller page size (4).
    List<Tenant> loadedTenantsTitle2 = new ArrayList<>(127);
    pageLink = new PageLink(4, 0, title2);
    do {
        pageData = doGetTypedWithPageLink("/api/tenants?", PAGE_DATA_TENANT_TYPE_REF, pageLink);
        loadedTenantsTitle2.addAll(pageData.getData());
        if (pageData.hasNext()) {
            pageLink = pageLink.nextPageLink();
        }
    } while (pageData.hasNext());
    // NOTE(review): this log line reports title1 although the loop searched title2.
    log.debug("found by name '{}', step 4 {}", title1, loadedTenantsTitle2.size());
    assertThat(tenantsTitle2).as(title2).containsExactlyInAnyOrderElementsOf(loadedTenantsTitle2);
    log.debug("asserted");
    // Delete the first group and verify the search comes back empty.
    deleteEntitiesAsync("/api/tenant/", loadedTenantsTitle1, executor).get(TIMEOUT, TimeUnit.SECONDS);
    log.debug("deleted '{}', size {}", title1, loadedTenantsTitle1.size());
    pageLink = new PageLink(4, 0, title1);
    pageData = doGetTypedWithPageLink("/api/tenants?", PAGE_DATA_TENANT_TYPE_REF, pageLink);
    Assert.assertFalse(pageData.hasNext());
    Assert.assertEquals(0, pageData.getData().size());
    log.debug("tried to search another '{}', step 4", title1);
    // Same clean-up and verification for the second group.
    deleteEntitiesAsync("/api/tenant/", loadedTenantsTitle2, executor).get(TIMEOUT, TimeUnit.SECONDS);
    log.debug("deleted '{}', size {}", title2, loadedTenantsTitle2.size());
    pageLink = new PageLink(4, 0, title2);
    pageData = doGetTypedWithPageLink("/api/tenants?", PAGE_DATA_TENANT_TYPE_REF, pageLink);
    Assert.assertFalse(pageData.hasNext());
    Assert.assertEquals(0, pageData.getData().size());
    log.debug("tried to search another '{}', step 4", title2);
}
/**
 * Resolves secret/variable interpolations in {@code toInterpolate} using the
 * context's secret source resolver.
 *
 * @deprecated call {@code context.getSecretSourceResolver().resolve(toInterpolate)}
 *             directly instead.
 */
@Deprecated
@Restricted(DoNotUse.class)
public static String resolve(ConfigurationContext context, String toInterpolate) {
    return context.getSecretSourceResolver().resolve(toInterpolate);
}
@Test
public void resolve_SystemProperty() throws Exception {
    String input = "java.version";
    String expected = System.getProperty(input);
    String output = resolve("${sysProp:" + input + "}");
    // The interpolated value must match both the lookup helper and the JVM property.
    assertThat(output, equalTo(SYSPROP.lookup(input)));
    assertThat(output, equalTo(expected));
}
/**
 * Parses an HTTP JSON response body, extracting a single value via
 * {@code singleJsonPath} and, optionally, a multi value via {@code multiJsonPath}.
 * Returns null when the single path is missing, the body is not valid JSON, or
 * an unexpected error occurs; a missing multi path is tolerated.
 */
@VisibleForTesting
static LookupResult parseBody(JsonPath singleJsonPath, @Nullable JsonPath multiJsonPath, InputStream body) {
    try {
        final DocumentContext documentContext = JsonPath.parse(body);
        LookupResult.Builder builder = LookupResult.builder().cacheTTL(Long.MAX_VALUE);
        if (multiJsonPath != null) {
            try {
                final Object multiValue = documentContext.read(multiJsonPath);
                if (multiValue instanceof Map) {
                    // Maps become the multi value directly.
                    //noinspection unchecked
                    builder = builder.multiValue((Map<Object, Object>) multiValue);
                } else if (multiValue instanceof List) {
                    // Lists are stringified into a string-list value.
                    //noinspection unchecked
                    final List<String> stringList = ((List<Object>) multiValue).stream().map(Object::toString).collect(Collectors.toList());
                    builder = builder.stringListValue(stringList);
                    // for backwards compatibility
                    builder = builder.multiSingleton(multiValue);
                } else {
                    // Scalars are wrapped as a singleton multi value.
                    builder = builder.multiSingleton(multiValue);
                }
            } catch (PathNotFoundException e) {
                // A missing multi path is non-fatal; only the single value is required.
                LOG.warn("Couldn't read multi JSONPath from response - skipping multi value ({})", e.getMessage());
            }
        }
        try {
            final Object singleValue = documentContext.read(singleJsonPath);
            if (singleValue instanceof CharSequence) {
                return builder.single((CharSequence) singleValue).build();
            } else if (singleValue instanceof Number) {
                return builder.single((Number) singleValue).build();
            } else if (singleValue instanceof Boolean) {
                return builder.single((Boolean) singleValue).build();
            } else {
                throw new IllegalArgumentException("Single value data type cannot be: " + singleValue.getClass().getCanonicalName());
            }
        } catch (PathNotFoundException e) {
            LOG.warn("Couldn't read single JSONPath from response - returning empty result ({})", e.getMessage());
            return null;
        }
    } catch (InvalidJsonException e) {
        LOG.error("Couldn't parse JSON response", e);
        return null;
    } catch (ClassCastException e) {
        LOG.error("Couldn't assign value type", e);
        return null;
    } catch (Exception e) {
        LOG.error("Unexpected error parsing JSON response", e);
        return null;
    }
}
@Test
public void parseBodyWithMapMultiValue() throws Exception {
    final JsonPath singlePath = JsonPath.compile("$.hello");
    final JsonPath multiPath = JsonPath.compile("$.map");
    final LookupResult result = HTTPJSONPathDataAdapter.parseBody(singlePath, multiPath, body);
    // A Map under the multi path is exposed directly as the multi value.
    assertThat(result.isEmpty()).isFalse();
    assertThat(result.hasError()).isFalse();
    assertThat(result.singleValue()).isEqualTo("world");
    assertThat(result.multiValue()).isNotNull();
    assertThat(result.multiValue()).isInstanceOf(Map.class);
    assertThat(result.multiValue()).containsOnly(
        entry("key1", "value1"),
        entry("key2", "value2")
    );
}
/**
 * Creates a new histogram backed by a fixed-size reservoir of
 * DEFAULT_HISTOGRAM_RESERVOIR_SIZE samples.
 * NOTE(review): the {@code name} parameter is currently ignored — every call
 * returns a fresh, unregistered histogram; confirm this is intended.
 */
@Override
public Histogram histogram(String name) {
    return new FixedReservoirHistogram(DEFAULT_HISTOGRAM_RESERVOIR_SIZE);
}
@Test
public void histogram() {
    MetricsContext metricsContext = new DefaultMetricsContext();
    int reservoirSize = 1000;
    Histogram histogram = metricsContext.histogram("test");
    // Fill the reservoir exactly with the values 1..1000.
    for (int i = 1; i <= reservoirSize; ++i) {
        histogram.update(i);
    }
    assertThat(histogram.count()).isEqualTo(reservoirSize);
    Histogram.Statistics statistics = histogram.statistics();
    // With a full, un-evicted reservoir the statistics are exact.
    assertThat(statistics.size()).isEqualTo(reservoirSize);
    assertThat(statistics.mean()).isEqualTo(500.5);
    assertThat(statistics.stdDev()).isCloseTo(288.67499, withinPercentage(0.001));
    assertThat(statistics.max()).isEqualTo(1000L);
    assertThat(statistics.min()).isEqualTo(1L);
    assertThat(statistics.percentile(0.50)).isEqualTo(500);
    assertThat(statistics.percentile(0.75)).isEqualTo(750);
    assertThat(statistics.percentile(0.90)).isEqualTo(900);
    assertThat(statistics.percentile(0.95)).isEqualTo(950);
    assertThat(statistics.percentile(0.99)).isEqualTo(990);
    assertThat(statistics.percentile(0.999)).isEqualTo(999);
}
/**
 * Thrift endpoint for automatic partition creation. Applies simple back-pressure:
 * when in-flight partition requests reach a quarter of the thrift worker threads,
 * the request is rejected with SERVICE_UNAVAILABLE instead of queueing.
 */
@Override
public TCreatePartitionResult createPartition(TCreatePartitionRequest request) throws TException {
    LOG.info("Receive create partition: {}", request);
    TCreatePartitionResult result;
    try {
        // Count this request first; the finally block always undoes the increment.
        if (partitionRequestNum.incrementAndGet() >= Config.thrift_server_max_worker_threads / 4) {
            result = new TCreatePartitionResult();
            TStatus errorStatus = new TStatus(SERVICE_UNAVAILABLE);
            errorStatus.setError_msgs(Lists.newArrayList(
                String.format("Too many create partition requests, please try again later txn_id=%d",
                    request.getTxn_id())));
            result.setStatus(errorStatus);
            return result;
        }
        result = createPartitionProcess(request);
    } catch (Exception t) {
        // Failures are reported back as RUNTIME_ERROR rather than a thrift exception.
        LOG.warn(DebugUtil.getStackTrace(t));
        result = new TCreatePartitionResult();
        TStatus errorStatus = new TStatus(RUNTIME_ERROR);
        errorStatus.setError_msgs(Lists.newArrayList(String.format("txn_id=%d failed. %s",
            request.getTxn_id(), t.getMessage())));
        result.setStatus(errorStatus);
    } finally {
        partitionRequestNum.decrementAndGet();
    }
    return result;
}
@Test
public void testCreatePartitionApiMultiValues() throws TException {
    new MockUp<GlobalTransactionMgr>() {
        @Mock
        public TransactionState getTransactionState(long dbId, long transactionId) {
            return new TransactionState();
        }
    };
    Database db = GlobalStateMgr.getCurrentState().getDb("test");
    Table table = db.getTable("site_access_day");
    // Three value lists: a date, the same date again (list filled after being
    // added), and one distinct date.
    List<List<String>> partitionValues = Lists.newArrayList();
    List<String> values = Lists.newArrayList();
    values.add("1990-04-24");
    partitionValues.add(values);
    List<String> values1 = Lists.newArrayList();
    partitionValues.add(values1);
    values1.add("1990-04-24");
    List<String> values2 = Lists.newArrayList();
    values2.add("1989-11-02");
    partitionValues.add(values2);
    FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
    TCreatePartitionRequest request = new TCreatePartitionRequest();
    request.setDb_id(db.getId());
    request.setTable_id(table.getId());
    request.setPartition_values(partitionValues);
    TCreatePartitionResult partition = impl.createPartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK);
    Partition p19891102 = table.getPartition("p19891102");
    Assert.assertNotNull(p19891102);
    // Repeating the request still reports both (deduplicated) partitions.
    partition = impl.createPartition(request);
    Assert.assertEquals(2, partition.partitions.size());
}
@Override public Long createDataSourceConfig(DataSourceConfigSaveReqVO createReqVO) { DataSourceConfigDO config = BeanUtils.toBean(createReqVO, DataSourceConfigDO.class); validateConnectionOK(config); // 插入 dataSourceConfigMapper.insert(config); // 返回 return config.getId(); }
@Test
public void testCreateDataSourceConfig_success() {
    try (MockedStatic<JdbcUtils> databaseUtilsMock = mockStatic(JdbcUtils.class)) {
        // Prepare the request parameters.
        DataSourceConfigSaveReqVO reqVO = randomPojo(DataSourceConfigSaveReqVO.class)
            .setId(null); // avoid the id being pre-populated
        // Mock the connectivity check so validation passes.
        databaseUtilsMock.when(() -> JdbcUtils.isConnectionOK(eq(reqVO.getUrl()),
            eq(reqVO.getUsername()), eq(reqVO.getPassword()))).thenReturn(true);
        // Invoke the service under test.
        Long dataSourceConfigId = dataSourceConfigService.createDataSourceConfig(reqVO);
        // Assert that an id was generated.
        assertNotNull(dataSourceConfigId);
        // Verify the persisted record matches the request (id excluded).
        DataSourceConfigDO dataSourceConfig = dataSourceConfigMapper.selectById(dataSourceConfigId);
        assertPojoEquals(reqVO, dataSourceConfig, "id");
    }
}
/**
 * Matches raw (current analysis) issues against base (previous analysis) issues
 * using progressively looser keys — from exact (rule + line + line hash +
 * message) down to rule + line hash only — with code-move detection in between.
 * The ordering is significant: stricter passes claim matches first.
 */
public NonClosedTracking<RAW, BASE> trackNonClosed(Input<RAW> rawInput, Input<BASE> baseInput) {
    NonClosedTracking<RAW, BASE> tracking = NonClosedTracking.of(rawInput, baseInput);
    // 1. match by rule, line, line hash and message
    match(tracking, LineAndLineHashAndMessage::new);
    // 2. match issues with same rule, same line and same line hash, but not necessarily with same message
    match(tracking, LineAndLineHashKey::new);
    // 3. detect code moves by comparing blocks of codes
    detectCodeMoves(rawInput, baseInput, tracking);
    // 4. match issues with same rule, same message and same line hash
    match(tracking, LineHashAndMessageKey::new);
    // 5. match issues with same rule, same line and same message
    match(tracking, LineAndMessageKey::new);
    // 6. match issues with same rule and same line hash but different line and different message.
    // See SONAR-2812
    match(tracking, LineHashKey::new);
    return tracking;
}
@Test
public void do_not_fail_if_raw_line_does_not_exist() {
    // The raw issue points at line 200, far beyond the single-line file content.
    FakeInput baseInput = new FakeInput();
    FakeInput rawInput = new FakeInput("H1").addIssue(new Issue(200, "H200", RULE_SYSTEM_PRINT,
        "msg", org.sonar.api.issue.Issue.STATUS_OPEN, new Date()));
    Tracking<Issue, Issue> tracking = tracker.trackNonClosed(rawInput, baseInput);
    // Tracking completes without error and the issue simply stays unmatched.
    assertThat(tracking.getUnmatchedRaws()).hasSize(1);
}
// Thin REST endpoint: delegates secure-APDU generation to the RDW service,
// forwarding the caller's IP taken from the X-FORWARDED-FOR header.
@Operation(summary = "Create the 3 secure apdus which will generate the pip/pp")
@PostMapping(value = { Constants.URL_OLD_RDW_SECAPDU, Constants.URL_RDW_SECAPDU }, consumes = "application/json", produces = "application/json")
public SecApduResponse generateSecureAPDUsRestService(@Valid @RequestBody SecApduRequest request, @RequestHeader(value = "X-FORWARDED-FOR") String clientIp) {
    return rdwService.generateSecureAPDUsRestService(request, clientIp);
}
@Test
public void generateSecureAPDUsRestServiceTest() {
    // The controller must return whatever the RDW service produces, unchanged.
    SecApduResponse expectedResponse = new SecApduResponse();
    when(rdwServiceMock.generateSecureAPDUsRestService(any(SecApduRequest.class), anyString())).thenReturn(expectedResponse);
    SecApduResponse actualResponse = rdwController.generateSecureAPDUsRestService(new SecApduRequest(), "");
    assertEquals(expectedResponse, actualResponse);
}
/**
 * Builds a {@link Statement} from the given parse tree, passing along the set
 * of sources referenced by the tree so the builder can resolve them.
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
    return build(Optional.of(getSources(parseTree)), parseTree);
}
@Test
public void shouldThrowOnNoSubjectOrId() {
    // Given:
    final SingleStatementContext stmt = givenQuery("ASSERT SCHEMA TIMEOUT 10 SECONDS;");
    // When:
    final Exception e = assertThrows(KsqlException.class, () -> builder.buildStatement(stmt));
    // Then:
    // NOTE(review): "much include" in the expected message looks like a typo for
    // "must include" carried over from the production error string.
    assertThat(e.getMessage(), is("ASSERT SCHEMA statements much include a subject name or an id"));
}
/**
 * Maps an unhandled throwable to an API Gateway proxy response: request/internal
 * errors become HTTP 500; every other throwable is reported as a 502 with the
 * gateway-timeout error body.
 */
@Override
public AwsProxyResponse handle(Throwable ex) {
    log.error("Called exception handler for:", ex);
    // adding a print stack trace in case we have no appender or we are running inside SAM local, where need the
    // output to go to the stderr.
    ex.printStackTrace();
    if (ex instanceof InvalidRequestEventException || ex instanceof InternalServerErrorException) {
        return new AwsProxyResponse(500, HEADERS, getErrorJson(INTERNAL_SERVER_ERROR));
    } else {
        return new AwsProxyResponse(502, HEADERS, getErrorJson(GATEWAY_TIMEOUT_ERROR));
    }
}
@Test
void typedHandle_InvalidResponseObjectException_responseString() throws JsonProcessingException {
    // InvalidResponseObjectException is not one of the 500-mapped types, so the
    // handler must report the gateway-timeout error body.
    AwsProxyResponse resp = exceptionHandler.handle(new InvalidResponseObjectException(INVALID_RESPONSE_MESSAGE, null));
    assertNotNull(resp);
    String body = objectMapper.writeValueAsString(new ErrorModel(AwsProxyExceptionHandler.GATEWAY_TIMEOUT_ERROR));
    assertEquals(body, resp.getBody());
}
/**
 * Runs {@code command} through bash, parses each output line as
 * "name&lt;regex&gt;id", and fills {@code map} (id -&gt; name), translating ids
 * through {@code staticMapping} first. Duplicate ids or names are reported and
 * skipped rather than failing the whole update.
 *
 * @return true when at least one new entry was added to the map
 * @throws IOException when a line cannot be parsed or the output cannot be read
 */
@VisibleForTesting
public static boolean updateMapInternal(BiMap<Integer, String> map, String mapName, String command, String regex, Map<Integer, Integer> staticMapping) throws IOException {
    boolean updated = false;
    BufferedReader br = null;
    try {
        // NOTE(review): the command string is handed to "bash -c" verbatim —
        // callers must never feed untrusted input into it.
        Process process = Runtime.getRuntime().exec(
            new String[] { "bash", "-c", command });
        br = new BufferedReader(
            new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8));
        String line = null;
        while ((line = br.readLine()) != null) {
            String[] nameId = line.split(regex);
            if ((nameId == null) || (nameId.length != 2)) {
                throw new IOException("Can't parse " + mapName + " list entry:" + line);
            }
            LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]);
            // HDFS can't differentiate duplicate names with simple authentication
            final Integer key = staticMapping.get(parseId(nameId[1]));
            final String value = nameId[0];
            if (map.containsKey(key)) {
                final String prevValue = map.get(key);
                if (value.equals(prevValue)) {
                    // silently ignore equivalent entries
                    continue;
                }
                reportDuplicateEntry(
                    "Got multiple names associated with the same id: ",
                    key, value, key, prevValue);
                continue;
            }
            if (map.containsValue(value)) {
                final Integer prevKey = map.inverse().get(value);
                reportDuplicateEntry(
                    "Got multiple ids associated with the same name: ",
                    key, value, prevKey, value);
                continue;
            }
            map.put(key, value);
            updated = true;
        }
        LOG.debug("Updated " + mapName + " map size: " + map.size());
    } catch (IOException e) {
        LOG.error("Can't update " + mapName + " map");
        throw e;
    } finally {
        // Closed manually (not try-with-resources) so close failures are logged,
        // not propagated over a successful read.
        if (br != null) {
            try {
                br.close();
            } catch (IOException e1) {
                LOG.error("Can't close BufferedReader of command result", e1);
            }
        }
    }
    return updated;
}
// Verifies that static uid/gid overrides are honored when parsing shell output:
// uid 11501 is remapped to 10 and gid 497 to 200, while unmapped ids pass through.
@Test
public void testStaticMapping() throws IOException {
    assumeNotWindows();
    Map<Integer, Integer> uidStaticMap = new PassThroughMap<Integer>();
    Map<Integer, Integer> gidStaticMap = new PassThroughMap<Integer>();
    uidStaticMap.put(11501, 10);
    gidStaticMap.put(497, 200);
    // Maps for id to name map
    BiMap<Integer, String> uMap = HashBiMap.create();
    BiMap<Integer, String> gMap = HashBiMap.create();
    // Fake passwd/group output piped through cut, mimicking the real commands.
    String GET_ALL_USERS_CMD = "echo \"atm:x:1000:1000:Aaron T. Myers,,,:/home/atm:/bin/bash\n" + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\"" + " | cut -d: -f1,3";
    String GET_ALL_GROUPS_CMD = "echo \"hdfs:*:11501:hrt_hdfs\n" + "mapred:x:497\n" + "mapred2:x:498\"" + " | cut -d: -f1,3";
    ShellBasedIdMapping.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":", uidStaticMap);
    ShellBasedIdMapping.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":", gidStaticMap);
    assertEquals("hdfs", uMap.get(10));
    assertEquals(10, (int)uMap.inverse().get("hdfs"));
    assertEquals("atm", uMap.get(1000));
    assertEquals(1000, (int)uMap.inverse().get("atm"));
    assertEquals("hdfs", gMap.get(11501));
    assertEquals(11501, (int)gMap.inverse().get("hdfs"));
    assertEquals("mapred", gMap.get(200));
    assertEquals(200, (int)gMap.inverse().get("mapred"));
    assertEquals("mapred2", gMap.get(498));
    assertEquals(498, (int)gMap.inverse().get("mapred2"));
}
/**
 * Estimates the in-memory size of {@code totalRecords} rows of {@code tableSchema}
 * as {@code defaultSize * totalRecords}, saturating at {@link Long#MAX_VALUE}.
 *
 * @param tableSchema  schema whose per-row default size is used (may be null only
 *                     when totalRecords is Long.MAX_VALUE, which short-circuits)
 * @param totalRecords number of rows; Long.MAX_VALUE is returned unchanged
 * @return the estimated size in bytes, or Long.MAX_VALUE on overflow
 */
public static long estimateSize(StructType tableSchema, long totalRecords) {
  if (totalRecords == Long.MAX_VALUE) {
    // Unknown/unbounded record count: avoid touching the schema at all.
    return totalRecords;
  }
  long result;
  try {
    // Math.multiplyExact throws ArithmeticException on long overflow, matching the
    // previous Guava LongMath.checkedMultiply behavior without the extra dependency.
    result = Math.multiplyExact((long) tableSchema.defaultSize(), totalRecords);
  } catch (ArithmeticException e) {
    // Saturate instead of failing: an over-large estimate is still a valid estimate.
    result = Long.MAX_VALUE;
  }
  return result;
}
// estimateSize must short-circuit and return Long.MAX_VALUE without dereferencing
// the (null) schema when the record count is already Long.MAX_VALUE.
@Test
public void testEstimateSizeMaxValue() throws IOException {
    Assert.assertEquals("estimateSize returns Long max value", Long.MAX_VALUE, SparkSchemaUtil.estimateSize(null, Long.MAX_VALUE));
}
/**
 * Returns the shared thread pool, creating it with a default
 * {@link LoggingUncaughtExceptionHandler} if needed. Synchronized so that
 * concurrent first calls cannot race; delegates to the handler-taking overload.
 */
public static synchronized ThreadPool get() {
    return get(new LoggingUncaughtExceptionHandler());
}
// The factory must produce a non-null pool instance for a valid configuration.
@Test
public void testGet() {
    assertNotNull(new ThreadPoolFactory(DefaultThreadPool.class).create("prefix", 1, ThreadPool.Priority.low, new LinkedBlockingQueue<>(), new LoggingUncaughtExceptionHandler()));
}
/**
 * Builds the project quality-gate status from the stored measure JSON.
 *
 * Returns a minimal response (no quality-gate details) when no measure data is
 * present; otherwise parses the JSON, maps its "level" to a gate status, and
 * fills in CAYC status, ignored conditions, conditions, and the new-code period.
 */
public ProjectStatusResponse.ProjectStatus format() {
    if (!optionalMeasureData.isPresent()) {
        return newResponseWithoutQualityGateDetails();
    }
    // Measure data is a JSON document with at least "level" and "conditions".
    JsonObject json = JsonParser.parseString(optionalMeasureData.get()).getAsJsonObject();
    ProjectStatusResponse.Status qualityGateStatus = measureLevelToQualityGateStatus(json.get("level").getAsString());
    projectStatusBuilder.setStatus(qualityGateStatus);
    projectStatusBuilder.setCaycStatus(caycStatus.toString());
    formatIgnoredConditions(json);
    formatConditions(json.getAsJsonArray("conditions"));
    formatPeriods();
    return projectStatusBuilder.build();
}
// End-to-end mapping test: loads a quality-gate details JSON fixture and checks
// that level, CAYC status, all three conditions (status/metric/comparator/
// thresholds/actual values) and the new-code period are mapped correctly.
@Test
public void map_level_conditions_and_period() throws IOException {
    String measureData = IOUtils.toString(getClass().getResource("QualityGateDetailsFormatterTest/quality_gate_details.json"));
    SnapshotDto snapshot = new SnapshotDto()
        .setPeriodMode("last_version")
        .setPeriodParam("2015-12-07")
        .setPeriodDate(1449404331764L);
    underTest = newQualityGateDetailsFormatter(measureData, snapshot);
    ProjectStatus result = underTest.format();
    assertThat(result.getStatus()).isEqualTo(ProjectStatusResponse.Status.ERROR);
    assertEquals(NON_COMPLIANT.toString(), result.getCaycStatus());
    // check conditions
    assertThat(result.getConditionsCount()).isEqualTo(3);
    List<ProjectStatusResponse.Condition> conditions = result.getConditionsList();
    assertThat(conditions).extracting("status").containsExactly(ProjectStatusResponse.Status.ERROR, ProjectStatusResponse.Status.WARN, ProjectStatusResponse.Status.OK);
    assertThat(conditions).extracting("metricKey").containsExactly("new_coverage", "new_blocker_violations", "new_critical_violations");
    assertThat(conditions).extracting("comparator").containsExactly(ProjectStatusResponse.Comparator.LT, ProjectStatusResponse.Comparator.GT, ProjectStatusResponse.Comparator.GT);
    assertThat(conditions).extracting("warningThreshold").containsOnly("80", "");
    assertThat(conditions).extracting("errorThreshold").containsOnly("85", "0", "0");
    assertThat(conditions).extracting("actualValue").containsExactly("82.2985024398452", "1", "0");
    // check period
    ProjectStatusResponse.NewCodePeriod period = result.getPeriod();
    assertThat(period).extracting("mode").isEqualTo("last_version");
    assertThat(period).extracting("parameter").isEqualTo("2015-12-07");
    assertThat(period.getDate()).isEqualTo(formatDateTime(snapshot.getPeriodDate()));
}
/**
 * Creates a row of the given schema with every field value set to null.
 *
 * NOTE(review): the builder is expected to validate nullability, so this throws
 * for schemas containing non-nullable fields — confirm against Row.Builder.
 */
public static Row nullRow(Schema schema) {
    return Row.withSchema(schema)
        .addValues(Collections.nCopies(schema.getFieldCount(), null))
        .build();
}
// nullRow must be rejected (IllegalArgumentException) for a schema whose only
// field is non-nullable.
@Test
public void testRejectsNullRecord() {
    Schema type = Stream.of(Schema.Field.of("f_int", Schema.FieldType.INT32)).collect(toSchema());
    thrown.expect(IllegalArgumentException.class);
    Row.nullRow(type);
}
/**
 * Expands a URL expression that may contain a numeric range token (matched by
 * {@code p4Range}, e.g. {@code [1-3]}) into one Request per value in the range.
 *
 * Without a range token the expression yields a single Request as-is; an
 * inverted range (from &gt; to) yields no requests at all.
 */
public static List<Request> from(String exp) {
    Matcher matcher = p4Range.matcher(exp);
    if (!matcher.find()) {
        // No range token: the expression is already a concrete URL.
        return Collections.singletonList(new Request(exp));
    }
    int lowerBound = Integer.parseInt(matcher.group(1));
    int upperBound = Integer.parseInt(matcher.group(2));
    if (lowerBound > upperBound) {
        // Inverted range is treated as empty rather than an error.
        return Collections.emptyList();
    }
    List<Request> expanded = new ArrayList<Request>(upperBound - lowerBound + 1);
    for (int page = lowerBound; page <= upperBound; page++) {
        // replaceAll substitutes the range token with the concrete page number.
        expanded.add(new Request(matcher.replaceAll(String.valueOf(page))));
    }
    return expanded;
}
// A descending range like [10-3] is invalid and must expand to no requests.
@Test
public void test_generate_range_when_invalid_number() throws Exception {
    List<Request> requests = RequestUtils.from("http://angularjs.cn/api/article/latest?p=[10-3]&s=20");
    assertThat(requests).isEmpty();
}
/**
 * Returns this model's properties as an unmodifiable list.
 *
 * Note: Arrays.asList wraps the backing array, so the result is a read-only
 * view — later changes to the {@code properties} array would be visible
 * through it, not a snapshot.
 */
public List<Property> getProperties() {
    return Collections.unmodifiableList(Arrays.asList(properties));
}
@Test // JENKINS-26775 public void sytheticMethodShouldNotBeExported() { Model<Impl> model = builder.get(Impl.class); assertEquals("Redundant properties discovered: " + model.getProperties(), 1, model.getProperties().size()); }
/**
 * Rewrites a SELECT column list so every column is qualified with {@code alisa}
 * and, when {@code asAlisa} is given, re-aliased as {@code asAlisa.column}
 * (escaped with {@code escapeSymbol}).
 *
 * @param selectBody   comma-separated column list, e.g. "id,name" or "id AS t_id"
 * @param alisa        table alias prepended to every column
 * @param asAlisa      optional alias prefix for the AS clause; null to skip re-aliasing
 * @param escapeSymbol quote character used by escapeColumn (e.g. backtick)
 * @return the rewritten column list
 */
public static String getNewSelectBody(String selectBody, String alisa, String asAlisa, String escapeSymbol) {
    String[] split = selectBody.split(COMMA);
    StringBuilder sb = new StringBuilder();
    boolean asA = asAlisa != null;
    for (String body : split) {
        final String sa = alisa.concat(DOT);
        if (asA) {
            int as = body.indexOf(AS);
            if (as < 0) {
                // No existing alias: qualify and add one derived from the column itself.
                sb.append(sa).append(body).append(AS).append(escapeColumn(asAlisa.concat(DOT).concat(body), escapeSymbol));
            } else {
                String column = body.substring(0, as);
                // Skip past the " AS " separator; use AS.length() instead of the
                // former magic number 4 so this stays correct with the constant.
                String property = body.substring(as + AS.length());
                property = StringUtils.getTargetColumn(property);
                sb.append(sa).append(column).append(AS).append(escapeColumn(asAlisa.concat(DOT).concat(property), escapeSymbol));
            }
        } else {
            // No re-aliasing requested: just qualify the column.
            sb.append(sa).append(body);
        }
        sb.append(COMMA);
    }
    // Drop the trailing comma appended after the last column.
    return sb.deleteCharAt(sb.length() - 1).toString();
}
// Covers qualification with and without re-aliasing, with plain, backtick-quoted,
// and pre-aliased (AS) columns, and with both backtick and quote escape symbols.
@Test
void getNewSelectBody() {
    String s = SqlUtils.getNewSelectBody("id,name", "d", null, null);
    assertThat(s).isEqualTo("d.id,d.name");
    s = SqlUtils.getNewSelectBody("`id`,`name`", "d", null, null);
    assertThat(s).isEqualTo("d.`id`,d.`name`");
    s = SqlUtils.getNewSelectBody("id,name", "d", "pp", "`");
    assertThat(s).isEqualTo("d.id AS `pp.id`,d.name AS `pp.name`");
    s = SqlUtils.getNewSelectBody("id AS t_id,name AS t_name", "d", null, null);
    assertThat(s).isEqualTo("d.id AS t_id,d.name AS t_name");
    s = SqlUtils.getNewSelectBody("`id` AS t_id,`name` AS t_name", "d", null, null);
    assertThat(s).isEqualTo("d.`id` AS t_id,d.`name` AS t_name");
    s = SqlUtils.getNewSelectBody("id AS `t_id`,name AS `t_name`", "d", "pp", "`");
    assertThat(s).isEqualTo("d.id AS `pp.t_id`,d.name AS `pp.t_name`");
    s = SqlUtils.getNewSelectBody("`id` AS `t_id`,`name` AS `t_name`", "d", "pp", "'");
    assertThat(s).isEqualTo("d.`id` AS 'pp.t_id',d.`name` AS 'pp.t_name'");
}
/**
 * Runs this step for the root of the component tree; the step context itself
 * is unused here.
 */
@Override
public void execute(Context context) {
    executeForBranch(treeRootHolder.getRoot());
}
// When the analyzer plugin's update date (1L) precedes the base analysis
// timestamp (3L), no quality-profile change event must be raised even though
// the plugin changed.
@Test
public void execute_whenAnalyzerChangedAndAnalyzerUpdateDateBeforeAnalysis_shouldNotRaiseEvent() {
    QualityProfile qp1 = qp(QP_NAME_1, LANGUAGE_KEY_1, new Date());
    mockLanguageInRepository(LANGUAGE_KEY_1);
    when(measureRepository.getBaseMeasure(treeRootHolder.getRoot(), qualityProfileMetric)).thenReturn(Optional.of(newMeasure()));
    when(measureRepository.getRawMeasure(treeRootHolder.getRoot(), qualityProfileMetric)).thenReturn(Optional.of(newMeasure(qp1)));
    ScannerPlugin scannerPluginLanguage1 = mockScannerPlugin(LANGUAGE_KEY_1, 1L);
    when(analysisMetadataHolder.getScannerPluginsByKey()).thenReturn(Map.of(LANGUAGE_KEY_1, scannerPluginLanguage1));
    when(analysisMetadataHolder.getBaseAnalysis()).thenReturn(new Analysis.Builder().setUuid("uuid").setCreatedAt(3L).build());
    underTest.execute(new TestComputationStepContext());
    verifyNoMoreInteractions(eventRepository);
}
/**
 * Returns the last instant of the year containing {@code date}
 * (e.g. 2019-12-31 23:59:59); delegates to the Calendar-based overload.
 */
public static DateTime endOfYear(Date date) {
    return new DateTime(endOfYear(calendar(date)));
}
// Any date whose year is set to 2019 must normalize to 2019-12-31 23:59:59.
@Test
public void endOfYearTest() {
    final DateTime date = DateUtil.date();
    date.setField(DateField.YEAR, 2019);
    final DateTime endOfYear = DateUtil.endOfYear(date);
    assertEquals("2019-12-31 23:59:59", endOfYear.toString());
}
public static List<KiePMMLFieldOperatorValue> getConstraintEntriesFromXOrCompoundPredicate(final CompoundPredicate compoundPredicate, final Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap) { if (!CompoundPredicate.BooleanOperator.XOR.equals(compoundPredicate.getBooleanOperator())) { throw new KiePMMLException(String.format("getConstraintEntriesFromXOrCompoundPredicate invoked with %s CompoundPredicate", compoundPredicate.getBooleanOperator())); } // Managing only SimplePredicates for the moment being final List<Predicate> simplePredicates = compoundPredicate.getPredicates().stream().filter(predicate -> predicate instanceof SimplePredicate).collect(Collectors.toList()); if (simplePredicates.size() < 2) { throw new KiePMMLException("At least two elements expected for XOR operations"); } if (simplePredicates.size() > 2) { // Not managed yet throw new KiePMMLException("More then two elements not managed, yet, for XOR operations"); } return getXORConstraintEntryFromSimplePredicates(simplePredicates, fieldTypeMap); }
// An XOR compound predicate with an unsupported number of simple predicates
// must raise KiePMMLException.
@Test
void getConstraintEntriesFromXOrCompoundPredicateWrongSize() {
    assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> {
        CompoundPredicate compoundPredicate = new CompoundPredicate();
        compoundPredicate.setBooleanOperator(CompoundPredicate.BooleanOperator.XOR);
        compoundPredicate.getPredicates().addAll(simplePredicates);
        KiePMMLASTFactoryUtils.getConstraintEntriesFromXOrCompoundPredicate(compoundPredicate, fieldTypeMap);
    });
}
/**
 * Merges an ACL spec into an existing ACL: spec entries replace existing
 * entries with the same (scope, type, name) key, remaining spec entries are
 * added, MASK entries are tracked separately per scope, and masks are then
 * recalculated for scopes that changed without an explicitly provided mask.
 *
 * @param existingAcl current ACL entries, assumed sorted/valid
 * @param inAclSpec   spec entries to merge in (validated/sorted via ValidatedAclSpec)
 * @return the merged, validated ACL
 * @throws AclException if the resulting ACL is invalid (via buildAndValidateAcl)
 */
public static List<AclEntry> mergeAclEntries(List<AclEntry> existingAcl, List<AclEntry> inAclSpec) throws AclException {
    ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
    ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
    List<AclEntry> foundAclSpecEntries = Lists.newArrayListWithCapacity(MAX_ENTRIES);
    // providedMask holds the MASK entry per scope (existing or from the spec);
    // maskDirty records scopes whose mask was explicitly set by the spec,
    // scopeDirty records scopes touched by the spec at all.
    EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class);
    EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
    EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
    // Pass 1: walk existing entries, replacing those matched by the spec.
    for (AclEntry existingEntry: existingAcl) {
        AclEntry aclSpecEntry = aclSpec.findByKey(existingEntry);
        if (aclSpecEntry != null) {
            foundAclSpecEntries.add(aclSpecEntry);
            scopeDirty.add(aclSpecEntry.getScope());
            if (aclSpecEntry.getType() == MASK) {
                providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
                maskDirty.add(aclSpecEntry.getScope());
            } else {
                aclBuilder.add(aclSpecEntry);
            }
        } else {
            if (existingEntry.getType() == MASK) {
                providedMask.put(existingEntry.getScope(), existingEntry);
            } else {
                aclBuilder.add(existingEntry);
            }
        }
    }
    // ACL spec entries that were not replacements are new additions.
    for (AclEntry newEntry: aclSpec) {
        if (Collections.binarySearch(foundAclSpecEntries, newEntry, ACL_ENTRY_COMPARATOR) < 0) {
            scopeDirty.add(newEntry.getScope());
            if (newEntry.getType() == MASK) {
                providedMask.put(newEntry.getScope(), newEntry);
                maskDirty.add(newEntry.getScope());
            } else {
                aclBuilder.add(newEntry);
            }
        }
    }
    // Fill in any default entries implied by the access entries, then recompute
    // masks for dirty scopes lacking an explicit mask, and validate the result.
    copyDefaultsIfNeeded(aclBuilder);
    calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
    return buildAndValidateAcl(aclBuilder);
}
// Merging a spec that only touches a DEFAULT-scope named user must leave the
// ACCESS-scope mask untouched while recomputing only the DEFAULT mask.
@Test
public void testMergeAclEntriesAccessMaskPreserved() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
        .add(aclEntry(ACCESS, USER, ALL))
        .add(aclEntry(ACCESS, USER, "bruce", READ))
        .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
        .add(aclEntry(ACCESS, GROUP, READ))
        .add(aclEntry(ACCESS, MASK, READ))
        .add(aclEntry(ACCESS, OTHER, READ))
        .add(aclEntry(DEFAULT, USER, ALL))
        .add(aclEntry(DEFAULT, USER, "bruce", READ))
        .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE))
        .add(aclEntry(DEFAULT, GROUP, READ))
        .add(aclEntry(DEFAULT, MASK, READ_WRITE))
        .add(aclEntry(DEFAULT, OTHER, NONE))
        .build();
    List<AclEntry> aclSpec = Lists.newArrayList(aclEntry(DEFAULT, USER, "diana", READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
        .add(aclEntry(ACCESS, USER, ALL))
        .add(aclEntry(ACCESS, USER, "bruce", READ))
        .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
        .add(aclEntry(ACCESS, GROUP, READ))
        .add(aclEntry(ACCESS, MASK, READ))
        .add(aclEntry(ACCESS, OTHER, READ))
        .add(aclEntry(DEFAULT, USER, ALL))
        .add(aclEntry(DEFAULT, USER, "bruce", READ))
        .add(aclEntry(DEFAULT, USER, "diana", READ_EXECUTE))
        .add(aclEntry(DEFAULT, GROUP, READ))
        .add(aclEntry(DEFAULT, MASK, READ_EXECUTE))
        .add(aclEntry(DEFAULT, OTHER, NONE))
        .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
}
/**
 * Sets the number of asynchronous backups for this queue.
 *
 * The value is validated against the current synchronous backupCount by
 * checkAsyncBackupCount (an out-of-range value throws IllegalArgumentException,
 * as exercised by the tests).
 *
 * @return this config instance, for chaining
 */
public QueueConfig setAsyncBackupCount(int asyncBackupCount) {
    this.asyncBackupCount = checkAsyncBackupCount(backupCount, asyncBackupCount);
    return this;
}
// Values beyond the maximum allowed async backup count must be rejected.
@Test(expected = IllegalArgumentException.class)
public void setAsyncBackupCount_whenTooLarge() {
    // max allowed is 6
    queueConfig.setAsyncBackupCount(200);
}
@VisibleForTesting static Instant getCreationTime(String configuredCreationTime, ProjectProperties projectProperties) throws DateTimeParseException, InvalidCreationTimeException { try { switch (configuredCreationTime) { case "EPOCH": return Instant.EPOCH; case "USE_CURRENT_TIMESTAMP": projectProperties.log( LogEvent.debug( "Setting image creation time to current time; your image may not be reproducible.")); return Instant.now(); default: DateTimeFormatter formatter = new DateTimeFormatterBuilder() .append(DateTimeFormatter.ISO_DATE_TIME) // parses isoStrict // add ability to parse with no ":" in tz .optionalStart() .appendOffset("+HHmm", "+0000") .optionalEnd() .toFormatter(); return formatter.parse(configuredCreationTime, Instant::from); } } catch (DateTimeParseException ex) { throw new InvalidCreationTimeException(configuredCreationTime, configuredCreationTime, ex); } }
// USE_CURRENT_TIMESTAMP must resolve to "now": strictly after a reference
// instant captured two seconds earlier.
@Test
public void testGetCreationTime_useCurrentTimestamp() throws InvalidCreationTimeException {
    Instant now = Instant.now().minusSeconds(2);
    Instant time = PluginConfigurationProcessor.getCreationTime("USE_CURRENT_TIMESTAMP", projectProperties);
    assertThat(time).isGreaterThan(now);
}
/**
 * Deserializes the given JSON bytes into an instance of {@code cls}.
 *
 * @throws NacosDeserializationException wrapping any parsing/mapping failure
 */
public static <T> T toObj(byte[] json, Class<T> cls) {
    try {
        return mapper.readValue(json, cls);
    } catch (Exception e) {
        throw new NacosDeserializationException(cls, e);
    }
}
// Parsing into a generic JsonNode must handle null, strings, numbers, objects,
// arrays, and mixed-type object fields.
@Test
void testToObject15() {
    assertEquals("null", JacksonUtils.toObj("null").asText());
    assertEquals("string", JacksonUtils.toObj("\"string\"").asText());
    assertEquals(30, JacksonUtils.toObj("30").asInt());
    assertEquals("value", JacksonUtils.toObj("{\"key\":\"value\"}").get("key").asText());
    assertEquals("value", JacksonUtils.toObj("[{\"key\":\"value\"}]").get(0).get("key").asText());
    JsonNode jsonNode = JacksonUtils.toObj("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}");
    assertEquals(0L, jsonNode.get("aLong").asLong());
    assertEquals(1, jsonNode.get("aInteger").asInt());
}
/**
 * Drives the multi-phase Facebook photo export state machine.
 *
 * Phases, selected by the incoming ExportInformation:
 * 1) absent          -> start a bulk export by listing albums;
 * 2) album-prefixed pagination token, no container -> continue listing albums;
 * 3) PhotosContainerResource -> albums pre-selected by the front end; emit one
 *    IdOnlyContainerResource per album for later photo export;
 * 4) IdOnlyContainerResource -> export the photos of that single album.
 *
 * @throws IllegalStateException for any other combination of inputs
 */
@Override
public ExportResult<PhotosContainerResource> export(UUID jobId, TokensAndUrlAuthData authData, Optional<ExportInformation> exportInformation) throws CopyExceptionWithFailureReason {
    Preconditions.checkNotNull(authData);
    if (!exportInformation.isPresent()) {
        // No export information if at the start of a bulk export
        // Start by getting the list of albums to export
        return exportAlbums(authData, Optional.empty());
    }
    StringPaginationToken paginationToken = (StringPaginationToken) exportInformation.get().getPaginationData();
    ContainerResource containerResource = exportInformation.get().getContainerResource();
    boolean containerResourcePresent = containerResource != null;
    boolean paginationDataPresent = paginationToken != null;
    if (!containerResourcePresent && paginationDataPresent && paginationToken.getToken().startsWith(ALBUM_TOKEN_PREFIX)) {
        // Continue exporting albums
        return exportAlbums(authData, Optional.of(paginationToken));
    } else if (containerResourcePresent && containerResource instanceof PhotosContainerResource) {
        // We have had albums specified from the front end so process them for import
        PhotosContainerResource photosContainerResource = (PhotosContainerResource) containerResource;
        Preconditions.checkNotNull(photosContainerResource.getAlbums());
        ContinuationData continuationData = new ContinuationData(null);
        for (PhotoAlbum album : photosContainerResource.getAlbums()) {
            continuationData.addContainerResource(new IdOnlyContainerResource(album.getId()));
        }
        return new ExportResult<>(ExportResult.ResultType.CONTINUE, photosContainerResource, continuationData);
    } else if (containerResourcePresent && containerResource instanceof IdOnlyContainerResource) {
        // Export photos
        return exportPhotos(jobId, authData, (IdOnlyContainerResource) containerResource, Optional.ofNullable(paginationToken));
    } else {
        throw new IllegalStateException(String.format("Invalid state passed into FacebookPhotosExporter. ExportInformation: %s", exportInformation));
    }
}
// Exporting a single album (IdOnlyContainerResource) must END with exactly one
// photo model carrying the expected id, name, type, album and timestamp.
@Test
public void testExportPhoto() throws CopyExceptionWithFailureReason {
    ExportResult<PhotosContainerResource> result = facebookPhotosExporter.export(uuid, new TokensAndUrlAuthData("accessToken", null, null), Optional.of(new ExportInformation(null, new IdOnlyContainerResource(ALBUM_ID))));
    assertEquals(ExportResult.ResultType.END, result.getType());
    PhotosContainerResource exportedData = result.getExportedData();
    assertEquals(1, exportedData.getPhotos().size());
    assertEquals(new PhotoModel(PHOTO_ID + ".jpg", PHOTO_ID, PHOTO_NAME, "image/jpg", PHOTO_ID, ALBUM_ID, false, PHOTO_TIME), exportedData.getPhotos().toArray()[0]);
}
/**
 * Returns a view of {@code input} with the region [beginIndex, endIndex)
 * removed, avoiding allocation of intermediate Strings where possible.
 *
 * @throws NullPointerException      if input is null
 * @throws IndexOutOfBoundsException from regionLength for invalid indices
 */
public static CharSequence withoutSubSequence(CharSequence input, int beginIndex, int endIndex) {
    if (input == null) throw new NullPointerException("input == null");
    int length = input.length();
    // Exit early if the region is empty or the entire input
    // (regionLength also validates the index arguments).
    int skippedRegionLength = regionLength(length, beginIndex, endIndex);
    if (skippedRegionLength == 0) return input;
    if (beginIndex == 0 && endIndex == length) return "";
    // Exit early if the region ends on a boundary.
    // This doesn't use input.subsequence as it might allocate a String
    if (beginIndex == 0) return new SubSequence(input, endIndex, length);
    if (endIndex == length) return new SubSequence(input, 0, beginIndex);
    // Otherwise, the region to skip in the middle
    return new WithoutSubSequence(input, 0, beginIndex, endIndex, length);
}
// Exercises every argument-validation path: null input, negative indices,
// inverted range, and end index past the input length.
@Test
void withoutSubSequence_badParameters() {
    assertThatThrownBy(() -> CharSequences.withoutSubSequence(null, 0, 0))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("input == null");
    assertThatThrownBy(() -> CharSequences.withoutSubSequence("b3", -1, 1))
        .isInstanceOf(IndexOutOfBoundsException.class)
        .hasMessage("beginIndex < 0");
    assertThatThrownBy(() -> CharSequences.withoutSubSequence("b3", 0, -1))
        .isInstanceOf(IndexOutOfBoundsException.class)
        .hasMessage("endIndex < 0");
    assertThatThrownBy(() -> CharSequences.withoutSubSequence("b3", 1, 0))
        .isInstanceOf(IndexOutOfBoundsException.class)
        .hasMessage("beginIndex > endIndex");
    assertThatThrownBy(() -> CharSequences.withoutSubSequence("b3", 0, 3))
        .isInstanceOf(IndexOutOfBoundsException.class)
        .hasMessage("endIndex > input");
}
/**
 * Registers everything a report analysis task needs into the container:
 * settings loader, the task itself, the computation steps object, the core
 * component classes, every external provider's components, and finally the
 * ordered step classes.
 */
@Override
public void populateContainer(TaskContainer container) {
    ComputationSteps steps = new ReportComputationSteps(container);
    container.add(SettingsLoader.class);
    container.add(task);
    container.add(steps);
    container.add(componentClasses());
    // External plugins may contribute additional components.
    for (ReportAnalysisComponentProvider componentProvider : componentProviders) {
        container.add(componentProvider.getComponents());
    }
    container.add(steps.orderedStepClasses());
}
// Components returned by a ReportAnalysisComponentProvider (both instances and
// classes) must end up registered in the task container.
@Test
public void Components_of_ReportAnalysisComponentProvider_are_added_to_the_container() {
    Object object = new Object();
    Class<MyClass> clazz = MyClass.class;
    ReportAnalysisComponentProvider componentProvider = mock(ReportAnalysisComponentProvider.class);
    when(componentProvider.getComponents()).thenReturn(ImmutableList.of(object, clazz));
    ProjectAnalysisTaskContainerPopulator populator = new ProjectAnalysisTaskContainerPopulator(task, new ReportAnalysisComponentProvider[] {componentProvider});
    ListTaskContainer container = new ListTaskContainer();
    container.add(componentProvider);
    populator.populateContainer(container);
    assertThat(container.getAddedComponents()).contains(object, clazz);
}
/**
 * Returns the anonymized text for the given parse tree; delegates the actual
 * rewriting to {@code build}.
 */
public String anonymize(final ParseTree tree) {
    return build(tree);
}
// Anonymization of a JOIN insert-select is pinned via an approval
// (golden-file) test rather than inline assertions.
@Test
public void shouldAnonymizeJoinStatementsCorrectly() {
    final String output = anon.anonymize("INSERT INTO OUTPUT SELECT col1, col2, col3" + " FROM SOURCE1 S1 JOIN SOURCE2 S2 WITHIN 1 SECOND ON col1.k=col2.k;");
    Approvals.verify(output);
}
/**
 * Registers the given filter instance under the supplied name; null filters
 * are rejected up front via requireNonNull. Delegates to the
 * FilterHolder-taking overload.
 */
public FilterRegistration.Dynamic addFilter(String name, Filter filter) {
    return addFilter(name, new FilterHolder(requireNonNull(filter)));
}
// A filter instance added through the environment must be retrievable from the
// started servlet handler as the exact same object; the handler is stopped in
// a finally block so the test cannot leak a running handler.
@Test
void addsFilterInstances() throws Exception {
    final Filter filter = new WelcomeFilter();
    final FilterRegistration.Dynamic builder = environment.addFilter("filter", filter);
    assertThat(builder).isNotNull();
    try {
        servletHandler.start();
        final FilterHolder filterHolder = servletHandler.getFilter("filter");
        assertThat(filterHolder.getFilter()).isEqualTo(filter);
    } finally {
        servletHandler.stop();
    }
}