focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Registers the Arrow-specific serializers (ArrowTable, VectorSchemaRoot) with the given Fury instance.
public static void registerSerializers(Fury fury) { fury.registerSerializer(ArrowTable.class, new ArrowTableSerializer(fury)); fury.registerSerializer(VectorSchemaRoot.class, new VectorSchemaRootSerializer(fury)); }
// Verifies that registerSerializers wires ArrowTable and VectorSchemaRoot to their dedicated serializer classes.
@Test public void testRegisterArrowSerializer() throws Exception { Fury fury = Fury.builder().withLanguage(Language.JAVA).build(); ClassResolver classResolver = fury.getClassResolver(); ArrowSerializers.registerSerializers(fury); assertEquals(classResolver.getSerializerClass(ArrowTable.class), ArrowTableSerializer.class); assertEquals( classResolver.getSerializerClass(VectorSchemaRoot.class), ArrowSerializers.VectorSchemaRootSerializer.class); }
// Simple accessor for the applicationId field.
public String getApplicationId() { return applicationId; }
// Checks the getter against the value the test fixture's event was built with ("AppID").
@Test public void testGetApplicationId() { // Test the getApplicationId method assertEquals("AppID", event.getApplicationId()); }
// Delegates chunked checksum computation to the native implementation.
// The trailing ("", 0, false) arguments presumably carry file name / base position / verify flag
// used only for error reporting and verification mode — TODO confirm against the native signature.
public static void calculateChunkedSumsByteArray(int bytesPerSum, int checksumType, byte[] sums, int sumsOffset, byte[] data, int dataOffset, int dataLength) { nativeComputeChunkedSumsByteArray(bytesPerSum, checksumType, sums, sumsOffset, data, dataOffset, dataLength, "", 0, false); }
// Computes sums over data paired with deliberately invalid checksums.
// NOTE(review): no assertion here — presumably the point is that calculate (unlike verify)
// must NOT throw on invalid checksums; confirm intent with the surrounding test class.
@Test public void testCalculateChunkedSumsByteArrayFail() throws ChecksumException { allocateArrayByteBuffers(); fillDataAndInvalidChecksums(); NativeCrc32.calculateChunkedSumsByteArray(bytesPerChecksum, checksumType.id, checksums.array(), checksums.position(), data.array(), data.position(), data.remaining()); }
// Returns the broker ids of the current image as their decimal string forms.
@Override
public Collection<String> childNames() {
    final ArrayList<String> names = new ArrayList<>();
    // Iterate the broker-id key set in its natural iteration order, stringifying each id.
    image.brokers().keySet().forEach(brokerId -> names.add(brokerId.toString()));
    return names;
}
// Expects the fixture NODE's image to contain exactly broker id 1.
@Test public void testChildNames() { assertEquals(Collections.singletonList("1"), NODE.childNames()); }
// Loads the config by id; returns null for a null id, throws CONFIG_NOT_EXISTS when the row is absent.
@VisibleForTesting public ConfigDO validateConfigExists(Long id) { if (id == null) { return null; } ConfigDO config = configMapper.selectById(id); if (config == null) { throw exception(CONFIG_NOT_EXISTS); } return config; }
// Happy path: inserting a row first means validateConfigExists must complete without throwing.
@Test public void testValidateConfigExists_success() { // mock data ConfigDO dbConfigDO = randomConfigDO(); configMapper.insert(dbConfigDO);// @Sql: insert an existing row first // success path configService.validateConfigExists(dbConfigDO.getId()); }
// Resolves a repository by name, loads its plugin class, connects with the given credentials,
// and stores the connected repository via setRepository. Returns null (after logging) on any
// failure: unknown name, plugin load failure, or connection exception.
public Repository connectRepository( RepositoriesMeta repositoriesMeta, String repositoryName, String username, String password ) throws KettleException { RepositoryMeta repositoryMeta = repositoriesMeta.findRepository( repositoryName ); if ( repositoryMeta == null ) { log.logBasic( "I couldn't find the repository with name '" + repositoryName + "'" ); return null; } Repository rep = PluginRegistry.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class ); if ( rep == null ) { log.logBasic( "Unable to load repository plugin for '" + repositoryName + "'" ); return null; } rep.init( repositoryMeta ); try { rep.connect( username, password ); setRepository( rep ); return rep; } catch ( Exception e ) { log.logBasic( "Unable to connect to the repository with name '" + repositoryName + "'" ); return null; } }
// Exercises connectRepository end-to-end with a mocked repository plugin:
// (1) unknown repo name leaves the previously set repository untouched,
// (2) a failed connect (wrong password) leaves it untouched,
// (3) a successful connect replaces it and the connect callback records success.
@Test public void testConnectRepository() throws KettleException { TransExecutionConfiguration transExecConf = new TransExecutionConfiguration(); final RepositoriesMeta repositoriesMeta = mock( RepositoriesMeta.class ); final RepositoryMeta repositoryMeta = mock( RepositoryMeta.class ); final Repository repository = mock( Repository.class ); final String mockRepo = "mockRepo"; final boolean[] connectionSuccess = { false }; Repository initialRepo = mock( Repository.class ); transExecConf.setRepository( initialRepo ); KettleLogStore.init(); // Create mock repository plugin MockRepositoryPlugin mockRepositoryPlugin = mock( MockRepositoryPlugin.class ); when( mockRepositoryPlugin.getIds() ).thenReturn( new String[] { "mockRepo" } ); when( mockRepositoryPlugin.matches( "mockRepo" ) ).thenReturn( true ); when( mockRepositoryPlugin.getName() ).thenReturn( "mock-repository" ); when( mockRepositoryPlugin.getClassMap() ).thenAnswer( new Answer<Map<Class<?>, String>>() { @Override public Map<Class<?>, String> answer( InvocationOnMock invocation ) throws Throwable { Map<Class<?>, String> dbMap = new HashMap<Class<?>, String>(); dbMap.put( Repository.class, repositoryMeta.getClass().getName() ); return dbMap; } } ); List<PluginInterface> registeredPlugins = PluginRegistry.getInstance().getPlugins( RepositoryPluginType.class ); for ( PluginInterface registeredPlugin : registeredPlugins ) { PluginRegistry.getInstance().removePlugin( RepositoryPluginType.class, registeredPlugin ); } PluginRegistry.getInstance().registerPlugin( RepositoryPluginType.class, mockRepositoryPlugin ); // Define valid connection criteria when( repositoriesMeta.findRepository( anyString() ) ).thenAnswer( new Answer<RepositoryMeta>() { @Override public RepositoryMeta answer( InvocationOnMock invocation ) throws Throwable { return mockRepo.equals( invocation.getArguments()[0] ) ? 
repositoryMeta : null; } } ); when( mockRepositoryPlugin.loadClass( Repository.class ) ).thenReturn( repository ); doAnswer( new Answer() { @Override public Object answer( InvocationOnMock invocation ) throws Throwable { if ( "username".equals( invocation.getArguments()[0] ) && "password".equals( invocation.getArguments()[1] ) ) { connectionSuccess[0] = true; } else { connectionSuccess[0] = false; throw new KettleException( "Mock Repository connection failed" ); } return null; } } ).when( repository ).connect( anyString(), anyString() ); // Ignore repository not found in RepositoriesMeta transExecConf.connectRepository( repositoriesMeta, "notFound", "username", "password" ); assertEquals( "Repository Changed", initialRepo, transExecConf.getRepository() ); // Ignore failed attempt to connect transExecConf.connectRepository( repositoriesMeta, mockRepo, "username", "" ); assertEquals( "Repository Changed", initialRepo, transExecConf.getRepository() ); // Save repository if connection passes transExecConf.connectRepository( repositoriesMeta, mockRepo, "username", "password" ); assertEquals( "Repository didn't change", repository, transExecConf.getRepository() ); assertTrue( "Repository not connected", connectionSuccess[0] ); }
// CLI entry point: parses args, handles --version (prints, returns 0) and --help (throws UsageException),
// builds formatter options (AOSP vs Google style, javadoc formatting), then formats stdin or files.
public int format(String... args) throws UsageException { CommandLineOptions parameters = processArgs(args); if (parameters.version()) { errWriter.println(versionString()); return 0; } if (parameters.help()) { throw new UsageException(); } JavaFormatterOptions options = JavaFormatterOptions.builder() .style(parameters.aosp() ? Style.AOSP : Style.GOOGLE) .formatJavadoc(parameters.formatJavadoc()) .build(); if (parameters.stdin()) { return formatStdin(parameters, options); } else { return formatStdin == null ? 0 : 0; } }
// Feeds a class with javadoc through Main.format("-") (stdin mode) and checks the
// reflowed javadoc output: <p> inserted, long @param wrapped, one-line doc collapsed.
@Test public void javadoc() throws Exception { String[] input = { "/**", " * graph", " *", " * graph", " *", " * @param foo lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do" + " eiusmod tempor incididunt ut labore et dolore magna aliqua", " */", "class Test {", " /**", " * creates entropy", " */", " public static void main(String... args) {}", "}", }; String[] expected = { "/**", " * graph", " *", " * <p>graph", " *", " * @param foo lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do" + " eiusmod tempor", " * incididunt ut labore et dolore magna aliqua", " */", "class Test {", " /** creates entropy */", " public static void main(String... args) {}", "}", "", }; InputStream in = new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)); StringWriter out = new StringWriter(); Main main = new Main( new PrintWriter(out, true), new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true), in); assertThat(main.format("-")).isEqualTo(0); assertThat(out.toString()).isEqualTo(joiner.join(expected)); }
// Thin wrapper: infers a Beam Schema for a POJO class via StaticSchemaInference.
public static Schema schemaFromPojoClass( TypeDescriptor<?> typeDescriptor, FieldValueTypeSupplier fieldValueTypeSupplier) { return StaticSchemaInference.schemaFromClass(typeDescriptor, fieldValueTypeSupplier); }
// Asserts the inferred schema for a POJO with primitive array fields matches the expected schema.
@Test public void testPrimitiveArray() { Schema schema = POJOUtils.schemaFromPojoClass( new TypeDescriptor<PrimitiveArrayPOJO>() {}, JavaFieldTypeSupplier.INSTANCE); SchemaTestUtils.assertSchemaEquivalent(PRIMITIVE_ARRAY_POJO_SCHEMA, schema); }
// Fluent builder setter for the application name; getThis() preserves the concrete builder type.
public ApplicationBuilder name(String name) { this.name = name; return getThis(); }
// The name set on the builder must appear on the built application.
@Test void name() { ApplicationBuilder builder = new ApplicationBuilder(); builder.name("app"); Assertions.assertEquals("app", builder.build().getName()); }
// Plans splits after refreshing table metadata: first call (null position) does initial
// discovery, subsequent calls discover incrementally from the last position.
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    return lastPosition == null
        ? discoverInitialSplits()
        : discoverIncrementalSplits(lastPosition);
}
// With maxPlanningSnapshotCount(1), each planSplits call must advance by exactly one snapshot:
// initial call yields an empty position (parent of snapshot1), then snapshot1's file, then snapshot2's.
@Test public void testMaxPlanningSnapshotCount() throws Exception { appendTwoSnapshots(); // append 3 more snapshots for (int i = 2; i < 5; ++i) { appendSnapshot(i, 2); } ScanContext scanContext = ScanContext.builder() .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_EARLIEST_SNAPSHOT) // limit to 1 snapshot per discovery .maxPlanningSnapshotCount(1) .build(); ContinuousSplitPlannerImpl splitPlanner = new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null); ContinuousEnumerationResult initialResult = splitPlanner.planSplits(null); assertThat(initialResult.fromPosition()).isNull(); // For inclusive behavior, the initial result should point to snapshot1's parent, // which leads to null snapshotId and snapshotTimestampMs. assertThat(initialResult.toPosition().snapshotId()).isNull(); assertThat(initialResult.toPosition().snapshotTimestampMs()).isNull(); assertThat(initialResult.splits()).isEmpty(); ContinuousEnumerationResult secondResult = splitPlanner.planSplits(initialResult.toPosition()); // should discover dataFile1 appended in snapshot1 verifyMaxPlanningSnapshotCountResult( secondResult, null, snapshot1, ImmutableSet.of(dataFile1.path().toString())); ContinuousEnumerationResult thirdResult = splitPlanner.planSplits(secondResult.toPosition()); // should discover dataFile2 appended in snapshot2 verifyMaxPlanningSnapshotCountResult( thirdResult, snapshot1, snapshot2, ImmutableSet.of(dataFile2.path().toString())); }
// Reads a String header from the message; falls back to defaultValue when the header
// is absent or empty (per ObjectHelper.isNotEmpty).
public String getStringHeader(Message in, String header, String defaultValue) {
    final String headerValue = in.getHeader(header, String.class);
    if (ObjectHelper.isNotEmpty(headerValue)) {
        return headerValue;
    }
    return defaultValue;
}
// A null header value must fall back to the supplied default, with exactly one header read.
@Test public void testGetStringHeaderWithNullValue() { when(in.getHeader(HEADER_METRIC_NAME, String.class)).thenReturn(null); assertThat(okProducer.getStringHeader(in, HEADER_METRIC_NAME, "value"), is("value")); inOrder.verify(in, times(1)).getHeader(HEADER_METRIC_NAME, String.class); inOrder.verifyNoMoreInteractions(); }
// Reflectively creates a typed array of the given component type and size.
// The unchecked cast is safe only when T matches componentType — the caller's responsibility.
@SuppressWarnings("unchecked") public static <T> T[] newArray(Class<?> componentType, int newSize) { return (T[]) Array.newInstance(componentType, newSize); }
// The created array must have the requested length.
@Test public void newArrayTest() { String[] newArray = ArrayUtil.newArray(String.class, 3); assertEquals(3, newArray.length); }
// Parses a MetadataUpdate from its JSON representation via the parser's fromJson handler.
public static MetadataUpdate fromJson(String json) { return JsonUtil.parse(json, MetadataUpdateParser::fromJson); }
// Round-trips a set-current-view-version action from JSON and compares with the expected update.
@Test public void testSetCurrentViewVersionFromJson() { String action = MetadataUpdateParser.SET_CURRENT_VIEW_VERSION; String json = String.format("{\"action\":\"%s\",\"view-version-id\":23}", action); MetadataUpdate expected = new MetadataUpdate.SetCurrentViewVersion(23); assertEquals(action, expected, MetadataUpdateParser.fromJson(json)); }
// Runs the QUERY_BY_NAME named query for the given user name and returns the results
// as a Set; a null result list (defensive — JPA normally returns empty) yields an empty set.
@Override
public Set<OAuth2AccessTokenEntity> getAccessTokensByUserName(String name) {
    TypedQuery<OAuth2AccessTokenEntity> query =
        manager.createNamedQuery(OAuth2AccessTokenEntity.QUERY_BY_NAME, OAuth2AccessTokenEntity.class);
    query.setParameter(OAuth2AccessTokenEntity.PARAM_NAME, name);
    List<OAuth2AccessTokenEntity> results = query.getResultList();
    if (results == null) {
        return new HashSet<>();
    }
    return new HashSet<>(results);
}
// Fixture data holds two tokens for "user1"; both count and owning user name are checked.
@Test public void testGetAccessTokensByUserName() { Set<OAuth2AccessTokenEntity> tokens = repository.getAccessTokensByUserName("user1"); assertEquals(2, tokens.size()); assertEquals("user1", tokens.iterator().next().getAuthenticationHolder().getUserAuth().getName()); }
// Delegates hashing to the wrapped filter, keeping hashCode consistent with a filter-based equals.
@Override public int hashCode() { return filter.hashCode(); }
// Sanity check: hashCode must be stable across calls on the same instance.
@Test public void hash() { assertThat(filter.hashCode()).isEqualTo(filter.hashCode()); }
@Override public void filter(ContainerRequestContext requestContext) throws IOException { final Optional<String> header = getBearerHeader(requestContext); if (header.isEmpty()) { // no JWT token, we'll fail immediately abortRequest(requestContext); } else { final String token = header.map(h -> h.replaceFirst(AUTHENTICATION_SCHEME + " ", "")).get(); try { verifyToken(token); } catch (TokenVerificationException e) { LOG.error("Failed to verify auth token", e); abortRequest(requestContext); } } }
// A request with no auth header must be aborted via abortWith.
@Test void verifyNoHeaderProvided() throws IOException { final String key = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h"; final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(key); final ContainerRequest mockedRequest = mockRequest(null); validator.filter(mockedRequest); Mockito.verify(mockedRequest, atLeastOnce()).abortWith(Mockito.any()); }
// Resolves a path against the working directory; a null result from the recursive lookup
// means an intermediate path element was missing or not a directory, reported as NoSuchFileException.
public DirectoryEntry lookUp( File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); DirectoryEntry result = lookUp(workingDirectory, path, options, 0); if (result == null) { // an intermediate file in the path did not exist or was not a directory throw new NoSuchFileException(path.toString()); } return result; }
// Root paths ("/" and "$") must resolve to themselves.
@Test public void testLookup_root() throws IOException { assertExists(lookup("/"), "/", "/"); assertExists(lookup("$"), "$", "$"); }
// Builds all metrics packets for the given services; each builder slot is nulled after
// building so the builder can be garbage-collected while packets accumulate.
public List<MetricsPacket> getMetrics(List<VespaService> services, Instant startTime) { MetricsPacket.Builder[] builderArray = getMetricsBuildersAsArray(services, startTime, null); List<MetricsPacket> metricsPackets = new ArrayList<>(builderArray.length); for (int i = 0; i < builderArray.length; i++) { metricsPackets.add(builderArray[i].build()); builderArray[i] = null; // Set null to be able to GC the builder when packet has been created } return metricsPackets; }
// Adds a system metric to service0 and checks the first packet is the system packet carrying it.
@Test public void system_metrics_are_added() { VespaService service0 = testServices.get(0); Metrics oldSystemMetrics = service0.getSystemMetrics(); service0.getSystemMetrics().add(new Metric(toMetricId("cpu"), 1)); List<MetricsPacket> packets = metricsManager.getMetrics(testServices, Instant.EPOCH); assertEquals(3, packets.size()); MetricsPacket systemPacket = packets.get(0); // system metrics are added before other metrics assertEquals(1, systemPacket.metrics().get(toMetricId("cpu"))); assertEquals("system", systemPacket.dimensions().get(toDimensionId("metrictype"))); service0.setSystemMetrics(oldSystemMetrics); }
// Claims space in the ring buffer for a message of the given type/length.
// Ordering matters: the negative length is published first (marks the record in-progress),
// then a release fence, then the type — do not reorder these writes.
// Returns the encoded message offset, or INSUFFICIENT_CAPACITY when the buffer is full.
public int tryClaim(final int msgTypeId, final int length) { checkTypeId(msgTypeId); checkMsgLength(length); final AtomicBuffer buffer = this.buffer; final int recordLength = length + HEADER_LENGTH; final int recordIndex = claimCapacity(buffer, recordLength); if (INSUFFICIENT_CAPACITY == recordIndex) { return recordIndex; } buffer.putIntOrdered(lengthOffset(recordIndex), -recordLength); MemoryAccess.releaseFence(); buffer.putInt(typeOffset(recordIndex), msgTypeId); return encodedMsgOffset(recordIndex); }
// Verifies the exact buffer-write sequence when the claim wraps: padding record written
// at the old tail, then the real record header at index 0, in strict order.
@Test void tryClaimReturnsIndexAtWhichEncodedMessageStartsAfterPadding() { final int length = 10; final int recordLength = length + HEADER_LENGTH; final int alignedRecordLength = align(recordLength, ALIGNMENT); final long headPosition = 248L; final int padding = 22; final long tailPosition = CAPACITY - padding; final int paddingIndex = (int)tailPosition; final int recordIndex = 0; when(buffer.getLongVolatile(HEAD_COUNTER_CACHE_INDEX)).thenReturn(headPosition); when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn(tailPosition); when(buffer.compareAndSetLong(TAIL_COUNTER_INDEX, tailPosition, tailPosition + alignedRecordLength + padding)) .thenReturn(TRUE); final int index = ringBuffer.tryClaim(MSG_TYPE_ID, length); assertEquals(recordIndex + HEADER_LENGTH, index); final InOrder inOrder = inOrder(buffer); inOrder.verify(buffer).getLongVolatile(HEAD_COUNTER_CACHE_INDEX); inOrder.verify(buffer).getLongVolatile(TAIL_COUNTER_INDEX); inOrder.verify(buffer) .compareAndSetLong(TAIL_COUNTER_INDEX, tailPosition, tailPosition + alignedRecordLength + padding); inOrder.verify(buffer).putIntOrdered(lengthOffset(paddingIndex), -padding); inOrder.verify(buffer).putInt(typeOffset(paddingIndex), PADDING_MSG_TYPE_ID); inOrder.verify(buffer).putIntOrdered(lengthOffset(paddingIndex), padding); inOrder.verify(buffer).putIntOrdered(lengthOffset(recordIndex), -recordLength); inOrder.verify(buffer).putInt(typeOffset(recordIndex), MSG_TYPE_ID); inOrder.verifyNoMoreInteractions(); }
// Reports whether the RPC client for the given member exists and is currently running.
public boolean isRunning(Member member) {
    final RpcClient client = RpcClientFactory.getClient(memberClientKey(member));
    return client != null && client.isRunning();
}
// A member with no registered client must report not running (no NPE).
@Test void testIsRunningForNonExist() { Member member = new Member(); member.setIp("11.11.11.11"); assertFalse(clusterRpcClientProxy.isRunning(member)); }
// Simple accessor for the configured name.
@Override public String getName() { return name; }
// The name passed to the constructor must be returned by getName.
@Test public void testConstructor_withName() { config = new CardinalityEstimatorConfig("myEstimator"); assertEquals("myEstimator", config.getName()); }
// Factory: creates a fresh PickTableLayoutForPredicate rule bound to this instance's metadata.
public PickTableLayoutForPredicate pickTableLayoutForPredicate() { return new PickTableLayoutForPredicate(metadata); }
// A filter whose predicate matches no layout ('orderstatus = G') must be rewritten to empty values;
// checked twice with the variable created via the plan builder and via the variable() helper.
@Test public void eliminateTableScanWhenNoLayoutExist() { tester().assertThat(pickTableLayout.pickTableLayoutForPredicate()) .on(p -> { p.variable("orderstatus", createVarcharType(1)); return p.filter(p.rowExpression("orderstatus = 'G'"), p.tableScan( ordersTableHandle, ImmutableList.of(p.variable("orderstatus", createVarcharType(1))), ImmutableMap.of(p.variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1))))); }) .matches(values("A")); tester().assertThat(pickTableLayout.pickTableLayoutForPredicate()) .on(p -> { p.variable("orderstatus", createVarcharType(1)); return p.filter(p.rowExpression("orderstatus = 'G'"), p.tableScan( ordersTableHandle, ImmutableList.of(variable("orderstatus", createVarcharType(1))), ImmutableMap.of(variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1))))); }) .matches(values("A")); }
// Reacts to cannon-related varp changes: tracks remaining cannon balls (re-arming the low-ammo
// notification when ammo increases and firing it at/below the threshold), updates the cannon's
// world position, and toggles placement state (value 4 == placed) with its counter overlay.
@Subscribe public void onVarbitChanged(VarbitChanged varbitChanged) { if (varbitChanged.getVarpId() == VarPlayer.CANNON_AMMO) { int old = cballsLeft; cballsLeft = varbitChanged.getValue(); if (cballsLeft > old) { cannonBallNotificationSent = false; } if (!cannonBallNotificationSent && cballsLeft > 0 && config.lowWarningThreshold() >= cballsLeft) { notifier.notify(config.showCannonNotifications(), String.format("Your cannon has %d cannon balls remaining!", cballsLeft)); cannonBallNotificationSent = true; } } else if (varbitChanged.getVarpId() == VarPlayer.CANNON_COORD) { WorldPoint c = WorldPoint.fromCoord(varbitChanged.getValue()); cannonPosition = buildCannonWorldArea(c); } else if (varbitChanged.getVarpId() == VarPlayer.CANNON_STATE) { cannonPlaced = varbitChanged.getValue() == 4; if (cannonPlaced) { addCounter(); } else { removeCounter(); } } }
// With a threshold of 0, dropping ammo from 30 to 10 must not trigger the low-ammo notification.
@Test public void testThresholdNotificationsShouldNotNotify() { when(config.lowWarningThreshold()).thenReturn(0); cannonAmmoChanged.setValue(30); plugin.onVarbitChanged(cannonAmmoChanged); cannonAmmoChanged.setValue(10); plugin.onVarbitChanged(cannonAmmoChanged); verify(notifier, never()).notify(any(Notification.class), eq("Your cannon has 10 cannon balls remaining!")); }
// Builds worker harness options from system properties: serialized options may come from
// "sdk_pipeline_options" (deprecated inline JSON) or "sdk_pipeline_options_file" (path to JSON),
// else defaults are used. Worker-specific ids (worker_id, job_id, worker_pool) are then applied,
// and any impersonateServiceAccount setting is stripped for the worker (see linked docs below).
public static <T extends DataflowWorkerHarnessOptions> T createFromSystemProperties( Class<T> harnessOptionsClass) throws IOException { ObjectMapper objectMapper = new ObjectMapper(); T options; if (System.getProperties().containsKey("sdk_pipeline_options")) { // TODO: remove this method of getting pipeline options, once migration is complete. String serializedOptions = System.getProperty("sdk_pipeline_options"); LOG.info("Worker harness starting with: {}", serializedOptions); options = objectMapper.readValue(serializedOptions, PipelineOptions.class).as(harnessOptionsClass); } else if (System.getProperties().containsKey("sdk_pipeline_options_file")) { String filePath = System.getProperty("sdk_pipeline_options_file"); LOG.info("Loading pipeline options from " + filePath); String serializedOptions = new String(Files.readAllBytes(Paths.get(filePath)), StandardCharsets.UTF_8); LOG.info("Worker harness starting with: " + serializedOptions); options = objectMapper.readValue(serializedOptions, PipelineOptions.class).as(harnessOptionsClass); } else { LOG.info("Using empty PipelineOptions, as none were provided."); options = PipelineOptionsFactory.as(harnessOptionsClass); } // These values will not be known at job submission time and must be provided. 
if (System.getProperties().containsKey("worker_id")) { options.setWorkerId(System.getProperty("worker_id")); } if (System.getProperties().containsKey("job_id")) { options.setJobId(System.getProperty("job_id")); } if (System.getProperties().containsKey("worker_pool")) { options.setWorkerPool(System.getProperty("worker_pool")); } // Remove impersonate information from workers // More details: // https://cloud.google.com/dataflow/docs/reference/pipeline-options#security_and_networking if (options.getImpersonateServiceAccount() != null) { LOG.info( "Remove the impersonateServiceAccount pipeline option ({}) when starting the Worker harness.", options.getImpersonateServiceAccount()); options.setImpersonateServiceAccount(null); } return options; }
// Writes options JSON to a temp file, points sdk_pipeline_options_file at it plus worker/job ids,
// and checks all three values land on the created options object.
@Test public void testCreationWithPipelineOptionsFile() throws Exception { File file = tmpFolder.newFile(); String jsonOptions = "{\"options\":{\"numWorkers\":1000}}"; Files.write(Paths.get(file.getPath()), jsonOptions.getBytes(StandardCharsets.UTF_8)); System.getProperties() .putAll( ImmutableMap.<String, String>builder() .put("worker_id", "test_worker_id_2") .put("job_id", "test_job_id_2") // Set a non-default value for testing .put("sdk_pipeline_options_file", file.getPath()) .build()); @SuppressWarnings("deprecation") // testing deprecated functionality DataflowWorkerHarnessOptions options = WorkerPipelineOptionsFactory.createFromSystemProperties(DataflowWorkerHarnessOptions.class); assertEquals("test_worker_id_2", options.getWorkerId()); assertEquals("test_job_id_2", options.getJobId()); assertEquals(1000, options.getNumWorkers()); }
// HTTP endpoint: exports the flows identified by (namespace, id) tuples as a ZIP of YAML sources.
// Any id that cannot be found makes orElseThrow fail the whole request.
@ExecuteOn(TaskExecutors.IO) @Post(uri = "/export/by-ids", produces = MediaType.APPLICATION_OCTET_STREAM) @Operation( tags = {"Flows"}, summary = "Export flows as a ZIP archive of yaml sources." ) public HttpResponse<byte[]> exportByIds( @Parameter(description = "A list of tuple flow ID and namespace as flow identifiers") @Body List<IdWithNamespace> ids ) throws IOException { var flows = ids.stream() .map(id -> flowRepository.findByIdWithSource(tenantService.resolveTenant(), id.getNamespace(), id.getId()).orElseThrow()) .toList(); var bytes = zipFlows(flows); return HttpResponse.ok(bytes).header("Content-Disposition", "attachment; filename=\"flows.zip\""); }
// Posts three flow ids and verifies the returned archive contains exactly three entries.
@Test void exportByIds() throws IOException { List<IdWithNamespace> ids = List.of( new IdWithNamespace("io.kestra.tests", "each-object"), new IdWithNamespace("io.kestra.tests", "webhook"), new IdWithNamespace("io.kestra.tests", "task-flow")); byte[] zip = client.toBlocking().retrieve(HttpRequest.POST("/api/v1/flows/export/by-ids", ids), Argument.of(byte[].class)); File file = File.createTempFile("flows", ".zip"); Files.write(file.toPath(), zip); try(ZipFile zipFile = new ZipFile(file)) { assertThat(zipFile.stream().count(), is(3L)); } file.delete(); }
// Py4J call command: reads target object id, method name and arguments from the wire,
// invokes the method, and writes the encoded return command back, flushing immediately.
@Override public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException { String targetObjectId = reader.readLine(); String methodName = reader.readLine(); List<Object> arguments = getArguments(reader); ReturnObject returnObject = invokeMethod(methodName, targetObjectId, arguments); String returnCommand = Protocol.getOutputCommand(returnObject); logger.finest("Returning command: " + returnCommand); writer.write(returnCommand); writer.flush(); }
// Exercises methods called with a null argument ("n" token): void return, object return ("ro1")
// with a field side effect, and an int return ("i2"); output accumulates in sWriter across calls.
@Test public void testMethodWithNull() { String inputCommand = target + "\nmethod2\nn\ne\n"; try { command.execute("c", new BufferedReader(new StringReader(inputCommand)), writer); assertEquals("!yv\n", sWriter.toString()); inputCommand = target + "\nmethod4\nn\ne\n"; command.execute("c", new BufferedReader(new StringReader(inputCommand)), writer); assertEquals("!yv\n!yro1\n", sWriter.toString()); assertEquals(((ExampleClass) gateway.getObject("o1")).getField1(), 3); inputCommand = target + "\nmethod7\nn\ne\n"; command.execute("c", new BufferedReader(new StringReader(inputCommand)), writer); assertEquals("!yv\n!yro1\n!yi2\n", sWriter.toString()); } catch (Exception e) { e.printStackTrace(); fail(); } }
// Queries the max config id via the dialect-specific mapper; a null result (empty table) maps to 0.
@Override public long findConfigMaxId() { ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO); MapperResult mapperResult = configInfoMapper.findConfigMaxId(null); return Optional.ofNullable(databaseOperate.queryOne(mapperResult.getSql(), Long.class)).orElse(0L); }
// When the database reports 0 (or effectively no rows), findConfigMaxId must return 0.
@Test void testFindConfigMaxId0() { Mockito.when(databaseOperate.queryOne(anyString(), eq(Long.class))).thenReturn(0L); long configMaxId = embeddedConfigInfoPersistService.findConfigMaxId(); assertEquals(0, configMaxId); }
// Requests a new TaskManager pod from Kubernetes. The returned future is registered in
// requestResourceFutures keyed by pod name before pod creation starts. The first handler deals
// with creation outcome: on failure it removes and fails the pending future; on success it stops
// the pod if the request was cancelled in the meantime. The second handler reacts to the request
// future itself: a cancellation stops an already-created pod, RetryableException /
// KubernetesClientException are swallowed as transient, anything else is rethrown.
// Both handlers run on the main thread executor, so map mutations are single-threaded.
@Override public CompletableFuture<KubernetesWorkerNode> requestResource( TaskExecutorProcessSpec taskExecutorProcessSpec) { final KubernetesTaskManagerParameters parameters = createKubernetesTaskManagerParameters( taskExecutorProcessSpec, getBlockedNodeRetriever().getAllBlockedNodeIds()); final KubernetesPod taskManagerPod = KubernetesTaskManagerFactory.buildTaskManagerKubernetesPod( taskManagerPodTemplate, parameters); final String podName = taskManagerPod.getName(); final CompletableFuture<KubernetesWorkerNode> requestResourceFuture = new CompletableFuture<>(); requestResourceFutures.put(podName, requestResourceFuture); log.info( "Creating new TaskManager pod with name {} and resource <{},{}>.", podName, parameters.getTaskManagerMemoryMB(), parameters.getTaskManagerCPU()); final CompletableFuture<Void> createPodFuture = flinkKubeClient.createTaskManagerPod(taskManagerPod); FutureUtils.assertNoException( createPodFuture.handleAsync( (ignore, exception) -> { if (exception != null) { log.warn( "Could not create pod {}, exception: {}", podName, exception); CompletableFuture<KubernetesWorkerNode> future = requestResourceFutures.remove(taskManagerPod.getName()); if (future != null) { future.completeExceptionally(exception); } } else { if (requestResourceFuture.isCancelled()) { stopPod(podName); log.info( "pod {} is cancelled before create pod finish, stop it.", podName); } else { log.info("Pod {} is created.", podName); } } return null; }, getMainThreadExecutor())); FutureUtils.assertNoException( requestResourceFuture.handle( (ignore, t) -> { if (t == null) { return null; } // Unwrap CompletionException cause if any if (t instanceof CompletionException && t.getCause() != null) { t = t.getCause(); } if (t instanceof CancellationException) { requestResourceFutures.remove(taskManagerPod.getName()); if (createPodFuture.isDone()) { log.info( "pod {} is cancelled before scheduled, stop it.", podName); stopPod(taskManagerPod.getName()); } } else if (t instanceof 
RetryableException || t instanceof KubernetesClientException) { // ignore transient / retriable errors } else { log.error("Error completing resource request.", t); ExceptionUtils.rethrow(t); } return null; })); return requestResourceFuture; }
// Requests a resource, then simulates the Kubernetes onAdded callback for the created pod and
// verifies the request future completes on the main thread with a worker node matching the pod name.
@Test void testOnPodAdded() throws Exception { new Context() { { final CompletableFuture<KubernetesPod> createPodFuture = new CompletableFuture<>(); final CompletableFuture<KubernetesWorkerNode> requestResourceFuture = new CompletableFuture<>(); flinkKubeClientBuilder.setCreateTaskManagerPodFunction( (pod) -> { createPodFuture.complete(pod); return FutureUtils.completedVoidFuture(); }); runTest( () -> { // request new pod runInMainThread( () -> getDriver() .requestResource(TASK_EXECUTOR_PROCESS_SPEC) .thenAccept(requestResourceFuture::complete)); final KubernetesPod pod = new TestingKubernetesPod( createPodFuture .get(TIMEOUT_SEC, TimeUnit.SECONDS) .getName(), true, false); // prepare validation: // - complete requestResourceFuture in main thread with correct // KubernetesWorkerNode final CompletableFuture<Void> validationFuture = requestResourceFuture.thenAccept( (workerNode) -> { validateInMainThread(); assertThat(workerNode.getResourceID()) .asString() .isEqualTo(pod.getName()); }); // send onAdded event getPodCallbackHandler().onAdded(Collections.singletonList(pod)); // make sure finishing validation validationFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS); }); } }; }
// Convenience overload: formats a number as Chinese numerals, defaulting the third flag to false.
public static String format(double amount, boolean isUseTraditional) { return format(amount, isUseTraditional, false); }
// Checks negative decimal and round-ten formatting in simplified Chinese numerals.
@Test public void formatTest2() { String f1 = NumberChineseFormatter.format(-0.3, false, false); assertEquals("负零点三", f1); f1 = NumberChineseFormatter.format(10, false, false); assertEquals("一十", f1); }
// Returns a new collection holding the elements of a with one occurrence removed per
// element of b (multiset difference). Raw types are kept for signature compatibility.
public static Collection subtract(final Collection a, final Collection b) {
    final ArrayList result = new ArrayList(a);
    for (final Object element : b) {
        // List.remove drops at most one matching occurrence, mirroring multiset semantics.
        result.remove(element);
    }
    return result;
}
// ["a","b"] minus ["b","c"] must yield exactly ["a"].
@Test void testSubtract() { List<String> subtract = (List<String>) CollectionUtils.subtract(Arrays.asList("a", "b"), Arrays.asList("b", "c")); assertEquals(1, subtract.size()); assertEquals("a", subtract.get(0)); }
// Replaces a node-id field value with the node's title; falls back to the raw value when
// the node is unknown.
@Override public Object decorate(RequestedField field, Object value, SearchUser searchUser) { try { final Node node = nodeService.byNodeId(value.toString()); return node.getTitle(); } catch (NodeNotFoundException e) { return value; } }
// Expects the node title (which evidently embeds the short id and hostname) for a known node id.
@Test void decorate() { final Object decorated = decorator.decorate(RequestedField.parse(Message.FIELD_GL2_SOURCE_NODE), "5ca1ab1e-0000-4000-a000-000000000000", TestSearchUser.builder().build()); Assertions.assertThat(decorated).isEqualTo("5ca1ab1e / my-host.example.com"); }
// Validates the GitHub app configuration: a missing/invalid app id short-circuits to a failed
// status; otherwise the full configuration (private key, API URL) is checked.
public ConfigCheckResult checkConfig() { Optional<Long> appId = getAppId(); if (appId.isEmpty()) { return failedApplicationStatus(INVALID_APP_ID_STATUS); } GithubAppConfiguration githubAppConfiguration = new GithubAppConfiguration(appId.get(), gitHubSettings.privateKey(), gitHubSettings.apiURLOrDefault()); return checkConfig(githubAppConfiguration); }
// A non-numeric app id must fail both provisioning modes and report no installations.
@Test public void checkConfig_whenAppIdNotValid_shouldReturnFailedAppCheck() { when(gitHubSettings.appId()).thenReturn("not a number"); ConfigCheckResult checkResult = configValidator.checkConfig(); assertThat(checkResult.application().autoProvisioning()).isEqualTo(ConfigStatus.failed(INVALID_APP_ID_STATUS)); assertThat(checkResult.application().jit()).isEqualTo(ConfigStatus.failed(INVALID_APP_ID_STATUS)); assertThat(checkResult.installations()).isEmpty(); }
// Snapshot of the ring-buffered log lines, newest first. Walks backwards from the current
// write index, wrapping at the start, until it hits an unfilled (null) slot or completes
// a full cycle of the buffer.
@NonNull
public static synchronized ArrayList<String> getAllLogLinesList() {
    final ArrayList<String> lines = new ArrayList<>(msLogs.length);
    if (msLogs.length == 0) {
        return lines;
    }
    int cursor = msLogIndex;
    do {
        cursor = (cursor == 0) ? msLogs.length - 1 : cursor - 1;
        final String logLine = msLogs[cursor];
        if (logLine == null) {
            // Buffer never wrapped this far; everything written has been collected.
            break;
        }
        lines.add(logLine);
    } while (cursor != msLogIndex);
    return lines;
}
// Fills the ring buffer past capacity, checks the capped size (255), then verifies new
// entries keep the size constant and appear newest-first with level/tag prefixes.
@Test public void testGetAllLogLinesList() throws Exception { // filling up the log buffer for (int i = 0; i < 1024; i++) Logger.d("t", "t"); final int initialListSize = Logger.getAllLogLinesList().size(); // 255 is the max lines count Assert.assertEquals(255, initialListSize); Logger.d("mTag", "Text1"); Assert.assertEquals(initialListSize, Logger.getAllLogLinesList().size()); Logger.i("TAG2", "Text2"); Assert.assertEquals(initialListSize, Logger.getAllLogLinesList().size()); final String expectedFirstLine = "-D-[mTag] Text1"; final String expectedSecondLine = "-I-[TAG2] Text2"; Assert.assertTrue(Logger.getAllLogLinesList().get(1).endsWith(expectedFirstLine)); Assert.assertTrue(Logger.getAllLogLinesList().get(0).endsWith(expectedSecondLine)); }
/**
 * Reacts to fishing-related SPAM chat messages: a catch message marks the
 * session active and reveals the fishing overlays; the flying-fish message
 * triggers a notification.
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    // Only game SPAM messages carry the fishing text we care about.
    if (event.getType() != ChatMessageType.SPAM) {
        return;
    }

    final String chatText = event.getMessage();

    if (FISHING_CATCH_REGEX.matcher(chatText).find()) {
        session.setLastFishCaught(Instant.now());
        spotOverlay.setHidden(false);
        fishingSpotMinimapOverlay.setHidden(false);
    }

    if (chatText.equals("A flying fish jumps up and eats some of your minnows!")) {
        notifier.notify(config.flyingFishNotification(), "A flying fish is eating your minnows!");
    }
}
@Test
public void testKarambwanji() {
    // A Karambwanji catch message (SPAM type) should register a catch on the session.
    ChatMessage message = new ChatMessage();
    message.setType(ChatMessageType.SPAM);
    message.setMessage("You catch 15 Karambwanji.");

    fishingPlugin.onChatMessage(message);

    assertNotNull(fishingPlugin.getSession().getLastFishCaught());
}
/**
 * Runs the given runnable after the given future completes, executing it via a
 * direct executor (i.e. on the thread that completes the input future).
 *
 * @param future future whose completion triggers the runnable
 * @param runnable action to run afterwards; may throw a checked exception
 * @return future that completes once the runnable has run
 */
public static CompletableFuture<Void> runAfterwards(
        CompletableFuture<?> future, RunnableWithException runnable) {
    return runAfterwardsAsync(future, runnable, Executors.directExecutor());
}
@Test
void testRunAfterwards() {
    final CompletableFuture<Void> inputFuture = new CompletableFuture<>();
    final OneShotLatch runnableLatch = new OneShotLatch();

    final CompletableFuture<Void> runFuture = FutureUtils.runAfterwards(inputFuture, runnableLatch::trigger);

    // Nothing may run before the input future completes.
    assertThat(runnableLatch.isTriggered()).isFalse();
    assertThat(runFuture).isNotDone();

    inputFuture.complete(null);

    // Completion of the input future triggers the runnable and completes the result future.
    assertThat(runnableLatch.isTriggered()).isTrue();
    assertThatFuture(runFuture).eventuallySucceeds();
}
/**
 * Resolves the file that stores the selector-to-layer mapping for the given digest.
 * Layout: {@code <cacheDirectory>/<SELECTORS_DIRECTORY>/<selector hash>}.
 */
Path getSelectorFile(DescriptorDigest selector) {
    return cacheDirectory.resolve(SELECTORS_DIRECTORY).resolve(selector.getHash());
}
@Test
public void testGetSelectorFile() throws DigestException {
    // A selector digest should map to cache/directory/selectors/<hash>.
    DescriptorDigest selector =
        DescriptorDigest.fromHash(
            "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc");
    Assert.assertEquals(
        Paths.get(
            "cache",
            "directory",
            "selectors",
            "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"),
        TEST_CACHE_STORAGE_FILES.getSelectorFile(selector));
}
/**
 * Stores a push challenge for the given account with the given TTL.
 * The conditional expression makes this a create-only write.
 *
 * @return {@code true} when stored, {@code false} when a challenge already
 *         exists for the account (conditional check failed)
 */
public boolean add(final UUID accountUuid, final byte[] challengeToken, final Duration ttl) {
    // Build the item up front for readability; the condition rejects the write
    // when a row for this account uuid is already present.
    final var item = Map.of(
            KEY_ACCOUNT_UUID, AttributeValues.fromUUID(accountUuid),
            ATTR_CHALLENGE_TOKEN, AttributeValues.fromByteArray(challengeToken),
            ATTR_TTL, AttributeValues.fromLong(getExpirationTimestamp(ttl)));

    try {
        db().putItem(PutItemRequest.builder()
                .tableName(tableName)
                .item(item)
                .conditionExpression("attribute_not_exists(#uuid)")
                .expressionAttributeNames(UUID_NAME_MAP)
                .build());
        return true;
    } catch (final ConditionalCheckFailedException e) {
        return false;
    }
}
@Test
void add() {
    final UUID uuid = UUID.randomUUID();

    // First insert for a uuid succeeds; a second insert for the same uuid is rejected.
    assertTrue(pushChallengeDynamoDb.add(uuid, generateRandomToken(), Duration.ofMinutes(1)));
    assertFalse(pushChallengeDynamoDb.add(uuid, generateRandomToken(), Duration.ofMinutes(1)));
}
/**
 * Fuzzy (LIKE) paged search over config_info. When tag filters are supplied in
 * {@code configAdvanceInfo}, the query is built against the config/tags relation
 * table instead. The content of every returned row is decrypted in place.
 */
@Override
public Page<ConfigInfo> findConfigInfoLike4Page(final int pageNo, final int pageSize, final String dataId,
        final String group, final String tenant, final Map<String, Object> configAdvanceInfo) {
    String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    // Optional advanced filters; all are null/blank-tolerant.
    final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName");
    final String content = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("content");
    final String types = Optional.ofNullable(configAdvanceInfo).map(e -> (String) e.get(ParametersField.TYPES)).orElse(null);
    final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags");
    PaginationHelper<ConfigInfo> helper = createPaginationHelper();
    MapperResult sqlCountRows;
    MapperResult sqlFetchRows;
    // Offset/limit context; blank filters are simply omitted from the WHERE clause.
    MapperContext context = new MapperContext((pageNo - 1) * pageSize, pageSize);
    context.putWhereParameter(FieldConstant.TENANT_ID, generateLikeArgument(tenantTmp));
    if (!StringUtils.isBlank(dataId)) {
        context.putWhereParameter(FieldConstant.DATA_ID, generateLikeArgument(dataId));
    }
    if (!StringUtils.isBlank(group)) {
        context.putWhereParameter(FieldConstant.GROUP_ID, generateLikeArgument(group));
    }
    if (!StringUtils.isBlank(appName)) {
        context.putWhereParameter(FieldConstant.APP_NAME, appName);
    }
    if (!StringUtils.isBlank(content)) {
        context.putWhereParameter(FieldConstant.CONTENT, generateLikeArgument(content));
    }
    if (StringUtils.isNotBlank(types)) {
        String[] typesArr = types.split(Symbols.COMMA);
        context.putWhereParameter(FieldConstant.TYPE, typesArr);
    }
    if (StringUtils.isNotBlank(configTags)) {
        // Tag filtering requires joining against the config/tags relation table.
        String[] tagArr = configTags.split(",");
        context.putWhereParameter(FieldConstant.TAG_ARR, tagArr);
        ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper(
                dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION);
        sqlCountRows = configTagsRelationMapper.findConfigInfoLike4PageCountRows(context);
        sqlFetchRows = configTagsRelationMapper.findConfigInfoLike4PageFetchRows(context);
    } else {
        ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
                TableConstant.CONFIG_INFO);
        sqlCountRows = configInfoMapper.findConfigInfoLike4PageCountRows(context);
        sqlFetchRows = configInfoMapper.findConfigInfoLike4PageFetchRows(context);
    }
    try {
        Page<ConfigInfo> page = helper.fetchPageLimit(sqlCountRows, sqlFetchRows, pageNo, pageSize,
                CONFIG_INFO_ROW_MAPPER);
        // Decrypt each row's content in place before handing the page back to the caller.
        for (ConfigInfo configInfo : page.getPageItems()) {
            Pair<String, String> pair = EncryptionHandler.decryptHandler(configInfo.getDataId(),
                    configInfo.getEncryptedDataKey(), configInfo.getContent());
            configInfo.setContent(pair.getSecond());
        }
        return page;
    } catch (CannotGetJdbcConnectionException e) {
        LogUtil.FATAL_LOG.error("[db-error] " + e, e);
        throw e;
    }
}
// Verifies the tag-filtered branch of findConfigInfoLike4Page: count and fetch queries
// receive the tenant/dataId/group/appName/content/tag arguments, and the page carries
// the mocked rows plus the mocked total count.
@Test
void testFindConfigInfoLike4PageWithTags() {
    String appName = "appName1234";
    String content = "content123";
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    configAdvanceInfo.put("appName", appName);
    configAdvanceInfo.put("content", content);
    configAdvanceInfo.put("config_tags", "tags,tag2");
    // Wildcards '*' in dataId/group are translated to SQL '%' by the service.
    String dataId = "dataId4567222*";
    String group = "group3456789*";
    String tenant = "tenant4567890";
    //mock total count
    when(jdbcTemplate.queryForObject(anyString(),
            eq(new Object[] {tenant, dataId.replaceAll("\\*", "%"), group.replaceAll("\\*", "%"), appName, content,
                    "tags", "tag2"}), eq(Integer.class))).thenReturn(new Integer(9));
    //mock page list
    List<ConfigInfo> result = new ArrayList<>();
    result.add(createMockConfigInfo(0));
    result.add(createMockConfigInfo(1));
    result.add(createMockConfigInfo(2));
    when(jdbcTemplate.query(anyString(),
            eq(new Object[] {tenant, dataId.replaceAll("\\*", "%"), group.replaceAll("\\*", "%"), appName, content,
                    "tags", "tag2"}), eq(CONFIG_INFO_ROW_MAPPER))).thenReturn(result);
    Page<ConfigInfo> configInfo4Page = externalConfigInfoPersistService.findConfigInfoLike4Page(1, 3, dataId, group,
            tenant, configAdvanceInfo);
    assertEquals(result.size(), configInfo4Page.getPageItems().size());
    assertEquals(9, configInfo4Page.getTotalCount());
}
/** Pages operation logs by the query conditions carried in {@code pageReqVO}. */
@Override
public PageResult<OperateLogDO> getOperateLogPage(OperateLogPageReqVO pageReqVO) {
    return operateLogMapper.selectPage(pageReqVO);
}
@Test
public void testGetOperateLogPage_dto() {
    // Build an operation log entry that matches all query conditions.
    OperateLogDO operateLogDO = RandomUtils.randomPojo(OperateLogDO.class, o -> {
        o.setUserId(2048L);
        o.setBizId(999L);
        o.setType("订单");
    });
    operateLogMapper.insert(operateLogDO);
    // Entry whose userId does not match.
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setUserId(1024L)));
    // Entry whose bizId does not match.
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setBizId(888L)));
    // Entry whose type does not match.
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setType("退款")));
    // Build the query parameters.
    OperateLogPageReqDTO reqDTO = new OperateLogPageReqDTO();
    reqDTO.setUserId(2048L);
    reqDTO.setBizId(999L);
    reqDTO.setType("订单");
    // Invoke.
    PageResult<OperateLogDO> pageResult = operateLogServiceImpl.getOperateLogPage(reqDTO);
    // Assert: exactly one entry satisfies all conditions.
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(operateLogDO, pageResult.getList().get(0));
}
/**
 * Returns an iterator over this result's queryable entries.
 * Each call creates a fresh {@code It} instance.
 */
@Override
public Iterator<QueryableEntry> iterator() {
    return new It();
}
// NOTE(review): this pins a non-standard iterator contract — next() on an empty
// result yields null instead of throwing NoSuchElementException. Confirm intended.
@Test
public void testIterator_empty_next() {
    assertNull(result.iterator().next());
}
/**
 * Instantiates the S3 client factory. Uses the implementation class named by
 * {@code s3.client-factory-impl} when present, otherwise falls back to the
 * default AWS client factory derived from the properties.
 */
@SuppressWarnings("unchecked")
public static <T> T initialize(Map<String, String> properties) {
  final String factoryImpl =
      PropertyUtil.propertyAsString(properties, S3FileIOProperties.CLIENT_FACTORY, null);
  return Strings.isNullOrEmpty(factoryImpl)
      ? (T) AwsClientFactories.from(properties)
      : (T) loadClientFactory(factoryImpl, properties);
}
@Test
public void testS3FileIOImplCatalogPropertyNotDefined() {
    // don't set anything — exercises the default (no client-factory-impl) branch
    Map<String, String> properties = Maps.newHashMap();
    Object factoryImpl = S3FileIOAwsClientFactories.initialize(properties);
    assertThat(factoryImpl)
        .as(
            "should instantiate an object of type AwsClientFactory when s3.client-factory-impl is not set")
        .isInstanceOf(AwsClientFactory.class);
}
/**
 * Records the size of a BLOB for the given job in the cache-size accounting.
 * A duplicate (jobId, blobKey) pair is ignored with a warning; exceeding the
 * configured size limit only logs a warning, it does not reject the BLOB.
 */
public void track(JobID jobId, BlobKey blobKey, long size) {
    checkNotNull(jobId);
    checkNotNull(blobKey);
    checkArgument(size >= 0);

    synchronized (lock) {
        // putIfAbsent returning null means this (jobId, blobKey) was not tracked yet.
        if (caches.putIfAbsent(Tuple2.of(jobId, blobKey), size) == null) {
            blobKeyByJob.computeIfAbsent(jobId, ignore -> new HashSet<>()).add(blobKey);

            total += size;
            if (total > sizeLimit) {
                LOG.warn(
                        "The overall size of BLOBs in the cache exceeds "
                                + "the limit. Limit = [{}], Current: [{}], "
                                + "The size of next BLOB: [{}].",
                        sizeLimit,
                        total,
                        size);
            }
        } else {
            LOG.warn(
                    "Attempt to track a duplicated BLOB. This may indicate a duplicate upload "
                            + "or a hash collision. Ignoring newest upload. "
                            + "JobID = [{}], BlobKey = [{}]",
                    jobId,
                    blobKey);
        }
    }
}
// Relies on tracker/jobId/blobKey fixtures prepared in the test setup (not visible here):
// a BLOB of size 3 is expected to have been tracked for jobId under blobKey.
@Test
void testTrack() {
    assertThat(tracker.getSize(jobId, blobKey)).isEqualTo(3L);
    assertThat(tracker.getBlobKeysByJobId(jobId)).contains(blobKey);
}
/**
 * Final stage of the foreign-key join: validates each subscription response
 * against the current value of this table and either forwards the joined
 * result (or a tombstone) or drops the stale response.
 */
@Override
public Processor<K, SubscriptionResponseWrapper<VO>, K, VR> get() {
    return new ContextualProcessor<K, SubscriptionResponseWrapper<VO>, K, VR>() {
        private String valueHashSerdePseudoTopic;
        private Serializer<V> runtimeValueSerializer = constructionTimeValueSerializer;

        private KTableValueGetter<K, V> valueGetter;
        private Sensor droppedRecordsSensor;

        @SuppressWarnings("unchecked")
        @Override
        public void init(final ProcessorContext<K, VR> context) {
            super.init(context);
            valueHashSerdePseudoTopic = valueHashSerdePseudoTopicSupplier.get();
            valueGetter = valueGetterSupplier.get();
            valueGetter.init(context);
            // Fall back to the context-configured value serde when none was supplied at construction.
            if (runtimeValueSerializer == null) {
                runtimeValueSerializer = (Serializer<V>) context.valueSerde().serializer();
            }
            final InternalProcessorContext<?, ?> internalProcessorContext = (InternalProcessorContext<?, ?>) context;
            droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(
                Thread.currentThread().getName(),
                internalProcessorContext.taskId().toString(),
                internalProcessorContext.metrics()
            );
        }

        @Override
        public void process(final Record<K, SubscriptionResponseWrapper<VO>> record) {
            if (record.value().getVersion() != SubscriptionResponseWrapper.CURRENT_VERSION) {
                //Guard against modifications to SubscriptionResponseWrapper. Need to ensure that there is
                //compatibility with previous versions to enable rolling upgrades. Must develop a strategy for
                //upgrading from older SubscriptionWrapper versions to newer versions.
                throw new UnsupportedVersionException("SubscriptionResponseWrapper is of an incompatible version.");
            }
            final ValueAndTimestamp<V> currentValueWithTimestamp = valueGetter.get(record.key());

            // Hash of the table's current value for this key; null when no value exists.
            final long[] currentHash = currentValueWithTimestamp == null ?
                null :
                Murmur3.hash128(runtimeValueSerializer.serialize(valueHashSerdePseudoTopic, currentValueWithTimestamp.value()));

            final long[] messageHash = record.value().getOriginalValueHash();

            //If this value doesn't match the current value from the original table, it is stale and should be discarded.
            if (java.util.Arrays.equals(messageHash, currentHash)) {
                final VR result;

                if (record.value().getForeignValue() == null && (!leftJoin || currentValueWithTimestamp == null)) {
                    result = null; //Emit tombstone
                } else {
                    result = joiner.apply(currentValueWithTimestamp == null ? null : currentValueWithTimestamp.value(), record.value().getForeignValue());
                }
                context().forward(record.withValue(result));
            } else {
                LOG.trace("Dropping FK-join response due to hash mismatch. Expected {}. Actual {}", messageHash, currentHash);
                droppedRecordsSensor.record();
            }
        }
    };
}
// Left join with both sides null (and a null original-value hash, which matches the
// absent current value) must forward a tombstone rather than dropping the record.
@Test
public void shouldEmitTombstoneForLeftJoinWhenRightIsNullAndLeftIsNull() {
    final TestKTableValueGetterSupplier<String, String> valueGetterSupplier =
        new TestKTableValueGetterSupplier<>();
    final boolean leftJoin = true;
    final ResponseJoinProcessorSupplier<String, String, String, String> processorSupplier =
        new ResponseJoinProcessorSupplier<>(
            valueGetterSupplier,
            STRING_SERIALIZER,
            () -> "value-hash-dummy-topic",
            JOINER,
            leftJoin
        );
    final Processor<String, SubscriptionResponseWrapper<String>, String, String> processor = processorSupplier.get();
    final MockInternalNewProcessorContext<String, String> context = new MockInternalNewProcessorContext<>();
    processor.init(context);
    context.setRecordMetadata("topic", 0, 0);

    valueGetterSupplier.put("lhs1", null);
    // Null hash matches the missing left-hand value, so the response is not considered stale.
    final long[] hash = null;
    processor.process(new Record<>("lhs1", new SubscriptionResponseWrapper<>(hash, null, 0), 0));
    final List<MockProcessorContext.CapturedForward<? extends String, ? extends String>> forwarded = context.forwarded();
    assertThat(forwarded.size(), is(1));
    assertThat(forwarded.get(0).record(), is(new Record<>("lhs1", null, 0)));
}
/**
 * Copies all tokens and secret keys from {@code other} into this credentials
 * object, overwriting any entries that already exist under the same alias.
 */
public void addAll(Credentials other) {
    // true = overwrite duplicates with the values from `other`.
    addAll(other, true);
}
// Verifies the default (overwrite) semantics of addAll: duplicates are replaced
// by the incoming values, existing non-duplicates are kept, new entries are added.
@Test
public void addAll() {
    Credentials creds = new Credentials();
    creds.addToken(service[0], token[0]);
    creds.addToken(service[1], token[1]);
    creds.addSecretKey(secret[0], secret[0].getBytes());
    creds.addSecretKey(secret[1], secret[1].getBytes());

    Credentials credsToAdd = new Credentials();
    // one duplicate with different value, one new
    credsToAdd.addToken(service[0], token[3]);
    credsToAdd.addToken(service[2], token[2]);
    credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
    credsToAdd.addSecretKey(secret[2], secret[2].getBytes());

    creds.addAll(credsToAdd);
    assertEquals(3, creds.numberOfTokens());
    assertEquals(3, creds.numberOfSecretKeys());
    // existing token & secret should be overwritten
    assertEquals(token[3], creds.getToken(service[0]));
    assertEquals(secret[3], new Text(creds.getSecretKey(secret[0])));
    // non-duplicate token & secret should be present
    assertEquals(token[1], creds.getToken(service[1]));
    assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
    // new token & secret should be added
    assertEquals(token[2], creds.getToken(service[2]));
    assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
}
/** Formats the event's timestamp using the shared caching date formatter. */
public String convert(ILoggingEvent le) {
    return cachingDateFormatter.format(le.getTimeStamp());
}
// A null format option must fall back to ISO-8601 output for the timestamp.
@Test
public void convertsDateAsIso8601WhenNull() {
    assertEquals(_isoDateString, convert(_timestamp, new String[]{null}));
}
/**
 * Parses the given command-line arguments (after params-file expansion) into a
 * {@link CommandLineOptions}. The first argument that does not start with '-'
 * and everything after it are treated as file names. Supports both
 * "--flag=value" and "--flag value" forms.
 *
 * @throws IllegalArgumentException on an unrecognized flag
 */
static CommandLineOptions parse(Iterable<String> options) {
  CommandLineOptions.Builder optionsBuilder = CommandLineOptions.builder();
  List<String> expandedOptions = new ArrayList<>();
  expandParamsFiles(options, expandedOptions);
  Iterator<String> it = expandedOptions.iterator();
  while (it.hasNext()) {
    String option = it.next();
    if (!option.startsWith("-")) {
      // First non-flag argument: it and all remaining arguments are file names.
      optionsBuilder.filesBuilder().add(option).addAll(it);
      break;
    }
    // Split "--flag=value"; value stays null for the separate-argument form.
    String flag;
    String value;
    int idx = option.indexOf('=');
    if (idx >= 0) {
      flag = option.substring(0, idx);
      value = option.substring(idx + 1);
    } else {
      flag = option;
      value = null;
    }
    // NOTE: update usage information in UsageException when new flags are added
    switch (flag) {
      case "-i":
      case "-r":
      case "-replace":
      case "--replace":
        optionsBuilder.inPlace(true);
        break;
      case "--lines":
      case "-lines":
      case "--line":
      case "-line":
        parseRangeSet(optionsBuilder.linesBuilder(), getValue(flag, it, value));
        break;
      case "--offset":
      case "-offset":
        optionsBuilder.addOffset(parseInteger(it, flag, value));
        break;
      case "--length":
      case "-length":
        optionsBuilder.addLength(parseInteger(it, flag, value));
        break;
      case "--aosp":
      case "-aosp":
      case "-a":
        optionsBuilder.aosp(true);
        break;
      case "--version":
      case "-version":
      case "-v":
        optionsBuilder.version(true);
        break;
      case "--help":
      case "-help":
      case "-h":
        optionsBuilder.help(true);
        break;
      case "--fix-imports-only":
        optionsBuilder.fixImportsOnly(true);
        break;
      case "--skip-sorting-imports":
        optionsBuilder.sortImports(false);
        break;
      case "--skip-removing-unused-imports":
        optionsBuilder.removeUnusedImports(false);
        break;
      case "--skip-reflowing-long-strings":
        optionsBuilder.reflowLongStrings(false);
        break;
      case "--skip-javadoc-formatting":
        optionsBuilder.formatJavadoc(false);
        break;
      case "-":
        // Lone dash: read the source from stdin.
        optionsBuilder.stdin(true);
        break;
      case "-n":
      case "--dry-run":
        optionsBuilder.dryRun(true);
        break;
      case "--set-exit-if-changed":
        optionsBuilder.setExitIfChanged(true);
        break;
      case "-assume-filename":
      case "--assume-filename":
        optionsBuilder.assumeFilename(getValue(flag, it, value));
        break;
      default:
        throw new IllegalArgumentException("unexpected flag: " + flag);
    }
  }
  return optionsBuilder.build();
}
@Test
public void hello() {
    CommandLineOptions options =
        CommandLineOptionsParser.parse(
            Arrays.asList("-lines=1:10,20:30", "-i", "Hello.java", "Goodbye.java"));
    // 1-indexed closed input ranges become 0-indexed half-open ranges.
    assertThat(options.lines().asRanges())
        .containsExactly(Range.closedOpen(0, 10), Range.closedOpen(19, 30));
    assertThat(options.inPlace()).isTrue();
    assertThat(options.files()).containsExactly("Hello.java", "Goodbye.java");
}
/**
 * Lists files under {@code fileUri} (optionally recursively), filtered by the
 * optional include/exclude {@link PathMatcher} patterns. Directories are skipped,
 * and each surviving path is normalized via SegmentGenerationUtils.getFileURI().
 *
 * @throws RuntimeException when no file matches the given patterns
 */
public static List<String> listMatchedFilesWithRecursiveOption(PinotFS pinotFs, URI fileUri,
    @Nullable String includePattern, @Nullable String excludePattern, boolean searchRecursively)
    throws Exception {
  String[] files;
  // listFiles throws IOException
  files = pinotFs.listFiles(fileUri, searchRecursively);
  //TODO: sort input files based on creation time
  PathMatcher includeFilePathMatcher = null;
  if (includePattern != null) {
    includeFilePathMatcher = FileSystems.getDefault().getPathMatcher(includePattern);
  }
  PathMatcher excludeFilePathMatcher = null;
  if (excludePattern != null) {
    excludeFilePathMatcher = FileSystems.getDefault().getPathMatcher(excludePattern);
  }
  List<String> filteredFiles = new ArrayList<>();
  for (String file : files) {
    // Include filter first, then exclude filter.
    if (includeFilePathMatcher != null) {
      if (!includeFilePathMatcher.matches(Paths.get(file))) {
        continue;
      }
    }
    if (excludeFilePathMatcher != null) {
      if (excludeFilePathMatcher.matches(Paths.get(file))) {
        continue;
      }
    }
    if (!pinotFs.isDirectory(new URI(sanitizeURIString(file)))) {
      // In case PinotFS implementations list files without a scheme (e.g. hdfs://), then we may lose it in the
      // input file path. Call SegmentGenerationUtils.getFileURI() to fix this up.
      // getFileURI throws URISyntaxException
      filteredFiles.add(SegmentGenerationUtils.getFileURI(file, fileUri).toString());
    }
  }
  if (filteredFiles.isEmpty()) {
    throw new RuntimeException(String.format(
        "No file found in the input directory: %s matching includeFileNamePattern: %s,"
            + " excludeFileNamePattern: %s", fileUri, includePattern, excludePattern));
  }
  return filteredFiles;
}
// With recursion disabled, only the top-level CSV should match; the copy in the
// 2009 subdirectory must be ignored even though it matches the glob.
@Test
public void testMatchFilesRecursiveSearchOnNonRecursiveInputFilePattern()
    throws Exception {
  File testDir = makeTestDir();
  File inputDir = new File(testDir, "dir");
  File inputSubDir1 = new File(inputDir, "2009");
  inputSubDir1.mkdirs();
  File inputFile1 = new File(inputDir, "input.csv");
  FileUtils.writeLines(inputFile1, Lists.newArrayList("col1,col2", "value1,1", "value2,2"));
  File inputFile2 = new File(inputSubDir1, "input.csv");
  FileUtils.writeLines(inputFile2, Lists.newArrayList("col1,col2", "value3,3", "value4,4"));

  URI inputDirURI = new URI(inputDir.getAbsolutePath());
  if (inputDirURI.getScheme() == null) {
    inputDirURI = new File(inputDir.getAbsolutePath()).toURI();
  }
  PinotFS inputDirFS = PinotFSFactory.create(inputDirURI.getScheme());
  String includePattern = "glob:" + inputDir.getAbsolutePath() + "/*.csv";
  List<String> files =
      SegmentGenerationUtils.listMatchedFilesWithRecursiveOption(inputDirFS, inputDirURI, includePattern, null,
          false);
  Assert.assertEquals(files.size(), 1);
}
/**
 * Scans the whole pattern into a token list, dispatching each character to the
 * handler of the current lexer state. After the loop, the terminal state decides
 * how to flush the remaining buffer.
 *
 * @throws ScanException if the pattern ends inside a format modifier or option
 */
List<Token> tokenize() throws ScanException {
    List<Token> tokenList = new ArrayList<Token>();
    StringBuffer buf = new StringBuffer();
    while (pointer < patternLength) {
        char c = pattern.charAt(pointer);
        pointer++;
        switch (state) {
        case LITERAL_STATE:
            handleLiteralState(c, tokenList, buf);
            break;
        case FORMAT_MODIFIER_STATE:
            handleFormatModifierState(c, tokenList, buf);
            break;
        case OPTION_STATE:
            processOption(c, tokenList, buf);
            break;
        case KEYWORD_STATE:
            handleKeywordState(c, tokenList, buf);
            break;
        case RIGHT_PARENTHESIS_STATE:
            handleRightParenthesisState(c, tokenList, buf);
            break;
        default:
        }
    }
    // EOS — flush whatever the scanner ended in.
    switch (state) {
    case LITERAL_STATE:
        addValuedToken(Token.LITERAL, buf, tokenList);
        break;
    case KEYWORD_STATE:
        tokenList.add(new Token(Token.SIMPLE_KEYWORD, buf.toString()));
        break;
    case RIGHT_PARENTHESIS_STATE:
        tokenList.add(Token.RIGHT_PARENTHESIS_TOKEN);
        break;
    case FORMAT_MODIFIER_STATE:
    case OPTION_STATE:
        // A pattern may not terminate in the middle of a modifier or option.
        throw new ScanException("Unexpected end of pattern string");
    }
    return tokenList;
}
// Composite keywords like %d(...) and %subst(...) must produce a COMPOSITE_KEYWORD
// token followed by the tokens of their parenthesized content.
@Test
public void compositedKeyword() throws ScanException {
    {
        List<Token> tl = new TokenStream("%d(A)", new AlmostAsIsEscapeUtil()).tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.COMPOSITE_KEYWORD, "d"));
        witness.add(new Token(Token.LITERAL, "A"));
        witness.add(Token.RIGHT_PARENTHESIS_TOKEN);
        assertEquals(witness, tl);
    }
    {
        // Nested simple keyword inside a composite keyword's parentheses.
        List<Token> tl = new TokenStream("a %subst(%b C)", new AlmostAsIsEscapeUtil()).tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(new Token(Token.LITERAL, "a "));
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.COMPOSITE_KEYWORD, "subst"));
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "b"));
        witness.add(new Token(Token.LITERAL, " C"));
        witness.add(Token.RIGHT_PARENTHESIS_TOKEN);
        assertEquals(witness, tl);
    }
}
/**
 * Wraps the given resource name with this config's namespace via
 * {@link NamespaceUtil#wrapNamespace}.
 *
 * @deprecated NOTE(review): no replacement is visible from here — confirm the
 *             intended successor API before removal.
 */
@Deprecated
public String withNamespace(String resource) {
    return NamespaceUtil.wrapNamespace(this.getNamespace(), resource);
}
// Exercises the collection overload of withNamespace; expects the namespace to be
// joined to each resource with the '%' separator.
@Test
public void testWithNamespace() {
    Set<String> resources = clientConfig.withNamespace(Collections.singleton(resource));
    assertTrue(resources.contains("lmq%resource"));
}
/**
 * Creates the {@link NodeLabelsProvider} configured for distributed node labels.
 * Returns {@code null} when no provider is configured (the feature is disabled).
 * Recognizes the "config" and "script" shorthand values; any other value is
 * treated as a fully-qualified provider class name.
 *
 * @throws IOException when the configured provider class cannot be instantiated
 */
protected NodeLabelsProvider createNodeLabelsProvider(Configuration conf)
    throws IOException {
  NodeLabelsProvider provider = null;
  String providerString =
      conf.get(YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG, null);
  if (providerString == null || providerString.trim().length() == 0) {
    // Seems like Distributed Node Labels configuration is not enabled
    return provider;
  }
  switch (providerString.trim().toLowerCase()) {
  case YarnConfiguration.CONFIG_NODE_DESCRIPTOR_PROVIDER:
    provider = new ConfigurationNodeLabelsProvider();
    break;
  case YarnConfiguration.SCRIPT_NODE_DESCRIPTOR_PROVIDER:
    provider = new ScriptBasedNodeLabelsProvider();
    break;
  default:
    // Treat the value as a class name. conf.getClass may throw a
    // RuntimeException for an unknown/incompatible class; all instantiation
    // failures are surfaced uniformly as an IOException.
    try {
      Class<? extends NodeLabelsProvider> labelsProviderClass =
          conf.getClass(YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG,
              null, NodeLabelsProvider.class);
      provider = labelsProviderClass.newInstance();
    } catch (InstantiationException | IllegalAccessException
        | RuntimeException e) {
      LOG.error("Failed to create NodeLabelsProvider based on Configuration", e);
      throw new IOException(
          "Failed to create NodeLabelsProvider : " + e.getMessage(), e);
    }
  }
  LOG.debug("Distributed Node Labels is enabled"
      + " with provider class as : {}", provider.getClass());
  return provider;
}
@Test
public void testCreationOfNodeLabelsProviderService()
    throws InterruptedException {
  try {
    NodeManager nodeManager = new NodeManager();
    Configuration conf = new Configuration();
    // Default configuration: distributed node labels disabled, so no provider.
    NodeLabelsProvider labelsProviderService =
        nodeManager.createNodeLabelsProvider(conf);
    Assert
        .assertNull(
            "LabelsProviderService should not be initialized in default configuration",
            labelsProviderService);

    // With valid className
    conf.set(
        YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG,
        "org.apache.hadoop.yarn.server.nodemanager.nodelabels.ConfigurationNodeLabelsProvider");
    labelsProviderService = nodeManager.createNodeLabelsProvider(conf);
    Assert.assertNotNull("LabelsProviderService should be initialized When "
        + "node labels provider class is configured", labelsProviderService);

    // With invalid className
    conf.set(YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG,
        "org.apache.hadoop.yarn.server.nodemanager.NodeManager");
    try {
      labelsProviderService = nodeManager.createNodeLabelsProvider(conf);
      Assert.fail("Expected to throw IOException on Invalid configuration");
    } catch (IOException e) {
      // exception expected on invalid configuration
    }
    // NOTE(review): labelsProviderService still holds the instance created in the
    // previous valid-class step (the invalid call threw before assigning), so this
    // assertion does not verify the invalid path — confirm intent.
    Assert.assertNotNull("LabelsProviderService should be initialized When "
        + "node labels provider class is configured", labelsProviderService);

    // With valid whitelisted configurations
    conf.set(YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG,
        YarnConfiguration.CONFIG_NODE_DESCRIPTOR_PROVIDER);
    labelsProviderService = nodeManager.createNodeLabelsProvider(conf);
    Assert.assertNotNull("LabelsProviderService should be initialized When "
        + "node labels provider class is configured", labelsProviderService);
  } catch (Exception e) {
    Assert.fail("Exception caught");
    e.printStackTrace();
  }
}
/**
 * Executes the single-entity operation for every id in the request and collects
 * per-entity failures instead of aborting the batch. Audit-log writes are
 * best-effort: a failure to store an audit event never changes the reported
 * outcome of the operation itself.
 *
 * @throws BadRequestException when the request carries no entity ids
 */
@Override
public BulkOperationResponse executeBulkOperation(final BulkOperationRequest bulkOperationRequest, final C userContext, final AuditParams params) {
    if (bulkOperationRequest.entityIds() == null || bulkOperationRequest.entityIds().isEmpty()) {
        throw new BadRequestException(NO_ENTITY_IDS_ERROR);
    }
    List<BulkOperationFailure> capturedFailures = new LinkedList<>();
    for (String entityId : bulkOperationRequest.entityIds()) {
        try {
            T entityModel = singleEntityOperationExecutor.execute(entityId, userContext);
            try {
                if (params != null) {
                    auditEventSender.success(getAuditActor(userContext), params.eventType(), successAuditLogContextCreator.create(entityModel, params.entityClass()));
                }
            } catch (Exception auditLogStoreException) {
                //exception on audit log storing should not result in failure report, as the operation itself is successful
                LOG.error("Failed to store in the audit log information about successful entity removal via bulk action ", auditLogStoreException);
            }
        } catch (Exception ex) {
            capturedFailures.add(new BulkOperationFailure(entityId, ex.getMessage()));
            try {
                if (params != null) {
                    auditEventSender.failure(getAuditActor(userContext), params.eventType(), failureAuditLogContextCreator.create(params.entityIdInPathParam(), entityId));
                }
            } catch (Exception auditLogStoreException) {
                //exception on audit log storing should not result in failure report, as the operation itself is successful
                LOG.error("Failed to store in the audit log information about failed entity removal via bulk action ", auditLogStoreException);
            }
        }
    }
    // Success count = requested ids minus captured failures.
    return new BulkOperationResponse(
            bulkOperationRequest.entityIds().size() - capturedFailures.size(),
            capturedFailures);
}
// A request with a null id list must be rejected up front with a BadRequestException.
@Test
void throwsBadRequestExceptionOnNullEntityIdsList() {
    assertThrows(BadRequestException.class,
            () -> toTest.executeBulkOperation(new BulkOperationRequest(null), context, params),
            NO_ENTITY_IDS_ERROR);
}
/**
 * Parses host, port (falling back to the default), database and query
 * properties out of the JDBC URL. Username and catalog are not used here.
 */
@Override
public ConnectionProperties parse(final String url, final String username, final String catalog) {
    final JdbcUrl parsedUrl = new StandardJdbcUrlParser().parse(url);
    return new StandardConnectionProperties(
            parsedUrl.getHostname(), parsedUrl.getPort(DEFAULT_PORT), parsedUrl.getDatabase(),
            null, parsedUrl.getQueryProperties(), new Properties());
}
// A malformed JDBC URL must surface as an UnrecognizedDatabaseURLException.
@Test
void assertNewConstructorFailure() {
    assertThrows(UnrecognizedDatabaseURLException.class, () -> parser.parse("jdbc:opengauss:xxxxxxxx", null, null));
}
/**
 * Deletes files by moving them to trash, then invalidates the cached resource
 * id of every deleted path.
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    this.trash(files, prompt, callback);
    // Drop stale id mappings for paths that no longer exist at their old location.
    files.keySet().forEach(f -> fileid.cache(f, null));
}
// Trashing a file moves it into the trash folder; trashing it again from there
// removes it permanently.
@Test
public void testDeleteFileInTrash() throws Exception {
    final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
    final Path file = new EueTouchFeature(session, fileid).touch(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final String resourceId = file.attributes().getFileId();
    final EueTrashFeature feature = new EueTrashFeature(session, fileid);
    feature.delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
    // "Gelöschte Dateien" is the provider's trash folder; it cannot itself be trashed.
    final Path trash = new Path("Gelöschte Dateien", EnumSet.of(directory, placeholder));
    trash.withAttributes(new EueAttributesFinderFeature(session, fileid).find(trash));
    assertFalse(feature.isSupported(trash));
    final Path trashed = new Path(trash, file.getName(), EnumSet.of(Path.Type.file));
    assertTrue(new EueFindFeature(session, fileid).find(trashed));
    new EueTrashFeature(session, fileid).delete(Collections.singletonList(trashed), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertFalse(new EueFindFeature(session, fileid).find(trashed));
}
/**
 * Periodically probes the running Elasticsearch version and compares it with
 * the version Graylog started with. On mismatch it publishes an urgent
 * ES_VERSION_MISMATCH notification; on (re)match it marks that notification
 * fixed. Skipped entirely when the version is pinned via override.
 */
@Override
public void doRun() {
    if (versionOverride.isPresent()) {
        LOG.debug("Elasticsearch version is set manually. Not running check.");
        return;
    }

    // Probe may come back empty (e.g. cluster unreachable); then nothing happens.
    final Optional<SearchVersion> probedVersion = this.versionProbe.probe(this.elasticsearchHosts);

    probedVersion.ifPresent(version -> {
        if (compatible(this.initialElasticsearchVersion, version)) {
            notificationService.fixed(Notification.Type.ES_VERSION_MISMATCH);
        } else {
            LOG.warn("Elasticsearch version currently running ({}) is incompatible with the one Graylog was started "
                    + "with ({}) - a restart is required!", version, initialElasticsearchVersion);
            final Notification notification = notificationService.buildNow()
                    .addType(Notification.Type.ES_VERSION_MISMATCH)
                    .addSeverity(Notification.Severity.URGENT)
                    .addDetail("initial_version", initialElasticsearchVersion.toString())
                    .addDetail("current_version", version.toString());
            // publishIfFirst avoids repeating the same notification on every run.
            notificationService.publishIfFirst(notification);
        }
    });
}
// With a manual version override in place, the check must not touch the
// notification service at all — even when the versions would mismatch.
@Test
void doesNotRunIfVersionOverrideIsSet() {
    createPeriodical(SearchVersion.elasticsearch(8, 0, 0), SearchVersion.elasticsearch(7, 0, 0)).doRun();

    verifyNoInteractions(notificationService);
}
/**
 * Resolves the client address for the request. The X-Forwarded-For header is
 * honored only when the direct peer is a known proxy; otherwise the socket
 * address is returned as-is.
 */
public static String getRemoteAddr(HttpServletRequest request) {
    final String directAddr = request.getRemoteAddr();
    final String forwardedFor = request.getHeader("X-Forwarded-For");
    if (forwardedFor == null || !ProxyServers.isProxyServer(directAddr)) {
        return directAddr;
    }
    // The first entry in the comma-separated header is the originating client.
    final String clientAddr = forwardedFor.split(",")[0].trim();
    return clientAddr.isEmpty() ? directAddr : clientAddr;
}
// When the direct peer is NOT a trusted proxy, the forwarded header must be
// ignored and the proxy's own address returned.
@Test
public void testRemoteAddrWithUntrustedProxy() {
    assertEquals(proxyAddr, getRemoteAddr(clientAddr, proxyAddr, false));
}
/**
 * Pre-computes the consumer-filter bit map for a dispatched message: evaluates
 * every registered filter expression for the topic against the message
 * properties and, for each match, hashes the consumer's bloom data into the
 * bit map stored on the request. No-op unless bit-map calculation is enabled.
 * All errors are caught and logged — dispatch must never fail here.
 */
@Override
public void dispatch(DispatchRequest request) {
    if (!this.brokerConfig.isEnableCalcFilterBitMap()) {
        return;
    }

    try {
        Collection<ConsumerFilterData> filterDatas = consumerFilterManager.get(request.getTopic());

        if (filterDatas == null || filterDatas.isEmpty()) {
            return;
        }

        Iterator<ConsumerFilterData> iterator = filterDatas.iterator();
        BitsArray filterBitMap = BitsArray.create(
            this.consumerFilterManager.getBloomFilter().getM()
        );

        long startTime = System.currentTimeMillis();
        while (iterator.hasNext()) {
            ConsumerFilterData filterData = iterator.next();

            if (filterData.getCompiledExpression() == null) {
                log.error("[BUG] Consumer in filter manager has no compiled expression! {}", filterData);
                continue;
            }

            if (filterData.getBloomFilterData() == null) {
                log.error("[BUG] Consumer in filter manager has no bloom data! {}", filterData);
                continue;
            }

            Object ret = null;
            try {
                // Evaluate this consumer's filter expression against the message properties.
                MessageEvaluationContext context = new MessageEvaluationContext(request.getPropertiesMap());

                ret = filterData.getCompiledExpression().evaluate(context);
            } catch (Throwable e) {
                log.error("Calc filter bit map error!commitLogOffset={}, consumer={}, {}", request.getCommitLogOffset(), filterData, e);
            }

            log.debug("Result of Calc bit map:ret={}, data={}, props={}, offset={}", ret, filterData, request.getPropertiesMap(), request.getCommitLogOffset());

            // eval true
            if (ret != null && ret instanceof Boolean && (Boolean) ret) {
                consumerFilterManager.getBloomFilter().hashTo(
                    filterData.getBloomFilterData(),
                    filterBitMap
                );
            }
        }

        request.setBitMap(filterBitMap.bytes());

        long elapsedTime = UtilAll.computeElapsedTimeMilliseconds(startTime);
        // 1ms
        if (elapsedTime >= 1) {
            log.warn("Spend {} ms to calc bit map, consumerNum={}, topic={}", elapsedTime, filterDatas.size(), request.getTopic());
        }
    } catch (Throwable e) {
        log.error("Calc bit map error! topic={}, offset={}, queueId={}, {}", request.getTopic(), request.getCommitLogOffset(), request.getQueueId(), e);
    }
}
// End-to-end check: for each dispatched message, a bloom-filter hit in the computed bit map
// must agree with directly evaluating that consumer filter's compiled expression.
@Test public void testDispatch() { BrokerConfig brokerConfig = new BrokerConfig(); brokerConfig.setEnableCalcFilterBitMap(true); ConsumerFilterManager filterManager = ConsumerFilterManagerTest.gen(10, 10); CommitLogDispatcherCalcBitMap calcBitMap = new CommitLogDispatcherCalcBitMap(brokerConfig, filterManager); for (int i = 0; i < 10; i++) { Map<String, String> properties = new HashMap<>(4); properties.put("a", String.valueOf(i * 10 + 5)); String topic = "topic" + i; DispatchRequest dispatchRequest = new DispatchRequest( topic, 0, i * 100 + 123, 100, (long) ("tags" + i).hashCode(), System.currentTimeMillis(), i, null, UUID.randomUUID().toString(), 0, 0, properties ); calcBitMap.dispatch(dispatchRequest); assertThat(dispatchRequest.getBitMap()).isNotNull(); BitsArray bits = BitsArray.create(dispatchRequest.getBitMap()); Collection<ConsumerFilterData> filterDatas = filterManager.get(topic); for (ConsumerFilterData filterData : filterDatas) { if (filterManager.getBloomFilter().isHit(filterData.getBloomFilterData(), bits)) { try { assertThat((Boolean) filterData.getCompiledExpression().evaluate( new MessageEvaluationContext(properties) )).isTrue(); } catch (Exception e) { e.printStackTrace(); assertThat(true).isFalse(); } } else { try { assertThat((Boolean) filterData.getCompiledExpression().evaluate( new MessageEvaluationContext(properties) )).isFalse(); } catch (Exception e) { e.printStackTrace(); assertThat(true).isFalse(); } } } } }
/**
 * Convenience overload that delegates to the two-argument variant with a constant
 * {@code () -> false} supplier.
 */
public static SSLFactory createSSLFactoryAndEnableAutoRenewalWhenUsingFileStores(TlsConfig tlsConfig) { return createSSLFactoryAndEnableAutoRenewalWhenUsingFileStores(tlsConfig, () -> false); }
// Verifies the factory uses the normal trust manager both before and after the TLS files
// are updated and the SSL factory is renewed.
@Test public void createSSLFactoryAndEnableAutoRenewalWhenUsingFileStoresWithPinotSecureMode() throws IOException, URISyntaxException, InterruptedException { TlsConfig tlsConfig = createTlsConfig(); SSLFactory sslFactory = RenewableTlsUtils.createSSLFactoryAndEnableAutoRenewalWhenUsingFileStores(tlsConfig, () -> false); ensurSslFactoryUseNormalTrustManager(sslFactory); updateTlsFilesAndWaitForSslFactoryToBeRenewed(); ensurSslFactoryUseNormalTrustManager(sslFactory); }
/**
 * Renders the logging event's timestamp via the cached date formatter.
 *
 * @param le the event whose timestamp is formatted
 * @return the formatted timestamp string
 */
public String convert(ILoggingEvent le) {
    // Single expression: extract the timestamp and hand it to the formatter.
    return cachingDateFormatter.format(le.getTimeStamp());
}
// An invalid pattern ("foo") should fall back to the ISO-8601 representation of the timestamp.
@Test public void convertsDateAsIso8601WhenInvalidPatternSpecified() { assertEquals(_isoDateString, convert(_timestamp, "foo")); }
/**
 * Returns all thread-pool plugin supports currently registered with this manager.
 * The result is the values view of the backing map, so it reflects later changes.
 */
@Override public Collection<ThreadPoolPluginSupport> getAllManagedThreadPoolPluginSupports() { return managedThreadPoolPluginSupports.values(); }
// Registering two supports should make the managed collection report size 2.
@Test public void testGetAllManagedThreadPoolPluginSupports() { GlobalThreadPoolPluginManager manager = new DefaultGlobalThreadPoolPluginManager(); manager.registerThreadPoolPluginSupport(new TestSupport("1")); manager.registerThreadPoolPluginSupport(new TestSupport("2")); Assert.assertEquals(2, manager.getAllManagedThreadPoolPluginSupports().size()); }
@Override @CacheEvict(value = RedisKeyConstants.ROLE, key = "#id") public void updateRoleDataScope(Long id, Integer dataScope, Set<Long> dataScopeDeptIds) { // 校验是否可以更新 validateRoleForUpdate(id); // 更新数据范围 RoleDO updateObject = new RoleDO(); updateObject.setId(id); updateObject.setDataScope(dataScope); updateObject.setDataScopeDeptIds(dataScopeDeptIds); roleMapper.updateById(updateObject); }
// Inserts a custom role, updates its data scope, and asserts the persisted row reflects both
// the new scope and the new dept-id set.
@Test public void testUpdateRoleDataScope() { // mock 数据 RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setType(RoleTypeEnum.CUSTOM.getType())); roleMapper.insert(roleDO); // 准备参数 Long id = roleDO.getId(); Integer dataScope = randomEle(DataScopeEnum.values()).getScope(); Set<Long> dataScopeRoleIds = randomSet(Long.class); // 调用 roleService.updateRoleDataScope(id, dataScope, dataScopeRoleIds); // 断言 RoleDO dbRoleDO = roleMapper.selectById(id); assertEquals(dataScope, dbRoleDO.getDataScope()); assertEquals(dataScopeRoleIds, dbRoleDO.getDataScopeDeptIds()); }
/** Returns the stored user-agent string. */
public String getUserAgent() { return userAgent; }
// Simple getter/setter round trip for the userAgent field.
@Test public void testGetUserAgent() { shenyuRequestLog.setUserAgent("test"); Assertions.assertEquals(shenyuRequestLog.getUserAgent(), "test"); }
/**
 * Factory that picks a sharding route engine based on the SQL statement category.
 * <p>
 * TCL statements broadcast to all databases; DDL statements use the cursor engine when the
 * context is cursor-available, otherwise the DDL engine; DAL and DCL statements use their
 * dedicated engines; everything else falls through to the DQL engine.
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext, final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) { SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext(); SQLStatement sqlStatement = sqlStatementContext.getSqlStatement(); if (sqlStatement instanceof TCLStatement) { return new ShardingDatabaseBroadcastRoutingEngine(); } if (sqlStatement instanceof DDLStatement) { if (sqlStatementContext instanceof CursorAvailable) { return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props); } return getDDLRoutingEngine(shardingRule, database, sqlStatementContext); } if (sqlStatement instanceof DALStatement) { return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext); } if (sqlStatement instanceof DCLStatement) { return getDCLRoutingEngine(shardingRule, database, sqlStatementContext); } return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext); }
// A plain (non-TCL/DDL/DAL/DCL) statement whose tables are all sharding tables should route
// through the standard routing engine.
@Test void assertNewInstanceForStandard() { SQLStatement sqlStatement = mock(SQLStatement.class); when(sqlStatementContext.getSqlStatement()).thenReturn(sqlStatement); tableNames.add(""); when(shardingRule.getShardingLogicTableNames(((TableAvailable) sqlStatementContext).getTablesContext().getTableNames())).thenReturn(tableNames); when(shardingRule.isAllShardingTables(tableNames)).thenReturn(true); QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class)); ShardingRouteEngine actual = ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext, shardingConditions, props, new ConnectionContext(Collections::emptySet)); assertThat(actual, instanceOf(ShardingStandardRoutingEngine.class)); }
/** Returns the configured period in milliseconds. */
@Override public long getPeriodMillis() { return periodMillis; }
// The plugin's period is expected to be 60 seconds, expressed in milliseconds.
@Test public void testGetPeriodMillis() { assertEquals(SECONDS.toMillis(60), plugin.getPeriodMillis()); }
/**
 * Builds a {@code Statement} from the parse tree, first extracting the data sources it references
 * and passing them to the underlying build step.
 */
public Statement buildStatement(final ParserRuleContext parseTree) { return build(Optional.of(getSources(parseTree)), parseTree); }
// "FROM TEST1 AS t" should produce an AliasedRelation with source name "T".
@Test public void shouldExtractAsAliasedDataSources() { // Given: final SingleStatementContext stmt = givenQuery("SELECT * FROM TEST1 AS t;"); // When: final Query result = (Query) builder.buildStatement(stmt); // Then: assertThat(result.getFrom(), is(new AliasedRelation(TEST1, SourceName.of("T")))); }
/**
 * Parses and validates the {@code advertisedListeners} broker configuration.
 * <p>
 * Each comma-separated entry must have the form {@code listenerName:uri}. Per listener, at most
 * two entries are allowed and at most one URI per scheme (pulsar, pulsar+ssl, http, https).
 * If {@code internalListenerName} is blank it defaults to the first listener name, and it must
 * appear among the parsed listeners. A given host:port may be mapped to only one listener.
 *
 * @return listener name mapped to its {@link AdvertisedListener}; empty when the setting is blank
 * @throws IllegalArgumentException on any malformed, duplicate, or conflicting entry
 */
public static Map<String, AdvertisedListener> validateAndAnalysisAdvertisedListener(ServiceConfiguration config) { if (StringUtils.isBlank(config.getAdvertisedListeners())) { return Collections.emptyMap(); } Optional<String> firstListenerName = Optional.empty(); Map<String, List<String>> listeners = new LinkedHashMap<>(); for (final String str : StringUtils.split(config.getAdvertisedListeners(), ",")) { int index = str.indexOf(":"); if (index <= 0) { throw new IllegalArgumentException("the configure entry `advertisedListeners` is invalid. because " + str + " do not contain listener name"); } String listenerName = StringUtils.trim(str.substring(0, index)); if (!firstListenerName.isPresent()) { firstListenerName = Optional.of(listenerName); } String value = StringUtils.trim(str.substring(index + 1)); listeners.computeIfAbsent(listenerName, k -> new ArrayList<>(2)); listeners.get(listenerName).add(value); } if (StringUtils.isBlank(config.getInternalListenerName())) { config.setInternalListenerName(firstListenerName.get()); } if (!listeners.containsKey(config.getInternalListenerName())) { throw new IllegalArgumentException("the `advertisedListeners` configure do not contain " + "`internalListenerName` entry"); } final Map<String, AdvertisedListener> result = new LinkedHashMap<>(); final Map<String, Set<String>> reverseMappings = new LinkedHashMap<>(); for (final Map.Entry<String, List<String>> entry : listeners.entrySet()) { if (entry.getValue().size() > 2) { throw new IllegalArgumentException("there are redundant configure for listener `" + entry.getKey() + "`"); } URI pulsarAddress = null, pulsarSslAddress = null, pulsarHttpAddress = null, pulsarHttpsAddress = null; for (final String strUri : entry.getValue()) { try { URI uri = URI.create(strUri); if (StringUtils.equalsIgnoreCase(uri.getScheme(), "pulsar")) { if (pulsarAddress == null) { pulsarAddress = uri; } else { throw new IllegalArgumentException("there are redundant configure for listener `" + entry.getKey() + 
"`"); } } else if (StringUtils.equalsIgnoreCase(uri.getScheme(), "pulsar+ssl")) { if (pulsarSslAddress == null) { pulsarSslAddress = uri; } else { throw new IllegalArgumentException("there are redundant configure for listener `" + entry.getKey() + "`"); } } else if (StringUtils.equalsIgnoreCase(uri.getScheme(), "http")) { if (pulsarHttpAddress == null) { pulsarHttpAddress = uri; } else { throw new IllegalArgumentException("there are redundant configure for listener `" + entry.getKey() + "`"); } } else if (StringUtils.equalsIgnoreCase(uri.getScheme(), "https")) { if (pulsarHttpsAddress == null) { pulsarHttpsAddress = uri; } else { throw new IllegalArgumentException("there are redundant configure for listener `" + entry.getKey() + "`"); } } String hostPort = String.format("%s:%d", uri.getHost(), uri.getPort()); Set<String> sets = reverseMappings.computeIfAbsent(hostPort, k -> new TreeSet<>()); sets.add(entry.getKey()); if (sets.size() > 1) { throw new IllegalArgumentException("must not specify `" + hostPort + "` to different listener."); } } catch (Throwable cause) { throw new IllegalArgumentException("the value " + strUri + " in the `advertisedListeners` " + "configure is invalid", cause); } } result.put(entry.getKey(), AdvertisedListener.builder() .brokerServiceUrl(pulsarAddress) .brokerServiceUrlTls(pulsarSslAddress) .brokerHttpUrl(pulsarHttpAddress) .brokerHttpsUrl(pulsarHttpsAddress) .build()); } return result; }
// Two pulsar+ssl URIs for the same listener are redundant and must be rejected.
@Test(expectedExceptions = IllegalArgumentException.class) public void testListenerDuplicate_3() { ServiceConfiguration config = new ServiceConfiguration(); config.setAdvertisedListeners(" internal:pulsar+ssl://127.0.0.1:6661," + " internal:pulsar+ssl://192.168.1.11:6661"); config.setInternalListenerName("internal"); MultipleListenerValidator.validateAndAnalysisAdvertisedListener(config); }
/**
 * Lower bound, in microseconds, of the given latency bucket.
 * Bucket 0 starts at 0; bucket {@code n > 0} starts at {@code 2^n}.
 */
static int bucketMinUs(int bucket) {
    if (bucket == 0) {
        return 0;
    }
    return 1 << bucket;
}
// Bucket 0 starts at 0; bucket n > 0 starts at 2^n microseconds.
@Test public void bucketMinUs() { assertEquals(0, LatencyDistribution.bucketMinUs(0)); assertEquals(2, LatencyDistribution.bucketMinUs(1)); assertEquals(4, LatencyDistribution.bucketMinUs(2)); assertEquals(8, LatencyDistribution.bucketMinUs(3)); assertEquals(16, LatencyDistribution.bucketMinUs(4)); assertEquals(32, LatencyDistribution.bucketMinUs(5)); }
/**
 * Looks up all refresh tokens for the given user name via the named query.
 *
 * @param name user name to filter on
 * @return a mutable set of matching tokens; empty (never null) when none exist
 */
@Override
public Set<OAuth2RefreshTokenEntity> getRefreshTokensByUserName(String name) {
    TypedQuery<OAuth2RefreshTokenEntity> byName =
            manager.createNamedQuery(OAuth2RefreshTokenEntity.QUERY_BY_NAME, OAuth2RefreshTokenEntity.class);
    byName.setParameter(OAuth2RefreshTokenEntity.PARAM_NAME, name);
    List<OAuth2RefreshTokenEntity> found = byName.getResultList();
    // Defensively tolerate a null result list, returning an empty set.
    Set<OAuth2RefreshTokenEntity> tokens = new HashSet<>();
    if (found != null) {
        tokens.addAll(found);
    }
    return tokens;
}
// user2 is expected to own 3 refresh tokens, each linked back to user2's authentication.
@Test public void testGetRefreshTokensByUserName() { Set<OAuth2RefreshTokenEntity> tokens = repository.getRefreshTokensByUserName("user2"); assertEquals(3, tokens.size()); assertEquals("user2", tokens.iterator().next().getAuthenticationHolder().getUserAuth().getName()); }
/** Compiles the regex string and delegates to the {@code Pattern}-based overload. */
public static AllMatches allMatches(String regex) { return allMatches(Pattern.compile(regex)); }
// Each output list contains the full match followed by its capture groups; "a x" has no match.
@Test @Category(NeedsRunner.class) public void testAllMatches() { PCollection<List<String>> output = p.apply(Create.of("a x", "x x", "y y", "z z")).apply(Regex.allMatches("([xyz]) ([xyz])")); PAssert.that(output) .containsInAnyOrder( Arrays.asList("x x", "x", "x"), Arrays.asList("y y", "y", "y"), Arrays.asList("z z", "z", "z")); p.run(); }
/**
 * Builds a case-insensitive validator accepting any of the enum's constant names, or null.
 *
 * @param enumClass the enum whose constant names are valid values
 * @return a validator over the enum names plus {@code null}
 */
public static <T extends Enum<T>> Validator enumValues(final Class<T> enumClass) {
    final EnumSet<T> constants = EnumSet.allOf(enumClass);
    // One extra slot, left as null: a null value is also considered valid.
    final String[] validValues = new String[constants.size() + 1];
    int idx = 0;
    for (final T constant : constants) {
        validValues[idx++] = constant.toString();
    }
    return ValidCaseInsensitiveString.in(validValues);
}
// A value matching an enum constant name must pass validation without throwing.
@Test public void shouldNotThrowIfAValidEnumValue() { // Given: final Validator validator = ConfigValidators.enumValues(TestEnum.class); // When: validator.ensureValid("propName", TestEnum.FOO.toString()); // Then: did not throw }
/**
 * Creates a {@link JibContainerBuilder} for the given base image reference.
 * <p>
 * References prefixed with the docker-daemon prefix use the local Docker daemon; references
 * with the tar prefix load from a tarball path; anything else (optionally carrying the registry
 * prefix, which is stripped) is treated as a registry image with credential retrievers configured
 * from the CLI options. Platforms, when non-empty, are applied to the builder.
 *
 * @throws InvalidImageReferenceException if the registry image reference cannot be parsed
 * @throws FileNotFoundException propagated from credential retriever setup
 */
public static JibContainerBuilder create( String baseImageReference, Set<Platform> platforms, CommonCliOptions commonCliOptions, ConsoleLogger logger) throws InvalidImageReferenceException, FileNotFoundException { if (baseImageReference.startsWith(DOCKER_DAEMON_IMAGE_PREFIX)) { return Jib.from( DockerDaemonImage.named(baseImageReference.replaceFirst(DOCKER_DAEMON_IMAGE_PREFIX, ""))); } if (baseImageReference.startsWith(TAR_IMAGE_PREFIX)) { return Jib.from( TarImage.at(Paths.get(baseImageReference.replaceFirst(TAR_IMAGE_PREFIX, "")))); } ImageReference imageReference = ImageReference.parse(baseImageReference.replaceFirst(REGISTRY_IMAGE_PREFIX, "")); RegistryImage registryImage = RegistryImage.named(imageReference); DefaultCredentialRetrievers defaultCredentialRetrievers = DefaultCredentialRetrievers.init( CredentialRetrieverFactory.forImage( imageReference, logEvent -> logger.log(logEvent.getLevel(), logEvent.getMessage()))); Credentials.getFromCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers) .forEach(registryImage::addCredentialRetriever); JibContainerBuilder containerBuilder = Jib.from(registryImage); if (!platforms.isEmpty()) { containerBuilder.setPlatforms(platforms); } return containerBuilder; }
// A "registry://" reference strips the prefix and configures neither a docker client nor a tar path.
@Test public void testCreate_registry() throws IOException, InvalidImageReferenceException, CacheDirectoryCreationException { JibContainerBuilder containerBuilder = ContainerBuilders.create( "registry://registry-image-ref", Collections.emptySet(), mockCommonCliOptions, mockLogger); BuildContext buildContext = JibContainerBuilderTestHelper.toBuildContext( containerBuilder, Containerizer.to(RegistryImage.named("ignored"))); ImageConfiguration imageConfiguration = buildContext.getBaseImageConfiguration(); assertThat(imageConfiguration.getImage().toString()).isEqualTo("registry-image-ref"); assertThat(imageConfiguration.getDockerClient().isPresent()).isFalse(); assertThat(imageConfiguration.getTarPath().isPresent()).isFalse(); }
/**
 * Checks whether the current user may scan the project, based on permission mappings fetched
 * for the DevOps platform.
 * <p>
 * Access is granted when either the user's direct repository permissions or one of the user's
 * teams maps to the scan permission.
 *
 * @throws IllegalStateException when no auth app installation token is available
 */
@Override
public boolean isScanAllowedUsingPermissionsFromDevopsPlatform() {
    checkState(authAppInstallationToken != null, "An auth app token is required in case repository permissions checking is necessary.");
    String[] orgaAndRepoTokenified = devOpsProjectCreationContext.fullName().split("/");
    String organization = orgaAndRepoTokenified[0];
    String repository = orgaAndRepoTokenified[1];
    // Fix: the DB session opened here was never closed; use try-with-resources so it is
    // released deterministically instead of leaking.
    Set<DevOpsPermissionsMappingDto> permissionsMappingDtos;
    try (DbSession dbSession = dbClient.openSession(false)) {
        permissionsMappingDtos = dbClient.githubPermissionsMappingDao()
            .findAll(dbSession, devOpsPlatformSettings.getDevOpsPlatform());
    }
    if (doesUserHaveScanPermission(organization, repository, permissionsMappingDtos)) {
        return true;
    }
    return doesUserBelongToAGroupWithScanPermission(organization, repository, permissionsMappingDtos);
}
// Access via team2, whose permissions include the scan role, should grant scanning even when
// the user has no direct repository scan permission.
@Test void isScanAllowedUsingPermissionsFromDevopsPlatform_whenAccessViaTeam_returnsTrue() { GsonRepositoryTeam team1 = mockGithubTeam("team1", 1, "role1", "read", "another_perm"); GsonRepositoryTeam team2 = mockGithubTeam("team2", 2, "role2", "another_perm", UserRole.SCAN); mockTeamsFromApi(team1, team2); bindGroupsToUser(team1.name(), team2.name()); assertThat(githubProjectCreator.isScanAllowedUsingPermissionsFromDevopsPlatform()).isTrue(); }
/**
 * Builds the batch-get response envelope from the resource method's result map.
 * <p>
 * Merges three sources into one key-to-entry map: successful entities (with field projection
 * and, when requested, schema defaults filled in), per-key service errors from a
 * {@code BatchResult}, and per-key errors recorded on the request context. Keys are translated
 * to alternative keys when routing requires it. Null keys (or null error values) from the
 * resource method are rejected with a 500; an NPE from a null-hostile map during the
 * containsKey probe is deliberately swallowed. Projection work is bracketed with framework
 * timing marks.
 */
@Override public RestLiResponseData<BatchGetResponseEnvelope> buildRestLiResponseData(Request request, RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies) { @SuppressWarnings({ "unchecked" }) /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */ final Map<Object, RecordTemplate> entities = (Map<Object, RecordTemplate>) result; Map<Object, HttpStatus> statuses = Collections.emptyMap(); Map<Object, RestLiServiceException> serviceErrors = Collections.emptyMap(); if (result instanceof BatchResult) { @SuppressWarnings({ "unchecked" }) /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */ final BatchResult<Object, RecordTemplate> batchResult = (BatchResult<Object, RecordTemplate>) result; statuses = batchResult.getStatuses(); serviceErrors = batchResult.getErrors(); } try { if (statuses.containsKey(null)) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } } catch (NullPointerException e) { // Some map implementations will throw an NPE if they do not support null keys. // In this case it is OK to swallow this exception and proceed. } TimingContextUtil.beginTiming(routingResult.getContext().getRawRequestContext(), FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key()); Map<Object, BatchResponseEntry> batchResult = new HashMap<>(entities.size() + serviceErrors.size()); for (Map.Entry<Object, RecordTemplate> entity : entities.entrySet()) { if (entity.getKey() == null) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. 
Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult); DataMap rawData = entity.getValue().data(); if (routingResult.getContext().isFillInDefaultsRequested()) { rawData = (DataMap) ResponseUtils.fillInDataDefault(entity.getValue().schema(), rawData); } final DataMap projectedData = RestUtils.projectFields(rawData, routingResult.getContext()); AnyRecord anyRecord = new AnyRecord(projectedData); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), anyRecord)); } TimingContextUtil.endTiming(routingResult.getContext().getRawRequestContext(), FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key()); for (Map.Entry<Object, RestLiServiceException> entity : serviceErrors.entrySet()) { if (entity.getKey() == null || entity.getValue() == null) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), entity.getValue())); } final Map<Object, RestLiServiceException> contextErrors = routingResult.getContext().getBatchKeyErrors(); for (Map.Entry<Object, RestLiServiceException> entry : contextErrors.entrySet()) { Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entry.getKey(), routingResult); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entry.getKey()), entry.getValue())); } return new RestLiResponseDataImpl<>(new BatchGetResponseEnvelope(HttpStatus.S_200_OK, batchResult), headers, cookies); }
// Results containing null keys/values must make the builder fail with a RestLiServiceException
// carrying the expected error message.
@Test(dataProvider = "exceptionTestData") public void testBuilderExceptions(Object results, String expectedErrorMessage) { // Protocol version doesn't matter here ServerResourceContext mockContext = getMockResourceContext(null, Collections.emptyMap(), null, null, null); ResourceMethodDescriptor mockDescriptor = getMockResourceMethodDescriptor(null); RoutingResult routingResult = new RoutingResult(mockContext, mockDescriptor); Map<String, String> headers = ResponseBuilderUtil.getHeaders(); BatchGetResponseBuilder responseBuilder = new BatchGetResponseBuilder(new ErrorResponseBuilder()); try { responseBuilder.buildRestLiResponseData(null, routingResult, results, headers, Collections.emptyList()); Assert.fail("buildRestLiResponseData should have failed because of null elements!"); } catch (RestLiServiceException e) { Assert.assertTrue(e.getMessage().contains(expectedErrorMessage)); } }
/**
 * Extracts SQL-hint key/value pairs from a SQL comment into a {@link HintValueContext}.
 * <p>
 * Returns an empty context when the SQL carries no hint. Recognized keys populate the
 * corresponding context fields: data source name, write-route-only, skip-SQL-rewrite,
 * disable-audit names (split into a collection), and shadow. Remaining keys matching the
 * sharding database/table value keys are stored (upper-cased) as sharding values, converted
 * to a Comparable where possible.
 */
public static HintValueContext extractHint(final String sql) { if (!containsSQLHint(sql)) { return new HintValueContext(); } HintValueContext result = new HintValueContext(); int hintKeyValueBeginIndex = getHintKeyValueBeginIndex(sql); String hintKeyValueText = sql.substring(hintKeyValueBeginIndex, sql.indexOf(SQL_COMMENT_SUFFIX, hintKeyValueBeginIndex)); Map<String, String> hintKeyValues = getSQLHintKeyValues(hintKeyValueText); if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)) { result.setDataSourceName(getHintValue(hintKeyValues, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)); } if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY)) { result.setWriteRouteOnly(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY))); } if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY)) { result.setSkipSQLRewrite(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY))); } if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)) { String property = getHintValue(hintKeyValues, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY); result.getDisableAuditNames().addAll(getSplitterSQLHintValue(property)); } if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.SHADOW_KEY)) { result.setShadow(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.SHADOW_KEY))); } for (Entry<String, String> entry : hintKeyValues.entrySet()) { Object value = convert(entry.getValue()); Comparable<?> comparable = value instanceof Comparable ? 
(Comparable<?>) value : Objects.toString(value); if (containsHintKey(Objects.toString(entry.getKey()), SQLHintPropertiesKey.SHARDING_DATABASE_VALUE_KEY)) { result.getShardingDatabaseValues().put(Objects.toString(entry.getKey()).toUpperCase(), comparable); } if (containsHintKey(Objects.toString(entry.getKey()), SQLHintPropertiesKey.SHARDING_TABLE_VALUE_KEY)) { result.getShardingTableValues().put(Objects.toString(entry.getKey()).toUpperCase(), comparable); } } return result; }
// A string table-sharding hint value should be retrievable for its table name.
@Test void assertSQLHintShardingTableValueWithStringHintValue() { HintValueContext actual = SQLHintUtils.extractHint("/* SHARDINGSPHERE_HINT: t_order.SHARDING_TABLE_VALUE=a */"); assertThat(actual.getHintShardingTableValue("t_order"), is(Collections.singletonList("a"))); }
/**
 * Advances to the next in-memory result row.
 *
 * @return true when a next row exists and is now current; false when exhausted
 */
@Override
public final boolean next() {
    // Guard clause: nothing left to iterate.
    if (!memoryResultSetRows.hasNext()) {
        return false;
    }
    currentResultSetRow = memoryResultSetRows.next();
    return true;
}
// A single-row merged result yields exactly one successful next() call.
@Test void assertNext() { assertTrue(memoryMergedResult.next()); assertFalse(memoryMergedResult.next()); }
/**
 * Assembles the users-search response payload: the mapped users plus paging metadata.
 *
 * @param userInformations      users to convert
 * @param paginationInformation page index/size/total to echo back
 */
@Override
public UsersSearchRestResponse toUsersForResponse(List<UserInformation> userInformations, PaginationInformation paginationInformation) {
    // Map users first, then attach the paging block inline.
    List<UserRestResponse> users = toUsersForResponse(userInformations);
    return new UsersSearchRestResponse(
        users,
        new PageRestResponse(paginationInformation.pageIndex(), paginationInformation.pageSize(), paginationInformation.total()));
}
// For anonymous callers the response should expose only the restricted user fields,
// and paging metadata must match the requested page.
@Test public void toUsersForResponse_whenAnonymous_returnsOnlyNameAndLogin() { PaginationInformation paging = forPageIndex(1).withPageSize(2).andTotal(3); UserInformation userInformation1 = mockSearchResult(1, true); UserInformation userInformation2 = mockSearchResult(2, false); UsersSearchRestResponse usersForResponse = usersSearchRestResponseGenerator.toUsersForResponse(List.of(userInformation1, userInformation2), paging); UserRestResponseForAnonymousUsers expectUser1 = buildExpectedResponseForAnonymous(userInformation1); UserRestResponseForAnonymousUsers expectUser2 = buildExpectedResponseForAnonymous(userInformation2); assertThat(usersForResponse.users()).containsExactly(expectUser1, expectUser2); assertPaginationInformationAreCorrect(paging, usersForResponse.page()); }
/**
 * Parses the given string into a Joda {@code DateTime} using the converter's configured
 * pattern, locale, and timezone.
 * <p>
 * Returns null for null/empty input. The formatter's default year is the current year in the
 * configured zone. The configured zone is only forced onto the formatter when the pattern does
 * not itself contain a timezone, so a zone parsed from the value is not overridden.
 *
 * @param value the raw date string; may be null or empty
 * @return the parsed {@code DateTime}, or null when the input is null/empty
 */
@Override
@Nullable
public Object convert(@Nullable String value) {
    if (isNullOrEmpty(value)) {
        return null;
    }
    LOG.debug("Trying to parse date <{}> with pattern <{}>, locale <{}>, and timezone <{}>.", value, dateFormat, locale, timeZone);
    // The two original branches duplicated the whole builder chain; build the common part
    // once and only append the zone when the pattern carries none.
    DateTimeFormatter formatter = DateTimeFormat
            .forPattern(dateFormat)
            .withDefaultYear(YearMonth.now(timeZone).getYear())
            .withLocale(locale);
    if (!containsTimeZone) {
        formatter = formatter.withZone(timeZone);
    }
    return DateTime.parse(value, formatter);
}
// Null input must convert to null rather than throwing.
@Test public void testNullInput() throws Exception { final DateConverter converter = new DateConverter(config("yyyy-MM-dd'T'HH:mm:ss.SSSZ", null, null)); assertThat((DateTime) converter.convert(null)).isNull(); }
public static String getSrcUserName(HttpServletRequest request) { IdentityContext identityContext = RequestContextHolder.getContext().getAuthContext().getIdentityContext(); String result = StringUtils.EMPTY; if (null != identityContext) { result = (String) identityContext.getParameter( com.alibaba.nacos.plugin.auth.constant.Constants.Identity.IDENTITY_ID); } // If auth is disabled, get username from parameters by agreed key return StringUtils.isBlank(result) ? request.getParameter(Constants.USERNAME) : result; }
// With no identity context value available, the username falls back to the request parameter.
@Test void testGetSrcUserNameFromRequest() { HttpServletRequest request = Mockito.mock(HttpServletRequest.class); Mockito.when(request.getParameter(eq(Constants.USERNAME))).thenReturn("parameterName"); assertEquals("parameterName", RequestUtil.getSrcUserName(request)); }
/**
 * Returns the user properties of the wrapped principal.
 * Non-Ksql principals carry no properties, so an empty map is returned for them.
 */
@Override
public Map<String, Object> getUserProperties() {
    if (!(principal instanceof KsqlPrincipal)) {
        return Collections.emptyMap();
    }
    return ((KsqlPrincipal) principal).getUserProperties();
}
// Ksql principals expose their properties; other principals yield an empty map.
@Test public void shouldReturnUserProperties() { assertThat(wrappedKsqlPrincipal.getUserProperties(), is(USER_PROPERTIES)); assertThat(wrappedOtherPrincipal.getUserProperties(), is(Collections.emptyMap())); }
/**
 * Whether the DB vendor registered in the index matches the current database's vendor.
 * An absent registered vendor counts as a mismatch.
 */
@Override
public boolean hasSameDbVendor() {
    return metadataIndex.getDbVendor()
        .filter(registered -> registered.equals(getDbVendor()))
        .isPresent();
}
// A vendor registered in the index that differs from the actual DB vendor yields false.
@Test public void hasSameDbVendor_is_false_if_value_is_absent_from_es() { prepareDb("mssql"); assertThat(underTest.hasSameDbVendor()).isFalse(); }
/**
 * Writes a JSON {@code "name":value} boolean pair, emitting a separator first when needed.
 *
 * @return this stream, for chaining
 */
public StatsOutputStream writePair(String name, boolean value) {
    checkSeparator();
    write('"');
    writeEncoded(name);
    write("\":");
    write(value);
    return this;
}
// Each writePair overload emits a correctly quoted "key":value fragment.
@Test public void testPairs() { stream.writePair("my-count", 1); assertEquals(str(), "\"my-count\":1"); stream.writePair("my-rate", 0.0); assertEquals(str(), "\"my-rate\":0.0"); stream.writePair("my-flag", true); assertEquals(str(), "\"my-flag\":true"); stream.writePair("my-string", "value"); assertEquals(str(), "\"my-string\":\"value\""); }
/**
 * Casts a nullable {@code Integer} to a {@code BigDecimal} with the given precision and scale
 * by delegating to the {@code long} overload. Null maps to null.
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return value == null ? null : cast(value.longValue(), precision, scale);
}
// Casting a null Long must yield null, not throw.
@Test public void shouldCastNullBigInt() { // When: final BigDecimal decimal = DecimalUtil.cast((Long)null, 2, 1); // Then: assertThat(decimal, is(nullValue())); }
/**
 * Comparator ordering {@code ActualProperties} candidates by how well they satisfy the
 * preferred properties for streaming execution.
 * <p>
 * Candidates that enable a local optimization come first, then those meeting the preferred
 * partitioning, then the better local-property match. Match results are cached per local
 * property list because computing them can be expensive.
 */
@VisibleForTesting static Comparator<ActualProperties> streamingExecutionPreference(PreferredProperties preferred) { // Calculating the matches can be a bit expensive, so cache the results between comparisons LoadingCache<List<LocalProperty<VariableReferenceExpression>>, List<Optional<LocalProperty<VariableReferenceExpression>>>> matchCache = CacheBuilder.newBuilder() .build(CacheLoader.from(actualProperties -> LocalProperties.match(actualProperties, preferred.getLocalProperties()))); return (actual1, actual2) -> { List<Optional<LocalProperty<VariableReferenceExpression>>> matchLayout1 = matchCache.getUnchecked(actual1.getLocalProperties()); List<Optional<LocalProperty<VariableReferenceExpression>>> matchLayout2 = matchCache.getUnchecked(actual2.getLocalProperties()); return ComparisonChain.start() .compareTrueFirst(hasLocalOptimization(preferred.getLocalProperties(), matchLayout1), hasLocalOptimization(preferred.getLocalProperties(), matchLayout2)) .compareTrueFirst(meetsPartitioningRequirements(preferred, actual1), meetsPartitioningRequirements(preferred, actual2)) .compare(matchLayout1, matchLayout2, matchedLayoutPreference()) .result(); }; }
// With an undistributed preference, single-stream layouts should sort ahead of partitioned ones,
// preserving the documented preference order after a stable sort.
@Test public void testPickLayoutUnpartitionedPreference() { Comparator<ActualProperties> preference = streamingExecutionPreference(PreferredProperties.undistributed()); List<ActualProperties> input = ImmutableList.<ActualProperties>builder() .add(builder() .global(streamPartitionedOn("a")) .build()) .add(builder() .global(singleStreamPartition()) .build()) .add(builder() .global(arbitraryPartition()) .local(ImmutableList.of(grouped("a", "b"))) .build()) .add(builder() .global(arbitraryPartition()) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .add(builder() .global(singleStream()) .local(ImmutableList.of(constant("a"), sorted("b", ASC_NULLS_FIRST))) .build()) .add(builder() .global(singleStreamPartition()) .local(ImmutableList.of(sorted("a", ASC_NULLS_FIRST))) .build()) .build(); List<ActualProperties> expected = ImmutableList.<ActualProperties>builder() .add(builder() .global(singleStreamPartition()) .build()) .add(builder() .global(singleStreamPartition()) .local(ImmutableList.of(sorted("a", ASC_NULLS_FIRST))) .build()) .add(builder() .global(streamPartitionedOn("a")) .build()) .add(builder() .global(arbitraryPartition()) .local(ImmutableList.of(grouped("a", "b"))) .build()) .add(builder() .global(arbitraryPartition()) .build()) .add(builder() .global(hashDistributedOn("a")) .build()) .add(builder() .global(singleStream()) .local(ImmutableList.of(constant("a"), sorted("b", ASC_NULLS_FIRST))) .build()) .build(); assertEquals(stableSort(input, preference), expected); }
/**
 * Adds a hash to the estimator, converting to the dense representation first when needed.
 * The cached estimate is invalidated only when the encoder's state actually changed.
 */
@Override
public void add(long hash) {
    convertToDenseIfNeeded();
    if (encoder.add(hash)) {
        cachedEstimate = null;
    }
}
// A single added element should be estimated as cardinality 1.
@Test public void add() { hyperLogLog.add(1000L); assertEquals(1L, hyperLogLog.estimate()); }
/**
 * Persists a step instance state change and publishes pending update events.
 * <p>
 * INSERT/UPSERT inserts (or upserts) the step instance, UPDATE updates it, and any other
 * DB operation raises a MaestroInternalError. When the step summary has pending records, a
 * {@code StepInstanceUpdateJobEvent} is published and the publisher's result is returned.
 *
 * @return empty on success with nothing to publish; otherwise the publish result, or failure
 *         details wrapping any runtime exception thrown during the sync
 */
public Optional<Details> sync( @NotNull StepInstance instance, @NotNull WorkflowSummary workflowSummary, @NotNull StepRuntimeSummary stepSummary) { try { switch (stepSummary.getDbOperation()) { case INSERT: case UPSERT: instanceDao.insertOrUpsertStepInstance( instance, stepSummary.getDbOperation() == DbOperation.UPSERT); break; case UPDATE: instanceDao.updateStepInstance(workflowSummary, stepSummary); break; default: throw new MaestroInternalError( "Invalid DB operation: %s for step instance [%s][%s]", stepSummary.getDbOperation(), stepSummary.getStepId(), stepSummary.getStepAttemptId()); } if (!stepSummary.getPendingRecords().isEmpty()) { return jobEventPublisher.publish( StepInstanceUpdateJobEvent.create(instance, stepSummary.getPendingRecords())); } return Optional.empty(); } catch (RuntimeException e) { return Optional.of(Details.create(e, true, "Failed to sync a Maestro step state change")); } }
// A DELETE operation is unsupported; sync must capture the internal error as failure Details.
@Test public void testInvalidDbOperation() { StepRuntimeSummary stepRuntimeSummary = StepRuntimeSummary.builder() .stepId("test-summary") .stepAttemptId(2) .stepInstanceId(1) .dbOperation(DbOperation.DELETE) .build(); Optional<Details> details = syncManager.sync(instance, workflowSummary, stepRuntimeSummary); assertTrue(details.isPresent()); assertEquals("Failed to sync a Maestro step state change", details.get().getMessage()); assertFalse(details.get().getErrors().isEmpty()); assertEquals( "MaestroInternalError: Invalid DB operation: DELETE for step instance [test-summary][2]", details.get().getErrors().get(0)); }
// Returns the name field as-is; may be null when no name was ever assigned
// (NOTE(review): nullability inferred from usage in tests — confirm against callers).
public String name() { return name; }
@Test
void testName() {
    // A bare prototype never receives a name, so name() reports null.
    final var template = new Character();
    template.set(Stats.ARMOR, 1);
    template.set(Stats.INTELLECT, 2);
    assertNull(template.name());

    // Copying via the (Type, prototype) constructor does not confer a name either.
    final var noIntellect = new Character(Type.ROGUE, template);
    noIntellect.remove(Stats.INTELLECT);
    assertNull(noIntellect.name());

    // Only the (name, prototype) constructor sets a name, which survives stat removal.
    final var named = new Character("weak", template);
    named.remove(Stats.ARMOR);
    assertEquals("weak", named.name());
}
/**
 * Populates the generated model-template's static GET_MODEL method for a clustering model.
 *
 * <p>After delegating the common initialization to KiePMMLModelFactoryUtils, it locates the
 * "toReturn" variable inside GET_MODEL's body (failing with KiePMMLException if either the
 * variable or its initializer is missing) and rewrites three arguments of the chained builder
 * initializer: the model class, the comparison measure, and the missing-value weights (a
 * NullLiteralExpr is used when the PMML model declares no missing-value weights).
 */
static void setStaticGetter(final CompilationDTO<ClusteringModel> compilationDTO, final ClassOrInterfaceDeclaration modelTemplate) { KiePMMLModelFactoryUtils.initStaticGetter(compilationDTO, modelTemplate); final BlockStmt body = getMethodDeclarationBlockStmt(modelTemplate, GET_MODEL); final VariableDeclarator variableDeclarator = getVariableDeclarator(body, TO_RETURN).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TO_RETURN, body))); final MethodCallExpr initializer = variableDeclarator.getInitializer() .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, TO_RETURN, body))) .asMethodCallExpr(); ClusteringModel clusteringModel = compilationDTO.getModel(); KiePMMLClusteringModel.ModelClass modelClass = modelClassFrom(clusteringModel.getModelClass()); getChainedMethodCallExprFrom("withModelClass", initializer).setArgument(0, literalExprFrom(modelClass)); getChainedMethodCallExprFrom("withComparisonMeasure", initializer).setArgument(0, comparisonMeasureCreationExprFrom(clusteringModel.getComparisonMeasure())); Expression missingValueWeights = clusteringModel.getMissingValueWeights() != null ? missingValueWeightsCreationExprFrom(clusteringModel.getMissingValueWeights()) : new NullLiteralExpr(); getChainedMethodCallExprFrom("withMissingValueWeights", initializer).setArgument(0, missingValueWeights); }
// Verifies that setStaticGetter rewrites the GET_MODEL method of the cloned template to match
// the golden source in TEST_01_SOURCE, after interpolating the model class, comparison-measure
// kind, aggregate function, compare function, and target field derived from the PMML fixture.
// Comparison is structural (JavaParserUtils.equalsNode), not textual.
@Test void setStaticGetter() throws IOException { final ClassOrInterfaceDeclaration modelTemplate = MODEL_TEMPLATE.clone(); final CommonCompilationDTO<ClusteringModel> compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, clusteringModel, new PMMLCompilationContextMock(), "fileName"); // fileName hardcoded inside TEST_01_SOURCE String expectedModelClass = KiePMMLClusteringModel.ModelClass.class.getCanonicalName() + "." + clusteringModel.getModelClass().name(); ComparisonMeasure comparisonMeasure = clusteringModel.getComparisonMeasure(); String expectedKind = KiePMMLComparisonMeasure.Kind.class.getCanonicalName() + "." + comparisonMeasure.getKind().name(); String expectedAggregateFunction = KiePMMLAggregateFunction.class.getCanonicalName() + "." + AGGREGATE_FN_MAP.get(comparisonMeasure.getMeasure().getClass()).name(); String expectedCompareFunction = KiePMMLCompareFunction.class.getCanonicalName() + "." + comparisonMeasure.getCompareFunction().name(); String expectedTargetField =targetMiningField.getName(); KiePMMLClusteringModelFactory.setStaticGetter(compilationDTO, modelTemplate); MethodDeclaration retrieved = modelTemplate.getMethodsByName(GET_MODEL).get(0); String text = String.format(getFileContent(TEST_01_SOURCE), expectedModelClass, expectedKind, expectedAggregateFunction, expectedCompareFunction, expectedTargetField); MethodDeclaration expected = JavaParserUtils.parseMethod(text); assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue(); }
/**
 * Renders the variable as e.g. {@code JavaTypeVariable{T extends A & B}};
 * the extends clause is omitted when {@code printExtendsClause()} says the
 * bounds are not worth printing.
 */
@Override
public String toString() {
    final StringBuilder result = new StringBuilder(getClass().getSimpleName());
    result.append('{').append(getName());
    if (printExtendsClause()) {
        result.append(" extends ").append(joinTypeNames(upperBounds));
    }
    return result.append('}').toString();
}
@Test
public void toString_upper_bounded_by_multiple_bounds() {
    // Local class only exists to expose a type variable with two upper bounds.
    @SuppressWarnings("unused")
    class BoundedByMultipleBounds<NAME extends String & Serializable> {
    }

    // Import the class and grab its single type parameter.
    final JavaTypeVariable<JavaClass> variable =
            new ClassFileImporter().importClass(BoundedByMultipleBounds.class).getTypeParameters().get(0);

    // Both bounds must be rendered, joined by '&', inside the simple-name wrapper.
    assertThat(variable.toString())
            .contains(JavaTypeVariable.class.getSimpleName())
            .contains("NAME extends java.lang.String & java.io.Serializable");
}
/**
 * Builds a CreateStreamCommand from a physical-plan sink node.
 *
 * <p>Pulls the sink name, schema, timestamp column, topic name, serde formats and
 * key-window info straight from the output node. The or-replace flag is propagated
 * from the node, while the "is source" flag is hard-coded to false (sinks created
 * from an output node are never source streams).
 */
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) { return new CreateStreamCommand( outputNode.getSinkName().get(), outputNode.getSchema(), outputNode.getTimestampColumn(), outputNode.getKsqlTopic().getKafkaTopicName(), Formats.from(outputNode.getKsqlTopic()), outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(), Optional.of(outputNode.getOrReplace()), Optional.of(false) ); }
@Test public void shouldThrowInCreateStreamOrReplaceSource() { // Given: final CreateStream ddlStatement = new CreateStream(SOME_NAME, STREAM_ELEMENTS, true, false, withProperties, true); // When: final Exception e = assertThrows( KsqlException.class, () -> createSourceFactory .createStreamCommand(ddlStatement, ksqlConfig)); // Then: assertThat(e.getMessage(), containsString( "Cannot add stream 'bob': CREATE OR REPLACE is not supported on " + "source streams.")); }
public void writeNumIncreasing(long value) { // Values are encoded with a single byte length prefix, followed // by the actual value in big-endian format with leading 0 bytes // dropped. byte[] bufer = new byte[9]; // 8 bytes for value plus one byte for length int len = 0; while (value != 0) { len++; bufer[9 - len] = (byte) (value & 0xff); value >>>= 8; } bufer[9 - len - 1] = (byte) len; len++; byte[] encodedArray = new byte[len]; System.arraycopy(bufer, 9 - len, encodedArray, 0, len); encodedArrays.add(encodedArray); }
// Round-trips boundary values through writeNumIncreasing/readNumIncreasing:
// zero, one, and both signed-long extremes (the encoding treats values as
// unsigned 64-bit, so MIN_VALUE/MAX_VALUE exercise the full 8-byte payload path).
@Test public void testWriteNumIncreasing() { OrderedCode orderedCode = new OrderedCode(); orderedCode.writeNumIncreasing(0); orderedCode.writeNumIncreasing(1); orderedCode.writeNumIncreasing(Long.MIN_VALUE); orderedCode.writeNumIncreasing(Long.MAX_VALUE); assertEquals(0, orderedCode.readNumIncreasing()); assertEquals(1, orderedCode.readNumIncreasing()); assertEquals(Long.MIN_VALUE, orderedCode.readNumIncreasing()); assertEquals(Long.MAX_VALUE, orderedCode.readNumIncreasing()); }
/**
 * Translates a web SearchRequest into an internal IssueQuery.
 *
 * <p>Resolves the request's rule keys to rule rows, collects the targeted issue keys,
 * and copies every filter facet onto the query builder. The time zone falls back to the
 * injected clock's zone when the request's zone cannot be parsed. Component parameters
 * (including deprecated aliases) and the createdAfter bound are resolved last, and an
 * optional sort/asc pair is applied before building.
 *
 * <p>NOTE(review): when some requested rule keys do not resolve, a sentinel
 * "non-existing-uuid" is added so the query matches nothing for those rules rather than
 * silently ignoring them — confirm this is the intended contract before changing it.
 */
public IssueQuery create(SearchRequest request) { try (DbSession dbSession = dbClient.openSession(false)) { final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone()); Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules()); Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet()); Collection<String> issueKeys = collectIssueKeys(dbSession, request); if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) { ruleUuids.add("non-existing-uuid"); } IssueQuery.Builder builder = IssueQuery.builder() .issueKeys(issueKeys) .severities(request.getSeverities()) .cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories()) .impactSoftwareQualities(request.getImpactSoftwareQualities()) .impactSeverities(request.getImpactSeverities()) .statuses(request.getStatuses()) .resolutions(request.getResolutions()) .issueStatuses(request.getIssueStatuses()) .resolved(request.getResolved()) .prioritizedRule(request.getPrioritizedRule()) .rules(ruleDtos) .ruleUuids(ruleUuids) .assigneeUuids(request.getAssigneeUuids()) .authors(request.getAuthors()) .scopes(request.getScopes()) .languages(request.getLanguages()) .tags(request.getTags()) .types(request.getTypes()) .pciDss32(request.getPciDss32()) .pciDss40(request.getPciDss40()) .owaspAsvs40(request.getOwaspAsvs40()) .owaspAsvsLevel(request.getOwaspAsvsLevel()) .owaspTop10(request.getOwaspTop10()) .owaspTop10For2021(request.getOwaspTop10For2021()) .stigAsdR5V3(request.getStigAsdV5R3()) .casa(request.getCasa()) .sansTop25(request.getSansTop25()) .cwe(request.getCwe()) .sonarsourceSecurity(request.getSonarsourceSecurity()) .assigned(request.getAssigned()) .createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone)) .createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone)) .facetMode(request.getFacetMode()) .timeZone(timeZone) 
.codeVariants(request.getCodeVariants()); List<ComponentDto> allComponents = new ArrayList<>(); boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents); addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request); setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone); String sort = request.getSort(); if (!isNullOrEmpty(sort)) { builder.sort(sort); builder.asc(request.getAsc()); } return builder.build(); } }
@Test
public void timeZone_ifZoneFromQueryIsUnknown_fallbacksToClockZone() {
    // An unparseable zone id must fall back to the clock's zone and emit one warning.
    when(clock.getZone()).thenReturn(ZoneId.systemDefault());
    SearchRequest searchRequest = new SearchRequest().setTimeZone("Poitou-Charentes");

    IssueQuery query = underTest.create(searchRequest);

    assertThat(query.timeZone()).isEqualTo(clock.getZone());
    assertThat(logTester.logs()).containsOnly("TimeZone 'Poitou-Charentes' cannot be parsed as a valid zone ID");
}
/**
 * Fans issue-change notifications out to the registered listeners, once per branch.
 * Short-circuits when there are no listeners, no issues, or no change events.
 *
 * <p>NOTE(review): only {@link Error} is caught here, not RuntimeException — presumably
 * per-listener exceptions are handled inside broadcastChangeEventsToBranches and this
 * guard exists solely to keep Errors from escaping to the caller; confirm before widening
 * or narrowing the catch.
 */
@Override public void broadcastOnIssueChange(List<DefaultIssue> issues, Collection<QGChangeEvent> changeEvents, boolean fromAlm) { if (listeners.isEmpty() || issues.isEmpty() || changeEvents.isEmpty()) { return; } try { broadcastChangeEventsToBranches(issues, changeEvents, fromAlm); } catch (Error e) { LOG.warn(format("Broadcasting to listeners failed for %s events", changeEvents.size()), e); } }
// With an empty issue list the broadcast must short-circuit before touching any listener.
@Test public void broadcastOnIssueChange_has_no_effect_when_issues_are_empty() { underTest.broadcastOnIssueChange(emptyList(), singletonList(component1QGChangeEvent), false); verifyNoInteractions(listener1, listener2, listener3); }
/**
 * Provisioning is on only when GitLab authentication itself is enabled AND the
 * provisioning property is explicitly set to true (unset defaults to false).
 */
@Override
public boolean isProvisioningEnabled() {
    if (!isEnabled()) {
        return false;
    }
    return configuration.getBoolean(GITLAB_AUTH_PROVISIONING_ENABLED).orElse(false);
}
// Renamed from "...GithubAuthEnabled..." — this test configures GitLab authentication,
// so the name's "Github" was a copy/paste typo from a sibling test class.
@Test
public void isProvisioningEnabled_ifProvisioningEnabledAndGitlabAuthEnabled_returnsTrue() {
    enableGitlabAuthentication();
    settings.setProperty(GITLAB_AUTH_PROVISIONING_ENABLED, true);
    assertThat(config.isProvisioningEnabled()).isTrue();
}
/**
 * Admin command: asks the message store to purge expired consume queues.
 * Any failure (including Errors) is converted into a SYSTEM_ERROR response
 * instead of propagating to the remoting layer.
 */
public RemotingCommand cleanExpiredConsumeQueue() {
    LOGGER.info("AdminBrokerProcessor#cleanExpiredConsumeQueue: start.");
    final RemotingCommand result = RemotingCommand.createResponseCommand(null);
    try {
        // Delegate the actual cleanup to the store implementation.
        brokerController.getMessageStore().cleanExpiredConsumerQueue();
    } catch (Throwable cause) {
        return buildErrorResponse(ResponseCode.SYSTEM_ERROR, cause.getMessage());
    }
    LOGGER.info("AdminBrokerProcessor#cleanExpiredConsumeQueue: end.");
    result.setCode(ResponseCode.SUCCESS);
    result.setRemark(null);
    return result;
}
@Test
public void testCleanExpiredConsumeQueue() throws RemotingCommandException {
    // Issuing the admin request with an empty header must succeed.
    RemotingCommand cleanRequest =
        RemotingCommand.createRequestCommand(RequestCode.CLEAN_EXPIRED_CONSUMEQUEUE, null);
    RemotingCommand cleanResponse = adminBrokerProcessor.processRequest(handlerContext, cleanRequest);
    assertThat(cleanResponse.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * Creates (or touches) a file with the given relative name under the temp folder,
 * creating intermediate directories as needed, and records it for later cleanup.
 *
 * @throws IOException if the parent directory or the file cannot be created
 */
public File createFile(String fileName) throws IOException {
    File file = new File(tempFolder(), fileName);
    File parentDir = file.getParentFile();
    // The original ignored mkdirs()' boolean result, deferring failures to a less
    // informative exception from createNewFile(); fail here with a clear message.
    // (Re-check isDirectory() after mkdirs to tolerate a concurrent creator.)
    if (!parentDir.isDirectory() && !parentDir.mkdirs() && !parentDir.isDirectory()) {
        throw new IOException("Failed to create directory " + parentDir);
    }
    // createNewFile() returning false just means the file already exists, which is
    // fine for this "ensure it exists" helper; genuine failures throw IOException.
    file.createNewFile();
    createdFiles.add(file);
    return file;
}
@Test
public void shouldCreateFilesInTempDirectory() throws IOException {
    // Files must land inside a "cruise" folder directly under the temp dir.
    File created = files.createFile("foo");
    File containingDir = created.getParentFile();
    assertThat(containingDir.getName(), is("cruise"));
    assertThat(containingDir.getParentFile(), is(tmpDir()));
}
// Accessor for the underlying sender (NOTE(review): appears to exist so tests can
// drive the sender's state machine directly — confirm intended visibility).
public ClientTelemetrySender telemetrySender() { return clientTelemetrySender; }
// Walks the telemetry sender through its full state machine, checking timeToNextUpdate at
// each state: immediate in SUBSCRIPTION_NEEDED, ~20s after a subscription, the supplied
// timeout while a request is in flight, 0 for a terminating push, MAX_VALUE while the
// terminating push is in flight, and IllegalStateException once TERMINATED.
// NOTE(review): the assertion "time > 0 && time >= 0.5 * time && time <= 1.5 * time" is a
// tautology for any positive time — it almost certainly meant to bound `time` against the
// configured push interval (0.5x..1.5x jitter window); confirm and fix the intended bound.
@Test public void testTelemetrySenderTimeToNextUpdate() { ClientTelemetryReporter.DefaultClientTelemetrySender telemetrySender = (ClientTelemetryReporter.DefaultClientTelemetrySender) clientTelemetryReporter.telemetrySender(); assertEquals(ClientTelemetryState.SUBSCRIPTION_NEEDED, telemetrySender.state()); assertEquals(0, telemetrySender.timeToNextUpdate(100)); telemetrySender.updateSubscriptionResult(subscription, time.milliseconds()); assertEquals(20000, telemetrySender.timeToNextUpdate(100), 200); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.SUBSCRIPTION_IN_PROGRESS)); assertEquals(100, telemetrySender.timeToNextUpdate(100)); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.PUSH_NEEDED)); long time = telemetrySender.timeToNextUpdate(100); assertTrue(time > 0 && time >= 0.5 * time && time <= 1.5 * time); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.PUSH_IN_PROGRESS)); assertEquals(100, telemetrySender.timeToNextUpdate(100)); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.TERMINATING_PUSH_NEEDED)); assertEquals(0, telemetrySender.timeToNextUpdate(100)); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.TERMINATING_PUSH_IN_PROGRESS)); assertEquals(Long.MAX_VALUE, telemetrySender.timeToNextUpdate(100)); assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.TERMINATED)); assertThrows(IllegalStateException.class, () -> telemetrySender.timeToNextUpdate(100)); }
/**
 * Returns the distinct union of two arrays, preserving first-seen order
 * (left elements first, then unseen right elements). Null elements are kept.
 * Returns null when either input array is null.
 */
@Udf
public <T> List<T> union(
    @UdfParameter(description = "First array of values") final List<T> left,
    @UdfParameter(description = "Second array of values") final List<T> right) {
  if (left == null || right == null) {
    return null;
  }
  final Set<T> combined = Sets.newLinkedHashSet(left);
  combined.addAll(right);
  // Copy into a plain ArrayList rather than Arrays.asList(combined.toArray()):
  // this removes the unchecked cast (and its @SuppressWarnings) and returns a
  // normal resizable list instead of a fixed-size Arrays$ArrayList.
  return new java.util.ArrayList<>(combined);
}
@Test
public void shouldUnionArraysContainingNulls() {
    // Null elements are legal values and must survive the union, in first-seen order.
    final List<String> withNull = Arrays.asList(null, "bar");
    final List<String> plain = Arrays.asList("foo");

    final List<String> unioned = udf.union(withNull, plain);

    assertThat(unioned, contains(null, "bar", "foo"));
}
@PostMapping("/import") @RequiresPermissions("system:manager:importConfig") public ShenyuAdminResult importConfigs(final MultipartFile file) { if (Objects.isNull(file)) { return ShenyuAdminResult.error(ShenyuResultMessage.PARAMETER_ERROR); } try { ShenyuAdminResult importResult = configsService.configsImport(file.getBytes()); if (Objects.equals(CommonErrorCode.SUCCESSFUL, importResult.getCode())) { // sync data syncDataService.syncAll(DataEventTypeEnum.REFRESH); } return importResult; } catch (IOException e) { LOG.error("parsing data failed", e); return ShenyuAdminResult.error(ShenyuResultMessage.PARAMETER_ERROR); } }
// End-to-end controller test: posting an (empty) zip archive to /configs/import must
// return HTTP 200 with the SUCCESS message. The service layer is mocked to report a
// successful import, which also implicitly exercises the sync-on-success branch.
@Test public void testImportConfigs() throws Exception { // mock import data List<ZipUtil.ZipItem> zipItemList = Lists.newArrayList(); when(this.configsService.configsImport(any())).thenReturn( ShenyuAdminResult.success(ShenyuResultMessage.SUCCESS)); // mock file MockMultipartFile file = new MockMultipartFile("file", "test.zip", MediaType.TEXT_PLAIN_VALUE, ZipUtil.zip(zipItemList)); // Run the test final MockHttpServletResponse response = mockMvc.perform(multipart("/configs/import") .file(file) .accept(MediaType.APPLICATION_JSON)) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.SUCCESS))) .andExpect(status().isOk()) .andReturn().getResponse(); // Verify the results assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value()); }
/**
 * Returns a new ApplicationStatus whose transition history is extended with the
 * given state; the attempt summaries are carried over unchanged (this object is
 * not mutated).
 */
public ApplicationStatus appendNewState(ApplicationState state) { return new ApplicationStatus( state, createUpdatedHistoryWithNewState(state), previousAttemptSummary, currentAttemptSummary); }
@Test
void testAppendNewState() {
    // Appending to a fresh status must grow the history to two entries,
    // with the new state recorded under key 1.
    ApplicationStatus initialStatus = new ApplicationStatus();
    ApplicationState appended = new ApplicationState(ApplicationStateSummary.RunningHealthy, "foo");

    ApplicationStatus updatedStatus = initialStatus.appendNewState(appended);

    assertEquals(2, updatedStatus.getStateTransitionHistory().size());
    assertEquals(appended, updatedStatus.getStateTransitionHistory().get(1L));
}
/**
 * Decides whether messages from the given player may be filtered. The local
 * player is never filtered; friends, friends-chat members and clan-chat members
 * are exempt unless the corresponding config flag allows filtering them.
 */
boolean canFilterPlayer(String playerName) {
    // Never filter our own messages.
    if (playerName.equals(client.getLocalPlayer().getName())) {
        return false;
    }
    // Each relationship only exempts the player when its filter flag is off.
    if (!config.filterFriends() && client.isFriended(playerName, false)) {
        return false;
    }
    if (!config.filterFriendsChat() && isFriendsChatMember(playerName)) {
        return false;
    }
    if (!config.filterClanChat() && isClanChatMember(playerName)) {
        return false;
    }
    return true;
}
@Test
public void testMessageFromFriendIsNotFiltered() {
    // With friend-filtering disabled, a friended player must be exempt.
    when(chatFilterConfig.filterFriends()).thenReturn(false);
    when(client.isFriended("Iron Mammal", false)).thenReturn(true);

    assertFalse(chatFilterPlugin.canFilterPlayer("Iron Mammal"));
}
/**
 * Runs INFO ALL against the given cluster node and repackages the returned
 * key/value pairs into a {@link Properties} object.
 */
@Override
public Properties info(RedisClusterNode node) {
    Properties properties = new Properties();
    // Map<String, String> entries map 1:1 onto Properties entries.
    Map<String, String> rawInfo = execute(node, RedisCommands.INFO_ALL);
    rawInfo.forEach(properties::setProperty);
    return properties;
}
@Test
public void testInfo() {
    // INFO on a master node must return a reasonably populated property set.
    RedisClusterNode firstMaster = getFirstMaster();
    Properties nodeInfo = connection.info(firstMaster);
    assertThat(nodeInfo.size()).isGreaterThan(10);
}