focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Formats an expression into its string form using default options that escape nothing.
 *
 * @param expression the expression to format; must not be null
 * @return the formatted expression text
 */
public static String formatExpression(final Expression expression) {
    // Delegate to the two-arg overload; the predicate marks no identifier as reserved.
    return formatExpression(expression, FormatOptions.of(s -> false));
}
// Verifies a single-argument COUNT call formats its string literal argument with single quotes.
// NOTE(review): the test name mentions DISTINCT but no distinct flag is exercised — confirm intent.
@Test public void shouldFormatFunctionWithDistinct() { final FunctionCall functionCall = new FunctionCall( FunctionName.of("COUNT"), Collections.singletonList(new StringLiteral("name"))); assertThat(ExpressionFormatter.formatExpression(functionCall), equalTo("COUNT('name')")); }
// Reports whether this resource connection has been closed; the connection field is
// nulled out on close, so a null connection is the closed state.
public boolean isClosed() { return connection == null; }
// Verifies isClosed() is false after a successful fetch and true after close(), using a real URL fetch.
@Test public void testIsClosed() throws Exception { HttpResourceConnection resource = null; try { URL url = new URL(getSettings().getString(Settings.KEYS.ENGINE_VERSION_CHECK_URL)); resource = new HttpResourceConnection(getSettings()); resource.fetch(url); assertFalse(resource.isClosed()); } finally { if (resource != null) { resource.close(); assertTrue(resource.isClosed()); } } }
/**
 * Computes the nearest future scheduled time at or after the given timestamp.
 *
 * @param t epoch milliseconds to round up from
 * @return a calendar positioned at the next matching time
 */
public Calendar ceil(long t) {
    // Wrap the epoch-millis timestamp in a US-locale calendar and defer to ceil(Calendar).
    final Calendar when = new GregorianCalendar(Locale.US);
    when.setTimeInMillis(t);
    return ceil(when);
}
// Regression test for JENKINS-12357: DoW=7 (non-standard Sunday) must not cause ceil() to loop forever.
// The timeout guards against the historical infinite-loop failure mode.
@Test(timeout = 1000) @Issue("JENKINS-12357") public void testCeil3_DoW7() throws Exception { // similar to testCeil3, but DoW=7 may stuck in an infinite loop CronTab x = new CronTab("0 0 1 * 7"); Calendar c = new GregorianCalendar(2010, Calendar.JANUARY, 1, 15, 55); // the first such day in 2010 is Aug 1st compare(new GregorianCalendar(2010, Calendar.AUGUST, 1, 0, 0), x.ceil(c)); }
// Starts all worker subsystems. NOTE(review): start order appears deliberate (config before
// monitoring before the harness) — confirm before reordering.
@SuppressWarnings("FutureReturnValueIgnored") public void start() { running.set(true); configFetcher.start(); memoryMonitor.start(); streamingWorkerHarness.start(); sampler.start(); workerStatusReporter.start(); activeWorkRefresher.start(); }
// Verifies that exceeding the configured max output value size throws once, and that reprocessing
// with a smaller value then commits successfully.
@Test public void testOutputValueTooLargeException() throws Exception { KvCoder<String, String> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()); List<ParallelInstruction> instructions = Arrays.asList( makeSourceInstruction(kvCoder), makeDoFnInstruction(new ExceptionCatchingFn(), 0, kvCoder), makeSinkInstruction(kvCoder, 1)); server.setExpectedExceptionCount(1); StreamingDataflowWorker worker = makeWorker( defaultWorkerParams() .setInstructions(instructions) .setOperationalLimits( OperationalLimits.builder() .setMaxOutputValueBytes(15) .setThrowExceptionOnLargeOutput(true) .build()) .build()); worker.start(); // The first time processing will have value "data1_a_bunch_more_data_output", which is above // the limit. After throwing the exception, the output should be just "data1", which is small // enough. server.whenGetWorkCalled().thenReturn(makeInput(1, 0, "key", DEFAULT_SHARDING_KEY)); server.waitForEmptyWorkQueue(); Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1); assertEquals(1, result.size()); assertEquals( makeExpectedOutput(1, 0, "key", DEFAULT_SHARDING_KEY, "smaller_key").build(), removeDynamicFields(result.get(1L))); }
// Renders the metric descriptor; the boolean argument selects the full/verbose form
// of buildMetricString (semantics of the flag are defined by that helper).
@Override public String toString() { return buildMetricString(true); }
// Verifies an empty descriptor still renders the excludedTargets section in toString().
@Test public void testToString_emptyWithExcludedTargets() { MetricDescriptorImpl descriptor = new MetricDescriptorImpl(supplier); assertEquals("[excludedTargets={}]", descriptor.toString()); }
/**
 * Unwraps a single ASN.1 TLV structure, returning the raw value bytes.
 *
 * @param raw a DER/BER-encoded tag-length-value byte array
 * @return the value portion, with tag and length stripped
 */
public static byte[] getValue(byte[] raw) {
    // Consume the tag, then read exactly the number of bytes announced by the length field.
    try (final Asn1InputStream stream = new Asn1InputStream(raw)) {
        stream.readTag();
        final int length = stream.readLength();
        return stream.read(length);
    }
}
// Verifies a truncated/continuation-tagged ASN.1 structure is rejected with Asn1Exception.
@Test public void shouldThrowExceptionIfStructureKeepsContinueTag() { thrown.expect(Asn1Exception.class); Asn1Utils.getValue(new byte[] { 0x7f, (byte) 0x81, (byte) 0x82 }); }
// Builds a human-readable summary header (type, version, serial, subject, validity window)
// for the wrapped X.509 certificate, one localized "label: value" line per field.
// NOTE(review): getSubjectDN() is deprecated in favor of getSubjectX500Principal(); switching
// may change the rendered name format, so confirm output expectations before migrating.
public String generateHeader() { StringBuilder builder = new StringBuilder(); append(builder, NLS.str("certificate.cert_type"), x509cert.getType()); append(builder, NLS.str("certificate.serialSigVer"), ((Integer) x509cert.getVersion()).toString()); // serial number append(builder, NLS.str("certificate.serialNumber"), "0x" + x509cert.getSerialNumber().toString(16)); // Get subject Principal subjectDN = x509cert.getSubjectDN(); append(builder, NLS.str("certificate.cert_subject"), subjectDN.getName()); append(builder, NLS.str("certificate.serialValidFrom"), x509cert.getNotBefore().toString()); append(builder, NLS.str("certificate.serialValidUntil"), x509cert.getNotAfter().toString()); return builder.toString(); }
// Verifies the generated header for an RSA test certificate contains the type, serial and subject DN.
@Test public void decodeRSAKeyHeader() { assertThat(certificateManagerRSA.generateHeader()) .contains("X.509") .contains("0x4bd68052") .contains("CN=test cert, OU=test unit, O=OOO TestOrg, L=St.Peterburg, ST=Russia, C=123456"); }
// Lock-free lookup-or-create of the attribute for the given key. Retries a CAS loop over the
// sorted copy-on-write attributes array: an existing live attribute is returned directly; a
// removed one is replaced in place; a missing one is inserted in key order. The new attribute
// instance is reused across retries to avoid re-allocating on CAS failure.
@SuppressWarnings("unchecked") @Override public <T> Attribute<T> attr(AttributeKey<T> key) { ObjectUtil.checkNotNull(key, "key"); DefaultAttribute newAttribute = null; for (;;) { final DefaultAttribute[] attributes = this.attributes; final int index = searchAttributeByKey(attributes, key); final DefaultAttribute[] newAttributes; if (index >= 0) { final DefaultAttribute attribute = attributes[index]; assert attribute.key() == key; if (!attribute.isRemoved()) { return attribute; } // let's try replace the removed attribute with a new one if (newAttribute == null) { newAttribute = new DefaultAttribute<T>(this, key); } final int count = attributes.length; newAttributes = Arrays.copyOf(attributes, count); newAttributes[index] = newAttribute; } else { if (newAttribute == null) { newAttribute = new DefaultAttribute<T>(this, key); } final int count = attributes.length; newAttributes = new DefaultAttribute[count + 1]; orderedCopyOnInsert(attributes, count, newAttributes, newAttribute); } if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) { return newAttribute; } } }
// Verifies getAndSet(null) returns the prior value and that a subsequent attr() lookup
// for the same key returns the same (still-live) attribute instance.
@Test public void testGetAndSetWithNull() { AttributeKey<Integer> key = AttributeKey.valueOf("key"); Attribute<Integer> attr = map.attr(key); attr.set(1); assertSame(1, attr.getAndSet(null)); Attribute<Integer> attr2 = map.attr(key); attr2.set(2); assertSame(2, attr2.get()); assertSame(attr, attr2); }
/**
 * Resolves a namespace prefix to its URI.
 *
 * @param prefix the prefix to resolve; must not be null
 * @return the XML Schema namespace URI for the schema prefix, otherwise null
 * @throws IllegalArgumentException if the prefix is null
 */
@Override
public String getNamespaceURI(String prefix) {
    if (prefix == null) {
        throw new IllegalArgumentException("The prefix cannot be null.");
    }
    // Only the XML Schema prefix is registered; everything else is unknown.
    return Constants.XML_SCHEMA_NAMESPACE_PREFIX.equals(prefix)
            ? Constants.XML_SCHEMA_NAMESPACE_URI
            : null;
}
// Verifies the schema prefix resolves to the schema URI and unknown prefixes resolve to null.
@Test public void testSchemaNamespace() { assertEquals(Constants.XML_SCHEMA_NAMESPACE_URI, camelSpringNamespace.getNamespaceURI(Constants.XML_SCHEMA_NAMESPACE_PREFIX)); assertNull(camelSpringNamespace.getNamespaceURI("unregisterdPrefix")); }
/**
 * Converts a data table to the requested type without transposing it.
 *
 * @param dataTable the table to convert
 * @param type      the target type
 * @return the converted value
 */
@Override
public <T> T convert(DataTable dataTable, Type type) {
    // Delegate to the three-arg overload with transposed = false.
    return convert(dataTable, type, false);
}
// Verifies a header-less table converts to Map<String, List<Date>> via the registered
// default cell transformer (Jackson by-type conversion).
@Test void convert_to_map_of_primitive_to_list_of_object__with_default_converter() throws ParseException { DataTable table = parse("", " | Annie M. G. | 1995-03-21 | 1911-03-20 |", " | Roald | 1990-09-13 | 1916-09-13 |", " | Astrid | 1907-10-14 | 1907-11-14 |"); Map<String, List<Date>> expected = new HashMap<String, List<Date>>() { { put("Annie M. G.", asList(SIMPLE_DATE_FORMAT.parse("1995-03-21"), SIMPLE_DATE_FORMAT.parse("1911-03-20"))); put("Roald", asList(SIMPLE_DATE_FORMAT.parse("1990-09-13"), SIMPLE_DATE_FORMAT.parse("1916-09-13"))); put("Astrid", asList(SIMPLE_DATE_FORMAT.parse("1907-10-14"), SIMPLE_DATE_FORMAT.parse("1907-11-14"))); } }; registry.setDefaultDataTableCellTransformer(JACKSON_TABLE_CELL_BY_TYPE_CONVERTER); assertEquals(expected, converter.convert(table, MAP_OF_STRING_TO_LIST_OF_DATE)); }
/**
 * Creates a native reader for the given cloud spec.
 *
 * <p>The coder and execution context are required at runtime despite their
 * {@code @Nullable} declarations; both are validated before delegation.
 *
 * @throws Exception if the underlying reader cannot be created
 */
@Override
public NativeReader<?> create(
    CloudObject spec,
    @Nullable Coder<?> coder,
    @Nullable PipelineOptions options,
    @Nullable DataflowExecutionContext executionContext,
    DataflowOperationContext operationContext)
    throws Exception {
  // Validate into locals rather than reassigning parameters.
  final Coder<?> requiredCoder = checkArgumentNotNull(coder);
  final DataflowExecutionContext requiredContext = checkArgumentNotNull(executionContext);
  return createImpl(spec, requiredCoder, options, requiredContext, operationContext);
}
// Verifies the factory produces an IsmReader wired with the supplied value coder and resource id.
@Test public void testFactory() throws Exception { WindowedValueCoder<?> coder = WindowedValue.getFullCoder( IsmRecordCoder.of( 1, 0, ImmutableList.<Coder<?>>of(StringUtf8Coder.of()), VarLongCoder.of()), GlobalWindow.Coder.INSTANCE); String tmpFile = tmpFolder.newFile().getPath(); ResourceId tmpResourceId = FileSystems.matchSingleFileSpec(tmpFile).resourceId(); @SuppressWarnings("rawtypes") IsmReader<?> ismReader = (IsmReader) new IsmReaderFactory() .create( createSpecForFilename(tmpFile), coder, options, executionContext, operationContext); assertEquals(coder.getValueCoder(), ismReader.getCoder()); assertEquals(tmpResourceId, ismReader.getResourceId()); }
/**
 * Extracts the named attribute from the target object.
 *
 * @return the extracted value, delegating to the four-arg overload with
 *         the final flag enabled (its meaning is defined by that overload)
 */
public Object extract(Object target, String attributeName, Object metadata) {
    // Delegate with the default boolean flag set to true.
    return extract(target, attributeName, metadata, true);
}
// Verifies extracting with all-null parameters yields null rather than throwing.
@Test public void when_extractWithNullParams_then_nullValue() { // WHEN Object power = createExtractors(null).extract(null, null, null); // THEN assertNull(power); }
/**
 * Validates that the value is a non-empty String.
 *
 * @param value the candidate value (may be null or a non-String)
 * @return ValidationPassed for a non-empty String, ValidationFailed otherwise
 */
@Override
public ValidationResult validate(Object value) {
    // instanceof is null-safe, so the original's explicit "value != null &&" was redundant.
    if (value instanceof String && !((String) value).isEmpty()) {
        return new ValidationResult.ValidationPassed();
    }
    return new ValidationResult.ValidationFailed("Value \"" + value + "\" is not a valid non-empty String!");
}
// Covers null, non-String, empty-String (literal and constructed) failures, and a valid String pass.
@Test public void testValidate() throws Exception { Validator v = new FilledStringValidator(); assertFalse(v.validate(null).passed()); assertFalse(v.validate(534).passed()); assertFalse(v.validate("").passed()); assertFalse(v.validate(new String()).passed()); assertTrue(v.validate("so valid").passed()); }
// Retries cgroup deletion (the kernel rejects deletion while tasks remain) with a fixed delay
// between attempts until success or deleteCgroupTimeout elapses; logs a warning on failure.
// NOTE(review): InterruptedException is swallowed without restoring the interrupt flag —
// confirm whether Thread.currentThread().interrupt() plus an early exit is intended here.
@VisibleForTesting boolean deleteCgroup(String cgroupPath) { boolean deleted = false; LOG.debug("deleteCgroup: {}", cgroupPath); long start = clock.getTime(); do { try { deleted = checkAndDeleteCgroup(new File(cgroupPath)); if (!deleted) { Thread.sleep(deleteCgroupDelay); } } catch (InterruptedException ex) { // NOP } } while (!deleted && (clock.getTime() - start) < deleteCgroupTimeout); if (!deleted) { LOG.warn("Unable to delete cgroup at: " + cgroupPath + ", tried to delete for " + deleteCgroupTimeout + "ms"); } return deleted; }
// Verifies deletion fails (returns false) for a cgroup whose tasks file is non-empty, using a
// controlled clock advanced from a helper thread to expire the retry timeout deterministically.
@Test public void testDeleteCgroup() throws Exception { final ControlledClock clock = new ControlledClock(); CgroupsLCEResourcesHandler handler = new CgroupsLCEResourcesHandler(); handler.setConf(new YarnConfiguration()); handler.initConfig(); handler.clock = clock; FileUtils.deleteQuietly(cgroupDir); // Create a non-empty tasks file File tfile = new File(cgroupDir.getAbsolutePath(), "tasks"); FileOutputStream fos = FileUtils.openOutputStream(tfile); fos.write("1234".getBytes()); fos.close(); final CountDownLatch latch = new CountDownLatch(1); new Thread() { @Override public void run() { latch.countDown(); try { Thread.sleep(200); } catch (InterruptedException ex) { //NOP } clock.tickMsec(YarnConfiguration. DEFAULT_NM_LINUX_CONTAINER_CGROUPS_DELETE_TIMEOUT); } }.start(); latch.await(); Assert.assertFalse(handler.deleteCgroup(cgroupDir.getAbsolutePath())); FileUtils.deleteQuietly(cgroupDir); }
/**
 * Evaluates the supplier only when the derived class overrides the named method.
 *
 * @param supplier   produces the result when the override exists
 * @param base       the declaring base class
 * @param derived    the concrete class expected to override the method
 * @param methodName the method to look for
 * @param types      the method's parameter types
 * @return the supplier's result
 * @throws AbstractMethodError if the derived class does not override the method
 */
public static <T> T ifOverridden(Supplier<T> supplier, @NonNull Class<?> base, @NonNull Class<?> derived, @NonNull String methodName, @NonNull Class<?>... types) {
    // Guard clause: refuse to evaluate the supplier unless the override is present.
    if (!isOverridden(base, derived, methodName, types)) {
        throw new AbstractMethodError("The class " + derived.getName() + " must override at least one of the "
                + base.getSimpleName() + "." + methodName + " methods");
    }
    return supplier.get();
}
// Verifies AbstractMethodError (with the expected message) when the derived class lacks the override.
@Test public void ifOverriddenFailure() { AbstractMethodError error = Assert.assertThrows(AbstractMethodError.class, () -> Util.ifOverridden(() -> true, BaseClass.class, DerivedClassFailure.class, "method")); assertEquals("The class " + DerivedClassFailure.class.getName() + " must override at least one of the BaseClass.method methods", error.getMessage()); }
/**
 * Asserts the given expression is false.
 *
 * @param expression   the condition that must be false
 * @param errorMessage message for the exception when the condition is true
 * @throws IllegalArgumentException if the expression is true
 */
public static void checkFalse(boolean expression, String errorMessage) {
    // A false expression passes silently; a true one is rejected.
    if (!expression) {
        return;
    }
    throw new IllegalArgumentException(errorMessage);
}
// Verifies checkFalse(true, msg) throws IllegalArgumentException carrying the exact message instance.
@Test public void test_checkFalse_whenTrue() { String errorMessage = "foobar"; try { checkFalse(true, errorMessage); fail(); } catch (IllegalArgumentException e) { assertSame(errorMessage, e.getMessage()); } }
/**
 * Coerces a JSON node to a boolean.
 *
 * @param object the node to convert
 * @return the boolean value when the node is a BooleanNode
 * @throws IllegalArgumentException (via invalidConversionException) for any other node type
 */
static boolean toBoolean(final JsonNode object) {
    // Reject everything that is not an actual JSON boolean.
    if (!(object instanceof BooleanNode)) {
        throw invalidConversionException(object, SqlBaseType.BOOLEAN);
    }
    return object.booleanValue();
}
// Verifies a numeric JSON node cannot be coerced to boolean.
@Test(expected = IllegalArgumentException.class) public void shouldFailWhenConvertingNonBooleanToBoolean() { JsonSerdeUtils.toBoolean(JsonNodeFactory.instance.numberNode(1)); }
// Streams every stored post from the in-memory map; emission order follows the map's value order.
Flux<Post> findAll() { return Flux.fromIterable(data.values()); }
// Verifies findAll() emits the two seeded posts in order and then completes.
@Test public void testGetAllPosts() { StepVerifier.create(posts.findAll()) .consumeNextWith(p -> assertTrue(p.getTitle().equals("post one"))) .consumeNextWith(p -> assertTrue(p.getTitle().equals("post two"))) .expectComplete() .verify(); }
// Convenience overload: runs the block with catch/finally handlers, declaring all three
// exception slots as RuntimeException so callers need not handle checked exceptions.
public static <R> R runSafely( Block<R, RuntimeException, RuntimeException, RuntimeException> block, CatchBlock catchBlock, FinallyBlock finallyBlock) { return runSafely( block, catchBlock, finallyBlock, RuntimeException.class, RuntimeException.class, RuntimeException.class); }
// Verifies that when block, catch and finally all throw, the block's exception propagates
// and the catch/finally exceptions are attached as suppressed, in order.
@Test public void testRunSafelyThreeExceptions() { CustomCheckedException exc = new CustomCheckedException("test"); Exception suppressedOne = new Exception("test catch suppression"); RuntimeException suppressedTwo = new RuntimeException("test finally suppression"); assertThatThrownBy( () -> ExceptionUtil.runSafely( (ExceptionUtil.Block< Void, CustomCheckedException, IOException, ClassNotFoundException>) () -> { throw exc; }, e -> { throw suppressedOne; }, () -> { throw suppressedTwo; }, CustomCheckedException.class, IOException.class, ClassNotFoundException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) .extracting(e -> Arrays.asList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); }
/**
 * Fetches every member (direct and inherited) of a GitLab project.
 *
 * @param gitlabUrl base URL of the GitLab instance
 * @param token     access token used for authentication
 * @param projectId numeric project id
 * @return an immutable set of the project's members
 */
public Set<GsonProjectMember> getAllProjectMembers(String gitlabUrl, String token, long projectId) {
    // Walk every page of the members/all endpoint, then snapshot into an immutable set.
    String membersUrl = format("/projects/%s/members/all", projectId);
    var members = executePaginatedQuery(gitlabUrl, token, membersUrl, resp -> GSON.fromJson(resp, GITLAB_PROJECT_MEMBER));
    return Set.copyOf(members);
}
// Verifies the members/all endpoint is queried with the expected path and token, and that the
// captured deserializer correctly parses the recorded JSON fixture.
@Test public void getAllProjectMembers_whenCallIsSuccesfull_deserializesAndReturnsCorrectlyProjectsMembers() throws IOException { ArgumentCaptor<Function<String, List<GsonProjectMember>>> deserializerCaptor = ArgumentCaptor.forClass(Function.class); String token = "token-toto"; GitlabToken gitlabToken = new GitlabToken(token); List<GsonProjectMember> expectedProjectMembers = expectedProjectMembers(); when(gitlabPaginatedHttpClient.get(eq(gitlabUrl), eq(gitlabToken), eq("/projects/42/members/all"), deserializerCaptor.capture())).thenReturn(expectedProjectMembers); Set<GsonProjectMember> actualProjectMembers = underTest.getAllProjectMembers(gitlabUrl, token, 42); assertThat(actualProjectMembers).containsExactlyInAnyOrderElementsOf(expectedProjectMembers); String responseContent = getResponseContent("project-members-full-response.json"); List<GsonProjectMember> deserializedProjectMembers = deserializerCaptor.getValue().apply(responseContent); assertThat(deserializedProjectMembers).isEqualTo(expectedProjectMembers); }
/**
 * Enforces admin-service access control when enabled: requests whose Authorization header
 * fails the token check are rejected with 401; everything else proceeds down the chain.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
    throws IOException, ServletException {
  // Access control disabled: pass straight through.
  if (!bizConfig.isAdminServiceAccessControlEnabled()) {
    chain.doFilter(req, resp);
    return;
  }
  HttpServletRequest request = (HttpServletRequest) req;
  HttpServletResponse response = (HttpServletResponse) resp;
  String token = request.getHeader(HttpHeaders.AUTHORIZATION);
  if (!checkAccessToken(token)) {
    logger.warn("Invalid access token: {} for uri: {}", token, request.getRequestURI());
    response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
    return;
  }
  chain.doFilter(req, resp);
}
// Verifies a valid token from a comma-separated multi-token config passes the filter
// without any 401 being sent.
@Test public void testWithAccessControlEnabledWithMultipleTokenSpecifiedWithValidTokenPassed() throws Exception { String someToken = "someToken"; String anotherToken = "anotherToken"; when(bizConfig.isAdminServiceAccessControlEnabled()).thenReturn(true); when(bizConfig.getAdminServiceAccessTokens()) .thenReturn(String.format("%s,%s", someToken, anotherToken)); when(servletRequest.getHeader(HttpHeaders.AUTHORIZATION)).thenReturn(someToken); authenticationFilter.doFilter(servletRequest, servletResponse, filterChain); verify(bizConfig, times(1)).isAdminServiceAccessControlEnabled(); verify(bizConfig, times(1)).getAdminServiceAccessTokens(); verify(filterChain, times(1)).doFilter(servletRequest, servletResponse); verify(servletResponse, never()).sendError(anyInt(), anyString()); }
// Renames a file over SFTP. When the destination already exists, OVERWRITE is requested
// alongside NATIVE; otherwise only NATIVE is used. IOExceptions are translated to
// backend-specific exceptions via SFTPExceptionMappingService.
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException { try { session.sftp().rename(file.getAbsolute(), renamed.getAbsolute(), status.isExists() ? new HashSet<>(Arrays.asList(RenameFlags.OVERWRITE, RenameFlags.NATIVE)) : Collections.singleton(RenameFlags.NATIVE)); // Copy original file attributes return renamed.withAttributes(file.attributes()); } catch(IOException e) { throw new SFTPExceptionMappingService().map("Cannot rename {0}", e, file); } }
// Verifies moving a non-existent file surfaces NotfoundException from the mapped SFTP error.
@Test(expected = NotfoundException.class) public void testMoveNotFound() throws Exception { final Path workdir = new SFTPHomeDirectoryService(session).find(); final Path test = new Path(workdir, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); new SFTPMoveFeature(session).move(test, new Path(workdir, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); }
// Appends length bytes from src (starting at srcIndex) at the current writer index,
// growing capacity first if needed, then advances writerIndex. Returns this for chaining.
@Override public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { ensureWritable(length); setBytes(writerIndex, src, srcIndex, length); writerIndex += length; return this; }
// Verifies writing from an InputStream into an already-released buffer throws
// IllegalReferenceCountException.
@Test public void testWriteBytesAfterRelease7() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() throws IOException { releasedBuffer().writeBytes(new ByteArrayInputStream(new byte[8]), 1); } }); }
// Deletion is a no-op for this implementation: the backing bloom filter cannot remove
// entries, so this method always reports failure.
@Override public boolean delete(PageId pageId) { // since bloom filter does not support deleting, always return false here return false; }
// Verifies delete() reports false and leaves the page readable and counted in the shadow cache.
@Test public void delete() throws Exception { assertTrue(mCacheManager.put(PAGE_ID1, PAGE1_BYTES, SCOPE1)); assertEquals(PAGE1_BYTES, mCacheManager.get(PAGE_ID1, PAGE1_BYTES, SCOPE1)); assertFalse(mCacheManager.delete(PAGE_ID1)); assertEquals(PAGE1_BYTES, mCacheManager.get(PAGE_ID1, PAGE1_BYTES, SCOPE1)); mCacheManager.updateWorkingSetSize(); // bloom filter does not support deleting, so PAGE_ID1 is not deleted actually assertEquals(1, mCacheManager.getShadowCachePages()); assertEquals(mCacheManager.getShadowCacheBytes(), PAGE1_BYTES); }
// Expands a plugin zip into a sibling directory named after the file (extension stripped).
// Non-zip paths are returned unchanged. Extraction is skipped when an up-to-date expanded
// directory already exists (compared by last-modified time).
// NOTE(review): lastIndexOf(".") assumes isZipFile() guarantees an extension — confirm.
public static Path expandIfZip(Path filePath) throws IOException { if (!isZipFile(filePath)) { return filePath; } FileTime pluginZipDate = Files.getLastModifiedTime(filePath); String fileName = filePath.getFileName().toString(); String directoryName = fileName.substring(0, fileName.lastIndexOf(".")); Path pluginDirectory = filePath.resolveSibling(directoryName); if (!Files.exists(pluginDirectory) || pluginZipDate.compareTo(Files.getLastModifiedTime(pluginDirectory)) > 0) { // expand '.zip' file Unzip unzip = new Unzip(); unzip.setSource(filePath.toFile()); unzip.setDestination(pluginDirectory.toFile()); unzip.extract(); log.info("Expanded plugin zip '{}' in '{}'", filePath.getFileName(), pluginDirectory.getFileName()); } return pluginDirectory; }
// Verifies a zip containing a nested resource file expands to the expected directory
// and that the resource survives extraction.
@Test public void expandIfZipForZipWithResourceFile() throws Exception { PluginZip pluginZip = new PluginZip.Builder(pluginsPath.resolve("my-second-plugin-1.2.3.zip"), "myPlugin") .pluginVersion("1.2.3") .addFile(Paths.get("classes/META-INF/plugin-file"), "plugin") .build(); Path unzipped = FileUtils.expandIfZip(pluginZip.path()); assertEquals(pluginZip.unzippedPath(), unzipped); assertTrue(Files.exists(unzipped.resolve("classes/META-INF/plugin-file"))); }
protected TransMeta processLinkedJobs( TransMeta transMeta ) { for ( StepMeta stepMeta : transMeta.getSteps() ) { if ( stepMeta.getStepID().equalsIgnoreCase( "JobExecutor" ) ) { JobExecutorMeta jem = (JobExecutorMeta) stepMeta.getStepMetaInterface(); ObjectLocationSpecificationMethod specMethod = jem.getSpecificationMethod(); // If the reference is by filename, change it to Repository By Name. Otherwise it's fine so leave it alone if ( specMethod == ObjectLocationSpecificationMethod.FILENAME ) { jem.setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME ); String filename = jem.getFileName(); String jobname = filename.substring( filename.lastIndexOf( "/" ) + 1, filename.lastIndexOf( '.' ) ); String directory = filename.substring( 0, filename.lastIndexOf( "/" ) ); jem.setJobName( jobname ); jem.setDirectoryPath( directory ); } } } return transMeta; }
// Verifies a JobExecutor step already referenced by repository name (with a null filename)
// passes through processLinkedJobs unchanged.
@Test public void testProcessLinkedJobsWithNoFilename() { JobExecutorMeta jobExecutorMeta = spy( new JobExecutorMeta() ); jobExecutorMeta.setFileName( null ); jobExecutorMeta.setDirectoryPath( "/path/to" ); jobExecutorMeta.setJobName( "Job1" ); jobExecutorMeta.setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME ); StepMeta transExecutorStep = mock( StepMeta.class ); when( transExecutorStep.getStepID() ).thenReturn( "JobExecutor" ); when( transExecutorStep.getStepMetaInterface() ).thenReturn( jobExecutorMeta ); TransMeta parent = mock( TransMeta.class ); when( parent.getSteps() ).thenReturn( Arrays.asList( transExecutorStep ) ); TransMeta result = transFileListener.processLinkedJobs( parent ); boolean found = false; for ( StepMeta stepMeta : result.getSteps() ) { if ( stepMeta.getStepID().equalsIgnoreCase( "JobExecutor" ) ) { found = true; JobExecutorMeta resultExecMeta = (JobExecutorMeta) stepMeta.getStepMetaInterface(); assertEquals( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, resultExecMeta.getSpecificationMethod() ); assertEquals( resultExecMeta.getDirectoryPath(), "/path/to" ); assertEquals( resultExecMeta.getJobName(), "Job1" ); } } assertTrue( found ); }
// Shuts down the monitoring source: clears the issued instant and running flag under the
// checkpoint lock (when one exists) so an in-flight checkpoint observes a consistent state.
@Override public void close() throws Exception { super.close(); if (checkpointLock != null) { synchronized (checkpointLock) { issuedInstant = null; isRunning = false; } } if (LOG.isDebugEnabled()) { LOG.debug("Closed File Monitoring Source for path: " + path + "."); } }
// Verifies that with READ_START_COMMIT set to the second commit, all generated splits
// carry an instant range and reference only that commit.
@Test public void testConsumeFromSpecifiedCommit() throws Exception { // write 2 commits first, use the second commit time as the specified start instant, // all the splits should come from the second commit. TestData.writeData(TestData.DATA_SET_INSERT, conf); TestData.writeData(TestData.DATA_SET_UPDATE_INSERT, conf); String specifiedCommit = TestUtils.getLastCompleteInstant(tempFile.getAbsolutePath()); conf.setString(FlinkOptions.READ_START_COMMIT, specifiedCommit); StreamReadMonitoringFunction function = TestUtils.getMonitorFunc(conf); try (AbstractStreamOperatorTestHarness<MergeOnReadInputSplit> harness = createHarness(function)) { harness.setup(); harness.open(); CountDownLatch latch = new CountDownLatch(4); CollectingSourceContext sourceContext = new CollectingSourceContext(latch); runAsync(sourceContext, function); assertTrue(latch.await(WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS), "Should finish splits generation"); assertThat("Should produce the expected splits", sourceContext.getPartitionPaths(), is("par1,par2,par3,par4")); assertTrue(sourceContext.splits.stream().allMatch(split -> split.getInstantRange().isPresent()), "All the instants should have range limit"); assertTrue(sourceContext.splits.stream().allMatch(split -> split.getLatestCommit().equals(specifiedCommit)), "All the splits should be with specified instant time"); // Stop the stream task. function.close(); } }
/**
 * Extracts the service-name part from a "group@@serviceName" composite.
 *
 * @param serviceNameWithGroup the composite name; may be null or blank
 * @return the service name; the input unchanged when it has no splitter;
 *         empty string for blank input or an empty service part
 */
public static String getServiceName(final String serviceNameWithGroup) {
    if (StringUtils.isBlank(serviceNameWithGroup)) {
        return StringUtils.EMPTY;
    }
    if (!serviceNameWithGroup.contains(Constants.SERVICE_INFO_SPLITER)) {
        return serviceNameWithGroup;
    }
    String[] parts = serviceNameWithGroup.split(Constants.SERVICE_INFO_SPLITER);
    // Guard: for inputs like "group@@" split() yields a single element, so the original
    // parts[1] access threw ArrayIndexOutOfBoundsException; treat that as an empty name.
    return parts.length > 1 ? parts[1] : StringUtils.EMPTY;
}
// Verifies null input maps to the empty string.
@Test void testGetServiceNameWithEmpty() { assertEquals(StringUtils.EMPTY, NamingUtils.getServiceName(null)); }
/**
 * Escapes literal CR and LF characters in the event's formatted message so a single
 * log event always occupies a single output line.
 *
 * @param event the logging event to render
 * @return the escaped message, or null when the event has no formatted message
 */
public String convert(ILoggingEvent event) {
    final String message = event.getFormattedMessage();
    // Guard clause: nothing to escape.
    if (message == null) {
        return null;
    }
    final String withoutCr = CR_PATTERN.matcher(message).replaceAll("\\\\r");
    return LF_PATTERN.matcher(withoutCr).replaceAll("\\\\n");
}
// Verifies carriage returns in the message are rendered as the two-character sequence "\r".
@Test public void convert_message_with_CR() { ILoggingEvent event = createILoggingEvent("simple\r message\r with\r CR\r"); assertThat(underTest.convert(event)).isEqualTo("simple\\r message\\r with\\r CR\\r"); }
public static URL socketToUrl(InetSocketAddress socketAddress) { String hostString = socketAddress.getHostString(); // If the hostString is an IPv6 address, it needs to be enclosed in square brackets // at the beginning and end. if (socketAddress.getAddress() != null && socketAddress.getAddress() instanceof Inet6Address && hostString.equals(socketAddress.getAddress().getHostAddress())) { hostString = "[" + hostString + "]"; } String hostPort = hostString + ":" + socketAddress.getPort(); return validateHostPortString(hostPort); }
// Verifies an IPv6 socket address converts to a URL with the bracketed literal preserved.
@Test void testIpv6SocketToUrl() throws MalformedURLException { InetSocketAddress socketAddress = new InetSocketAddress("[2001:1db8::ff00:42:8329]", 8080); URL expectedResult = new URL("http://[2001:1db8::ff00:42:8329]:8080"); assertThat(socketToUrl(socketAddress)).isEqualTo(expectedResult); }
/**
 * Renders a human-friendly relative time from an epoch-millis string using the app locale.
 *
 * @param timeInMillisec epoch milliseconds as a decimal string; may be null
 * @return the pretty-printed time, or an empty string for null input
 */
public static String prettyTime(String timeInMillisec) {
    // Null input yields an empty string rather than an exception.
    if (timeInMillisec == null) {
        return "";
    }
    final long millis = Long.parseLong(timeInMillisec);
    final Locale locale = OmniNotes.getAppContext().getResources().getConfiguration().locale;
    return prettyTime(millis, locale);
}
// Verifies relative phrasing for now/future offsets in English and Italian,
// and that null input produces an empty (non-null) string.
@Test public void prettyTime() { long now = Calendar.getInstance().getTimeInMillis(); String prettyTime = DateUtils.prettyTime(now, Locale.ENGLISH); Assert.assertEquals("moments ago", prettyTime.toLowerCase()); prettyTime = DateUtils.prettyTime(now + 10 * 60 * 1000, Locale.ENGLISH); Assert.assertEquals("10 minutes from now", prettyTime.toLowerCase()); prettyTime = DateUtils.prettyTime(now + 24 * 60 * 60 * 1000, Locale.ITALIAN); Assert.assertEquals("fra 24 ore", prettyTime.toLowerCase()); prettyTime = DateUtils.prettyTime(now + 25 * 60 * 60 * 1000, Locale.ITALIAN); Assert.assertEquals("fra 1 giorno", prettyTime.toLowerCase()); prettyTime = DateUtils.prettyTime(null, Locale.JAPANESE); Assert.assertNotNull(prettyTime.toLowerCase()); Assert.assertEquals(0, prettyTime.toLowerCase().length()); }
/**
 * Tallies the response's single top-level error code.
 *
 * @return a map from the error to its count (always one entry)
 */
@Override
public Map<Errors, Integer> errorCounts() {
    // A push-telemetry response carries exactly one error code.
    final Map<Errors, Integer> errorCounts = new HashMap<>();
    updateErrorCounts(errorCounts, Errors.forCode(data.errorCode()));
    return errorCounts;
}
// Verifies a NONE error code is still counted once in errorCounts().
@Test public void testErrorCountsReturnsNoneWhenNoErrors() { PushTelemetryResponseData data = new PushTelemetryResponseData() .setErrorCode(Errors.NONE.code()); PushTelemetryResponse response = new PushTelemetryResponse(data); assertEquals(Collections.singletonMap(Errors.NONE, 1), response.errorCounts()); }
/**
 * Builds the SubflowExecutionResult for a terminated subflow execution.
 *
 * <p>Outputs come from the subflow's declared outputs when present, otherwise from this
 * task's own outputs (unless disabled via the PLUGIN_FLOW_OUTPUTS_ENABLED plugin config).
 * A rendering failure marks the parent task run FAILED (or WARNING when failures are
 * allowed) and still reports a FAILED result state.
 *
 * @return empty while the execution is still running, otherwise the result to forward
 */
@Override
public Optional<SubflowExecutionResult> createSubflowExecutionResult(
        RunContext runContext,
        TaskRun taskRun,
        io.kestra.core.models.flows.Flow flow,
        Execution execution
) {
    // we only create a worker task result when the execution is terminated
    if (!taskRun.getState().isTerminated()) {
        return Optional.empty();
    }

    boolean isOutputsAllowed = runContext
        .<Boolean>pluginConfiguration(PLUGIN_FLOW_OUTPUTS_ENABLED)
        .orElse(true);

    final Output.OutputBuilder builder = Output.builder()
        .executionId(execution.getId())
        .state(execution.getState().getCurrent());

    // Prefer the subflow's declared outputs; fall back to this task's outputs when allowed.
    final Map<String, Object> subflowOutputs = Optional
        .ofNullable(flow.getOutputs())
        .map(outputs -> outputs
            .stream()
            .collect(Collectors.toMap(
                io.kestra.core.models.flows.Output::getId,
                io.kestra.core.models.flows.Output::getValue)
            )
        )
        .orElseGet(() -> isOutputsAllowed ? this.getOutputs() : null);

    if (subflowOutputs != null) {
        try {
            Map<String, Object> outputs = runContext.render(subflowOutputs);
            // this is hacking
            FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
            if (flow.getOutputs() != null && flowInputOutput != null) {
                outputs = flowInputOutput.typedOutputs(flow, execution, outputs);
            }
            builder.outputs(outputs);
        } catch (Exception e) {
            // Rendering failed: downgrade to WARNING when failures are allowed, else FAILED.
            runContext.logger().warn("Failed to extract outputs with the error: '{}'", e.getLocalizedMessage(), e);
            var state = this.isAllowFailure() ? State.Type.WARNING : State.Type.FAILED;
            taskRun = taskRun
                .withState(state)
                .withAttempts(Collections.singletonList(TaskRunAttempt.builder().state(new State().withState(state)).build()))
                .withOutputs(builder.build().toMap());

            return Optional.of(SubflowExecutionResult.builder()
                .executionId(execution.getId())
                .state(State.Type.FAILED)
                .parentTaskRun(taskRun)
                .build());
        }
    }

    taskRun = taskRun.withOutputs(builder.build().toMap());

    State.Type finalState = ExecutableUtils.guessState(execution, this.transmitFailed, this.isAllowFailure());
    if (taskRun.getState().getCurrent() != finalState) {
        taskRun = taskRun.withState(finalState);
    }

    return Optional.of(ExecutableUtils.subflowExecutionResult(taskRun, execution));
}
// Verifies that when the plugin-level flow-outputs flag is disabled, the parent task run's
// outputs map is empty and the attempt state history reflects a normal success lifecycle.
@SuppressWarnings("deprecation") @Test void shouldNotReturnOutputsForSubflowOutputsDisabled() { // Given Mockito.when(applicationContext.getProperty(Subflow.PLUGIN_FLOW_OUTPUTS_ENABLED, Boolean.class)) .thenReturn(Optional.of(false)); Map<String, Object> outputs = Map.of("key", "value"); Subflow subflow = Subflow.builder() .outputs(outputs) .build(); // When Optional<SubflowExecutionResult> result = subflow.createSubflowExecutionResult( runContext, TaskRun.builder().state(DEFAULT_SUCCESS_STATE).build(), Flow.builder().build(), Execution.builder().id(EXECUTION_ID).state(DEFAULT_SUCCESS_STATE).build() ); // Then assertTrue(result.isPresent()); Map<String, Object> expected = Subflow.Output.builder() .executionId(EXECUTION_ID) .state(DEFAULT_SUCCESS_STATE.getCurrent()) .outputs(Collections.emptyMap()) .build() .toMap(); assertThat(result.get().getParentTaskRun().getOutputs(), is(expected)); assertThat(result.get().getParentTaskRun().getAttempts().get(0).getState().getHistories(), Matchers.contains( hasProperty("state", is(State.Type.CREATED)), hasProperty("state", is(State.Type.RUNNING)), hasProperty("state", is(State.Type.SUCCESS)) )); }
@Override public DenseMatrix matrixMultiply(Matrix other) { if (dim2 == other.getDimension1Size()) { if (other instanceof DenseMatrix) { DenseMatrix otherDense = (DenseMatrix) other; double[][] output = new double[dim1][otherDense.dim2]; for (int i = 0; i < dim1; i++) { for (int j = 0; j < otherDense.dim2; j++) { output[i][j] = columnRowDot(i,j,otherDense); } } return new DenseMatrix(output); } else if (other instanceof DenseSparseMatrix) { DenseSparseMatrix otherSparse = (DenseSparseMatrix) other; int otherDim2 = otherSparse.getDimension2Size(); double[][] output = new double[dim1][otherDim2]; for (int i = 0; i < dim1; i++) { for (int j = 0; j < otherDim2; j++) { output[i][j] = columnRowDot(i,j,otherSparse); } } return new DenseMatrix(output); } else { throw new IllegalArgumentException("Unknown matrix type " + other.getClass().getName()); } } else { throw new IllegalArgumentException("Invalid matrix dimensions, this.shape=" + Arrays.toString(shape) + ", other.shape = " + Arrays.toString(other.getShape())); } }
// Exhaustively checks all nine pairwise products of three fixed square matrices
// against precomputed expected results.
@Test public void squareMatrixMultiplyTest() { DenseMatrix a = generateA(); DenseMatrix b = generateB(); DenseMatrix c = generateC(); DenseMatrix aa = generateAA(); DenseMatrix ab = generateAB(); DenseMatrix ac = generateAC(); assertEquals(aa,a.matrixMultiply(a)); assertEquals(ab,a.matrixMultiply(b)); assertEquals(ac,a.matrixMultiply(c)); DenseMatrix ba = generateBA(); DenseMatrix bb = generateBB(); DenseMatrix bc = generateBC(); assertEquals(ba,b.matrixMultiply(a)); assertEquals(bb,b.matrixMultiply(b)); assertEquals(bc,b.matrixMultiply(c)); DenseMatrix ca = generateCA(); DenseMatrix cb = generateCB(); DenseMatrix cc = generateCC(); assertEquals(ca,c.matrixMultiply(a)); assertEquals(cb,c.matrixMultiply(b)); assertEquals(cc,c.matrixMultiply(c)); }
// Performs one SQS poll cycle: resets per-poll state, fetches messages via the polling task,
// marks the consumer ready once AWS has responded, and hands the resulting exchanges to
// processBatch. Returns the number of exchanges processed.
@Override protected int poll() throws Exception { // must reset for each poll shutdownRunningTask = null; pendingExchanges = 0; List<software.amazon.awssdk.services.sqs.model.Message> messages = pollingTask.call(); // okay we have some response from aws so lets mark the consumer as ready forceConsumerAsReady(); Queue<Exchange> exchanges = createExchanges(messages); return processBatch(CastUtils.cast(exchanges)); }
/**
 * With sequence-number generation disabled, a single poll for 10 messages must issue
 * exactly one receive request and return the messages without sequence-based reordering.
 */
@Test
void shouldRequest10MessagesWithSingleReceiveRequestAndIgnoredSequenceNumberSorting() throws Exception {
    // given: 10 plain messages ("0".."9") queued in the mock client, no sequence numbers
    generateSequenceNumber = false;
    var expectedMessages = IntStream.range(0, 10).mapToObj(Integer::toString).toList();
    expectedMessages.stream().map(this::message).forEach(sqsClientMock::addMessage);
    try (var tested = createConsumer(10)) {
        // when
        var polledMessagesCount = tested.poll();
        // then: all 10 polled in original order, via one receive request, no extra queue ops
        assertThat(polledMessagesCount).isEqualTo(10);
        assertThat(receiveMessageBodies()).isEqualTo(expectedMessages);
        assertThat(sqsClientMock.getReceiveRequests()).containsExactlyInAnyOrder(expectedReceiveRequest(10));
        assertThat(sqsClientMock.getQueues()).isEmpty();
    }
}
/**
 * Tells whether layout version {@code lv} supports feature {@code f} according to the
 * given version-to-features map.
 *
 * @param map mapping from layout version to the features that version supports
 * @param f   the feature to look up
 * @param lv  the layout version to check
 * @return true if the version is known and contains the feature, false otherwise
 */
public static boolean supports(Map<Integer, SortedSet<LayoutFeature>> map, final LayoutFeature f, final int lv) {
    final SortedSet<LayoutFeature> features = map.get(lv);
    if (features == null) {
        // Unknown layout version: nothing is supported.
        return false;
    }
    return features.contains(f);
}
/**
 * Checks that the FIRST_LAYOUT datanode feature is supported by the common feature map
 * and that its layout version sits exactly one below the last common feature's version.
 */
@Test
public void testDataNodeFeature() {
    final LayoutFeature first = DataNodeLayoutVersion.Feature.FIRST_LAYOUT;
    assertTrue(DataNodeLayoutVersion.supports(LAST_NON_RESERVED_COMMON_FEATURE, first.getInfo().getLayoutVersion()));
    // Layout versions decrease as features are added, hence the "- 1".
    assertEquals(LAST_COMMON_FEATURE.getInfo().getLayoutVersion() - 1, first.getInfo().getLayoutVersion());
}
/**
 * Lists the given directory using the default path delimiter.
 *
 * @param directory the directory to list
 * @param listener  progress callback for the listing
 * @return the directory's children
 * @throws BackgroundException if the listing fails
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    // Delegate to the delimiter-aware overload with the standard separator.
    final String delimiter = String.valueOf(Path.DELIMITER);
    return this.list(directory, listener, delimiter);
}
/**
 * Creates a placeholder directory whose name ends with '@' in an eu-central-1 (AWS4
 * signature) bucket, verifies it lists as empty, then deletes it.
 */
@Test
public void testListPlaceholderAtSignSignatureAWS4() throws Exception {
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
    // Random name with a trailing '@' — the character that previously broke AWS4 signing.
    final Path placeholder = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl).mkdir(
            new Path(container, String.format("%s@", UUID.randomUUID()), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final AttributedList<Path> list = new S3ObjectListService(session, acl).list(placeholder, new DisabledListProgressListener());
    assertTrue(list.isEmpty());
    // Cleanup.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Resolves the target data node for a precise sharding value by evaluating the configured
 * inline (Groovy) expression with the sharding column bound to the value.
 *
 * @param availableTargetNames candidate target names (not consulted directly; the expression yields the name)
 * @param shardingValue the column/value pair to shard by; its value must be non-null
 * @return the evaluated target name
 */
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) {
    // Null sharding values cannot be routed.
    ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new);
    String columnName = shardingValue.getColumnName();
    // The expression must reference the sharding column, otherwise it cannot produce a target.
    ShardingSpherePreconditions.checkState(algorithmExpression.contains(columnName), () -> new MismatchedInlineShardingAlgorithmExpressionAndColumnException(algorithmExpression, columnName));
    try {
        return InlineExpressionParserFactory.newInstance(algorithmExpression).evaluateWithArgs(Collections.singletonMap(columnName, shardingValue.getValue()));
    } catch (final MissingMethodException ignored) {
        // Groovy throws MissingMethodException when the expression does not fit the bound
        // column/value; surface it as an expression/column mismatch.
        throw new MismatchedInlineShardingAlgorithmExpressionAndColumnException(algorithmExpression, columnName);
    }
}
/**
 * doSharding must resolve order_id=0 to t_order_0 and reject a column the inline
 * expression does not reference.
 */
@Test
void assertDoSharding() {
    List<String> availableTargetNames = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3");
    assertThat(inlineShardingAlgorithm.doSharding(availableTargetNames, new PreciseShardingValue<>("t_order", "order_id", DATA_NODE_INFO, 0)), is("t_order_0"));
    // A column absent from the expression must raise a mismatch exception.
    assertThrows(MismatchedInlineShardingAlgorithmExpressionAndColumnException.class, () -> inlineShardingAlgorithm.doSharding(availableTargetNames, new PreciseShardingValue<>("t_order", "non_existent_column1", DATA_NODE_INFO, 0)));
}
/**
 * Computes the end of the n-th rollover period relative to {@code instant}, using this
 * calendar's periodicity type (negative {@code periods} goes backwards).
 *
 * @param instant the reference point in time
 * @param periods how many periods to advance (may be negative)
 * @return the instant marking the end of the n-th period
 */
public Instant getEndOfNextNthPeriod(Instant instant, int periods) {
    // Delegates to the shared static helper with this calendar's periodicity.
    return innerGetEndOfNextNthPeriod(this, this.periodicityType, instant, periods);
}
/**
 * Checks getEndOfNextNthPeriod for a daily pattern over a range of positive and negative
 * period counts against an arithmetically computed expectation.
 */
@Test
public void testVaryingNumberOfDailyPeriods() {
    RollingCalendar rc = new RollingCalendar("yyyy-MM-dd");
    final long MILLIS_IN_DAY = 24 * 3600 * 1000;
    for (int p = 20; p > -100; p--) {
        long now = 1223325293589L; // Mon Oct 06 22:34:53 CEST 2008
        Instant nowInstant = Instant.ofEpochMilli(now);
        Instant result = rc.getEndOfNextNthPeriod(nowInstant, p);
        // NOTE(review): DSTSavings is added unconditionally — assumes the fixed test date
        // is in daylight-saving time for the calendar's zone; confirm before reusing elsewhere.
        long offset = rc.getTimeZone().getRawOffset() + rc.getTimeZone().getDSTSavings();
        // Start of "today" in local time, then step p whole days.
        long origin = now - ((now + offset) % (MILLIS_IN_DAY));
        long expected = origin + p * MILLIS_IN_DAY;
        assertEquals(expected, result.toEpochMilli(), "p=" + p);
    }
}
private List<MBeanAttribute> getAttributes(ObjectName name, MBeanAttributeInfo[] attributeInfos) { final List<String> attributeNames = new ArrayList<>(attributeInfos.length); for (final MBeanAttributeInfo attribute : attributeInfos) { // on ne veut pas afficher l'attribut password, jamais // (notamment, dans users tomcat ou dans datasources tomcat) // et on ne veut pas afficher l'attribut configurationAsProperties d'infinispan (issue 1180) if (attribute.isReadable() && !"password".equalsIgnoreCase(attribute.getName()) && !"configurationAsProperties".equalsIgnoreCase(attribute.getName())) { attributeNames.add(attribute.getName()); } } final String[] attributeNamesArray = attributeNames.toArray(new String[0]); final List<MBeanAttribute> result = new ArrayList<>(); try { final List<Attribute> attributes = mbeanServer.getAttributes(name, attributeNamesArray) .asList(); for (final Attribute attribute : attributes) { final Object value = convertValueIfNeeded(attribute.getValue()); final String attributeDescription = getAttributeDescription(attribute.getName(), attributeInfos); final String formattedAttributeValue = formatAttributeValue(value); final MBeanAttribute mbeanAttribute = new MBeanAttribute(attribute.getName(), attributeDescription, formattedAttributeValue); result.add(mbeanAttribute); } } catch (final Exception e) { // issue 201: do not stop to render MBeans tree when exception in mbeanServer.getAttributes final MBeanAttribute mbeanAttribute = new MBeanAttribute("exception", null, e.toString()); result.add(mbeanAttribute); } return result; }
/**
 * Sanity-checks that MBeanNode and its attributes produce non-null toString output.
 */
@Test
public void testToString() {
    final MBeanNode mBeanNode = new MBeanNode("name", "description",
            List.of(new MBeanAttribute("name", "description", "formattedValue")));
    assertNotNull("mbeanNode", mBeanNode);
    assertNotNull("toString", mBeanNode.toString());
    assertNotNull("getAttributes", mBeanNode.getAttributes());
    for (final MBeanAttribute attribute : mBeanNode.getAttributes()) {
        assertNotNull("attribute", attribute);
        assertNotNull("toString", attribute.toString());
    }
}
@Override public String toString() { StringBuilder b = new StringBuilder(); if (StringUtils.isNotBlank(protocol)) { b.append(protocol); b.append("://"); } if (StringUtils.isNotBlank(host)) { b.append(host); } if (!isPortDefault() && port != -1) { b.append(':'); b.append(port); } if (StringUtils.isNotBlank(path)) { // If no scheme/host/port, leave the path as is if (b.length() > 0 && !path.startsWith("/")) { b.append('/'); } b.append(encodePath(path)); } if (queryString != null && !queryString.isEmpty()) { b.append(queryString.toString()); } if (fragment != null) { b.append("#"); b.append(encodePath(fragment)); } return b.toString(); }
/**
 * An https URL on a non-default port (444) must keep its explicit port in toString().
 */
@Test
public void testHttpsProtocolNonDefaultPort() {
    s = "https://www.example.com:444/blah";
    t = "https://www.example.com:444/blah";
    assertEquals(t, new HttpURL(s).toString());
}
/**
 * Wraps a Runnable so that the given MDC context data is installed for the duration of
 * its execution and restored afterwards.
 *
 * @param contextData the MDC entries to set while the command runs
 * @param command the task to execute
 * @return a Runnable executing {@code command} under the given MDC context
 */
public static Runnable wrapRunnable(Map<String, String> contextData, Runnable command) {
    return new Runnable() {
        @Override
        public void run() {
            // try-with-resources guarantees the previous MDC state is restored.
            try (MdcCloseable ignored = withContext(contextData)) {
                command.run();
            }
        }
    };
}
/**
 * A wrapped Runnable must log with the job ID from the supplied MDC context data.
 */
@Test
void testWrapRunnable() throws Exception {
    assertJobIDLogged(jobID -> wrapRunnable(asContextData(jobID), LOGGING_RUNNABLE).run());
}
/**
 * Returns the DataTableType backing this definition.
 */
@Override
public DataTableType dataTableType() {
    return dataTableType;
}
/**
 * A method taking a Map must be recognized as a table-entry transformer: each table row
 * is converted to a map and passed through the method.
 */
@Test
void can_define_table_entry_transformer() throws NoSuchMethodException {
    Method method = JavaDataTableTypeDefinitionTest.class.getMethod("converts_table_entry_to_string", Map.class);
    JavaDataTableTypeDefinition definition = new JavaDataTableTypeDefinition(method, lookup, new String[0]);
    assertThat(definition.dataTableType().transform(dataTable.cells()),
            is(singletonList("converts_table_entry_to_string={a=c, b=d}")));
}
/**
 * Validates the database's default collation and, on upgrade or startup, repairs any
 * columns that do not match the expected case-sensitive collation.
 *
 * @param connection the open database connection
 * @param state the lifecycle state driving whether repairs are applied
 * @throws SQLException if a database operation fails
 */
@Override
void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException {
    expectCaseSensitiveDefaultCollation(connection);
    // Repairs only run for upgrades and startups, never on a fresh install.
    final boolean repairNeeded = state == DatabaseCharsetChecker.State.UPGRADE
            || state == DatabaseCharsetChecker.State.STARTUP;
    if (repairNeeded) {
        repairColumns(connection);
    }
}
/**
 * For every CI/AI collation combination (data-provided), an UPGRADE must rewrite the
 * column to the corresponding case/accent-sensitive collation via ALTER TABLE.
 */
@Test
@UseDataProvider("combinationsOfCsAsAndSuffix")
public void repair_case_insensitive_accent_insensitive_combinations_with_or_without_suffix(String collation, String expectedCollation) throws Exception {
    answerDefaultCollation("Latin1_General_CS_AS");
    answerColumnDefs(new ColumnDef(TABLE_ISSUES, COLUMN_KEE, "Latin1_General", collation, "varchar", 10, false));
    underTest.handle(connection, DatabaseCharsetChecker.State.UPGRADE);
    verify(sqlExecutor).executeDdl(connection,
            "ALTER TABLE issues ALTER COLUMN kee varchar(10) COLLATE " + expectedCollation + " NOT NULL");
}
/**
 * Renders this version as "major.minor.patch".
 *
 * @return the dotted version string
 */
@Override
public String toString() {
    final StringBuilder version = new StringBuilder();
    version.append(major).append('.').append(minor).append('.').append(patch);
    return version.toString();
}
/**
 * MemberVersion.toString() must produce the dotted "major.minor.patch" form.
 */
@Test
public void toStringTest() {
    assertEquals("3.8.2", MemberVersion.of(3, 8, 2).toString());
}
/**
 * Transforms the message body between Google Sheets ValueRange and application models,
 * driven entirely by "CamelGoogleSheets*"-prefixed headers (range, majorDimension,
 * spreadsheetId, columnNames, splitResults, valueInputOption).
 *
 * Dispatch order: an existing ValueRange body wins; otherwise splitResults selects the
 * split-values mapping; otherwise the body is converted into a ValueRange.
 */
@Override
public void transform(Message message, DataType fromType, DataType toType) {
    final Optional<ValueRange> valueRange = getValueRangeBody(message);
    // Header defaults: range "A:A", row-major, empty spreadsheet id, single column "A".
    String range = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A:A").toString();
    String majorDimension = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_ROWS).toString();
    String spreadsheetId = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", "").toString();
    String[] columnNames = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "A").toString().split(",");
    boolean splitResults = Boolean.parseBoolean(message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "splitResults", "false").toString());
    if (valueRange.isPresent()) {
        // Body is already a ValueRange: map it to the application model.
        message.setBody(transformFromValueRangeModel(message, valueRange.get(), spreadsheetId, range, majorDimension, columnNames));
    } else if (splitResults) {
        message.setBody(transformFromSplitValuesModel(message, spreadsheetId, range, majorDimension, columnNames));
    } else {
        // Inbound model -> ValueRange; valueInputOption defaults to USER_ENTERED.
        String valueInputOption = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "USER_ENTERED").toString();
        message.setBody(transformToValueRangeModel(message, spreadsheetId, range, majorDimension, valueInputOption, columnNames));
    }
}
/**
 * A JSON model with columns A/B and a row-major range must transform into a single-row
 * ValueRange with the expected headers set.
 */
@Test
public void testTransformToValueRangeRowDimension() throws Exception {
    Exchange inbound = new DefaultExchange(camelContext);
    inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A1:B1");
    String model = "{" + "\"spreadsheetId\": \"" + spreadsheetId + "\"," + "\"A\": \"a1\"," + "\"B\": \"b1\"" + "}";
    inbound.getMessage().setBody(model);
    transformer.transform(inbound.getMessage(), DataType.ANY, DataType.ANY);
    // Headers propagated / defaulted by the transformer.
    Assertions.assertEquals(spreadsheetId, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.SPREADSHEET_ID));
    Assertions.assertEquals("A1:B1", inbound.getMessage().getHeader(GoogleSheetsStreamConstants.RANGE));
    Assertions.assertEquals(RangeCoordinate.DIMENSION_ROWS, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.MAJOR_DIMENSION));
    Assertions.assertEquals("USER_ENTERED", inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption"));
    // One row with the two column values in order.
    ValueRange valueRange = (ValueRange) inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "values");
    Assertions.assertEquals(1L, valueRange.getValues().size());
    Assertions.assertEquals("a1", valueRange.getValues().get(0).get(0));
    Assertions.assertEquals("b1", valueRange.getValues().get(0).get(1));
}
/**
 * Returns every SCIM group mapping stored in the database.
 */
public List<ScimGroupDto> findAll(DbSession dbSession) {
    return mapper(dbSession).findAll();
}
/**
 * findAll must return every inserted SCIM group with its group/scim UUID pair intact.
 */
@Test
void findAll_returnsAllEntries() {
    ScimGroupDto scimGroup1 = db.users().insertScimGroup(db.users().insertGroup());
    ScimGroupDto scimGroup2 = db.users().insertScimGroup(db.users().insertGroup());
    List<ScimGroupDto> underTest = scimGroupDao.findAll(db.getSession());
    assertThat(underTest).hasSize(2)
            .extracting(ScimGroupDto::getGroupUuid, ScimGroupDto::getScimGroupUuid)
            .containsExactlyInAnyOrder(
                    tuple(scimGroup1.getGroupUuid(), scimGroup1.getScimGroupUuid()),
                    tuple(scimGroup2.getGroupUuid(), scimGroup2.getScimGroupUuid()));
}
/**
 * Sets the discriminator used to pick the nested appender. The @DefaultClass annotation
 * makes MDCBasedDiscriminator the default implementation in Joran configuration.
 */
@Override
@DefaultClass(MDCBasedDiscriminator.class)
public void setDiscriminator(Discriminator<ILoggingEvent> discriminator) {
    super.setDiscriminator(discriminator);
}
/**
 * Programmatic (non-Joran) construction of a SiftingAppender with an MDC-based
 * discriminator and a size-and-time rolling file appender per discriminating value.
 * Kept @Ignore'd — it writes under /var/logs and serves as a wiring example.
 */
@Ignore
@Test
public void programmicSiftingAppender() {
    SiftingAppender connectorAppender = new SiftingAppender();
    connectorAppender.setContext(loggerContext);
    connectorAppender.setName("SIFTING_APPENDER");
    // Discriminate on the "SKEY" MDC entry; fall back to "DEF_KEY" when absent.
    MDCBasedDiscriminator discriminator = new MDCBasedDiscriminator();
    discriminator.setKey("SKEY");
    discriminator.setDefaultValue("DEF_KEY");
    discriminator.start();
    connectorAppender.setDiscriminator(discriminator);
    connectorAppender.setAppenderFactory(new AppenderFactory<ILoggingEvent>() {
        @Override
        public Appender<ILoggingEvent> buildAppender(Context context, String discriminatingValue) throws JoranException {
            // One rolling file appender per discriminating value.
            RollingFileAppender<ILoggingEvent> appender = new RollingFileAppender<ILoggingEvent>();
            appender.setName("ROLLING_APPENDER_" + discriminatingValue);
            appender.setContext(context);
            appender.setFile("/var/logs/active_" + discriminatingValue + ".log");
            TimeBasedRollingPolicy<ILoggingEvent> policy = new TimeBasedRollingPolicy<ILoggingEvent>();
            policy.setContext(context);
            policy.setMaxHistory(365);
            policy.setFileNamePattern(CoreTestConstants.OUTPUT_DIR_PREFIX + "/logback1127/" + discriminatingValue + "_%d{yyyy_MM_dd}_%i.log");
            policy.setParent(appender);
            policy.setCleanHistoryOnStart(true);
            // Size-based trigger (5KB) nested inside the time-based policy.
            SizeAndTimeBasedFNATP<ILoggingEvent> innerpolicy = new SizeAndTimeBasedFNATP<ILoggingEvent>();
            innerpolicy.setContext(context);
            innerpolicy.setMaxFileSize(FileSize.valueOf("5KB"));
            innerpolicy.setTimeBasedRollingPolicy(policy);
            policy.setTimeBasedFileNamingAndTriggeringPolicy(innerpolicy);
            policy.start();
            appender.setRollingPolicy(policy);
            PatternLayoutEncoder pl = new PatternLayoutEncoder();
            pl.setContext(context);
            pl.setPattern("%d{yyyy/MM/dd'T'HH:mm:ss} %-5level - %msg\n");
            pl.start();
            appender.setEncoder(pl);
            appender.start();
            return appender;
        }
    });
    connectorAppender.start();
    ch.qos.logback.classic.Logger logger = loggerContext.getLogger("org.test");
    logger.addAppender(connectorAppender);
    logger.setLevel(Level.DEBUG);
    logger.setAdditive(false);
    // Two events under distinct MDC keys -> two distinct sifted appenders.
    MDC.put("SKEY", "K1");
    logger.info("bla1");
    MDC.clear();
    MDC.put("SKEY", "K2");
    logger.info("bla2");
    MDC.clear();
    StatusPrinter.print(loggerContext);
}
/**
 * Builds a {@link SegmentGenerationJobSpec} from a (Groovy-templated) job spec file.
 * Template variables are resolved with this precedence (lowest to highest):
 * property file values, environment-variable overrides, explicit context entries.
 * The spec is parsed as JSON when the "jobSpecFormat" property is "json", YAML otherwise.
 *
 * @param jobSpecFilePath   path to the job spec template file (required)
 * @param propertyFilePath  optional path to a java properties file
 * @param context           optional highest-precedence template values
 * @param environmentValues optional environment overrides keyed by property name
 * @return the parsed job spec
 * @throws RuntimeException wrapping any read, render or parse failure
 */
public static SegmentGenerationJobSpec getSegmentGenerationJobSpec(String jobSpecFilePath, String propertyFilePath,
    Map<String, Object> context, Map<String, String> environmentValues) {
  Properties properties = new Properties();
  if (propertyFilePath != null) {
    // try-with-resources: the original leaked this input stream.
    try (java.io.InputStream propertyStream = FileUtils.openInputStream(new File(propertyFilePath))) {
      properties.load(propertyStream);
    } catch (IOException e) {
      throw new RuntimeException(
          String.format("Unable to read property file [%s] into properties.", propertyFilePath), e);
    }
  }
  // Properties is a Map<Object,Object>; this raw cast mirrors the established usage here.
  Map<String, Object> propertiesMap = (Map) properties;
  // Environment variables override property-file values (existing keys only, no structural
  // modification, so iterating keySet() while put()-ing is safe).
  if (environmentValues != null) {
    for (String propertyName : propertiesMap.keySet()) {
      String envValue = environmentValues.get(propertyName);
      if (envValue != null) {
        propertiesMap.put(propertyName, envValue);
      }
    }
  }
  // Explicit context entries take the highest precedence.
  if (context != null) {
    propertiesMap.putAll(context);
  }
  String jobSpecTemplate;
  // try-with-resources: the original leaked the FileReader on both success and failure paths.
  try (BufferedReader reader = new BufferedReader(new FileReader(jobSpecFilePath))) {
    jobSpecTemplate = IOUtils.toString(reader);
  } catch (IOException e) {
    throw new RuntimeException(String.format("Unable to read ingestion job spec file [%s].", jobSpecFilePath), e);
  }
  String jobSpecStr;
  try {
    jobSpecStr = GroovyTemplateUtils.renderTemplate(jobSpecTemplate, propertiesMap);
  } catch (Exception e) {
    throw new RuntimeException(String
        .format("Unable to render templates on ingestion job spec template file - [%s] with propertiesMap - [%s].",
            jobSpecFilePath, Arrays.toString(propertiesMap.entrySet().toArray())), e);
  }
  String jobSpecFormat = (String) propertiesMap.getOrDefault(JOB_SPEC_FORMAT, YAML);
  if (jobSpecFormat.equals(JSON)) {
    try {
      return JsonUtils.stringToObject(jobSpecStr, SegmentGenerationJobSpec.class);
    } catch (IOException e) {
      throw new RuntimeException(String
          .format("Unable to parse job spec - [%s] to JSON with propertiesMap - [%s]", jobSpecFilePath,
              Arrays.toString(propertiesMap.entrySet().toArray())), e);
    }
  }
  return new Yaml().loadAs(jobSpecStr, SegmentGenerationJobSpec.class);
}
/**
 * A templated job spec plus a property file plus environment overrides must resolve the
 * dated input/output URIs and the parallelism from the merged values.
 */
@Test
public void testIngestionJobLauncherWithTemplateAndPropertyFileAndEnvironmentVariableOverride() {
    SegmentGenerationJobSpec spec = IngestionJobLauncher.getSegmentGenerationJobSpec(
            GroovyTemplateUtils.class.getClassLoader().getResource("ingestion_job_spec_template.yaml").getFile(),
            GroovyTemplateUtils.class.getClassLoader().getResource("job.config").getFile(), null,
            _defaultEnvironmentValues);
    // The date components come from the environment override values.
    Assert.assertEquals(spec.getInputDirURI(), "file:///path/to/input/2022/08/07");
    Assert.assertEquals(spec.getOutputDirURI(), "file:///path/to/output/2022/08/07");
    Assert.assertEquals(spec.getSegmentCreationJobParallelism(), 100);
}
/**
 * Moralizes the Bayesian network graph: for every node, each of its parents is connected
 * to the node's other parents ("marrying the parents"), a prerequisite for building the
 * junction tree.
 */
public void moralize() {
    for (GraphNode<BayesVariable> v : graph) {
        for (Edge e1 : v.getInEdges()) {
            // Resolve the parent node at the other end of the incoming edge.
            // Fixed: declared with its generic parameter instead of the raw GraphNode type.
            GraphNode<BayesVariable> parent = graph.getNode(e1.getOutGraphNode().getId());
            moralize(v, parent);
        }
    }
}
/**
 * Moralization of a 7-node network: parents sharing a child (x3, x4, x6 -> x5) must end
 * up pairwise linked in addition to the original parent/child links.
 */
@Test
public void testMoralize2() {
    Graph<BayesVariable> graph = new BayesNetwork();
    GraphNode x0 = addNode(graph);
    GraphNode x1 = addNode(graph);
    GraphNode x2 = addNode(graph);
    GraphNode x3 = addNode(graph);
    GraphNode x4 = addNode(graph);
    GraphNode x5 = addNode(graph);
    GraphNode x6 = addNode(graph);
    connectParentToChildren(x1, x2, x3);
    connectParentToChildren(x2, x4);
    connectParentToChildren(x4, x5);
    connectParentToChildren(x3, x5);
    connectParentToChildren(x6, x5);
    JunctionTreeBuilder jtBuilder = new JunctionTreeBuilder(graph);
    jtBuilder.moralize();
    // x5's parents (3, 4, 6) become mutually linked by moralization.
    assertLinkedNode(jtBuilder, x1.getId(), 2, 3);
    assertLinkedNode(jtBuilder, x2.getId(), 1, 4);
    assertLinkedNode(jtBuilder, x3.getId(), 1, 4, 5, 6);
    assertLinkedNode(jtBuilder, x4.getId(), 2, 3, 5, 6);
    assertLinkedNode(jtBuilder, x5.getId(), 3, 4, 6);
    assertLinkedNode(jtBuilder, x6.getId(), 3, 4, 5);
}
/**
 * Returns the external storage directory path when the storage is mounted (read-write
 * or read-only), or null when it is not available.
 *
 * @return the storage path, or null if external storage is not mounted
 */
public String getMountedExternalStorageDirectoryPath() {
    final String state = Environment.getExternalStorageState();
    final boolean mounted = Environment.MEDIA_MOUNTED.equals(state)
            || Environment.MEDIA_MOUNTED_READ_ONLY.equals(state);
    if (mounted) {
        return getExternalStorageDirectoryPath();
    }
    // Unmounted / unmountable / removed: no usable path.
    return null;
}
/**
 * When the external storage state is MEDIA_UNMOUNTABLE, no path must be returned.
 */
@Test
public void getMountedExternalStorageDirectoryPathReturnsNullWhenUnmountable() {
    ShadowEnvironment.setExternalStorageState(Environment.MEDIA_UNMOUNTABLE);
    assertThat(contextUtil.getMountedExternalStorageDirectoryPath(), is(nullValue()));
}
/**
 * Returns the server time of the given cluster node by issuing the Redis TIME command.
 * NOTE(review): the unit depends on RedisCommands.TIME_LONG's conversion — presumably
 * milliseconds; confirm against the command definition.
 */
@Override
public Long time(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    // Read asynchronously against the specific node, then block for the result.
    RFuture<Long> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.TIME_LONG);
    return syncFuture(f);
}
/**
 * The node's reported server time must be a plausible positive timestamp (> 1000).
 */
@Test
public void testTime() {
    RedisClusterNode master = getFirstMaster();
    Long time = connection.time(master);
    assertThat(time).isGreaterThan(1000);
}
/**
 * Builds a list sharding condition from an IN predicate on the given column. Each item
 * of the IN list contributes either a literal value, a parameter-marker value (with its
 * index recorded), an explicit null, or — for now()-style expressions — the current
 * timestamp from the timestamp service.
 *
 * @return empty for NOT IN predicates or when no value could be extracted;
 *         otherwise a ListShardingConditionValue over the collected values
 */
@Override
public Optional<ShardingConditionValue> generate(final InExpression predicate, final Column column, final List<Object> params, final TimestampServiceRule timestampServiceRule) {
    // NOT IN cannot be turned into an inclusion list; skip it.
    if (predicate.isNot()) {
        return Optional.empty();
    }
    Collection<ExpressionSegment> expressionSegments = predicate.getExpressionList();
    List<Integer> parameterMarkerIndexes = new ArrayList<>(expressionSegments.size());
    List<Comparable<?>> shardingConditionValues = new LinkedList<>();
    for (ExpressionSegment each : expressionSegments) {
        ConditionValue conditionValue = new ConditionValue(each, params);
        Optional<Comparable<?>> value = conditionValue.getValue();
        if (conditionValue.isNull()) {
            // Explicit NULL in the IN list: keep it, remembering a marker index if bound.
            shardingConditionValues.add(null);
            conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add);
            continue;
        }
        if (value.isPresent()) {
            shardingConditionValues.add(value.get());
            conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add);
            continue;
        }
        // now()/current_timestamp style expressions evaluate to the service-provided time.
        if (ExpressionConditionUtils.isNowExpression(each)) {
            shardingConditionValues.add(timestampServiceRule.getTimestamp());
        }
    }
    return shardingConditionValues.isEmpty() ? Optional.empty()
            : Optional.of(new ListShardingConditionValue<>(column.getName(), column.getTableName(), shardingConditionValues, parameterMarkerIndexes));
}
/**
 * An IN predicate with a single parameter marker must yield a list condition carrying the
 * bound value (1) and the marker's index (0).
 */
@SuppressWarnings("unchecked")
@Test
void assertGenerateConditionValueWithParameter() {
    ColumnSegment left = new ColumnSegment(0, 0, new IdentifierValue("id"));
    ListExpression right = new ListExpression(0, 0);
    right.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 0));
    InExpression predicate = new InExpression(0, 0, left, right, false);
    Optional<ShardingConditionValue> actual = generator.generate(predicate, column, Collections.singletonList(1), timestampServiceRule);
    assertTrue(actual.isPresent());
    assertThat(actual.get(), instanceOf(ListShardingConditionValue.class));
    ListShardingConditionValue<Integer> conditionValue = (ListShardingConditionValue<Integer>) actual.get();
    assertThat(conditionValue.getTableName(), is("tbl"));
    assertThat(conditionValue.getColumnName(), is("id"));
    assertThat(conditionValue.getValues(), is(Collections.singletonList(1)));
    assertThat(conditionValue.getParameterMarkerIndexes(), is(Collections.singletonList(0)));
}
/**
 * Derives the authorization contexts (resource + required action) for an incoming gRPC
 * request. Each supported request type maps to PUB contexts (send/transaction), SUB
 * contexts (consume-side requests) or both (route queries). Returns null for request
 * types that carry no authorization requirement (e.g. producer heartbeats).
 *
 * @param metadata gRPC call metadata (AK, remote address, channel id)
 * @param message the decoded request body
 * @return the contexts to authorize, or null when nothing applies
 * @throws AuthorizationException when a required field is missing from the request
 */
@Override
public List<DefaultAuthorizationContext> build(Metadata metadata, GeneratedMessageV3 message) {
    List<DefaultAuthorizationContext> result = null;
    if (message instanceof SendMessageRequest) {
        SendMessageRequest request = (SendMessageRequest) message;
        if (request.getMessagesCount() <= 0) {
            throw new AuthorizationException("message is null.");
        }
        // All messages in one request target the same topic; the first one is representative.
        result = newPubContext(metadata, request.getMessages(0).getTopic());
    }
    if (message instanceof EndTransactionRequest) {
        EndTransactionRequest request = (EndTransactionRequest) message;
        result = newPubContext(metadata, request.getTopic());
    }
    if (message instanceof HeartbeatRequest) {
        HeartbeatRequest request = (HeartbeatRequest) message;
        // Producer heartbeats need no authorization context.
        if (!isConsumerClientType(request.getClientType())) {
            return null;
        }
        result = newGroupSubContexts(metadata, request.getGroup());
    }
    if (message instanceof ReceiveMessageRequest) {
        ReceiveMessageRequest request = (ReceiveMessageRequest) message;
        if (!request.hasMessageQueue()) {
            throw new AuthorizationException("messageQueue is null.");
        }
        result = newSubContexts(metadata, request.getGroup(), request.getMessageQueue().getTopic());
    }
    if (message instanceof AckMessageRequest) {
        AckMessageRequest request = (AckMessageRequest) message;
        result = newSubContexts(metadata, request.getGroup(), request.getTopic());
    }
    if (message instanceof ForwardMessageToDeadLetterQueueRequest) {
        ForwardMessageToDeadLetterQueueRequest request = (ForwardMessageToDeadLetterQueueRequest) message;
        result = newSubContexts(metadata, request.getGroup(), request.getTopic());
    }
    if (message instanceof NotifyClientTerminationRequest) {
        NotifyClientTerminationRequest request = (NotifyClientTerminationRequest) message;
        // Only consumers set a group name here; producers terminate without one.
        if (StringUtils.isNotBlank(request.getGroup().getName())) {
            result = newGroupSubContexts(metadata, request.getGroup());
        }
    }
    if (message instanceof ChangeInvisibleDurationRequest) {
        ChangeInvisibleDurationRequest request = (ChangeInvisibleDurationRequest) message;
        result = newGroupSubContexts(metadata, request.getGroup());
    }
    if (message instanceof QueryRouteRequest) {
        QueryRouteRequest request = (QueryRouteRequest) message;
        result = newContext(metadata, request);
    }
    if (message instanceof QueryAssignmentRequest) {
        QueryAssignmentRequest request = (QueryAssignmentRequest) message;
        result = newSubContexts(metadata, request.getGroup(), request.getTopic());
    }
    if (message instanceof TelemetryCommand) {
        TelemetryCommand request = (TelemetryCommand) message;
        result = newContext(metadata, request);
    }
    // Stamp call-level metadata onto every produced context.
    if (CollectionUtils.isNotEmpty(result)) {
        result.forEach(context -> {
            context.setChannelId(metadata.get(GrpcConstants.CHANNEL_ID));
            context.setRpcCode(message.getDescriptorForType().getFullName());
        });
    }
    return result;
}
/**
 * Exercises the gRPC context builder for every supported request type, checking the
 * produced subject, resource key, required actions, and call metadata stamping.
 */
@Test
public void buildGrpc() {
    Metadata metadata = new Metadata();
    metadata.put(GrpcConstants.AUTHORIZATION_AK, "rocketmq");
    metadata.put(GrpcConstants.REMOTE_ADDRESS, "192.168.0.1");
    metadata.put(GrpcConstants.CHANNEL_ID, "channel-id");
    // SendMessageRequest -> single PUB context on the topic, with metadata stamped.
    GeneratedMessageV3 request = SendMessageRequest.newBuilder()
            .addMessages(Message.newBuilder()
                    .setTopic(Resource.newBuilder().setName("topic").build())
                    .build())
            .build();
    List<DefaultAuthorizationContext> result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.PUB)));
    Assert.assertEquals(result.get(0).getSourceIp(), "192.168.0.1");
    Assert.assertEquals(result.get(0).getChannelId(), "channel-id");
    Assert.assertEquals(result.get(0).getRpcCode(), SendMessageRequest.getDescriptor().getFullName());
    // EndTransactionRequest -> single PUB context on the topic.
    request = EndTransactionRequest.newBuilder()
            .setTopic(Resource.newBuilder().setName("topic").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.PUB)));
    // Consumer heartbeat -> single SUB context on the group.
    request = HeartbeatRequest.newBuilder()
            .setClientType(ClientType.PUSH_CONSUMER)
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.SUB)));
    // ReceiveMessageRequest -> SUB contexts on both group and topic.
    request = ReceiveMessageRequest.newBuilder()
            .setMessageQueue(MessageQueue.newBuilder()
                    .setTopic(Resource.newBuilder().setName("topic").build())
                    .build())
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(getContext(result, ResourceType.GROUP).getActions().containsAll(Arrays.asList(Action.SUB)));
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.SUB)));
    // AckMessageRequest -> SUB contexts on both group and topic.
    request = AckMessageRequest.newBuilder()
            .setTopic(Resource.newBuilder().setName("topic").build())
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(getContext(result, ResourceType.GROUP).getActions().containsAll(Arrays.asList(Action.SUB)));
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.SUB)));
    // ForwardMessageToDeadLetterQueueRequest -> SUB contexts on both group and topic.
    request = ForwardMessageToDeadLetterQueueRequest.newBuilder()
            .setTopic(Resource.newBuilder().setName("topic").build())
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(getContext(result, ResourceType.GROUP).getActions().containsAll(Arrays.asList(Action.SUB)));
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.SUB)));
    // NotifyClientTerminationRequest (with group) -> single SUB context on the group.
    request = NotifyClientTerminationRequest.newBuilder()
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.SUB)));
    // ChangeInvisibleDurationRequest -> single SUB context on the group.
    request = ChangeInvisibleDurationRequest.newBuilder()
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.SUB)));
    // QueryRouteRequest -> single context requiring both PUB and SUB on the topic.
    request = QueryRouteRequest.newBuilder()
            .setTopic(Resource.newBuilder().setName("topic").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(result.get(0).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(result.get(0).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(result.get(0).getActions().containsAll(Arrays.asList(Action.PUB, Action.SUB)));
    // QueryAssignmentRequest -> SUB contexts on both group and topic.
    request = QueryAssignmentRequest.newBuilder()
            .setTopic(Resource.newBuilder().setName("topic").build())
            .setGroup(Resource.newBuilder().setName("group").build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(getContext(result, ResourceType.GROUP).getActions().containsAll(Arrays.asList(Action.SUB)));
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.SUB)));
    // TelemetryCommand with publishing settings -> single PUB context on the topic.
    request = TelemetryCommand.newBuilder()
            .setSettings(Settings.newBuilder()
                    .setPublishing(Publishing.newBuilder()
                            .addTopics(Resource.newBuilder().setName("topic").build())
                            .build())
                    .build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.PUB)));
    // TelemetryCommand with subscription settings -> SUB contexts on group and topic.
    request = TelemetryCommand.newBuilder()
            .setSettings(Settings.newBuilder()
                    .setSubscription(Subscription.newBuilder()
                            .setGroup(Resource.newBuilder().setName("group").build())
                            .addSubscriptions(SubscriptionEntry.newBuilder()
                                    .setTopic(Resource.newBuilder().setName("topic").build())
                                    .build())
                            .build())
                    .build())
            .build();
    result = builder.build(metadata, request);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.GROUP).getResource().getResourceKey(), "Group:group");
    Assert.assertTrue(getContext(result, ResourceType.GROUP).getActions().containsAll(Arrays.asList(Action.SUB)));
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getSubject().getSubjectKey(), "User:rocketmq");
    Assert.assertEquals(getContext(result, ResourceType.TOPIC).getResource().getResourceKey(), "Topic:topic");
    Assert.assertTrue(getContext(result, ResourceType.TOPIC).getActions().containsAll(Arrays.asList(Action.SUB)));
}
/** Builds a configuration backed by the default MaWo config file found on the classpath. */
public MawoConfiguration() {
    this(readConfigFile());
}
/** Verifies the default values loaded from the bundled MaWo config file. */
@Test
void testMaWoConfiguration() {
    MawoConfiguration mawoConf = new MawoConfiguration();
    // validate Rpc server port
    assertEquals(5120, mawoConf.getRpcServerPort());
    // validate Rpc hostname
    assertEquals("localhost", mawoConf.getRpcHostName());
    // validate job queue storage conf
    boolean jobQueueStorage = mawoConf.getJobQueueStorageEnabled();
    assertTrue(jobQueueStorage);
    // validate default teardownWorkerValidity Interval
    assertEquals(120000, mawoConf.getTeardownWorkerValidityInterval());
    // validate Zk related configs
    assertEquals("/tmp/mawoRoot", mawoConf.getZKParentPath());
    assertEquals("localhost:2181", mawoConf.getZKAddress());
    assertEquals(1000, mawoConf.getZKRetryIntervalMS());
    assertEquals(10000, mawoConf.getZKSessionTimeoutMS());
    assertEquals(1000, mawoConf.getZKRetriesNum());
}
/** Returns the number of bits currently set to {@code true} (the BitSet's cardinality). */
public int getBitCount() {
    return bitSet.cardinality();
}
/** Checks that the bit count starts at zero and grows by one per bit set. */
@Test
public void testGetBitCount() {
    int length = 1024;
    Bitmap bitmap = new Bitmap(length);
    assertEquals(bitmap.getBitCount(), 0); // all zeros at initialization
    for (int i = 0; i < length; i++) {
        bitmap.setBit(i, true);
        assertEquals(bitmap.getBitCount(), i + 1); // i + 1 "true" bits
    }
}
/**
 * Initializes the token authentication provider from the broker configuration:
 * computes (optionally prefixed) setting names, loads the validation key and
 * claim/audience settings, and builds the JWT parser.
 *
 * @throws IllegalArgumentException if an audience claim is configured without an audience value,
 *                                  or a config value fails validation in one of the getters
 */
@Override
public void initialize(ServiceConfiguration config) throws IOException, IllegalArgumentException {
    // Optional prefix lets several token-provider instances coexist with distinct setting names.
    String prefix = (String) config.getProperty(CONF_TOKEN_SETTING_PREFIX);
    if (null == prefix) {
        prefix = "";
    }
    this.confTokenSecretKeySettingName = prefix + CONF_TOKEN_SECRET_KEY;
    this.confTokenPublicKeySettingName = prefix + CONF_TOKEN_PUBLIC_KEY;
    this.confTokenAuthClaimSettingName = prefix + CONF_TOKEN_AUTH_CLAIM;
    this.confTokenPublicAlgSettingName = prefix + CONF_TOKEN_PUBLIC_ALG;
    this.confTokenAudienceClaimSettingName = prefix + CONF_TOKEN_AUDIENCE_CLAIM;
    this.confTokenAudienceSettingName = prefix + CONF_TOKEN_AUDIENCE;
    this.confTokenAllowedClockSkewSecondsSettingName = prefix + CONF_TOKEN_ALLOWED_CLOCK_SKEW_SECONDS;
    // we need to fetch the algorithm before we fetch the key
    this.publicKeyAlg = getPublicKeyAlgType(config);
    this.validationKey = getValidationKey(config);
    this.roleClaim = getTokenRoleClaim(config);
    this.audienceClaim = getTokenAudienceClaim(config);
    this.audience = getTokenAudience(config);
    long allowedSkew = getConfTokenAllowedClockSkewSeconds(config);
    this.parser = Jwts.parserBuilder()
            .setAllowedClockSkewSeconds(allowedSkew)
            .setSigningKey(this.validationKey)
            .build();
    // An audience claim without a configured audience value cannot be validated.
    if (audienceClaim != null && audience == null) {
        throw new IllegalArgumentException("Token Audience Claim [" + audienceClaim
                + "] configured, but Audience stands for this broker not.");
    }
}
/** An unrecognized public-key algorithm name must make initialize() throw. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testValidationWhenPublicKeyAlgIsInvalid() throws IOException {
    Properties properties = new Properties();
    properties.setProperty(AuthenticationProviderToken.CONF_TOKEN_PUBLIC_ALG, "invalid");
    ServiceConfiguration conf = new ServiceConfiguration();
    conf.setProperties(properties);
    new AuthenticationProviderToken().initialize(conf);
}
/**
 * Creates a Kafka topic whose name is uniquified from {@code topicName} (replication factor 1),
 * unless a topic with that unique name already exists, and tracks it for later cleanup.
 *
 * @param topicName  base name for the topic
 * @param partitions number of partitions; must be positive
 * @return the unique topic name that exists after this call
 * @throws KafkaResourceManagerException if listing or creating topics fails
 */
public synchronized String createTopic(String topicName, int partitions)
    throws KafkaResourceManagerException {
  checkArgument(partitions > 0, "partitions must be positive.");
  String uniqueName = KafkaResourceManagerUtils.generateTopicName(topicName);
  try {
    Set<String> currentTopics = kafkaClient.listTopics().names().get();
    if (!currentTopics.contains(uniqueName)) {
      kafkaClient
          .createTopics(
              Collections.singletonList(new NewTopic(uniqueName, partitions, (short) 1)))
          .all()
          .get();
      topicNames.add(uniqueName);
      // Log success only when a topic was actually created; the previous version
      // logged "Successfully created" even when the topic pre-existed.
      LOG.info("Successfully created topic {}.", uniqueName);
    } else {
      LOG.info("Topic {} already exists, skipping creation.", uniqueName);
    }
  } catch (Exception e) {
    throw new KafkaResourceManagerException("Error creating topics.", e);
  }
  return uniqueName;
}
/** A failing createTopics future must surface as a KafkaResourceManagerException. */
@Test
public void testCreateTopicShouldThrowErrorWhenKafkaFailsToCreateTopic()
    throws ExecutionException, InterruptedException {
  when(kafkaClient.createTopics(any(Collection.class)).all().get())
      .thenThrow(new ExecutionException(new RuntimeException("create topic future fails")));
  assertThrows(KafkaResourceManagerException.class, () -> testManager.createTopic(TOPIC_NAME, 1));
}
/**
 * Parses a configuration-qualifier string into {@code out}, delegating to the
 * three-argument overload with its boolean flag set to {@code true}.
 * NOTE(review): the flag's meaning is not visible here — confirm in the 3-arg overload.
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
/** "port" must parse into the portrait orientation qualifier. */
@Test
public void parse_orientation_port() {
    ResTable_config config = new ResTable_config();
    ConfigDescription.parse("port", config);
    assertThat(config.orientation).isEqualTo(ORIENTATION_PORT);
}
/**
 * Updates an existing product brand after validating that the brand exists
 * and that its (possibly changed) name remains unique.
 */
@Override
public void updateBrand(ProductBrandUpdateReqVO updateReqVO) {
    // Validate that the brand exists and the name is unique
    validateBrandExists(updateReqVO.getId());
    validateBrandNameUnique(updateReqVO.getId(), updateReqVO.getName());
    // Perform the update
    ProductBrandDO updateObj = ProductBrandConvert.INSTANCE.convert(updateReqVO);
    brandMapper.updateById(updateObj);
}
/** Updating a brand with a random (non-existent) id must raise BRAND_NOT_EXISTS. */
@Test
public void testUpdateBrand_notExists() {
    // Prepare parameters
    ProductBrandUpdateReqVO reqVO = randomPojo(ProductBrandUpdateReqVO.class);
    // Invoke and assert the expected service exception
    assertServiceException(() -> brandService.updateBrand(reqVO), BRAND_NOT_EXISTS);
}
/**
 * Resolves a SpEL-style expression against the given method invocation.
 * Null/empty input and plain strings are returned unchanged; otherwise the
 * expression is dispatched on its shape (placeholder, method-based, or
 * bean-referencing — the exact shapes come from the regex constants).
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    // Null or empty expressions pass through untouched.
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }
    // Property-placeholder form: delegated to Spring's StringValueResolver when available.
    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        return stringValueResolver.resolveStringValue(spelExpression);
    }
    // Method-based SpEL: evaluated against the target method and its arguments.
    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        SpelRootObject rootObject = new SpelRootObject(method, arguments);
        MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
        Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
        return (String) evaluated;
    }
    // Bean-referencing SpEL: same evaluation, plus bean resolution via the BeanFactory.
    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        SpelRootObject rootObject = new SpelRootObject(method, arguments);
        MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
        Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
        return (String) evaluated;
    }
    // Anything else is treated as a literal and returned as-is.
    return spelExpression;
}
/** A null expression must resolve to null (pass-through, no evaluation). */
@Test
public void nullTest() throws Exception {
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);
    String result = sut.resolve(testMethod, new Object[]{}, null);
    assertThat(result).isNull();
}
/**
 * Collects the values of the field named {@code key} from every element of {@code list}
 * into a Set. The field type is looked up on the first element's class (all elements are
 * assumed homogeneous — TODO confirm with callers).
 * NOTE(review): the IllegalArgumentException thrown for an unknown key is raised inside the
 * try block, so it reaches callers wrapped in a BeanUtilsException — verify this is intended.
 */
@SuppressWarnings("unchecked")
public static <K> Set<K> toPropertySet(String key, List<?> list) {
    Set<K> set = new HashSet<>();
    if (CollectionUtils.isEmpty(list)) {// guard against callers passing an empty list
        return set;
    }
    try {
        Class<?> clazz = list.get(0).getClass();
        Field field = deepFindField(clazz, key);
        if (field == null) {
            throw new IllegalArgumentException("Could not find the key");
        }
        field.setAccessible(true);
        for (Object o : list) {
            set.add((K)field.get(o));
        }
    } catch (Exception e) {
        throw new BeanUtilsException(e);
    }
    return set;
}
/** An empty input list must yield an empty (non-null) set, not an exception. */
@Test
public void testToPropertySetEmpty() {
    assertNotNull(BeanUtils.toPropertySet("keys", someAnotherList));
}
static MapKeyLoader.Role assignRole(boolean isPartitionOwner, boolean isMapNamePartition, boolean isMapNamePartitionFirstReplica) { if (isMapNamePartition) { if (isPartitionOwner) { // map-name partition owner is the SENDER return MapKeyLoader.Role.SENDER; } else { if (isMapNamePartitionFirstReplica) { // first replica of the map-name partition is the SENDER_BACKUP return MapKeyLoader.Role.SENDER_BACKUP; } else { // other replicas of the map-name partition do not have a role return MapKeyLoader.Role.NONE; } } } else { // ordinary partition owners are RECEIVERs, otherwise no role return isPartitionOwner ? MapKeyLoader.Role.RECEIVER : MapKeyLoader.Role.NONE; } }
/** A non-owner, non-first-replica member of the map-name partition gets no role. */
@Test
public void assignRole_NOT_SENDER_BACKUP() {
    boolean isPartitionOwner = false;
    boolean isMapNamePartition = true;
    boolean isMapNamePartitionFirstReplica = false;
    Role role = MapKeyLoaderUtil.assignRole(isPartitionOwner, isMapNamePartition, isMapNamePartitionFirstReplica);
    assertEquals(NONE, role);
}
/**
 * Saves or updates a selector. Rejects requests without a plugin name; otherwise
 * builds the default selector, publishes it to subscribers, persists its discovery
 * upstream data, and returns the selector id.
 */
@PostMapping("/plugin/selector/saveOrUpdate")
public Mono<String> saveSelector(@RequestBody final SelectorData selectorData) {
    if (StringUtils.isEmpty(selectorData.getPluginName())) {
        return Mono.just("Error: please add pluginName!");
    }
    SelectorData defaultSelectorData = buildDefaultSelectorData(selectorData);
    subscriber.onSelectorSubscribe(defaultSelectorData);
    saveDiscoveryUpstreamData(defaultSelectorData);
    return Mono.just(selectorData.getId());
}
/**
 * Exercises both branches of the saveOrUpdate endpoint: a named selector is cached,
 * and a selector without a plugin name is rejected with an error message.
 */
@Test
public void saveSelector() throws Exception {
    // Happy path: a selector with a plugin name is accepted and cached.
    final String selectorPluginName = "testSaveSelector";
    final SelectorData selectorData = new SelectorData();
    selectorData.setPluginName(selectorPluginName);
    final String json = GsonUtils.getGson().toJson(selectorData);
    this.mockMvc
            .perform(MockMvcRequestBuilders.post("/shenyu/plugin/selector/saveOrUpdate")
                    .content(json)
                    .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andReturn();
    assertThat(baseDataCache.obtainSelectorData(selectorPluginName)).isNotNull();
    // Error path: a selector WITHOUT a plugin name must be rejected.
    // (Fixed: a stray setPluginName call previously mutated the already-serialized
    // happy-path selector instead of this one; the error selector must keep an
    // empty pluginName to trigger the error branch.)
    final SelectorData selectorDataError = new SelectorData();
    final String jsonError = GsonUtils.getGson().toJson(selectorDataError);
    final Object resultErr = this.mockMvc
            .perform(MockMvcRequestBuilders.post("/shenyu/plugin/selector/saveOrUpdate")
                    .content(jsonError)
                    .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andReturn()
            .getAsyncResult();
    assertThat(resultErr).isEqualTo("Error: please add pluginName!");
}
/**
 * Resolves a resource spec against the FileSystem registered for its scheme and
 * returns the corresponding ResourceId (file or directory per {@code isDirectory}).
 */
public static ResourceId matchNewResource(String singleResourceSpec, boolean isDirectory) {
    return getFileSystemInternal(parseScheme(singleResourceSpec))
        .matchNewResource(singleResourceSpec, isDirectory);
}
/**
 * An unregistered scheme must make matchNewResource throw IllegalArgumentException.
 * NOTE(review): because the first call is expected to throw, the assertEquals values
 * (and the second statement) never affect the outcome — only the exception matters.
 */
@Test(expected = IllegalArgumentException.class)
public void testInvalidSchemaMatchNewResource() {
    assertEquals("file", FileSystems.matchNewResource("invalidschema://tmp/f1", false));
    assertEquals("file", FileSystems.matchNewResource("c:/tmp/f1", false));
}
/**
 * Evaluates the configured filter script against the message asynchronously and
 * routes it to the TRUE or FALSE relation; on script failure the message is
 * forwarded to the failure chain and the failed evaluation is logged.
 */
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    ctx.logJsEvalRequest();
    withCallback(scriptEngine.executeFilterAsync(msg),
            filterResult -> {
                ctx.logJsEvalResponse();
                ctx.tellNext(msg, filterResult ? TbNodeConnectionType.TRUE : TbNodeConnectionType.FALSE);
            },
            t -> {
                ctx.tellFailure(msg, t);
                ctx.logJsEvalFailure();
            }, ctx.getDbCallbackExecutor());
}
/** A failing script future must route the message to the failure chain with the script error. */
@Test
public void exceptionInJsThrowsException() throws TbNodeException {
    initWithScript();
    TbMsgMetaData metaData = new TbMsgMetaData();
    TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, null, metaData, TbMsgDataType.JSON, TbMsg.EMPTY_JSON_OBJECT, ruleChainId, ruleNodeId);
    when(scriptEngine.executeFilterAsync(msg)).thenReturn(Futures.immediateFailedFuture(new ScriptException("error")));
    node.onMsg(ctx, msg);
    verifyError(msg, "error", ScriptException.class);
}
/** Maps each Iceberg schema column to a Hive FieldSchema (name, type string, doc). */
public static List<FieldSchema> convert(Schema schema) {
    return schema.columns().stream()
        .map(col -> new FieldSchema(col.name(), convertToTypeString(col.type()), col.doc()))
        .collect(Collectors.toList());
}
/** Every supported Hive primitive type must round into the expected Iceberg schema. */
@Test
public void testSchemaConvertToIcebergSchemaForEveryPrimitiveType() {
    Schema schemaWithEveryType = HiveSchemaUtil.convert(getSupportedFieldSchemas());
    assertThat(schemaWithEveryType.asStruct()).isEqualTo(getSchemaWithSupportedTypes().asStruct());
}
/** Convenience overload: encodes a name/value pair as a DefaultCookie. */
public String encode(String name, String value) {
    return encode(new DefaultCookie(name, value));
}
/**
 * The strict encoder must reject every RFC 6265 CTL and separator character
 * in a cookie name; counts one IllegalArgumentException per illegal character.
 */
@Test
public void illegalCharInCookieNameMakesStrictEncoderThrowsException() {
    Set<Character> illegalChars = new HashSet<Character>();
    // CTLs
    for (int i = 0x00; i <= 0x1F; i++) {
        illegalChars.add((char) i);
    }
    illegalChars.add((char) 0x7F);
    // separators
    for (char c : new char[] { '(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']', '?', '=', '{', '}', ' ', '\t' }) {
        illegalChars.add(c);
    }
    int exceptions = 0;
    for (char c : illegalChars) {
        try {
            ServerCookieEncoder.STRICT.encode(new DefaultCookie("foo" + c + "bar", "value"));
        } catch (IllegalArgumentException e) {
            exceptions++;
        }
    }
    assertEquals(illegalChars.size(), exceptions);
}
/**
 * Creates the thread-level "punctuate-ratio" sensor at INFO recording level and
 * registers its value metric with the thread's tag map.
 */
public static Sensor punctuateRatioSensor(final String threadId,
                                          final StreamsMetricsImpl streamsMetrics) {
    final Sensor sensor =
        streamsMetrics.threadLevelSensor(threadId, PUNCTUATE + RATIO_SUFFIX, Sensor.RecordingLevel.INFO);
    final Map<String, String> tagMap = streamsMetrics.threadLevelTagMap(threadId);
    addValueMetricToSensor(
        sensor,
        THREAD_LEVEL_GROUP,
        tagMap,
        PUNCTUATE + RATIO_SUFFIX,
        PUNCTUATE_RATIO_DESCRIPTION
    );
    return sensor;
}
/** Verifies the punctuate-ratio sensor is created with the expected metric registration. */
@Test
public void shouldGetPunctuateRatioSensor() {
    final String operation = "punctuate-ratio";
    final String ratioDescription = "The fraction of time the thread spent on punctuating active tasks";
    when(streamsMetrics.threadLevelSensor(THREAD_ID, operation, RecordingLevel.INFO)).thenReturn(expectedSensor);
    when(streamsMetrics.threadLevelTagMap(THREAD_ID)).thenReturn(tagMap);
    try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) {
        final Sensor sensor = ThreadMetrics.punctuateRatioSensor(THREAD_ID, streamsMetrics);
        streamsMetricsStaticMock.verify(
            () -> StreamsMetricsImpl.addValueMetricToSensor(
                expectedSensor,
                THREAD_LEVEL_GROUP,
                tagMap,
                operation,
                ratioDescription
            )
        );
        assertThat(sensor, is(expectedSensor));
    }
}
/**
 * Launches a YARN service from a local JSON spec via the REST API.
 * Returns EXIT_SUCCESS-based result from the server response, or
 * EXIT_EXCEPTION_THROWN when loading, serializing, or posting fails.
 */
@Override
public int actionLaunch(String fileName, String appName, Long lifetime, String queue)
    throws IOException, YarnException {
    int result = EXIT_SUCCESS;
    try {
        Service service = loadAppJsonFromLocalFS(fileName, appName, lifetime, queue);
        String buffer = jsonSerDeser.toJson(service);
        ClientResponse response = getApiClient()
            .post(ClientResponse.class, buffer);
        result = processResponse(response);
    } catch (Exception e) {
        LOG.error("Fail to launch application: ", e);
        result = EXIT_EXCEPTION_THROWN;
    }
    return result;
}
/** Launching from a missing spec file must return EXIT_EXCEPTION_THROWN, not throw. */
@Test
void testBadLaunch() {
    String fileName = "unknown_file";
    String appName = "unknown_app";
    long lifetime = 3600L;
    String queue = "default";
    try {
        int result = badAsc.actionLaunch(fileName, appName, lifetime, queue);
        assertEquals(EXIT_EXCEPTION_THROWN, result);
    } catch (IOException | YarnException e) {
        fail();
    }
}
/**
 * Opens a SCAN cursor pinned to the given cluster node. The cursor keeps issuing
 * SCAN against the same RedisClient until the server returns position "0",
 * at which point the client reference is cleared to end iteration.
 */
@Override
public Cursor<byte[]> scan(RedisClusterNode node, ScanOptions options) {
    return new ScanCursor<byte[]>(0, options) {

        private RedisClient client = getEntry(node);

        @Override
        protected ScanIteration<byte[]> doScan(long cursorId, ScanOptions options) {
            // SCAN is a multi-roundtrip command and cannot run inside MULTI/pipeline.
            if (isQueueing() || isPipelined()) {
                throw new UnsupportedOperationException("'SSCAN' cannot be called in pipeline / transaction mode.");
            }

            // A null client means the previous roundtrip reached position "0" — iteration is done.
            if (client == null) {
                return null;
            }

            List<Object> args = new ArrayList<Object>();
            // NOTE(review): 101010101010101010L appears to be a sentinel for "restart from 0"
            // used by the surrounding framework — confirm against the caller before changing.
            if (cursorId == 101010101010101010L) {
                cursorId = 0;
            }
            // Cursor ids are unsigned 64-bit values on the Redis side.
            args.add(Long.toUnsignedString(cursorId));
            if (options.getPattern() != null) {
                args.add("MATCH");
                args.add(options.getPattern());
            }
            if (options.getCount() != null) {
                args.add("COUNT");
                args.add(options.getCount());
            }

            RFuture<ListScanResult<byte[]>> f = executorService.readAsync(client, ByteArrayCodec.INSTANCE, RedisCommands.SCAN, args.toArray());
            ListScanResult<byte[]> res = syncFuture(f);
            String pos = res.getPos();
            // Stay on the client that served this page so the cursor remains valid.
            client = res.getRedisClient();
            if ("0".equals(pos)) {
                client = null;
            }

            return new ScanIteration<byte[]>(Long.parseUnsignedLong(pos), res.getValues());
        }
    }.open();
}
/** A full SCAN over 1000 keys must visit exactly 1000 entries. */
@Test
public void testScan() {
    for (int i = 0; i < 1000; i++) {
        connection.set(("" + i).getBytes(StandardCharsets.UTF_8), ("" + i).getBytes(StandardCharsets.UTF_8));
    }

    Cursor<byte[]> b = connection.scan(ScanOptions.scanOptions().build());
    int counter = 0;
    while (b.hasNext()) {
        b.next();
        counter++;
    }
    assertThat(counter).isEqualTo(1000);
}
/**
 * Builds an ScmInfo from the scanner-report changesets: one Changeset per line,
 * resolved through the report's per-line changeset index.
 */
public static ScmInfo create(ScannerReport.Changesets changesets) {
    requireNonNull(changesets);
    Changeset[] lineChangesets = new Changeset[changesets.getChangesetIndexByLineCount()];
    LineIndexToChangeset lineIndexToChangeset = new LineIndexToChangeset(changesets);
    for (int i = 0; i < changesets.getChangesetIndexByLineCount(); i++) {
        lineChangesets[i] = lineIndexToChangeset.apply(i);
    }
    return new ScmInfoImpl(lineChangesets);
}
/** Four line-index entries over two changesets must produce four per-line changesets. */
@Test
public void create_scm_info_with_some_changesets() {
    ScmInfo scmInfo = ReportScmInfo.create(ScannerReport.Changesets.newBuilder()
        .setComponentRef(FILE_REF)
        .addChangeset(ScannerReport.Changesets.Changeset.newBuilder()
            .setAuthor("john")
            .setDate(123456789L)
            .setRevision("rev-1")
            .build())
        .addChangeset(ScannerReport.Changesets.Changeset.newBuilder()
            .setAuthor("henry")
            .setDate(1234567810L)
            .setRevision("rev-2")
            .build())
        .addChangesetIndexByLine(0)
        .addChangesetIndexByLine(1)
        .addChangesetIndexByLine(0)
        .addChangesetIndexByLine(0)
        .build());
    assertThat(scmInfo.getAllChangesets()).hasSize(4);
}
/**
 * Persists a project's new-code definition at creation time. On Community Edition
 * the definition is scoped to the main branch; the value is resolved per type,
 * and non-CaYC-compliant combinations are rejected.
 *
 * @throws IllegalArgumentException if the type/value pair is not CaYC compliant
 */
public void createNewCodeDefinition(DbSession dbSession, String projectUuid, String mainBranchUuid,
    String defaultBranchName, String newCodeDefinitionType, @Nullable String newCodeDefinitionValue) {
    boolean isCommunityEdition = editionProvider.get().filter(EditionProvider.Edition.COMMUNITY::equals).isPresent();
    NewCodePeriodType newCodePeriodType = parseNewCodeDefinitionType(newCodeDefinitionType);
    NewCodePeriodDto dto = new NewCodePeriodDto();
    dto.setType(newCodePeriodType);
    dto.setProjectUuid(projectUuid);
    // Community Edition has no multi-branch support, so the definition is pinned to the main branch.
    if (isCommunityEdition) {
        dto.setBranchUuid(mainBranchUuid);
    }
    getNewCodeDefinitionValueProjectCreation(newCodePeriodType, newCodeDefinitionValue, defaultBranchName).ifPresent(dto::setValue);
    if (!CaycUtils.isNewCodePeriodCompliant(dto.getType(), dto.getValue())) {
        throw new IllegalArgumentException("Failed to set the New Code Definition. The given value is not compatible with the Clean as You Code methodology. "
            + "Please refer to the documentation for compliant options.");
    }
    dbClient.newCodePeriodDao().insert(dbSession, dto);
}
/** REFERENCE_BRANCH definitions default their value to the project's main branch name. */
@Test
public void createNewCodeDefinition_return_branch_value_for_reference_branch_type() {
    newCodeDefinitionResolver.createNewCodeDefinition(dbSession, DEFAULT_PROJECT_ID, MAIN_BRANCH_UUID, MAIN_BRANCH, REFERENCE_BRANCH.name(), null);

    Optional<NewCodePeriodDto> newCodePeriodDto = dbClient.newCodePeriodDao().selectByProject(dbSession, DEFAULT_PROJECT_ID);
    assertThat(newCodePeriodDto)
        .isPresent()
        .get()
        .extracting(NewCodePeriodDto::getType, NewCodePeriodDto::getValue, NewCodePeriodDto::getBranchUuid, NewCodePeriodDto::getProjectUuid)
        .containsExactly(REFERENCE_BRANCH, MAIN_BRANCH, null, DEFAULT_PROJECT_ID);
}
/**
 * Loads all grok patterns whose ids are in {@code patternIds}; unknown ids are
 * silently skipped (the result may be smaller than the input).
 */
@Override
public Set<GrokPattern> bulkLoad(Collection<String> patternIds) {
    // Close the cursor when done; the previous version leaked the underlying
    // MongoDB cursor resources.
    try (DBCursor<GrokPattern> dbCursor = dbCollection.find(DBQuery.in("_id", patternIds))) {
        return ImmutableSet.copyOf((Iterator<GrokPattern>) dbCursor);
    }
}
/** Two known ids and one unknown id must load exactly the two fixture patterns. */
@Test
@MongoDBFixtures("MongoDbGrokPatternServiceTest.json")
public void bulkLoad() {
    final List<String> idList = ImmutableList.of(
        "56250da2d400000000000001",
        "56250da2d400000000000002",
        "56250da2d4000000deadbeef");
    final Set<GrokPattern> grokPatterns = service.bulkLoad(idList);
    assertThat(grokPatterns)
        .hasSize(2)
        .contains(
            GrokPattern.create("56250da2d400000000000001", "Test1", "[a-z]+", null),
            GrokPattern.create("56250da2d400000000000002", "Test2", "[a-z]+", null));
}
/**
 * Builds the KiePMML representation of a PMML False predicate.
 * The source predicate carries no state, so the parameter is intentionally unused.
 */
static KiePMMLFalsePredicate getKiePMMLFalsePredicate(final False falsePre) {
    return KiePMMLFalsePredicate.builder(Collections.emptyList()).build();
}
/** A PMML False predicate must convert into a well-formed KiePMMLFalsePredicate. */
@Test
void getKiePMMLFalsePredicate() {
    False toConvert = new False();
    KiePMMLFalsePredicate retrieved = KiePMMLFalsePredicateInstanceFactory.getKiePMMLFalsePredicate(toConvert);
    commonVerifyKiePMMLFalsePredicate(retrieved, toConvert);
}
/**
 * Resolves {@code other} against this directory resource. Only RESOLVE_FILE and
 * RESOLVE_DIRECTORY are supported, a file target must not end with '/', and the
 * path math is delegated per operating system.
 */
@Override
public LocalResourceId resolve(String other, ResolveOptions resolveOptions) {
    checkState(isDirectory(), "Expected the path is a directory, but had [%s].", pathString);
    checkArgument(
        resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE)
            || resolveOptions.equals(StandardResolveOptions.RESOLVE_DIRECTORY),
        "ResolveOptions: [%s] is not supported.",
        resolveOptions);
    checkArgument(
        !(resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE) && other.endsWith("/")),
        "The resolved file: [%s] should not end with '/'.",
        other);
    // Windows paths need separate handling (drive letters, backslashes).
    if (SystemUtils.IS_OS_WINDOWS) {
        return resolveLocalPathWindowsOS(other, resolveOptions);
    } else {
        return resolveLocalPath(other, resolveOptions);
    }
}
/**
 * Windows-path resolution cases.
 * NOTE(review): the assumeFalse skips this test ON Windows (tracked issue), while the
 * following early return skips it everywhere else — so the body currently never runs.
 * Confirm whether the early return should be removed once the issue is fixed.
 */
@Test
public void testResolveInWindowsOS() {
    // TODO: Java core test failing on windows, https://github.com/apache/beam/issues/20460
    assumeFalse(SystemUtils.IS_OS_WINDOWS);
    if (!SystemUtils.IS_OS_WINDOWS) {
        // Skip tests
        return;
    }
    assertEquals(
        toResourceIdentifier("C:\\my home\\out put"),
        toResourceIdentifier("C:\\my home\\")
            .resolve("out put", StandardResolveOptions.RESOLVE_FILE));
    assertEquals(
        toResourceIdentifier("C:\\out put"),
        toResourceIdentifier("C:\\my home\\")
            .resolve("..", StandardResolveOptions.RESOLVE_DIRECTORY)
            .resolve(".", StandardResolveOptions.RESOLVE_DIRECTORY)
            .resolve("out put", StandardResolveOptions.RESOLVE_FILE));
    assertEquals(
        toResourceIdentifier("C:\\my home\\**\\*"),
        toResourceIdentifier("C:\\my home\\")
            .resolve("**", StandardResolveOptions.RESOLVE_DIRECTORY)
            .resolve("*", StandardResolveOptions.RESOLVE_FILE));
}
/**
 * Resolves the AWS region: an explicitly configured region wins; otherwise the
 * region is derived from the availability zone reported by the ECS or EC2
 * metadata endpoint, depending on the runtime environment.
 */
static String resolveRegion(AwsConfig awsConfig, AwsMetadataApi metadataApi, Environment environment) {
    if (!isNullOrEmptyAfterTrim(awsConfig.getRegion())) {
        return awsConfig.getRegion();
    }

    if (environment.isRunningOnEcs()) {
        return regionFrom(metadataApi.availabilityZoneEcs());
    }

    return regionFrom(metadataApi.availabilityZoneEc2());
}
/** Without a configured region, the EC2 AZ "us-east-1a" must resolve to "us-east-1". */
@Test
public void resolveRegionEc2Metadata() {
    // given
    AwsConfig awsConfig = AwsConfig.builder().build();
    AwsMetadataApi awsMetadataApi = mock(AwsMetadataApi.class);
    Environment environment = mock(Environment.class);
    given(awsMetadataApi.availabilityZoneEc2()).willReturn("us-east-1a");

    // when
    String result = resolveRegion(awsConfig, awsMetadataApi, environment);

    // then
    assertEquals("us-east-1", result);
}
/** Returns the directory length field of the CHM ITSF header. */
public long getDirLen() {
    return dir_len;
}
/** The parsed ITSF header's directory length must match the known fixture value. */
@Test
public void getDirLen() {
    assertEquals(TestParameters.VP_DIRECTORY_LENGTH, chmItsfHeader.getDirLen());
}
/**
 * Assembles the Elasticsearch settings map from the filesystem, network, cluster,
 * security, and miscellaneous sections, and logs the resulting HTTP/TCP endpoints.
 */
public Map<String, String> build() {
    Map<String, String> builder = new HashMap<>();
    configureFileSystem(builder);
    configureNetwork(builder);
    configureCluster(builder);
    configureSecurity(builder);
    configureOthers(builder);
    LOGGER.info("Elasticsearch listening on [HTTP: {}:{}, TCP: {}:{}]",
        builder.get(ES_HTTP_HOST_KEY), builder.get(ES_HTTP_PORT_KEY),
        builder.get(ES_TRANSPORT_HOST_KEY), builder.get(ES_TRANSPORT_PORT_KEY));
    return builder;
}
/** A non-default HTTP host must be reflected in the generated ES settings. */
@Test
@UseDataProvider("clusterEnabledOrNot")
public void enable_http_connector_different_host(boolean clusterEnabled) throws Exception {
    Props props = minProps(clusterEnabled, "127.0.0.2", null);
    Map<String, String> settings = new EsSettings(props, new EsInstallation(props), system).build();

    assertThat(settings)
        .containsEntry("http.port", "9001")
        .containsEntry("http.host", "127.0.0.2");
}
/**
 * Creates a Nacos naming-service wrapper from the connection URL, honoring the
 * nacos.check flag and the retry count / retry-wait parameters (defaults: 10 / 10ms).
 */
public static NacosNamingServiceWrapper createNamingService(URL connectionURL) {
    final boolean checkEnabled = connectionURL.getParameter(NACOS_CHECK_KEY, true);
    final int retries = connectionURL.getPositiveParameter(NACOS_RETRY_KEY, 10);
    final int retryWaitMs = connectionURL.getPositiveParameter(NACOS_RETRY_WAIT_KEY, 10);
    final NacosConnectionManager connectionManager =
            new NacosConnectionManager(connectionURL, checkEnabled, retries, retryWaitMs);
    return new NacosNamingServiceWrapper(connectionManager, retries, retryWaitMs);
}
/** A URL with nacos.check=false must still yield a non-null naming-service wrapper. */
@Test
void testCreateNamingService() {
    URL url = URL.valueOf("test://test:8080/test?backup=backup&nacos.check=false");
    NacosNamingServiceWrapper namingService = NacosNamingServiceUtils.createNamingService(url);
    Assertions.assertNotNull(namingService);
}
/**
 * Validates the scheduler's vcore allocation bounds: the minimum must be positive
 * and must not exceed the maximum.
 *
 * @throws YarnRuntimeException if the bounds are invalid
 */
public static void validateVCores(Configuration conf) {
    int minVcores = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    int maxVcores = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
    if (minVcores <= 0 || minVcores > maxVcores) {
        throw new YarnRuntimeException("Invalid resource scheduler vcores"
            + " allocation configuration"
            + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES
            + "=" + minVcores
            + ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES
            + "=" + maxVcores + ", min and max should be greater than 0"
            + ", max should be no smaller than min.");
    }
}
/**
 * min vcores (4) greater than max vcores (1) must make validateVCores throw
 * a YarnRuntimeException; fail() only runs when no exception was thrown.
 */
@Test (expected = YarnRuntimeException.class)
public void testValidateVCoresHigherMinThanMaxVCore() {
    Map<String, String> configs = new HashMap<>(); // fixed: was a raw HashMap
    configs.put(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, "4");
    configs.put(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, "1");
    Configuration config = CapacitySchedulerConfigGeneratorForTest
            .createConfiguration(configs);
    CapacitySchedulerConfigValidator.validateVCores(config);
    // Fixed: the message previously referenced the MAXIMUM_ALLOCATION_MB constant
    // and the wrong relation; this test is about vcore bounds.
    fail(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES
            + " should be <= "
            + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
}
/** Delegates interpretation of the statement to the shared helper using this session. */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    return helper.interpret(session, st, context);
}
/** DESCRIBE on a non-existent table must return an ERROR result naming the table. */
@Test
void should_error_describing_non_existing_table() {
    // Given
    String query = "USE system;\n" +
            "DESCRIBE TABLE complex_table;";

    // When
    final InterpreterResult actual = interpreter.interpret(query, intrContext);

    // Then
    assertEquals(Code.ERROR, actual.code());
    assertTrue(actual.message().get(0).getData().contains("Cannot find table system.complex_table"),
            actual.message().get(0).getData());
}
/**
 * Looks up offsets by timestamp for the given partitions within the timer's deadline.
 * The partitions' topics are registered as transient metadata topics for the duration
 * of the lookup and always cleared afterwards.
 */
public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> timestampsToSearch,
                                                               Timer timer) {
    metadata.addTransientTopics(topicsForPartitions(timestampsToSearch.keySet()));

    try {
        Map<TopicPartition, ListOffsetData> fetchedOffsets =
            fetchOffsetsByTimes(timestampsToSearch, timer, true).fetchedOffsets;

        return buildOffsetsForTimesResult(timestampsToSearch, fetchedOffsets);
    } finally {
        metadata.clearTransientTopics();
    }
}
/** Retriable per-partition errors in a ListOffsets response must exhaust the timer and time out. */
@Test
public void testBatchedListOffsetsMetadataErrors() {
    buildFetcher();

    ListOffsetsResponseData data = new ListOffsetsResponseData()
            .setThrottleTimeMs(0)
            .setTopics(Collections.singletonList(new ListOffsetsTopicResponse()
                    .setName(tp0.topic())
                    .setPartitions(Arrays.asList(
                            new ListOffsetsPartitionResponse()
                                    .setPartitionIndex(tp0.partition())
                                    .setErrorCode(Errors.NOT_LEADER_OR_FOLLOWER.code())
                                    .setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)
                                    .setOffset(ListOffsetsResponse.UNKNOWN_OFFSET),
                            new ListOffsetsPartitionResponse()
                                    .setPartitionIndex(tp1.partition())
                                    .setErrorCode(Errors.UNKNOWN_TOPIC_OR_PARTITION.code())
                                    .setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)
                                    .setOffset(ListOffsetsResponse.UNKNOWN_OFFSET)))));
    client.prepareResponse(new ListOffsetsResponse(data));

    Map<TopicPartition, Long> offsetsToSearch = new HashMap<>();
    offsetsToSearch.put(tp0, ListOffsetsRequest.EARLIEST_TIMESTAMP);
    offsetsToSearch.put(tp1, ListOffsetsRequest.EARLIEST_TIMESTAMP);

    assertThrows(TimeoutException.class, () -> offsetFetcher.offsetsForTimes(offsetsToSearch, time.timer(1)));
}
/**
 * Handles a selector update: ignores selectors without an id; for non-continued
 * selectors caches an empty default rule handle, then initializes the gRPC client
 * for the selector.
 */
@Override
public void handlerSelector(final SelectorData selectorData) {
    if (Objects.isNull(selectorData.getId())) {
        return;
    }
    if (!selectorData.getContinued()) {
        ApplicationConfigCache.getInstance().cacheRuleHandle(CacheKeyUtils.INST.getKey(selectorData.getId(), Constants.DEFAULT_RULE), "{}");
    }
    GrpcClientCache.initGrpcClient(selectorData.getId());
}
/** A null-id selector is ignored; a valid id must produce a cached gRPC client. */
@Test
public void testHandlerSelector() {
    when(selectorData.getId()).thenReturn(null);
    grpcPluginDataHandler.handlerSelector(selectorData);
    when(selectorData.getId()).thenReturn("selectorId");
    grpcPluginDataHandler.handlerSelector(selectorData);
    assertNotNull(GrpcClientCache.getGrpcClient(selectorData.getId()));
}
/**
 * Creates an OAuth2 client after ensuring the clientId is not already taken,
 * and returns the generated record id.
 */
@Override
public Long createOAuth2Client(OAuth2ClientSaveReqVO createReqVO) {
    validateClientIdExists(null, createReqVO.getClientId());
    // Insert the new client record
    OAuth2ClientDO client = BeanUtils.toBean(createReqVO, OAuth2ClientDO.class);
    oauth2ClientMapper.insert(client);
    return client.getId();
}
/** Creating a client must persist it and echo all request fields except the generated id. */
@Test
public void testCreateOAuth2Client_success() {
    // Prepare parameters
    OAuth2ClientSaveReqVO reqVO = randomPojo(OAuth2ClientSaveReqVO.class, o -> o.setLogo(randomString()))
            .setId(null); // keep the id unset so it gets generated
    // Invoke
    Long oauth2ClientId = oauth2ClientService.createOAuth2Client(reqVO);
    // Assert
    assertNotNull(oauth2ClientId);
    // Verify the persisted record matches the request
    OAuth2ClientDO oAuth2Client = oauth2ClientMapper.selectById(oauth2ClientId);
    assertPojoEquals(reqVO, oAuth2Client, "id");
}
/**
 * Invokes a static method reflectively with the given arguments.
 *
 * @param staticMethod the method to invoke; must be non-null and static
 * @param args         arguments passed to the invocation
 * @return the value returned by the method
 * @throws IllegalArgumentException  if the method is null or not static
 * @throws InvocationTargetException if the invoked method itself throws
 */
public static Object invokeStaticMethod(Method staticMethod, Object... args)
        throws IllegalArgumentException, InvocationTargetException, SecurityException {
    if (staticMethod == null) {
        throw new IllegalArgumentException("staticMethod must be not null");
    }
    final boolean isStatic = Modifier.isStatic(staticMethod.getModifiers());
    if (!isStatic) {
        throw new IllegalArgumentException("`" + methodToString(staticMethod) + "` is not a static method");
    }
    // Static invocation: no receiver instance is needed.
    return invokeMethod(null, staticMethod, args);
}
/** Static invocation by name: valueOf works for both arg styles; a missing method throws. */
@Test
public void testInvokeMethod3() throws NoSuchMethodException, InvocationTargetException {
    Assertions.assertEquals("0", ReflectionUtil.invokeStaticMethod(
        String.class, "valueOf", new Class<?>[]{int.class}, 0));
    Assertions.assertEquals("123", ReflectionUtil.invokeStaticMethod(
        String.class, "valueOf", new Class<?>[]{int.class}, new Object[]{123}));
    Assertions.assertThrows(NoSuchMethodException.class, () -> ReflectionUtil
        .invokeStaticMethod(String.class, "size", null, ReflectionUtil.EMPTY_ARGS));
}
/**
 * Checks whether {@code actualVersion} satisfies {@code acceptableVersionRange}.
 * A single version like "1.4" is treated as an inclusive left bound ("[1.4,)").
 * Interval notation uses '[' / ']' for inclusive and '(' / ')' for exclusive bounds;
 * either bound (but not both) may be empty.
 *
 * @throws IllegalArgumentException if the range string is malformed
 */
public boolean compatibleVersion(String acceptableVersionRange, String actualVersion) {
    V pluginVersion = parseVersion(actualVersion);
    // Treat a single version "1.4" as a left bound, equivalent to "[1.4,)"
    if (acceptableVersionRange.matches(VERSION_REGEX)) {
        return ge(pluginVersion, parseVersion(acceptableVersionRange));
    }
    // Otherwise ensure it is a version range with bounds
    Matcher matcher = INTERVAL_PATTERN.matcher(acceptableVersionRange);
    Preconditions.checkArgument(matcher.matches(), "invalid version range");
    String leftBound = matcher.group("left");
    String rightBound = matcher.group("right");
    Preconditions.checkArgument(
        leftBound != null || rightBound != null, "left and right bounds cannot both be empty");
    // '[' means inclusive left bound, '(' exclusive; ']' / ')' likewise on the right.
    BiPredicate<V, V> leftComparator =
        acceptableVersionRange.startsWith("[") ? VersionChecker::ge : VersionChecker::gt;
    BiPredicate<V, V> rightComparator =
        acceptableVersionRange.endsWith("]") ? VersionChecker::le : VersionChecker::lt;
    if (leftBound != null && !leftComparator.test(pluginVersion, parseVersion(leftBound))) {
        return false;
    }
    if (rightBound != null && !rightComparator.test(pluginVersion, parseVersion(rightBound))) {
        return false;
    }
    return true;
}
/** A version above an inclusive right bound must be rejected for every left-bound style. */
@Test
public void testRange_rightClosed() {
    Assert.assertFalse(checker.compatibleVersion("[2.3,4.3]", "5.0"));
    Assert.assertFalse(checker.compatibleVersion("(2.3,4.3]", "5.0"));
    Assert.assertFalse(checker.compatibleVersion("[,4.3]", "5.0"));
    Assert.assertFalse(checker.compatibleVersion("(,4.3]", "5.0"));
}
/**
 * Prepares the information needed to open an S3 object: validates openFile options,
 * reuses a caller-supplied FileStatus (skipping a HEAD request) when available,
 * resolves split start/end and file length, and selects buffer/readahead/input-policy
 * settings for the eventual read.
 *
 * @throws UnsupportedOperationException if S3 Select is requested as a mandatory option
 * @throws FileNotFoundException if the supplied status references a directory
 */
@SuppressWarnings("ChainOfInstanceofChecks")
public OpenFileInformation prepareToOpenFile(
    final Path path,
    final OpenFileParameters parameters,
    final long blockSize) throws IOException {
  Configuration options = parameters.getOptions();
  Set<String> mandatoryKeys = parameters.getMandatoryKeys();

  // S3 Select is not supported in this release
  if (options.get(SelectConstants.SELECT_SQL, null) != null) {
    if (mandatoryKeys.contains(SelectConstants.SELECT_SQL)) {
      // mandatory option: fail with a specific message.
      throw new UnsupportedOperationException(SelectConstants.SELECT_UNSUPPORTED);
    } else {
      // optional; log once and continue
      LOG_NO_SQL_SELECT.warn(SelectConstants.SELECT_UNSUPPORTED);
    }
  }
  // choice of keys depends on open type
  rejectUnknownMandatoryKeys(
      mandatoryKeys,
      InternalConstants.S3A_OPENFILE_KEYS,
      "for " + path + " in file I/O");

  // where does a read end?
  long fileLength = LENGTH_UNKNOWN;

  // was a status passed in via a withStatus() invocation in
  // the builder API?
  FileStatus providedStatus = parameters.getStatus();
  S3AFileStatus fileStatus = null;
  if (providedStatus != null) {
    // there's a file status
    // make sure the file name matches -the rest of the path
    // MUST NOT be checked.
    Path providedStatusPath = providedStatus.getPath();
    checkArgument(path.getName().equals(providedStatusPath.getName()),
        "Filename mismatch between file being opened %s and"
            + " supplied filestatus %s",
        path, providedStatusPath);

    // make sure the status references a file
    if (providedStatus.isDirectory()) {
      throw new FileNotFoundException(
          "Supplied status references a directory " + providedStatus);
    }
    // build up the values
    long len = providedStatus.getLen();
    long modTime = providedStatus.getModificationTime();
    String versionId;
    String eTag;
    // can use this status to skip our own probes,
    LOG.debug("File was opened with a supplied FileStatus;"
        + " skipping getFileStatus call in open() operation: {}",
        providedStatus);

    // what type is the status (and hence: what information does it contain?)
    if (providedStatus instanceof S3AFileStatus) {
      // is it an S3AFileSystem status?
      S3AFileStatus st = (S3AFileStatus) providedStatus;
      versionId = st.getVersionId();
      eTag = st.getEtag();
    } else if (providedStatus instanceof S3ALocatedFileStatus) {
      // S3ALocatedFileStatus instance may supply etag and version.
      S3ALocatedFileStatus st = (S3ALocatedFileStatus) providedStatus;
      versionId = st.getVersionId();
      eTag = st.getEtag();
    } else {
      // it is another type.
      // build a status struct without etag or version.
      LOG.debug("Converting file status {}", providedStatus);
      versionId = null;
      eTag = null;
    }
    // Construct a new file status with the real path of the file.
    fileStatus = new S3AFileStatus(
        len, modTime, path, blockSize, username, eTag, versionId);
    // set the end of the read to the file length
    fileLength = fileStatus.getLen();
  }

  FSBuilderSupport builderSupport = new FSBuilderSupport(options);
  // determine start and end of file.
  long splitStart = builderSupport.getPositiveLong(FS_OPTION_OPENFILE_SPLIT_START, 0);

  // split end
  long splitEnd = builderSupport.getLong(
      FS_OPTION_OPENFILE_SPLIT_END, LENGTH_UNKNOWN);

  // an inverted split range is treated as "no split start" rather than an error.
  if (splitStart > 0 && splitStart > splitEnd) {
    LOG.warn("Split start {} is greater than split end {}, resetting",
        splitStart, splitEnd);
    splitStart = 0;
  }

  // read end is the open file value
  fileLength = builderSupport.getPositiveLong(FS_OPTION_OPENFILE_LENGTH, fileLength);

  // if the read end has come from options, use that
  // in creating a file status
  if (fileLength >= 0 && fileStatus == null) {
    fileStatus = createStatus(path, fileLength, blockSize);
  }

  // Build up the input policy.
  // seek policy from default, s3a opt or standard option
  // read from the FS standard option.
  Collection<String> policies =
      options.getStringCollection(FS_OPTION_OPENFILE_READ_POLICY);
  if (policies.isEmpty()) {
    // fall back to looking at the S3A-specific option.
    policies = options.getStringCollection(INPUT_FADVISE);
  }

  return new OpenFileInformation()
      .withAsyncDrainThreshold(
          builderSupport.getPositiveLong(ASYNC_DRAIN_THRESHOLD, defaultReadAhead))
      .withBufferSize(
          (int)builderSupport.getPositiveLong(
              FS_OPTION_OPENFILE_BUFFER_SIZE, defaultBufferSize))
      .withChangePolicy(changePolicy)
      .withFileLength(fileLength)
      .withInputPolicy(
          S3AInputPolicy.getFirstSupportedPolicy(policies, defaultInputPolicy))
      .withReadAheadRange(
          builderSupport.getPositiveLong(READAHEAD_RANGE, defaultReadAhead))
      .withSplitStart(splitStart)
      .withSplitEnd(splitEnd)
      .withStatus(fileStatus)
      .build();
}
@Test
public void testLocatedStatus() throws Throwable {
  final Path path = new Path("file:///tmp/" + TESTPATH.getName());
  final ObjectAssert<OpenFileSupport.OpenFileInformation> info =
      assertFileInfo(
          prepareToOpenFile(
              params(FS_OPTION_OPENFILE_LENGTH, "32")
                  .withStatus(
                      new S3ALocatedFileStatus(
                          status(path, 4096), null))));
  // etag and version id must be propagated from the located status
  info.extracting(f -> f.getStatus().getVersionId())
      .isEqualTo("version");
  info.extracting(f -> f.getStatus().getEtag())
      .isEqualTo("etag");
  // the status length takes priority over the length option
  info.extracting(f -> f.getStatus().getLen())
      .isEqualTo(4096L);
}
/**
 * Formats the supplied SQL text and strips leading/trailing whitespace
 * from the result.
 *
 * @param source raw SQL text to format
 * @return the formatted, trimmed SQL
 */
public static String format(String source) {
    final String formatted = new FormatProcess(source).perform();
    return formatted.trim();
}
@Test public void formatTest(){ // issue#I3XS44@Gitee // 测试是否空指针错误 final String sql = "(select 1 from dual) union all (select 1 from dual)"; SqlFormatter.format(sql); }
/**
 * Returns a read-lock view over this named read/write lock. Each call
 * creates a fresh facade object bound to the same underlying lock name.
 */
@Override
public RLock readLock() {
    final String lockName = getName();
    return new RedissonReadLock(commandExecutor, lockName);
}
@Test public void testUnlockFail() { Assertions.assertThrows(IllegalMonitorStateException.class, () -> { RReadWriteLock rwlock = redisson.getReadWriteLock("lock"); Thread t = new Thread() { public void run() { RReadWriteLock rwlock = redisson.getReadWriteLock("lock"); rwlock.readLock().lock(); }; }; t.start(); t.join(); RLock lock = rwlock.readLock(); try { lock.unlock(); } finally { // clear scheduler lock.forceUnlock(); } }); }
/**
 * Resolves the simple name of the Java class backing the given value meta's
 * native data type. Binary fields are reported as "Binary" without consulting
 * the native class; if the native class cannot be determined the method logs
 * at debug level and falls back to "Object".
 *
 * @param v the value meta to inspect
 * @return the simple class name, "Binary" for binary types, or "Object" on failure
 */
public static String getNativeDataTypeSimpleName( ValueMetaInterface v ) {
  if ( v.getType() == ValueMetaInterface.TYPE_BINARY ) {
    return "Binary";
  }
  try {
    return v.getNativeDataTypeClass().getSimpleName();
  } catch ( KettleValueException e ) {
    LogChannelInterface log = new LogChannel( v );
    log.logDebug( BaseMessages.getString( PKG, "FieldHelper.Log.UnknownNativeDataTypeSimpleName" ) );
    return "Object";
  }
}
@Test
public void getNativeDataTypeSimpleName_Binary() {
  // Binary types are special-cased and never resolved via the native class.
  assertEquals( "Binary", FieldHelper.getNativeDataTypeSimpleName( new ValueMetaBinary() ) );
}
/** Advances the game state by one frame, moving the bullet a fixed step. */
protected void update() {
    final float bulletStep = 0.5f;
    controller.moveBullet(bulletStep);
}
@Test
void testUpdate() {
    gameLoop.update();
    // A single update moves the bullet exactly half a unit from its origin.
    final float expectedPosition = 0.5f;
    assertEquals(expectedPosition, gameLoop.controller.getBulletPosition(), 0);
}
/**
 * Checks whether the given ASM type denotes a JVM primitive.
 *
 * @param type the type to inspect; may be {@code null}
 * @return {@code true} for primitive sorts, {@code false} for object/array
 *     types and for {@code null}
 */
public static boolean isPrimitive(@Nullable Type type) {
    if (type == null) {
        return false;
    }
    // ASM orders the primitive sorts first; DOUBLE is the last of them.
    return type.getSort() <= Type.DOUBLE;
}
@Test
void testIsPrimitive() {
    // Every primitive sort must be recognised.
    for (Type primitive : Types.PRIMITIVES) {
        assertTrue(Types.isPrimitive(primitive), "Failed on: " + primitive);
    }
    // Boxed types are reference types, not primitives.
    for (String box : Types.PRIMITIVE_BOXES) {
        assertFalse(Types.isPrimitive(box), "Failed on: " + box);
    }
    // Other reference types and nulls of either overload are never primitive.
    assertFalse(Types.isPrimitive(Types.STRING_TYPE));
    assertFalse(Types.isPrimitive((Type) null));
    assertFalse(Types.isPrimitive((String) null));
}
/**
 * Extracts the schema name from a schema metadata path of the form
 * {@code <data-node-root>/<database>/schemas/<schema>}.
 *
 * @param schemaPath schema path to parse
 * @return the schema name, or empty when the path does not match or carries no schema segment
 */
public static Optional<String> getSchemaNameBySchemaPath(final String schemaPath) {
    Pattern pattern = Pattern.compile(getShardingSphereDataNodePath() + "/([\\w\\-]+)/schemas/([\\w\\-]+)?", Pattern.CASE_INSENSITIVE);
    Matcher matcher = pattern.matcher(schemaPath);
    // Group 2 is optional ("?" quantifier), so a successful find() may still
    // return null for it (e.g. a path ending in ".../schemas/"). Optional.of
    // would throw an NPE there; ofNullable maps that case to empty.
    return matcher.find() ? Optional.ofNullable(matcher.group(2)) : Optional.empty();
}
@Test
void assertGetSchemaNameBySchemaPathSchemaNameNotFoundScenario() {
    // A malformed path without a proper schemas segment yields no schema name.
    Optional<String> actual = ShardingSphereDataNode.getSchemaNameBySchemaPath("/statistics//databasesdb_name");
    assertThat(actual, is(Optional.empty()));
}
/**
 * Sets the number of reactors the engine will create.
 *
 * @param reactorCount the reactor count; must be positive
 * @return this builder, for chaining
 * @throws IllegalArgumentException if {@code reactorCount} is not positive
 */
public TpcEngineBuilder setReactorCount(int reactorCount) {
    int validatedCount = checkPositive(reactorCount, "reactorCount");
    this.reactorCount = validatedCount;
    return this;
}
@Test
public void test_setReactorCountWhenNegative() {
    // A negative reactor count must be rejected by the builder.
    TpcEngineBuilder builder = new TpcEngineBuilder();
    assertThrows(IllegalArgumentException.class,
            () -> builder.setReactorCount(-1));
}
/**
 * Runs a single iteration of the sender loop: services transactional state
 * first (possibly sending a transactional request instead of producer data),
 * then drains the accumulator and polls the network client.
 */
void runOnce() {
    if (transactionManager != null) {
        try {
            // Attempt to resolve sequence numbers left unresolved by
            // previously failed or expired batches.
            transactionManager.maybeResolveSequences();

            RuntimeException lastError = transactionManager.lastError();

            // do not continue sending if the transaction manager is in a failed state
            if (transactionManager.hasFatalError()) {
                if (lastError != null)
                    maybeAbortBatches(lastError);
                // Still poll so connection upkeep and in-flight requests progress.
                client.poll(retryBackoffMs, time.milliseconds());
                return;
            }

            if (transactionManager.hasAbortableError() && shouldHandleAuthorizationError(lastError)) {
                return;
            }

            // Check whether we need a new producerId. If so, we will enqueue an InitProducerId
            // request which will be sent below
            transactionManager.bumpIdempotentEpochAndResetIdIfNeeded();

            // If a transactional request was sent and polled, skip sending
            // producer data in this iteration.
            if (maybeSendAndPollTransactionalRequest()) {
                return;
            }
        } catch (AuthenticationException e) {
            // This is already logged as error, but propagated here to perform any clean ups.
            log.trace("Authentication exception while processing transactional request", e);
            transactionManager.authenticationFailed(e);
        }
    }

    // Send any ready producer data, then poll with the timeout the send
    // phase computed (0 when more work is immediately pending).
    long currentTimeMs = time.milliseconds();
    long pollTimeout = sendProducerData(currentTimeMs);
    client.poll(pollTimeout, currentTimeMs);
}
/**
 * Verifies that the sender's poll timeout tracks the next batch expiry:
 * first an immediate poll (0), then a poll bounded by the delivery timeout,
 * and after the batch has expired a strictly positive timeout again.
 */
@Test
public void testResetNextBatchExpiry() throws Exception {
    client = spy(new MockClient(time, metadata));

    setupWithTransactionState(null);

    appendToAccumulator(tp0, 0L, "key", "value");

    // Two iterations: the first sends the batch, the second waits on it.
    sender.runOnce();
    sender.runOnce();
    // Advance past the delivery timeout so the in-flight batch expires.
    time.setCurrentTimeMs(time.milliseconds() + accumulator.getDeliveryTimeoutMs() + 1);
    sender.runOnce();

    // The poll timeouts must occur in this exact order.
    InOrder inOrder = inOrder(client);
    inOrder.verify(client, atLeastOnce()).ready(any(), anyLong());
    inOrder.verify(client, atLeastOnce()).newClientRequest(anyString(), any(), anyLong(), anyBoolean(), anyInt(), any());
    inOrder.verify(client, atLeastOnce()).send(any(), anyLong());
    inOrder.verify(client).poll(eq(0L), anyLong());
    inOrder.verify(client).poll(eq(accumulator.getDeliveryTimeoutMs()), anyLong());
    inOrder.verify(client).poll(geq(1L), anyLong());
}
/**
 * Checks whether a file exists at the given remote path.
 *
 * @param path the remote path to probe
 * @return {@code true} when the server answers 200, {@code false} on 404
 * @throws KoofrClientIOException for any other HTTP status
 * @throws IOException on transport failure
 * @throws InvalidTokenException if authentication fails
 */
public boolean fileExists(String path) throws IOException, InvalidTokenException {
    final String url;
    try {
        url = getUriBuilder()
            .setPath(API_PATH_PREFIX + "/mounts/primary/files/info")
            .setParameter("path", path)
            .build()
            .toString();
    } catch (URISyntaxException e) {
        // Building the URL from trusted constants failing is a programming error.
        throw new IllegalStateException("Could not produce url.", e);
    }

    Request.Builder requestBuilder = getRequestBuilder(url);

    try (Response response = getResponse(requestBuilder)) {
        switch (response.code()) {
            case 200:
                return true;
            case 404:
                return false;
            default:
                throw new KoofrClientIOException(response);
        }
    }
}
@Test
public void testFileExistsError() {
    server.enqueue(new MockResponse().setResponseCode(500).setBody("Internal error"));

    // Any status other than 200/404 must surface as a KoofrClientIOException.
    KoofrClientIOException caughtExc =
        assertThrows(KoofrClientIOException.class, () -> client.fileExists("/path/to/file"));

    assertNotNull(caughtExc);
    // The message must include the status code, reason phrase and body.
    assertEquals(
        "Got error code: 500 message: Server Error body: Internal error",
        caughtExc.getMessage());
    // Exactly one request should have reached the server.
    assertEquals(1, server.getRequestCount());
}
/**
 * Chooses a service instance for the given service name by delegating to the
 * global {@link DiscoveryManager} singleton.
 *
 * @param serviceName name of the service to look up
 * @return the selected instance, or empty if none could be chosen
 */
@Override
public Optional<ServiceInstance> choose(String serviceName) {
    return DiscoveryManager.INSTANCE.choose(serviceName);
}
@Test
public void choose() {
    final List<ServiceInstance> instances = mockInstances();
    final Optional<ServiceInstance> chosen = lbService.choose(serviceName);
    // A choice must be made, and it must come from the mocked instance pool.
    Assert.assertTrue(chosen.isPresent());
    Assert.assertTrue(instances.contains(chosen.get()));
}
/**
 * Triggers a coordinator checkpoint for the given checkpoint id, completing
 * {@code result} with the coordinator state once it is available. The work is
 * handed off to the scheduler's main thread executor.
 */
@Override
public void checkpointCoordinator(long checkpointId, CompletableFuture<byte[]> result) {
    // unfortunately, this method does not run in the scheduler executor, but in the
    // checkpoint coordinator time thread.
    // we can remove the delegation once the checkpoint coordinator runs fully in the
    // scheduler's main thread executor
    mainThreadExecutor.execute(() -> checkpointCoordinatorInternal(checkpointId, result));
}
@Test void testCheckpointFailsIfSendingEventFailedAfterTrigger() throws Exception { CompletableFuture<Acknowledge> eventSendingResult = new CompletableFuture<>(); final EventReceivingTasks tasks = EventReceivingTasks.createForRunningTasksWithRpcResult(eventSendingResult); final OperatorCoordinatorHolder holder = createCoordinatorHolder(tasks, TestingOperatorCoordinator::new); // Send one event without finishing it. getCoordinator(holder).getSubtaskGateway(0).sendEvent(new TestOperatorEvent(0)); // Trigger one checkpoint. CompletableFuture<byte[]> checkpointResult = new CompletableFuture<>(); holder.checkpointCoordinator(1, checkpointResult); getCoordinator(holder).getLastTriggeredCheckpoint().complete(new byte[0]); // Fail the event sending. eventSendingResult.completeExceptionally(new RuntimeException("Artificial")); assertThat(checkpointResult).isCompletedExceptionally(); }
/**
 * Creates a thread factory whose threads are named after {@code pattern}.
 * If the pattern contains {@code %d}, each thread's name is the pattern
 * formatted with a 1-based, monotonically increasing counter; otherwise
 * every thread receives the pattern verbatim as its name.
 *
 * @param pattern thread-name pattern, optionally containing {@code %d}
 * @param daemon whether created threads are daemon threads
 * @return the thread factory
 */
public static ThreadFactory createThreadFactory(final String pattern, final boolean daemon) {
    final AtomicLong threadCounter = new AtomicLong(0);
    return runnable -> {
        final String threadName = pattern.contains("%d")
                ? String.format(pattern, threadCounter.incrementAndGet())
                : pattern;
        final Thread thread = new Thread(runnable, threadName);
        thread.setDaemon(daemon);
        return thread;
    };
}
@Test
public void testThreadNameWithoutNumberNoDemon() {
    // Without a "%d" placeholder the pattern is used verbatim as the name.
    Thread thread = ThreadUtils.createThreadFactory(THREAD_NAME, false).newThread(EMPTY_RUNNABLE);
    assertEquals(THREAD_NAME, thread.getName());
}