focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Copies the {@link Integer} elements of a collection into a primitive int array,
 * preserving the collection's iteration order.
 *
 * @param collection the source collection; must not be null and must not contain
 *                   null elements (a null element throws NullPointerException
 *                   during unboxing, same as the original manual loop)
 * @return a new int[] of the same size and order as the collection
 */
public static int[] toIntArray(Collection<Integer> collection) {
    // Stream-based unboxing replaces the manual index-tracking copy loop.
    return collection.stream().mapToInt(Integer::intValue).toArray();
}
/** Verifies toIntArray preserves size, element order, and extreme values. */
@Test
public void testToIntArray() {
    List<Integer> source = new ArrayList<>();
    source.add(42);
    source.add(23);
    source.add(Integer.MAX_VALUE);

    int[] result = toIntArray(source);

    assertNotNull(result);
    assertEquals(source.size(), result.length);
    assertEquals(source.get(0).intValue(), result[0]);
    assertEquals(source.get(1).intValue(), result[1]);
    assertEquals(source.get(2).intValue(), result[2]);
}
/**
 * Drops cached SCM metadata when a plugin is unloaded, but only if the SCM
 * extension actually handles that plugin.
 */
@Override
public void pluginUnLoaded(GoPluginDescriptor pluginDescriptor) {
    final String pluginId = pluginDescriptor.id();
    if (!scmExtension.canHandlePlugin(pluginId)) {
        return; // not an SCM plugin; nothing was cached for it
    }
    scmMetadataStore.removeMetadata(pluginId);
}
/**
 * When the SCM extension does not handle the unloaded plugin, its metadata
 * must remain in the (singleton) metadata store untouched.
 */
@Test
public void shouldNotTryRemoveMetadataOnPluginUnLoadedCallback() throws Exception {
    // Seed the shared store with metadata for this plugin id.
    SCMConfigurations scmConfigurations = new SCMConfigurations();
    SCMView scmView = createSCMView(null, null);
    SCMMetadataStore.getInstance().addMetadataFor(pluginDescriptor.id(), scmConfigurations, scmView);
    // Extension declines the plugin, so unload must be a no-op.
    when(scmExtension.canHandlePlugin(pluginDescriptor.id())).thenReturn(false);
    metadataLoader.pluginUnLoaded(pluginDescriptor);
    // Both configuration and view metadata should still be present.
    assertThat(SCMMetadataStore.getInstance().getConfigurationMetadata(pluginDescriptor.id())).isEqualTo(scmConfigurations);
    assertThat(SCMMetadataStore.getInstance().getViewMetadata(pluginDescriptor.id())).isEqualTo(scmView);
}
/** Returns a lazily-initialized UDAF adapter backed by this combiner. */
public UdafImpl getUdafImpl() {
    final UdafImpl udaf = new LazyUdafImpl<>(this);
    return udaf;
}
/**
 * Asking for parameter types of a UDAF built from a non-parameterized
 * aggregate fn must fail with IllegalStateException.
 * NOTE: exceptions.expect(...) must be set before the triggering call.
 */
@Test
public void nonparameterizedGetUdafImpl_throwsIllegalStateException() {
    LazyAggregateCombineFn<?, ?, ?> combiner = new LazyAggregateCombineFn<>(new NonParameterizedAggregateFn());
    AggregateFunction aggregateFunction = combiner.getUdafImpl();
    RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    exceptions.expect(IllegalStateException.class);
    List<FunctionParameter> params = aggregateFunction.getParameters();
    // Resolving the parameter's type is the call expected to throw.
    params.get(0).getType(typeFactory);
}
/**
 * Builds the gauge set for JVM thread statistics: one count gauge per
 * {@link Thread.State}, overall/daemon/peak/total-started counts, and
 * deadlock count plus deadlock descriptions.
 *
 * @return an unmodifiable name-to-gauge map
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> metrics = new HashMap<>();
    // Per-state gauges, e.g. "runnable.count", "blocked.count".
    for (final Thread.State state : Thread.State.values()) {
        final String metricName = name(state.toString().toLowerCase(), "count");
        metrics.put(metricName, (Gauge<Object>) () -> getThreadCount(state));
    }
    metrics.put("count", (Gauge<Integer>) threads::getThreadCount);
    metrics.put("daemon.count", (Gauge<Integer>) threads::getDaemonThreadCount);
    metrics.put("peak.count", (Gauge<Integer>) threads::getPeakThreadCount);
    metrics.put("total_started.count", (Gauge<Long>) threads::getTotalStartedThreadCount);
    metrics.put("deadlock.count", (Gauge<Integer>) () -> deadlockDetector.getDeadlockedThreads().size());
    metrics.put("deadlocks", (Gauge<Set<String>>) deadlockDetector::getDeadlockedThreads);
    return Collections.unmodifiableMap(metrics);
}
/** The "deadlock.count" gauge reports the number of detected deadlocked threads. */
@Test
public void hasAGaugeForAnyDeadlockCount() {
    final Gauge<?> deadlockCount = (Gauge<?>) gauges.getMetrics().get("deadlock.count");
    assertThat(deadlockCount.getValue()).isEqualTo(1);
}
/**
 * Hash code combining startOffset and cssClass with the conventional 31
 * multiplier. Produces the identical value to the original two-step
 * accumulation (31 * startOffset + cssClass hash, null hashing to 0).
 */
@Override
public int hashCode() {
    final int cssClassHash = (cssClass == null) ? 0 : cssClass.hashCode();
    return 31 * startOffset + cssClassHash;
}
/** Equal state hashes identically; different state hashes differently here. */
@Test
public void test_hashcode() {
    OpeningHtmlTag tag = new OpeningHtmlTag(3, "tag");
    OpeningHtmlTag sameValues = new OpeningHtmlTag(3, "tag");
    OpeningHtmlTag differentValues = new OpeningHtmlTag(5, "tag2");

    // Same field values (and the object itself) must produce the same hash.
    assertThat(tag)
        .hasSameHashCodeAs(sameValues)
        .hasSameHashCodeAs(tag);
    // These particular differing values must not collide.
    assertThat(tag.hashCode()).isNotEqualTo(differentValues.hashCode());
}
/**
 * Consumer-rebalance callback: logs current assignment, then delegates
 * revocation handling to the task manager when the thread either accepted
 * the PARTITIONS_REVOKED transition or is already shutting down.
 */
@Override
public void onPartitionsRevoked(final Collection<TopicPartition> partitions) {
    log.debug("Current state {}: revoked partitions {} because of consumer rebalance.\n" +
            "\tcurrently assigned active tasks: {}\n" +
            "\tcurrently assigned standby tasks: {}\n",
        streamThread.state(),
        partitions,
        taskManager.activeTaskIds(),
        taskManager.standbyTaskIds());
    // We need to still invoke handleRevocation if the thread has been told to shut down, but we shouldn't ever
    // transition away from PENDING_SHUTDOWN once it's been initiated (to anything other than DEAD)
    if ((streamThread.setState(State.PARTITIONS_REVOKED) != null || streamThread.state() == State.PENDING_SHUTDOWN)
            && !partitions.isEmpty()) {
        // Time the revocation so slow handovers are visible in the logs.
        final long start = time.milliseconds();
        try {
            taskManager.handleRevocation(partitions);
        } finally {
            log.info("partition revocation took {} ms.", time.milliseconds() - start);
        }
    }
}
/** A refused state transition (null) must suppress revocation handling. */
@Test
public void shouldNotHandleRevokedPartitionsIfStateCannotTransitToPartitionRevoked() {
    // A null transition result means the thread rejected the state change.
    when(streamThread.setState(State.PARTITIONS_REVOKED)).thenReturn(null);

    TopicPartition partition = new TopicPartition("topic", 0);
    streamsRebalanceListener.onPartitionsRevoked(Collections.singletonList(partition));

    verify(taskManager, never()).handleRevocation(any());
}
/**
 * Creates the agent responsible for preparing a material's working copy.
 * Dependency and package materials need no local checkout (NO_OP).
 * NOTE(review): the PluggableSCMMaterial check appears to run before the
 * ScmMaterial check deliberately -- presumably PluggableSCMMaterial is (or
 * behaves like) an ScmMaterial subtype; confirm before reordering.
 */
public MaterialAgent createAgent(MaterialRevision revision) {
    Material material = revision.getMaterial();
    if (material instanceof DependencyMaterial) {
        return MaterialAgent.NO_OP;
    } else if (material instanceof PackageMaterial) {
        return MaterialAgent.NO_OP;
    } else if (material instanceof PluggableSCMMaterial) {
        return new PluggableSCMMaterialAgent(scmExtension, revision, workingDirectory, consumer);
    } else if (material instanceof ScmMaterial) {
        // SCM materials execute subprocesses inside their own dest folder.
        String destFolderPath = ((ScmMaterial) material).workingdir(workingDirectory).getAbsolutePath();
        return new AbstractMaterialAgent(revision, consumer, workingDirectory,
            new AgentSubprocessExecutionContext(agentIdentifier, destFolderPath));
    }
    throw new RuntimeException("Could not find MaterialChecker for material = " + material);
}
/**
 * A PluggableSCMMaterial revision must produce a PluggableSCMMaterialAgent
 * wired with the factory's extension, the revision, and the working dir.
 */
@Test
public void shouldGetPluggableSCMMaterialAgent() {
    File workingDirectory = new File("/tmp/workingDirectory");
    MaterialRevision revision = new MaterialRevision(new PluggableSCMMaterial(), new Modifications());
    MaterialAgentFactory factory = new MaterialAgentFactory(null, workingDirectory, null, scmExtension);
    MaterialAgent agent = factory.createAgent(revision);
    assertThat(agent instanceof PluggableSCMMaterialAgent, is(true));
    // Private wiring is verified via reflection since no getters exist.
    assertThat(ReflectionUtil.getField(agent, "scmExtension"), is(scmExtension));
    assertThat(ReflectionUtil.getField(agent, "revision"), is(revision));
    assertThat(ReflectionUtil.getField(agent, "workingDirectory"), is(workingDirectory));
}
/**
 * Resolves key/value metadata via the resolver selected from the options,
 * failing fast if the resolver yields null.
 */
public KvMetadata resolveMetadata(
        boolean isKey,
        List<MappingField> resolvedFields,
        Map<String, String> options,
        InternalSerializationService serializationService
) {
    final KvMetadataResolver resolver = findMetadataResolver(options, isKey);
    final KvMetadata metadata = resolver.resolveMetadata(isKey, resolvedFields, options, serializationService);
    return requireNonNull(metadata);
}
/** Resolving metadata without a format option must raise a QueryException. */
@Test
@Parameters({
    "true",
    "false"
})
public void when_formatIsMissingInOptionsWhileResolvingMetadata_then_throws(boolean key) {
    assertThatThrownBy(() -> resolvers.resolveMetadata(key, emptyList(), emptyMap(), ss))
        .isInstanceOf(QueryException.class)
        .hasMessageMatching("Missing '(key|value)Format' option");
}
/**
 * Serializes an RPC response into a core-notification proto and publishes it
 * to the target service's notifications topic, keyed by the response id.
 * A missing error is encoded as -1 (sentinel) since proto has no optional here.
 */
@Override
public void pushNotificationToCore(String serviceId, FromDeviceRpcResponse response, TbQueueCallback callback) {
    TopicPartitionInfo tpi = topicService.getNotificationsTopic(ServiceType.TB_CORE, serviceId);
    log.trace("PUSHING msg: {} to:{}", response, tpi);
    FromDeviceRPCResponseProto.Builder builder = FromDeviceRPCResponseProto.newBuilder()
        .setRequestIdMSB(response.getId().getMostSignificantBits())
        .setRequestIdLSB(response.getId().getLeastSignificantBits())
        .setError(response.getError().isPresent() ? response.getError().get().ordinal() : -1);
    // Response payload is optional; only set when present.
    response.getResponse().ifPresent(builder::setResponse);
    ToCoreNotificationMsg msg = ToCoreNotificationMsg.newBuilder().setFromDeviceRpcResponse(builder).build();
    producerProvider.getTbCoreNotificationsMsgProducer().send(tpi, new TbProtoQueueMsg<>(response.getId(), msg), callback);
    // Stats counter for core notifications.
    toCoreNfs.incrementAndGet();
}
/**
 * Exercises the REST-API-response overload of pushNotificationToCore:
 * the produced queue message must carry the expected proto, a non-null key,
 * and default (empty) headers.
 */
@Test
public void testPushNotificationToCoreWithRestApiCallResponseMsgProto() {
    TopicPartitionInfo tpi = mock(TopicPartitionInfo.class);
    TbQueueCallback callbackMock = mock(TbQueueCallback.class);
    TbQueueProducer<TbProtoQueueMsg<TransportProtos.ToCoreNotificationMsg>> tbCoreQueueProducer = mock(TbQueueProducer.class);
    doReturn(tpi).when(topicService).getNotificationsTopic(any(ServiceType.class), any(String.class));
    when(producerProvider.getTbCoreNotificationsMsgProducer()).thenReturn(tbCoreQueueProducer);
    TransportProtos.RestApiCallResponseMsgProto responseMsgProto = TransportProtos.RestApiCallResponseMsgProto.getDefaultInstance();
    // Expected wire message wrapping the REST API call response.
    TransportProtos.ToCoreNotificationMsg toCoreNotificationMsg = TransportProtos.ToCoreNotificationMsg.newBuilder().setRestApiCallResponseMsg(responseMsgProto).build();
    clusterService.pushNotificationToCore(CORE, responseMsgProto, callbackMock);
    verify(topicService).getNotificationsTopic(ServiceType.TB_CORE, CORE);
    verify(producerProvider).getTbCoreNotificationsMsgProducer();
    ArgumentCaptor<TbProtoQueueMsg<TransportProtos.ToCoreNotificationMsg>> protoQueueMsgArgumentCaptor = ArgumentCaptor.forClass(TbProtoQueueMsg.class);
    verify(tbCoreQueueProducer).send(eq(tpi), protoQueueMsgArgumentCaptor.capture(), eq(callbackMock));
    TbProtoQueueMsg<TransportProtos.ToCoreNotificationMsg> protoQueueMsgArgumentCaptorValue = protoQueueMsgArgumentCaptor.getValue();
    assertThat(protoQueueMsgArgumentCaptorValue.getKey()).isNotNull();
    assertThat(protoQueueMsgArgumentCaptorValue.getValue()).isEqualTo(toCoreNotificationMsg);
    assertThat(protoQueueMsgArgumentCaptorValue.getHeaders().getData()).isEqualTo(new DefaultTbQueueMsgHeaders().getData());
}
/**
 * Writes one byte, rolling to a new part file when the current buffer reaches
 * the part size, and switching to multipart upload once total bytes written
 * pass the multipart threshold. Statement order matters: the roll check runs
 * before the write so a part never exceeds multiPartSize.
 */
@Override
public void write(int b) throws IOException {
    if (stream.getCount() >= multiPartSize) {
        newStream();
        uploadParts();
    }
    stream.write(b);
    pos += 1;
    writeBytes.increment();
    writeOperations.increment();
    // switch to multipart upload
    if (multipartUploadId == null && pos >= multiPartThresholdSize) {
        initializeMultiPartUpload();
        uploadParts();
    }
}
/**
 * A failed part upload during close() must propagate the original exception
 * and abort the multipart upload exactly once.
 */
@Test
public void testAbortAfterFailedPartUpload() {
    RuntimeException mockException = new RuntimeException("mock uploadPart failure");
    doThrow(mockException).when(s3mock).uploadPart((UploadPartRequest) any(), (RequestBody) any());
    assertThatThrownBy(
        () -> {
            // 10 MiB forces the stream over the multipart threshold.
            try (S3OutputStream stream = new S3OutputStream(s3mock, randomURI(), properties, nullMetrics())) {
                stream.write(randomData(10 * 1024 * 1024));
            }
        })
        .isInstanceOf(mockException.getClass())
        .hasMessageContaining(mockException.getMessage());
    verify(s3mock, times(1)).abortMultipartUpload((AbortMultipartUploadRequest) any());
}
/**
 * Unicast routing: picks a single data source and builds route units.
 * - No logic tables: route to the chosen data source with no table mappers.
 * - One table without a sharding rule: same data-source-only route.
 * - One sharded table: route to that table's first data node.
 * - Multiple tables: delegated to routeWithMultipleTables.
 */
@Override
public RouteContext route(final ShardingRule shardingRule) {
    RouteContext result = new RouteContext();
    String dataSourceName = getDataSourceName(shardingRule.getDataSourceNames());
    RouteMapper dataSourceMapper = new RouteMapper(dataSourceName, dataSourceName);
    if (logicTables.isEmpty()) {
        result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
    } else if (1 == logicTables.size()) {
        String logicTableName = logicTables.iterator().next();
        if (!shardingRule.findShardingTable(logicTableName).isPresent()) {
            // Table is not sharded: route by data source only.
            result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
            return result;
        }
        DataNode dataNode = shardingRule.getDataNode(logicTableName);
        result.getRouteUnits().add(new RouteUnit(new RouteMapper(dataNode.getDataSourceName(), dataNode.getDataSourceName()),
            Collections.singletonList(new RouteMapper(logicTableName, dataNode.getTableName()))));
    } else {
        routeWithMultipleTables(result, shardingRule);
    }
    return result;
}
/** A table without a sharding rule still yields exactly one route unit. */
@Test
void assertRoutingForTableWithoutTableRule() {
    ShardingUnicastRoutingEngine engine = new ShardingUnicastRoutingEngine(
        mock(SQLStatementContext.class), Collections.singleton("t_other"), new ConnectionContext(Collections::emptySet));
    RouteContext actual = engine.route(shardingRule);
    assertThat(actual.getRouteUnits().size(), is(1));
}
/**
 * FEEL string() builtin: a null input maps to a null result, any other value
 * is rendered via TypeUtil.formatValue (non-quoted form).
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    return val == null
        ? FEELFnResult.ofResult(null)
        : FEELFnResult.ofResult(TypeUtil.formatValue(val, false));
}
/** A plain string input is formatted as itself. */
@Test
void invokeString() {
    FunctionTestUtil.assertResult(stringFunction.invoke("test"), "test");
}
/**
 * Adds a value to the set.
 *
 * @return true if the value was newly inserted, false if already present
 */
@Override
public boolean add(long value) {
    assert value != nullValue : "add() called with null-sentinel value " + nullValue;
    final boolean isNewEntry = hsa.ensure(value).isNew();
    return isNewEntry;
}
/** First insertion succeeds; the duplicate insertion is rejected. */
@Test
public void testAdd() {
    long value = random.nextLong();
    assertTrue(set.add(value));
    assertFalse(set.add(value));
}
/** Looks up all values for a parameter in the backing map (null if absent). */
@Override
public String[] getParameterValues(String name) {
    final String[] values = stringMap.get(name);
    return values;
}
/** Unknown parameters resolve to null rather than an empty array. */
@Test
void testGetParameterValuesEmpty() {
    String[] values = reuseUploadFileHttpServletRequest.getParameterValues("nonExistentParam");
    assertNull(values);
}
/** Long-typed convenience wrapper around the generic search-curve computation. */
public List<ZAddressRange<Long>> zOrderSearchCurveLongs(List<ZValueRange> ranges) {
    final List<ZAddressRange<Long>> addressRanges = zOrderSearchCurve(ranges);
    return addressRanges;
}
/**
 * For a 2x2-bit z-order curve, each fixture range set must map to the
 * expected address ranges (fixtures indexed in lock-step).
 */
@Test
public void testZOrderSearchEvenCurves() {
    // Two dimensions, two bits each.
    List<Integer> bitPositions = ImmutableList.of(2, 2);
    ZOrder zOrder = new ZOrder(bitPositions);
    for (int i = 0; i < SEARCH_CURVE_RANGES.length; i++) {
        List<ZValueRange> ranges = Arrays.stream(SEARCH_CURVE_RANGES[i]).collect(Collectors.toList());
        List<ZAddressRange<Long>> addresses = zOrder.zOrderSearchCurveLongs(ranges);
        assertEquals(addresses, Arrays.stream(EXPECTED_Z_ADDRESS_RANGES[i]).collect(Collectors.toList()));
    }
}
/**
 * Locates the jar file that contains the given class, optionally filtered by
 * a regex on the jar path.
 *
 * @param clazz           class to locate
 * @param fileNamePattern regex the jar URL path must match, or null for any
 * @return the decoded jar path with the "!entry" suffix stripped, or null if
 *         the class is not loaded from a jar (or no jar matches the pattern)
 */
public static String findContainingJar(Class<?> clazz, String fileNamePattern) {
    ClassLoader loader = clazz.getClassLoader();
    // Resource name of the class file, e.g. "com/foo/Bar.class".
    String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
    try {
        for (final Enumeration<URL> itr = loader.getResources(classFile); itr.hasMoreElements();) {
            final URL url = itr.nextElement();
            if ("jar".equals(url.getProtocol())) {
                String toReturn = url.getPath();
                // Pattern is matched against the raw (still-encoded) path.
                if (fileNamePattern == null || toReturn.matches(fileNamePattern)) {
                    toReturn = URLDecoder.decode(toReturn, "UTF-8");
                    // Drop the "!/path/inside/jar" suffix of the jar URL.
                    return toReturn.replaceAll("!.*$", "");
                }
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return null;
}
/**
 * Classes packaged in hadoop jars must be found by a matching pattern;
 * a pattern that matches no jar must yield null.
 */
@Test
public void testFindContainingJar() throws Exception {
    String result = TempletonUtils.findContainingJar(Configuration.class, ".*hadoop.*\\.jar.*");
    Assert.assertNotNull(result);
    result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
    Assert.assertNotNull(result);
    // No jar matches this pattern, so lookup must fail (null).
    result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
    Assert.assertNull("unexpectedly found jar for HadoopShimsSecure class: " + result, result);
}
/**
 * Creates a unique temporary staging directory near the target path.
 * On ViewFS the staging prefix must be relative (ViewFS mount points cannot
 * host arbitrary absolute staging dirs); when the target itself does not
 * exist, the parent's staging dir is used instead.
 *
 * @return the newly created temporary directory (UUID-named leaf)
 */
public static Path createTemporaryPath(ConnectorSession session, HdfsContext context, HdfsEnvironment hdfsEnvironment, Path targetPath) {
    // use a per-user temporary directory to avoid permission problems
    String temporaryPrefix = getTemporaryStagingDirectoryPath(session)
        .replace("${USER}", context.getIdentity().getUser());

    // use relative temporary directory on ViewFS
    if (isViewFileSystem(context, hdfsEnvironment, targetPath)) {
        if (pathExists(context, hdfsEnvironment, targetPath)) {
            temporaryPrefix = ".hive-staging";
        } else {
            //use the temporary folder in parent when target path does not exist
            temporaryPrefix = "../.hive-staging";
        }
    }

    // create a temporary directory on the same filesystem
    Path temporaryRoot = new Path(targetPath, temporaryPrefix);
    if (!pathExists(context, hdfsEnvironment, temporaryRoot)) {
        createDirectory(context, hdfsEnvironment, temporaryRoot);
    }
    Path temporaryPath = new Path(temporaryRoot, randomUUID().toString());
    createDirectory(context, hdfsEnvironment, temporaryPath);
    return temporaryPath;
}
/**
 * On ViewFS the staging dir must resolve to "<target>/.hive-staging" in all
 * three cases: staging dir absent, staging dir already present, and a
 * non-existent target (which falls back to the parent's staging dir).
 * Each created leaf must be named with a valid UUID.
 */
@Test
void testCreateTemporaryPathOnViewFS() {
    HdfsEnvironment hdfsEnvironment = createTestHdfsEnvironment(new HiveClientConfig(), new MetastoreClientConfig());
    Path viewfsPath = new Path("viewfs://ns-default/test-dir");
    File storageDir = Files.createTempDir();
    // ViewFS check requires the mount point config, using system temporary folder as the storage
    hdfsEnvironment.getConfiguration(CONTEXT, viewfsPath).set("fs.viewfs.mounttable.ns-default.link./test-dir", "file://" + storageDir);
    //Make temporary folder under an existing data folder without staging folder ".hive-staging"
    Path temporaryPath = createTemporaryPath(SESSION, CONTEXT, hdfsEnvironment, viewfsPath);
    assertEquals(temporaryPath.getParent().toString(), "viewfs://ns-default/test-dir/.hive-staging");
    try {
        UUID.fromString(temporaryPath.getName());
    } catch (IllegalArgumentException e) {
        fail("Expected a UUID folder name ");
    }
    //Make temporary folder under an existing data folder with an existing staging folder ".hive-staging"
    temporaryPath = createTemporaryPath(SESSION, CONTEXT, hdfsEnvironment, viewfsPath);
    assertEquals(temporaryPath.getParent().toString(), "viewfs://ns-default/test-dir/.hive-staging");
    try {
        UUID.fromString(temporaryPath.getName());
    } catch (IllegalArgumentException e) {
        fail("Expected a UUID folder name ");
    }
    //Make temporary folder under a non-existing data folder (for new tables), it would use the temporary folder of the parent
    temporaryPath = createTemporaryPath(SESSION, CONTEXT, hdfsEnvironment, new Path(viewfsPath, "non-existing"));
    assertEquals(temporaryPath.getParent().toString(), "viewfs://ns-default/test-dir/.hive-staging");
    try {
        UUID.fromString(temporaryPath.getName());
    } catch (IllegalArgumentException e) {
        fail("Expected a UUID folder name ");
    }
}
// Bean Validation entry point: @Valid makes the container validate Input's
// constraints before the body runs, surfacing violations as a
// ConstraintViolationException. NOTE(review): assumes method-level validation
// is enabled on this bean -- confirm the framework configuration.
void validateInput(@Valid Input input){
    // do something
}
/** Constraint violations on the input must raise ConstraintViolationException. */
@Test
void whenInputIsInvalid_thenThrowsException(){
    Input input = new Input();
    input.setNumberBetweenOneAndTen(0);   // below the allowed minimum
    input.setIpAddress("invalid");        // not a valid IP address

    assertThrows(ConstraintViolationException.class, () -> service.validateInput(input));
}
/**
 * Validates a file path with the boolean flag defaulted to false.
 *
 * @param inputPath path to validate
 * @return the validated path as produced by the two-argument overload
 * @see #getValidFilePath(String, boolean)
 */
public static String getValidFilePath(String inputPath) {
    final boolean flag = false; // default per the original single-arg behavior
    return getValidFilePath(inputPath, flag);
}
/** Writing under /var/task must be rejected for both plain and file:// forms. */
@Test
public void getValidFilePath_writeToTaskPath_throwsIllegalArgumentException() {
    // Plain path under /var/task must be rejected when writing.
    try {
        SecurityUtils.getValidFilePath("/var/task/test.txt", true);
        fail("Did not throw exception");
    } catch (IllegalArgumentException e) {
        // expected
    }

    // The file:// URI form of the same path must be rejected as well.
    try {
        SecurityUtils.getValidFilePath("file:///var/task/test.txt", true);
        fail();
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * KSQL UDF: parses a formatted timestamp string to epoch milliseconds using a
 * (cached) parser per format pattern. Parse or cache failures are rethrown as
 * KsqlFunctionException with the offending input and pattern in the message.
 */
@Udf(description = "Converts a string representation of a date in the given format"
    + " into the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT."
    + " Single quotes in the timestamp format can be escaped with '',"
    + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'."
    + " The system default time zone is used when no time zone is explicitly provided.")
public long stringToTimestamp(
    @UdfParameter(
        description = "The string representation of a date.") final String formattedTimestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
    // NB: We do not perform a null here preferring to throw an exception as
    // there is no sentinel value for a "null" Date.
    try {
        // 'parsers' caches one parser per pattern; get() may throw ExecutionException.
        final StringToTimestampParser timestampParser = parsers.get(formatPattern);
        return timestampParser.parse(formattedTimestamp);
    } catch (final ExecutionException | RuntimeException e) {
        throw new KsqlFunctionException("Failed to parse timestamp '" + formattedTimestamp
            + "' with formatter '" + formatPattern
            + "': " + e.getMessage(), e);
    }
}
/**
 * Stress-tests the parser cache with 10k distinct format patterns in
 * parallel; each result must match SimpleDateFormat's parse of the same
 * input, and no parse may throw.
 */
@Test
public void shouldWorkWithManyDifferentFormatters() {
    IntStream.range(0, 10_000)
        .parallel()
        .forEach(idx -> {
            try {
                // Unique literal suffix per index forces a distinct pattern/parser.
                final String sourceDate = "2018-12-01 10:12:13.456X" + idx;
                final String pattern = "yyyy-MM-dd HH:mm:ss.SSS'X" + idx + "'";
                final long result = udf.stringToTimestamp(sourceDate, pattern);
                final long expectedResult = new SimpleDateFormat(pattern).parse(sourceDate).getTime();
                assertThat(result, is(expectedResult));
            } catch (final Exception e) {
                fail(e.getMessage());
            }
        });
}
/**
 * Splits the inline expression on top-level separators, substitutes
 * placeholders, evaluates each fragment via Groovy, and flattens the results.
 * Returns an empty list for a null/empty expression.
 * NOTE(review): the ReflectContext is opened before the empty check, so it is
 * created/closed on every call even for empty input -- confirm this is
 * intentional before reordering.
 */
@Override
public List<String> splitAndEvaluate() {
    try (ReflectContext context = new ReflectContext(JAVA_CLASSPATH)) {
        if (Strings.isNullOrEmpty(inlineExpression)) {
            return Collections.emptyList();
        }
        return flatten(evaluate(context, GroovyUtils.split(handlePlaceHolder(inlineExpression))));
    }
}
/** A 1024-entry inline expression must evaluate to all 1024 table names. */
@Test
void assertEvaluateForLong() {
    // Build "ds_0.t_user_0,ds_0.t_user_1,...,ds_15.t_user_1023"
    // (1024 entries, 64 tables per data source).
    StringBuilder expression = new StringBuilder();
    for (int i = 0; i < 1024; i++) {
        if (i > 0) {
            expression.append(",");
        }
        expression.append("ds_").append(i / 64).append(".t_user_").append(i);
    }
    List<String> actual = createInlineExpressionParser(expression.toString()).splitAndEvaluate();
    assertThat(actual.size(), is(1024));
    assertThat(actual, hasItems("ds_0.t_user_0", "ds_15.t_user_1023"));
}
/** Reports the MySQL protocol version advertised for the session's current database. */
@Override
public String get(final Scope scope, final ConnectionSession connectionSession, final MySQLSystemVariable variable) {
    final DatabaseType mysqlType = TypedSPILoader.getService(DatabaseType.class, "MySQL");
    final String currentDatabase = connectionSession.getCurrentDatabaseName();
    return DatabaseProtocolServerInfo.getProtocolVersion(currentDatabase, mysqlType);
}
/**
 * With the protocol-version lookup stubbed, the VERSION variable provider
 * must return the stubbed value for a fresh session (null current database).
 */
@Test
void assertGetValue() {
    DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "MySQL");
    // Static mock must stay open for the duration of the call under test.
    try (MockedStatic<DatabaseProtocolServerInfo> mockedStatic = Mockito.mockStatic(DatabaseProtocolServerInfo.class)) {
        mockedStatic.when(() -> DatabaseProtocolServerInfo.getProtocolVersion(null, databaseType)).thenReturn("8.0");
        ConnectionSession connectionSession = new ConnectionSession(databaseType, new DefaultAttributeMap());
        assertThat(new VersionValueProvider().get(Scope.GLOBAL, connectionSession, MySQLSystemVariable.VERSION), is("8.0"));
    }
}
/**
 * Instantiates theClass via its no-argument constructor, configured with conf.
 * Delegates with the shared empty parameter-type array (EMPTY_ARRAY) to the
 * overload that supports explicit constructor signatures.
 */
@SuppressWarnings("unchecked")
public static <T> T newInstance(Class<T> theClass, Configuration conf) {
    return newInstance(theClass, conf, EMPTY_ARRAY);
}
/** A class with only an int constructor must be instantiable via the typed overload. */
@Test
public void testNewInstanceForNonDefaultConstructor() {
    Class<?>[] parameterTypes = {int.class};
    Object instance = ReflectionUtils.newInstance(NoDefaultCtor.class, null, parameterTypes, 1);
    assertTrue(instance instanceof NoDefaultCtor);
}
/**
 * Registers the RocksDB "running compactions" gauge with the streams
 * metrics registry, using the shared name/description constants.
 *
 * @param streamsMetrics metrics registry to register into
 * @param metricContext  identifies the store the metric belongs to
 * @param valueProvider  supplies the current number of running compactions
 */
public static void addNumRunningCompactionsMetric(final StreamsMetricsImpl streamsMetrics,
                                                  final RocksDBMetricContext metricContext,
                                                  final Gauge<BigInteger> valueProvider) {
    addMutableMetric(
        streamsMetrics,
        metricContext,
        valueProvider,
        NUMBER_OF_RUNNING_COMPACTIONS,
        NUMBER_OF_RUNNING_COMPACTIONS_DESCRIPTION
    );
}
/** The running-compactions gauge must be registered under the expected name. */
@Test
public void shouldAddNumRunningCompactionsMetric() {
    runAndVerifyMutableMetric(
        "num-running-compactions",
        "Number of currently running compactions",
        () -> RocksDBMetrics.addNumRunningCompactionsMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER)
    );
}
/** Orders sources by their type, delegating to the type's own ordering. */
@Override
public int compareTo(Source other) {
    final int typeComparison = mType.compareTo(other.mType);
    return typeComparison;
}
/**
 * Pins the precedence chain of configuration sources. The -1 expectations
 * rely on the compared constants being adjacent in declaration order
 * (enum compareTo is an ordinal difference).
 */
@Test
public void compareTo() {
    assertEquals(-1, Source.UNKNOWN.compareTo(Source.DEFAULT));
    assertEquals(-1, Source.DEFAULT.compareTo(Source.CLUSTER_DEFAULT));
    assertEquals(-1, Source.CLUSTER_DEFAULT.compareTo(Source.REFERENCE));
    assertEquals(-1, Source.REFERENCE.compareTo(Source.siteProperty("")));
    assertEquals(-1, Source.Type.SITE_PROPERTY.compareTo(Source.Type.SYSTEM_PROPERTY));
    assertEquals(-1, Source.Type.SYSTEM_PROPERTY.compareTo(Source.Type.PATH_DEFAULT));
    assertEquals(-1, Source.PATH_DEFAULT.compareTo(Source.RUNTIME));
    assertEquals(-1, Source.RUNTIME.compareTo(Source.MOUNT_OPTION));
}
/**
 * Wraps HTTP requests and converts any downstream failure into a 500, unless
 * the response is already committed (client aborted) in which case the error
 * is only logged at debug. Non-HTTP requests pass through untouched.
 */
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
    if (request instanceof HttpServletRequest httpRequest) {
        HttpServletResponse httpResponse = (HttpServletResponse) response;
        try {
            chain.doFilter(new ServletRequestWrapper(httpRequest), httpResponse);
        } catch (Throwable e) {
            if (httpResponse.isCommitted()) {
                // Request has been aborted by the client, nothing can been done as Tomcat has committed the response
                LOGGER.debug(format("Processing of request %s failed", toUrl(httpRequest)), e);
                return;
            }
            LOGGER.error(format("Processing of request %s failed", toUrl(httpRequest)), e);
            httpResponse.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    } else {
        // Not an HTTP request, not profiled
        chain.doFilter(request, response);
    }
}
/**
 * When the chain throws after the response is committed, the failure must be
 * logged at DEBUG (not ERROR) and no error status sent.
 */
@Test
public void throwable_in_doFilter_is_logged_in_debug_if_response_is_already_committed() throws Exception {
    logTester.setLevel(Level.DEBUG);
    doThrow(new RuntimeException()).when(chain).doFilter(any(ServletRequest.class), any(ServletResponse.class));
    // true = response already committed.
    HttpServletResponse response = mockHttpResponse(true);
    underTest.doFilter(request("POST", "/context/service/call", "param=value"), response, chain);
    List<String> debugLogs = logTester.logs(Level.DEBUG);
    assertThat(debugLogs.size()).isOne();
    assertThat(debugLogs.get(0)).contains("Processing of request", "failed");
}
/**
 * Pre-creation check for a file: validates the filename, then requires the
 * parent directory to grant either write or create-directories permission
 * (write is tried first; create-directories is the fallback).
 *
 * @throws InvalidFilenameException if the filename fails validation
 * @throws BackgroundException      if neither permission is granted
 */
@Override
public void preflight(final Path workdir, final String filename) throws BackgroundException {
    if(!validate(filename)) {
        throw new InvalidFilenameException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename));
    }
    // File/directory creation summary:
    // - Directories with ctera:writepermission but no ctera:createdirectoriespermission allow for file creation only.
    // - Directories with ctera:createdirectoriespermission but no ctera:writepermission allow for directory and file creation.
    // - Directories with only ctera:readpermission do not allow for file nor directory creation, for listing only.
    // In other words:
    // - file creation is allowed if either ctera:createdirectoriespermission or ctera:writepermission is set or both are set
    // - directory creation is allowed if ctera:createdirectoriespermission is set.
    // ctera:createdirectoriespermission or ctera:writepermission
    try {
        assumeRole(workdir, WRITEPERMISSION);
    }
    catch(AccessDeniedException e) {
        // ignore and try second option
        assumeRole(workdir, CREATEDIRECTORIESPERMISSION);
    }
}
/**
 * A directory granting both write and create-directories permission must
 * pass the touch preflight without throwing.
 */
@Test
public void testPreflightWriteAndCreateDirectoriesPermission() throws Exception {
    final Path file = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    // Grant both permissions on the directory's ACL.
    file.setAttributes(file.attributes().withAcl(
        new Acl(
            new Acl.UserAndRole(new Acl.CanonicalUser(), WRITEPERMISSION),
            new Acl.UserAndRole(new Acl.CanonicalUser(), CREATEDIRECTORIESPERMISSION)
        )));
    new CteraTouchFeature(session).preflight(file, new AlphanumericRandomStringService().random());
    // assert no fail
}
@Override protected ObjectListingChunk getObjectListingChunk(String key, boolean recursive) throws IOException { String delimiter = recursive ? "" : PATH_SEPARATOR; key = PathUtils.normalizePath(key, PATH_SEPARATOR); // In case key is root (empty string) do not normalize prefix key = key.equals(PATH_SEPARATOR) ? "" : key; ListObjectsRequest request = new ListObjectsRequest(mBucketName); request.setPrefix(key); request.setMaxKeys(getListingChunkLength(mUfsConf)); request.setDelimiter(delimiter); ObjectListing result = getObjectListingChunk(request); if (result != null) { return new OSSObjectListingChunk(request, result); } return null; }
/**
 * A stubbed client listing must be returned as-is by the request-based
 * listing helper.
 * NOTE(review): this exercises a getObjectListingChunk(ListObjectsRequest)
 * overload (returning the raw ObjectListing), not the (String, boolean)
 * variant -- confirm the overload exists with that signature.
 */
@Test
public void testGetObjectListingChunk() {
    // test successful get object listing chunk
    Mockito.when(mClient.listObjects(ArgumentMatchers.any(ListObjectsRequest.class)))
        .thenReturn(new ObjectListing());
    ListObjectsRequest request = new ListObjectsRequest();
    GenericResult result = mOSSUnderFileSystem.getObjectListingChunk(request);
    Assert.assertTrue(result instanceof ObjectListing);
}
static @Nullable String resolveConsumerArn(Read spec, PipelineOptions options) { String streamName = Preconditions.checkArgumentNotNull(spec.getStreamName()); KinesisIOOptions sourceOptions = options.as(KinesisIOOptions.class); Map<String, String> streamToArnMapping = sourceOptions.getKinesisIOConsumerArns(); String consumerArn; if (streamToArnMapping.containsKey(streamName)) { consumerArn = streamToArnMapping.get(streamName); // can resolve to null too } else { consumerArn = spec.getConsumerArn(); } return consumerArn; }
/** Neither spec nor options supply an ARN, so resolution must yield null. */
@Test
public void testConsumerArnNotPassed() {
    KinesisIO.Read readSpec = KinesisIO.read().withStreamName("stream-xxx");
    assertThat(KinesisSource.resolveConsumerArn(readSpec, createIOOptions())).isNull();
}
/**
 * Forcefully shuts down the executor with the interruption flag defaulted
 * to true (see the three-argument overload for the full contract).
 *
 * @return the result of the three-argument overload
 */
public static boolean shutdownExecutorForcefully(ExecutorService executor, Duration timeout) {
    final boolean interruptable = true;
    return shutdownExecutorForcefully(executor, timeout, interruptable);
}
/** With interruption disabled, shutdown must keep retrying to completion. */
@Test
void testShutdownExecutorForcefullyNotInterruptable() {
    MockExecutorService executor = new MockExecutorService(5);
    executor.interruptAfterNumForcefulShutdown(1);

    boolean terminated =
        ComponentClosingUtils.shutdownExecutorForcefully(executor, Duration.ofDays(1), false);

    assertThat(terminated).isTrue();
    // All five forceful shutdown attempts should have been made.
    assertThat(executor.forcefullyShutdownCount).isEqualTo(5);
}
/**
 * Returns a trigger that never fires.
 * NeverTrigger ignores all inputs and is Window-type independent.
 */
public static NeverTrigger ever() {
    return new NeverTrigger();
}
/** A never-firing trigger only guarantees firing at the end of time. */
@Test
public void testFireDeadline() throws Exception {
    IntervalWindow window = new IntervalWindow(new Instant(0), new Instant(10));
    assertEquals(
        BoundedWindow.TIMESTAMP_MAX_VALUE,
        Never.ever().getWatermarkThatGuaranteesFiring(window));
}
/**
 * Finds the first occurrence of a character, searching from the start.
 *
 * @param str        sequence to search
 * @param searchChar character to find
 * @return the index of the first match, or a negative value when not found
 *         (per the three-argument overload's contract)
 */
public static int indexOf(CharSequence str, char searchChar) {
    final int fromIndex = 0;
    return indexOf(str, searchChar, fromIndex);
}
/** Characters are found at their expected zero-based positions. */
@Test
public void indexOfTest() {
    assertEquals(3, CharSequenceUtil.indexOf("abc123", '1'));
    assertEquals(5, CharSequenceUtil.indexOf("abc123", '3'));
    assertEquals(0, CharSequenceUtil.indexOf("abc123", 'a'));
}
/**
 * Mojo entry point: validates configuration, enriches the EIP documentation,
 * then deletes any configured leftover files. Skips silently when the input
 * schema file is absent.
 * NOTE(review): camelCoreModelDir gets a validateIsDirectory but no
 * validateExists, unlike camelCoreXmlDir -- confirm whether that asymmetry
 * is intentional.
 *
 * @throws MojoExecutionException on missing configuration or plugin failure
 */
@Override
public void execute() throws MojoExecutionException {
    if (pathToModelDir == null) {
        throw new MojoExecutionException("pathToModelDir parameter must not be null");
    }

    // skip if input file does not exist
    if (inputCamelSchemaFile == null || !inputCamelSchemaFile.exists()) {
        getLog().info("Input Camel schema file: " + inputCamelSchemaFile + " does not exist. Skip EIP document enrichment");
        return;
    }

    validateExists(inputCamelSchemaFile, "inputCamelSchemaFile");
    validateIsFile(inputCamelSchemaFile, "inputCamelSchemaFile");
    validateExists(camelCoreXmlDir, "camelCoreXmlDir");
    validateIsDirectory(camelCoreModelDir, "camelCoreModelDir");
    validateIsDirectory(camelCoreXmlDir, "camelCoreXmlDir");
    try {
        runPlugin();
    } catch (Exception e) {
        throw new MojoExecutionException("Error during plugin execution", e);
    }
    if (deleteFilesAfterRun != null) {
        deleteFilesAfterDone(deleteFilesAfterRun);
    }
}
@Test public void testExecutePathToModelDirIsNull() { eipDocumentationEnricherMojo.pathToModelDir = null; try { eipDocumentationEnricherMojo.execute(); fail("Expected MojoExecutionException"); } catch (MojoExecutionException e) { // Expected. } }
/**
 * Imports a 1.3-format configuration JSON: dispatches each top-level member
 * to its dedicated reader, offering unknown members to registered extensions
 * (first claimant wins) and skipping anything unclaimed. After the stream is
 * consumed, deferred object references are resolved and temporary id maps
 * are cleared.
 */
@Override
public void importData(JsonReader reader) throws IOException {
    logger.info("Reading configuration for 1.3");
    // this *HAS* to start as an object
    reader.beginObject();
    while (reader.hasNext()) {
        JsonToken tok = reader.peek();
        switch (tok) {
            case NAME:
                String name = reader.nextName();
                // find out which member it is
                if (name.equals(CLIENTS)) {
                    readClients(reader);
                } else if (name.equals(GRANTS)) {
                    readGrants(reader);
                } else if (name.equals(WHITELISTEDSITES)) {
                    readWhitelistedSites(reader);
                } else if (name.equals(BLACKLISTEDSITES)) {
                    readBlacklistedSites(reader);
                } else if (name.equals(AUTHENTICATIONHOLDERS)) {
                    readAuthenticationHolders(reader);
                } else if (name.equals(ACCESSTOKENS)) {
                    readAccessTokens(reader);
                } else if (name.equals(REFRESHTOKENS)) {
                    readRefreshTokens(reader);
                } else if (name.equals(SYSTEMSCOPES)) {
                    readSystemScopes(reader);
                } else {
                    boolean processed = false;
                    for (MITREidDataServiceExtension extension : extensions) {
                        if (extension.supportsVersion(THIS_VERSION)) {
                            processed = extension.importExtensionData(name, reader);
                            if (processed) {
                                // if the extension processed data, break out of this inner loop
                                // (only the first extension to claim an extension point gets it)
                                break;
                            }
                        }
                    }
                    if (!processed) {
                        // unknown token, skip it
                        reader.skipValue();
                    }
                }
                break;
            case END_OBJECT:
                // the object ended, we're done here
                reader.endObject();
                continue;
            default:
                logger.debug("Found unexpected entry");
                reader.skipValue();
                continue;
        }
    }
    fixObjectReferences();
    for (MITREidDataServiceExtension extension : extensions) {
        if (extension.supportsVersion(THIS_VERSION)) {
            extension.fixExtensionObjectReferences(maps);
            break;
        }
    }
    maps.clearAll();
}
// Verifies that importData() parses the systemScopes section of a 1.3 export
// and persists each scope with all of its attributes (value, description,
// icon, restricted, defaultScope) intact and in order.
@Test
public void testImportSystemScopes() throws IOException {
    SystemScope scope1 = new SystemScope();
    scope1.setId(1L);
    scope1.setValue("scope1");
    scope1.setDescription("Scope 1");
    scope1.setRestricted(true);
    scope1.setDefaultScope(false);
    scope1.setIcon("glass");

    SystemScope scope2 = new SystemScope();
    scope2.setId(2L);
    scope2.setValue("scope2");
    scope2.setDescription("Scope 2");
    scope2.setRestricted(false);
    scope2.setDefaultScope(false);
    scope2.setIcon("ball");

    SystemScope scope3 = new SystemScope();
    scope3.setId(3L);
    scope3.setValue("scope3");
    scope3.setDescription("Scope 3");
    scope3.setRestricted(false);
    scope3.setDefaultScope(true);
    scope3.setIcon("road");

    // All other sections are empty; only systemScopes carries data.
    String configJson = "{" +
            "\"" + MITREidDataService.CLIENTS + "\": [], " +
            "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " +
            "\"" + MITREidDataService.REFRESHTOKENS + "\": [], " +
            "\"" + MITREidDataService.GRANTS + "\": [], " +
            "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " +
            "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " +
            "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [], " +
            "\"" + MITREidDataService.SYSTEMSCOPES + "\": [" +
            "{\"id\":1,\"description\":\"Scope 1\",\"icon\":\"glass\",\"value\":\"scope1\",\"restricted\":true,\"defaultScope\":false}," +
            "{\"id\":2,\"description\":\"Scope 2\",\"icon\":\"ball\",\"value\":\"scope2\",\"restricted\":false,\"defaultScope\":false}," +
            "{\"id\":3,\"description\":\"Scope 3\",\"icon\":\"road\",\"value\":\"scope3\",\"restricted\":false,\"defaultScope\":true}" +
            "  ]" +
            "}";

    logger.debug(configJson);

    JsonReader reader = new JsonReader(new StringReader(configJson));

    dataService.importData(reader);
    // Each of the three scopes must have been saved exactly once.
    verify(sysScopeRepository, times(3)).save(capturedScope.capture());

    List<SystemScope> savedScopes = capturedScope.getAllValues();

    assertThat(savedScopes.size(), is(3));
    assertThat(savedScopes.get(0).getValue(), equalTo(scope1.getValue()));
    assertThat(savedScopes.get(0).getDescription(), equalTo(scope1.getDescription()));
    assertThat(savedScopes.get(0).getIcon(), equalTo(scope1.getIcon()));
    assertThat(savedScopes.get(0).isDefaultScope(), equalTo(scope1.isDefaultScope()));
    assertThat(savedScopes.get(0).isRestricted(), equalTo(scope1.isRestricted()));

    assertThat(savedScopes.get(1).getValue(), equalTo(scope2.getValue()));
    assertThat(savedScopes.get(1).getDescription(), equalTo(scope2.getDescription()));
    assertThat(savedScopes.get(1).getIcon(), equalTo(scope2.getIcon()));
    assertThat(savedScopes.get(1).isDefaultScope(), equalTo(scope2.isDefaultScope()));
    assertThat(savedScopes.get(1).isRestricted(), equalTo(scope2.isRestricted()));

    assertThat(savedScopes.get(2).getValue(), equalTo(scope3.getValue()));
    assertThat(savedScopes.get(2).getDescription(), equalTo(scope3.getDescription()));
    assertThat(savedScopes.get(2).getIcon(), equalTo(scope3.getIcon()));
    assertThat(savedScopes.get(2).isDefaultScope(), equalTo(scope3.isDefaultScope()));
    assertThat(savedScopes.get(2).isRestricted(), equalTo(scope3.isRestricted()));
}
/**
 * Builds a lightweight summary (excerpt) of the given collector, exposing
 * its id, entity type and display name for content-pack listings.
 */
@Override
public EntityExcerpt createExcerpt(Collector collector) {
    final EntityExcerpt.Builder excerptBuilder = EntityExcerpt.builder();
    excerptBuilder.id(ModelId.of(collector.id()));
    excerptBuilder.type(TYPE_V1);
    excerptBuilder.title(collector.name());
    return excerptBuilder.build();
}
// Verifies that createExcerpt() exposes the collector's id, type and name
// as loaded from the MongoDB fixture.
@Test
@MongoDBFixtures("SidecarCollectorFacadeTest.json")
public void createExcerpt() {
    final Collector collector = collectorService.find("5b4c920b4b900a0024af0001");
    final EntityExcerpt excerpt = facade.createExcerpt(collector);

    assertThat(excerpt.id()).isEqualTo(ModelId.of("5b4c920b4b900a0024af0001"));
    assertThat(excerpt.type()).isEqualTo(ModelTypes.SIDECAR_COLLECTOR_V1);
    assertThat(excerpt.title()).isEqualTo("filebeat");
}
/**
 * Returns true when the statistics of the given row group's columns prove
 * that no record can match the predicate, i.e. the whole block can be
 * skipped without reading it.
 *
 * @param pred    the filter predicate to evaluate (must not be null)
 * @param columns column chunk metadata carrying the statistics (must not be null)
 * @throws NullPointerException if either argument is null
 */
public static boolean canDrop(FilterPredicate pred, List<ColumnChunkMetaData> columns) {
    Objects.requireNonNull(pred, "pred cannot be null");
    Objects.requireNonNull(columns, "columns cannot be null");
    // Walk the predicate tree, comparing each leaf against the column statistics.
    return pred.accept(new StatisticsFilter(columns));
}
// A predicate still containing not() must be rejected with a clear message
// telling the caller to run it through LogicalInverseRewriter first.
@Test
public void testClearExceptionForNots() {
    List<ColumnChunkMetaData> columnMetas = Arrays.asList(
            getDoubleColumnMeta(new DoubleStatistics(), 0L),
            getIntColumnMeta(new IntStatistics(), 0L));

    FilterPredicate pred = and(not(eq(doubleColumn, 12.0)), eq(intColumn, 17));

    try {
        canDrop(pred, columnMetas);
        fail("This should throw");
    } catch (IllegalArgumentException e) {
        assertEquals(
                "This predicate contains a not! Did you forget to run this predicate through LogicalInverseRewriter?"
                        + " not(eq(double.column, 12.0))",
                e.getMessage());
    }
}
/**
 * Constant-folding implementation of SQL FLOOR(DOUBLE) -> BIGINT.
 * The double result is narrowed to long, so values beyond the long range
 * saturate at Long.MIN_VALUE/Long.MAX_VALUE (Java narrowing semantics).
 */
@ConstantFunction(name = "floor", argTypes = {DOUBLE}, returnType = BIGINT)
public static ConstantOperator floor(ConstantOperator expr) {
    return ConstantOperator.createBigint((long) Math.floor(expr.getDouble()));
}
// FLOOR(100.0) must yield the BIGINT value 100.
@Test
public void floor() {
    assertEquals(100, ScalarOperatorFunctions.floor(O_FLOAT_100).getBigint());
}
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
// A UDF declared with a DECIMAL parameter must accept any decimal argument
// regardless of its precision/scale.
@Test
public void shouldAllowAnyDecimal() {
    // Given:
    givenFunctions(
            function(EXPECTED, -1, DECIMAL)
    );

    // When:
    final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of(SqlArgument.of(DECIMAL1_ARG)));

    // Then:
    assertThat(fun.name(), equalTo(EXPECTED));
}
/**
 * Validates an update of an existing sink configuration and returns the
 * merged result. Identity fields (tenant/namespace/name) and several
 * runtime-affecting fields (subscription name, set of input topics,
 * processing guarantees, ordering flags, auto-ack) must not change;
 * everything else present in {@code newConfig} overrides the corresponding
 * value of {@code existingConfig}.
 *
 * @param existingConfig the currently stored sink configuration
 * @param newConfig      the requested changes
 * @return a clone of {@code existingConfig} with the permitted changes applied
 * @throws IllegalArgumentException if an immutable field differs
 */
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) {
    SinkConfig mergedConfig = clone(existingConfig);
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Sink Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName()
            .equals(existingConfig.getSourceSubscriptionName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Normalize the various ways of specifying inputs (plain topic list,
    // topics pattern, serde map, schema map) into newConfig's inputSpecs
    // before comparing them against the existing ones.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().putIfAbsent(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
                ConsumerConfig.builder()
                        .isRegexPattern(true)
                        .build());
    }
    if (newConfig.getTopicToSerdeClassName() != null) {
        newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getTopicToSchemaType() != null) {
        newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (!newConfig.getInputSpecs().isEmpty()) {
        SinkConfig finalMergedConfig = mergedConfig;
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            // New topics may not be introduced, and the regex flag of an
            // existing topic may not flip; otherwise the new consumer config wins.
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            finalMergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getConfigs() != null) {
        mergedConfig.setConfigs(newConfig.getConfigs());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getResources() != null) {
        // Resources are merged field-by-field rather than replaced wholesale.
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getArchive())) {
        mergedConfig.setArchive(newConfig.getArchive());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getTransformFunction() != null) {
        mergedConfig.setTransformFunction(newConfig.getTransformFunction());
    }
    if (newConfig.getTransformFunctionClassName() != null) {
        mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName());
    }
    if (newConfig.getTransformFunctionConfig() != null) {
        mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig());
    }
    return mergedConfig;
}
// Parallelism is a mutable field: the merged config must carry the new value
// and leave every other field untouched.
@Test
public void testMergeDifferentParallelism() {
    SinkConfig sinkConfig = createSinkConfig();
    SinkConfig newSinkConfig = createUpdatedSinkConfig("parallelism", 101);
    SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig);
    assertEquals(
            mergedConfig.getParallelism(), Integer.valueOf(101)
    );
    // Restoring the original parallelism must make the configs identical again.
    mergedConfig.setParallelism(sinkConfig.getParallelism());
    assertEquals(
            new Gson().toJson(sinkConfig),
            new Gson().toJson(mergedConfig)
    );
}
/**
 * Decodes a hexadecimal string into its byte representation.
 * Each pair of hex characters yields one byte; if the input has odd length,
 * the trailing character is ignored.
 *
 * @param hex hexadecimal string, upper or lower case digits
 * @return the decoded bytes
 * @throws NumberFormatException if a character pair is not valid hex
 */
public static byte[] hexStringToByte(final String hex) {
    final int byteCount = hex.length() / 2;
    final byte[] decoded = new byte[byteCount];
    int cursor = 0;
    for (int i = 0; i < byteCount; i++) {
        decoded[i] = (byte) Integer.parseInt(hex.substring(cursor, cursor + 2), 16);
        cursor += 2;
    }
    return decoded;
}
// "019f314a" decodes to bytes 0x01, 0x9f, 0x31, 0x4a (signed: 1, -97, 49, 74).
@Test
void testStringToHexArray() {
    String hex = "019f314a";
    byte[] hexArray = StringUtils.hexStringToByte(hex);
    byte[] expectedArray = new byte[] {1, -97, 49, 74};
    assertThat(hexArray).isEqualTo(expectedArray);
}
/**
 * Parses a single "field:value" filter expression into a {@link Filter}.
 * Values containing the range separator (on a range-capable field type)
 * become {@link RangeFilter}s - open-ended when the separator leads or
 * trails the value; everything else becomes a {@link SingleValueFilter}.
 *
 * @throws IllegalArgumentException if the expression lacks the separator
 *                                  or has an empty field/value part
 */
public Filter parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) {
    if (!filterExpression.contains(FIELD_AND_VALUE_SEPARATOR)) {
        throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
    }
    // Split on the first separator only, so values may themselves contain it.
    final String[] split = filterExpression.split(FIELD_AND_VALUE_SEPARATOR, 2);
    final String fieldPart = split[0];
    if (fieldPart == null || fieldPart.isEmpty()) {
        throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
    }
    final String valuePart = split[1];
    if (valuePart == null || valuePart.isEmpty()) {
        throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG);
    }
    final EntityAttribute attributeMetaData = getAttributeMetaData(attributes, fieldPart);
    final SearchQueryField.Type fieldType = attributeMetaData.type();
    if (isRangeValueExpression(valuePart, fieldType)) {
        if (valuePart.startsWith(RANGE_VALUES_SEPARATOR)) {
            // "<sep>value" -> range with open lower bound
            return new RangeFilter(attributeMetaData.id(),
                    null,
                    extractValue(fieldType, valuePart.substring(RANGE_VALUES_SEPARATOR.length()))
            );
        } else if (valuePart.endsWith(RANGE_VALUES_SEPARATOR)) {
            // "value<sep>" -> range with open upper bound
            return new RangeFilter(attributeMetaData.id(),
                    extractValue(fieldType, valuePart.substring(0, valuePart.length() - RANGE_VALUES_SEPARATOR.length())),
                    null
            );
        } else {
            // "from<sep>to" -> bounded range
            final String[] ranges = valuePart.split(RANGE_VALUES_SEPARATOR);
            return new RangeFilter(attributeMetaData.id(),
                    extractValue(fieldType, ranges[0]),
                    extractValue(fieldType, ranges[1])
            );
        }
    } else {
        return new SingleValueFilter(attributeMetaData.id(), extractValue(fieldType, valuePart));
    }
}
// "away:true" on a BOOLEAN attribute must produce a single-value filter
// carrying a Boolean true, not the string "true".
@Test
void parsesFilterExpressionCorrectlyForBoolType() {
    assertEquals(new SingleValueFilter("away", true),
            toTest.parseSingleExpression("away:true",
                    List.of(EntityAttribute.builder()
                            .id("away")
                            .title("Away")
                            .type(SearchQueryField.Type.BOOLEAN)
                            .filterable(true)
                            .build())
            ));
}
/**
 * Computes the average loss per losing position: gross loss divided by the
 * number of losing positions. Returns zero when there are no losing
 * positions or no gross loss at all.
 */
@Override
public Num calculate(BarSeries series, Position position) {
    final Num losingCount = numberOfLosingPositionsCriterion.calculate(series, position);
    if (losingCount.isZero()) {
        return series.zero();
    }
    final Num totalLoss = grossLossCriterion.calculate(series, position);
    return totalLoss.isZero() ? series.zero() : totalLoss.dividedBy(losingCount);
}
// Two short positions: sell@95/buy@100 loses 5, sell@70/buy@100 loses 30;
// gross loss -35 over 2 losing positions gives an average loss of -17.5.
@Test
public void calculateProfitWithShortPositions() {
    MockBarSeries series = new MockBarSeries(numFunction, 95, 100, 70, 80, 85, 100);
    TradingRecord tradingRecord = new BaseTradingRecord(Trade.sellAt(0, series), Trade.buyAt(1, series),
            Trade.sellAt(2, series), Trade.buyAt(5, series));

    AnalysisCriterion avgLoss = getCriterion();
    assertNumEquals(-17.5, avgLoss.calculate(series, tradingRecord));
}
/**
 * Creates a UDAF that keeps the {@code earliestN} values (by offset) seen
 * for a column. Values are stored as offset-tagged structs so that merges
 * across partials can re-establish arrival order.
 *
 * @param earliestN   how many earliest values to retain (must be >= 1)
 * @param ignoreNulls when true, null inputs do not occupy a slot
 * @throws KsqlFunctionException if {@code earliestN} is not positive
 */
@VisibleForTesting
static <T> Udaf<T, List<Struct>, List<T>> earliestTN(
        final int earliestN,
        final boolean ignoreNulls
) {
    if (earliestN <= 0) {
        throw new KsqlFunctionException("earliestN must be 1 or greater");
    }

    return new Udaf<T, List<Struct>, List<T>>() {
        // Resolved lazily from the actual argument type in initializeTypeArguments.
        Schema structSchema;
        SqlType aggregateType;
        SqlType returnType;

        @Override
        public void initializeTypeArguments(final List<SqlArgument> argTypeList) {
            final SqlType inputType = argTypeList.get(0).getSqlTypeOrThrow();
            final Schema connectType = SchemaConverters.sqlToConnectConverter().toConnectSchema(inputType);
            structSchema = KudafByOffsetUtils.buildSchema(connectType);
            aggregateType = SqlArray.of(SchemaConverters.connectToSqlConverter().toSqlType(structSchema));
            returnType = SqlArray.of(inputType);
        }

        @Override
        public Optional<SqlType> getAggregateSqlType() {
            return Optional.of(aggregateType);
        }

        @Override
        public Optional<SqlType> getReturnSqlType() {
            return Optional.of(returnType);
        }

        @Override
        public List<Struct> initialize() {
            return new ArrayList<>(earliestN);
        }

        @Override
        public List<Struct> aggregate(final T current, final List<Struct> aggregate) {
            if (current == null && ignoreNulls) {
                return aggregate;
            }
            // Only the first earliestN values are kept; later ones are dropped.
            if (aggregate.size() < earliestN) {
                aggregate.add(createStruct(structSchema, current));
            }
            return aggregate;
        }

        @Override
        public List<Struct> merge(final List<Struct> aggOne, final List<Struct> aggTwo) {
            final List<Struct> merged = new ArrayList<>(aggOne.size() + aggTwo.size());
            merged.addAll(aggOne);
            merged.addAll(aggTwo);
            // Re-sort by offset so the genuinely earliest values win across partials.
            merged.sort(INTERMEDIATE_STRUCT_COMPARATOR);
            return merged.subList(0, Math.min(earliestN, merged.size()));
        }

        @Override
        @SuppressWarnings("unchecked")
        public List<T> map(final List<Struct> agg) {
            // Strip the offset wrapper and return just the values.
            return (List<T>) agg.stream().map(s -> s.get(VAL_FIELD)).collect(Collectors.toList());
        }
    };
}
// initialize() must start from an empty aggregate list.
@Test
public void shouldInitializeN() {
    // Given:
    final Udaf<Integer, List<Struct>, List<Integer>> udaf = EarliestByOffset
            .earliestTN(2, false);

    // When:
    final List<Struct> init = udaf.initialize();

    // Then:
    assertThat(init, is(empty()));
}
/**
 * Empties the queue by repeatedly polling until it reports no more elements.
 */
@Override
public void clear() {
    while (poll() != null) {
        // intentionally empty: poll() performs the removal
    }
}
// clear() must remove all previously offered elements.
@Test
public void testClear() {
    queue.offer(1);
    queue.offer(2);
    queue.offer(3);

    queue.clear();
    assertEquals(0, queue.size());
}
/**
 * Decides whether the request targets a cacheable static resource:
 * true when its path info starts with one of the known resource prefixes,
 * false when there is no path info or no prefix matches.
 */
boolean isCacheableResourceRequest(HttpServletRequest request) {
    final String path = request.getPathInfo();
    if (path == null) {
        // e.g. requests to the servlet root carry no path info (JENKINS-40116).
        return false;
    }
    for (String prefix : resourcePrefixes) {
        if (path.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
// JENKINS-40116 regression: a request with non-null path info matching a
// resource prefix must be treated as cacheable.
@Test
public void test_getPathInfo_not_null_JENKINS_40116() throws IOException, ServletException {
    Mockito.when(servletRequest.getPathInfo()).thenReturn("/a/b/c.js");
    Assert.assertTrue(resourceCacheControl.isCacheableResourceRequest(servletRequest));
}
/**
 * Builds a POSIX attribute view backed by the given file.
 */
@Override
public PosixFileAttributes readAttributes(File file) {
    return new Attributes(file);
}
// readAttributes() must reflect the file's permissions, group and file key.
@Test
public void testAttributes() {
    PosixFileAttributes attrs = provider.readAttributes(file);
    assertThat(attrs.permissions()).isEqualTo(PosixFilePermissions.fromString("rw-r--r--"));
    assertThat(attrs.group()).isEqualTo(createGroupPrincipal("group"));
    assertThat(attrs.fileKey()).isEqualTo(0);
}
/**
 * Entry point of the collector monitoring servlet: authenticates the caller,
 * serves static resources directly, then renders the monitoring report for
 * the requested application (or fallback pages when none is selected or no
 * data is available yet).
 */
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    if (!httpAuth.isAllowed(req, resp)) {
        // isAllowed has already written the error response.
        return;
    }
    final long start = System.currentTimeMillis();
    final String resource = HttpParameter.RESOURCE.getParameterFrom(req);
    if (resource != null) {
        // Static resource (css/js/images): no application context needed.
        MonitoringController.doResource(resp, resource);
        return;
    }
    final CollectorController collectorController = new CollectorController(collectorServer);
    final String application = collectorController.getApplication(req, resp);
    I18N.bindLocale(req.getLocale());
    try {
        if (application == null) {
            // No application selected yet: show the "add application" page.
            CollectorController.writeOnlyAddApplication(resp);
            return;
        }
        if (!collectorServer.isApplicationDataAvailable(application)
                && HttpParameter.ACTION.getParameterFrom(req) == null) {
            CollectorController.writeDataUnavailableForApplication(application, resp);
            return;
        }
        collectorController.doMonitoring(req, resp, application);
    } finally {
        // Always unbind the request locale and log the request timing.
        I18N.unbindLocale();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("monitoring from " + req.getRemoteAddr() + ", request=" + req.getRequestURI()
                    + (req.getQueryString() != null ? '?' + req.getQueryString() : "") + ", application="
                    + application + " in " + (System.currentTimeMillis() - start) + "ms");
        }
    }
}
// Smoke test: doGet must handle the various application-name / resource
// parameter combinations without throwing.
@Test
public void testDoGet() throws ServletException, IOException {
    doGet("a", null);
    doGet(null, null);
    doGet(".*", null);
    doGet(null, TEST);
}
/**
 * Fetches all issue comments newer than the last one seen and advances the
 * consumer's high-water mark ({@code lastCommentId}).
 *
 * @return the new comments ordered oldest-first
 */
@SuppressWarnings("ConstantConditions")
private Queue<Comment> getComments() {
    LOG.debug("Start: Jira NewCommentsConsumer: retrieving issue comments. Last comment id: {}", lastCommentId);
    IssueRestClient client = getEndpoint().getClient().getIssueClient();
    // Re-fetch each issue to obtain its comments, keeping only unseen ones.
    LinkedList<Comment> newComments = getIssues().stream()
            .map(issue -> client.getIssue(issue.getKey()).claim())
            .flatMap(issue -> StreamSupport.stream(issue.getComments().spliterator(), false))
            .filter(comment -> comment.getId() > lastCommentId)
            .collect(Collectors.toCollection(LinkedList::new));
    // Oldest first, so consumers process comments in chronological order.
    Collections.reverse(newComments);
    lastCommentId = newComments.stream().mapToLong(Comment::getId).max().orElse(lastCommentId);
    LOG.debug("End: Jira NewCommentsConsumer: retrieving issue comments. {} new comments since last run.",
            newComments.size());
    return newComments;
}
// Three issues with 2000/3000/1000 comments each: the consumer must emit all
// 6000 comments, ordered from oldest to newest.
@Test
public void multipleIssuesTest() throws Exception {
    Issue issue1 = createIssueWithComments(20L, 2000);
    Issue issue2 = createIssueWithComments(21L, 3000);
    Issue issue3 = createIssueWithComments(22L, 1000);

    List<Issue> newIssues = List.of(issue3, issue2, issue1);
    SearchResult searchResult = new SearchResult(0, 50, 3, newIssues);

    when(searchRestClient.searchJql(any(), any(), any(), any())).thenReturn(Promises.promise(searchResult));
    newIssues.forEach(issue -> when(issueRestClient.getIssue(eq(issue.getKey())))
            .then(inv -> Promises.promise(issue)));

    List<Comment> comments = new ArrayList<>();
    newIssues.forEach(issue -> issue.getComments().forEach(comments::add));
    // reverse the order, from oldest comment to recent
    Collections.reverse(comments);
    // expect 6000 comments
    mockResult.expectedBodiesReceived(comments);

    mockResult.assertIsSatisfied();
}
/**
 * Walks the parsed pattern node list and builds the corresponding converter
 * chain. Unknown or failing keywords are replaced by literal
 * "%PARSER_ERROR[...]" converters so that formatting never fails outright.
 *
 * @return the head of the converter linked list
 */
Converter<E> compile() {
    head = tail = null;
    for (Node n = top; n != null; n = n.next) {
        switch (n.type) {
        case Node.LITERAL:
            addToList(new LiteralConverter<E>((String) n.getValue()));
            break;
        case Node.COMPOSITE_KEYWORD:
            CompositeNode cn = (CompositeNode) n;
            CompositeConverter<E> compositeConverter = createCompositeConverter(cn);
            if (compositeConverter == null) {
                addError("Failed to create converter for [%" + cn.getValue() + "] keyword");
                addToList(new LiteralConverter<E>("%PARSER_ERROR[" + cn.getValue() + "]"));
                break;
            }
            compositeConverter.setFormattingInfo(cn.getFormatInfo());
            compositeConverter.setOptionList(cn.getOptions());
            // Composite keywords carry a sub-pattern: compile it recursively
            // and attach the resulting chain as the child converter.
            Compiler<E> childCompiler = new Compiler<E>(cn.getChildNode(), converterMap);
            childCompiler.setContext(context);
            Converter<E> childConverter = childCompiler.compile();
            compositeConverter.setChildConverter(childConverter);
            addToList(compositeConverter);
            break;
        case Node.SIMPLE_KEYWORD:
            SimpleKeywordNode kn = (SimpleKeywordNode) n;
            DynamicConverter<E> dynaConverter = createConverter(kn);
            if (dynaConverter != null) {
                dynaConverter.setFormattingInfo(kn.getFormatInfo());
                dynaConverter.setOptionList(kn.getOptions());
                addToList(dynaConverter);
            } else {
                // if the appropriate dynaconverter cannot be found, then replace
                // it with a dummy LiteralConverter indicating an error.
                Converter<E> errConveter = new LiteralConverter<E>("%PARSER_ERROR[" + kn.getValue() + "]");
                addStatus(new ErrorStatus("[" + kn.getValue() + "] is not a valid conversion word", this));
                addToList(errConveter);
            }
        }
    }
    return head;
}
// Compiling a pattern must yield a converter chain whose output interleaves
// literals with keyword output ("%hello" -> "Hello", "%OTT" -> "123").
@Test
public void testBasic() throws Exception {
    {
        Parser<Object> p = new Parser<Object>("abc %hello");
        p.setContext(context);
        Node t = p.parse();
        Converter<Object> head = p.compile(t, converterMap);
        String result = write(head, new Object());
        assertEquals("abc Hello", result);
    }
    {
        Parser<Object> p = new Parser<Object>("abc %hello %OTT");
        p.setContext(context);
        Node t = p.parse();
        Converter<Object> head = p.compile(t, converterMap);
        String result = write(head, new Object());
        assertEquals("abc Hello 123", result);
    }
}
/**
 * Reads the raw configuration value for the given data id from Apollo.
 */
@Override
public String getConfig(final String dataId) {
    return this.apolloClient.getItemValue(dataId);
}
// getConfig() must delegate to ApolloClient.getItemValue and pass the value through.
@Test
public void testGetConfig2() {
    when(apolloClient.getItemValue(Mockito.any())).thenReturn("");
    assertEquals("", apolloDataChangedListener.getConfig("42"));
    verify(apolloClient).getItemValue(Mockito.any());
}
/**
 * Creates a URLConnectionFactory using the default socket timeouts and an
 * SSL connection configurator derived from the given configuration.
 */
public static URLConnectionFactory newDefaultURLConnectionFactory(
    Configuration conf) {
  ConnectionConfigurator conn = getSSLConnectionConfiguration(
      DEFAULT_SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT, conf);
  return new URLConnectionFactory(conn);
}
// A bogus hostname verifier must not break factory creation; the SSL init
// failure must merely be logged.
@Test
public void testSSLInitFailure() throws Exception {
    Configuration conf = new Configuration();
    conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "foo");
    GenericTestUtils.LogCapturer logs =
            GenericTestUtils.LogCapturer.captureLogs(
                    LoggerFactory.getLogger(URLConnectionFactory.class));
    URLConnectionFactory.newDefaultURLConnectionFactory(conf);
    Assert.assertTrue("Expected log for ssl init failure not found!",
            logs.getOutput().contains(
                    "Cannot load customized ssl related configuration"));
}
/**
 * Applies this extractor to the given message: evaluates the configured
 * condition, runs the extraction, copies the result(s) into the target
 * field(s), optionally cuts the matched region(s) out of the source field,
 * and finally runs the configured converters. All phases are timed and
 * condition hits/misses are counted.
 */
public void runExtractor(Message msg) {
    try (final Timer.Context ignored = completeTimer.time()) {
        final String field;
        try (final Timer.Context ignored2 = conditionTimer.time()) {
            // We can only work on Strings.
            if (!(msg.getField(sourceField) instanceof String)) {
                conditionMissesCounter.inc();
                return;
            }
            field = (String) msg.getField(sourceField);
            // Decide if to extract at all.
            if (conditionType.equals(ConditionType.STRING)) {
                if (field.contains(conditionValue)) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            } else if (conditionType.equals(ConditionType.REGEX)) {
                if (regexConditionPattern.matcher(field).find()) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            }
        }
        try (final Timer.Context ignored2 = executionTimer.time()) {
            Result[] results;
            try {
                results = run(field);
            } catch (ExtractorException e) {
                // Record the failure on the message instead of failing processing.
                final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>";
                msg.addProcessingError(new Message.ProcessingError(
                        ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e)));
                return;
            }
            // Nothing extracted (or any null value) leaves the message untouched.
            if (results == null || results.length == 0
                    || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) {
                return;
            } else if (results.length == 1 && results[0].target == null) {
                // results[0].target is null if this extractor cannot produce multiple fields use targetField in that case
                msg.addField(targetField, results[0].getValue());
            } else {
                for (final Result result : results) {
                    msg.addField(result.getTarget(), result.getValue());
                }
            }
            // Remove original from message?
            if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField)
                    && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) {
                final StringBuilder sb = new StringBuilder(field);
                // Delete matched regions from highest index to lowest so that
                // earlier indices remain valid while cutting.
                final List<Result> reverseList = Arrays.stream(results)
                        .sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed())
                        .collect(Collectors.toList());
                // remove all from reverse so that the indices still match
                for (final Result result : reverseList) {
                    sb.delete(result.getBeginIndex(), result.getEndIndex());
                }
                final String builtString = sb.toString();
                final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString;
                msg.removeField(sourceField);
                // TODO don't add an empty field back, or rather don't add fullyCutByExtractor
                msg.addField(sourceField, finalResult);
            }
            runConverters(msg);
        }
    }
}
// A single extraction result whose value is null must leave the message
// unchanged (no target field is added).
@Test
public void testWithOneValueOnlyResultsAndValueIsNull() throws Exception {
    final TestExtractor extractor = new TestExtractor.Builder()
            .callback(new Callable<Result[]>() {
                @Override
                public Result[] call() throws Exception {
                    return new Result[]{
                            new Result(null, -1, -1)
                    };
                }
            })
            .build();

    final Message msg = createMessage("the hello");
    extractor.runExtractor(msg);

    assertThat(msg.hasField("target")).isFalse();
}
/**
 * Wraps the entity output stream with GZIP compression when the response's
 * Content-Encoding header requests "gzip" or "x-gzip", then proceeds with
 * the interceptor chain.
 */
@Override
public final void aroundWriteTo(WriterInterceptorContext context) throws IOException {
    final String encoding = (String) context.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING);
    // "literal".equals(variable) is null-safe; a missing header means no compression.
    final boolean gzipRequested = "gzip".equals(encoding) || "x-gzip".equals(encoding);
    if (gzipRequested) {
        context.setOutputStream(new GZIPOutputStream(context.getOutputStream()));
    }
    context.proceed();
}
// With a null Content-Encoding header the interceptor must not wrap the
// output stream in GZIP, but must still proceed with the chain.
@Test
void noEncodingwillNotAroundWrite() throws IOException, WebApplicationException {
    MultivaluedMap<String, Object> headers = new MultivaluedHashMap<>();
    headers.add(HttpHeaders.CONTENT_ENCODING, null);
    WriterInterceptorContextMock context = new WriterInterceptorContextMock(headers);
    new ConfiguredGZipEncoder(true).aroundWriteTo(context);
    assertThat(context.getOutputStream()).isNotInstanceOf(GZIPOutputStream.class);
    assertThat(context.isProceedCalled()).isTrue();
}
/**
 * Persists a task update, but skips the write when nothing meaningful has
 * changed: if the task's checksum equals the stored one and the elapsed
 * interval since the last update is below {@code maxTaskUpdateInterval},
 * the update is dropped to reduce DB load.
 */
@Override
public void updateTask(Task task) {
    Map.Entry<String, Long> taskInDb = getTaskChecksumAndUpdateTime(task.getTaskId());
    String taskCheckSum = computeChecksum(task);
    if (taskInDb != null) {
        long updateInterval = task.getUpdateTime() - taskInDb.getValue();
        if (taskCheckSum.equals(taskInDb.getKey()) && updateInterval < maxTaskUpdateInterval) {
            LOG.debug(
                "task has the same checksum and update interval {} is less than max interval {} millis and skip update",
                updateInterval,
                maxTaskUpdateInterval);
            return;
        }
        LOG.info(
            "update task [{}] with checksum=[{}] with an update interval=[{}]",
            task.getTaskId(),
            taskCheckSum,
            updateInterval);
    }
    // NOTE(review): the checksum is stashed in the task's workerId field -
    // apparently repurposed for change detection; confirm downstream readers.
    task.setWorkerId(taskCheckSum);
    super.updateTask(task);
}
// A large input payload must be stored gzip-compressed in the DB while
// reading the task back still yields the original, uncompressed input data.
@Test
public void testTaskCompressionOnUpdate() throws JsonProcessingException {
    task.setInputData(
        Collections.singletonMap(
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
    maestroExecutionDao.updateTask(task);
    String taskPayload = getTaskPayload();
    JsonNode root = mapper.readTree(taskPayload);
    Assert.assertEquals(
        "{\"compressed\":\"H4sIAAAAAAAA/6tWSiQWKFkRrVapFgC8tuw9WAAAAA==\",\"compressor\":\"gzip\"}",
        root.get("inputData").toString());
    Task dbTask = maestroExecutionDao.getTask(task.getTaskId());
    Assert.assertEquals(task.getInputData(), dbTask.getInputData());
}
/**
 * FIFO scheduling has a single queue, so the queue name and child/recursion
 * flags are ignored: the default queue's info is always returned without
 * child queues or recursion.
 */
@Override
public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues,
    boolean recursive) {
  return DEFAULT_QUEUE.getQueueInfo(false, false);
}
// With no NodeManagers registered, the FIFO queue must report zero current
// capacity instead of NaN or an error.
@Test(timeout=5000)
public void testFifoSchedulerCapacityWhenNoNMs() {
    FifoScheduler scheduler = new FifoScheduler();
    QueueInfo queueInfo = scheduler.getQueueInfo(null, false, false);
    Assert.assertEquals(0.0f, queueInfo.getCurrentCapacity(), 0.0f);
}
/**
 * Creates a fresh, mutable rule configuration with default settings.
 */
@Override
public RuleConfig newRuleConfig() {
    return new RuleConfigImpl();
}
// DROOLS-7583: logically inserted facts must be retracted automatically
// when their supporting fact is removed, and runtime listeners registered
// through RuleConfig must observe every insert/delete with the rule name.
@Test
public void logicalAddByElement() {
    // DROOLS-7583
    LogicalAddByElementTestUnit unit = new LogicalAddByElementTestUnit();

    ArrayList<String> eventsRecorded = new ArrayList<>();

    RuleConfig ruleConfig = RuleUnitProvider.get().newRuleConfig();
    ruleConfig.getRuleRuntimeListeners().add(new RuleRuntimeEventListener() {
        @Override
        public void objectInserted(ObjectInsertedEvent event) {
            String byRuleName = Optional.ofNullable(event.getRule())
                    .map(rule -> " by " + rule.getName())
                    .orElse("");
            eventsRecorded.add(event.getObject() + " inserted" + byRuleName);
        }

        @Override
        public void objectUpdated(ObjectUpdatedEvent event) {
        }

        @Override
        public void objectDeleted(ObjectDeletedEvent event) {
            String byRuleName = Optional.ofNullable(event.getRule())
                    .map(rule -> " by " + rule.getName())
                    .orElse("");
            eventsRecorded.add(event.getOldObject() + " deleted" + byRuleName);
        }
    });

    try (
            RuleUnitInstance<LogicalAddByElementTestUnit> unitInstance =
                    RuleUnitProvider.get().createRuleUnitInstance(unit, ruleConfig)
    ) {
        DataHandle handleToStringWithLength3 = unit.getStrings().add("abc");
        unit.getStrings().add("len4");

        assertThat(unitInstance.fire()).isEqualTo(4);
        assertThat(eventsRecorded).containsExactly(
                "abc inserted",
                "len4 inserted",
                "3 inserted by R1",
                "4 inserted by R1");
        assertThat(unit.getResults()).containsExactly("3 exists", "4 exists");

        eventsRecorded.clear();
        unit.getResults().clear();

        // Removing "abc" must logically retract the derived "3" but keep "4".
        unit.getStrings().remove(handleToStringWithLength3);

        assertThat(unitInstance.fire()).isEqualTo(0);
        assertThat(eventsRecorded).doesNotContain("4 deleted");
        assertThat(eventsRecorded).containsExactly("abc deleted", "3 deleted");
    }
}
/**
 * Produces an execution-time copy of the plugin's task configuration,
 * resolving each declared property through getExecProperty (which applies
 * the user-supplied value or the property's default).
 */
protected TaskConfig buildTaskConfig(TaskConfig config) {
    final TaskConfig executionConfig = new TaskConfig();
    for (Property declaredProperty : config.list()) {
        executionConfig.add(getExecProperty(config, declaredProperty));
    }
    return executionConfig;
}
// When the user-supplied value for a property is null, the execution config
// must fall back to the property's declared default.
@Test
public void shouldReturnDefaultValueInExecConfigWhenConfigValueIsNull() {
    TaskConfig defaultTaskConfig = new TaskConfig();
    String propertyName = "URL";
    String defaultValue = "ABC.TXT";

    Map<String, Map<String, String>> configMap = new HashMap<>();
    configMap.put(propertyName, null);

    PluggableTask task = mock(PluggableTask.class);
    when(task.getPluginConfiguration()).thenReturn(new PluginConfiguration());
    when(task.configAsMap()).thenReturn(configMap);

    PluggableTaskBuilder taskBuilder = new PluggableTaskBuilder(runIfConfigs, cancelBuilder, task, TEST_PLUGIN_ID, "test-directory");

    defaultTaskConfig.addProperty(propertyName).withDefault(defaultValue);

    TaskConfig config = taskBuilder.buildTaskConfig(defaultTaskConfig);
    assertThat(config.getValue(propertyName)).isEqualTo(defaultValue);
}
/**
 * Checks whether a file exists on the primary Koofr mount.
 *
 * @param path absolute path of the file on the mount
 * @return true on HTTP 200, false on HTTP 404
 * @throws KoofrClientIOException for any other status code
 * @throws InvalidTokenException  if the access token cannot be refreshed
 */
public boolean fileExists(String path) throws IOException, InvalidTokenException {
    String url;
    try {
        url = getUriBuilder()
                .setPath(API_PATH_PREFIX + "/mounts/primary/files/info")
                .setParameter("path", path)
                .build()
                .toString();
    } catch (URISyntaxException e) {
        // Should not happen: the path components are under our control.
        throw new IllegalStateException("Could not produce url.", e);
    }

    Request.Builder requestBuilder = getRequestBuilder(url);

    // try-with-resources guarantees the response body is closed.
    try (Response response = getResponse(requestBuilder)) {
        int code = response.code();
        if (code == 200) {
            return true;
        }
        if (code == 404) {
            return false;
        }
        throw new KoofrClientIOException(response);
    }
}
// When the server answers 401 and the credential factory cannot refresh the
// token, fileExists must surface the InvalidTokenException after exactly one
// HTTP request, and that request must carry the expected path and headers.
@Test
public void testFileExistsRefreshTokenNotFound() throws Exception {
    when(credentialFactory.refreshCredential(credential))
        .then(
            (InvocationOnMock invocation) -> {
                throw new InvalidTokenException("Unable to refresh token.", null);
            });
    server.enqueue(new MockResponse().setResponseCode(401));
    InvalidTokenException caughtExc = null;
    try {
        client.fileExists("/path/to/file");
    } catch (InvalidTokenException exc) {
        caughtExc = exc;
    }
    assertNotNull(caughtExc);
    assertEquals("Unable to refresh token.", caughtExc.getMessage());
    // Only the original request was made — no retry after the failed refresh.
    assertEquals(1, server.getRequestCount());
    RecordedRequest recordedRequest = server.takeRequest();
    assertEquals("GET", recordedRequest.getMethod());
    // Path must be URL-encoded in the query parameter.
    assertEquals(
        "/api/v2/mounts/primary/files/info?path=%2Fpath%2Fto%2Ffile",
        recordedRequest.getPath());
    assertEquals("Bearer acc", recordedRequest.getHeader("Authorization"));
    assertEquals("2.1", recordedRequest.getHeader("X-Koofr-Version"));
}
// Marks the producer as completed normally. compareAndSet ensures only the
// first completion (normal or exceptional) wins; subsequent calls are no-ops,
// so calling done() twice is safe.
public void done() {
    done.compareAndSet(null, NORMAL_COMPLETION);
}
// done() must be idempotent: a second call is silently ignored and the
// producer's iterator stays exhausted.
@Test
public void when_doneCalledTwice_then_secondIgnored() {
    initProducer(false);
    producer.done();
    producer.done();
    assertThat(iterator.hasNext()).isFalse();
}
/**
 * Selects the active output format set: plain text when monochrome,
 * ANSI-coloured otherwise.
 *
 * @param monochrome whether colour output should be disabled
 */
@Override
public void setMonochrome(boolean monochrome) {
    if (monochrome) {
        formats = monochrome();
    } else {
        formats = ansi();
    }
}
// A scenario outline with two example rows must be expanded into two named
// scenarios, each pretty-printed with its substituted step text and the
// source locations of both the feature lines and the matched step definitions.
@Test
void should_handle_scenario_outline() {
    Feature feature = TestFeatureParser.parse("path/test.feature", "" +
            "Feature: feature name\n" +
            "  Scenario Outline: <name>\n" +
            "    Given first step\n" +
            "    Then <arg> step\n" +
            "    Examples: examples name\n" +
            "      | name   | arg    |\n" +
            "      | name 1 | second |\n" +
            "      | name 2 | third  |\n");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Runtime.builder()
            .withFeatureSupplier(new StubFeatureSupplier(feature))
            .withAdditionalPlugins(new PrettyFormatter(out))
            // Monochrome so the expected output carries no ANSI escape codes.
            .withRuntimeOptions(new RuntimeOptionsBuilder().setMonochrome().build())
            .withBackendSupplier(new StubBackendSupplier(
                new StubStepDefinition("first step", "path/step_definitions.java:3"),
                new StubStepDefinition("second step", "path/step_definitions.java:7"),
                new StubStepDefinition("third step", "path/step_definitions.java:11")))
            .build()
            .run();

    assertThat(out, bytes(equalToCompressingWhiteSpace("" +
            "\n" +
            "Scenario Outline: name 1 # path/test.feature:7\n" +
            "  Given first step       # path/step_definitions.java:3\n" +
            "  Then second step       # path/step_definitions.java:7\n" +
            "\n" +
            "Scenario Outline: name 2 # path/test.feature:8\n" +
            "  Given first step       # path/step_definitions.java:3\n" +
            "  Then third step        # path/step_definitions.java:11\n")));
}
/**
 * Returns the mod's identifier, which fully identifies a mod.
 */
@Override
public String toString() {
    return this.modId;
}
// toString() must return exactly the id the Mod was constructed with.
@Test
public void basic() {
    mod1 = new Mod(AAA);
    assertEquals("wrong id", AAA, mod1.toString());
}
/**
 * Creates a JSON schema from a schema definition, falling back to
 * Jackson-based reader/writer implementations when the definition
 * does not supply its own.
 *
 * @param schemaDefinition the definition describing the POJO and optional codecs
 * @param <T> the POJO type
 * @return the constructed JSON schema
 */
public static <T> JSONSchema<T> of(SchemaDefinition<T> schemaDefinition) {
    SchemaReader<T> schemaReader = schemaDefinition.getSchemaReaderOpt()
            .orElseGet(() -> new JacksonJsonReader<>(jsonMapper(), schemaDefinition.getPojo()));
    SchemaWriter<T> schemaWriter = schemaDefinition.getSchemaWriterOpt()
            .orElseGet(() -> new JacksonJsonWriter<>(jsonMapper()));
    SchemaInfo schemaInfo = parseSchemaInfo(schemaDefinition, SchemaType.JSON);
    return new JSONSchema<>(schemaInfo, schemaDefinition.getPojo(), schemaReader, schemaWriter);
}
// Builds matching POJO (JSONSchema) and generic (GenericSchemaImpl) schemas for
// a PC with a nested Seller, then verifies: (1) POJO encode/decode round-trip,
// (2) generic-record encode/decode round-trip checked field by field including
// the nested record, and (3) that reading an unknown field does not throw.
@Test
public void testJsonGenericRecordBuilder() {
    JSONSchema<Seller> sellerJsonSchema = JSONSchema.of(Seller.class);
    RecordSchemaBuilder sellerSchemaBuilder = SchemaBuilder.record("seller");
    sellerSchemaBuilder.field("state").type(SchemaType.STRING);
    sellerSchemaBuilder.field("street").type(SchemaType.STRING);
    sellerSchemaBuilder.field("zipCode").type(SchemaType.INT64);
    SchemaInfo sellerSchemaInfo = sellerSchemaBuilder.build(SchemaType.JSON);
    GenericSchemaImpl sellerGenericSchema = GenericSchemaImpl.of(sellerSchemaInfo);
    JSONSchema<PC> pcJsonSchema = JSONSchema.of(PC.class);
    RecordSchemaBuilder pcSchemaBuilder = SchemaBuilder.record("pc");
    pcSchemaBuilder.field("brand").type(SchemaType.STRING);
    pcSchemaBuilder.field("model").type(SchemaType.STRING);
    pcSchemaBuilder.field("gpu").type(SchemaType.STRING);
    pcSchemaBuilder.field("year").type(SchemaType.INT64);
    // Nested record field; optional so a missing seller is legal.
    pcSchemaBuilder.field("seller", sellerGenericSchema).type(SchemaType.JSON).optional();
    SchemaInfo pcGenericSchemaInfo = pcSchemaBuilder.build(SchemaType.JSON);
    GenericSchemaImpl pcGenericSchema = GenericSchemaImpl.of(pcGenericSchemaInfo);
    Seller seller = new Seller("USA","oakstreet",9999);
    PC pc = new PC("dell","g3",2020, GPU.AMD, seller);
    // POJO round-trip through the JSON schema.
    byte[] bytes = pcJsonSchema.encode(pc);
    Assert.assertTrue(bytes.length > 0);
    Object pc2 = pcJsonSchema.decode(bytes);
    assertEquals(pc, pc2);
    GenericRecord sellerRecord = sellerGenericSchema.newRecordBuilder()
            .set("state", "USA")
            .set("street", "oakstreet")
            .set("zipCode", 9999)
            .build();
    GenericRecord pcRecord = pcGenericSchema.newRecordBuilder()
            .set("brand", "dell")
            .set("model","g3")
            .set("year", 2020)
            .set("gpu", GPU.AMD)
            .set("seller", sellerRecord)
            .build();
    // Generic-record round-trip, checked field by field (including nested seller).
    byte[] bytes3 = pcGenericSchema.encode(pcRecord);
    Assert.assertTrue(bytes3.length > 0);
    GenericRecord pc3Record = pcGenericSchema.decode(bytes3);
    for(Field field : pc3Record.getFields()) {
        assertTrue(pcGenericSchema.getFields().contains(field));
    }
    assertEquals("dell", pc3Record.getField("brand"));
    assertEquals("g3", pc3Record.getField("model"));
    assertEquals(2020, pc3Record.getField("year"));
    // Enums are serialized as their string form in JSON.
    assertEquals(GPU.AMD.toString(), pc3Record.getField("gpu"));
    GenericRecord seller3Record = (GenericRecord) pc3Record.getField("seller");
    assertEquals("USA", seller3Record.getField("state"));
    assertEquals("oakstreet", seller3Record.getField("street"));
    assertEquals(9999, seller3Record.getField("zipCode"));
    assertTrue(pc3Record instanceof GenericJsonRecord);
    // Accessing an unknown field must be a no-op, not an exception.
    Assertions.assertThatCode(() -> pc3Record.getField("I_DO_NOT_EXIST")).doesNotThrowAnyException();
}
// Purges executions (and optionally their logs, metrics, and storage) matching
// the configured namespace/flow/date filters, after verifying the current flow
// is authorized to act on the target namespace(s).
@Override
public PurgeExecutions.Output run(RunContext runContext) throws Exception {
    ExecutionService executionService = ((DefaultRunContext)runContext).getApplicationContext().getBean(ExecutionService.class);
    FlowService flowService = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowService.class);

    // validate that this namespace is authorized on the target namespace / all namespaces
    var flowInfo = runContext.flowInfo();
    if (namespace == null){
        // No namespace filter: the purge spans all namespaces, which needs a broader grant.
        flowService.checkAllowedAllNamespaces(flowInfo.tenantId(), flowInfo.tenantId(), flowInfo.namespace());
    } else if (!runContext.render(namespace).equals(flowInfo.namespace())) {
        // Purging a different namespace than our own: check the cross-namespace grant.
        flowService.checkAllowedNamespace(flowInfo.tenantId(), runContext.render(namespace), flowInfo.tenantId(), flowInfo.namespace());
    }

    // startDate is optional; endDate is required (rendered and parsed unconditionally).
    ExecutionService.PurgeResult purgeResult = executionService.purge(
        purgeExecution,
        purgeLog,
        purgeMetric,
        purgeStorage,
        flowInfo.tenantId(),
        runContext.render(namespace),
        runContext.render(flowId),
        startDate != null ? ZonedDateTime.parse(runContext.render(startDate)) : null,
        ZonedDateTime.parse(runContext.render(endDate)),
        states
    );

    return Output.builder()
        .executionsCount(purgeResult.getExecutionsCount())
        .logsCount(purgeResult.getLogsCount())
        .storagesCount(purgeResult.getStoragesCount())
        .metricsCount(purgeResult.getMetricsCount())
        .build();
}
// An execution that was soft-deleted from the repository must still be counted
// (and removed) by the purge task.
@Test
void deleted() throws Exception {
    String namespace = "deleted.namespace";
    String flowId = "deleted-flow-id";

    // create an execution to delete
    var execution = Execution.builder()
        .namespace(namespace)
        .flowId(flowId)
        .id(IdUtils.create())
        .state(new State().withState(State.Type.SUCCESS))
        .build();
    executionRepository.save(execution);
    executionRepository.delete(execution);

    var purge = PurgeExecutions.builder()
        .namespace(namespace)
        .flowId(flowId)
        // End date in the near future so the just-created execution is in range.
        .endDate(ZonedDateTime.now().plusMinutes(1).format(DateTimeFormatter.ISO_ZONED_DATE_TIME))
        .build();
    var runContext = runContextFactory.of(Map.of("flow", Map.of("namespace", namespace, "id", flowId)));
    var output = purge.run(runContext);
    assertThat(output.getExecutionsCount(), is(1));
}
// Resolves attributes for a path on S3. Buckets report only their region;
// multipart upload placeholders report the resumable offset; regular objects
// are fetched via (versioned) HEAD, with special handling for delete markers
// (HTTP 405) and for deciding whether a versioned object is the latest version.
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    if(containerService.isContainer(file)) {
        // Buckets: only the region is resolved; other attributes do not apply.
        final PathAttributes attributes = new PathAttributes();
        if(log.isDebugEnabled()) {
            log.debug(String.format("Read location for bucket %s", file));
        }
        attributes.setRegion(new S3LocationFeature(session, session.getClient().getRegionEndpointCache()).getLocation(file).getIdentifier());
        return attributes;
    }
    if(file.getType().contains(Path.Type.upload)) {
        // In-progress multipart upload: report the offset already transferred.
        final Write.Append append = new S3MultipartUploadService(session, new S3WriteFeature(session, acl), acl).append(file, new TransferStatus());
        if(append.append) {
            return new PathAttributes().withSize(append.offset);
        }
        throw new NotfoundException(file.getAbsolute());
    }
    try {
        PathAttributes attr;
        final Path bucket = containerService.getContainer(file);
        try {
            attr = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getVersionedObjectDetails(
                file.attributes().getVersionId(), bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file)));
        }
        catch(ServiceException e) {
            switch(e.getResponseCode()) {
                case 405:
                    if(log.isDebugEnabled()) {
                        log.debug(String.format("Mark file %s as delete marker", file));
                    }
                    // Only DELETE method is allowed for delete markers
                    attr = new PathAttributes();
                    attr.setCustom(Collections.singletonMap(KEY_DELETE_MARKER, Boolean.TRUE.toString()));
                    attr.setDuplicate(true);
                    return attr;
            }
            throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
        }
        if(StringUtils.isNotBlank(attr.getVersionId())) {
            if(log.isDebugEnabled()) {
                log.debug(String.format("Determine if %s is latest version for %s", attr.getVersionId(), file));
            }
            // Determine if latest version
            try {
                // HEAD without a version id returns the current (latest) version.
                final String latest = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getObjectDetails(
                    bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file))).getVersionId();
                if(null != latest) {
                    if(log.isDebugEnabled()) {
                        log.debug(String.format("Found later version %s for %s", latest, file));
                    }
                    // Duplicate if not latest version
                    attr.setDuplicate(!latest.equals(attr.getVersionId()));
                }
            }
            catch(ServiceException e) {
                final BackgroundException failure = new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
                if(failure instanceof NotfoundException) {
                    // Current version is a delete marker: this version is not the latest.
                    attr.setDuplicate(true);
                }
                else {
                    throw failure;
                }
            }
        }
        return attr;
    }
    catch(NotfoundException e) {
        if(file.isDirectory()) {
            if(log.isDebugEnabled()) {
                log.debug(String.format("Search for common prefix %s", file));
            }
            // File may be marked as placeholder but no placeholder file exists. Check for common prefix returned.
            try {
                new S3ObjectListService(session, acl).list(file, new CancellingListProgressListener(), String.valueOf(Path.DELIMITER), 1);
            }
            catch(ListCanceledException l) {
                // Found common prefix
                return PathAttributes.EMPTY;
            }
            catch(NotfoundException n) {
                throw e;
            }
            // Found common prefix
            return PathAttributes.EMPTY;
        }
        throw e;
    }
}
// A freshly created directory placeholder must resolve with zero size, the
// MD5 checksum of an empty body, and a real modification date.
@Test
public void testFindPlaceholder() throws Exception {
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
    final Path test = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final PathAttributes attributes = new S3AttributesFinderFeature(session, acl).find(test);
    // Clean up before asserting so the bucket is left tidy even on failure below.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertEquals(0L, attributes.getSize());
    // d41d8... is the well-known MD5 of the empty byte sequence.
    assertEquals(Checksum.parse("d41d8cd98f00b204e9800998ecf8427e"), attributes.getChecksum());
    assertNotEquals(-1L, attributes.getModificationDate());
}
/**
 * Returns the adjacency list of the given vertex, or a new empty list when
 * the vertex is not present in the graph (never {@code null}).
 *
 * @param vertex the vertex whose neighbors are requested
 * @return the neighbors of {@code vertex}, or an empty list if unknown
 */
public List<V> getNeighbors(V vertex) {
    if (!containsVertex(vertex)) {
        return new ArrayList<>();
    }
    return neighbors.get(vertex);
}
// Vertex 'B' of the fixture graph must have exactly the neighbors C and F,
// in insertion order.
@Test
void getNeighbors() {
    List<Character> result = graph.getNeighbors('B');
    List<Character> expected = Arrays.asList('C', 'F');
    assertEquals(expected, result);
}
// Toggles the process-wide debug-mode flag shared by the RPC runtime.
public static void setDebugMode(boolean debugMode) {
    RpcRunningState.debugMode = debugMode;
}
// setDebugMode must round-trip through isDebugMode for both true and false;
// the original global value is restored afterwards so other tests are unaffected.
@Test
public void setDebugMode() throws Exception {
    boolean old = RpcRunningState.isDebugMode();
    try {
        RpcRunningState.setDebugMode(true);
        Assert.assertTrue(RpcRunningState.isDebugMode());
        RpcRunningState.setDebugMode(false);
        Assert.assertFalse(RpcRunningState.isDebugMode());
    } finally {
        // Restore the shared flag regardless of assertion outcome.
        RpcRunningState.setDebugMode(old);
    }
}
// Convenience overload: creates a daemon thread in the default "hollow"
// thread group. Delegates to the four-argument variant.
public static Thread daemonThread(Runnable r, Class<?> context, String description) {
    return daemonThread(r, "hollow", context, description);
}
// A null context class must be rejected with an NPE carrying the exact
// message "context required".
@Test
public void nullContext() {
    try {
        daemonThread(() -> {}, null, "boom");
        fail("expected an exception");
    } catch (NullPointerException e) {
        assertEquals("context required", e.getMessage());
    }
}
// Convenience overload: builds a generic schema from the schema info with
// field-presence support enabled (the boolean defaults to true).
public static GenericSchemaImpl of(SchemaInfo schemaInfo) {
    return of(schemaInfo, true);
}
// AUTO_CONSUME must be able to decode records produced with a concrete JSON
// schema, resolving the writer schema via the (mocked) schema-info provider.
@Test
public void testAutoJsonSchema() {
    // configure the schema info provider
    MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);
    GenericSchema genericAvroSchema = GenericSchemaImpl.of(Schema.JSON(Foo.class).getSchemaInfo());
    when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
        .thenReturn(CompletableFuture.completedFuture(genericAvroSchema.getSchemaInfo()));

    // configure encode schema
    Schema<Foo> encodeSchema = Schema.JSON(Foo.class);

    // configure decode schema
    AutoConsumeSchema decodeSchema = new AutoConsumeSchema();
    decodeSchema.configureSchemaInfo("test-topic", "topic", encodeSchema.getSchemaInfo());
    decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);

    testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema);
}
public void verify( Optional<String> expectedClusterId, OptionalInt expectedNodeId, EnumSet<VerificationFlag> verificationFlags ) { Map<Uuid, String> seenUuids = new HashMap<>(); if (verificationFlags.contains(VerificationFlag.REQUIRE_AT_LEAST_ONE_VALID)) { if (logDirProps.isEmpty()) { throw new RuntimeException("No readable meta.properties files found."); } } for (Entry<String, MetaProperties> entry : logDirProps.entrySet()) { String logDir = entry.getKey(); String path = new File(logDir, META_PROPERTIES_NAME).toString(); MetaProperties metaProps = entry.getValue(); if (verificationFlags.contains(VerificationFlag.REQUIRE_V0)) { if (!metaProps.version().equals(MetaPropertiesVersion.V0)) { throw new RuntimeException("Found unexpected version in " + path + ". " + "ZK-based brokers that are not migrating only support version 0 " + "(which is implicit when the `version` field is missing)."); } } if (!metaProps.clusterId().isPresent()) { if (metaProps.version().alwaysHasClusterId()) { throw new RuntimeException("cluster.id was not specified in the v1 file: " + path); } } else if (!expectedClusterId.isPresent()) { expectedClusterId = metaProps.clusterId(); } else if (!metaProps.clusterId().get().equals(expectedClusterId.get())) { throw new RuntimeException("Invalid cluster.id in: " + path + ". Expected " + expectedClusterId.get() + ", but read " + metaProps.clusterId().get()); } if (!metaProps.nodeId().isPresent()) { if (metaProps.version().alwaysHasNodeId()) { throw new RuntimeException("node.id was not specified in " + path); } } else if (!expectedNodeId.isPresent()) { expectedNodeId = metaProps.nodeId(); } else if (metaProps.nodeId().getAsInt() != expectedNodeId.getAsInt()) { throw new RuntimeException("Stored node id " + metaProps.nodeId().getAsInt() + " doesn't match previous node id " + expectedNodeId.getAsInt() + " in " + path + ". If you moved your data, make sure your configured node id matches. 
If you " + "intend to create a new node, you should remove all data in your data " + "directories."); } if (metaProps.directoryId().isPresent()) { if (DirectoryId.reserved(metaProps.directoryId().get())) { throw new RuntimeException("Invalid resrved directory ID " + metaProps.directoryId().get() + " found in " + logDir); } String prevLogDir = seenUuids.put(metaProps.directoryId().get(), logDir); if (prevLogDir != null) { throw new RuntimeException("Duplicate directory ID " + metaProps.directoryId() + " found. It was the ID of " + prevLogDir + ", " + "but also of " + logDir); } } } if (verificationFlags.contains(VerificationFlag.REQUIRE_METADATA_LOG_DIR)) { if (!metadataLogDir.isPresent()) { throw new RuntimeException("No metadata log directory was specified."); } } if (metadataLogDir.isPresent()) { if (errorLogDirs.contains(metadataLogDir.get())) { throw new RuntimeException("Encountered I/O error in metadata log directory " + metadataLogDir.get() + ". Cannot continue."); } } }
// With no readable meta.properties files, REQUIRE_AT_LEAST_ONE_VALID must
// cause verify() to fail with the exact "no readable files" message.
@Test
public void testVerificationFailureOnRequireAtLeastOneValid() {
    assertEquals("No readable meta.properties files found.",
        assertThrows(RuntimeException.class,
            () -> EMPTY.verify(Optional.empty(), OptionalInt.empty(),
                EnumSet.of(REQUIRE_AT_LEAST_ONE_VALID))).
            getMessage());
}
// Returns the hash key (primary key over the map's key type) configured for
// this schema, or null when no hash-key fields were declared.
public PrimaryKey getHashKey() {
    return hashKey;
}
// Map schemas must be equal iff their hash-key field lists match exactly;
// the derived hash key must equal a PrimaryKey over the same key type/fields.
@Test
public void testEqualsWithKeys() {
    {
        // Same single-field key: equal schemas and equal hash keys.
        HollowMapSchema s1 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1");
        HollowMapSchema s2 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1");
        Assert.assertEquals(s1, s2);
        Assert.assertEquals(s1.getHashKey(), s2.getHashKey());
        Assert.assertEquals(new PrimaryKey("TypeA", "f1"), s2.getHashKey());
    }
    {
        // Same two-field key: still equal.
        HollowMapSchema s1 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1", "f2");
        HollowMapSchema s2 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1", "f2");
        Assert.assertEquals(s1, s2);
        Assert.assertEquals(s1.getHashKey(), s2.getHashKey());
        Assert.assertEquals(new PrimaryKey("TypeA", "f1", "f2"), s2.getHashKey());
    }
    {
        // No key vs one key field: not equal.
        HollowMapSchema s1 = new HollowMapSchema("Test", "TypeA", "TypeB");
        HollowMapSchema s2 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1");
        Assert.assertNotEquals(s1, s2);
        Assert.assertNotEquals(s1.getHashKey(), s2.getHashKey());
    }
    {
        // One key field vs two: not equal.
        HollowMapSchema s1 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1");
        HollowMapSchema s2 = new HollowMapSchema("Test", "TypeA", "TypeB", "f1", "f2");
        Assert.assertNotEquals(s1, s2);
        Assert.assertNotEquals(s1.getHashKey(), s2.getHashKey());
    }
}
/**
 * Walks {@code childNodePath} down from {@code dom} and returns the value of
 * the final node, or empty when {@code dom} is null, any hop along the path
 * is missing, or the final node has no value.
 *
 * @param dom the root node to start from (may be null)
 * @param childNodePath the chain of child element names to follow
 * @return the value of the resolved node, if any
 */
@VisibleForTesting
static Optional<String> getChildValue(@Nullable Xpp3Dom dom, String... childNodePath) {
    Xpp3Dom current = dom;
    for (String childName : childNodePath) {
        if (current == null) {
            return Optional.empty();
        }
        current = current.getChild(childName);
    }
    if (current == null) {
        return Optional.empty();
    }
    return Optional.ofNullable(current.getValue());
}
// A path that does not fully resolve (missing at the first hop or at a deeper
// hop) must yield an empty Optional.
@Test
public void testGetChildValue_notFullyMatched() {
    Xpp3Dom root = newXpp3Dom("root", "value");
    Xpp3Dom foo = addXpp3DomChild(root, "foo", "foo");
    addXpp3DomChild(foo, "bar", "bar");

    assertThat(MavenProjectProperties.getChildValue(root, "baz")).isEmpty();
    assertThat(MavenProjectProperties.getChildValue(root, "foo", "baz")).isEmpty();
}
// Publishes the proxy's own version and then records, for every database in
// the metadata contexts, the version reported by its backend storage.
public static void setVersion(final ContextManager contextManager) {
    CommonConstants.PROXY_VERSION.set(getProxyVersion());
    contextManager.getMetaDataContexts().getMetaData().getDatabases().values().forEach(ShardingSphereProxyVersion::setDatabaseVersion);
}
// When the storage type (Oracle 12.0.0) differs from the front-end protocol
// type (MySQL), the advertised protocol version must stay the MySQL default
// (5.7.22) rather than adopting the storage version.
@Test
void assertSetVersionWhenStorageTypeDifferentWithProtocolType() throws SQLException {
    ShardingSphereProxyVersion.setVersion(mockContextManager("Oracle", "12.0.0"));
    assertThat(DatabaseProtocolServerInfo.getProtocolVersion("foo_db", TypedSPILoader.getService(DatabaseType.class, "MySQL")), startsWith("5.7.22"));
}
// Adds (or replaces) a device key in the backing store. Rejects null input
// up front; the store upserts by the key's identifier.
@Override
public void addKey(DeviceKey deviceKey) {
    checkNotNull(deviceKey, "Device key cannot be null");
    store.createOrUpdateDeviceKey(deviceKey);
}
@Test public void testAddKey() { DeviceKeyId deviceKeyId = DeviceKeyId.deviceKeyId(deviceKeyIdValue); DeviceKey deviceKey = DeviceKey.createDeviceKeyUsingCommunityName(deviceKeyId, deviceKeyLabel, deviceKeySnmpName); // Test to make sure that the device key store is empty Collection<DeviceKey> deviceKeys = manager.getDeviceKeys(); assertTrue("The device key set should be empty.", deviceKeys.isEmpty()); // Add the new device key using the device key manager. manager.addKey(deviceKey); // Test the getDeviceKeys method to make sure that the new device key exists deviceKeys = manager.getDeviceKeys(); assertEquals("There should be one device key in the set.", deviceKeys.size(), 1); // Test the getDeviceKey method using the device key unique identifier deviceKey = manager.getDeviceKey(deviceKeyId); assertEquals("There should be one device key in the set.", deviceKeys.size(), 1); // Validate that only the DEVICE_KEY_ADDED event was received. validateEvents(DeviceKeyEvent.Type.DEVICE_KEY_ADDED); }
// Returns this transform function's registered name.
@Override
public String getName() {
    return _name;
}
// hamming_distance(col, col) compares each string with itself, so the expected
// distance is 0 for every row.
@Test
public void testStringHammingDistanceTransformFunction() {
    ExpressionContext expression = RequestContextUtils.getExpression(
        String.format("hamming_distance(%s, %s)", STRING_ALPHANUM_SV_COLUMN, STRING_ALPHANUM_SV_COLUMN));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "hammingDistance");
    // Fixed: the previous per-character loop compared each character with itself
    // and so never incremented the distance — it was dead code. A string's
    // Hamming distance to itself is always 0, and a freshly allocated int[]
    // is already zero-filled.
    int[] expectedValues = new int[NUM_ROWS];
    testTransformFunction(transformFunction, expectedValues);
}
/**
 * Returns the rules that use the requested storage unit, or an empty
 * collection when the storage unit does not exist in this database.
 *
 * @param sqlStatement the SHOW RULES USED STORAGE UNIT statement
 * @param contextManager the context manager (unused here)
 * @return matching result rows, possibly empty
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowRulesUsedStorageUnitStatement sqlStatement, final ContextManager contextManager) {
    String resourceName = sqlStatement.getStorageUnitName().orElse(null);
    if (database.getResourceMetaData().getStorageUnits().containsKey(resourceName)) {
        return getRows(sqlStatement);
    }
    return Collections.emptyList();
}
// Asking for rules used by a storage unit that the database does not contain
// must yield an empty result, not an error.
@Test
void assertGetEmptyRowData() {
    executor.setDatabase(mockEmptyDatabase());
    ShowRulesUsedStorageUnitStatement sqlStatement = new ShowRulesUsedStorageUnitStatement("empty_ds", mock(DatabaseSegment.class));
    assertTrue(executor.getRows(sqlStatement, mock(ContextManager.class)).isEmpty());
}
// Synchronous wrapper: for each entry, adds the member or updates its score
// only when the new score is greater than the current one. Returns the number
// of entries actually added or updated. Blocks on the async variant.
@Override
public int addAllIfGreater(Map<V, Double> objects) {
    return get(addAllIfGreaterAsync(objects));
}
// addAllIfGreater must insert new members unconditionally and update existing
// members only when the proposed score exceeds the current one; entries with a
// lower score ("1984": 8 < 11, "1985": 3 < 13) are left untouched.
@Test
public void testAddAllIfGreater() {
    RScoredSortedSet<String> set = redisson.getScoredSortedSet("simple");
    set.add(10, "1981");
    set.add(11, "1984");
    set.add(13, "1985");

    Map<String, Double> map = new HashMap<>();
    map.put("1981", 111D);
    map.put("1982", 112D);
    map.put("1983", 113D);
    map.put("1984", 8D);
    map.put("1985", 3D);
    // 3 effective changes: "1981" updated, "1982" and "1983" inserted.
    assertThat(set.addAllIfGreater(map)).isEqualTo(3);
    assertThat(set.size()).isEqualTo(5);
    assertThat(set.getScore("1981")).isEqualTo(111D);
    assertThat(set.getScore("1982")).isEqualTo(112D);
    assertThat(set.getScore("1983")).isEqualTo(113D);
    // Lower proposed scores must NOT overwrite the existing ones.
    assertThat(set.getScore("1984")).isEqualTo(11D);
    assertThat(set.getScore("1985")).isEqualTo(13D);
}
// Loads the value for a key by running the configured SELECT. Returns null
// when no row matches, throws when more than one row matches. Depending on
// configuration, the value is either the single value column or a
// GenericRecord built from the whole row.
@Override
public V load(K key) {
    // Block until the store's SQL mapping has been created successfully.
    awaitSuccessfulInit();
    try (SqlResult queryResult = sqlService.execute(queries.load(), key)) {
        Iterator<SqlRow> it = queryResult.iterator();
        V value = null;
        if (it.hasNext()) {
            SqlRow sqlRow = it.next();
            if (it.hasNext()) {
                // The key column should be unique; a second row indicates a misconfigured mapping.
                throw new IllegalStateException("multiple matching rows for a key " + key);
            }
            // If there is a single column as the value, return that column as the value
            if (queryResult.getRowMetadata().getColumnCount() == 2 && genericMapStoreProperties.singleColumnAsValue) {
                value = sqlRow.getObject(1);
            } else {
                //noinspection unchecked
                value = (V) toGenericRecord(sqlRow, genericMapStoreProperties);
            }
        }
        return value;
    }
}
// When the external-name property points at a concrete table, the map loader
// must read rows from that table (here: the single inserted item for key 0).
@Test
public void givenTableNameProperty_whenCreateMapLoader_thenUseTableName() {
    String tableName = randomName();
    ObjectSpec spec = objectProvider.createObject(tableName, false);
    objectProvider.insertItems(spec, 1);

    Properties properties = new Properties();
    properties.setProperty(DATA_CONNECTION_REF_PROPERTY, TEST_DATABASE_REF);
    properties.setProperty(EXTERNAL_NAME_PROPERTY, tableName);
    mapLoader = createMapLoader(properties, hz);

    GenericRecord genericRecord = mapLoader.load(0);
    assertThat(genericRecord).isNotNull();
}
// Delegates to the merged result set, requesting the raw Object value for
// the 1-based column index.
@Override
public Object getObject(final int columnIndex) throws SQLException {
    return mergeResultSet.getValue(columnIndex, Object.class);
}
// getObject(index, Date.class) must forward to the merged result set's typed
// getValue and return its result unchanged.
@Test
void assertGetObjectWithDate() throws SQLException {
    Date result = mock(Date.class);
    when(mergeResultSet.getValue(1, Date.class)).thenReturn(result);
    assertThat(shardingSphereResultSet.getObject(1, Date.class), is(result));
}
// Translates the user-facing raw configuration into a fully configured
// JibContainerBuilder: base image, app root, container plan (entrypoint, env,
// ports, volumes, labels, user, creation time, platforms), working directory,
// and extra directory layers. Each getter-with-"Checked" validates its input
// and throws the matching Invalid* exception.
@VisibleForTesting
static JibContainerBuilder processCommonConfiguration(
    RawConfiguration rawConfiguration,
    InferredAuthProvider inferredAuthProvider,
    ProjectProperties projectProperties)
    throws InvalidFilesModificationTimeException, InvalidAppRootException,
        IncompatibleBaseImageJavaVersionException, IOException, InvalidImageReferenceException,
        InvalidContainerizingModeException, MainClassInferenceException, InvalidPlatformException,
        InvalidContainerVolumeException, InvalidWorkingDirectoryException,
        InvalidCreationTimeException, ExtraDirectoryNotFoundException {

    // Create and configure JibContainerBuilder
    ModificationTimeProvider modificationTimeProvider =
        createModificationTimeProvider(rawConfiguration.getFilesModificationTime());
    JavaContainerBuilder javaContainerBuilder =
        getJavaContainerBuilderWithBaseImage(
                rawConfiguration, projectProperties, inferredAuthProvider)
            .setAppRoot(getAppRootChecked(rawConfiguration, projectProperties))
            .setModificationTimeProvider(modificationTimeProvider);
    JibContainerBuilder jibContainerBuilder =
        projectProperties.createJibContainerBuilder(
            javaContainerBuilder,
            getContainerizingModeChecked(rawConfiguration, projectProperties));
    jibContainerBuilder
        .setFormat(rawConfiguration.getImageFormat())
        .setPlatforms(getPlatformsSet(rawConfiguration))
        .setEntrypoint(computeEntrypoint(rawConfiguration, projectProperties, jibContainerBuilder))
        .setProgramArguments(rawConfiguration.getProgramArguments().orElse(null))
        .setEnvironment(rawConfiguration.getEnvironment())
        .setExposedPorts(Ports.parse(rawConfiguration.getPorts()))
        .setVolumes(getVolumesSet(rawConfiguration))
        .setLabels(rawConfiguration.getLabels())
        .setUser(rawConfiguration.getUser().orElse(null))
        .setCreationTime(getCreationTime(rawConfiguration.getCreationTime(), projectProperties));
    getWorkingDirectoryChecked(rawConfiguration)
        .ifPresent(jibContainerBuilder::setWorkingDirectory);

    // Adds all the extra files.
    for (ExtraDirectoriesConfiguration extraDirectory : rawConfiguration.getExtraDirectories()) {
        Path from = extraDirectory.getFrom();
        if (Files.exists(from)) {
            jibContainerBuilder.addFileEntriesLayer(
                JavaContainerBuilderHelper.extraDirectoryLayerConfiguration(
                    from,
                    AbsoluteUnixPath.get(extraDirectory.getInto()),
                    extraDirectory.getIncludesList(),
                    extraDirectory.getExcludesList(),
                    rawConfiguration.getExtraDirectoryPermissions(),
                    modificationTimeProvider));
        } else if (!from.endsWith(DEFAULT_JIB_DIR)) {
            // Only the implicit default directory may be missing silently;
            // a user-specified extra directory that does not exist is an error.
            throw new ExtraDirectoryNotFoundException(from.toString(), from.toString());
        }
    }
    return jibContainerBuilder;
}
// For a WAR project with no explicit entrypoint, the default Jetty launch
// command must be used, and no warnings must be logged.
@Test
public void testEntrypoint_defaultWarPackaging()
    throws IOException, InvalidImageReferenceException, MainClassInferenceException,
        InvalidAppRootException, InvalidWorkingDirectoryException, InvalidPlatformException,
        InvalidContainerVolumeException, IncompatibleBaseImageJavaVersionException,
        NumberFormatException, InvalidContainerizingModeException,
        InvalidFilesModificationTimeException, InvalidCreationTimeException,
        ExtraDirectoryNotFoundException {
    when(projectProperties.isWarProject()).thenReturn(true);

    ContainerBuildPlan buildPlan = processCommonConfiguration();

    assertThat(buildPlan.getEntrypoint())
        .containsExactly("java", "-jar", "/usr/local/jetty/start.jar", "--module=ee10-deploy")
        .inOrder();
    verifyNoInteractions(logger);
}
/**
 * Validates the four mandatory config parameters, throwing a BAD_REQUEST
 * {@code NacosApiException} for the first one that is blank, malformed, or
 * (for content) longer than the configured maximum.
 *
 * @param dataId  the config data id (validated after trimming)
 * @param group   the config group
 * @param datumId the datum id
 * @param content the config content
 * @throws NacosException when any parameter fails validation
 */
public static void checkParam(String dataId, String group, String datumId, String content) throws NacosException {
    // Each guard throws, so the original else-if chain is equivalent to
    // these sequential checks.
    if (StringUtils.isBlank(dataId) || !isValid(dataId.trim())) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_VALIDATE_ERROR,
                "invalid dataId : " + dataId);
    }
    if (StringUtils.isBlank(group) || !isValid(group)) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_VALIDATE_ERROR,
                "invalid group : " + group);
    }
    if (StringUtils.isBlank(datumId) || !isValid(datumId)) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_VALIDATE_ERROR,
                "invalid datumId : " + datumId);
    }
    if (StringUtils.isBlank(content)) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_VALIDATE_ERROR,
                "content is blank : " + content);
    }
    if (content.length() > PropertyUtil.getMaxContent()) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_VALIDATE_ERROR,
                "invalid content, over " + PropertyUtil.getMaxContent());
    }
}
// Each advance-info field has a hard limit; every over-limit value (and any
// unknown field name) must be rejected with a NacosException. Each case
// expects the exception and fails the test otherwise.
@Test
void testCheckParamV3() {
    //tag size over 5
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    configAdvanceInfo.put("config_tags", "test,test,test,test,test,test");
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //tag length over 5
    configAdvanceInfo.clear();
    StringBuilder tagBuilder = new StringBuilder();
    for (int i = 0; i < 65; i++) {
        tagBuilder.append("t");
    }
    configAdvanceInfo.put("config_tags", tagBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //desc length over 128
    configAdvanceInfo.clear();
    StringBuilder descBuilder = new StringBuilder();
    for (int i = 0; i < 129; i++) {
        descBuilder.append("t");
    }
    configAdvanceInfo.put("desc", descBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //use length over 32
    configAdvanceInfo.clear();
    StringBuilder useBuilder = new StringBuilder();
    for (int i = 0; i < 33; i++) {
        useBuilder.append("t");
    }
    configAdvanceInfo.put("use", useBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //effect length over 32
    configAdvanceInfo.clear();
    StringBuilder effectBuilder = new StringBuilder();
    for (int i = 0; i < 33; i++) {
        effectBuilder.append("t");
    }
    configAdvanceInfo.put("effect", effectBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //type length over 32
    configAdvanceInfo.clear();
    StringBuilder typeBuilder = new StringBuilder();
    for (int i = 0; i < 33; i++) {
        typeBuilder.append("t");
    }
    configAdvanceInfo.put("type", typeBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //schema length over 32768
    configAdvanceInfo.clear();
    StringBuilder schemaBuilder = new StringBuilder();
    for (int i = 0; i < 32769; i++) {
        schemaBuilder.append("t");
    }
    configAdvanceInfo.put("schema", schemaBuilder.toString());
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
    //invalid param
    configAdvanceInfo.clear();
    configAdvanceInfo.put("test", "test");
    try {
        ParamUtils.checkParam(configAdvanceInfo);
        fail();
    } catch (NacosException e) {
        System.out.println(e.toString());
    }
}
/**
 * Looks up the registered {@link FileSystem} for a URI scheme
 * (case-insensitively).
 *
 * @param scheme the URI scheme, e.g. {@code "file"} or {@code "gs"}
 * @return the registered file system for the scheme
 * @throws IllegalArgumentException when no file system is registered for it
 */
@VisibleForTesting
static FileSystem getFileSystemInternal(String scheme) {
    // Lowercase with Locale.ROOT so the lookup is independent of the default
    // locale (e.g. under a Turkish locale "FILE".toLowerCase() yields a
    // dotless 'ı' and would miss the "file" registry entry).
    String lowerCaseScheme = scheme.toLowerCase(java.util.Locale.ROOT);
    Map<String, FileSystem> schemeToFileSystem = SCHEME_TO_FILESYSTEM.get();
    FileSystem rval = schemeToFileSystem.get(lowerCaseScheme);
    if (rval == null) {
        throw new IllegalArgumentException("No filesystem found for scheme " + scheme);
    }
    return rval;
}
// Local paths and "file" schemes in any letter case must resolve to the
// LocalFileSystem.
// NOTE(review): the trailing IS_OS_WINDOWS branch is unreachable because
// assumeFalse skips the whole test on Windows — confirm whether it should
// be removed or the assumption relaxed.
@Test
public void testGetLocalFileSystem() throws Exception {
    // TODO: Java core test failing on windows, https://github.com/apache/beam/issues/20484
    assumeFalse(SystemUtils.IS_OS_WINDOWS);
    assertTrue(
        FileSystems.getFileSystemInternal(toLocalResourceId("~/home/").getScheme())
            instanceof LocalFileSystem);
    assertTrue(
        FileSystems.getFileSystemInternal(toLocalResourceId("file://home").getScheme())
            instanceof LocalFileSystem);
    // Scheme lookup must be case-insensitive.
    assertTrue(
        FileSystems.getFileSystemInternal(toLocalResourceId("FILE://home").getScheme())
            instanceof LocalFileSystem);
    assertTrue(
        FileSystems.getFileSystemInternal(toLocalResourceId("File://home").getScheme())
            instanceof LocalFileSystem);
    if (SystemUtils.IS_OS_WINDOWS) {
        assertTrue(
            FileSystems.getFileSystemInternal(toLocalResourceId("c:\\home\\").getScheme())
                instanceof LocalFileSystem);
    }
}
/**
 * Returns the file client for the given configuration id, served from the cache.
 *
 * @param id file configuration id
 * @return the (possibly newly loaded) file client
 */
@Override
public FileClient getFileClient(Long id) {
    // getUnchecked: cache-loader failures surface as unchecked exceptions.
    return clientCache.getUnchecked(id);
}
@Test
public void testGetFileClient() {
    // Mock data: persist a non-master file config.
    FileConfigDO fileConfig = randomFileConfigDO().setMaster(false);
    fileConfigMapper.insert(fileConfig);
    // Prepare parameters.
    Long id = fileConfig.getId();
    // Mock the factory to hand back a known client for this id.
    FileClient fileClient = new LocalFileClient(id, new LocalFileClientConfig());
    when(fileClientFactory.getFileClient(eq(id))).thenReturn(fileClient);
    // Invoke and assert: the service must return the factory's client.
    assertSame(fileClient, fileConfigService.getFileClient(id));
    // Assert cache loading: the client must have been (re)created from the stored config.
    verify(fileClientFactory).createOrUpdateFileClient(eq(id), eq(fileConfig.getStorage()), eq(fileConfig.getConfig()));
}
/**
 * Maps each group UUID to whether it is managed by an external identity provider.
 * Delegates to the first available managed-instance service; when none is
 * configured, every group is reported as non-managed.
 */
@Override
public Map<String, Boolean> getGroupUuidToManaged(DbSession dbSession, Set<String> groupUuids) {
    return findManagedInstanceService()
        .map(managedInstanceService -> managedInstanceService.getGroupUuidToManaged(dbSession, groupUuids))
        // orElseGet (not orElse): the fallback map is only built when no delegate exists,
        // instead of being computed unconditionally on every call.
        .orElseGet(() -> returnNonManagedForAll(groupUuids));
}
@Test
public void getGroupUuidToManaged_whenNoDelegates_setAllUsersAsNonManaged() {
    // With no delegate services configured, every group must come back as non-managed.
    Map<String, Boolean> groupUuidToManaged =
        NO_MANAGED_SERVICES.getGroupUuidToManaged(dbSession, Set.of("a", "b"));

    assertThat(groupUuidToManaged).containsExactlyInAnyOrderEntriesOf(Map.of("a", false, "b", false));
}
/**
 * Splits {@code input} into entries on {@code entryDelimiter}, then each entry
 * into a key and value on {@code kvDelimiter}, returning the resulting map.
 * Entries without the key/value delimiter are skipped; duplicate keys keep the
 * last value seen. Returns {@code null} for null arguments or unusable
 * delimiters (empty, or equal to each other).
 */
@Udf
public Map<String, String> splitToMap(
    @UdfParameter(
        description = "The input string to parse into map entries") final String input,
    @UdfParameter(
        description = "The delimiter that separates entries from each other") final String entryDelimiter,
    @UdfParameter(
        description = "The delimiter that separates a key from its value") final String kvDelimiter) {
  // Null propagation: any missing argument yields a null result.
  if (input == null || entryDelimiter == null || kvDelimiter == null) {
    return null;
  }
  // Degenerate delimiters cannot be parsed unambiguously.
  if (entryDelimiter.isEmpty() || kvDelimiter.isEmpty() || entryDelimiter.equals(kvDelimiter)) {
    return null;
  }
  final Iterable<String> entries = Splitter.on(entryDelimiter).omitEmptyStrings().split(input);
  return StreamSupport.stream(entries.spliterator(), false)
      .filter(e -> e.contains(kvDelimiter))
      // Extract key and value explicitly rather than passing Iterator::next as both
      // mapper functions: Collectors.toMap does not specify the order in which it
      // invokes the key and value mappers, so that pattern relied on an
      // implementation detail.
      .map(kv -> {
        final Iterator<String> parts = Splitter.on(kvDelimiter).split(kv).iterator();
        final String key = parts.next();
        final String value = parts.next();
        return new java.util.AbstractMap.SimpleImmutableEntry<>(key, value);
      })
      .collect(Collectors.toMap(
          Map.Entry::getKey,
          Map.Entry::getValue,
          (v1, v2) -> v2));
}
@Test
public void shouldReturnEmptyForEmptyInput() {
    // Splitting the empty string must yield an empty map, never null.
    final Map<String, String> map = udf.splitToMap("", "/", ":=");

    assertThat(map, is(Collections.EMPTY_MAP));
}
/**
 * Always fails: elements cannot be added through this set.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public boolean add(Integer partitionId) {
    // Include a message so the failure is self-explanatory in stack traces.
    throw new UnsupportedOperationException("add is not supported");
}
// Attempting to add an element must be rejected with UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class)
public void test_add() {
    set.add(5);
}
/**
 * Maps a bucket id onto a writer partition after validating its range.
 */
@Override
public int partition(Integer bucketId, int numPartitions) {
    // Reject null, negative, and out-of-range bucket ids with their dedicated messages.
    Preconditions.checkNotNull(bucketId, BUCKET_NULL_MESSAGE);
    Preconditions.checkArgument(bucketId >= 0, BUCKET_LESS_THAN_LOWER_BOUND_MESSAGE, bucketId);
    Preconditions.checkArgument(
        bucketId < maxNumBuckets, BUCKET_GREATER_THAN_UPPER_BOUND_MESSAGE, bucketId, maxNumBuckets);

    // With no more partitions than buckets a plain modulo suffices; otherwise
    // defer to the strategy for more writers than buckets.
    return numPartitions <= maxNumBuckets
        ? bucketId % numPartitions
        : getPartitionWithMoreWritersThanBuckets(bucketId, numPartitions);
}
@Test
public void testPartitionerBucketIdNullFail() {
    // A null bucket id must be rejected up-front with the dedicated message.
    final PartitionSpec spec = TableSchemaType.ONE_BUCKET.getPartitionSpec(DEFAULT_NUM_BUCKETS);
    final BucketPartitioner partitioner = new BucketPartitioner(spec);

    assertThatExceptionOfType(RuntimeException.class)
        .isThrownBy(() -> partitioner.partition(null, DEFAULT_NUM_BUCKETS))
        .withMessage(BUCKET_NULL_MESSAGE);
}
/**
 * Stops the consumer: shuts down the reconnect service, runs the superclass
 * stop logic, and only then closes the SMPP session.
 */
@Override
protected void doStop() throws Exception {
    shutdownReconnectService(reconnectService);
    LOG.debug("Disconnecting from: {}...", getEndpoint().getConnectionString());
    super.doStop();
    // Session is closed after super.doStop() so in-flight processing can finish first.
    closeSession();
    LOG.info("Disconnected from: {}", getEndpoint().getConnectionString());
}
@Test
public void doStopShouldCloseTheSMPPSession() throws Exception {
    // Reuse the start test to obtain a consumer with a live (mocked) session.
    doStartShouldStartANewSmppSession();
    // Clear recorded interactions so only doStop()'s calls are verified below.
    reset(endpoint, processor, session);

    when(endpoint.getConnectionString())
        .thenReturn("smpp://smppclient@localhost:2775");

    consumer.doStop();

    // Stopping must detach the state listener and unbind/close the session.
    verify(session).removeSessionStateListener(isA(SessionStateListener.class));
    verify(session).unbindAndClose();
}
/**
 * Returns the number of failed "update application priority" calls recorded so far.
 */
@VisibleForTesting
public int getUpdateApplicationPriorityFailedRetrieved() {
    return numUpdateAppPriorityFailedRetrieved.value();
}
@Test
public void testUpdateApplicationPriorityFailed() {
    // Snapshot the failure counter, trigger one failing call, expect exactly +1.
    long failedBefore = metrics.getUpdateApplicationPriorityFailedRetrieved();

    badSubCluster.getUpdateApplicationPriority();

    Assert.assertEquals(failedBefore + 1,
        metrics.getUpdateApplicationPriorityFailedRetrieved());
}
/**
 * Throws {@link IllegalArgumentException} with the given message unless exactly
 * one of the supplied expressions is {@code true}.
 */
public static void onlyOneIsTrue(final String message, final boolean... expressions) {
    final boolean exactlyOne = onlyOneIsTrueNonThrow(expressions);
    if (!exactlyOne) {
        throw new IllegalArgumentException(message);
    }
}
// Two true values must be rejected: exactly one true expression is allowed.
@Test
public void testOnlyOneIsTrueThrow2() {
    Assertions.assertThrows(IllegalArgumentException.class, () -> Utils.onlyOneIsTrue("foo", true, true));
}
/**
 * Returns the first non-empty unique-and-stable stanza ID ('stanza-id' element in the
 * 'urn:xmpp:sid:0' namespace) whose 'by' attribute matches the given entity, or
 * {@code null} when the packet carries no such ID.
 *
 * @param packet the packet to inspect (cannot be null)
 * @param by the entity that must have stamped the ID (cannot be null or empty)
 */
public static String findFirstUniqueAndStableStanzaID( final Packet packet, final String by )
{
    if ( packet == null )
    {
        throw new IllegalArgumentException( "Argument 'packet' cannot be null." );
    }
    if ( by == null || by.isEmpty() )
    {
        throw new IllegalArgumentException( "Argument 'by' cannot be null or an empty string." );
    }

    final List<Element> sids = packet.getElement().elements( QName.get( "stanza-id", "urn:xmpp:sid:0" ) );
    if ( sids == null )
    {
        return null;
    }

    // First matching 'by' whose 'id' attribute is present and non-empty wins.
    return sids.stream()
        .filter( sid -> by.equals( sid.attributeValue( "by" ) ) )
        .map( sid -> sid.attributeValue( "id" ) )
        .filter( id -> id != null && !id.isEmpty() )
        .findFirst()
        .orElse( null );
}
@Test public void testParseUUIDValue() throws Exception { // Setup fixture. final Packet input = new Message(); final JID self = new JID( "foobar" ); final String expected = "de305d54-75b4-431b-adb2-eb6b9e546013"; final Element toOverwrite = input.getElement().addElement( "stanza-id", "urn:xmpp:sid:0" ); toOverwrite.addAttribute( "id", expected ); toOverwrite.addAttribute( "by", self.toString() ); // Execute system under test. final String result = StanzaIDUtil.findFirstUniqueAndStableStanzaID( input, self.toString() ); // Verify results. assertEquals( expected, result ); }
/**
 * Executes an INSERT VALUES statement: builds a Kafka record for the target data
 * source and produces it, translating Kafka authorization failures into
 * KSQL-flavoured exceptions with consistent messages.
 *
 * @throws KsqlException wrapping the underlying cause whenever the insert fails
 */
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
    final InsertValues insertValues = statement.getStatement();
    final MetaStore metaStore = executionContext.getMetaStore();
    final KsqlConfig config = statement.getSessionConfig().getConfig(true);

    final DataSource dataSource = getDataSource(config, metaStore, insertValues);

    validateInsert(insertValues.getColumns(), dataSource);

    final ProducerRecord<byte[], byte[]> record =
        buildRecord(statement, metaStore, dataSource, serviceContext);

    try {
        producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
    } catch (final TopicAuthorizationException e) {
        // TopicAuthorizationException does not give much detailed information about why it failed,
        // except which topics are denied. Here we just add the ACL to make the error message
        // consistent with other authorization error messages.
        final Exception rootCause = new KsqlTopicAuthorizationException(
            AclOperation.WRITE,
            e.unauthorizedTopics()
        );
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
    } catch (final ClusterAuthorizationException e) {
        // ClusterAuthorizationException is thrown when using idempotent producers
        // and either a topic write permission or a cluster-level idempotent write
        // permission (only applicable for broker versions no later than 2.8) is
        // missing. In this case, we include additional context to help the user
        // distinguish this type of failure from other permissions exceptions
        // such as the ones thrown above when TopicAuthorizationException is caught.
        throw new KsqlException(
            createInsertFailedExceptionMessage(insertValues),
            createClusterAuthorizationExceptionRootCause(dataSource)
        );
    } catch (final KafkaException e) {
        if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
            // The error message thrown when an idempotent producer is missing permissions
            // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
            // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
            // ksqlDB handles these two the same way, accordingly.
            // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
            throw new KsqlException(
                createInsertFailedExceptionMessage(insertValues),
                createClusterAuthorizationExceptionRootCause(dataSource)
            );
        } else {
            throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
        }
    } catch (final Exception e) {
        // Catch-all: wrap any other failure with a message naming the failed INSERT.
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
}
// Inserting into a source whose Protobuf schema file declares several message
// types must work when the source pins a specific one via FULL_SCHEMA_NAME.
@Test
public void shouldAllowInsertOnMultipleKeySchemaDefinitions() throws Exception {
    // A single .proto with two message definitions; the data source below selects MultiKeys.
    final String protoMultiSchema = "syntax = \"proto3\";\n" + "package io.proto;\n" + "\n" + "message SingleKey {\n" + " string k0 = 1;\n" + "}\n" + "message MultiKeys {\n" + " string k0 = 1;\n" + " string k1 = 2;\n" + "}\n";
    // Given: schema registry serves the multi-message schema under id 1.
    when(srClient.getLatestSchemaMetadata(Mockito.any())).thenReturn(new SchemaMetadata(1, 1, protoMultiSchema));
    when(srClient.getSchemaById(1)).thenReturn(new ProtobufSchema(protoMultiSchema));
    givenDataSourceWithSchema(TOPIC_NAME, SCHEMA_WITH_MUTI_KEYS, SerdeFeatures.of(SerdeFeature.SCHEMA_INFERENCE), SerdeFeatures.of(), FormatInfo.of(FormatFactory.PROTOBUF.name(), ImmutableMap.of(AvroProperties.FULL_SCHEMA_NAME, "io.proto.MultiKeys", AvroProperties.SCHEMA_ID, "1")), FormatInfo.of(FormatFactory.JSON.name()), false, false);
    final ConfiguredStatement<InsertValues> statement = givenInsertValues(ImmutableList.of(K0, K1, COL0, COL1), ImmutableList.of(new StringLiteral("K0"), new StringLiteral("K1"), new StringLiteral("V0"), new LongLiteral(21)));
    // When:
    executor.execute(statement, mock(SessionProperties.class), engine, serviceContext);
    // Then: both key columns are serialized and the record is produced once.
    verify(keySerializer).serialize(TOPIC_NAME, genericKey("K0", "K1"));
    verify(valueSerializer).serialize(TOPIC_NAME, genericRow("V0", 21L));
    verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE));
}
// Submits the action to the shared continuation executor.
// NOTE(review): CONTINUATION.get() appears to be a lazy/thread-scoped holder — confirm.
void doSubmit(final Runnable action) {
    CONTINUATION.get().submit(action);
}
@Test
public void testRecursiveOrderWithException() {
    final Continuations CONT = new Continuations();
    final StringBuilder result = new StringBuilder();
    try {
        // First submission drives a recursive post-order traversal that is
        // expected to throw "nested" partway through.
        CONT.doSubmit(() -> {
            result.append("BEGIN{");
            recursivePostOrderWithException(CONT, result, "root", 0);
        });
        // The closing marker must never be appended once the exception fires.
        CONT.doSubmit(() -> {
            result.append("}END");
        });
        fail("should have thrown exception");
    } catch (Exception e) {
        assertEquals(e.getMessage(), "nested");
    }
    // Only the nodes completed before the failure may appear, in post-order.
    assertEquals(result.toString(), "BEGIN{[done(rootLL:2)][done(rootLR:2)][done(rootL:1)][done(rootRL:2)]");
}
/**
 * Parses a match-type expression of the form {@code "type"} or
 * {@code "type=matcher"} (e.g. {@code "http_method=get"}) into a {@link MatchType}.
 */
@Override
public MatchType convert(@NotNull String type) {
    if (type.contains(DELIMITER)) {
        // NOTE(review): split without a limit drops anything after a second
        // delimiter; confirm matchers can never themselves contain it.
        String[] matchType = type.split(DELIMITER);
        // Locale.ROOT keeps the uppercase mapping stable for Enum.valueOf
        // (the default locale, e.g. Turkish, maps 'i' to a dotted capital I).
        return new MatchType(RateLimitType.valueOf(matchType[0].toUpperCase(java.util.Locale.ROOT)), matchType[1]);
    }
    // Bare type with no matcher part.
    return new MatchType(RateLimitType.valueOf(type.toUpperCase(java.util.Locale.ROOT)), null);
}
@Test
public void testConvertStringTypeHttpMethodWithMatcher() {
    // "http_method=get" must parse into type HTTP_METHOD with matcher "get".
    final MatchType converted = target.convert("http_method=get");

    assertThat(converted).isNotNull();
    assertThat(converted.getType()).isEqualByComparingTo(RateLimitType.HTTP_METHOD);
    assertThat(converted.getMatcher()).isEqualTo("get");
}
/**
 * Rejects table configs that set both replicaGroupStrategyConfig and a
 * replicaGroupPartitionConfig in any instance-assignment entry.
 */
@VisibleForTesting
static void validatePartitionedReplicaGroupInstance(TableConfig tableConfig) {
    // Nothing to validate unless both a strategy config and assignment configs exist.
    if (tableConfig.getValidationConfig().getReplicaGroupStrategyConfig() == null
        || MapUtils.isEmpty(tableConfig.getInstanceAssignmentConfigMap())) {
        return;
    }
    // The two configuration styles are mutually exclusive.
    for (Map.Entry<String, InstanceAssignmentConfig> entry
        : tableConfig.getInstanceAssignmentConfigMap().entrySet()) {
        Preconditions.checkState(entry.getValue().getReplicaGroupPartitionConfig() == null,
            "Both replicaGroupStrategyConfig and replicaGroupPartitionConfig is provided");
    }
}
@Test
public void testValidatePartitionedReplicaGroupInstance() {
    String partitionColumn = "testPartitionCol";
    ReplicaGroupStrategyConfig replicaGroupStrategyConfig = new ReplicaGroupStrategyConfig(partitionColumn, 2);
    TableConfig tableConfigWithoutReplicaGroupStrategyConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
    // Call validate with a table-config without replicaGroupStrategyConfig or replicaGroupPartitionConfig.
    TableConfigUtils.validatePartitionedReplicaGroupInstance(tableConfigWithoutReplicaGroupStrategyConfig);
    TableConfig tableConfigWithReplicaGroupStrategyConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
    tableConfigWithReplicaGroupStrategyConfig.getValidationConfig().setReplicaGroupStrategyConfig(replicaGroupStrategyConfig);
    // Call validate with a table-config with replicaGroupStrategyConfig and without replicaGroupPartitionConfig.
    TableConfigUtils.validatePartitionedReplicaGroupInstance(tableConfigWithReplicaGroupStrategyConfig);
    // Build a config that illegally combines both configuration styles.
    InstanceAssignmentConfig instanceAssignmentConfig = Mockito.mock(InstanceAssignmentConfig.class);
    InstanceReplicaGroupPartitionConfig instanceReplicaGroupPartitionConfig = new InstanceReplicaGroupPartitionConfig(true, 0, 0, 0, 2, 0, false, partitionColumn);
    Mockito.doReturn(instanceReplicaGroupPartitionConfig).when(instanceAssignmentConfig).getReplicaGroupPartitionConfig();
    TableConfig invalidTableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).setInstanceAssignmentConfigMap(ImmutableMap.of(TableType.OFFLINE.toString(), instanceAssignmentConfig)).build();
    invalidTableConfig.getValidationConfig().setReplicaGroupStrategyConfig(replicaGroupStrategyConfig);
    try {
        // Call validate with a table-config with replicaGroupStrategyConfig and replicaGroupPartitionConfig.
        TableConfigUtils.validatePartitionedReplicaGroupInstance(invalidTableConfig);
        Assert.fail("Validation should have failed since both replicaGroupStrategyConfig " + "and replicaGroupPartitionConfig are set");
    } catch (IllegalStateException ignored) {
        // Expected: the combination is rejected.
    }
}
/**
 * Loads the sender section of the given notifier's configuration, emitting an
 * empty object node when no sender config is present.
 */
@Override
public Mono<ObjectNode> fetchSenderConfig(String notifierDescriptorName) {
    // Falling back to an empty node (rather than an empty Mono) lets callers
    // read properties without null checks.
    final ObjectNode emptyNode = JsonNodeFactory.instance.objectNode();
    return fetchConfig(notifierDescriptorName)
        .mapNotNull(config -> (ObjectNode) config.get(SENDER_KEY))
        .defaultIfEmpty(emptyNode);
}
@Test
void fetchSenderConfigTest() {
    // Stub fetchConfig(...) so only the sender-key extraction is under test.
    var objectNode = mock(ObjectNode.class);
    var spyNotifierConfigStore = spy(notifierConfigStore);
    doReturn(Mono.just(objectNode)).when(spyNotifierConfigStore)
        .fetchConfig(eq("fake-notifier"));
    var senderConfig = mock(ObjectNode.class);
    when(objectNode.get(eq(DefaultNotifierConfigStore.SENDER_KEY))).thenReturn(senderConfig);

    // The node stored under SENDER_KEY must be emitted unchanged.
    spyNotifierConfigStore.fetchSenderConfig("fake-notifier")
        .as(StepVerifier::create)
        .consumeNextWith(actual -> assertThat(actual).isEqualTo(senderConfig))
        .verifyComplete();
    verify(objectNode).get(eq(DefaultNotifierConfigStore.SENDER_KEY));
}
/**
 * Combines the distribution map and the original partition schemas into a hash.
 */
@Override
public int hashCode() {
    return Objects.hash(
        distribution, originalPartitionSchemas
    );
}
// A default-constructed distribution must hash the same as the fixture instance.
// NOTE(review): assumes the slaveStep fixture is default-initialized — confirm in setup.
@Test
public void hashCodeEqualsTest() {
    SlaveStepCopyPartitionDistribution other = new SlaveStepCopyPartitionDistribution();
    Assert.assertEquals( slaveStep.hashCode(), other.hashCode() );
}
/**
 * Entry point for the federation CLI: validates that federation is enabled,
 * then dispatches the first argument to the matching command handler.
 */
@Override
public int run(String[] args) throws Exception {
    // Work on a YARN view of the configuration (a fresh one when none was set).
    YarnConfiguration yarnConf =
        getConf() == null ? new YarnConfiguration() : new YarnConfiguration(getConf());
    boolean isFederationEnabled = yarnConf.getBoolean(YarnConfiguration.FEDERATION_ENABLED,
        YarnConfiguration.DEFAULT_FEDERATION_ENABLED);

    // No arguments, or federation disabled: print usage and fail.
    if (args.length < 1 || !isFederationEnabled) {
        printUsage(CMD_EMPTY);
        return EXIT_ERROR;
    }

    String cmd = args[0];
    if (CMD_HELP.equals(cmd)) {
        if (args.length > 1) {
            printUsage(args[1]);
        } else {
            printHelp();
        }
        return EXIT_SUCCESS;
    }
    if (CMD_SUBCLUSTER.equals(cmd)) {
        return handleSubCluster(args);
    }
    if (CMD_POLICY.equals(cmd)) {
        return handlePolicy(args);
    }
    if (CMD_APPLICATION.equals(cmd)) {
        return handleApplication(args);
    }

    // Unknown command: report it and show general help, still exiting successfully.
    System.out.println("No related commands found.");
    printHelp();
    return EXIT_SUCCESS;
}
@Test
public void testDeregisterSubClusters() throws Exception {
    // Capture stdout so command output does not pollute the test log.
    PrintStream oldOutPrintStream = System.out;
    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
    System.setOut(new PrintStream(dataOut));
    try {
        // Every argument shape (missing, short, long, empty value) must exit 0.
        String[] args = {"-deregisterSubCluster"};
        assertEquals(0, rmAdminCLI.run(args));

        args = new String[]{"-deregisterSubCluster", "-sc"};
        assertEquals(0, rmAdminCLI.run(args));

        args = new String[]{"-deregisterSubCluster", "--sc", ""};
        assertEquals(0, rmAdminCLI.run(args));

        args = new String[]{"-deregisterSubCluster", "--subClusterId"};
        assertEquals(0, rmAdminCLI.run(args));

        args = new String[]{"-deregisterSubCluster", "--subClusterId", ""};
        assertEquals(0, rmAdminCLI.run(args));
    } finally {
        // Bug fix: the original never restored System.out, leaking the
        // redirected stream into every subsequent test in the JVM.
        System.setOut(oldOutPrintStream);
    }
}
/**
 * Creates (if necessary) and returns an output file named
 * {@code baseFileName.<timestamp>} inside {@code path}, creating the directory first.
 *
 * @throws IOException if the file cannot be created
 */
protected File getOutputFile(final String path, final String baseFileName) throws IOException {
    // Ensure the target directory exists before touching the file.
    makeDir(path);

    // A second-resolution timestamp suffix keeps successive output files distinct.
    final String timestamp = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date());
    final File outputFile = Paths.get(path, baseFileName + "." + timestamp).toFile();

    if (!outputFile.exists() && !outputFile.createNewFile()) {
        throw new IOException("Fail to create file: " + outputFile);
    }
    return outputFile;
}
@Test
public void testGetOutputFileWithAbsolutePath() throws IOException {
    // An absolute target path must be honoured and the file created on disk.
    final String dir = Paths.get("cde").toAbsolutePath().toString();
    final File created = getOutputFile(dir, "test3.log");

    assertTrue(created.exists());
    // Clean up the directory created as a side effect.
    FileUtils.forceDelete(new File(dir));
}
/**
 * Deletes the home-sub-cluster mapping of the given application from the
 * federation state store.
 *
 * @param request must carry a non-blank application id
 * @return a response whose message confirms the deleted application id
 * @throws YarnException on a missing/blank request or any deletion failure
 */
@Override
public DeleteFederationApplicationResponse deleteFederationApplication(
    DeleteFederationApplicationRequest request) throws YarnException, IOException {
    // Parameter validation (each failure is also counted in the router metrics).
    if (request == null) {
        routerMetrics.incrDeleteFederationApplicationFailedRetrieved();
        RouterServerUtil.logAndThrowException(
            "Missing deleteFederationApplication Request.", null);
    }
    String application = request.getApplication();
    if (StringUtils.isBlank(application)) {
        routerMetrics.incrDeleteFederationApplicationFailedRetrieved();
        RouterServerUtil.logAndThrowException(
            "ApplicationId cannot be null.", null);
    }
    // Try calling deleteApplicationHomeSubCluster to delete the application.
    try {
        long startTime = clock.getTime();
        ApplicationId applicationId = ApplicationId.fromString(application);
        federationFacade.deleteApplicationHomeSubCluster(applicationId);
        long stopTime = clock.getTime();
        // Record the latency of the successful deletion.
        routerMetrics.succeededDeleteFederationApplicationFailedRetrieved(stopTime - startTime);
        return DeleteFederationApplicationResponse.newInstance(
            "applicationId = " + applicationId + " delete success.");
    } catch (Exception e) {
        RouterServerUtil.logAndThrowException(e,
            "Unable to deleteFederationApplication due to exception. " + e.getMessage());
    }
    // Unreachable when logAndThrowException throws; keeps the compiler satisfied.
    throw new YarnException("Unable to deleteFederationApplication.");
}
@Test
public void testDeleteFederationApplication() throws Exception {
    // Deleting an application with no home-sub-cluster mapping must fail.
    ApplicationId applicationId = ApplicationId.newInstance(10, 1);
    DeleteFederationApplicationRequest request1 =
        DeleteFederationApplicationRequest.newInstance(applicationId.toString());
    LambdaTestUtils.intercept(YarnException.class,
        "Application application_10_0001 does not exist.",
        () -> interceptor.deleteFederationApplication(request1));

    // Register a mapping, then deletion must succeed with a confirmation message.
    ApplicationId applicationId2 = ApplicationId.newInstance(10, 2);
    SubClusterId homeSubCluster = SubClusterId.newInstance("SC-1");
    ApplicationHomeSubCluster appHomeSubCluster =
        ApplicationHomeSubCluster.newInstance(applicationId2, homeSubCluster);
    facade.addApplicationHomeSubCluster(appHomeSubCluster);
    DeleteFederationApplicationRequest request2 =
        DeleteFederationApplicationRequest.newInstance(applicationId2.toString());
    DeleteFederationApplicationResponse deleteFederationApplicationResponse =
        interceptor.deleteFederationApplication(request2);
    assertNotNull(deleteFederationApplicationResponse);
    assertEquals("applicationId = " + applicationId2 + " delete success.",
        deleteFederationApplicationResponse.getMessage());
}
/**
 * Previews the fire times of a schedule definition without persisting it.
 *
 * @param loginUser current login user, injected from the session
 * @param schedule schedule definition JSON (startTime/endTime/crontab)
 * @return the preview outcome wrapped in a {@code Result}
 */
@Operation(summary = "previewSchedule", description = "PREVIEW_SCHEDULE_NOTES")
@Parameters({
    @Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")),
})
@PostMapping("/preview")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(PREVIEW_SCHEDULE_ERROR)
public Result previewSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
                              @RequestParam(value = "schedule") String schedule) {
    Map<String, Object> result = schedulerService.previewSchedule(loginUser, schedule);
    return returnDataList(result);
}
@Test
public void testPreviewSchedule() throws Exception {
    // Service layer is stubbed to succeed for any user/schedule combination.
    Mockito.when(schedulerService.previewSchedule(isA(User.class), isA(String.class)))
            .thenReturn(success());
    // POST the preview request; the controller must answer 201 with a JSON body.
    MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/preview", 123)
            .header(SESSION_ID, sessionId)
            .param("schedule", "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *','timezoneId':'Asia/Shanghai'}"))
            .andExpect(status().isCreated())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
    logger.info(mvcResult.getResponse().getContentAsString());
}