Columns: focal_method (string, lengths 13 to 60.9k) and test_case (string, lengths 25 to 109k). The rows below alternate between a focal method and its corresponding test case.
@Override
public Collection<DatabasePacket> execute() {
    SQLParserEngine sqlParserEngine = createShardingSphereSQLParserEngine(connectionSession.getUsedDatabaseName());
    String sql = packet.getSQL();
    SQLStatement sqlStatement = sqlParserEngine.parse(sql, true);
    String escapedSql = escape(sqlStatement, sql);
    if (!escapedSql.equalsIgnoreCase(sql)) {
        sqlStatement = sqlParserEngine.parse(escapedSql, true);
        sql = escapedSql;
    }
    List<Integer> actualParameterMarkerIndexes = new ArrayList<>();
    if (sqlStatement.getParameterCount() > 0) {
        List<ParameterMarkerSegment> parameterMarkerSegments =
                new ArrayList<>(((AbstractSQLStatement) sqlStatement).getParameterMarkerSegments());
        for (ParameterMarkerSegment each : parameterMarkerSegments) {
            actualParameterMarkerIndexes.add(each.getParameterIndex());
        }
        sql = convertSQLToJDBCStyle(parameterMarkerSegments, sql);
        sqlStatement = sqlParserEngine.parse(sql, true);
    }
    List<PostgreSQLColumnType> paddedColumnTypes = paddingColumnTypes(sqlStatement.getParameterCount(), packet.readParameterTypes());
    SQLStatementContext sqlStatementContext = sqlStatement instanceof DistSQLStatement
            ? new DistSQLStatementContext((DistSQLStatement) sqlStatement)
            : new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(),
                    connectionSession.getCurrentDatabaseName(), packet.getHintValueContext())
                    .bind(sqlStatement, Collections.emptyList());
    PostgreSQLServerPreparedStatement serverPreparedStatement = new PostgreSQLServerPreparedStatement(
            sql, sqlStatementContext, packet.getHintValueContext(), paddedColumnTypes, actualParameterMarkerIndexes);
    connectionSession.getServerPreparedStatementRegistry().addPreparedStatement(packet.getStatementId(), serverPreparedStatement);
    return Collections.singleton(PostgreSQLParseCompletePacket.getInstance());
}
@Test
void assertExecuteWithNonOrderedParameterizedSQL() throws ReflectiveOperationException {
    final String rawSQL = "update t_test set name=$2 where id=$1";
    final String expectedSQL = "update t_test set name=? where id=?";
    final String statementId = "S_2";
    when(parsePacket.getSQL()).thenReturn(rawSQL);
    when(parsePacket.getHintValueContext()).thenReturn(new HintValueContext());
    when(parsePacket.getStatementId()).thenReturn(statementId);
    when(parsePacket.readParameterTypes()).thenReturn(Arrays.asList(PostgreSQLColumnType.JSON, PostgreSQLColumnType.INT4));
    Plugins.getMemberAccessor().set(PostgreSQLComParseExecutor.class.getDeclaredField("connectionSession"), executor, connectionSession);
    ContextManager contextManager = mockContextManager();
    when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager);
    executor.execute();
    PostgreSQLServerPreparedStatement actualPreparedStatement =
            connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(statementId);
    assertThat(actualPreparedStatement.getSql(), is(expectedSQL));
    assertThat(actualPreparedStatement.getParameterTypes(), is(Arrays.asList(PostgreSQLColumnType.JSON, PostgreSQLColumnType.INT4)));
    assertThat(actualPreparedStatement.getActualParameterMarkerIndexes(), is(Arrays.asList(1, 0)));
}
protected static KeyPair signWithRsa() {
    KeyPair keyPair = null;
    try {
        KeyPairGenerator kpGenerator = KeyPairGenerator.getInstance("RSA");
        kpGenerator.initialize(4096);
        java.security.KeyPair keypair = kpGenerator.generateKeyPair();
        PublicKey publicKey = keypair.getPublic();
        PrivateKey privateKey = keypair.getPrivate();
        ContentSigner signer = new JcaContentSignerBuilder("SHA256WithRSA").build(keypair.getPrivate());
        keyPair = new KeyPair(publicKey, privateKey, signer);
    } catch (NoSuchAlgorithmException | OperatorCreationException e) {
        logger.error(CONFIG_SSL_CERT_GENERATE_FAILED, "", "",
                "Generate Key with SHA256WithRSA algorithm failed. Please check if your system support.", e);
    }
    return keyPair;
}
@Test
void testSignWithRsa() {
    DubboCertManager.KeyPair keyPair = DubboCertManager.signWithRsa();
    Assertions.assertNotNull(keyPair);
    Assertions.assertNotNull(keyPair.getPrivateKey());
    Assertions.assertNotNull(keyPair.getPublicKey());
    Assertions.assertNotNull(keyPair.getSigner());
}
@Override
@CacheEvict(cacheNames = RedisKeyConstants.NOTIFY_TEMPLATE, allEntries = true)
// allEntries clears the whole cache, because the id is not the cache key (code) itself, so a single entry is hard to evict
public void deleteNotifyTemplate(Long id) {
    // verify it exists
    validateNotifyTemplateExists(id);
    // delete
    notifyTemplateMapper.deleteById(id);
}
@Test
public void testDeleteNotifyTemplate_success() {
    // mock data
    NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class);
    notifyTemplateMapper.insert(dbNotifyTemplate); // @Sql: first insert an existing record
    // prepare the parameter
    Long id = dbNotifyTemplate.getId();
    // invoke
    notifyTemplateService.deleteNotifyTemplate(id);
    // verify the record no longer exists
    assertNull(notifyTemplateMapper.selectById(id));
}
public static String getS3EncryptionContext(String bucket, Configuration conf) throws IOException {
    // look up the per-bucket value of the encryption context
    String encryptionContext = S3AUtils.lookupBucketSecret(bucket, conf, S3_ENCRYPTION_CONTEXT);
    if (encryptionContext == null) {
        // look up the global value of the encryption context
        encryptionContext = S3AUtils.lookupPassword(null, conf, S3_ENCRYPTION_CONTEXT);
    }
    if (encryptionContext == null) {
        // no encryption context, return ""
        return "";
    }
    return encryptionContext;
}
@Test
public void testGetS3EncryptionContextPerBucket() throws IOException {
    Configuration configuration = new Configuration(false);
    configuration.set("fs.s3a.bucket.bucket1.encryption.context", BUCKET_CONTEXT);
    configuration.set(S3_ENCRYPTION_CONTEXT, GLOBAL_CONTEXT);
    final String result = S3AEncryption.getS3EncryptionContext("bucket1", configuration);
    Assert.assertEquals(BUCKET_CONTEXT, result);
}
static void parseServerIpAndPort(MysqlConnection connection, Span span) {
    try {
        URI url = URI.create(connection.getURL().substring(5)); // strip "jdbc:"
        String remoteServiceName = connection.getProperties().getProperty("zipkinServiceName");
        if (remoteServiceName == null || "".equals(remoteServiceName)) {
            String databaseName = getDatabaseName(connection);
            if (databaseName != null && !databaseName.isEmpty()) {
                remoteServiceName = "mysql-" + databaseName;
            } else {
                remoteServiceName = "mysql";
            }
        }
        span.remoteServiceName(remoteServiceName);
        String host = getHost(connection);
        if (host != null) {
            span.remoteIpAndPort(host, url.getPort() == -1 ? 3306 : url.getPort());
        }
    } catch (Exception e) {
        // remote address is optional
    }
}
@Test
void parseServerIpAndPort_serviceNameFromDatabaseName() throws SQLException {
    setupAndReturnPropertiesForHost("1.2.3.4");
    when(connection.getCatalog()).thenReturn("mydatabase");
    TracingQueryInterceptor.parseServerIpAndPort(connection, span);
    verify(span).remoteServiceName("mysql-mydatabase");
    verify(span).remoteIpAndPort("1.2.3.4", 5555);
}
@Override
public void writeAttribute(String prefix, String namespaceURI, String localName, String value) throws XMLStreamException {
    String filteredValue = nonXmlCharFilterer.filter(value);
    writer.writeAttribute(prefix, namespaceURI, localName, filteredValue);
}
@Test
public void testWriteAttribute2Args() throws XMLStreamException {
    filteringXmlStreamWriter.writeAttribute("localName", "value");
    verify(xmlStreamWriterMock).writeAttribute("localName", "filteredValue");
}
@Override
public double cdf(double x) {
    if (x <= 0) {
        return 0.0;
    } else if (x >= 1) {
        return 1.0;
    } else {
        return Beta.regularizedIncompleteBetaFunction(alpha, beta, x);
    }
}
@Test
public void testCdf() {
    System.out.println("cdf");
    BetaDistribution instance = new BetaDistribution(2, 5);
    instance.rand();
    assertEquals(0, instance.cdf(-0.1), 1E-5);
    assertEquals(0, instance.cdf(0.0), 1E-5);
    assertEquals(0.114265, instance.cdf(0.1), 1E-5);
    assertEquals(0.34464, instance.cdf(0.2), 1E-5);
    assertEquals(0.579825, instance.cdf(0.3), 1E-5);
    assertEquals(0.76672, instance.cdf(0.4), 1E-5);
    assertEquals(0.890625, instance.cdf(0.5), 1E-5);
    assertEquals(1.0, instance.cdf(1.0), 1E-5);
    assertEquals(1.0, instance.cdf(1.5), 1E-5);
}
@Override
public List<Intent> compile(PointToPointIntent intent, List<Intent> installable) {
    log.trace("compiling {} {}", intent, installable);
    ConnectPoint ingressPoint = intent.filteredIngressPoint().connectPoint();
    ConnectPoint egressPoint = intent.filteredEgressPoint().connectPoint();
    // TODO: handle protected path case with suggested path!!
    // Idea: use suggested path as primary and another path from path service as protection
    if (intent.suggestedPath() != null && intent.suggestedPath().size() > 0) {
        Path path = new DefaultPath(PID, intent.suggestedPath(), new ScalarWeight(1));
        // Check intent constraints against suggested path and suggested path availability
        if (checkPath(path, intent.constraints()) && pathAvailable(intent)) {
            allocateIntentBandwidth(intent, path);
            return asList(createLinkCollectionIntent(ImmutableSet.copyOf(intent.suggestedPath()), DEFAULT_COST, intent));
        }
    }
    if (ingressPoint.deviceId().equals(egressPoint.deviceId())) {
        return createZeroHopLinkCollectionIntent(intent);
    }
    // proceed with no protected paths
    if (!ProtectionConstraint.requireProtectedPath(intent)) {
        return createUnprotectedLinkCollectionIntent(intent);
    }
    try {
        // attempt to compute and implement backup path
        return createProtectedIntent(ingressPoint, egressPoint, intent, installable);
    } catch (PathNotFoundException e) {
        log.warn("Could not find disjoint Path for {}", intent);
        // no disjoint path extant -- maximum one path exists between devices
        return createSinglePathIntent(ingressPoint, egressPoint, intent, installable);
    }
}
@Test
public void testSuggestedPathNotAvailable() {
    String[] suggestedPathHops = {S1, S3, S8};
    String[] shortestPath = {S1, S2, S8};
    List<Link> suggestedPath = NetTestTools.createPath(suggestedPathHops).links();
    PointToPointIntent intent = makeIntentSuggestedPath(new ConnectPoint(DID_1, PORT_1),
            new ConnectPoint(DID_8, PORT_2), suggestedPath);
    String[][] path = {shortestPath};
    PointToPointIntentCompiler compiler = makeCompilerSuggestedPath(path);
    List<Intent> result = compiler.compile(intent, null);
    assertThat(result, is(Matchers.notNullValue()));
    assertThat(result, hasSize(1));
    Intent resultIntent = result.get(0);
    assertThat(resultIntent instanceof LinkCollectionIntent, is(true));
    if (resultIntent instanceof LinkCollectionIntent) {
        LinkCollectionIntent resultLinkIntent = (LinkCollectionIntent) resultIntent;
        FilteredConnectPoint ingressPoint = new FilteredConnectPoint(new ConnectPoint(DID_1, PORT_1));
        FilteredConnectPoint egressPoint = new FilteredConnectPoint(new ConnectPoint(DID_8, PORT_2));
        // one link per hop along the shortest path S1-S2-S8
        assertThat(resultLinkIntent.links(), hasSize(shortestPath.length - 1));
        assertThat(resultLinkIntent.links(), linksHasPath(S1, S2));
        assertThat(resultLinkIntent.links(), linksHasPath(S2, S8));
        assertThat(resultLinkIntent.filteredIngressPoints(), is(ImmutableSet.of(ingressPoint)));
        assertThat(resultLinkIntent.filteredEgressPoints(), is(ImmutableSet.of(egressPoint)));
    }
    assertThat("key is inherited", resultIntent.key(), is(intent.key()));
}
@Override
public String toString() {
    return MoreObjects.toStringHelper(this)
            .add("name", name())
            .add("monitorsrcports", monitorSrcPorts())
            .add("monitordstports", monitorDstPorts())
            .add("monitorvlans", monitorVlans())
            .add("mirrorport", mirrorPort())
            .add("mirrorvlan", mirrorVlan())
            .toString();
}
@Test
public void testToString() {
    String result = md1.toString();
    assertThat(result, notNullValue());
    assertThat(result, containsString("name=" + NAME_1.toString()));
    assertThat(result, containsString("monitorsrcports=" + MONITOR_SRC_PORTS_1.toString()));
    assertThat(result, containsString("monitordstports=" + MONITOR_DST_PORTS_1.toString()));
    assertThat(result, containsString("monitorvlans=" + MONITOR_VLANS_1.toString()));
    assertThat(result, containsString("mirrorport=" + MIRROR_PORT_1.toString()));
    assertThat(result, containsString("mirrorvlan=" + MIRROR_VLAN_1.toString()));
}
@Override
public void checkBeforeUpdate(final CreateEncryptRuleStatement sqlStatement) {
    if (!sqlStatement.isIfNotExists()) {
        checkDuplicateRuleNames(sqlStatement);
    }
    checkColumnNames(sqlStatement);
    checkAlgorithmTypes(sqlStatement);
    checkToBeCreatedEncryptors(sqlStatement);
    checkDataSources();
}
@Test
void assertCheckSQLStatementWithDuplicateEncryptRule() {
    EncryptRule rule = mock(EncryptRule.class);
    when(rule.getAllTableNames()).thenReturn(Arrays.asList("t_user", "t_order"));
    executor.setRule(rule);
    assertThrows(DuplicateRuleException.class, () -> executor.checkBeforeUpdate(createSQLStatement(false, "MD5")));
}
public static void verifyIncrementPubContent(String content) {
    if (content == null || content.length() == 0) {
        throw new IllegalArgumentException("The content for publishing or deleting cannot be null!");
    }
    for (int i = 0; i < content.length(); i++) {
        char c = content.charAt(i);
        if (c == '\r' || c == '\n') {
            throw new IllegalArgumentException("The content for publishing or deleting cannot contain enter and next line symbol!");
        }
        if (c == Constants.WORD_SEPARATOR.charAt(0)) {
            throw new IllegalArgumentException("The content for publishing or deleting cannot contain (char)2!");
        }
    }
}
@Test
void testVerifyIncrementPubContent() {
    String content = "";
    try {
        ContentUtils.verifyIncrementPubContent(content);
        fail();
    } catch (IllegalArgumentException e) {
        assertNotNull(e.toString());
    }
    content = "\r";
    try {
        ContentUtils.verifyIncrementPubContent(content);
        fail();
    } catch (IllegalArgumentException e) {
        assertNotNull(e.toString());
    }
    content = "\n";
    try {
        ContentUtils.verifyIncrementPubContent(content);
        fail();
    } catch (IllegalArgumentException e) {
        assertNotNull(e.toString());
    }
    content = Constants.WORD_SEPARATOR + "test";
    try {
        ContentUtils.verifyIncrementPubContent(content);
        fail();
    } catch (IllegalArgumentException e) {
        assertNotNull(e.toString());
    }
}
public static KsqlWindowExpression parseWindowExpression(final String expressionText) {
    final ParserRuleContext parseTree = GrammarParseUtil.getParseTree(expressionText, SqlBaseParser::windowExpression);
    final WindowExpression windowExpression = new AstBuilder(TypeRegistry.EMPTY).buildWindowExpression(parseTree);
    return windowExpression.getKsqlWindowExpression();
}
@Test
public void shouldParseWindowExpression() {
    // When:
    final KsqlWindowExpression parsed = ExpressionParser.parseWindowExpression("TUMBLING (SIZE 1 DAYS)");
    // Then:
    assertThat(parsed, equalTo(new TumblingWindowExpression(parsed.getLocation(),
            new WindowTimeClause(1, TimeUnit.DAYS), Optional.empty(), Optional.empty())));
}
public RingbufferStoreConfig setFactoryClassName(@Nonnull String factoryClassName) {
    this.factoryClassName = checkHasText(factoryClassName, "Ringbuffer store factory class name must contain text");
    this.factoryImplementation = null;
    return this;
}
@Test
public void setFactoryClassName() {
    config.setFactoryClassName("myFactoryClassName");
    assertEquals("myFactoryClassName", config.getFactoryClassName());
}
@Override
public void changeLimitForPeriod(final int limitForPeriod) {
    RateLimiterConfig newConfig = RateLimiterConfig.from(state.get().config)
            .limitForPeriod(limitForPeriod)
            .build();
    state.updateAndGet(currentState -> new State(
            newConfig, currentState.activeCycle, currentState.activePermissions, currentState.nanosToWait));
}
@Test
public void changeLimitForPeriod() throws Exception {
    setup(Duration.ZERO);
    RateLimiterConfig rateLimiterConfig = rateLimiter.getRateLimiterConfig();
    then(rateLimiterConfig.getTimeoutDuration()).isEqualTo(Duration.ZERO);
    then(rateLimiterConfig.getLimitForPeriod()).isEqualTo(PERMISSIONS_RER_CYCLE);
    then(rateLimiterConfig.getLimitRefreshPeriod()).isEqualTo(Duration.ofNanos(CYCLE_IN_NANOS));
    rateLimiter.changeLimitForPeriod(35);
    then(rateLimiterConfig != rateLimiter.getRateLimiterConfig()).isTrue();
    rateLimiterConfig = rateLimiter.getRateLimiterConfig();
    then(rateLimiterConfig.getTimeoutDuration()).isEqualTo(Duration.ZERO);
    then(rateLimiterConfig.getLimitForPeriod()).isEqualTo(35);
    then(rateLimiterConfig.getLimitRefreshPeriod()).isEqualTo(Duration.ofNanos(CYCLE_IN_NANOS));
}
public static Write write() { return new Write(null /* Configuration */, ""); }
@Test
public void testWriteBuildsCorrectly() {
    HBaseIO.Write write = HBaseIO.write().withConfiguration(conf).withTableId("table");
    assertEquals("table", write.getTableId());
    assertNotNull("configuration", write.getConfiguration());
}
DefaultHttp2FrameStream newStream() { return new DefaultHttp2FrameStream(); }
@Test
public void multipleNewOutboundStreamsShouldBeBuffered() throws Exception {
    // We use a limit of 1 and then increase it step by step.
    setUp(Http2FrameCodecBuilder.forServer().encoderEnforceMaxConcurrentStreams(true),
            new Http2Settings().maxConcurrentStreams(1));
    Http2FrameStream stream1 = frameCodec.newStream();
    Http2FrameStream stream2 = frameCodec.newStream();
    Http2FrameStream stream3 = frameCodec.newStream();
    ChannelPromise promise1 = channel.newPromise();
    ChannelPromise promise2 = channel.newPromise();
    ChannelPromise promise3 = channel.newPromise();
    channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream1), promise1);
    channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream2), promise2);
    channel.writeAndFlush(new DefaultHttp2HeadersFrame(new DefaultHttp2Headers()).stream(stream3), promise3);
    assertTrue(isStreamIdValid(stream1.id()));
    channel.runPendingTasks();
    assertTrue(isStreamIdValid(stream2.id()));
    assertTrue(promise1.syncUninterruptibly().isSuccess());
    assertFalse(promise2.isDone());
    assertFalse(promise3.isDone());
    // Increase concurrent streams limit to 2
    frameInboundWriter.writeInboundSettings(new Http2Settings().maxConcurrentStreams(2));
    channel.flush();
    // As we increased the limit to 2, the second frame should also have succeeded.
    assertTrue(promise2.syncUninterruptibly().isSuccess());
    assertFalse(promise3.isDone());
    frameInboundWriter.writeInboundSettings(new Http2Settings().maxConcurrentStreams(3));
    channel.flush();
    // With the max streams of 3 all streams should succeed now.
    assertTrue(promise3.syncUninterruptibly().isSuccess());
    assertFalse(channel.finishAndReleaseAll());
}
@Override
public CompletableFuture<Boolean> triggerCheckpointAsync(
        CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
    if (!isExternallyInducedSource()) {
        return triggerCheckpointNowAsync(checkpointMetaData, checkpointOptions);
    }
    CompletableFuture<Boolean> triggerFuture = new CompletableFuture<>();
    // immediately move RPC to mailbox so we don't need to synchronize fields
    mainMailboxExecutor.execute(
            () -> triggerCheckpointOnExternallyInducedSource(checkpointMetaData, checkpointOptions, triggerFuture),
            "SourceOperatorStreamTask#triggerCheckpointAsync(%s, %s)",
            checkpointMetaData,
            checkpointOptions);
    return triggerFuture;
}
@Test
void testTriggeringStopWithSavepointWithDrain() throws Exception {
    SourceOperatorFactory<Integer> sourceOperatorFactory = new SourceOperatorFactory<>(
            new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 2), WatermarkStrategy.noWatermarks());
    CompletableFuture<Boolean> checkpointCompleted = new CompletableFuture<>();
    CheckpointResponder checkpointResponder = new TestCheckpointResponder() {
        @Override
        public void acknowledgeCheckpoint(
                JobID jobID,
                ExecutionAttemptID executionAttemptID,
                long checkpointId,
                CheckpointMetrics checkpointMetrics,
                TaskStateSnapshot subtaskState) {
            super.acknowledgeCheckpoint(jobID, executionAttemptID, checkpointId, checkpointMetrics, subtaskState);
            checkpointCompleted.complete(null);
        }
    };
    try (StreamTaskMailboxTestHarness<Integer> testHarness = new StreamTaskMailboxTestHarnessBuilder<>(
            SourceOperatorStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
            .setupOutputForSingletonOperatorChain(sourceOperatorFactory)
            .setCheckpointResponder(checkpointResponder)
            .build()) {
        CompletableFuture<Boolean> triggerResult = testHarness.streamTask.triggerCheckpointAsync(
                new CheckpointMetaData(2, 2),
                CheckpointOptions.alignedNoTimeout(
                        SavepointType.terminate(SavepointFormatType.CANONICAL), SAVEPOINT_LOCATION));
        checkpointCompleted.whenComplete(
                (ignored, exception) -> testHarness.streamTask.notifyCheckpointCompleteAsync(2));
        testHarness.waitForTaskCompletion();
        testHarness.finishProcessing();
        assertThat(triggerResult.isDone()).isTrue();
        assertThat(triggerResult.get()).isTrue();
        assertThat(checkpointCompleted.isDone()).isTrue();
    }
}
public static String hex(byte[] bytes) { return StringUtils.encodeHex(bytes(bytes)); }
@Test
public void testHash() {
    // Test null
    // @TODO - should the StringUtils.hash(String) method be fixed to handle null input?
    try {
        SHA1.hex((String) null);
        fail();
    } catch (NullPointerException npe) {
        assertTrue(true);
    }
    // Test empty String
    String result = SHA1.hex("");
    assertEquals("da39a3ee5e6b4b0d3255bfef95601890afd80709", result);
    // Test a known hash
    String adminInHash = "d033e22ae348aeb5660fc2140aec35850c4da997";
    result = SHA1.hex("admin");
    assertEquals(adminInHash, result);
    // Test a random String - make sure all resulting characters are valid hash characters
    // and that the returned string is 40 characters long (a SHA-1 hex digest).
    String random = "jive software blah and stuff this is pretty cool";
    result = SHA1.hex(random);
    assertTrue(isValidHash(result));
    // Test junk input:
    String junk = "\n\n\t\b\r!@(!)^(#)@+_-\u2031\u09291\u00A9\u00BD\u0394\u00F8";
    result = SHA1.hex(junk);
    assertTrue(isValidHash(result));
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    if (listClass != null || inner != null) {
        log.error("Could not configure ListDeserializer as some parameters were already set -- listClass: {}, inner: {}", listClass, inner);
        throw new ConfigException("List deserializer was already initialized using a non-default constructor");
    }
    configureListClass(configs, isKey);
    configureInnerSerde(configs, isKey);
}
@Test
public void testListKeyDeserializerShouldThrowConfigExceptionDueAlreadyInitialized() {
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_TYPE_CLASS, ArrayList.class);
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS, Serdes.StringSerde.class);
    final ListDeserializer<Integer> initializedListDeserializer =
            new ListDeserializer<>(ArrayList.class, Serdes.Integer().deserializer());
    final ConfigException exception = assertThrows(
            ConfigException.class,
            () -> initializedListDeserializer.configure(props, true));
    assertEquals("List deserializer was already initialized using a non-default constructor", exception.getMessage());
}
public CacheConfig<K, V> setBackupCount(int backupCount) {
    this.backupCount = checkBackupCount(backupCount, asyncBackupCount);
    return this;
}
@Test(expected = IllegalArgumentException.class)
public void setBackupCount_whenItsNegative() {
    CacheConfig config = new CacheConfig();
    config.setBackupCount(-1);
}
@Override
public TImmutablePartitionResult updateImmutablePartition(TImmutablePartitionRequest request) throws TException {
    LOG.info("Receive update immutable partition: {}", request);
    TImmutablePartitionResult result;
    try {
        result = updateImmutablePartitionInternal(request);
    } catch (Throwable t) {
        LOG.warn(t.getMessage(), t);
        result = new TImmutablePartitionResult();
        TStatus errorStatus = new TStatus(RUNTIME_ERROR);
        errorStatus.setError_msgs(Lists.newArrayList(
                String.format("txn_id=%d failed. %s", request.getTxn_id(), t.getMessage())));
        result.setStatus(errorStatus);
    }
    LOG.info("Finish update immutable partition: {}", result);
    return result;
}
@Test
public void testUpdateImmutablePartitionException() throws TException {
    new MockUp<FrontendServiceImpl>() {
        @Mock
        public synchronized TImmutablePartitionResult updateImmutablePartitionInternal(
                TImmutablePartitionRequest request) {
            throw new RuntimeException("test");
        }
    };
    FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
    TImmutablePartitionRequest request = new TImmutablePartitionRequest();
    TImmutablePartitionResult partition = impl.updateImmutablePartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR);
}
@Override
public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request)
        throws StandbyException, YarnException, IOException {
    // parameter verification.
    if (request == null) {
        routerMetrics.incrRefreshQueuesFailedRetrieved();
        RouterServerUtil.logAndThrowException("Missing RefreshQueues request.", null);
    }
    // call refreshQueues of activeSubClusters.
    try {
        long startTime = clock.getTime();
        RMAdminProtocolMethod remoteMethod = new RMAdminProtocolMethod(
                new Class[] {RefreshQueuesRequest.class}, new Object[] {request});
        String subClusterId = request.getSubClusterId();
        Collection<RefreshQueuesResponse> refreshQueueResps =
                remoteMethod.invokeConcurrent(this, RefreshQueuesResponse.class, subClusterId);
        // If we get the return result from refreshQueueResps,
        // it means that the call has been successful,
        // and the RefreshQueuesResponse method can be reconstructed and returned.
        if (CollectionUtils.isNotEmpty(refreshQueueResps)) {
            long stopTime = clock.getTime();
            routerMetrics.succeededRefreshQueuesRetrieved(stopTime - startTime);
            return RefreshQueuesResponse.newInstance();
        }
    } catch (YarnException e) {
        routerMetrics.incrRefreshQueuesFailedRetrieved();
        RouterServerUtil.logAndThrowException(e, "Unable to refreshQueue due to exception. " + e.getMessage());
    }
    routerMetrics.incrRefreshQueuesFailedRetrieved();
    throw new YarnException("Unable to refreshQueue.");
}
@Test
public void testRefreshQueues() throws Exception {
    // We will test 2 cases:
    // case 1, request is null.
    // case 2, normal request.
    // If the request is null, a Missing RefreshQueues request exception will be thrown.

    // null request.
    LambdaTestUtils.intercept(YarnException.class, "Missing RefreshQueues request.",
            () -> interceptor.refreshQueues(null));

    // normal request.
    RefreshQueuesRequest request = RefreshQueuesRequest.newInstance();
    RefreshQueuesResponse response = interceptor.refreshQueues(request);
    assertNotNull(response);
}
public static boolean isServiceDiscoveryURL(URL url) { return hasServiceDiscoveryRegistryProtocol(url) || hasServiceDiscoveryRegistryTypeKey(url); }
@Test
public void testIsServiceDiscoveryURL() {
    String address1 = "http://example.com";
    URL url1 = UrlUtils.parseURL(address1, null);
    String address2 = "service-discovery-registry://example.com";
    URL url2 = UrlUtils.parseURL(address2, null);
    String address3 = "SERVICE-DISCOVERY-REGISTRY://example.com";
    URL url3 = UrlUtils.parseURL(address3, null);
    String address4 = "http://example.com?registry-type=service";
    URL url4 = UrlUtils.parseURL(address4, null);
    url4.addParameter(REGISTRY_TYPE_KEY, SERVICE_REGISTRY_TYPE);
    assertFalse(UrlUtils.isServiceDiscoveryURL(url1));
    assertTrue(UrlUtils.isServiceDiscoveryURL(url2));
    assertTrue(UrlUtils.isServiceDiscoveryURL(url3));
    assertTrue(UrlUtils.isServiceDiscoveryURL(url4));
}
public ServiceBuilder<U> providerIds(String providerIds) {
    this.providerIds = providerIds;
    return getThis();
}
@Test
void providerIds() {
    ServiceBuilder builder = new ServiceBuilder();
    builder.providerIds("providerIds");
    Assertions.assertEquals("providerIds", builder.build().getProviderIds());
}
@Override
public ListenableFuture<BufferResult> get(OutputBufferId outputBufferId, long startingSequenceId, DataSize maxSize) {
    requireNonNull(outputBufferId, "outputBufferId is null");
    checkArgument(maxSize.toBytes() > 0, "maxSize must be at least 1 byte");
    return partitions.get(outputBufferId.getId()).getPages(startingSequenceId, maxSize);
}
@Test
public void testDuplicateRequests() {
    PartitionedOutputBuffer buffer = createPartitionedBuffer(
            createInitialEmptyOutputBuffers(PARTITIONED)
                    .withBuffer(FIRST, 0)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));

    // add three items
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(i));
    }

    // add a queue
    assertQueueState(buffer, FIRST, 3, 0);

    // get the three elements
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT),
            bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 3, 0);

    // get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT),
            bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 3, 0);

    // acknowledge the pages
    buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);

    // attempt to get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT),
            emptyResults(TASK_INSTANCE_ID, 0, false));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 0, 3);
}
public static long timeUnitToMill(String timeStrWithUnit) {
    // If `timeStrWithUnit` doesn't include time unit,
    // `Duration.parse` would fail to parse and throw Exception.
    if (timeStrWithUnit.endsWith("ms")) {
        return Long.parseLong(timeStrWithUnit.substring(0, timeStrWithUnit.length() - 2));
    }
    return Duration.parse("PT" + timeStrWithUnit).toMillis();
}
@Test
void testTimeUnitToMill() {
    assertEquals(10L, ZeppelinConfiguration.timeUnitToMill("10ms"));
    assertEquals(2000L, ZeppelinConfiguration.timeUnitToMill("2s"));
    assertEquals(60000L, ZeppelinConfiguration.timeUnitToMill("1m"));
    assertEquals(3600000L, ZeppelinConfiguration.timeUnitToMill("1h"));
}
@Override
public void emit(String emitKey, List<Metadata> metadataList, ParseContext parseContext)
        throws IOException, TikaEmitterException {
    if (metadataList == null || metadataList.isEmpty()) {
        throw new TikaEmitterException("metadata list must not be null or of size 0");
    }
    // TODO: estimate size of metadata list. Above a certain size,
    // create a temp file?
    UnsynchronizedByteArrayOutputStream bos = UnsynchronizedByteArrayOutputStream.builder().get();
    try (Writer writer = new OutputStreamWriter(bos, StandardCharsets.UTF_8)) {
        JsonMetadataList.toJson(metadataList, writer);
    } catch (IOException e) {
        throw new TikaEmitterException("can't jsonify", e);
    }
    Metadata metadata = new Metadata();
    emit(emitKey, TikaInputStream.get(bos.toByteArray(), metadata), metadata, parseContext);
}
@Test
public void testBasic() throws Exception {
    EmitterManager emitterManager = EmitterManager.load(getConfig("tika-config-az-blob.xml"));
    Emitter emitter = emitterManager.getEmitter("az-blob");
    List<Metadata> metadataList = new ArrayList<>();
    Metadata m = new Metadata();
    m.set("k1", "v1");
    m.add("k1", "v2");
    m.set("k2", "v3");
    m.add("k2", "v4");
    metadataList.add(m);
    emitter.emit("something-or-other/test-out", metadataList, new ParseContext());
}
public static String getClassName(Schema schema) {
    String namespace = schema.getNamespace();
    String name = schema.getName();
    if (namespace == null || "".equals(namespace))
        return name;
    String dot = namespace.endsWith("$") ? "" : "."; // back-compatibly handle $
    return mangle(namespace) + dot + mangleTypeIdentifier(name);
}
@Test
void classNameContainingReservedWords() {
    final Schema schema = Schema.createRecord("AnyName", null, "db.public.table", false);
    assertEquals("db.public$.table.AnyName", SpecificData.getClassName(schema));
}
public static void raftReadFromLeader() { RAFT_FROM_LEADER.record(1); }
@Test
void testRaftReadFromLeader() {
    MetricsMonitor.raftReadFromLeader();
    assertEquals(1D, MetricsMonitor.getRaftFromLeader().totalAmount(), 0.01);
}
@Override
public String format(final Schema schema) {
    final String converted = SchemaWalker.visit(schema, new Converter()) + typePostFix(schema);
    return options.contains(Option.AS_COLUMN_LIST) ? stripTopLevelStruct(converted) : converted;
}
@Test
public void shouldFormatOptionalBytes() {
    assertThat(DEFAULT.format(Schema.OPTIONAL_BYTES_SCHEMA), is("BYTES"));
    assertThat(STRICT.format(Schema.OPTIONAL_BYTES_SCHEMA), is("BYTES"));
}
@Override
public void getConfig(StorServerConfig.Builder builder) {
    super.getConfig(builder);
    provider.getConfig(builder);
}
@Test
void testCommunicationManagerDefaults() {
    StorCommunicationmanagerConfig.Builder builder = new StorCommunicationmanagerConfig.Builder();
    DistributorCluster cluster = parse("<cluster id=\"storage\">" +
            " <redundancy>3</redundancy>" +
            " <documents/>" +
            " <group>" +
            " <node distribution-key=\"0\" hostalias=\"mockhost\"/>" +
            " </group>" +
            "</cluster>");
    cluster.getChildren().get("0").getConfig(builder);
    StorCommunicationmanagerConfig config = new StorCommunicationmanagerConfig(builder);
    assertEquals(1, config.mbus().num_network_threads());
}
public static String getDecodeQuery(final String uri) {
    try {
        URI u = new URI(uri);
        String query = URISupport.prepareQuery(u);
        String uriWithoutQuery = URISupport.stripQuery(uri);
        if (query == null) {
            return uriWithoutQuery;
        } else {
            Map<String, Object> parameters = URISupport.parseQuery(query, false, false);
            if (parameters.size() == 1) {
                // only 1 parameter need to create new query string
                query = URISupport.createQueryString(parameters);
            } else {
                // reorder parameters a..z
                final Set<String> keySet = parameters.keySet();
                final String[] parametersArray = keySet.toArray(new String[0]);
                Arrays.sort(parametersArray);
                // build uri object with sorted parameters
                query = URISupport.createQueryString(parametersArray, parameters, true);
            }
            return makeUri(uriWithoutQuery, query);
        }
    } catch (URISyntaxException ex) {
        return null;
    }
}
@Test
public void testGetDecodeQuery() throws Exception {
    String out = URISupport.normalizeUri("smtp://localhost?username=davsclaus&password=secret");
    String enc = UnsafeUriCharactersEncoder.encode(out);
    String dec = URISupport.getDecodeQuery(enc);
    assertEquals(out, dec);

    out = URISupport.normalizeUri("smtp://localhost?password=secret&username=davsclaus");
    assertEquals(out, dec);

    out = URISupport.normalizeUri("http://localhost?username=davsclaus&password=RAW(#@a)");
    enc = UnsafeUriCharactersEncoder.encode(out);
    assertNotEquals(out, enc);
    dec = URISupport.getDecodeQuery(enc);
    assertEquals(out, dec);

    out = URISupport.normalizeUri("bean://MyBean?method=RAW(addString(%22#@a%23, test))");
    enc = UnsafeUriCharactersEncoder.encode(out);
    assertNotEquals(out, enc);
    dec = URISupport.getDecodeQuery(enc);
    assertEquals(out, dec);
}
@Override
public Object evaluateUnsafe(EvaluationContext context) {
    final Object idxObj = this.index.evaluateUnsafe(context);
    final Object indexable = indexableObject.evaluateUnsafe(context);
    if (idxObj == null || indexable == null) {
        return null;
    }
    if (idxObj instanceof Long) {
        int idx = Ints.saturatedCast((long) idxObj);
        if (indexable.getClass().isArray()) {
            return Array.get(indexable, idx);
        } else if (indexable instanceof List) {
            return ((List) indexable).get(idx);
        } else if (indexable instanceof Iterable) {
            return Iterables.get((Iterable) indexable, idx);
        }
        throw new IllegalArgumentException(
                context.pipelineErrorMessage("Object '" + indexable + "' is not an Array, List or Iterable."));
    } else if (idxObj instanceof String) {
        final String idx = idxObj.toString();
        if (indexable instanceof Map) {
            return ((Map) indexable).get(idx);
        }
        throw new IllegalArgumentException(
                context.pipelineErrorMessage("Object '" + indexable + "' is not a Map."));
    }
    throw new IllegalArgumentException(
            context.pipelineErrorMessage("Index '" + idxObj + "' is not a Long or String."));
}
@Test
public void invalidObject() {
    final IndexedAccessExpression expression = new IndexedAccessExpression(START, obj(23), num(0));
    // this should throw an exception
    assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> expression.evaluateUnsafe(context));
}
public static void doRegister(final String json, final String url, final String type,
                              final String accessToken) throws IOException {
    if (StringUtils.isBlank(accessToken)) {
        LOGGER.error("{} client register error accessToken is null, please check the config : {} ", type, json);
        return;
    }
    Headers headers = new Headers.Builder().add(Constants.X_ACCESS_TOKEN, accessToken).build();
    String result = OkHttpTools.getInstance().post(url, json, headers);
    if (Objects.equals(SUCCESS, result)) {
        LOGGER.info("{} client register success: {} ", type, json);
    } else {
        LOGGER.error("{} client register error: {} ", type, json);
    }
}
@Test
public void testDoRegisterWhenThrowException() throws IOException {
    when(okHttpTools.post(url, json)).thenThrow(IOException.class);
    assertThrows(IOException.class, () -> {
        try (MockedStatic<OkHttpTools> okHttpToolsMockedStatic = mockStatic(OkHttpTools.class)) {
            okHttpToolsMockedStatic.when(OkHttpTools::getInstance).thenReturn(okHttpTools);
            RegisterUtils.doRegister(json, url, RegisterTypeEnum.DUBBO.getName());
            verify(okHttpTools, times(1)).post(eq(url), eq(json));
        }
    });
}
@Override
public void run() {
    // top-level command, do nothing
}
@Test
public void test_submit_withClassName() {
    run("submit", "--class", "com.hazelcast.jet.testjob.TestJob", testJobJarFile.toString());
    assertTrueEventually(() -> assertEquals(1, hz.getJet().getJobs().size()), 5);
    Job job = hz.getJet().getJobs().get(0);
    assertThat(job).eventuallyHasStatus(JobStatus.RUNNING);
    assertNull(job.getName());
}
public CompletableFuture<Integer> read(ByteBuffer buf, long offset, long len, FileId fileId,
        String ufsPath, UfsReadOptions options) {
    Objects.requireNonNull(buf);
    if (offset < 0 || len < 0 || len > buf.remaining()) {
        throw new OutOfRangeRuntimeException(String.format(
                "offset is negative, len is negative, or len is greater than buf remaining. "
                        + "offset: %s, len: %s, buf remaining: %s", offset, len, buf.remaining()));
    }
    if (mReadQueue.size() >= READ_CAPACITY) {
        throw new ResourceExhaustedRuntimeException("UFS read at capacity", true);
    }
    CompletableFuture<Integer> future = new CompletableFuture<>();
    if (len == 0) {
        future.complete(0);
        return future;
    }
    Meter meter = mUfsBytesReadThroughputMetrics.computeIfAbsent(mUfsClient.getUfsMountPointUri(),
            uri -> MetricsSystem.meterWithTags(MetricKey.WORKER_BYTES_READ_UFS_THROUGHPUT.getName(),
                    MetricKey.WORKER_BYTES_READ_UFS_THROUGHPUT.isClusterAggregated(),
                    MetricInfo.TAG_UFS, MetricsSystem.escape(mUfsClient.getUfsMountPointUri()),
                    MetricInfo.TAG_USER, options.getTag()));
    mReadQueue.add(new ReadTask(buf, ufsPath, fileId, offset, len, options, future, meter));
    return future;
}
@Test
public void offset() throws Exception {
    mUfsIOManager.read(TEST_BUF, 2, TEST_BLOCK_SIZE - 2, FIRST_BLOCK_ID, mTestFilePath,
            UfsReadOptions.getDefaultInstance()).get();
    assertTrue(checkBuf(2, (int) TEST_BLOCK_SIZE - 2, TEST_BUF));
    TEST_BUF.clear();
}
@Override
public <I, O> List<O> flatMap(List<I> data, SerializableFunction<I, Stream<O>> func, int parallelism) {
    return data.stream().parallel().flatMap(throwingFlatMapWrapper(func)).collect(Collectors.toList());
}
@Test
public void testFlatMap() {
    List<String> list1 = Arrays.asList("a", "b", "c");
    List<String> list2 = Arrays.asList("d", "e", "f");
    List<String> list3 = Arrays.asList("g", "h", "i");
    List<List<String>> inputList = new ArrayList<>();
    inputList.add(list1);
    inputList.add(list2);
    inputList.add(list3);
    List<String> result = context.flatMap(inputList, Collection::stream, 2);
    Assertions.assertEquals(9, result.size());
}
public synchronized void synchronizeClusterSchemas( ClusterSchema clusterSchema ) { synchronizeClusterSchemas( clusterSchema, clusterSchema.getName() ); }
@Test
public void synchronizeClusterSchemas_should_not_sync_unshared() throws Exception {
    final String clusterSchemaName = "ClusterSchema";
    TransMeta transformarion1 = createTransMeta();
    ClusterSchema clusterSchema1 = createClusterSchema( clusterSchemaName, true );
    transformarion1.setClusterSchemas( Collections.singletonList( clusterSchema1 ) );
    spoonDelegates.trans.addTransformation( transformarion1 );
    TransMeta transformarion2 = createTransMeta();
    ClusterSchema clusterSchema2 = createClusterSchema( clusterSchemaName, false );
    transformarion2.setClusterSchemas( Collections.singletonList( clusterSchema2 ) );
    spoonDelegates.trans.addTransformation( transformarion2 );
    clusterSchema2.setDynamic( true );
    sharedUtil.synchronizeClusterSchemas( clusterSchema2 );
    assertThat( clusterSchema1.isDynamic(), equalTo( false ) );
}
public static ResourceProfile generateTaskManagerTotalResourceProfile(WorkerResourceSpec workerResourceSpec) {
    return ResourceProfile.newBuilder()
            .setCpuCores(workerResourceSpec.getCpuCores())
            .setTaskHeapMemory(workerResourceSpec.getTaskHeapSize())
            .setTaskOffHeapMemory(workerResourceSpec.getTaskOffHeapSize())
            .setManagedMemory(workerResourceSpec.getManagedMemSize())
            .setNetworkMemory(workerResourceSpec.getNetworkMemSize())
            .setExtendedResources(workerResourceSpec.getExtendedResources().values())
            .build();
}
@Test
void testGenerateTaskManagerTotalResourceProfile() {
    final ResourceProfile resourceProfile = ResourceProfile.newBuilder()
            .setCpuCores(1.0)
            .setTaskHeapMemoryMB(1)
            .setTaskOffHeapMemoryMB(2)
            .setNetworkMemoryMB(3)
            .setManagedMemoryMB(4)
            .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
            .build();
    final WorkerResourceSpec workerResourceSpec = new WorkerResourceSpec.Builder()
            .setCpuCores(1.0)
            .setTaskHeapMemoryMB(1)
            .setTaskOffHeapMemoryMB(2)
            .setNetworkMemoryMB(3)
            .setManagedMemoryMB(4)
            .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
            .build();
    assertThat(SlotManagerUtils.generateTaskManagerTotalResourceProfile(workerResourceSpec))
            .isEqualTo(resourceProfile);
}
public static Optional<CeTaskInterruptedException> isTaskInterruptedException(Throwable e) {
    if (e instanceof CeTaskInterruptedException ceTaskInterruptedException) {
        return Optional.of(ceTaskInterruptedException);
    }
    return isCauseInterruptedException(e);
}
@Test
public void isCauseInterruptedException_returns_CeTaskInterruptedException_or_subclass_in_cause_chain() {
    String message = randomAlphabetic(50);
    CeActivityDto.Status status = randomStatus();
    CeTaskInterruptedException e1 = new CeTaskInterruptedException(message, status) {
    };
    CeTaskInterruptedException e2 = new CeTaskInterruptedExceptionSubclass(message, status);
    assertThat(isTaskInterruptedException(new RuntimeException(e1))).contains(e1);
    assertThat(isTaskInterruptedException(new Exception(new RuntimeException(e2)))).contains(e2);
}
@Override
public VarianceAccumulator addInput(VarianceAccumulator currentVariance, T rawInput) {
    if (rawInput == null) {
        return currentVariance;
    }
    return currentVariance.combineWith(
            VarianceAccumulator.ofSingleElement(SqlFunctions.toBigDecimal(rawInput)));
}
@Test
public void testReturnsAccumulatorUnchangedForNullInput() {
    VarianceAccumulator accumulator = newVarianceAccumulator(ZERO, BigDecimal.ONE, BigDecimal.TEN);
    assertEquals(accumulator, varianceFn.addInput(accumulator, null));
}
public List<CounterRequest> getRequests() {
    // thread-safe:
    // we copy the collection and clone each CounterRequest here in a synchronized way,
    // so that the caller does not have to worry about the necessary synchronization.
    // Note: the Iterator over ConcurrentHashMap.values() is guaranteed not to throw
    // ConcurrentModificationException even if there are concurrent additions.
    final List<CounterRequest> result = new ArrayList<>(requests.size());
    for (final CounterRequest request : requests.values()) {
        // synchronize on request in case a hit is added to this request in parallel
        synchronized (request) {
            result.add(request.clone());
        }
    }
    return result;
}
@Test
public void testGetRequests() {
    counter.clear();
    final CounterRequest counterRequest = createCounterRequest();
    counter.addHits(counterRequest);
    final List<CounterRequest> requests = counter.getRequests();
    assertEquals("requests size", 1, requests.size());
    assertEquals("request", counterRequest.toString(), requests.get(0).toString());
}
@Override public void markFailed(Throwable t) { currentExecutions.values().forEach(e -> e.markFailed(t)); }
@Test
void testMarkFailed() throws Exception {
    final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
    final Execution e1 = ev.getCurrentExecutionAttempt();
    final Execution e2 = ev.createNewSpeculativeExecution(System.currentTimeMillis());
    ev.markFailed(new Exception("Forced test failure."));
    assertThat(internalFailuresListener.getFailedTasks())
            .containsExactly(e1.getAttemptId(), e2.getAttemptId());
}
@Override
public Object evaluateUnsafe(EvaluationContext context) {
    final Object idxObj = this.index.evaluateUnsafe(context);
    final Object indexable = indexableObject.evaluateUnsafe(context);
    if (idxObj == null || indexable == null) {
        return null;
    }
    if (idxObj instanceof Long) {
        int idx = Ints.saturatedCast((long) idxObj);
        if (indexable.getClass().isArray()) {
            return Array.get(indexable, idx);
        } else if (indexable instanceof List) {
            return ((List) indexable).get(idx);
        } else if (indexable instanceof Iterable) {
            return Iterables.get((Iterable) indexable, idx);
        }
        throw new IllegalArgumentException(
                context.pipelineErrorMessage("Object '" + indexable + "' is not an Array, List or Iterable."));
    } else if (idxObj instanceof String) {
        final String idx = idxObj.toString();
        if (indexable instanceof Map) {
            return ((Map) indexable).get(idx);
        }
        throw new IllegalArgumentException(
                context.pipelineErrorMessage("Object '" + indexable + "' is not a Map."));
    }
    throw new IllegalArgumentException(
            context.pipelineErrorMessage("Index '" + idxObj + "' is not a Long or String."));
}
@Test
public void accessArray() {
    int[] ary = new int[] {23};
    final IndexedAccessExpression idxExpr = new IndexedAccessExpression(START, obj(ary), num(0));
    final Object evaluate = idxExpr.evaluateUnsafe(context);
    assertThat(evaluate).isOfAnyClassIn(Integer.class);
    assertThat(evaluate).isEqualTo(23);
}
public RingbufferConfig setTimeToLiveSeconds(int timeToLiveSeconds) {
    this.timeToLiveSeconds = checkNotNegative(timeToLiveSeconds, "timeToLiveSeconds can't be smaller than 0");
    return this;
}
@Test(expected = IllegalArgumentException.class)
public void setTimeToLiveSeconds_whenNegative() {
    RingbufferConfig config = new RingbufferConfig(NAME);
    config.setTimeToLiveSeconds(-1);
}
public DropTypeCommand create(final DropType statement) {
    final String typeName = statement.getTypeName();
    final boolean ifExists = statement.getIfExists();
    if (!ifExists && !metaStore.resolveType(typeName).isPresent()) {
        throw new KsqlException("Type " + typeName + " does not exist.");
    }
    return new DropTypeCommand(typeName);
}
@Test
public void shouldFailCreateTypeIfTypeDoesNotExist() {
    // Given:
    final DropType dropType = new DropType(Optional.empty(), NOT_EXISTING_TYPE, false);
    // When:
    final Exception e = assertThrows(
            KsqlException.class,
            () -> factory.create(dropType));
    // Then:
    assertThat(e.getMessage(), equalTo("Type " + NOT_EXISTING_TYPE + " does not exist."));
}
public static <T> JSONSchema<T> of(SchemaDefinition<T> schemaDefinition) {
    SchemaReader<T> reader = schemaDefinition.getSchemaReaderOpt()
            .orElseGet(() -> new JacksonJsonReader<>(jsonMapper(), schemaDefinition.getPojo()));
    SchemaWriter<T> writer = schemaDefinition.getSchemaWriterOpt()
            .orElseGet(() -> new JacksonJsonWriter<>(jsonMapper()));
    return new JSONSchema<>(parseSchemaInfo(schemaDefinition, SchemaType.JSON),
            schemaDefinition.getPojo(), reader, writer);
}
@Test
public void testNotAllowNullEncodeAndDecode() {
    JSONSchema<Foo> jsonSchema = JSONSchema.of(
            SchemaDefinition.<Foo>builder().withPojo(Foo.class).withAlwaysAllowNull(false).build());
    Foo foo1 = new Foo();
    foo1.setField1("foo1");
    foo1.setField2("bar1");
    foo1.setField4(new Bar());
    foo1.setFieldUnableNull("notNull");
    Foo foo2 = new Foo();
    foo2.setField1("foo2");
    foo2.setField2("bar2");
    byte[] bytes1 = jsonSchema.encode(foo1);
    Foo object1 = jsonSchema.decode(bytes1);
    Assert.assertTrue(bytes1.length > 0);
    assertEquals(object1, foo1);
    try {
        // fieldUnableNull is null on foo2, so encoding must fail when nulls are not allowed
        jsonSchema.encode(foo2);
        Assert.fail("expected a SchemaSerializationException");
    } catch (Exception e) {
        Assert.assertTrue(e instanceof SchemaSerializationException);
    }
}
public void validateFilterExpression(final Expression exp) {
    final SqlType type = getExpressionReturnType(exp);
    if (!SqlTypes.BOOLEAN.equals(type)) {
        throw new KsqlStatementException(
                "Type error in " + filterType.name() + " expression: "
                        + "Should evaluate to boolean but is"
                        + " (" + type.toString(FormatOptions.none()) + ") instead.",
                "Type error in " + filterType.name() + " expression: "
                        + "Should evaluate to boolean but is " + exp.toString()
                        + " (" + type.toString(FormatOptions.none()) + ") instead.",
                exp.toString());
    }
}
@Test
public void shouldThrowOnBadTypeCompoundComparison_leftError() {
    // Given:
    final Expression left1 = new UnqualifiedColumnReferenceExp(COLUMN1);
    final Expression right1 = new UnqualifiedColumnReferenceExp(COLUMN2);
    final Expression comparision1 = new ComparisonExpression(Type.EQUAL, left1, right1);
    final Expression left2 = new UnqualifiedColumnReferenceExp(COLUMN1);
    final Expression right2 = new StringLiteral("foo");
    final Expression comparision2 = new ComparisonExpression(Type.EQUAL, left2, right2);
    final Expression expression = new LogicalBinaryExpression(
            LogicalBinaryExpression.Type.AND, comparision1, comparision2);
    when(schema.findValueColumn(COLUMN1))
            .thenReturn(Optional.of(Column.of(COLUMN1, STRING, VALUE, 10)));
    when(schema.findValueColumn(COLUMN2))
            .thenReturn(Optional.of(Column.of(COLUMN2, INTEGER, VALUE, 10)));
    // When:
    assertThrows("Error in WHERE expression: "
                    + "Cannot compare col1 (STRING) to col2 (INTEGER) with EQUAL.",
            KsqlException.class,
            () -> validator.validateFilterExpression(expression));
}
@Override
public final void isEqualTo(@Nullable Object other) {
    if (Objects.equal(actual, other)) {
        return;
    }
    // Fail but with a more descriptive message:
    if (actual == null || !(other instanceof Map)) {
        super.isEqualTo(other);
        return;
    }
    containsEntriesInAnyOrder((Map<?, ?>) other, /* allowUnexpected= */ false);
}
@Test
public void isEqualToFailureDiffering_sameToString() {
    ImmutableMap<String, Number> actual = ImmutableMap.<String, Number>of("jan", 1, "feb", 2, "march", 3L);
    ImmutableMap<String, Integer> expectedMap = ImmutableMap.of("jan", 1, "feb", 2, "march", 3);
    expectFailureWhenTestingThat(actual).isEqualTo(expectedMap);
    assertFailureKeys(
            "keys with wrong values", "for key", "expected value", "but got value", "---",
            "expected", "but was");
    assertFailureValueIndexed("for key", 0, "march");
    assertFailureValue("expected value", "3 (java.lang.Integer)");
    assertFailureValue("but got value", "3 (java.lang.Long)");
}
@Override public RLock readLock() { return new RedissonReadLock(commandExecutor, getName()); }
@Test
public void testIsLockedOtherThread() throws InterruptedException {
    RReadWriteLock rwlock = redisson.getReadWriteLock("lock");
    RLock lock = rwlock.readLock();
    lock.lock();
    Thread t = new Thread() {
        public void run() {
            RReadWriteLock rwlock = redisson.getReadWriteLock("lock");
            RLock lock = rwlock.readLock();
            Assertions.assertTrue(lock.isLocked());
        }
    };
    t.start();
    t.join();
    lock.unlock();
    Thread t2 = new Thread() {
        public void run() {
            RReadWriteLock rwlock = redisson.getReadWriteLock("lock");
            RLock lock = rwlock.readLock();
            Assertions.assertFalse(lock.isLocked());
        }
    };
    t2.start();
    t2.join();
}
public int addWritePermOfBrokerByLock(final String brokerName) {
    try {
        try {
            this.lock.writeLock().lockInterruptibly();
            return operateWritePermOfBroker(brokerName, RequestCode.ADD_WRITE_PERM_OF_BROKER);
        } finally {
            this.lock.writeLock().unlock();
        }
    } catch (Exception e) {
        log.error("addWritePermOfBrokerByLock Exception", e);
    }
    return 0;
}
@Test
public void testAddWritePermOfBrokerByLock() throws Exception {
    Map<String, QueueData> qdMap = new HashMap<>();
    QueueData qd = new QueueData();
    qd.setPerm(PermName.PERM_READ);
    qd.setBrokerName("broker-a");
    qdMap.put("broker-a", qd);
    HashMap<String, Map<String, QueueData>> topicQueueTable = new HashMap<>();
    topicQueueTable.put("topic-a", qdMap);
    Field filed = RouteInfoManager.class.getDeclaredField("topicQueueTable");
    filed.setAccessible(true);
    filed.set(routeInfoManager, topicQueueTable);
    int addTopicCnt = routeInfoManager.addWritePermOfBrokerByLock("broker-a");
    assertThat(addTopicCnt).isEqualTo(1);
    assertThat(qd.getPerm()).isEqualTo(PermName.PERM_READ | PermName.PERM_WRITE);
}
@Override
public void execute(ComputationStep.Context context) {
    new DepthTraversalTypeAwareCrawler(
            new TypeAwareVisitorAdapter(CrawlerDepthLimit.PROJECT, PRE_ORDER) {
                @Override
                public void visitProject(Component project) {
                    executeForProject(project);
                }
            }).visit(treeRootHolder.getRoot());
}
@Test
void new_measure_has_ERROR_level_of_all_conditions_for_a_specific_metric_if_its_the_worst() {
    int rawValue = 3;
    Condition fixedCondition = createLessThanCondition(INT_METRIC_1, "4");
    Condition periodCondition = createLessThanCondition(INT_METRIC_1, "2");
    qualityGateHolder.setQualityGate(new QualityGate(SOME_QG_UUID, SOME_QG_NAME, of(fixedCondition, periodCondition)));
    Measure measure = newMeasureBuilder().create(rawValue, null);
    measureRepository.addRawMeasure(PROJECT_REF, INT_METRIC_1_KEY, measure);
    underTest.execute(new TestComputationStepContext());
    Optional<Measure> rawMeasure1 = measureRepository.getAddedRawMeasure(PROJECT_REF, INT_METRIC_1_KEY);
    assertThat(rawMeasure1.get())
            .hasQualityGateLevel(ERROR)
            .hasQualityGateText(dumbResultTextAnswer(fixedCondition, ERROR, rawValue));
}
@Override
public void onWorkflowFinalized(Workflow workflow) {
    WorkflowSummary summary = StepHelper.retrieveWorkflowSummary(objectMapper, workflow.getInput());
    WorkflowRuntimeSummary runtimeSummary = retrieveWorkflowRuntimeSummary(workflow);
    String reason = workflow.getReasonForIncompletion();
    LOG.info("Workflow {} with execution_id [{}] is finalized with internal state [{}] and reason [{}]",
            summary.getIdentity(), workflow.getWorkflowId(), workflow.getStatus(), reason);
    metrics.counter(MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(),
            TYPE_TAG, "onWorkflowFinalized", MetricConstants.STATUS_TAG, workflow.getStatus().name());
    if (reason != null
            && workflow.getStatus() == Workflow.WorkflowStatus.FAILED
            && reason.startsWith(MaestroStartTask.DEDUP_FAILURE_PREFIX)) {
        LOG.info("Workflow {} with execution_id [{}] has not actually started, thus skip onWorkflowFinalized.",
                summary.getIdentity(), workflow.getWorkflowId());
        return; // special case doing nothing
    }
    WorkflowInstance.Status instanceStatus = instanceDao.getWorkflowInstanceStatus(
            summary.getWorkflowId(), summary.getWorkflowInstanceId(), summary.getWorkflowRunId());
    if (instanceStatus == null || (instanceStatus.isTerminal() && workflow.getStatus().isTerminal())) {
        LOG.info("Workflow {} with execution_id [{}] does not exist or already "
                        + "in a terminal state [{}] with internal state [{}], thus skip onWorkflowFinalized.",
                summary.getIdentity(), workflow.getWorkflowId(), instanceStatus, workflow.getStatus());
        return;
    }
    Map<String, Task> realTaskMap = TaskHelper.getUserDefinedRealTaskMap(workflow);
    // cancel internally failed tasks
    realTaskMap.values().stream()
            .filter(task -> !StepHelper.retrieveStepStatus(task.getOutputData()).isTerminal())
            .forEach(task -> maestroTask.cancel(workflow, task, null));
    WorkflowRuntimeOverview overview = TaskHelper.computeOverview(
            objectMapper, summary, runtimeSummary.getRollupBase(), realTaskMap);
    try {
        validateAndUpdateOverview(overview, summary);
        switch (workflow.getStatus()) {
            case TERMINATED: // stopped due to stop request
                if (reason != null && reason.startsWith(FAILURE_REASON_PREFIX)) {
                    update(workflow, WorkflowInstance.Status.FAILED, summary, overview);
                } else {
                    update(workflow, WorkflowInstance.Status.STOPPED, summary, overview);
                }
                break;
            case TIMED_OUT:
                update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview);
                break;
            default: // other status (FAILED, COMPLETED, PAUSED, RUNNING) to be handled here.
                Optional<Task.Status> done = TaskHelper.checkProgress(realTaskMap, summary, overview, true);
                switch (done.orElse(Task.Status.IN_PROGRESS)) {
                    /**
                     * This is a special status to indicate that the workflow has succeeded. Check {@link
                     * TaskHelper#checkProgress} for more details.
                     */
                    case FAILED_WITH_TERMINAL_ERROR:
                        WorkflowInstance.Status nextStatus = AggregatedViewHelper.deriveAggregatedStatus(
                                instanceDao, summary, WorkflowInstance.Status.SUCCEEDED, overview);
                        if (!nextStatus.isTerminal()) {
                            throw new MaestroInternalError("Invalid status: [%s], expecting a terminal one", nextStatus);
                        }
                        update(workflow, nextStatus, summary, overview);
                        break;
                    case FAILED:
                    case CANCELED: // due to step failure
                        update(workflow, WorkflowInstance.Status.FAILED, summary, overview);
                        break;
                    case TIMED_OUT:
                        update(workflow, WorkflowInstance.Status.TIMED_OUT, summary, overview);
                        break;
                    // all other status are invalid
                    default:
                        metrics.counter(MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(),
                                TYPE_TAG, "invalidStatusOnWorkflowFinalized");
                        throw new MaestroInternalError("Invalid status [%s] onWorkflowFinalized", workflow.getStatus());
                }
                break;
        }
    } catch (MaestroInternalError | IllegalArgumentException e) {
        // non-retryable error and still fail the instance
        LOG.warn("onWorkflowFinalized is failed with a non-retryable error", e);
        metrics.counter(MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, getClass(),
                TYPE_TAG, "nonRetryableErrorOnWorkflowFinalized");
        update(workflow, WorkflowInstance.Status.FAILED, summary, overview,
                Details.create(e.getMessage(), "onWorkflowFinalized is failed with non-retryable error."));
    }
}
@Test public void testNonRetryableErrorOnWorkflowFinalized() { Task task1 = new Task(); task1.setReferenceTaskName("bar"); Map<String, Object> summary = new HashMap<>(); summary.put("runtime_state", Collections.singletonMap("status", "SUCCEEDED")); summary.put("type", "NOOP"); task1.setOutputData(Collections.singletonMap(Constants.STEP_RUNTIME_SUMMARY_FIELD, summary)); task1.setTaskType(Constants.MAESTRO_TASK_NAME); task1.setStatus(Task.Status.FAILED); Task task2 = new Task(); task2.setReferenceTaskName("foo"); task2.setTaskType(Constants.MAESTRO_TASK_NAME); task2.setStatus(Task.Status.COMPLETED); when(workflow.getTasks()).thenReturn(Arrays.asList(task1, task2)); when(workflow.getWorkflowId()).thenReturn("test-workflow-id"); when(workflow.getStatus()).thenReturn(Workflow.WorkflowStatus.PAUSED); when(instanceDao.getWorkflowInstanceStatus(eq("test-workflow-id"), anyLong(), anyLong())) .thenReturn(WorkflowInstance.Status.IN_PROGRESS); StepRuntimeState state = new StepRuntimeState(); state.setStatus(StepInstance.Status.NOT_CREATED); when(stepInstanceDao.getAllStepStates(any(), anyLong(), anyLong())) .thenReturn(singletonMap("bat", state)); StepTransition transition1 = new StepTransition(); transition1.setSuccessors(Collections.singletonMap("foo", "true")); StepTransition transition2 = new StepTransition(); transition2.setPredecessors(Collections.singletonList("bar")); when(workflow.getInput()) .thenReturn( Collections.singletonMap( Constants.WORKFLOW_SUMMARY_FIELD, threeItemMap( "workflow_id", "test-workflow-id", "initiator", singletonMap("type", "MANUAL"), "runtime_dag", twoItemMap("bar", transition1, "foo", transition2)))); statusListener.onWorkflowFinalized(workflow); Assert.assertEquals( 1L, metricRepo .getCounter( MetricConstants.WORKFLOW_STATUS_LISTENER_CALL_BACK_METRIC, MaestroWorkflowStatusListener.class, "type", "nonRetryableErrorOnWorkflowFinalized") .count()); ArgumentCaptor<Timeline> timelineCaptor = ArgumentCaptor.forClass(Timeline.class); verify(instanceDao, times(1)) .updateWorkflowInstance( any(), any(), timelineCaptor.capture(), eq(WorkflowInstance.Status.FAILED), anyLong()); Timeline timeline = timelineCaptor.getValue(); Assert.assertEquals(2, timeline.getTimelineEvents().size()); Assert.assertEquals( "Workflow instance status is updated to [FAILED] due to [test-reason]", timeline.getTimelineEvents().get(0).getMessage()); Assert.assertEquals( "Invalid state: stepId [foo] should not have any status", timeline.getTimelineEvents().get(1).getMessage()); verify(publisher, times(1)).publishOrThrow(any(), any()); }
@VisibleForTesting static int checkJar(Path file) throws Exception { final URI uri = file.toUri(); int numSevereIssues = 0; try (final FileSystem fileSystem = FileSystems.newFileSystem( new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment()), Collections.emptyMap())) { if (isTestJarAndEmpty(file, fileSystem.getPath("/"))) { return 0; } if (!noticeFileExistsAndIsValid(fileSystem.getPath("META-INF", "NOTICE"), file)) { numSevereIssues++; } if (!licenseFileExistsAndIsValid(fileSystem.getPath("META-INF", "LICENSE"), file)) { numSevereIssues++; } numSevereIssues += getNumLicenseFilesOutsideMetaInfDirectory(file, fileSystem.getPath("/")); numSevereIssues += getFilesWithIncompatibleLicenses(file, fileSystem.getPath("/")); } return numSevereIssues; }
@Test void testIgnoreLicenseDirectories(@TempDir Path tempDir) throws Exception { assertThat( JarFileChecker.checkJar( createJar( tempDir, Entry.fileEntry(VALID_NOTICE_CONTENTS, VALID_NOTICE_PATH), Entry.fileEntry(VALID_LICENSE_CONTENTS, VALID_LICENSE_PATH), Entry.directoryEntry( Arrays.asList("some", "license", "directory"))))) .isEqualTo(0); }
public static boolean acceptEndpoint(String endpointUrl) { return endpointUrl != null && endpointUrl.matches(ENDPOINT_PATTERN_STRING); }
@Test public void testAcceptEndpointFailures() { assertFalse(AMQPMessageConsumptionTask.acceptEndpoint("amqp://localhost/x/testQueue")); assertFalse(AMQPMessageConsumptionTask.acceptEndpoint("rabbit://localhost/x/testQueue")); }
@Override public void resetConfigStats(RedisClusterNode node) { RedisClient entry = getEntry(node); RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT); syncFuture(f); }
@Test public void testResetConfigStats() { RedisClusterNode master = getFirstMaster(); connection.resetConfigStats(master); }
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void leaveChat() { BaseResponse response = bot.execute(new LeaveChat(chatId)); assertFalse(response.isOk()); assertEquals(400, response.errorCode()); assertEquals("Bad Request: chat member status can't be changed in private chats", response.description()); }
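A hedged success-path counterpart to the error case above, exercising the same generic execute() path; GetMe is a standard request type in this client library, while the bot fixture and assertion imports are assumed from the surrounding test class.

@Test
public void getMe() {
    // sketch: a request that should succeed against a valid bot token (assumed fixture)
    BaseResponse response = bot.execute(new GetMe());
    assertTrue(response.isOk());
}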
@Override protected Set<StepField> getUsedFields( RestMeta stepMeta ) { Set<StepField> usedFields = new HashSet<>(); // add url field if ( stepMeta.isUrlInField() && StringUtils.isNotEmpty( stepMeta.getUrlField() ) ) { usedFields.addAll( createStepFields( stepMeta.getUrlField(), getInputs() ) ); } // add method field if ( stepMeta.isDynamicMethod() && StringUtils.isNotEmpty( stepMeta.getMethodFieldName() ) ) { usedFields.addAll( createStepFields( stepMeta.getMethodFieldName(), getInputs() ) ); } // add body field if ( StringUtils.isNotEmpty( stepMeta.getBodyField() ) ) { usedFields.addAll( createStepFields( stepMeta.getBodyField(), getInputs() ) ); } // add parameters as used fields String[] parameterFields = stepMeta.getParameterField(); if ( ArrayUtils.isNotEmpty( parameterFields ) ) { for ( String paramField : parameterFields ) { usedFields.addAll( createStepFields( paramField, getInputs() ) ); } } // add headers as used fields String[] headerFields = stepMeta.getHeaderField(); if ( ArrayUtils.isNotEmpty( headerFields ) ) { for ( String headerField : headerFields ) { usedFields.addAll( createStepFields( headerField, getInputs() ) ); } } return usedFields; }
@Test public void testGetUsedFields_parameterField() throws Exception { Set<StepField> fields = new HashSet<>(); when( meta.getParameterField() ).thenReturn( new String[] { "param1", "param2" } ); doReturn( stepNodes ).when( analyzer ).getInputs(); doReturn( fields ).when( analyzer ).createStepFields( anyString(), eq( stepNodes ) ); Set<StepField> usedFields = analyzer.getUsedFields( meta ); verify( analyzer ).createStepFields( "param1", stepNodes ); verify( analyzer ).createStepFields( "param2", stepNodes ); }
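A hedged companion test for the header-field branch of getUsedFields above; it mirrors the parameter-field test one-to-one and assumes the same mocked meta, analyzer, and stepNodes fixtures.

@Test
public void testGetUsedFields_headerField() throws Exception {
  Set<StepField> fields = new HashSet<>();
  // the header branch in the focal method is reached unconditionally, so stubbing getHeaderField is enough
  when( meta.getHeaderField() ).thenReturn( new String[] { "header1", "header2" } );
  doReturn( stepNodes ).when( analyzer ).getInputs();
  doReturn( fields ).when( analyzer ).createStepFields( anyString(), eq( stepNodes ) );
  analyzer.getUsedFields( meta );
  verify( analyzer ).createStepFields( "header1", stepNodes );
  verify( analyzer ).createStepFields( "header2", stepNodes );
}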
@Override public Boolean update(List<ModifyRequest> modifyRequests, BiConsumer<Boolean, Throwable> consumer) { return update(transactionTemplate, jdbcTemplate, modifyRequests, consumer); }
@Test void testUpdate1() { List<ModifyRequest> modifyRequests = new ArrayList<>(); ModifyRequest modifyRequest1 = new ModifyRequest(); String sql = "UPDATE config_info SET data_id = 'test' WHERE id = ?;"; modifyRequest1.setSql(sql); Object[] args = new Object[] {1}; modifyRequest1.setArgs(args); modifyRequests.add(modifyRequest1); when(transactionTemplate.execute(any(TransactionCallback.class))).thenReturn(true); assertTrue(operate.update(modifyRequests)); }
@Override public void upgrade() { if (hasBeenRunSuccessfully()) { LOG.debug("Migration already completed."); return; } final Set<String> dashboardIdToViewId = new HashSet<>(); final Consumer<String> recordMigratedDashboardIds = dashboardIdToViewId::add; final Map<String, Set<String>> widgetIdMigrationMapping = new HashMap<>(); final Consumer<Map<String, Set<String>>> recordMigratedWidgetIds = widgetIdMigrationMapping::putAll; final Map<View, Search> newViews = this.dashboardsService.streamAll() .sorted(Comparator.comparing(Dashboard::id)) .map(dashboard -> migrateDashboard(dashboard, recordMigratedDashboardIds, recordMigratedWidgetIds)) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); writeViews(newViews); final MigrationCompleted migrationCompleted = MigrationCompleted.create(dashboardIdToViewId, widgetIdMigrationMapping); writeMigrationCompleted(migrationCompleted); }
@Test @MongoDBFixtures("ops_dashboards.json") public void migrateOpsDashboards() throws Exception { this.migration.upgrade(); final MigrationCompleted migrationCompleted = captureMigrationCompleted(); assertThat(migrationCompleted.migratedDashboardIds()) .containsExactlyInAnyOrder( "5ddf8ed5b2d44b2e04472992", "5ddf8ed6b2d44b2e044729a2", "5ddf8ed7b2d44b2e044729b1", "5ddf8ed8b2d44b2e044729d2", "5ddf8ed8b2d44b2e044729d8" ); assertViewsWritten(5, resourceFile("ops_dashboards-expected_views.json")); assertSearchesWritten(5, resourceFile("ops_dashboards-expected_searches.json")); }
public FEELFnResult<String> invoke(@ParameterName( "string" ) String string, @ParameterName( "match" ) String match) { if ( string == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null")); } if ( match == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "match", "cannot be null")); } int index = string.indexOf( match ); if( index >= 0 ) { return FEELFnResult.ofResult( string.substring( index+match.length() ) ); } else { return FEELFnResult.ofResult( "" ); } }
@Test void invokeMatchExists() { FunctionTestUtil.assertResult(substringAfterFunction.invoke("foobar", "ob"), "ar"); FunctionTestUtil.assertResult(substringAfterFunction.invoke("foobar", "o"), "obar"); }
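Hedged extra cases implied by the remaining branches of invoke: a non-matching string falls through to the empty-string result, and null arguments surface as InvalidParametersEvent errors. This assumes FunctionTestUtil also offers the assertResultError helper used elsewhere in these FEEL function tests.

@Test
void invokeMatchMissingOrNullArgs() {
    // index < 0 branch: no occurrence of the match string yields ""
    FunctionTestUtil.assertResult(substringAfterFunction.invoke("foobar", "xyz"), "");
    // null-parameter branches produce error results rather than throwing
    FunctionTestUtil.assertResultError(substringAfterFunction.invoke(null, "b"), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(substringAfterFunction.invoke("foo", null), InvalidParametersEvent.class);
}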
public static TypeInformation<?> readTypeInfo(String typeString) { final List<Token> tokens = tokenize(typeString); final TokenConverter converter = new TokenConverter(typeString, tokens); return converter.convert(); }
@Test void testSyntaxError2() { assertThatThrownBy( () -> TypeStringUtils.readTypeInfo("ROW<f0 DECIMAL DECIMAL, f1 TINYINT>")) .isInstanceOf(ValidationException.class); // duplicate type }
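For contrast, a hedged happy-path sketch: removing the duplicated type from the failing input should parse cleanly; only a non-null check is made because the exact TypeInformation comparison fixtures are not shown here.

@Test
void testValidRowParses() {
    // same ROW syntax as above, minus the duplicate type that triggered the ValidationException
    assertThat(TypeStringUtils.readTypeInfo("ROW<f0 DECIMAL, f1 TINYINT>")).isNotNull();
}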
public static <T> RestResult<T> failedWithMsg(int code, String errMsg) { return RestResult.<T>builder().withCode(code).withMsg(errMsg).build(); }
@Test void testFailedWithMsgMethod() { RestResult<String> restResult = RestResultUtils.failedWithMsg(400, "content"); assertRestResult(restResult, 400, "content", null, false); }
public MessagesRequestSpec simpleQueryParamsToFullRequestSpecification(final String query, final Set<String> streams, final String timerangeKeyword, final List<String> fields, final String sort, final SortSpec.Direction sortOrder, final int from, final int size) { return new MessagesRequestSpec(query, streams, timerangeParser.parseTimeRange(timerangeKeyword), sort, sortOrder, from, size, fields); }
@Test void throwsExceptionOnWrongMetricFormat() { assertThrows(IllegalArgumentException.class, () -> toTest.simpleQueryParamsToFullRequestSpecification("*", Set.of(), "42d", List.of("http_method"), List.of("avg:joe", "ayayayayay!"))); }
@Override protected Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance) throws RpcException { RetryContext.INSTANCE.markRetry(retry); checkInvokers(invokers, invocation); final List<io.github.resilience4j.retry.Retry> handlers = retryHandler .getHandlers(convertToApacheDubboEntity(invocation, invokers.get(0))); final List<Invoker<T>> selected = new ArrayList<>(); DecorateCheckedSupplier<Result> dcs = Decorators.ofCheckedSupplier(buildFunc(invocation, invokers, loadbalance, selected)); io.github.resilience4j.retry.Retry retryRule = null; if (!handlers.isEmpty()) { // only one policy is supported for retry retryRule = handlers.get(0); dcs.withRetry(retryRule); } try { return dcs.get(); } catch (RpcException ex) { log(retryRule, invocation); throw ex; } catch (Throwable ex) { log(retryRule, invocation); throw formatEx(ex); } finally { RetryContext.INSTANCE.remove(); FlowControlContext.INSTANCE.clear(); selected.clear(); } }
@Test public void doInvoke() { final Directory<Result> directory = Mockito.mock(Directory.class); Mockito.when(directory.getUrl()).thenReturn(new URL("dubbo", "localhost", 8080)); final ApacheDubboClusterInvoker<Result> clusterInvoker = new ApacheDubboClusterInvoker<>(directory); final RoundRobinLoadBalance roundRobinLoadBalance = new RoundRobinLoadBalance(); final Invocation invocation = Mockito.mock(Invocation.class); String interfaceName = this.getClass().getName(); String version = "1.0.0"; Mockito.when(invocation.getMethodName()).thenReturn("test"); Mockito.when(invocation.getAttachment(ConvertUtils.DUBBO_ATTACHMENT_VERSION)).thenReturn(version); Mockito.when(invocation.getArguments()).thenReturn(new Object[]{"test"}); final Invoker invoker = Mockito.mock(Invoker.class); Mockito.when(invoker.getInterface()).thenReturn(this.getClass()); final URL url = Mockito.mock(URL.class); Mockito.when(url.getParameter(CommonConst.GENERIC_INTERFACE_KEY, interfaceName)).thenReturn(interfaceName); Mockito.when(url.getParameter(CommonConst.URL_VERSION_KEY, version)).thenReturn(version); Mockito.when(url.getParameter(CommonConst.DUBBO_REMOTE_APPLICATION)).thenReturn("application"); Mockito.when(invoker.getUrl()).thenReturn(url); Mockito.when(invocation.getInvoker()).thenReturn(invoker); Mockito.when(directory.getUrl()).thenReturn(url); final AsyncRpcResult asyncRpcResult = AsyncRpcResult.newDefaultAsyncResult(new Object(), invocation); Mockito.when(invoker.invoke(invocation)).thenReturn(asyncRpcResult); final Result result = clusterInvoker.doInvoke(invocation, Arrays.asList(invoker), roundRobinLoadBalance); Assert.assertEquals(result, asyncRpcResult); // test throw exception Mockito.when(invoker.invoke(invocation)).thenThrow(new RpcException("test error")); boolean isEx = false; try { clusterInvoker.doInvoke(invocation, Arrays.asList(invoker), roundRobinLoadBalance); } catch (RpcException ex) { isEx = true; } Assert.assertTrue(isEx); }
@Nullable @Override public Session load(@NonNull String sessionId) { var session = store.getIfPresent(sessionId); if (session == null || session.createdAt().plus(timeToLive).isBefore(Instant.now())) { return null; } return session; }
@Test void load_realCache_bounded() { var maxSize = 10; var ttl = Duration.ofMinutes(5); Cache<String, Session> store = Caffeine.newBuilder().maximumSize(maxSize).build(); var sut = new CaffeineSessionRepo(store, ttl); var state = "myState"; var nonce = UUID.randomUUID().toString(); var redirectUri = URI.create("https://example.com/callback"); var clientId = "app"; var sessionIds = IntStream.range(0, 100).mapToObj(Integer::toString).toList(); sessionIds.stream() .map( i -> Session.create() .id(i) .state(state) .nonce(nonce) .redirectUri(redirectUri) .clientId(clientId) .build()) .forEach(sut::save); store.cleanUp(); // when var remainingCount = sessionIds.stream().flatMap(id -> Optional.ofNullable(sut.load(id)).stream()).count(); // then assertEquals(maxSize, remainingCount); }
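A hedged sketch of the TTL branch of load(): with a negative time-to-live every entry is already stale, so load returns null even though the raw cache still holds the value. It assumes Session.create() stamps createdAt at build time and that the usual JUnit assertNull import is available.

@Test
void load_expiredSession_returnsNull() {
    Cache<String, Session> store = Caffeine.newBuilder().build();
    // negative TTL makes createdAt.plus(timeToLive) strictly before now for any entry
    var sut = new CaffeineSessionRepo(store, Duration.ofMillis(-1));
    var session = Session.create()
        .id("s1")
        .state("myState")
        .nonce(UUID.randomUUID().toString())
        .redirectUri(URI.create("https://example.com/callback"))
        .clientId("app")
        .build();
    sut.save(session);
    assertNull(sut.load("s1"));
}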
@Override public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException { HttpServletRequest request = (HttpServletRequest) servletRequest; String uri = request.getRequestURI(); String method = request.getMethod(); try { if (isRegisterInstanceUri(uri, method)) { //register ClientAttributes attributes = getClientAttributes(); RequestContextHolder.getContext() .addExtensionContext(ClientAttributes.class.getSimpleName(), attributes); } else if (isBeatUri(uri, method)) { //beat String ip = WebUtils.optional(request, IP, StringUtils.EMPTY); int port = Integer.parseInt(WebUtils.optional(request, PORT, ZERO)); String clientId = IpPortBasedClient.getClientId(ip + InternetAddressUtil.IP_PORT_SPLITER + port, true); IpPortBasedClient client = (IpPortBasedClient) clientManager.getClient(clientId); if (client != null) { ClientAttributes requestClientAttributes = getClientAttributes(); //update clientAttributes,when client version attributes is null,then update. if (canUpdateClientAttributes(client, requestClientAttributes)) { client.setAttributes(requestClientAttributes); } } } } catch (Exception e) { Loggers.SRV_LOG.error("handler client attributes error", e); } try { filterChain.doFilter(request, servletResponse); } catch (ServletException e) { throw new RuntimeException(e); } }
@Test void testDoFilterForBeatUri() throws IOException { when(request.getParameter("ip")).thenReturn("127.0.0.1"); when(request.getParameter("port")).thenReturn("8848"); when(request.getParameter("encoding")).thenReturn("utf-8"); when(clientManager.getClient("127.0.0.1:8848#true")).thenReturn(client); when(request.getRequestURI()).thenReturn( UtilsAndCommons.NACOS_SERVER_CONTEXT + UtilsAndCommons.NACOS_NAMING_CONTEXT + UtilsAndCommons.NACOS_NAMING_INSTANCE_CONTEXT + "/beat"); when(request.getMethod()).thenReturn("PUT"); filter.doFilter(request, response, new MockFilterChain()); verify(client).setAttributes(any(ClientAttributes.class)); }
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Trash file %s to be replaced with %s", renamed, file)); } delete.delete(Collections.singletonMap(renamed, status), connectionCallback, callback); } final String id = fileid.getFileId(file); File result = null; if(!StringUtils.equals(file.getName(), renamed.getName())) { // Rename title final File properties = new File(); properties.setName(renamed.getName()); properties.setMimeType(status.getMime()); result = session.getClient().files().update(id, properties) .setFields(DriveAttributesFinderFeature.DEFAULT_FIELDS) .setSupportsAllDrives(new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable")) .execute(); } if(!new SimplePathPredicate(file.getParent()).test(renamed.getParent())) { // Retrieve the existing parents to remove final StringBuilder previousParents = new StringBuilder(); final File reference = session.getClient().files().get(id) .setFields("parents") .setSupportsAllDrives(new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable")) .execute(); for(String parent : reference.getParents()) { previousParents.append(parent).append(','); } // Move the file to the new folder result = session.getClient().files().update(id, null) .setAddParents(fileid.getFileId(renamed.getParent())) .setRemoveParents(previousParents.toString()) .setFields(DriveAttributesFinderFeature.DEFAULT_FIELDS) .setSupportsAllDrives(new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable")) .execute(); } fileid.cache(file, null); fileid.cache(renamed, id); return renamed.withAttributes(new DriveAttributesFinderFeature(session, fileid).toAttributes(result)); } catch(IOException e) { throw new DriveExceptionMappingService(fileid).map("Cannot rename {0}", e, file); } }
@Test public void testMoveDirectory() throws Exception { final Path sourceDirectory = new Path(DriveHomeFinderService.MYDRIVE_FOLDER, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); final Path targetDirectory = new Path(DriveHomeFinderService.MYDRIVE_FOLDER, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); final DriveFileIdProvider fileid = new DriveFileIdProvider(session); new DriveDirectoryFeature(session, fileid).mkdir(sourceDirectory, new TransferStatus()); final Path sourceFile = new DriveTouchFeature(session, fileid).touch(new Path(sourceDirectory, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final Path targetFile = new Path(targetDirectory, sourceFile.getName(), EnumSet.of(Path.Type.file)); new DriveMoveFeature(session, fileid).move(sourceDirectory, targetDirectory, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); final Find find = new DefaultFindFeature(session); assertFalse(find.find(sourceDirectory)); assertTrue(find.find(targetDirectory)); assertTrue(find.find(targetFile)); new DriveDeleteFeature(session, fileid).delete(Arrays.asList(targetFile, targetDirectory), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Override public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) { ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new); String suffix = String.valueOf(hashShardingValue(shardingValue.getValue()) % shardingCount); return ShardingAutoTableAlgorithmUtils.findMatchedTargetName(availableTargetNames, suffix, shardingValue.getDataNodeInfo()).orElse(null); }
@Test void assertRangeDoSharding() { List<String> availableTargetNames = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3"); Collection<String> actual = shardingAlgorithm.doSharding(availableTargetNames, new RangeShardingValue<>("t_order", "create_time", DATA_NODE_INFO, Range.closed("a", "f"))); assertThat(actual.size(), is(4)); }
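A hedged sketch for the precise branch of doSharding above. It assumes PreciseShardingValue takes the same (table, column, dataNodeInfo, value) shape as the RangeShardingValue in the existing test, and that the configured sharding count is 4 to line up with the four targets; since the concrete suffix depends on hashShardingValue modulo the sharding count, only membership is asserted.

@Test
void assertPreciseDoSharding() {
    List<String> availableTargetNames = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3");
    String actual = shardingAlgorithm.doSharding(availableTargetNames,
            new PreciseShardingValue<>("t_order", "create_time", DATA_NODE_INFO, "a"));
    // with 4 targets and an assumed shardingCount of 4, the hash-derived suffix should match one target
    assertTrue(availableTargetNames.contains(actual));
}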
public PrepareResult prepare(HostValidator hostValidator, DeployLogger logger, PrepareParams params, Optional<ApplicationVersions> activeApplicationVersions, Instant now, File serverDbSessionDir, ApplicationPackage applicationPackage, SessionZooKeeperClient sessionZooKeeperClient) { ApplicationId applicationId = params.getApplicationId(); Preparation preparation = new Preparation(hostValidator, logger, params, activeApplicationVersions, TenantRepository.getTenantPath(applicationId.tenant()), serverDbSessionDir, applicationPackage, sessionZooKeeperClient, onnxModelCost, endpointCertificateSecretStores); preparation.preprocess(); try { AllocatedHosts allocatedHosts = preparation.buildModels(now); preparation.makeResult(allocatedHosts); if ( ! params.isDryRun()) { FileReference fileReference = preparation.triggerDistributionOfApplicationPackage(); preparation.writeStateZK(fileReference); preparation.writeEndpointCertificateMetadataZK(); preparation.writeContainerEndpointsZK(); } log.log(Level.FINE, () -> "time used " + params.getTimeoutBudget().timesUsed() + " : " + applicationId); return preparation.result(); } catch (IllegalArgumentException e) { if (e instanceof InvalidApplicationException) throw e; throw new InvalidApplicationException("Invalid application package", e); } }
@Test public void require_that_application_id_is_written_in_prepare() throws IOException { PrepareParams params = new PrepareParams.Builder().applicationId(applicationId()).build(); int sessionId = 1; prepare(testApp, params); assertEquals(applicationId(), createSessionZooKeeperClient(sessionId).readApplicationId()); }
@POST @ZeppelinApi public Response createNote(String message) throws IOException { String user = authenticationService.getPrincipal(); LOGGER.info("Creating new note by JSON {}", message); NewNoteRequest request = GSON.fromJson(message, NewNoteRequest.class); String defaultInterpreterGroup = request.getDefaultInterpreterGroup(); if (StringUtils.isBlank(defaultInterpreterGroup)) { defaultInterpreterGroup = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); } String noteId = notebookService.createNote( request.getName(), defaultInterpreterGroup, request.getAddingEmptyParagraph(), getServiceContext(), new RestServiceCallback<>()); return notebook.processNote(noteId, note -> { AuthenticationInfo subject = new AuthenticationInfo(authenticationService.getPrincipal()); if (request.getParagraphs() != null) { for (NewParagraphRequest paragraphRequest : request.getParagraphs()) { Paragraph p = note.addNewParagraph(subject); initParagraph(p, paragraphRequest, user); } } return new JsonResponse<>(Status.OK, "", note.getId()).build(); }); }
@Test void testCancelNoteJob() throws Exception { LOG.info("Running testCancelNoteJob"); String note1Id = null; try { note1Id = notebook.createNote("note1", anonymous); // Add 3 paragraphs for the note. List<Paragraph> paragraphs = notebook.processNote(note1Id, note1 -> { List<Paragraph> paragraphsList = new ArrayList<>(); for (int i = 0; i < 3; i++) { Paragraph p1 = note1.addNewParagraph(AuthenticationInfo.ANONYMOUS); p1.setText("%python\nimport time\ntime.sleep(10)\nprint('done')"); note1.run(p1.getId()); paragraphsList.add(p1); } return paragraphsList; }); //The first paragraph is running, and the other two is pending. paragraphs.get(0).waitUntilRunning(); // cancel running note CloseableHttpResponse delete = httpDelete("/notebook/job/" + note1Id); assertThat(delete, isAllowed()); Map<String, Object> resp = gson.fromJson(EntityUtils.toString(delete.getEntity(), StandardCharsets.UTF_8), new TypeToken<Map<String, Object>>() { }.getType()); assertEquals("OK", resp.get("status")); delete.close(); for (Paragraph p : paragraphs) { p.waitUntilFinished(); assertEquals(Job.Status.ABORT, p.getStatus()); } } finally { // cleanup if (null != note1Id) { notebook.removeNote(note1Id, anonymous); } } }
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) { CharStream input = CharStreams.fromString(source); FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input ); CommonTokenStream tokens = new CommonTokenStream( lexer ); FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens ); ParserHelper parserHelper = new ParserHelper(eventsManager); additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol())); parser.setHelper(parserHelper); parser.setErrorHandler( new FEELErrorHandler() ); parser.removeErrorListeners(); // removes the error listener that prints to the console parser.addErrorListener( new FEELParserErrorListener( eventsManager ) ); // pre-loads the parser with symbols defineVariables( inputVariableTypes, inputVariables, parser ); if (typeRegistry != null) { parserHelper.setTypeRegistry(typeRegistry); } return parser; }
@Test void power1() { String inputExpression = "y * 5 ** 3"; BaseNode infix = parse( inputExpression, mapOf(entry("y", BuiltInType.NUMBER)) ); assertThat( infix).isInstanceOf(InfixOpNode.class); assertThat( infix.getResultType()).isEqualTo(BuiltInType.NUMBER); assertThat( infix.getText()).isEqualTo(inputExpression); InfixOpNode mult = (InfixOpNode) infix; assertThat( mult.getLeft()).isInstanceOf(NameRefNode.class); assertThat( mult.getLeft().getText()).isEqualTo("y"); assertThat( mult.getOperator()).isEqualTo(InfixOperator.MULT); assertThat( mult.getRight()).isInstanceOf(InfixOpNode.class); assertThat( mult.getRight().getText()).isEqualTo( "5 ** 3"); InfixOpNode exp = (InfixOpNode) mult.getRight(); assertThat( exp.getLeft()).isInstanceOf(NumberNode.class); assertThat( exp.getLeft().getText()).isEqualTo("5"); assertThat( exp.getOperator()).isEqualTo(InfixOperator.POW); assertThat( exp.getRight()).isInstanceOf(NumberNode.class); assertThat( exp.getRight().getText()).isEqualTo("3"); }
@Override public boolean supports(String hashedPassword) { return prefixPattern.matcher(hashedPassword).matches(); }
@Test public void testSupports() throws Exception { assertThat(SHA1HashPasswordAlgorithm.supports("deadbeefaffedeadbeefdeadbeefaffedeadbeef")).isTrue(); assertThat(SHA1HashPasswordAlgorithm.supports("{bcrypt}foobar")).isFalse(); assertThat(SHA1HashPasswordAlgorithm.supports("{foobar}foobar")).isFalse(); }
public static SlaveConnectionManager getInstance() { // lazily initialized singleton; note that this null check is not synchronized if ( slaveConnectionManager == null ) { slaveConnectionManager = new SlaveConnectionManager(); } return slaveConnectionManager; }
@Test public void shouldOverrideDefaultSSLContextByDefault() throws Exception { System.clearProperty( "javax.net.ssl.keyStore" ); SlaveConnectionManager instance = SlaveConnectionManager.getInstance(); assertNotEquals( defaultContext, SSLContext.getDefault() ); }
@Override public HashSlotCursor8byteKey cursor() { return new Cursor(); }
@Test public void testCursor_valueAddress() { final SlotAssignmentResult slot = insert(random.nextLong()); HashSlotCursor8byteKey cursor = hsa.cursor(); cursor.advance(); assertEquals(slot.address(), cursor.valueAddress()); }
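A hedged mirror of the value-address test for the cursor's key accessor, assuming HashSlotCursor8byteKey exposes key() alongside valueAddress() and reusing the same insert/random fixtures.

@Test
public void testCursor_key() {
    final long key = random.nextLong();
    insert(key);
    HashSlotCursor8byteKey cursor = hsa.cursor();
    cursor.advance();
    // the single inserted slot should be the one the cursor reports
    assertEquals(key, cursor.key());
}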
@Override @MethodNotAvailable public CompletionStage<V> putAsync(K key, V value) { throw new MethodNotAvailableException(); }
@Test(expected = MethodNotAvailableException.class) public void testPutAsync() { adapter.putAsync(42, "newValue"); }
@Override @PublicAPI(usage = ACCESS) public String getName() { return WILDCARD_TYPE_NAME + boundsToString(); }
@Test public void wildcard_name_lower_bounded() { @SuppressWarnings("unused") class LowerBounded<T extends List<? super String>> { } JavaWildcardType wildcardType = importWildcardTypeOf(LowerBounded.class); assertThat(wildcardType.getName()).isEqualTo("? super java.lang.String"); }
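A hedged mirror of the lower-bounded case for an upper bound; given that getName() above concatenates the wildcard with boundsToString(), the extends form should render symmetrically, with the same importWildcardTypeOf fixture assumed.

@Test
public void wildcard_name_upper_bounded() {
    @SuppressWarnings("unused")
    class UpperBounded<T extends List<? extends String>> {
    }
    JavaWildcardType wildcardType = importWildcardTypeOf(UpperBounded.class);
    assertThat(wildcardType.getName()).isEqualTo("? extends java.lang.String");
}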
@Bean public SyncDataService websocketSyncDataService(final ObjectProvider<WebsocketConfig> websocketConfig, final ObjectProvider<PluginDataSubscriber> pluginSubscriber, final ObjectProvider<List<MetaDataSubscriber>> metaSubscribers, final ObjectProvider<List<AuthDataSubscriber>> authSubscribers, final ObjectProvider<List<ProxySelectorDataSubscriber>> proxySelectorSubscribers, final ObjectProvider<List<DiscoveryUpstreamDataSubscriber>> discoveryUpstreamSubscribers) { LOGGER.info("you use websocket sync shenyu data......."); return new WebsocketSyncDataService(websocketConfig.getIfAvailable(WebsocketConfig::new), pluginSubscriber.getIfAvailable(), metaSubscribers.getIfAvailable(Collections::emptyList), authSubscribers.getIfAvailable(Collections::emptyList), proxySelectorSubscribers.getIfAvailable(Collections::emptyList), discoveryUpstreamSubscribers.getIfAvailable(Collections::emptyList)); }
@Test public void testWebsocketSyncDataService() { assertNotNull(websocketSyncDataService); }
@Override public String telnet(AbstractChannel channel, String message) { StringBuilder result = new StringBuilder(); if (StringUtils.isNotBlank(message)) { TelnetHandler handler = TelnetHandlerFactory.getHandler(message); if (handler != null) { result.append(handler.getCommand()).append(LINE) .append(handler.getDescription()).append(LINE); } else { result.append("Not found command : " + message); } } else { result.append("The supported command include:").append(LINE); for (Map.Entry<String, TelnetHandler> entry : TelnetHandlerFactory.getAllHandlers().entrySet()) { result.append(entry.getKey()).append(" "); } result.append(LINE); } return result.toString(); }
@Test public void telnet() throws Exception { Assert.assertNotNull(new HelpTelnetHandler().telnet(null, "")); Assert.assertNotNull(new HelpTelnetHandler().telnet(null, null)); Assert.assertNotNull(new HelpTelnetHandler().telnet(null, "xx")); }
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) { SinkConfig mergedConfig = clone(existingConfig); if (!existingConfig.getTenant().equals(newConfig.getTenant())) { throw new IllegalArgumentException("Tenants differ"); } if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) { throw new IllegalArgumentException("Namespaces differ"); } if (!existingConfig.getName().equals(newConfig.getName())) { throw new IllegalArgumentException("Sink Names differ"); } if (!StringUtils.isEmpty(newConfig.getClassName())) { mergedConfig.setClassName(newConfig.getClassName()); } if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName() .equals(existingConfig.getSourceSubscriptionName())) { throw new IllegalArgumentException("Subscription Name cannot be altered"); } if (newConfig.getInputSpecs() == null) { newConfig.setInputSpecs(new HashMap<>()); } if (mergedConfig.getInputSpecs() == null) { mergedConfig.setInputSpecs(new HashMap<>()); } if (!StringUtils.isEmpty(newConfig.getLogTopic())) { mergedConfig.setLogTopic(newConfig.getLogTopic()); } if (newConfig.getInputs() != null) { newConfig.getInputs().forEach((topicName -> { newConfig.getInputSpecs().putIfAbsent(topicName, ConsumerConfig.builder().isRegexPattern(false).build()); })); } if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) { newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder() .isRegexPattern(true) .build()); } if (newConfig.getTopicToSerdeClassName() != null) { newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .serdeClassName(serdeClassName) .isRegexPattern(false) .build()); }); } if (newConfig.getTopicToSchemaType() != null) { newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .schemaType(schemaClassname) .isRegexPattern(false) .build()); }); } if (!newConfig.getInputSpecs().isEmpty()) { SinkConfig finalMergedConfig = mergedConfig; newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> { if (!existingConfig.getInputSpecs().containsKey(topicName)) { throw new IllegalArgumentException("Input Topics cannot be altered"); } if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) { throw new IllegalArgumentException( "isRegexPattern for input topic " + topicName + " cannot be altered"); } finalMergedConfig.getInputSpecs().put(topicName, consumerConfig); }); } if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees() .equals(existingConfig.getProcessingGuarantees())) { throw new IllegalArgumentException("Processing Guarantees cannot be altered"); } if (newConfig.getConfigs() != null) { mergedConfig.setConfigs(newConfig.getConfigs()); } if (newConfig.getSecrets() != null) { mergedConfig.setSecrets(newConfig.getSecrets()); } if (newConfig.getParallelism() != null) { mergedConfig.setParallelism(newConfig.getParallelism()); } if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering() .equals(existingConfig.getRetainOrdering())) { throw new IllegalArgumentException("Retain Ordering cannot be altered"); } if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering() .equals(existingConfig.getRetainKeyOrdering())) { throw new IllegalArgumentException("Retain Key Ordering cannot be altered"); } if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) { throw new IllegalArgumentException("AutoAck cannot be altered"); } if (newConfig.getResources() != null) { mergedConfig .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources())); } if (newConfig.getTimeoutMs() != null) { mergedConfig.setTimeoutMs(newConfig.getTimeoutMs()); } if (newConfig.getCleanupSubscription() != null) { mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription()); } if (!StringUtils.isEmpty(newConfig.getArchive())) { mergedConfig.setArchive(newConfig.getArchive()); } if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) { mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags()); } if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) { mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions()); } if (newConfig.getTransformFunction() != null) { mergedConfig.setTransformFunction(newConfig.getTransformFunction()); } if (newConfig.getTransformFunctionClassName() != null) { mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName()); } if (newConfig.getTransformFunctionConfig() != null) { mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig()); } return mergedConfig; }
@Test public void testMergeDifferentClassName() { SinkConfig sinkConfig = createSinkConfig(); SinkConfig newSinkConfig = createUpdatedSinkConfig("className", "Different"); SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig); assertEquals( mergedConfig.getClassName(), "Different" ); mergedConfig.setClassName(sinkConfig.getClassName()); assertEquals( new Gson().toJson(sinkConfig), new Gson().toJson(mergedConfig) ); }
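A hedged negative-path mirror of the merge test: per the first check in validateUpdate, changing the tenant must fail. It reuses the createSinkConfig/createUpdatedSinkConfig helpers of this test class and assumes the class uses TestNG, where the expectedExceptions attribute is available.

@Test(expectedExceptions = IllegalArgumentException.class)
public void testMergeDifferentTenant() {
    SinkConfig sinkConfig = createSinkConfig();
    SinkConfig newSinkConfig = createUpdatedSinkConfig("tenant", "Different");
    // "Tenants differ" is thrown before any merging happens
    SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig);
}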
public static void hookIntentGetService(Context context, int requestCode, Intent intent, int flags) { hookIntent(intent); }
@Test public void hookIntentGetService() { PushAutoTrackHelper.hookIntentGetService(mApplication, 100, MockDataTest.mockJPushIntent(), 100); }
private void respondWithFile(ChannelHandlerContext ctx, HttpRequest request, String requestPath) throws IOException, ParseException, RestHandlerException { // make sure we request the "index.html" in case there is a directory request if (requestPath.endsWith("/")) { requestPath = requestPath + "index.html"; } if (!requestPath.contains(".")) { // we assume that the path ends in either .html or .js requestPath = requestPath + ".json"; } // convert to absolute path final File file = new File(rootPath, requestPath); if (!file.exists()) { // file does not exist. Try to load it with the classloader ClassLoader cl = HistoryServerStaticFileServerHandler.class.getClassLoader(); try (InputStream resourceStream = cl.getResourceAsStream("web" + requestPath)) { boolean success = false; try { if (resourceStream != null) { URL root = cl.getResource("web"); URL requested = cl.getResource("web" + requestPath); if (root != null && requested != null) { URI rootURI = new URI(root.getPath()).normalize(); URI requestedURI = new URI(requested.getPath()).normalize(); // Check that we don't load anything from outside of the // expected scope. if (!rootURI.relativize(requestedURI).equals(requestedURI)) { LOG.debug("Loading missing file from classloader: {}", requestPath); // ensure that directory to file exists. file.getParentFile().mkdirs(); Files.copy(resourceStream, file.toPath()); success = true; } } } } catch (Throwable t) { LOG.error("error while responding", t); } finally { if (!success) { LOG.debug("Unable to load requested file {} from classloader", requestPath); throw new NotFoundException("File not found."); } } } } StaticFileServerHandler.checkFileValidity(file, rootPath, LOG); // cache validation final String ifModifiedSince = request.headers().get(IF_MODIFIED_SINCE); if (ifModifiedSince != null && !ifModifiedSince.isEmpty()) { SimpleDateFormat dateFormatter = new SimpleDateFormat(StaticFileServerHandler.HTTP_DATE_FORMAT, Locale.US); Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince); // Only compare up to the second because the datetime format we send to the client // does not have milliseconds long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000; long fileLastModifiedSeconds = file.lastModified() / 1000; if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) { if (LOG.isDebugEnabled()) { LOG.debug( "Responding 'NOT MODIFIED' for file '" + file.getAbsolutePath() + '\''); } StaticFileServerHandler.sendNotModified(ctx); return; } } if (LOG.isDebugEnabled()) { LOG.debug("Responding with file '" + file.getAbsolutePath() + '\''); } // Don't need to close this manually. Netty's DefaultFileRegion will take care of it. final RandomAccessFile raf; try { raf = new RandomAccessFile(file, "r"); } catch (FileNotFoundException e) { if (LOG.isDebugEnabled()) { LOG.debug("Could not find file {}.", file.getAbsolutePath()); } HandlerUtils.sendErrorResponse( ctx, request, new ErrorResponseBody("File not found."), NOT_FOUND, Collections.emptyMap()); return; } try { long fileLength = raf.length(); HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK); StaticFileServerHandler.setContentTypeHeader(response, file); // the job overview should be updated as soon as possible if (!requestPath.equals("/joboverview.json")) { StaticFileServerHandler.setDateAndCacheHeaders(response, file); } if (HttpUtil.isKeepAlive(request)) { response.headers().set(CONNECTION, HttpHeaderValues.KEEP_ALIVE); } HttpUtil.setContentLength(response, fileLength); // write the initial line and the header. ctx.write(response); // write the content. ChannelFuture lastContentFuture; if (ctx.pipeline().get(SslHandler.class) == null) { ctx.write( new DefaultFileRegion(raf.getChannel(), 0, fileLength), ctx.newProgressivePromise()); lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT); } else { lastContentFuture = ctx.writeAndFlush( new HttpChunkedInput(new ChunkedFile(raf, 0, fileLength, 8192)), ctx.newProgressivePromise()); // HttpChunkedInput will write the end marker (LastHttpContent) for us. } // close the connection, if no keep-alive is needed if (!HttpUtil.isKeepAlive(request)) { lastContentFuture.addListener(ChannelFutureListener.CLOSE); } } catch (Exception e) { raf.close(); LOG.error("Failed to serve file.", e); throw new RestHandlerException("Internal server error.", INTERNAL_SERVER_ERROR); } }
@Test void testRespondWithFile(@TempDir Path tmpDir) throws Exception { final Path webDir = Files.createDirectory(tmpDir.resolve("webDir")); final Path uploadDir = Files.createDirectory(tmpDir.resolve("uploadDir")); Router router = new Router() .addGet("/:*", new HistoryServerStaticFileServerHandler(webDir.toFile())); WebFrontendBootstrap webUI = new WebFrontendBootstrap( router, LoggerFactory.getLogger(HistoryServerStaticFileServerHandlerTest.class), uploadDir.toFile(), null, "localhost", 0, new Configuration()); int port = webUI.getServerPort(); try { // verify that 404 message is returned when requesting a non-existent file Tuple2<Integer, String> notFound404 = HttpUtils.getFromHTTP("http://localhost:" + port + "/hello"); assertThat(notFound404.f0).isEqualTo(404); assertThat(notFound404.f1).contains("not found"); // verify that a) a file can be loaded using the ClassLoader and b) that the // HistoryServer // index_hs.html is injected Tuple2<Integer, String> index = HttpUtils.getFromHTTP("http://localhost:" + port + "/index.html"); assertThat(index.f0).isEqualTo(200); assertThat(index.f1).contains("Apache Flink Web Dashboard"); // verify that index.html is appended if the request path ends on '/' Tuple2<Integer, String> index2 = HttpUtils.getFromHTTP("http://localhost:" + port + "/"); assertThat(index2).isEqualTo(index); // verify that a 405 message is returned when requesting a directory Files.createDirectory(webDir.resolve("dir.json")); Tuple2<Integer, String> dirNotFound = HttpUtils.getFromHTTP("http://localhost:" + port + "/dir"); assertThat(dirNotFound.f0).isEqualTo(405); assertThat(dirNotFound.f1).contains("not found"); // verify that a 403 message is returned when requesting a file outside the webDir Files.createFile(tmpDir.resolve("secret")); Tuple2<Integer, String> dirOutsideDirectory = HttpUtils.getFromHTTP("http://localhost:" + port + "/../secret"); assertThat(dirOutsideDirectory.f0).isEqualTo(403); assertThat(dirOutsideDirectory.f1).contains("Forbidden"); } finally { webUI.shutdown(); } }
public static String printDistributed(SubPlan plan, FunctionAndTypeManager functionAndTypeManager, Session session) { List<PlanFragment> fragments = plan.getAllFragments(); Map<PlanFragmentId, PlanFragment> fragmentsById = Maps.uniqueIndex(fragments, PlanFragment::getId); PlanNodeIdGenerator idGenerator = new PlanNodeIdGenerator(); StringBuilder output = new StringBuilder(); output.append("digraph distributed_plan {\n"); printSubPlan(plan, fragmentsById, idGenerator, output, functionAndTypeManager, session); output.append("}\n"); return output.toString(); }
@Test public void testPrintDistributed() { SubPlan tableScanNodeSubPlan = new SubPlan( createTestPlanFragment(0, TEST_TABLE_SCAN_NODE), ImmutableList.of()); SubPlan nestedSubPlan = new SubPlan( createTestPlanFragment(1, TEST_TABLE_SCAN_NODE), ImmutableList.of(tableScanNodeSubPlan)); String actualNestedSubPlan = printDistributed( nestedSubPlan, FUNCTION_AND_TYPE_MANAGER, testSessionBuilder().build()); String expectedNestedSubPlan = join( System.lineSeparator(), "digraph distributed_plan {", "subgraph cluster_1 {", "label = \"SOURCE\"", format("plannode_1[%s];", TEST_TABLE_SCAN_NODE_INNER_OUTPUT), "}", "subgraph cluster_0 {", "label = \"SOURCE\"", format("plannode_1[%s];", TEST_TABLE_SCAN_NODE_INNER_OUTPUT), "}", "}", ""); assertEquals(actualNestedSubPlan, expectedNestedSubPlan); }
@Override public Set<Long> calculateUsers(DelegateExecution execution, String param) { Set<Long> deptIds = StrUtils.splitToLongSet(param); List<DeptRespDTO> depts = deptApi.getDeptList(deptIds); return convertSet(depts, DeptRespDTO::getLeaderUserId); }
@Test public void testCalculateUsers() { // prepare parameters String param = "1,2"; // mock methods DeptRespDTO dept1 = randomPojo(DeptRespDTO.class, o -> o.setLeaderUserId(11L)); DeptRespDTO dept2 = randomPojo(DeptRespDTO.class, o -> o.setLeaderUserId(22L)); when(deptApi.getDeptList(eq(asSet(1L, 2L)))).thenReturn(asList(dept1, dept2)); // invoke Set<Long> results = strategy.calculateUsers(null, param); // assert assertEquals(asSet(11L, 22L), results); }
public static Gson instance() { return SingletonHolder.INSTANCE; }
@Test void rejectsDeserializationOfAESCipherProvider() { final IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> Serialization.instance().fromJson("{}", AESCipherProvider.class)); assertEquals(format("Refusing to deserialize a %s in the JSON stream!", AESCipherProvider.class.getName()), e.getMessage()); }
@VisibleForTesting static ImmutableSet<Port> portMapToSet(@Nullable Map<String, Map<String, String>> portMap) throws BadContainerConfigurationFormatException { if (portMap == null) { return ImmutableSet.of(); } ImmutableSet.Builder<Port> ports = new ImmutableSet.Builder<>(); for (Map.Entry<String, Map<String, String>> entry : portMap.entrySet()) { String port = entry.getKey(); Matcher matcher = PORT_PATTERN.matcher(port); if (!matcher.matches()) { throw new BadContainerConfigurationFormatException( "Invalid port configuration: '" + port + "'."); } int portNumber = Integer.parseInt(matcher.group("portNum")); String protocol = matcher.group("protocol"); ports.add(Port.parseProtocol(portNumber, protocol)); } return ports.build(); }
@Test public void testPortMapToSet() throws BadContainerConfigurationFormatException { ImmutableSortedMap<String, Map<String, String>> input = ImmutableSortedMap.of( "1000", ImmutableMap.of(), "2000/tcp", ImmutableMap.of(), "3000/udp", ImmutableMap.of()); ImmutableSet<Port> expected = ImmutableSet.of(Port.tcp(1000), Port.tcp(2000), Port.udp(3000)); Assert.assertEquals(expected, JsonToImageTranslator.portMapToSet(input)); ImmutableList<Map<String, Map<String, String>>> badInputs = ImmutableList.of( ImmutableMap.of("abc", ImmutableMap.of()), ImmutableMap.of("1000-2000", ImmutableMap.of()), ImmutableMap.of("/udp", ImmutableMap.of()), ImmutableMap.of("123/xxx", ImmutableMap.of())); for (Map<String, Map<String, String>> badInput : badInputs) { try { JsonToImageTranslator.portMapToSet(badInput); Assert.fail(); } catch (BadContainerConfigurationFormatException ignored) { // ignored } } }
public static Object getConstructorArg(Class<?> cl) { if (boolean.class.equals(cl) || Boolean.class.equals(cl)) { return Boolean.FALSE; } if (byte.class.equals(cl) || Byte.class.equals(cl)) { return (byte) 0; } if (short.class.equals(cl) || Short.class.equals(cl)) { return (short) 0; } if (int.class.equals(cl) || Integer.class.equals(cl)) { return 0; } if (long.class.equals(cl) || Long.class.equals(cl)) { return 0L; } if (float.class.equals(cl) || Float.class.equals(cl)) { return (float) 0; } if (double.class.equals(cl) || Double.class.equals(cl)) { return (double) 0; } if (char.class.equals(cl) || Character.class.equals(cl)) { return (char) 0; } return null; }
@Test void testConstructorArg() { Assertions.assertFalse((boolean) JavaBeanSerializeUtil.getConstructorArg(boolean.class)); Assertions.assertFalse((boolean) JavaBeanSerializeUtil.getConstructorArg(Boolean.class)); Assertions.assertEquals((byte) 0, JavaBeanSerializeUtil.getConstructorArg(byte.class)); Assertions.assertEquals((byte) 0, JavaBeanSerializeUtil.getConstructorArg(Byte.class)); Assertions.assertEquals((short) 0, JavaBeanSerializeUtil.getConstructorArg(short.class)); Assertions.assertEquals((short) 0, JavaBeanSerializeUtil.getConstructorArg(Short.class)); Assertions.assertEquals(0, JavaBeanSerializeUtil.getConstructorArg(int.class)); Assertions.assertEquals(0, JavaBeanSerializeUtil.getConstructorArg(Integer.class)); Assertions.assertEquals((long) 0, JavaBeanSerializeUtil.getConstructorArg(long.class)); Assertions.assertEquals((long) 0, JavaBeanSerializeUtil.getConstructorArg(Long.class)); Assertions.assertEquals((float) 0, JavaBeanSerializeUtil.getConstructorArg(float.class)); Assertions.assertEquals((float) 0, JavaBeanSerializeUtil.getConstructorArg(Float.class)); Assertions.assertEquals((double) 0, JavaBeanSerializeUtil.getConstructorArg(double.class)); Assertions.assertEquals((double) 0, JavaBeanSerializeUtil.getConstructorArg(Double.class)); Assertions.assertEquals((char) 0, JavaBeanSerializeUtil.getConstructorArg(char.class)); Assertions.assertEquals(new Character((char) 0), JavaBeanSerializeUtil.getConstructorArg(Character.class)); Assertions.assertNull(JavaBeanSerializeUtil.getConstructorArg(JavaBeanSerializeUtil.class)); }
@Override public MapperResult findDeletedConfig(MapperContext context) { return new MapperResult( "SELECT data_id, group_id, tenant_id,gmt_modified,nid FROM his_config_info WHERE op_type = 'D' AND " + "gmt_modified >= ? and nid > ? order by nid OFFSET 0 ROWS FETCH NEXT ? ROWS ONLY", CollectionUtils.list(context.getWhereParameter(FieldConstant.START_TIME), context.getWhereParameter(FieldConstant.LAST_MAX_ID), context.getWhereParameter(FieldConstant.PAGE_SIZE))); }
@Test void testFindDeletedConfig() { MapperResult mapperResult = historyConfigInfoMapperByDerby.findDeletedConfig(context); assertEquals(mapperResult.getSql(), "SELECT data_id, group_id, tenant_id,gmt_modified,nid FROM his_config_info WHERE op_type = 'D' " + "AND gmt_modified >= ? and nid > ? order by nid OFFSET 0 ROWS FETCH NEXT ? ROWS ONLY"); assertArrayEquals(new Object[] {startTime, lastMaxId, pageSize}, mapperResult.getParamList().toArray()); }
OutputT apply(InputT input) throws UserCodeExecutionException { Optional<UserCodeExecutionException> latestError = Optional.empty(); long waitFor = 0L; while (waitFor != BackOff.STOP) { try { sleepIfNeeded(waitFor); incIfPresent(getCallCounter()); return getThrowableFunction().apply(input); } catch (UserCodeExecutionException e) { if (!e.shouldRepeat()) { throw e; } latestError = Optional.of(e); } catch (InterruptedException ignored) { } try { incIfPresent(getBackoffCounter()); waitFor = getBackOff().nextBackOffMillis(); } catch (IOException e) { throw new UserCodeExecutionException(e); } } throw latestError.orElse( new UserCodeExecutionException("failed to process for input: " + input)); }
@Test public void givenCallerTimeoutErrorsExceedsLimit_emitsIntoFailurePCollection() { PCollectionTuple pct = pipeline .apply(Create.of(1)) .apply( ParDo.of( new DoFnWithRepeaters( new CallerImpl(LIMIT + 1, UserCodeTimeoutException.class), new SetupTeardownImpl(0))) .withOutputTags(OUTPUT_TAG, TupleTagList.of(FAILURE_TAG))); PAssert.that(pct.get(OUTPUT_TAG)).empty(); PAssert.that(pct.get(FAILURE_TAG)).containsInAnyOrder(UserCodeTimeoutException.class.getName()); pipeline.run(); }
@Override public BulkOperationResponse executeBulkOperation(final BulkOperationRequest bulkOperationRequest, final C userContext, final AuditParams params) { if (bulkOperationRequest.entityIds() == null || bulkOperationRequest.entityIds().isEmpty()) { throw new BadRequestException(NO_ENTITY_IDS_ERROR); } List<BulkOperationFailure> capturedFailures = new LinkedList<>(); for (String entityId : bulkOperationRequest.entityIds()) { try { T entityModel = singleEntityOperationExecutor.execute(entityId, userContext); try { if (params != null) { auditEventSender.success(getAuditActor(userContext), params.eventType(), successAuditLogContextCreator.create(entityModel, params.entityClass())); } } catch (Exception auditLogStoreException) { //exception on audit log storing should not result in failure report, as the operation itself is successful LOG.error("Failed to store in the audit log information about successful entity removal via bulk action ", auditLogStoreException); } } catch (Exception ex) { capturedFailures.add(new BulkOperationFailure(entityId, ex.getMessage())); try { if (params != null) { auditEventSender.failure(getAuditActor(userContext), params.eventType(), failureAuditLogContextCreator.create(params.entityIdInPathParam(), entityId)); } } catch (Exception auditLogStoreException) { //exception on audit log storing should not alter the failure report; the original failure has already been captured above LOG.error("Failed to store in the audit log information about failed entity removal via bulk action ", auditLogStoreException); } } } return new BulkOperationResponse( bulkOperationRequest.entityIds().size() - capturedFailures.size(), capturedFailures); }
@Test void returnsProperResponseOnFailedBulkRemoval() throws Exception { mockUserContext(); doThrow(new NotFoundException("!?!?")).when(singleEntityOperationExecutor).execute(any(), eq(context)); final BulkOperationResponse bulkOperationResponse = toTest.executeBulkOperation(new BulkOperationRequest(List.of("no", "good", "ids")), context, params); assertThat(bulkOperationResponse.successfullyPerformed()).isEqualTo(0); assertThat(bulkOperationResponse.failures()) .hasSize(3) .containsExactly( new BulkOperationFailure("no", "!?!?"), new BulkOperationFailure("good", "!?!?"), new BulkOperationFailure("ids", "!?!?") ); verify(singleEntityOperationExecutor).execute("no", context); verify(singleEntityOperationExecutor).execute("good", context); verify(singleEntityOperationExecutor).execute("ids", context); verifyNoMoreInteractions(singleEntityOperationExecutor); verifyNoInteractions(successAuditLogContextCreator); verify(failureAuditLogContextCreator).create(entityIdInPathParam, "no"); verify(failureAuditLogContextCreator).create(entityIdInPathParam, "good"); verify(failureAuditLogContextCreator).create(entityIdInPathParam, "ids"); verifyNoMoreInteractions(failureAuditLogContextCreator); verify(auditEventSender, times(3)).failure(any(), eq(eventType), any()); }
public AuthenticationProvider getAuthenticationProvider() { return authenticationProvider; }
@Test(timeOut = 30000) public void testConnectCommandWithInvalidRoleCombinations() throws Exception { AuthenticationService authenticationService = mock(AuthenticationService.class); AuthenticationProvider authenticationProvider = new MockAuthenticationProvider(); String authMethodName = authenticationProvider.getAuthMethodName(); when(brokerService.getAuthenticationService()).thenReturn(authenticationService); when(authenticationService.getAuthenticationProvider(authMethodName)).thenReturn(authenticationProvider); svcConfig.setAuthenticationEnabled(true); svcConfig.setAuthenticateOriginalAuthData(false); svcConfig.setAuthorizationEnabled(true); svcConfig.setProxyRoles(Collections.singleton("pass.proxy")); // Invalid combinations where authData is proxy role verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.proxy", "pass.proxy"); verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.proxy", ""); verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.proxy", null); // Invalid combinations where original principal is set to a pass.proxy role verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.client", "pass.proxy"); // Invalid combinations where the original principal is set to a non-proxy role verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.client1", "pass.client"); verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.client", "pass.client"); verifyAuthRoleAndOriginalPrincipalBehavior(authMethodName, "pass.client", "pass.client1"); }
@Override public PayloadSerializer getSerializer(Schema schema, Map<String, Object> tableParams) { Class<? extends TBase> thriftClass = getMessageClass(tableParams); TProtocolFactory protocolFactory = getProtocolFactory(tableParams); inferAndVerifySchema(thriftClass, schema); return getPayloadSerializer(schema, protocolFactory, thriftClass); }
@Test public void invalidArgs() { assertThrows( IllegalArgumentException.class, () -> provider.getSerializer(SHUFFLED_SCHEMA, ImmutableMap.of())); assertThrows( IllegalArgumentException.class, () -> provider.getSerializer( SHUFFLED_SCHEMA, ImmutableMap.of("thriftClass", "", "thriftProtocolFactoryClass", ""))); assertThrows( IllegalArgumentException.class, () -> provider.getSerializer( SHUFFLED_SCHEMA, ImmutableMap.of( "thriftClass", "", "thriftProtocolFactoryClass", TCompactProtocol.Factory.class.getName()))); assertThrows( IllegalArgumentException.class, () -> provider.getSerializer( SHUFFLED_SCHEMA, ImmutableMap.of( "thriftClass", TestThriftMessage.class.getName(), "thriftProtocolFactoryClass", ""))); assertThrows( ClassCastException.class, () -> provider.getSerializer( SHUFFLED_SCHEMA, ImmutableMap.of( "thriftClass", ImmutableList.class.getName(), "thriftProtocolFactoryClass", TCompactProtocol.Factory.class.getName()))); assertThrows( ClassCastException.class, () -> provider.getSerializer( SHUFFLED_SCHEMA, ImmutableMap.of( "thriftClass", TestThriftMessage.class.getName(), "thriftProtocolFactoryClass", ImmutableList.class.getName()))); assertThrows( IllegalArgumentException.class, () -> provider.getSerializer( Schema.builder() .addStringField("f_NOTACTUALLYINMESSAGE") .addInt32Field("f_int") .addArrayField("f_double_array", Schema.FieldType.DOUBLE) .addDoubleField("f_double") .addInt64Field("f_long") .build(), ImmutableMap.of( "thriftClass", TestThriftMessage.class.getName(), "thriftProtocolFactoryClass", TCompactProtocol.Factory.class.getName()))); }
public String swVersion() { return get(SW_VERSION, null); }
@Test public void testSetSwVersion() { SW_BDC.swVersion(SW_VERSION_NEW); assertEquals("Incorrect swVersion", SW_VERSION_NEW, SW_BDC.swVersion()); }
@VisibleForTesting public Path getBasePath() { return this.basePath; }
@Test void testBasePath() throws IOException { JobID jobID = JobID.generate(); String rootPath = "/dstl-root-path"; Path oriBasePath = new Path(rootPath); ChangelogStorageMetricGroup metrics = new ChangelogStorageMetricGroup(createUnregisteredTaskManagerJobMetricGroup()); StateChangeFsUploader uploader = new StateChangeFsUploader( jobID, oriBasePath, oriBasePath.getFileSystem(), false, 4096, metrics, TaskChangelogRegistry.NO_OP); assertThat(uploader.getBasePath().getPath()) .isEqualTo( String.format( "%s/%s/%s", rootPath, jobID.toHexString(), StateChangeFsUploader.PATH_SUB_DIR)); }
public void setCancellationContext(CancellationContext cancellationContext) { this.cancellationContext = cancellationContext; }
@Test void setCancellationContext() { CancelableStreamObserver<Object> observer = new CancelableStreamObserver<Object>() { @Override public void onNext(Object data) {} @Override public void onError(Throwable throwable) {} @Override public void onCompleted() {} }; CancellationContext cancellationContext = new CancellationContext(); observer.setCancellationContext(cancellationContext); observer.cancel(new IllegalStateException()); Assertions.assertTrue(cancellationContext.isCancelled()); }
public static ResourceModel processResource(final Class<?> resourceClass) { return processResource(resourceClass, null); }
@Test(expectedExceptions = ResourceConfigException.class)
public void failsOnMissingParamAnnotation() {
    @RestLiCollection(name = "noParamAnnotation")
    class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> {
        @Action(name = "noParamAnnotation")
        public void noParamAnnotation(String someId) {
        }
    }

    RestLiAnnotationReader.processResource(LocalClass.class);
    Assert.fail("processResource should have thrown a ResourceConfigException for the missing parameter annotation");
}
@Override
public void open() throws InterpreterException {
    try {
        SparkConf conf = new SparkConf();
        for (Map.Entry<Object, Object> entry : getProperties().entrySet()) {
            if (!StringUtils.isBlank(entry.getValue().toString())) {
                conf.set(entry.getKey().toString(), entry.getValue().toString());
            }
            // zeppelin.spark.useHiveContext & zeppelin.spark.concurrentSQL are legacy Zeppelin
            // properties; convert them to Spark properties here.
            if (entry.getKey().toString().equals("zeppelin.spark.useHiveContext")) {
                conf.set("spark.useHiveContext", entry.getValue().toString());
            }
            if (entry.getKey().toString().equals("zeppelin.spark.concurrentSQL")
                    && entry.getValue().toString().equals("true")) {
                conf.set(SparkStringConstants.SCHEDULER_MODE_PROP_NAME, "FAIR");
            }
        }
        // Fall back to local mode for embedded Spark when spark.master is not set.
        if (!conf.contains(SparkStringConstants.MASTER_PROP_NAME)) {
            if (conf.contains("master")) {
                conf.set(SparkStringConstants.MASTER_PROP_NAME, conf.get("master"));
            } else {
                String masterEnv = System.getenv(SparkStringConstants.MASTER_ENV_NAME);
                conf.set(SparkStringConstants.MASTER_PROP_NAME,
                        masterEnv == null ? SparkStringConstants.DEFAULT_MASTER_VALUE : masterEnv);
            }
        }
        this.innerInterpreter = loadSparkScalaInterpreter(conf);
        this.innerInterpreter.open();

        sc = this.innerInterpreter.getSparkContext();
        jsc = JavaSparkContext.fromSparkContext(sc);
        sparkVersion = SparkVersion.fromVersionString(sc.version());
        if (enableSupportedVersionCheck && sparkVersion.isUnsupportedVersion()) {
            throw new Exception("This is not an officially supported Spark version: " + sparkVersion
                    + "\nYou can set zeppelin.spark.enableSupportedVersionCheck to false if you really"
                    + " want to try this version of Spark.");
        }
        sqlContext = this.innerInterpreter.getSqlContext();
        sparkSession = this.innerInterpreter.getSparkSession();

        SESSION_NUM.incrementAndGet();
    } catch (Exception e) {
        LOGGER.error("Failed to open SparkInterpreter", e);
        throw new InterpreterException("Failed to open SparkInterpreter", e);
    }
}
@Test
void testDisableSparkUI_1() throws InterpreterException {
    Properties properties = new Properties();
    properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
    properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
    properties.setProperty("zeppelin.spark.maxResult", "100");
    properties.setProperty("spark.ui.enabled", "false");
    // Disable color output for easy testing.
    properties.setProperty("zeppelin.spark.scala.color", "false");
    properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

    interpreter = new SparkInterpreter(properties);
    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
    InterpreterContext.set(getInterpreterContext());
    interpreter.open();

    InterpreterContext context = getInterpreterContext();
    InterpreterResult result = interpreter.interpret("sc.range(1, 10).sum", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    // The Spark job URL must not be sent because the UI is disabled.
    verify(mockRemoteEventClient, never()).onParaInfosReceived(any(Map.class));
}
public static RowCoder of(Schema schema) { return new RowCoder(schema); }
@Test public void testConsistentWithEqualsIterableWithNull() throws Exception { Schema schema = Schema.builder() .addField("a", Schema.FieldType.iterable(Schema.FieldType.INT32.withNullable(true))) .build(); Row row = Row.withSchema(schema).addValue(Arrays.asList(1, null)).build(); CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row); }
@Override public String getScheme() { return "file"; }
@Test public void testStatistics() throws Exception { int fileSchemeCount = 0; for (Statistics stats : FileSystem.getAllStatistics()) { if (stats.getScheme().equals("file")) { fileSchemeCount++; } } assertEquals(1, fileSchemeCount); }
@Override
public void write( int b ) throws IOException {
    write( new byte[] { (byte) b } );
}
@Test
public void testWrite() throws IOException {
    WriterOutputStream stream = new WriterOutputStream( writer );
    stream.write( 68 );                        // single byte
    stream.write( "value".getBytes(), 1, 3 );  // sub-array "alu"
    stream.write( "value".getBytes() );        // full array
    stream.flush();
    stream.close();
    verify( writer ).append( new String( new byte[] { (byte) 68 } ) );
    verify( writer ).append( "alu" );
    verify( writer ).append( "value" );
    verify( writer ).flush();
    verify( writer ).close();
    assertNull( stream.getWriter() );

    // Same write path with an explicit encoding
    writer = mock( Writer.class );
    WriterOutputStream streamWithEncoding = new WriterOutputStream( writer, encoding );
    streamWithEncoding.write( "value".getBytes( encoding ) );
    verify( writer ).append( "value" );
}