focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override public CompletableFuture<AckResult> ackMessage(ProxyContext ctx, ReceiptHandle handle, String messageId, AckMessageRequestHeader requestHeader, long timeoutMillis) { SimpleChannel channel = channelManager.createChannel(ctx); ChannelHandlerContext channelHandlerContext = channel.getChannelHandlerContext(); RemotingCommand command = LocalRemotingCommand.createRequestCommand(RequestCode.ACK_MESSAGE, requestHeader, ctx.getLanguage()); CompletableFuture<RemotingCommand> future = new CompletableFuture<>(); try { RemotingCommand response = brokerController.getAckMessageProcessor() .processRequest(channelHandlerContext, command); future.complete(response); } catch (Exception e) { log.error("Fail to process ackMessage command", e); future.completeExceptionally(e); } return future.thenApply(r -> { AckResult ackResult = new AckResult(); if (ResponseCode.SUCCESS == r.getCode()) { ackResult.setStatus(AckStatus.OK); } else { ackResult.setStatus(AckStatus.NO_EXIST); } return ackResult; }); }
@Test public void testAckMessage() throws Exception { String messageId = "messageId"; long popTime = System.currentTimeMillis(); long invisibleTime = 3000L; int reviveQueueId = 1; ReceiptHandle handle = ReceiptHandle.builder() .startOffset(0L) .retrieveTime(popTime) .invisibleTime(invisibleTime) .reviveQueueId(reviveQueueId) .topicType(ReceiptHandle.NORMAL_TOPIC) .brokerName(brokerName) .queueId(queueId) .offset(queueOffset) .build(); RemotingCommand remotingCommand = RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, null); Mockito.when(ackMessageProcessorMock.processRequest(Mockito.any(SimpleChannelHandlerContext.class), Mockito.argThat(argument -> { boolean first = argument.getCode() == RequestCode.ACK_MESSAGE; boolean second = argument.readCustomHeader() instanceof AckMessageRequestHeader; return first && second; }))).thenReturn(remotingCommand); AckMessageRequestHeader requestHeader = new AckMessageRequestHeader(); CompletableFuture<AckResult> future = localMessageService.ackMessage(proxyContext, handle, messageId, requestHeader, 1000L); AckResult ackResult = future.get(); assertThat(ackResult.getStatus()).isEqualTo(AckStatus.OK); }
public PackageDescr getPackageDescr(final KiePMMLDroolsAST kiePMMLDroolsAST, final String packageName) { return getBaseDescr(kiePMMLDroolsAST, packageName); }
@Test void getPackageDescr() { KiePMMLDroolsAST kiePMMLDroolsAST = new KiePMMLDroolsAST(Collections.emptyList(), Collections.emptyList()); PackageDescr retrieved = droolsModelProvider.getPackageDescr(kiePMMLDroolsAST, PACKAGE_NAME); commonVerifyPackageDescr(retrieved, PACKAGE_NAME); }
@Override public String getType() { return TYPE; }
@Test void testGetType() { assertEquals(httpHealthCheckProcessor.getType(), HealthCheckType.HTTP.name()); }
static void populateOutputFields(final PMML4Result toUpdate, final ProcessingDTO processingDTO) { logger.debug("populateOutputFields {} {}", toUpdate, processingDTO); for (KiePMMLOutputField outputField : processingDTO.getOutputFields()) { Object variableValue = outputField.evaluate(processingDTO); if (variableValue != null) { String variableName = outputField.getName(); toUpdate.addResultVariable(variableName, variableValue); processingDTO.addKiePMMLNameValue(new KiePMMLNameValue(variableName, variableValue)); } } }
@Test void populateTransformedOutputField2() { KiePMMLConstant kiePMMLConstant = new KiePMMLConstant("CONSTANT_NAME", Collections.emptyList(), "String", null); KiePMMLOutputField outputField = KiePMMLOutputField.builder(OUTPUT_NAME, Collections.emptyList()) .withResultFeature(RESULT_FEATURE.TRANSFORMED_VALUE) .withKiePMMLExpression(kiePMMLConstant) .build(); KiePMMLTestingModel kiePMMLModel = testingModelBuilder(outputField).build(); ProcessingDTO processingDTO = buildProcessingDTOWithDefaultNameValues(kiePMMLModel); PMML4Result toUpdate = new PMML4Result(); PostProcess.populateOutputFields(toUpdate, processingDTO); assertThat(toUpdate.getResultVariables()).isNotEmpty(); assertThat(toUpdate.getResultVariables()).containsKey(OUTPUT_NAME); assertThat(toUpdate.getResultVariables().get(OUTPUT_NAME)).isEqualTo(kiePMMLConstant.getValue()); }
@Override public boolean canSerialize(String topic, Target type) { String subject = schemaSubject(topic, type); return getSchemaBySubject(subject).isPresent(); }
@Test void canDeserializeAndCanSerializeReturnsTrueIfSubjectExists() throws Exception { String topic = RandomString.make(10); registryClient.register(topic + "-key", new AvroSchema("\"int\"")); registryClient.register(topic + "-value", new AvroSchema("\"int\"")); assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isTrue(); assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isTrue(); }
public static <E, T, U> Map<T, Map<U, E>> group2Map(Collection<E> collection, Function<E, T> key1, Function<E, U> key2) { return group2Map(collection, key1, key2, false); }
@Test public void testGroup2Map() { Map<Long, Map<Long, Student>> map = CollStreamUtil.group2Map(null, Student::getTermId, Student::getClassId); assertEquals(map, Collections.EMPTY_MAP); List<Student> list = new ArrayList<>(); map = CollStreamUtil.group2Map(list, Student::getTermId, Student::getClassId); assertEquals(map, Collections.EMPTY_MAP); list.add(new Student(1, 1, 1, "张三")); list.add(new Student(1, 2, 1, "李四")); list.add(new Student(2, 2, 1, "王五")); map = CollStreamUtil.group2Map(list, Student::getTermId, Student::getClassId); Map<Long, Map<Long, Student>> compare = new HashMap<>(); Map<Long, Student> map1 = new HashMap<>(); map1.put(1L, new Student(1, 1, 1, "张三")); map1.put(2L, new Student(1, 2, 1, "李四")); compare.put(1L, map1); Map<Long, Student> map2 = new HashMap<>(); map2.put(2L, new Student(2, 2, 1, "王五")); compare.put(2L, map2); assertEquals(compare, map); // 对null友好 Map<Long, Map<Long, Student>> termIdClassIdStudentMap = CollStreamUtil.group2Map(Arrays.asList(null, new Student(2, 2, 1, "王五")), Student::getTermId, Student::getClassId); Map<Long, Map<Long, Student>> termIdClassIdStudentCompareMap = new HashMap<Long, Map<Long, Student>>() {{ put(null, MapUtil.of(null, null)); put(2L, MapUtil.of(2L, new Student(2, 2, 1, "王五"))); }}; assertEquals(termIdClassIdStudentCompareMap, termIdClassIdStudentMap); }
public void isEqualToIgnoringScale(BigDecimal expected) { compareValues(expected); }
@Test public void isEqualToIgnoringScale_bigDecimal() { assertThat(TEN).isEqualToIgnoringScale(TEN); assertThat(TEN).isEqualToIgnoringScale(new BigDecimal(10)); expectFailureWhenTestingThat(TEN).isEqualToIgnoringScale(new BigDecimal(3)); assertFailureKeys("expected", "but was", "(scale is ignored)"); assertFailureValue("expected", "3"); assertFailureValue("but was", "10"); }
static CheckedRunnable decorateCheckedRunnable(Observation observation, CheckedRunnable runnable) { return () -> observation.observeChecked(runnable::run); }
@Test public void shouldDecorateCheckedRunnableAndReturnWithSuccess() throws Throwable { CheckedRunnable timedRunnable = Observations .decorateCheckedRunnable(observation, helloWorldService::sayHelloWorldWithException); timedRunnable.run(); assertThatObservationWasStartedAndFinishedWithoutErrors(); then(helloWorldService).should().sayHelloWorldWithException(); }
@Override public boolean test(Pickle pickle) { if (expressions.isEmpty()) { return true; } List<String> tags = pickle.getTags(); return expressions.stream() .allMatch(expression -> expression.evaluate(tags)); }
@Test void not_tag_predicate_matches_pickle_with_no_tags() { Pickle pickle = createPickleWithTags(); TagPredicate predicate = createPredicate("not @FOO"); assertTrue(predicate.test(pickle)); }
@Override long partitionTimestamp(final TopicPartition partition) { final RecordQueue queue = partitionQueues.get(partition); if (queue == null) { throw new IllegalStateException("Partition " + partition + " not found."); } return queue.partitionTime(); }
@Test public void shouldThrowIllegalStateExceptionUponGetPartitionTimestampIfPartitionUnknown() { final PartitionGroup group = getBasicGroup(); final IllegalStateException exception = assertThrows( IllegalStateException.class, () -> group.partitionTimestamp(unknownPartition)); assertThat(errMessage, equalTo(exception.getMessage())); }
@Override public Future<RestResponse> restRequest(final RestRequest request) { return restRequest(request, new RequestContext()); }
@Test public void testRestWithFailout() throws URISyntaxException, InterruptedException { setupRedirectStrategy(true); sendAndVerifyRestRequest(); ArgumentCaptor<RestRequest> requestArgumentCaptor = ArgumentCaptor.forClass(RestRequest.class); verify(_d2Client, times(1)).restRequest(requestArgumentCaptor.capture(), any(), any()); assertEquals(requestArgumentCaptor.getValue().getURI().toString(), REDIRECTED_URI); }
public static <T> T createDelegatingProxy(Class<T> clazz, final Object delegate) { final Class delegateClass = delegate.getClass(); return (T) Proxy.newProxyInstance( clazz.getClassLoader(), new Class[] {clazz}, (proxy, method, args) -> { try { Method delegateMethod = findDelegateMethod(delegateClass, method.getName(), method.getParameterTypes()); delegateMethod.setAccessible(true); return delegateMethod.invoke(delegate, args); } catch (NoSuchMethodException e) { return PRIMITIVE_RETURN_VALUES.get(method.getReturnType().getName()); } catch (InvocationTargetException e) { // Required to propagate the correct throwable. throw e.getTargetException(); } }); }
@Test public void createDelegatingProxy_defersToDelegate() { DelegatingProxyFixture fixture = ReflectionHelpers.createDelegatingProxy(DelegatingProxyFixture.class, new Delegate()); assertThat(fixture.delegateMethod()).isEqualTo("called"); }
static void format(final JavaInput javaInput, JavaOutput javaOutput, JavaFormatterOptions options) throws FormatterException { Context context = new Context(); DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>(); context.put(DiagnosticListener.class, diagnostics); Options.instance(context).put("allowStringFolding", "false"); Options.instance(context).put("--enable-preview", "true"); JCCompilationUnit unit; JavacFileManager fileManager = new JavacFileManager(context, true, UTF_8); try { fileManager.setLocation(StandardLocation.PLATFORM_CLASS_PATH, ImmutableList.of()); } catch (IOException e) { // impossible throw new IOError(e); } SimpleJavaFileObject source = new SimpleJavaFileObject(URI.create("source"), JavaFileObject.Kind.SOURCE) { @Override public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException { return javaInput.getText(); } }; Log.instance(context).useSource(source); ParserFactory parserFactory = ParserFactory.instance(context); JavacParser parser = parserFactory.newParser( javaInput.getText(), /* keepDocComments= */ true, /* keepEndPos= */ true, /* keepLineMap= */ true); unit = parser.parseCompilationUnit(); unit.sourcefile = source; javaInput.setCompilationUnit(unit); Iterable<Diagnostic<? extends JavaFileObject>> errorDiagnostics = Iterables.filter(diagnostics.getDiagnostics(), Formatter::errorDiagnostic); if (!Iterables.isEmpty(errorDiagnostics)) { throw FormatterException.fromJavacDiagnostics(errorDiagnostics); } OpsBuilder builder = new OpsBuilder(javaInput, javaOutput); // Output the compilation unit. 
JavaInputAstVisitor visitor; if (Runtime.version().feature() >= 21) { visitor = createVisitor( "com.google.googlejavaformat.java.java21.Java21InputAstVisitor", builder, options); } else if (Runtime.version().feature() >= 17) { visitor = createVisitor( "com.google.googlejavaformat.java.java17.Java17InputAstVisitor", builder, options); } else { visitor = new JavaInputAstVisitor(builder, options.indentationMultiplier()); } visitor.scan(unit, null); builder.sync(javaInput.getText().length()); builder.drain(); Doc doc = new DocBuilder().withOps(builder.build()).build(); doc.computeBreaks(javaOutput.getCommentsHelper(), MAX_LINE_LENGTH, new Doc.State(+0, 0)); doc.write(javaOutput); javaOutput.flush(); }
@Test public void testFormatNonJavaFiles() throws Exception { StringWriter out = new StringWriter(); StringWriter err = new StringWriter(); Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in); // should succeed because non-Java files are skipped assertThat(main.format("foo.go")).isEqualTo(0); assertThat(err.toString()).contains("Skipping non-Java file: " + "foo.go"); // format still fails on missing files assertThat(main.format("Foo.java")).isEqualTo(1); assertThat(err.toString()).contains("Foo.java: could not read file: "); }
@Override protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) { return wasmLoader.getWasmExtern(DO_EXECUTE_METHOD_NAME).map(doExecute -> { final Long argumentId = callWASI(exchange, chain, selector, rule, doExecute); return doExecute(exchange, chain, selector, rule, argumentId); }).orElseGet(() -> { LOG.error("{} function not found in {}", DO_EXECUTE_METHOD_NAME, wasmLoader.getWasmName()); exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR); Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.WASM_FUNC_NOT_FOUND); return WebFluxResultUtils.result(exchange, error); }); }
@Test public void executeRuleManyMatch() { List<ConditionData> conditionDataList = Collections.singletonList(conditionData); this.ruleData.setConditionDataList(conditionDataList); this.ruleData.setMatchMode(0); this.ruleData.setMatchRestful(false); this.selectorData.setMatchMode(0); this.selectorData.setLogged(true); this.selectorData.setConditionList(conditionDataList); BaseDataCache.getInstance().cachePluginData(pluginData); BaseDataCache.getInstance().cacheSelectData(selectorData); BaseDataCache.getInstance().cacheRuleData(RuleData.builder() .id("1") .pluginName("SHENYU") .selectorId("1") .enabled(true) .loged(true) .matchMode(0) .matchRestful(false) .conditionDataList(Collections.singletonList(conditionData)) .sort(1).build()); BaseDataCache.getInstance().cacheRuleData(RuleData.builder() .id("2") .pluginName("SHENYU") .selectorId("1") .enabled(true) .loged(true) .matchMode(0) .matchRestful(false) .conditionDataList(Collections.singletonList(conditionData)) .sort(2).build()); StepVerifier.create(testShenyuWasmPlugin.execute(exchange, shenyuPluginChain)).expectSubscription().verifyComplete(); verify(testShenyuWasmPlugin).doExecute(exchange, shenyuPluginChain, selectorData, ruleData); }
public static String getSafeVariableName(String name, String... existingVariableNames) { return getSafeVariableName( name, Arrays.asList( existingVariableNames ) ); }
@Test public void testGetSaveVariableNameWithCollection() { assertThat( Strings.getSafeVariableName( "int[]", new ArrayList<>() ) ).isEqualTo( "intArray" ); assertThat( Strings.getSafeVariableName( "Extends", new ArrayList<>() ) ).isEqualTo( "extends1" ); assertThat( Strings.getSafeVariableName( "prop", Arrays.asList( "prop", "prop1" ) ) ).isEqualTo( "prop2" ); assertThat( Strings.getSafeVariableName( "prop.font", Arrays.asList( "propFont", "propFont_" ) ) ) .isEqualTo( "propFont1" ); assertThat( Strings.getSafeVariableName( "_Test", new ArrayList<>() ) ).isEqualTo( "test" ); assertThat( Strings.getSafeVariableName( "__Test", Arrays.asList( "test" ) ) ).isEqualTo( "test1" ); assertThat( Strings.getSafeVariableName( "___", new ArrayList<>() ) ).isEqualTo( "___" ); assertThat( Strings.getSafeVariableName( "_0Test", new ArrayList<>() ) ).isEqualTo( "test" ); assertThat( Strings.getSafeVariableName( "__0Test", Arrays.asList( "test" ) ) ).isEqualTo( "test1" ); assertThat( Strings.getSafeVariableName( "___0", new ArrayList<>() ) ).isEqualTo( "___0" ); assertThat( Strings.getSafeVariableName( "__0123456789Test", Arrays.asList( "test" ) ) ).isEqualTo( "test1" ); assertThat( Strings.getSafeVariableName( "bad/test", Arrays.asList( "bad_test" ) ) ).isEqualTo( "bad_test1" ); }
@Override public Set<Port> ports() { return ImmutableSet.copyOf(osNetworkStore.ports()); }
@Test public void testGetPorts() { createBasicNetworks(); assertEquals("Number of port did not match", 1, target.ports().size()); }
String getFileName(double lat, double lon) { int lonInt = getMinLonForTile(lon); int latInt = getMinLatForTile(lat); return toLowerCase(getNorthString(latInt) + getPaddedLatString(latInt) + getEastString(lonInt) + getPaddedLonString(lonInt)); }
@Disabled @Test public void testGetEleHorizontalBorder() { // Border between the tiles n42e011 and n42e012 assertEquals("n42e011", instance.getFileName(42.1, 11.999999)); assertEquals(324, instance.getEle(42.1, 11.999999), precision); assertEquals("n42e012", instance.getFileName(42.1, 12.000001)); assertEquals(324, instance.getEle(42.1, 12.000001), precision); }
@Beta public static Application fromBuilder(Builder builder) throws Exception { return builder.build(); }
@Test void search() throws Exception { try ( ApplicationFacade app = new ApplicationFacade(Application.fromBuilder(new Application.Builder().container("default", new Application.Builder.Container() .searcher("foo", MockSearcher.class)))) ) { Result result = app.search("foo", new Query("?query=foo&timeout=20000")); assertEquals(1, result.hits().size()); } }
public static String createStartupShutdownMessage(String classname, String hostname, String[] args) { return toStartupShutdownString("STARTUP_MSG: ", new String[] { "Starting " + classname, " host = " + hostname, " args = " + (args != null ? Arrays.asList(args) : new ArrayList<>()), " version = " + VersionInfo.getVersion(), " classpath = " + System.getProperty("java.class.path"), " build = " + VersionInfo.getUrl() + " -r " + VersionInfo.getRevision() + "; compiled by '" + VersionInfo.getUser() + "' on " + VersionInfo.getDate(), " java = " + System.getProperty("java.version") } ); }
@Test public void testCreateStartupShutdownMessage() { //pass null args and method must still return a string beginning with // "STARTUP_MSG" String msg = StringUtils.createStartupShutdownMessage( this.getClass().getName(), "test.host", null); assertTrue(msg.startsWith("STARTUP_MSG:")); }
@Deprecated @Override public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId) throws ProducerFencedException { Objects.requireNonNull(consumerGroupId); sendOffsetsToTransaction(offsets, new ConsumerGroupMetadata(consumerGroupId)); }
@Test public void shouldThrowOnSendOffsetsToTransactionIfTransactionsNotInitialized() { buildMockProducer(true); assertThrows(IllegalStateException.class, () -> producer.sendOffsetsToTransaction(null, new ConsumerGroupMetadata(groupId))); }
protected String getDumpPath() { final String dumpPath = url.getParameter(DUMP_DIRECTORY); if (StringUtils.isEmpty(dumpPath)) { return USER_HOME; } final File dumpDirectory = new File(dumpPath); if (!dumpDirectory.exists()) { if (dumpDirectory.mkdirs()) { logger.info(format("Dubbo dump directory[%s] created", dumpDirectory.getAbsolutePath())); } else { logger.warn( COMMON_UNEXPECTED_CREATE_DUMP, "", "", format( "Dubbo dump directory[%s] can't be created, use the 'user.home'[%s]", dumpDirectory.getAbsolutePath(), USER_HOME)); return USER_HOME; } } return dumpPath; }
@Test void jStackDumpTest_dumpDirectoryNotExists_cannotBeCreatedTakeUserHome() { final String dumpDirectory = dumpDirectoryCannotBeCreated(); URL url = URL.valueOf("dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory=" + dumpDirectory + "&version=1.0.0&application=morgan&noValue=true"); AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url); Assertions.assertEquals(System.getProperty("user.home"), abortPolicyWithReport.getDumpPath()); }
public static int compareVersion(final String versionA, final String versionB) { final String[] sA = versionA.split("\\."); final String[] sB = versionB.split("\\."); int expectSize = 3; if (sA.length != expectSize || sB.length != expectSize) { throw new IllegalArgumentException("version must be like x.y.z(-beta)"); } int first = Objects.compare(sA[0], sB[0], STRING_COMPARATOR); if (first != 0) { return first; } int second = Objects.compare(sA[1], sB[1], STRING_COMPARATOR); if (second != 0) { return second; } return Objects.compare(sA[2].split("-")[0], sB[2].split("-")[0], STRING_COMPARATOR); }
@Test void testVersionCompareGtWithChar() { assertTrue(VersionUtils.compareVersion("1.2.2-beta", "1.2.1-beta") > 0); }
static String strip(final String line) { return new Parser(line).parse(); }
@Test public void shouldCorrectHandleEscapedSingleQuotes() { // Given: final String line = "'this isn''t a comment -- the first quote isn''t closed' -- comment"; final String line2 = "'''this isn''t a comment -- the first quote isn''t closed' -- comment"; // Then: assertThat(CommentStripper.strip(line), is("'this isn''t a comment -- the first quote isn''t closed'")); assertThat(CommentStripper.strip(line2), is("'''this isn''t a comment -- the first quote isn''t closed'")); }
public static void bindEnvironment(ScriptEngine engine, String requestContent, Map<String, Object> requestContext, StateStore stateStore) { // Build a map of header values. bindEnvironment(engine, requestContent, requestContext, stateStore, null); }
@Test void testRequestContextIsModified() { String script = """ requestContext.foo = "bar"; return mockRequest.requestContent; """; ScriptEngineManager sem = new ScriptEngineManager(); Map<String, Object> context = new HashMap<>(); String body = "content"; try { // Evaluating request with script coming from operation dispatcher rules. ScriptEngine se = sem.getEngineByExtension("groovy"); ScriptEngineBinder.bindEnvironment(se, body, context, null); String result = (String) se.eval(script); assertEquals(body, result); assertTrue(context.containsKey("foo")); assertEquals("bar", context.get("foo")); } catch (Exception e) { fail("Exception should no be thrown"); } }
@Override public LocalAddress localAddress() { return (LocalAddress) super.localAddress(); }
@Test public void testWriteInWritePromiseCompletePreservesOrder() throws InterruptedException { Bootstrap cb = new Bootstrap(); ServerBootstrap sb = new ServerBootstrap(); final CountDownLatch messageLatch = new CountDownLatch(2); final ByteBuf data = Unpooled.buffer(); final ByteBuf data2 = Unpooled.buffer(); data.writeInt(Integer.BYTES).writeInt(2); data2.writeInt(Integer.BYTES).writeInt(1); try { cb.group(group1) .channel(LocalChannel.class) .handler(new TestHandler()); sb.group(group2) .channel(LocalServerChannel.class) .childHandler(new ChannelInboundHandlerAdapter() { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (msg instanceof ByteBuf) { ByteBuf buf = (ByteBuf) msg; while (buf.isReadable()) { int size = buf.readInt(); ByteBuf slice = buf.readRetainedSlice(size); try { if (slice.readInt() == messageLatch.getCount()) { messageLatch.countDown(); } } finally { slice.release(); } } buf.release(); } else { super.channelRead(ctx, msg); } } }); Channel sc = null; Channel cc = null; try { // Start server sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel(); // Connect to the server cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel(); final Channel ccCpy = cc; // Make sure a write operation is executed in the eventloop cc.pipeline().lastContext().executor().execute(new Runnable() { @Override public void run() { ChannelPromise promise = ccCpy.newPromise(); promise.addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { ccCpy.writeAndFlush(data2.retainedDuplicate(), ccCpy.newPromise()); } }); ccCpy.writeAndFlush(data.retainedDuplicate(), promise); } }); assertTrue(messageLatch.await(5, SECONDS)); } finally { closeChannel(cc); closeChannel(sc); } } finally { data.release(); data2.release(); } }
@Override public Throwable getThrowable() { return exception; }
@Test public void getThrowable() throws Exception { assertThat(promise.getThrowable()).isNull(); Throwable exception = new OMSRuntimeException("-1", "Test Error"); promise.setFailure(exception); assertThat(promise.getThrowable()).isEqualTo(exception); }
@Override public Local create(final Path file) { return this.create(new UUIDRandomStringService().random(), file); }
@Test public void testExists() { final DefaultTemporaryFileService service = new DefaultTemporaryFileService(); { final Local f = service.create(new AlphanumericRandomStringService().random()); assertFalse(f.exists()); assertTrue(f.getParent().exists()); assertTrue(f.getParent().getParent().exists()); } { final Path file = new Path("/p/f", EnumSet.of(Path.Type.file)); final Local f = service.create(new AlphanumericRandomStringService().random(), file); assertFalse(f.exists()); assertTrue(f.getParent().exists()); assertTrue(f.getParent().getParent().exists()); } }
public static FieldType toFieldType(SqlTypeNameSpec sqlTypeName) { return toFieldType( Preconditions.checkArgumentNotNull( SqlTypeName.get(sqlTypeName.getTypeName().getSimple()), "Failed to find Calcite type with name '%s'", sqlTypeName.getTypeName().getSimple())); }
@Test public void testFieldTypeNotFound() { RelDataType relDataType = dataTypeFactory.createUnknownType(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Cannot find a matching Beam FieldType for Calcite type: UNKNOWN"); CalciteUtils.toFieldType(relDataType); }
@Override public void write(int b) throws IOException { filePosition++; super.write(b); }
@Test public void testWriteByteArrayWithLengthAndOffset() throws IOException { byte[] arr = new byte[345]; random.nextBytes(arr); writer.write(arr, 10, 300); expectData(Arrays.copyOfRange(arr, 10, 310)); }
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { try { final EueApiClient client = new EueApiClient(session); final UiFsModel response; final String resourceId = fileid.getFileId(file); switch(resourceId) { case EueResourceIdProvider.ROOT: case EueResourceIdProvider.TRASH: response = new ListResourceAliasApi(client).resourceAliasAliasGet(resourceId, null, file.attributes().getETag(), null, null, null, null, Collections.singletonList(OPTION_WIN_32_PROPS), null); break; default: response = new ListResourceApi(client).resourceResourceIdGet(resourceId, null, file.attributes().getETag(), null, null, null, null, Collections.singletonList(OPTION_WIN_32_PROPS), null); break; } switch(response.getUifs().getResourceType()) { case "aliascontainer": case "container": if(file.isFile()) { throw new NotfoundException(file.getAbsolute()); } break; default: if(file.isDirectory()) { throw new NotfoundException(file.getAbsolute()); } break; } final PathAttributes attr = this.toAttributes(response.getUifs(), response.getUiwin32(), EueShareFeature.findShareForResource(session.userShares(), resourceId)); if(client.getResponseHeaders().containsKey(HttpHeaders.ETAG)) { attr.setETag(StringUtils.remove(client.getResponseHeaders().get(HttpHeaders.ETAG).stream().findFirst().orElse(null), '"')); } return attr; } catch(ApiException e) { switch(e.getCode()) { case HttpStatus.SC_NOT_MODIFIED: if(log.isDebugEnabled()) { log.debug(String.format("No changes for file %s with ETag %s", file, file.attributes().getETag())); } return file.attributes(); } throw new EueExceptionMappingService().map("Failure to read attributes of {0}", e, file); } }
@Test public void testFindFeatureForSharedFile() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path file = new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); createFile(fileid, file, RandomUtils.nextBytes(0)); assertTrue(new EueFindFeature(session, fileid).find(file)); final ShareCreationResponseEntry shareCreationResponseEntry = createShare(fileid, file); final String shareName = shareCreationResponseEntry.getEntity().getName(); final EueAttributesFinderFeature feature = new EueAttributesFinderFeature(session, fileid); final PathAttributes attr = feature.find(file, new DisabledListProgressListener()); assertNotNull(attr.getLink()); assertEquals(attr.getLink(), new EueShareUrlProvider(session.getHost(), session.userShares()).toUrl(file).find(DescriptiveUrl.Type.signed)); new EueDeleteFeature(session, fileid).delete(Collections.singletonList(sourceFolder), new DisabledPasswordCallback(), new Delete.DisabledCallback()); }
@Override public Class convert(String value) { if (isBlank(value)) { throw new ParameterException(getErrorString("a blank value", "a class")); } try { return Class.forName(value); } catch (ClassNotFoundException e) { throw new ParameterException(getErrorString(value, "a class")); } }
@Test @SuppressWarnings("unchecked") public void classIsCreatedFromFullyQualifiedClassName() { Class<Annotator> clazz = converter.convert(Annotator.class.getName()); assertThat(clazz, is(equalTo(Annotator.class))); }
@SuppressWarnings({"unchecked", "rawtypes"}) public Collection<DataNode> getDataNodes(final String tableName) { Collection<DataNode> result = getDataNodesByTableName(tableName); if (result.isEmpty()) { return result; } for (Entry<ShardingSphereRule, DataNodeBuilder> entry : dataNodeBuilders.entrySet()) { result = entry.getValue().build(result, entry.getKey()); } return result; }
@Test void assertGetDataNodesForShardingTableWithoutDataNodeContainedRule() { DataNodes dataNodes = new DataNodes(Collections.singletonList(mockDataSourceMapperRule())); Collection<DataNode> actual = dataNodes.getDataNodes("t_order"); assertThat(actual, is(Collections.emptyList())); }
public static String addKeySuffixes(String key, String... suffixes) { String keySuffix = DFSUtilClient.concatSuffixes(suffixes); return DFSUtilClient.addSuffix(key, keySuffix); }
@Test public void testConfModificationFederationAndHa() { final HdfsConfiguration conf = new HdfsConfiguration(); String nsId = "ns1"; String nnId = "nn1"; conf.set(DFS_NAMESERVICES, nsId); conf.set(DFS_NAMESERVICE_ID, nsId); conf.set(DFS_HA_NAMENODES_KEY_PREFIX + "." + nsId, nnId); // Set the nameservice specific keys with nameserviceId in the config key for (String key : NameNode.NAMENODE_SPECIFIC_KEYS) { // Note: value is same as the key conf.set(DFSUtil.addKeySuffixes(key, nsId, nnId), key); } // Initialize generic keys from specific keys NameNode.initializeGenericKeys(conf, nsId, nnId); // Retrieve the keys without nameserviceId and Ensure generic keys are set // to the correct value for (String key : NameNode.NAMENODE_SPECIFIC_KEYS) { assertEquals(key, conf.get(key)); } }
public V computeIfAbsent(K key, Function<? super K, ? extends V> valueFunction) { V value = cache.get(key); if (value == null) { if (cache.size() < capacity) { // use CHM.computeIfAbsent to avoid duplicate calculation of a single key value = cache.computeIfAbsent(key, valueFunction); } else { value = valueFunction.apply(key); } } return value; }
@Test public void when_addingElementsToCacheMultiThreaded_then_minProperSizeAndElements() { int capacity = 20; int elementsToAdd = 100; int threadCount = 10; ConcurrentInitialSetCache<Integer, Integer> cache = new ConcurrentInitialSetCache<>(capacity); Runnable runnable = () -> { for (int i = 0; i < elementsToAdd; i++) { cache.computeIfAbsent(i, Function.identity()); } }; List<Thread> threadList = IntStream.range(0, threadCount) .mapToObj(value -> new Thread(runnable)) .collect(Collectors.toList()); threadList.forEach(Thread::start); threadList.forEach((ConsumerEx<Thread>) Thread::join); assertTrue(cache.cache.size() >= capacity); for (int i = 0; i < capacity; i++) { assertTrue(cache.cache.containsKey(i)); } }
static void toJson(ViewRepresentation representation, JsonGenerator generator) throws IOException { Preconditions.checkArgument(representation != null, "Invalid view representation: null"); switch (representation.type().toLowerCase(Locale.ENGLISH)) { case ViewRepresentation.Type.SQL: SQLViewRepresentationParser.toJson((SQLViewRepresentation) representation, generator); break; default: throw new UnsupportedOperationException( String.format( "Cannot serialize unsupported view representation: %s", representation.type())); } }
// A null representation must be rejected with the exact precondition message.
@Test
public void testNullViewRepresentation() {
  assertThatThrownBy(() -> ViewRepresentationParser.toJson(null))
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Invalid view representation: null");
}
/**
 * Returns the first AWS Lake Formation security mapping matching {@code user}.
 *
 * <p>Lake Formation mappings must use an IAM role: basic credentials are rejected
 * and {@code iamRole} is mandatory.
 *
 * @param user the user name to match against the configured mappings
 * @return the first matching mapping
 * @throws AccessDeniedException if no mapping matches the user
 */
public AWSSecurityMapping getAWSLakeFormationSecurityMapping(String user)
{
    // Idiomatic Optional handling: orElseThrow replaces the isPresent()/get() pair.
    AWSSecurityMapping mapping = awsSecurityMappings.stream()
            .filter(m -> m.matches(user))
            .findFirst()
            .orElseThrow(() -> new AccessDeniedException("No matching AWS Lake Formation Security Mapping"));
    verify(!mapping.getCredentials().isPresent(),
            "Basic AWS Credentials are not supported for AWS Lake Formation Security Mapping");
    verify(mapping.getIamRole().isPresent(),
            "iamRole is mandatory for AWS Lake Formation Security Mapping");
    return mapping;
}
// Loads a valid mapping file and checks per-user and default IAM role resolution.
@Test
public void testValidAWSLakeFormationMapping()
{
    String lakeFormationSecurityMappingConfigPath = this.getClass().getClassLoader()
            .getResource("com.facebook.presto.hive.aws.security/aws-security-mapping-lakeformation-valid.json").getPath();
    AWSSecurityMappings mappings = parseJson(new File(lakeFormationSecurityMappingConfigPath).toPath(), AWSSecurityMappings.class);
    assertEquals(MappingResult.role("arn:aws:iam::123456789101:role/admin_role").getIamRole(),
            mappings.getAWSLakeFormationSecurityMapping(MappingSelector.empty().withUser("admin").getUser()).getIamRole().get());
    assertEquals(MappingResult.role("arn:aws:iam::123456789101:role/analyst_role").getIamRole(),
            mappings.getAWSLakeFormationSecurityMapping(MappingSelector.empty().withUser("analyst").getUser()).getIamRole().get());
    // Unknown users fall through to the catch-all default mapping.
    assertEquals(MappingResult.role("arn:aws:iam::123456789101:role/default_role").getIamRole(),
            mappings.getAWSLakeFormationSecurityMapping(MappingSelector.empty().getUser()).getIamRole().get());
}
/**
 * Convenience overload: wraps the plain url/token in static ValueProviders and
 * delegates to the provider-based {@code write}.
 *
 * @throws NullPointerException if url or token is null
 */
public static Write write(String url, String token) {
  checkNotNull(url, "url is required.");
  checkNotNull(token, "token is required.");
  return write(StaticValueProvider.of(url), StaticValueProvider.of(token));
}
// End-to-end: writes two events with parallelism 2 against a mock HEC server and
// verifies no write errors and at most one POST per parallel writer.
@Test
@Category(NeedsRunner.class)
public void successfulSplunkIOMultiBatchParallelismTest() {
  // Create server expectation for success.
  mockServerListening(200);
  int testPort = mockServerRule.getPort();
  int testParallelism = 2;
  String url = Joiner.on(':').join("http://localhost", testPort);
  String token = "test-token";
  List<SplunkEvent> testEvents =
      ImmutableList.of(
          SplunkEvent.newBuilder()
              .withEvent("test-event-1")
              .withHost("test-host-1")
              .withIndex("test-index-1")
              .withSource("test-source-1")
              .withSourceType("test-source-type-1")
              .withTime(12345L)
              .create(),
          SplunkEvent.newBuilder()
              .withEvent("test-event-2")
              .withHost("test-host-2")
              .withIndex("test-index-2")
              .withSource("test-source-2")
              .withSourceType("test-source-type-2")
              .withTime(12345L)
              .create());
  PCollection<SplunkWriteError> actual =
      pipeline
          .apply("Create Input data", Create.of(testEvents))
          .apply(
              "SplunkIO",
              SplunkIO.write(url, token)
                  .withParallelism(testParallelism)
                  .withBatchCount(testEvents.size()));
  // All successful responses.
  PAssert.that(actual).empty();
  pipeline.run();
  // Server received exactly one POST request per parallelism
  mockServerClient.verify(
      HttpRequest.request(EXPECTED_PATH), VerificationTimes.atMost(testParallelism));
}
/**
 * Computes the result DECIMAL(precision, scale) for a division of
 * DECIMAL(precision1, scale1) by DECIMAL(precision2, scale2).
 *
 * <p>The scale is at least 6 digits; the raw precision/scale pair is then
 * clamped into the supported range by {@code adjustPrecisionScale}.
 * NOTE(review): formula matches the common SQL-engine division rule — confirm
 * against the engine's type-derivation spec before changing.
 */
public static DecimalType findDivisionDecimalType(
        int precision1, int scale1, int precision2, int scale2) {
    int scale = Math.max(6, scale1 + precision2 + 1);
    int precision = precision1 - scale1 + scale2 + scale;
    return adjustPrecisionScale(precision, scale);
}
// Checks both the precision-overflow clamp (38, 6) and the scale adjustment path.
@Test
void testFindDivisionDecimalType() {
    assertThat(LogicalTypeMerging.findDivisionDecimalType(32, 8, 38, 8))
            .hasPrecisionAndScale(38, 6);
    assertThat(LogicalTypeMerging.findDivisionDecimalType(30, 20, 30, 20))
            .hasPrecisionAndScale(38, 8);
}
/**
 * Fits an OLS linear model with default hyper-parameters (empty Properties).
 * Delegates to the full {@code fit(formula, data, properties)} overload.
 */
public static LinearModel fit(Formula formula, DataFrame data) {
    return fit(formula, data, new Properties());
}
// Regression test on the classic Longley dataset: checks fit statistics,
// coefficient t-tests, residuals, and round-trip serialization/prediction.
@Test
public void testLongley() throws Exception {
    System.out.println("Longley");
    LinearModel model = OLS.fit(Longley.formula, Longley.data);
    System.out.println(model);
    assertEquals(12.8440, model.RSS(), 1E-4);
    assertEquals(1.1946, model.error(), 1E-4);
    assertEquals(9, model.df());
    assertEquals(0.9926, model.RSquared(), 1E-4);
    assertEquals(0.9877, model.adjustedRSquared(), 1E-4);
    assertEquals(202.5094, model.ftest(), 1E-4);
    assertEquals(4.42579E-9, model.pvalue(), 1E-14);
    // Rows: intercept then each predictor; columns: estimate, std error, t, p.
    double[][] ttest = {
        {2946.85636, 5647.97658, 0.522, 0.6144},
        {   0.26353,    0.10815, 2.437, 0.0376},
        {   0.03648,    0.03024, 1.206, 0.2585},
        {   0.01116,    0.01545, 0.722, 0.4885},
        {  -1.73703,    0.67382, -2.578, 0.0298},
        {  -1.41880,    2.94460, -0.482, 0.6414},
        {   0.23129,    1.30394,  0.177, 0.8631}
    };
    double[] residuals = {
        -0.6008156,  1.5502732,  0.1032287, -1.2306486,
        -0.3355139,  0.2693345,  0.8776759,  0.1222429,
        -2.0086121, -0.4859826,  1.0663129,  1.2274906,
        -0.3835821,  0.2710215,  0.1978569, -0.6402823
    };
    for (int i = 0; i < ttest.length; i++) {
        for (int j = 0; j < 4; j++) {
            assertEquals(ttest[i][j], model.ttest()[i][j], 1E-3);
        }
    }
    for (int i = 0; i < residuals.length; i++) {
        assertEquals(residuals[i], model.residuals()[i], 1E-4);
    }
    java.nio.file.Path temp = Write.object(model);
    Read.object(temp);
    // Test with data without response variable.
    DataFrame test = Longley.data.drop("deflator");
    model.predict(test);
}
/**
 * Builds a case-insensitive map from column label to 1-based column index.
 *
 * <p>For SELECTs with derived projections, labels come from the statement's
 * projections instead of the driver metadata. Otherwise the metadata columns
 * are scanned in REVERSE order so that, for duplicate labels, the lowest
 * column index wins (the later put overwrites the earlier one).
 */
public static Map<String, Integer> createColumnLabelAndIndexMap(final SQLStatementContext sqlStatementContext,
                                                                final ResultSetMetaData resultSetMetaData) throws SQLException {
    if (sqlStatementContext instanceof SelectStatementContext
            && ((SelectStatementContext) sqlStatementContext).containsDerivedProjections()) {
        return createColumnLabelAndIndexMapWithExpandProjections((SelectStatementContext) sqlStatementContext);
    }
    Map<String, Integer> result = new CaseInsensitiveMap<>(resultSetMetaData.getColumnCount(), 1F);
    for (int columnIndex = resultSetMetaData.getColumnCount(); columnIndex > 0; columnIndex--) {
        result.put(resultSetMetaData.getColumnLabel(columnIndex), columnIndex);
    }
    return result;
}
// With derived projections, the map must come from the projection list
// (metadata is unused — null is passed deliberately).
@Test
void assertCreateColumnLabelAndIndexMapWithSelectWithExpandProjections() throws SQLException {
    SelectStatementContext selectStatementContext = mock(SelectStatementContext.class);
    when(selectStatementContext.containsDerivedProjections()).thenReturn(true);
    List<Projection> projections = new ArrayList<>(2);
    projections.add(new ColumnProjection(null, "col1", null, mock(DatabaseType.class)));
    projections.add(new ColumnProjection(null, "col2", null, mock(DatabaseType.class)));
    when(selectStatementContext.getProjectionsContext()).thenReturn(new ProjectionsContext(0, 0, false, projections));
    Map<String, Integer> expected = new HashMap<>(2, 1F);
    expected.put("col1", 1);
    expected.put("col2", 2);
    Map<String, Integer> actual = ShardingSphereResultSetUtils.createColumnLabelAndIndexMap(selectStatementContext, null);
    assertThat(actual, is(expected));
}
/** Returns the declared return class captured for this method descriptor. */
@Override
public Class<?> getReturnClass() {
    return returnClass;
}
// The fixture method is expected to declare a String return type.
@Test
void getReturnClass() {
    Assertions.assertEquals(String.class, method.getReturnClass());
}
/**
 * Instantiates a plugin class with the thread context classloader temporarily
 * swapped to the plugin's own classloader.
 *
 * @throws ConnectException wrapping any instantiation failure
 */
private <T> T newPlugin(Class<T> klass) {
    // KAFKA-8340: The thread classloader is used during static initialization and must be
    // set to the plugin's classloader during instantiation
    try (LoaderSwap loaderSwap = withClassLoader(klass.getClassLoader())) {
        return Utils.newInstance(klass);
    } catch (Throwable t) {
        throw new ConnectException("Instantiation error", t);
    }
}
// Two instances of the same plugin class must share static state (same
// classloader), i.e. plugins are not isolated from themselves.
@Test
public void shouldShareStaticValuesBetweenSamePlugin() {
    // Plugins are not isolated from other instances of their own class.
    Converter firstPlugin = plugins.newPlugin(
        TestPlugin.ALIASED_STATIC_FIELD.className(),
        new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
        Converter.class
    );
    assertInstanceOf(SamplingTestPlugin.class, firstPlugin, "Cannot collect samples");
    Converter secondPlugin = plugins.newPlugin(
        TestPlugin.ALIASED_STATIC_FIELD.className(),
        new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
        Converter.class
    );
    assertInstanceOf(SamplingTestPlugin.class, secondPlugin, "Cannot collect samples");
    assertSame(
        ((SamplingTestPlugin) firstPlugin).otherSamples(),
        ((SamplingTestPlugin) secondPlugin).otherSamples()
    );
}
/**
 * Reads the value at {@code columnIndex} using the type-specific ResultSet
 * accessor matching the requested class, falling back to {@code getObject}
 * for any type not explicitly listed.
 *
 * <p>Comparisons use {@code ==} on Class objects, so only the exact listed
 * classes (primitives, String, BigDecimal, byte[], java.sql temporal/LOB
 * types, Array) get the specialized accessor.
 */
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
    if (boolean.class == type) {
        return resultSet.getBoolean(columnIndex);
    }
    if (byte.class == type) {
        return resultSet.getByte(columnIndex);
    }
    if (short.class == type) {
        return resultSet.getShort(columnIndex);
    }
    if (int.class == type) {
        return resultSet.getInt(columnIndex);
    }
    if (long.class == type) {
        return resultSet.getLong(columnIndex);
    }
    if (float.class == type) {
        return resultSet.getFloat(columnIndex);
    }
    if (double.class == type) {
        return resultSet.getDouble(columnIndex);
    }
    if (String.class == type) {
        return resultSet.getString(columnIndex);
    }
    if (BigDecimal.class == type) {
        return resultSet.getBigDecimal(columnIndex);
    }
    if (byte[].class == type) {
        return resultSet.getBytes(columnIndex);
    }
    if (Date.class == type) {
        return resultSet.getDate(columnIndex);
    }
    if (Time.class == type) {
        return resultSet.getTime(columnIndex);
    }
    if (Timestamp.class == type) {
        return resultSet.getTimestamp(columnIndex);
    }
    if (Blob.class == type) {
        return resultSet.getBlob(columnIndex);
    }
    if (Clob.class == type) {
        return resultSet.getClob(columnIndex);
    }
    if (Array.class == type) {
        return resultSet.getArray(columnIndex);
    }
    return resultSet.getObject(columnIndex);
}
// Requesting Clob.class must route through ResultSet.getClob.
@Test
void assertGetValueByClob() throws SQLException {
    ResultSet resultSet = mock(ResultSet.class);
    Clob value = mock(Clob.class);
    when(resultSet.getClob(1)).thenReturn(value);
    assertThat(new JDBCStreamQueryResult(resultSet).getValue(1, Clob.class), is(value));
}
/**
 * Opens (creating if necessary) the local Pentaho metastore.
 * Delegates to the overload with {@code allowCreate = true}.
 */
public static IMetaStore openLocalPentahoMetaStore() throws MetaStoreException {
  return MetaStoreConst.openLocalPentahoMetaStore( true );
}
// Exercises creation, the disableMetaStore kill-switch, re-opening an existing
// store, and the allowCreate=false path when the folder is missing.
@Test
public void testOpenLocalPentahoMetaStore() throws Exception {
  MetaStoreConst.disableMetaStore = false;
  File tempDir = Files.createTempDir();
  String tempPath = tempDir.getAbsolutePath();
  System.setProperty( Const.PENTAHO_METASTORE_FOLDER, tempPath );
  String metaFolder = tempPath + File.separator + XmlUtil.META_FOLDER_NAME;
  // Create a metastore
  assertNotNull( MetaStoreConst.openLocalPentahoMetaStore() );
  assertTrue( ( new File( metaFolder ) ).exists() );
  // Check existing while disabling the metastore ( used for tests )
  MetaStoreConst.disableMetaStore = true;
  assertNull( MetaStoreConst.openLocalPentahoMetaStore() );
  // Check existing metastore
  MetaStoreConst.disableMetaStore = false;
  assertNotNull( MetaStoreConst.openLocalPentahoMetaStore( false ) );
  // Try to read a metastore that does not exist with allowCreate = false
  FileUtils.deleteDirectory( new File( metaFolder ) );
  assertNull( MetaStoreConst.openLocalPentahoMetaStore( false ) );
  assertFalse( ( new File( metaFolder ) ).exists() );
}
/**
 * Keeps the per-plugin storage cache in sync with plugin lifecycle events:
 * drop the entry on UNLOAD, (re)load it lazily on CREATED/RESOLVED.
 */
@Override
public void pluginStateChanged(PluginStateEvent event) {
    var pluginState = event.getPluginState();
    String pluginId = event.getPlugin().getPluginId();
    if (pluginState == PluginState.UNLOADED) {
        entries.remove(pluginId);
    } else if (pluginState == PluginState.CREATED || pluginState == PluginState.RESOLVED) {
        // computeIfAbsent: avoid re-reading storage if the entry already exists.
        entries.computeIfAbsent(pluginId, id -> readPluginStorage(event.getPlugin()));
    }
}
// CREATED populates the entry; a subsequent UNLOADED event must evict it.
@Test
void shouldRemoveEntryIfPluginUnloaded() throws FileNotFoundException {
    var pluginWrapper = mockPluginWrapper();
    when(pluginWrapper.getPluginState()).thenReturn(PluginState.CREATED);
    var event = new PluginStateEvent(pluginManager, pluginWrapper, null);
    finder.pluginStateChanged(event);
    var classNames = finder.findClassNames("fake-plugin");
    assertFalse(classNames.isEmpty());
    when(pluginWrapper.getPluginState()).thenReturn(PluginState.UNLOADED);
    event = new PluginStateEvent(pluginManager, pluginWrapper, null);
    finder.pluginStateChanged(event);
    classNames = finder.findClassNames("fake-plugin");
    assertTrue(classNames.isEmpty());
}
/**
 * Asynchronously fetches table names for {@code schema} and hands them to
 * {@code tablesConsumer}. On failure the error is logged and the consumer
 * receives an empty array instead, so callers never hang.
 *
 * <p>NOTE(review): NullPointerException is caught deliberately — the error
 * path is exercised by a test that expects a logged error rather than a
 * propagated exception; confirm before narrowing the catch.
 */
public static void getTables( DatabaseMeta databaseMeta, String schema, Consumer<String[]> tablesConsumer ) {
  executeAction( databaseMeta, database -> {
    try {
      tablesConsumer.accept( database.getTablenames( schema, false ) );
    } catch ( KettleDatabaseException | NullPointerException e ) {
      logError( databaseMeta, e );
      tablesConsumer.accept( new String[ 0 ] );
    }
  } );
}
@Test @SuppressWarnings( "squid:S2699" ) // assertion is implicit. non-timeout validates that an error event was logged public void getTablesError() throws InterruptedException, ExecutionException, TimeoutException { dbMeta.setDatabaseType( "GENERIC" ); AsyncDatabaseAction.getTables( dbMeta, "PUBLIC", completion::complete ); errorLogListener.errorOccurred.get( COMPLETION_TIMEOUT, TimeUnit.MILLISECONDS ); }
/**
 * Returns the first element of {@code opts} whose runtime class is exactly
 * {@code cls}, or {@code null} if none matches.
 *
 * <p>Matching uses {@code ==} on the Class object, so subclasses of
 * {@code cls} are never returned.
 *
 * @param cls  the exact class to look for
 * @param opts the candidate options, scanned in order
 * @return the first option of the requested class, or {@code null}
 * @throws IOException declared for API compatibility with callers
 */
@SuppressWarnings("unchecked")
public static <B, T extends B> T getOption(Class<T> cls, B[] opts) throws IOException {
    // Type parameter renamed from lowercase "base" to conventional "B".
    for (B o : opts) {
        if (o.getClass() == cls) {
            // Safe: o.getClass() == cls implies o is a T.
            return (T) o;
        }
    }
    return null;
}
// getOption must return the FIRST element of each exact class from a mixed array.
@Test
public void testFind() throws Exception {
    Object[] opts = new Object[]{1, "hi", true, "bye", 'x'};
    assertEquals(1, Options.getOption(Integer.class, opts).intValue());
    assertEquals("hi", Options.getOption(String.class, opts));
    assertEquals(true, Options.getOption(Boolean.class, opts).booleanValue());
}
/**
 * Removes the Kubernetes network with the given id.
 *
 * <p>Synchronized so the in-use check and the store removal act as one unit.
 *
 * @throws IllegalArgumentException if {@code networkId} is null or empty
 * @throws IllegalStateException if the network is still in use
 */
@Override
public void removeNetwork(String networkId) {
    checkArgument(!Strings.isNullOrEmpty(networkId), ERR_NULL_NETWORK_ID);
    synchronized (this) {
        if (isNetworkInUse(networkId)) {
            final String error = String.format(MSG_NETWORK, networkId, ERR_IN_USE);
            throw new IllegalStateException(error);
        }
        K8sNetwork network = k8sNetworkStore.removeNetwork(networkId);
        if (network != null) {
            log.info(String.format(MSG_NETWORK, network.name(), MSG_REMOVED));
        }
    }
}
// Null id must be rejected by the precondition check.
@Test(expected = IllegalArgumentException.class)
public void testRemoveNetworkWithNull() {
    target.removeNetwork(null);
}
/**
 * Convenience overload: loads wallet credentials from a keyfile path string.
 * Delegates to the File-based overload.
 */
public static Credentials loadCredentials(String password, String source)
        throws IOException, CipherException {
    return loadCredentials(password, new File(source));
}
// Loads the bundled UTC keyfile by path string and compares to the known credentials.
@Test
public void testLoadCredentialsFromString() throws Exception {
    Credentials credentials =
            WalletUtils.loadCredentials(
                    PASSWORD,
                    WalletUtilsTest.class
                            .getResource(
                                    "/keyfiles/"
                                            + "UTC--2016-11-03T05-55-06."
                                            + "340672473Z--ef678007d18427e6022059dbc264f27507cd1ffc")
                            .getFile());
    assertEquals(credentials, (CREDENTIALS));
}
/** Returns the posting-list entry for {@code key}, or null if absent. */
public Entry getPostingList(long key) {
    return dictionary.get(key);
}
// Postings inserted out of order must come back sorted by doc id after build().
@Test
void requireThatEntriesAreSortedOnId() {
    SimpleIndex.Builder builder = new SimpleIndex.Builder();
    builder.insert(KEY, new Posting(DOC_ID, 10));
    builder.insert(KEY, new Posting(DOC_ID - 1, 20)); // Out of order
    builder.insert(KEY, new Posting(DOC_ID + 1, 30));
    SimpleIndex index = builder.build();
    SimpleIndex.Entry entry = index.getPostingList(KEY);
    assertEquals(3, entry.docIds.length);
    assertEquals(DOC_ID - 1, entry.docIds[0]);
    assertEquals(DOC_ID, entry.docIds[1]);
    assertEquals(DOC_ID + 1, entry.docIds[2]);
}
/**
 * Checks whether the given SMB path exists.
 *
 * <p>The share handle is opened per call and always released in the finally
 * block. Containers (shares) trivially exist once the share opens; files and
 * folders are probed with the matching smbj existence call. A NotfoundException
 * from opening the share maps to "does not exist" rather than an error.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        // The root is always present.
        return true;
    }
    try {
        final SMBSession.DiskShareWrapper share = session.openShare(file);
        try {
            if(new SMBPathContainerService(session).isContainer(file)) {
                return true;
            }
            if(file.isDirectory()) {
                return share.get().folderExists(new SMBPathContainerService(session).getKey(file));
            }
            return share.get().fileExists(new SMBPathContainerService(session).getKey(file));
        }
        catch(SMBRuntimeException e) {
            throw new SMBExceptionMappingService().map("Failure to read attributes of {0}", e, file);
        }
        finally {
            // Always return the share to the session pool.
            session.releaseShare(share);
        }
    }
    catch(NotfoundException e) {
        return false;
    }
}
// The root path must always be reported as existing.
@Test
public void testFindRoot() throws Exception {
    assertTrue(new SMBFindFeature(session).find(new Path("/", EnumSet.of(Path.Type.directory))));
}
/**
 * Converts a SQL row into a key/value map entry by injecting each converted
 * field into the key and value targets, then concluding both.
 *
 * <p>When {@code failOnNulls} is set, a null concluded key or value is an
 * error: it means the statement never wrote to '__key' or 'this'.
 */
public Entry<Object, Object> project(JetSqlRow row) {
    keyTarget.init();
    valueTarget.init();
    for (int i = 0; i < row.getFieldCount(); i++) {
        // Convert each field to the declared column type before injection.
        Object value = getToConverter(types[i]).convert(row.get(i));
        injectors[i].set(value);
    }
    Object key = keyTarget.conclude();
    if (key == null && failOnNulls) {
        throw QueryException.error("Cannot write NULL to '__key' field. " +
                "Note that NULL is used also if your INSERT/SINK command doesn't write to '__key' at all.");
    }
    Object value = valueTarget.conclude();
    if (value == null && failOnNulls) {
        throw QueryException.error("Cannot write NULL to 'this' field. " +
                "Note that NULL is used also if your INSERT/SINK command doesn't write to 'this' at all.");
    }
    return entry(key, value);
}
// With MultiplyingTarget (doubles the injected value), row [1, 2] projects to (2, 4).
@Test
public void test_project() {
    KvProjector projector = new KvProjector(
            new QueryPath[]{QueryPath.KEY_PATH, QueryPath.VALUE_PATH},
            new QueryDataType[]{QueryDataType.INT, QueryDataType.INT},
            new MultiplyingTarget(),
            new MultiplyingTarget(),
            false
    );
    Entry<Object, Object> entry = projector.project(new JetSqlRow(TEST_SS, new Object[]{1, 2}));
    assertThat(entry.getKey()).isEqualTo(2);
    assertThat(entry.getValue()).isEqualTo(4);
}
/** Static factory: builds an immutable feature set from the given features. */
public static SerdeFeatures of(final SerdeFeature... features) {
    return new SerdeFeatures(ImmutableSet.copyOf(features));
}
// An empty feature set has no match for any wrapping feature.
@Test
public void shouldReturnEmptyFromFindAnyOnNoMatch() {
    assertThat(SerdeFeatures.of()
            .findAny(WRAPPING_FEATURES), is(Optional.empty()));
}
/**
 * Validates a Pub/Sub message against the service resource limits and returns
 * its total estimated encoded size (payload + attributes + per-attribute
 * encoding overhead).
 *
 * <p>Checks, in order: payload size, attribute count, attribute key size,
 * attribute value size, then the accumulated total against
 * {@code maxPublishBatchSize}.
 *
 * @throws SizeLimitExceededException if any limit is exceeded
 */
static int validatePubsubMessageSize(PubsubMessage message, int maxPublishBatchSize)
    throws SizeLimitExceededException {
  int payloadSize = message.getPayload().length;
  if (payloadSize > PUBSUB_MESSAGE_DATA_MAX_BYTES) {
    throw new SizeLimitExceededException(
        "Pubsub message data field of length "
            + payloadSize
            + " exceeds maximum of "
            + PUBSUB_MESSAGE_DATA_MAX_BYTES
            + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
  }
  int totalSize = payloadSize;
  @Nullable Map<String, String> attributes = message.getAttributeMap();
  if (attributes != null) {
    if (attributes.size() > PUBSUB_MESSAGE_MAX_ATTRIBUTES) {
      throw new SizeLimitExceededException(
          "Pubsub message contains "
              + attributes.size()
              + " attributes which exceeds the maximum of "
              + PUBSUB_MESSAGE_MAX_ATTRIBUTES
              + ". See https://cloud.google.com/pubsub/quotas#resource_limits");
    }
    // Consider attribute encoding overhead, so it doesn't go over the request limits
    totalSize += attributes.size() * PUBSUB_MESSAGE_ATTRIBUTE_ENCODE_ADDITIONAL_BYTES;
    for (Map.Entry<String, String> attribute : attributes.entrySet()) {
      String key = attribute.getKey();
      int keySize = key.getBytes(StandardCharsets.UTF_8).length;
      if (keySize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES) {
        throw new SizeLimitExceededException(
            "Pubsub message attribute key '"
                + key
                + "' exceeds the maximum of "
                + PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES
                + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
      }
      totalSize += keySize;
      String value = attribute.getValue();
      int valueSize = value.getBytes(StandardCharsets.UTF_8).length;
      if (valueSize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES) {
        throw new SizeLimitExceededException(
            "Pubsub message attribute value for key '"
                + key
                + "' starting with '"
                // Truncate the value in the error message to keep it readable.
                + value.substring(0, Math.min(256, value.length()))
                + "' exceeds the maximum of "
                + PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES
                + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
      }
      totalSize += valueSize;
    }
  }
  if (totalSize > maxPublishBatchSize) {
    throw new SizeLimitExceededException(
        "Pubsub message of length "
            + totalSize
            + " exceeds maximum of "
            + maxPublishBatchSize
            + " bytes, when considering the payload and attributes. "
            + "See https://cloud.google.com/pubsub/quotas#resource_limits");
  }
  return totalSize;
}
// A payload of (10 MiB + 1) bytes must be rejected by the data-size check.
@Test
public void testValidatePubsubMessagePayloadTooLarge() {
  byte[] data = new byte[(10 << 20) + 1];
  PubsubMessage message = new PubsubMessage(data, null);
  assertThrows(
      SizeLimitExceededException.class,
      () ->
          PreparePubsubWriteDoFn.validatePubsubMessageSize(
              message, PUBSUB_MESSAGE_MAX_TOTAL_SIZE));
}
/**
 * Runs the tasks sequentially on the calling thread and returns the result of
 * the first one that completes without throwing.
 *
 * <p>Unlike a real ExecutorService, tasks are NOT run concurrently; each
 * failed task's exception is remembered and the last one becomes the cause of
 * the final ExecutionException when every task fails.
 *
 * @throws ExecutionException if no task finished successfully
 */
@Override
@Nonnull
public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
        throws ExecutionException {
    throwRejectedExecutionExceptionIfShutdown();
    Exception exception = null;
    for (Callable<T> task : tasks) {
        try {
            return task.call();
        } catch (Exception e) {
            // try next task
            exception = e;
        }
    }
    throw new ExecutionException("No tasks finished successfully.", exception);
}
// invokeAny runs on the calling thread, so the future must complete with it.
@Test
void testInvokeAnyWithNoopShutdown() {
    final CompletableFuture<Thread> future = new CompletableFuture<>();
    testWithNoopShutdown(
            testInstance -> testInstance.invokeAny(callableCollectionFromFuture(future)));
    assertThat(future).isCompletedWithValue(Thread.currentThread());
}
/**
 * Applies the given spec filter to an OpenAPI document and returns a filtered
 * deep-ish clone. Paths, webhooks, operations, tags, and component schemas are
 * filtered; unreferenced definitions are optionally pruned.
 *
 * @return the filtered clone, or null if the filter rejects the whole document
 */
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params, Map<String, String> cookies, Map<String, List<String>> headers) {
    OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers);
    if (filteredOpenAPI == null) {
        return filteredOpenAPI;
    }

    OpenAPI clone = new OpenAPI();
    clone.info(filteredOpenAPI.getInfo());
    clone.openapi(filteredOpenAPI.getOpenapi());
    clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect());
    clone.setSpecVersion(filteredOpenAPI.getSpecVersion());
    clone.setExtensions(filteredOpenAPI.getExtensions());
    clone.setExternalDocs(filteredOpenAPI.getExternalDocs());
    clone.setSecurity(filteredOpenAPI.getSecurity());
    clone.setServers(filteredOpenAPI.getServers());
    // NOTE(review): tags are intentionally copied from the original document when
    // the filtered document has any; unreferenced tags are pruned further below.
    clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(openAPI.getTags()));

    final Set<String> allowedTags = new HashSet<>();
    final Set<String> filteredTags = new HashSet<>();
    Paths clonedPaths = new Paths();
    if (filteredOpenAPI.getPaths() != null) {
        for (String resourcePath : filteredOpenAPI.getPaths().keySet()) {
            PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clonedPaths.addPathItem(resourcePath, clonedPathItem);
                }
            }
        }
        clone.paths(clonedPaths);
    }

    // Drop tags that only appeared on filtered-out operations.
    filteredTags.removeAll(allowedTags);
    final List<Tag> tags = clone.getTags();
    if (tags != null && !filteredTags.isEmpty()) {
        tags.removeIf(tag -> filteredTags.contains(tag.getName()));
        if (clone.getTags().isEmpty()) {
            clone.setTags(null);
        }
    }

    if (filteredOpenAPI.getWebhooks() != null) {
        for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) {
            // BUG FIX: the original looked up webhook path items in getPaths(),
            // which either NPEs (no paths) or returns null/wrong items.
            PathItem pathItem = filteredOpenAPI.getWebhooks().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null) {
                if (!clonedPathItem.readOperations().isEmpty()) {
                    clone.addWebhooks(resourcePath, clonedPathItem);
                }
            }
        }
    }

    if (filteredOpenAPI.getComponents() != null) {
        clone.components(new Components());
        clone.getComponents().setSchemas(filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers));
        clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes());
        clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks());
        clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples());
        clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions());
        clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders());
        clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks());
        clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters());
        clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies());
        clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses());
        clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems());
    }

    if (filter.isRemovingUnreferencedDefinitions()) {
        clone = removeBrokenReferenceDefinitions(clone);
    }
    return clone;
}
// After filtering with NoGetOperationsFilter no path may still expose a GET.
@Test(description = "it should filter away get operations in a resource")
public void filterAwayGetOperations() throws IOException {
    final OpenAPI openAPI = getOpenAPI(RESOURCE_PATH);
    final NoGetOperationsFilter filter = new NoGetOperationsFilter();
    final OpenAPI filtered = new SpecFilter().filter(openAPI, filter, null, null, null);
    if (filtered.getPaths() != null) {
        for (Map.Entry<String, PathItem> entry : filtered.getPaths().entrySet()) {
            assertNull(entry.getValue().getGet());
        }
    } else {
        fail("paths should not be null");
    }
}
/**
 * Marks this buffer as consumed by the given consumer and retains the
 * underlying buffer; the reference is released when the downstream finishes.
 *
 * @throws IllegalStateException if the buffer was released or the consumer
 *         already consumed it
 */
public void consumed(HsConsumerId consumerId) {
    checkState(!released, "Buffer is already released.");
    checkState(consumed.add(consumerId), "Consume buffer repeatedly is unexpected.");
    // increase ref count when buffer is consumed, will be decreased when downstream finish
    // consuming.
    buffer.retainBuffer();
}
// Consuming twice with the same consumer id must fail fast.
@Test
void testBufferConsumedRepeatedly() {
    final HsConsumerId consumerId = HsConsumerId.DEFAULT;
    bufferContext.consumed(consumerId);
    assertThatThrownBy(() -> bufferContext.consumed(consumerId))
            .isInstanceOf(IllegalStateException.class)
            .hasMessageContaining("Consume buffer repeatedly is unexpected.");
}
/** Formats an area id into its full name path, joining levels with a space. */
public static String format(Integer id) {
    return format(id, " ");
}
// Covers a three-level district id and two top-level (country) ids.
@Test
public void testFormat() {
    assertEquals(AreaUtils.format(110105), "北京市 北京市 朝阳区");
    assertEquals(AreaUtils.format(1), "中国");
    assertEquals(AreaUtils.format(2), "蒙古");
}
/**
 * Maps an Iceberg namespace onto a Snowflake identifier: root (0 levels),
 * database (1 level), or schema (2 levels).
 *
 * @throws IllegalArgumentException if the namespace is deeper than Snowflake's
 *         two-level database.schema hierarchy
 */
public static SnowflakeIdentifier toSnowflakeIdentifier(Namespace namespace) {
    switch (namespace.length()) {
      case NAMESPACE_ROOT_LEVEL:
        return SnowflakeIdentifier.ofRoot();
      case NAMESPACE_DB_LEVEL:
        return SnowflakeIdentifier.ofDatabase(namespace.level(NAMESPACE_DB_LEVEL - 1));
      case NAMESPACE_SCHEMA_LEVEL:
        return SnowflakeIdentifier.ofSchema(
            namespace.level(NAMESPACE_DB_LEVEL - 1), namespace.level(NAMESPACE_SCHEMA_LEVEL - 1));
      default:
        throw new IllegalArgumentException(
            String.format(
                "Snowflake max namespace level is %d, got namespace '%s'",
                MAX_NAMESPACE_DEPTH, namespace));
    }
}
// A three-level namespace exceeds Snowflake's database.schema depth and must fail.
@Test
public void testToSnowflakeIdentifierMaxNamespaceLevel() {
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(
            () ->
                NamespaceHelpers.toSnowflakeIdentifier(
                    Namespace.of("DB1", "SCHEMA1", "THIRD_NS_LVL")))
        .withMessageContaining("max namespace level");
}
/** Resolves a Java reflection Type to a ParamType via the default arg-type mapping. */
public static ParamType getSchemaFromType(final Type type) {
    return getSchemaFromType(type, JAVA_TO_ARG_TYPE);
}
// The primitive boolean class maps to the BOOLEAN param type.
@Test
public void shouldGetBooleanSchemaForBooleanPrimitiveClass() {
    assertThat(
        UdfUtil.getSchemaFromType(boolean.class),
        equalTo(ParamTypes.BOOLEAN)
    );
}
/**
 * Loads user details reactively: resolves the user, attaches its roles plus
 * the implicit authenticated/anonymous roles, and wraps everything in a
 * HaloUser carrying two-factor-auth state.
 *
 * <p>A missing user is surfaced as BadCredentialsException so callers cannot
 * distinguish unknown users from wrong passwords. 2FA is reported enabled
 * only when not disabled globally AND available for the user.
 */
@Override
public Mono<UserDetails> findByUsername(String username) {
    return userService.getUser(username)
        .onErrorMap(UserNotFoundException.class,
            e -> new BadCredentialsException("Invalid Credentials"))
        .flatMap(user -> {
            var name = user.getMetadata().getName();
            var userBuilder = User.withUsername(name)
                .password(user.getSpec().getPassword())
                .disabled(requireNonNullElse(user.getSpec().getDisabled(), false));
            var setAuthorities = roleService.getRolesByUsername(name)
                // every authenticated user should have authenticated and anonymous roles.
                .concatWithValues(AUTHENTICATED_ROLE_NAME, ANONYMOUS_ROLE_NAME)
                .map(roleName -> new SimpleGrantedAuthority(ROLE_PREFIX + roleName))
                .distinct()
                .collectList()
                .doOnNext(userBuilder::authorities);
            return setAuthorities.then(Mono.fromSupplier(() -> {
                var twoFactorAuthSettings = TwoFactorUtils.getTwoFactorAuthSettings(user);
                return new HaloUser.Builder(userBuilder.build())
                    .twoFactorAuthEnabled(
                        (!twoFactorAuthDisabled) && twoFactorAuthSettings.isAvailable()
                    )
                    .totpEncryptedSecret(user.getSpec().getTotpEncryptedSecret())
                    .build();
            }));
        });
}
// Global 2FA kill-switch must win over a user's per-account 2FA setting.
@Test
void shouldFindHaloUserDetailsWith2faDisabledWhen2faDisabledGlobally() {
    userDetailService.setTwoFactorAuthDisabled(true);
    var fakeUser = createFakeUser();
    fakeUser.getSpec().setTwoFactorAuthEnabled(true);
    fakeUser.getSpec().setTotpEncryptedSecret("fake-totp-encrypted-secret");
    when(userService.getUser("faker")).thenReturn(Mono.just(fakeUser));
    when(roleService.getRolesByUsername("faker")).thenReturn(Flux.empty());
    userDetailService.findByUsername("faker")
        .as(StepVerifier::create)
        .assertNext(userDetails -> {
            assertInstanceOf(HaloUserDetails.class, userDetails);
            assertFalse(((HaloUserDetails) userDetails).isTwoFactorAuthEnabled());
        })
        .verifyComplete();
}
/**
 * Fetches the GitLab project with the given id, visible to the PAT with at
 * least Reporter access (min_access_level=20).
 *
 * <p>The id_after/id_before window brackets exactly the target id, so the
 * listing endpoint returns at most that one project.
 *
 * @return the project, or empty if the PAT cannot see it
 * @throws IllegalArgumentException on a non-JSON response body
 * @throws IllegalStateException on transport failure
 */
public Optional<Project> getReporterLevelAccessProject(String gitlabUrl, String pat, Long gitlabProjectId) {
    String url = format("%s/projects?min_access_level=20&id_after=%s&id_before=%s", gitlabUrl, gitlabProjectId - 1, gitlabProjectId + 1);
    LOG.debug("get project : [{}]", url);
    Request request = new Request.Builder()
        .addHeader(PRIVATE_TOKEN, pat)
        .get()
        .url(url)
        .build();
    try (Response response = client.newCall(request).execute()) {
        checkResponseIsSuccessful(response);
        // NOTE(review): response.body() is assumed non-null after the success
        // check — confirm checkResponseIsSuccessful guarantees this.
        String body = response.body().string();
        LOG.trace("loading project payload result : [{}]", body);
        List<Project> projects = Project.parseJsonArray(body);
        if (projects.isEmpty()) {
            return Optional.empty();
        } else {
            return Optional.of(projects.get(0));
        }
    } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException("Could not parse GitLab answer to retrieve a project. Got a non-json payload as result.");
    } catch (IOException e) {
        logException(url, e);
        throw new IllegalStateException(e.getMessage(), e);
    }
}
// Verifies the request URL (min_access_level + bracketing ids) and method.
@Test
public void get_reporter_level_access_project() throws InterruptedException {
    MockResponse projectResponse = new MockResponse()
        .setResponseCode(200)
        .setBody("[{"
            + "  \"id\": 1234,"
            + "  \"name\": \"SonarQube example 2\","
            + "  \"name_with_namespace\": \"SonarSource / SonarQube / SonarQube example 2\","
            + "  \"path\": \"sonarqube-example-2\","
            + "  \"path_with_namespace\": \"sonarsource/sonarqube/sonarqube-example-2\","
            + "  \"web_url\": \"https://example.gitlab.com/sonarsource/sonarqube/sonarqube-example-2\""
            + "}]");
    server.enqueue(projectResponse);
    Optional<Project> project = underTest.getReporterLevelAccessProject(gitlabUrl, "pat", 1234L);
    RecordedRequest projectGitlabRequest = server.takeRequest(10, TimeUnit.SECONDS);
    String gitlabUrlCall = projectGitlabRequest.getRequestUrl().toString();
    assertThat(project).isNotNull();
    assertThat(gitlabUrlCall).isEqualTo(
        server.url("") + "projects?min_access_level=20&id_after=1233&id_before=1235");
    assertThat(projectGitlabRequest.getMethod()).isEqualTo("GET");
}
/**
 * UDF: keeps the map entries for which the lambda returns true.
 *
 * <p>Returns null when either argument is null. NOTE(review):
 * Collectors.toMap rejects null values, so entries with null values in the
 * input would throw — presumably the surrounding engine never passes them.
 */
@Udf(description = "When filtering a map, "
    + "the function provided must have a boolean result. "
    + "For each map entry, the function will be applied to the "
    + "key and value arguments in that order. The filtered map is returned."
)
public <K, V> Map<K, V> filterMap(
    @UdfParameter(description = "The map") final Map<K, V> map,
    @UdfParameter(description = "The lambda function") final BiFunction<K, V, Boolean> biFunction
) {
    if (map == null || biFunction == null) {
        return null;
    }
    return map.entrySet()
        .stream()
        .filter(e -> biFunction.apply(e.getKey(), e.getValue()))
        .collect(Collectors.toMap(Entry::getKey, Entry::getValue));
}
// Covers an empty map, an Integer map, and a String map against two predicates.
@Test
public void shouldReturnFilteredMap() {
    final Map<Integer, Integer> m1 = new HashMap<>();
    assertThat(udf.filterMap(m1, biFunction1()), is(Collections.emptyMap()));
    m1.put(5, 4);
    m1.put(6, 18);
    m1.put(77, 45);
    assertThat(udf.filterMap(m1, biFunction1()), is(Stream.of(new Object[][] {
        { 6, 18 },
    }).collect(Collectors.toMap(data -> (Integer) data[0], data -> (Integer) data[1]))));
    final Map<String, String> m2 = new HashMap<>();
    m2.put("yes", "no");
    m2.put("okey", "yeah");
    m2.put("ab", "cd");
    assertThat(udf.filterMap(m2, biFunction2()), is(Stream.of(new Object[][] {
        { "okey", "yeah" },
    }).collect(Collectors.toMap(data -> (String) data[0], data -> (String) data[1]))));
}
/**
 * Lazily flattens an iterator of iterators into a single iterator, advancing
 * to the next inner iterator only when the current one is exhausted.
 */
@VisibleForTesting
static <T> Iterator<T> flatten(final Iterator<Iterator<T>> toFlatten) {
    return new AbstractIterator<T>() {
        // The inner iterator currently being drained; null means fetch the next one.
        @Nullable Iterator<T> current = null;

        @Override
        protected T computeNext() {
            while (true) {
                if (current == null) {
                    if (toFlatten.hasNext()) {
                        current = toFlatten.next();
                    } else {
                        return endOfData();
                    }
                }
                if (current.hasNext()) {
                    return current.next();
                }
                // Exhausted: drop it and loop to fetch the next inner iterator.
                current = null;
            }
        }
    };
}
// Two inner lists must flatten to one sequence preserving order.
@Test
public void testIteratorFlatten() {
    List<Integer> first = Arrays.asList(1, 2, 3);
    List<Integer> second = Arrays.asList(4, 5, 6);
    List<Integer> result = new ArrayList<>();
    Iterators.addAll(
        result,
        TransformTranslator.flatten(Arrays.asList(first.iterator(), second.iterator()).iterator()));
    assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6), result);
}
/** Returns whether the candidate matches the project-key pattern (VALID_PROJECT_KEY_REGEXP). */
public static boolean isValidProjectKey(String keyCandidate) {
    return VALID_PROJECT_KEY_REGEXP.matcher(keyCandidate).matches();
}
// Valid project keys: plain letters, and letters/digits separated by underscores.
@Test
public void valid_project_key() {
    assertThat(ComponentKeys.isValidProjectKey("abc")).isTrue();
    assertThat(ComponentKeys.isValidProjectKey("ab_12")).isTrue();
}
/**
 * Validates an index-set configuration and returns the first violation found,
 * or empty when the configuration is acceptable. Checks run in order:
 * prefix (create only), field-mapping changes, then simple config checks.
 */
public Optional<Violation> validate(IndexSetConfig newConfig) {
    // Don't validate prefix conflicts in case of an update (an existing id means update)
    if (Strings.isNullOrEmpty(newConfig.id())) {
        final Violation prefixViolation = validatePrefix(newConfig);
        if (prefixViolation != null) {
            return Optional.of(prefixViolation);
        }
    }
    final Violation fieldMappingViolation = validateMappingChangesAreLegal(newConfig);
    if (fieldMappingViolation != null) {
        return Optional.of(fieldMappingViolation);
    }
    Violation refreshIntervalViolation = validateSimpleIndexSetConfig(newConfig);
    if (refreshIntervalViolation != null) {
        return Optional.of(refreshIntervalViolation);
    }
    return Optional.empty();
}
// Each rotation/retention strategy field must be non-null when data tiers are
// not configured; nulling any one of them must yield the matching violation.
@Test
public void testStrategiesPresentIfDataTiersIsNull() {
    final IndexSet indexSet = mock(IndexSet.class);
    when(indexSet.getIndexPrefix()).thenReturn("foo");
    when(indexSetRegistry.iterator()).thenReturn(Collections.singleton(indexSet).iterator());
    assertThat(validator.validate(testIndexSetConfig().toBuilder().retentionStrategyConfig(null).build())).hasValueSatisfying(v ->
        assertThat(v.message()).contains("retention_strategy cannot be null")
    );
    assertThat(validator.validate(testIndexSetConfig().toBuilder().retentionStrategyClass(null).build())).hasValueSatisfying(v ->
        assertThat(v.message()).contains("retention_strategy_class cannot be null")
    );
    assertThat(validator.validate(testIndexSetConfig().toBuilder().rotationStrategyConfig(null).build())).hasValueSatisfying(v ->
        assertThat(v.message()).contains("rotation_strategy cannot be null")
    );
    assertThat(validator.validate(testIndexSetConfig().toBuilder().rotationStrategyClass(null).build())).hasValueSatisfying(v ->
        assertThat(v.message()).contains("rotation_strategy_class cannot be null")
    );
}
/**
 * Returns server metrics. With onlyStatus=true (the default) only the server
 * status is filled in; otherwise client counts, monitor gauges, and host
 * CPU/load/memory figures are included as well.
 */
@GetMapping("/metrics")
public Result<MetricsInfoVo> metrics(
        @RequestParam(value = "onlyStatus", required = false, defaultValue = "true") Boolean onlyStatus) {
    MetricsInfoVo metricsInfoVo = new MetricsInfoVo();
    metricsInfoVo.setStatus(serverStatusManager.getServerStatus().name());
    if (onlyStatus) {
        return Result.success(metricsInfoVo); // cheap path: skip client iteration entirely
    }

    // Classify every known client id: ip:port ids contain the delimiter
    // (persistent vs. ephemeral by suffix); everything else is connection-based.
    int connectionBasedClient = 0;
    int ephemeralIpPortClient = 0;
    int persistentIpPortClient = 0;
    int responsibleClientCount = 0;
    Collection<String> allClientId = clientManager.allClientId();
    for (String clientId : allClientId) {
        if (clientId.contains(IpPortBasedClient.ID_DELIMITER)) {
            if (clientId.endsWith(ClientConstants.PERSISTENT_SUFFIX)) {
                persistentIpPortClient += 1;
            } else {
                ephemeralIpPortClient += 1;
            }
        } else {
            connectionBasedClient += 1;
        }
        if (clientManager.isResponsibleClient(clientManager.getClient(clientId))) {
            responsibleClientCount += 1;
        }
    }

    metricsInfoVo.setServiceCount(MetricsMonitor.getDomCountMonitor().get());
    metricsInfoVo.setInstanceCount(MetricsMonitor.getIpCountMonitor().get());
    metricsInfoVo.setSubscribeCount(MetricsMonitor.getSubscriberCount().get());
    metricsInfoVo.setClientCount(allClientId.size());
    metricsInfoVo.setConnectionBasedClientCount(connectionBasedClient);
    metricsInfoVo.setEphemeralIpPortClientCount(ephemeralIpPortClient);
    metricsInfoVo.setPersistentIpPortClientCount(persistentIpPortClient);
    metricsInfoVo.setResponsibleClientCount(responsibleClientCount);
    // Host-level figures come from the runtime environment, not the registry.
    metricsInfoVo.setCpu(EnvUtil.getCpu());
    metricsInfoVo.setLoad(EnvUtil.getLoad());
    metricsInfoVo.setMem(EnvUtil.getMem());
    return Result.success(metricsInfoVo);
}
// Full-metrics path: three client ids — one connection-based, one persistent
// ip:port (#true suffix), one ephemeral — and every client counted responsible.
@Test
void testMetrics() {
    Mockito.when(serverStatusManager.getServerStatus()).thenReturn(ServerStatus.UP);
    Collection<String> clients = new HashSet<>();
    clients.add("1628132208793_127.0.0.1_8080");
    clients.add("127.0.0.1:8081#true");
    clients.add("127.0.0.1:8082#false");
    Mockito.when(clientManager.allClientId()).thenReturn(clients);
    Mockito.when(clientManager.isResponsibleClient(null)).thenReturn(Boolean.TRUE);
    Result<MetricsInfoVo> result = operatorControllerV2.metrics(false);
    assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
    MetricsInfoVo metricsInfoVo = result.getData();
    assertEquals(ServerStatus.UP.toString(), metricsInfoVo.getStatus());
    assertEquals(3, metricsInfoVo.getClientCount().intValue());
    assertEquals(1, metricsInfoVo.getConnectionBasedClientCount().intValue());
    assertEquals(1, metricsInfoVo.getEphemeralIpPortClientCount().intValue());
    assertEquals(1, metricsInfoVo.getPersistentIpPortClientCount().intValue());
    assertEquals(3, metricsInfoVo.getResponsibleClientCount().intValue());
}
/**
 * Serializes one logging event as a single JSON object followed by a line
 * separator, encoded as UTF-8. Each "with*" flag toggles whether the
 * corresponding attribute is emitted; attribute order is fixed. Attributes
 * append their own trailing VALUE_SEPARATOR (the append* helpers are assumed
 * to follow the same protocol), so the emission order below is significant.
 */
@Override
public byte[] encode(ILoggingEvent event) {
    // Pre-size the builder: events carrying a throwable need much more room.
    final int initialCapacity = event.getThrowableProxy() == null ? DEFAULT_SIZE : DEFAULT_SIZE_WITH_THROWABLE;

    StringBuilder sb = new StringBuilder(initialCapacity);
    sb.append(OPEN_OBJ);

    if (withSequenceNumber) {
        appenderMemberWithLongValue(sb, SEQUENCE_NUMBER_ATTR_NAME, event.getSequenceNumber());
        sb.append(VALUE_SEPARATOR);
    }

    if (withTimestamp) {
        appenderMemberWithLongValue(sb, TIMESTAMP_ATTR_NAME, event.getTimeStamp());
        sb.append(VALUE_SEPARATOR);
    }

    if (withNanoseconds) {
        appenderMemberWithLongValue(sb, NANOSECONDS_ATTR_NAME, event.getNanoseconds());
        sb.append(VALUE_SEPARATOR);
    }

    if (withLevel) {
        // A null level is rendered as the literal NULL_STR rather than omitted.
        String levelStr = event.getLevel() != null ? event.getLevel().levelStr : NULL_STR;
        appenderMember(sb, LEVEL_ATTR_NAME, levelStr);
        sb.append(VALUE_SEPARATOR);
    }

    if (withThreadName) {
        // Thread names can contain arbitrary characters, so they are escaped.
        appenderMember(sb, THREAD_NAME_ATTR_NAME, jsonEscape(event.getThreadName()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withLoggerName) {
        appenderMember(sb, LOGGER_ATTR_NAME, event.getLoggerName());
        sb.append(VALUE_SEPARATOR);
    }

    if (withContext) {
        appendLoggerContext(sb, event.getLoggerContextVO());
        sb.append(VALUE_SEPARATOR);
    }

    if (withMarkers)
        appendMarkers(sb, event);

    if (withMDC)
        appendMDC(sb, event);

    if (withKVPList)
        appendKeyValuePairs(sb, event);

    if (withMessage) {
        appenderMember(sb, MESSAGE_ATTR_NAME, jsonEscape(event.getMessage()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withFormattedMessage) {
        appenderMember(sb, FORMATTED_MESSAGE_ATTR_NAME, jsonEscape(event.getFormattedMessage()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withArguments)
        appendArgumentArray(sb, event);

    if (withThrowable)
        appendThrowableProxy(sb, THROWABLE_ATTR_NAME, event.getThrowableProxy());

    sb.append(CLOSE_OBJ);
    sb.append(CoreConstants.JSON_LINE_SEPARATOR);
    return sb.toString().getBytes(UTF_8_CHARSET);
}
// Round-trip: an event with an argument array must survive encode -> parse -> compare.
@Test
void withArguments() throws JsonProcessingException {
    LoggingEvent event = new LoggingEvent("x", logger, Level.WARN, "hello", null, new Object[] { "arg1", "arg2" });
    byte[] resultBytes = jsonEncoder.encode(event);
    String resultString = new String(resultBytes, StandardCharsets.UTF_8);
    //System.out.println(resultString);
    JsonLoggingEvent resultEvent = stringToLoggingEventMapper.mapStringToLoggingEvent(resultString);
    compareEvents(event, resultEvent);
}
/**
 * Builds the root URL of the web application served by the given network
 * service. Non-web services fall back to a plain http URL on the raw endpoint;
 * web services use the detected scheme, authority, and app root path. The
 * returned URL always ends with a trailing slash.
 */
public static String buildWebApplicationRootUrl(NetworkService networkService) {
    checkNotNull(networkService);
    if (!isWebService(networkService)) {
        return "http://" + NetworkEndpointUtils.toUriAuthority(networkService.getNetworkEndpoint()) + "/";
    }
    String rootUrl =
        (isPlainHttp(networkService) ? "http://" : "https://")
            + buildWebUriAuthority(networkService)
            + buildWebAppRootPath(networkService);
    // Normalize: guarantee exactly one trailing slash.
    return rootUrl.endsWith("/") ? rootUrl : rootUrl + "/";
}
// Plain http service with no explicit root path yields "http://host:port/".
@Test
public void buildWebApplicationRootUrl_whenHttpWithoutRoot_buildsExpectedUrl() {
    assertThat(
        NetworkServiceUtils.buildWebApplicationRootUrl(
            NetworkService.newBuilder()
                .setNetworkEndpoint(forIpAndPort("127.0.0.1", 8080))
                .setServiceName("http")
                .build()))
        .isEqualTo("http://127.0.0.1:8080/");
}
/**
 * Returns the consumer's offset store.
 *
 * @deprecated retained for backward compatibility — TODO(review): confirm the
 *     intended replacement API before removal.
 */
@Deprecated
public OffsetStore getOffsetStore() {
    return offsetStore;
}
// After the consumer is started, its offset store must be initialized.
@Test
public void testStart_OffsetShouldNotNUllAfterStart() {
    assertNotNull(pushConsumer.getOffsetStore());
}
/** Test hook: returns the effective log-aggregation rolling monitor interval (seconds). */
@VisibleForTesting
public long getRollingMonitorInterval() {
    return rollingMonitorInterval;
}
// When the configured roll interval (2700s) exceeds the minimum (1800s),
// the configured value is used unchanged.
@Test
public void testRollingMonitorIntervalGreaterThanSet() {
    this.conf.set(YarnConfiguration.MIN_LOG_ROLLING_INTERVAL_SECONDS, "1800");
    this.conf.set(YarnConfiguration
        .NM_LOG_AGGREGATION_ROLL_MONITORING_INTERVAL_SECONDS, "2700");
    LogAggregationService logAggregationService =
        new LogAggregationService(dispatcher, this.context, this.delSrvc, super.dirsHandler);
    logAggregationService.init(this.conf);
    long interval = logAggregationService.getRollingMonitorInterval();
    assertEquals(2700L, interval);
}
/**
 * Derives the average_slope and max_slope encoded values of an edge from the
 * way's "point_list" tag. average_slope is signed (sign stored via the reverse
 * flag); max_slope keeps whichever pillar-to-pillar or tower-to-tower slope has
 * the largest magnitude, clamped to the encoder's storable range.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    PointList pointList = way.getTag("point_list", null);
    if (pointList != null) {
        if (pointList.isEmpty() || !pointList.is3D()) {
            // No usable elevation data: store neutral slopes where encoders exist.
            if (maxSlopeEnc != null)
                maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            if (averageSlopeEnc != null)
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            return;
        }

        // Calculate 2d distance, although pointList might be 3D.
        // This calculation is a bit expensive and edge_distance is available already, but this would be in 3D
        double distance2D = DistanceCalcEarth.calcDistance(pointList, false);
        if (distance2D < MIN_LENGTH) {
            if (averageSlopeEnc != null)
                // default is minimum of average_slope is negative so we have to explicitly set it to 0
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            return;
        }

        // Slope between the two tower nodes (first and last point).
        double towerNodeSlope = calcSlope(pointList.getEle(pointList.size() - 1) - pointList.getEle(0), distance2D);
        if (Double.isNaN(towerNodeSlope))
            throw new IllegalArgumentException("average_slope was NaN for OSM way ID " + way.getId());

        if (averageSlopeEnc != null) {
            // Positive slope stored forward, negative stored as its magnitude on the reverse flag.
            if (towerNodeSlope >= 0)
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(towerNodeSlope, averageSlopeEnc.getMaxStorableDecimal()));
            else
                averageSlopeEnc.setDecimal(true, edgeId, edgeIntAccess, Math.min(Math.abs(towerNodeSlope), averageSlopeEnc.getMaxStorableDecimal()));
        }

        if (maxSlopeEnc != null) {
            // max_slope is more error-prone as the shorter distances increase the fluctuation
            // so apply some more filtering (here we use the average elevation delta of the previous two points)
            double maxSlope = 0, prevDist = 0, prevLat = pointList.getLat(0), prevLon = pointList.getLon(0);
            for (int i = 1; i < pointList.size(); i++) {
                double pillarDistance2D = DistanceCalcEarth.DIST_EARTH.calcDist(prevLat, prevLon, pointList.getLat(i), pointList.getLon(i));
                if (i > 1 && prevDist > MIN_LENGTH) {
                    double averagedPrevEle = (pointList.getEle(i - 1) + pointList.getEle(i - 2)) / 2;
                    double tmpSlope = calcSlope(pointList.getEle(i) - averagedPrevEle, pillarDistance2D + prevDist / 2);
                    // Keep the slope with the largest magnitude, preserving its sign.
                    maxSlope = Math.abs(tmpSlope) > Math.abs(maxSlope) ? tmpSlope : maxSlope;
                }
                prevDist = pillarDistance2D;
                prevLat = pointList.getLat(i);
                prevLon = pointList.getLon(i);
            }

            // For tunnels and bridges we cannot trust the pillar node elevation and ignore all changes.
            // Probably we should somehow recalculate even the average_slope after elevation interpolation? See EdgeElevationInterpolator
            if (way.hasTag("tunnel", "yes") || way.hasTag("bridge", "yes") || way.hasTag("highway", "steps"))
                maxSlope = towerNodeSlope;
            else
                maxSlope = Math.abs(towerNodeSlope) > Math.abs(maxSlope) ? towerNodeSlope : maxSlope;

            if (Double.isNaN(maxSlope))
                throw new IllegalArgumentException("max_slope was NaN for OSM way ID " + way.getId());
            // Clamp the (possibly negative) slope into the encoder's storable range.
            double val = Math.max(maxSlope, maxSlopeEnc.getMinStorableDecimal());
            maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(maxSlopeEnc.getMaxStorableDecimal(), val));
        }
    }
}
// A steep downhill way: max_slope is clamped to the encoder's minimum storable
// value (-31), while average_slope stores the magnitude on the reverse flag.
@Test
public void testMaxSlopeSmallerThanMinStorableDecimal() {
    PointList pointList = new PointList(5, true);
    pointList.add(47.7283135, 11.9991135, 1178.0);
    pointList.add(47.7282782, 11.9991944, 1163.0);
    pointList.add(47.7281561, 11.9993135, 1163.0);
    ReaderWay way = new ReaderWay(1);
    way.setTag("point_list", pointList);
    ArrayEdgeIntAccess intAccess = new ArrayEdgeIntAccess(1);
    DecimalEncodedValue averageEnc = AverageSlope.create();
    DecimalEncodedValue maxEnc = MaxSlope.create();
    new EncodingManager.Builder().add(averageEnc).add(maxEnc).build();
    SlopeCalculator creator = new SlopeCalculator(maxEnc, averageEnc);
    int edgeId = 0;
    creator.handleWayTags(edgeId, intAccess, way, IntsRef.EMPTY);
    assertEquals(-31, maxEnc.getDecimal(false, edgeId, intAccess), 1e-3);
    assertEquals(31, averageEnc.getDecimal(true, edgeId, intAccess), 1e-3);
}
/**
 * Closes the underlying UFS stream, if one is open, and clears the reference
 * so later calls are no-ops (idempotent on the happy path).
 */
@Override
public void close() throws IOException {
    if (mUfsInStream.isPresent()) {
        mUfsInStream.get().close();
        // NOTE(review): if close() above throws, the stream stays present and a
        // retry would attempt the close again — confirm that is intended.
        mUfsInStream = Optional.empty();
    }
}
// Smoke test: opening a stream on an empty UFS file and closing it must not throw.
@Test
public void openClose() throws IOException, AlluxioException {
    AlluxioURI ufsPath = getUfsPath();
    createFile(ufsPath, 0);
    getStream(ufsPath).close();
}
/**
 * Returns whether the given predicate is guaranteed to eliminate all rows of
 * the row group, judged solely from the columns' dictionary pages.
 *
 * @param pred the filter predicate to evaluate (must not be null)
 * @param columns the row group's column chunk metadata (must not be null)
 * @param dictionaries access to the columns' dictionary pages
 * @return true when the row group can safely be skipped
 */
public static boolean canDrop(
    FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
  // Fix: corrected the "cannnot" typo in both null-check messages.
  Objects.requireNonNull(pred, "pred cannot be null");
  Objects.requireNonNull(columns, "columns cannot be null");
  return pred.accept(new DictionaryFilter(columns, dictionaries));
}
// AND can be dropped as soon as either operand is provably false against the
// dictionary; it survives only when both operands could be true.
@Test
public void testAnd() throws Exception {
    BinaryColumn col = binaryColumn("binary_field");

    // both evaluate to false (no upper-case letters are in the dictionary)
    FilterPredicate B = eq(col, Binary.fromString("B"));
    FilterPredicate C = eq(col, Binary.fromString("C"));

    // both evaluate to true (all lower-case letters are in the dictionary)
    FilterPredicate x = eq(col, Binary.fromString("x"));
    FilterPredicate y = eq(col, Binary.fromString("y"));

    assertTrue("Should drop when either predicate must be false", canDrop(and(B, y), ccmd, dictionaries));
    assertTrue("Should drop when either predicate must be false", canDrop(and(x, C), ccmd, dictionaries));
    assertTrue("Should drop when either predicate must be false", canDrop(and(B, C), ccmd, dictionaries));
    assertFalse("Should not drop when either predicate could be true", canDrop(and(x, y), ccmd, dictionaries));
}
/**
 * On Spring context refresh: wipes any previously extracted agent plugins,
 * re-extracts the bundled plugin zip, then boots the plugin infrastructure.
 * Failures are logged and swallowed so agent startup is not aborted.
 */
@Override
public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent) {
    try {
        File pluginsFolder = new File(systemEnvironment.get(SystemEnvironment.AGENT_PLUGINS_PATH));

        // Start from a clean slate: stale plugins from a previous run are removed.
        if (pluginsFolder.exists()) {
            FileUtils.forceDelete(pluginsFolder);
        }

        zipUtil.unzip(DownloadableFile.AGENT_PLUGINS.getLocalFile(), pluginsFolder);
        // Order matters: the jar-location monitor must see the extracted files
        // before the plugin infrastructure starts.
        defaultPluginJarLocationMonitor.initialize();
        pluginManager.startInfrastructure(false);
    } catch (IOException e) {
        LOG.warn("could not extract plugin zip", e);
    } catch (RuntimeException e) {
        LOG.warn("error while initializing agent plugins", e);
    }
}
// Order check: unzip first, then initialize the jar-location monitor, then
// start the plugin infrastructure.
@Test
void shouldInitializePluginJarLocationMonitorAndStartPluginInfrastructureAfterPluginZipExtracted() throws Exception {
    InOrder inOrder = inOrder(zipUtil, pluginManager, pluginJarLocationMonitor);
    agentPluginsInitializer.onApplicationEvent(null);
    inOrder.verify(zipUtil).unzip(DownloadableFile.AGENT_PLUGINS.getLocalFile(), new File(SystemEnvironment.PLUGINS_PATH));
    inOrder.verify(pluginJarLocationMonitor).initialize();
    inOrder.verify(pluginManager).startInfrastructure(false);
}
/**
 * Polls coordinator events: heartbeats, rebalances, and async offset commits.
 * Returns false when the caller should retry (coordinator unknown, metadata
 * stale, or the group join did not complete within the timer); true otherwise.
 *
 * @param timer bounds how long this call may block
 * @param waitForJoinGroup whether to block on completing the group join
 */
public boolean poll(Timer timer, boolean waitForJoinGroup) {
    maybeUpdateSubscriptionMetadata();

    invokeCompletedOffsetCommitCallbacks();

    if (subscriptions.hasAutoAssignedPartitions()) {
        if (protocol == null) {
            throw new IllegalStateException("User configured " + ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG + " to empty while trying to subscribe for group protocol to auto assign partitions");
        }
        // Always update the heartbeat last poll time so that the heartbeat thread does not leave the
        // group proactively due to application inactivity even if (say) the coordinator cannot be found.
        pollHeartbeat(timer.currentTimeMs());
        if (coordinatorUnknownAndUnreadySync(timer)) {
            return false;
        }

        if (rejoinNeededOrPending()) {
            // due to a race condition between the initial metadata fetch and the initial rebalance,
            // we need to ensure that the metadata is fresh before joining initially. This ensures
            // that we have matched the pattern against the cluster's topics at least once before joining.
            if (subscriptions.hasPatternSubscription()) {
                // For consumer group that uses pattern-based subscription, after a topic is created,
                // any consumer that discovers the topic after metadata refresh can trigger rebalance
                // across the entire consumer group. Multiple rebalances can be triggered after one topic
                // creation if consumers refresh metadata at vastly different times. We can significantly
                // reduce the number of rebalances caused by single topic creation by asking consumer to
                // refresh metadata before re-joining the group as long as the refresh backoff time has
                // passed.
                if (this.metadata.timeToAllowUpdate(timer.currentTimeMs()) == 0) {
                    this.metadata.requestUpdate(true);
                }

                if (!client.ensureFreshMetadata(timer)) {
                    return false;
                }

                maybeUpdateSubscriptionMetadata();
            }

            // if not wait for join group, we would just use a timer of 0
            if (!ensureActiveGroup(waitForJoinGroup ? timer : time.timer(0L))) {
                // since we may use a different timer in the callee, we'd still need
                // to update the original timer's current time after the call
                timer.update(time.milliseconds());

                return false;
            }
        }
    } else {
        // For manually assigned partitions, we do not try to pro-actively lookup coordinator;
        // instead we only try to refresh metadata when necessary.
        // If connections to all nodes fail, wakeups triggered while attempting to send fetch
        // requests result in polls returning immediately, causing a tight loop of polls. Without
        // the wakeup, poll() with no channels would block for the timeout, delaying re-connection.
        // awaitMetadataUpdate() in ensureCoordinatorReady initiates new connections with configured backoff and avoids the busy loop.
        if (metadata.updateRequested() && !client.hasReadyNodes(timer.currentTimeMs())) {
            client.awaitMetadataUpdate(timer);
        }

        // if there is pending coordinator requests, ensure they have a chance to be transmitted.
        client.pollNoWakeup();
    }

    maybeAutoCommitOffsetsAsync(timer.currentTimeMs());
    return true;
}
// A disconnected heartbeat response must fail the future with
// DisconnectException and mark the coordinator unknown.
@Test
public void testCoordinatorDisconnect() {
    client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
    coordinator.ensureCoordinatorReady(time.timer(Long.MAX_VALUE));

    // coordinator disconnect will mark coordinator as unknown
    time.sleep(sessionTimeoutMs);
    RequestFuture<Void> future = coordinator.sendHeartbeatRequest(); // should send out the heartbeat
    assertEquals(1, consumerClient.pendingRequestCount());
    assertFalse(future.isDone());

    client.prepareResponse(heartbeatResponse(Errors.NONE), true); // return disconnected
    time.sleep(sessionTimeoutMs);
    consumerClient.poll(time.timer(0));

    assertTrue(future.isDone());
    assertTrue(future.failed());
    assertInstanceOf(DisconnectException.class, future.exception());
    assertTrue(coordinator.coordinatorUnknown());
}
/**
 * Returns whether the given address string is covered by this machine list.
 *
 * @param ipAddress address to test; must not be null unless the list is a wildcard
 * @throws IllegalArgumentException if ipAddress is null (and the list is not "all")
 */
public boolean includes(String ipAddress) {
    if (all) {
        return true; // wildcard list matches everything (checked before the null guard)
    }

    if (ipAddress == null) {
        throw new IllegalArgumentException("ipAddress is null.");
    }

    try {
        return includes(addressFactory.getByName(ipAddress));
    } catch (UnknownHostException e) {
        return false; // an unresolvable name is simply not included
    }
}
// A null address against a non-wildcard list must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testNullIpAddress() {
    //create MachineList with a list of of ip ranges specified in CIDR format
    MachineList ml = new MachineList(CIDR_LIST, new TestAddressFactory());
    //test for exclusion with a null IP
    assertFalse(ml.includes((String) null));
    assertFalse(ml.includes((InetAddress) null));
}
/** Creates the app namespace locally, delegating with appendNamespacePrefix=true. */
@Transactional
public AppNamespace createAppNamespaceInLocal(AppNamespace appNamespace) {
    return createAppNamespaceInLocal(appNamespace, true);
}
// Creating a private namespace whose name already exists as a public namespace
// under another app id must be rejected with BadRequestException.
@Test(expected = BadRequestException.class)
@Sql(scripts = "/sql/appnamespaceservice/init-appnamespace.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = "/sql/cleanup.sql", executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
public void testCreatePrivateAppNamespaceExistedInAnotherAppIdAsPublic() {
    AppNamespace appNamespace = assembleBaseAppNamespace();
    appNamespace.setPublic(false);
    appNamespace.setName("SCC.song0711-03");
    appNamespace.setAppId("100003173");
    appNamespace.setFormat(ConfigFileFormat.Properties.getValue());
    appNamespaceService.createAppNamespaceInLocal(appNamespace);
}
/**
 * Prints the exception's stack trace followed by the usage help to the
 * command's error writer, then returns 0.
 * NOTE(review): returning 0 signals success to the shell even though an
 * exception occurred — confirm a non-zero exit code is not expected here.
 */
@Override
public int handleExecutionException(Exception exception, CommandLine commandLine, CommandLine.ParseResult parseResult) {
    PrintWriter errorWriter = commandLine.getErr();
    CommandLine.Help.ColorScheme colorScheme = commandLine.getColorScheme();

    // Print exception
    String stackTrace = ExceptionUtil.toString(exception);
    errorWriter.println(colorScheme.errorText(stackTrace));

    // Print usage
    commandLine.usage(errorWriter, colorScheme);

    return 0;
}
// Even without a message, the handler must print the stack trace and the usage help.
@Test
void test_handleExecutionException_withoutExceptionMessage() {
    // when
    new ExceptionHandler().handleExecutionException(exception, commandLine, parseResult);

    // then
    verify(exception, times(1)).printStackTrace(any(PrintWriter.class));
    verify(commandLine, times(1)).usage(errorWriter, colorScheme);
}
/**
 * Checks whether data written with the writer schema can be decoded with the
 * reader schema, and wraps the verdict together with a human-readable message.
 *
 * @throws AvroRuntimeException on an unrecognized compatibility verdict
 */
public static SchemaPairCompatibility checkReaderWriterCompatibility(final Schema reader, final Schema writer) {
    final SchemaCompatibilityResult compatibility = new ReaderWriterCompatibilityChecker().getCompatibility(reader, writer);

    final String message;
    switch (compatibility.getCompatibility()) {
    case INCOMPATIBLE: {
        // Include both pretty-printed schemas so the failure is actionable.
        message = String.format(
            "Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
            writer.toString(true), reader.toString(true));
        break;
    }
    case COMPATIBLE: {
        message = READER_WRITER_COMPATIBLE_MESSAGE;
        break;
    }
    default:
        throw new AvroRuntimeException("Unknown compatibility: " + compatibility);
    }

    return new SchemaPairCompatibility(compatibility, reader, writer, message);
}
// A reader containing all of the writer's fields must be fully compatible.
@Test
void validateSchemaPairAllFields() {
    final List<Schema.Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null),
        new Schema.Field("oldfield2", STRING_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);

    // Test with all fields.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
}
/** Blocks until an exchange arrives; a negative timeout means wait indefinitely. */
@Override
public Exchange receive() {
    return doReceive(-1);
}
// Receiving from the endpoint must yield the expected body content.
@Test
public void testReceive() {
    String body = consumer.receiveBody(endpointUrl + "/", String.class);
    assertEquals(getExpectedContent(), body);
}
/**
 * Returns the iteration ids recorded in the previous overview's details that
 * are not yet present in this overview's details — i.e. the iterations that
 * still need to run. Empty when the previous overview has no details.
 */
public Set<Long> getIterationsToRunFromDetails(ForeachStepOverview prevStepOverview) {
    if (prevStepOverview == null || prevStepOverview.details == null) {
        return new HashSet<>(); // nothing recorded previously -> nothing to re-run
    }
    // Iterations already tracked by this overview, flattened across all statuses.
    Set<Long> currRunInstances = details == null ? new HashSet<>() : details.flatten(e -> true).values().stream()
        .flatMap(List::stream)
        .collect(Collectors.toSet());
    // Previous iterations minus the ones already tracked here.
    return prevStepOverview.details.flatten(e -> true).values().stream()
        .flatMap(List::stream)
        .filter(e -> !currRunInstances.contains(e))
        .collect(Collectors.toSet());
}
// Exercises getIterationsToRunFromDetails against a fixture pair: the current
// overview drops two statuses and re-adds two SUCCEEDED iterations, so only
// the remaining prior iterations (4) are reported; null/empty edge cases too.
@Test
public void testGetIterationsToRunFromDetails() throws Exception {
    ForeachStepOverview prev = loadObject("fixtures/instances/sample-foreach-step-overview.json", ForeachStepOverview.class);
    ForeachStepOverview curr = loadObject("fixtures/instances/sample-foreach-step-overview.json", ForeachStepOverview.class);
    curr.getDetails().getInfo().remove(WorkflowInstance.Status.IN_PROGRESS);
    curr.getDetails().getInfo().remove(WorkflowInstance.Status.SUCCEEDED);
    curr.addOne(80110, WorkflowInstance.Status.SUCCEEDED, new WorkflowRollupOverview());
    curr.addOne(80115, WorkflowInstance.Status.SUCCEEDED, new WorkflowRollupOverview());
    curr.refreshDetail();

    // Sanity-check the fixture's previous overview contents.
    List<Long> flattenedSuccPrevDetails = prev.getDetails().flatten(e -> true).get(WorkflowInstance.Status.SUCCEEDED);
    assertEquals(5, flattenedSuccPrevDetails.size());
    assertEquals(1, prev.getDetails().flatten(e -> true).get(WorkflowInstance.Status.IN_PROGRESS).size());
    assertTrue(flattenedSuccPrevDetails.contains(80110L));
    assertTrue(flattenedSuccPrevDetails.contains(80115L));
    assertTrue(flattenedSuccPrevDetails.contains(80112L));
    assertTrue(flattenedSuccPrevDetails.contains(80113L));
    assertTrue(flattenedSuccPrevDetails.contains(80114L));
    assertTrue(prev.getDetails().flatten(e -> true).get(WorkflowInstance.Status.IN_PROGRESS).contains(70000L));

    // 80110/80115 are already present in curr, so they are excluded.
    Set<Long> result = curr.getIterationsToRunFromDetails(prev);
    assertEquals(4, result.size());
    assertFalse(result.contains(80110L));
    assertFalse(result.contains(80115L));

    // A fresh overview needs every prior iteration re-run.
    ForeachStepOverview newOne = new ForeachStepOverview();
    result = newOne.getIterationsToRunFromDetails(prev);
    assertEquals(6, result.size());

    // Null / empty previous overviews produce empty results.
    result = newOne.getIterationsToRunFromDetails(null);
    assertEquals(0, result.size());
    result = newOne.getIterationsToRunFromDetails(new ForeachStepOverview());
    assertEquals(0, result.size());
    result = prev.getIterationsToRunFromDetails(new ForeachStepOverview());
    assertEquals(0, result.size());
}
/**
 * Returns the trimmed source, or — when the source is null/empty — the value
 * produced by the callable. If the callable throws, the error is logged and
 * the original source is returned (null stays null, "" stays "").
 */
public static String stringEmptyAndThenExecute(String source, Callable<String> callable) {
    if (StringUtils.isEmpty(source)) {
        try {
            return callable.call();
        } catch (Exception e) {
            LogUtils.NAMING_LOGGER.error("string empty and then execute cause an exception.", e);
        }
    }
    // Reached when source is non-empty, or when the callable threw.
    return source == null ? null : source.trim();
}
// When the source is null and the callable throws, the result must fall back to null.
@Test
void testStringEmptyAndThenExecuteException() throws Exception {
    Callable callable = mock(Callable.class);
    when(callable.call()).thenThrow(new RuntimeException("test"));
    String actual = TemplateUtils.stringEmptyAndThenExecute(null, callable);
    assertNull(actual);
}
/**
 * Clears the account's username hash: invalidates the cached account first,
 * performs the store update with retries, and — on success — makes a
 * best-effort second cache invalidation to drop anything cached mid-flight.
 */
public CompletableFuture<Account> clearUsernameHash(final Account account) {
    return redisDeleteAsync(account)
        .thenCompose(ignored -> updateWithRetriesAsync(
            account,
            a -> true, // unconditional update: always clear the hash
            accounts::clearUsernameHash,
            () -> accounts.getByAccountIdentifierAsync(account.getUuid()).thenApply(Optional::orElseThrow),
            AccountChangeValidator.USERNAME_CHANGE_VALIDATOR,
            MAX_UPDATE_ATTEMPTS))
        .whenComplete((updatedAccount, throwable) -> {
            if (throwable == null) {
                // Make a best effort to clear any stale data that may have been cached while this operation was in progress
                redisDeleteAsync(updatedAccount);
            }
        });
}
// clearUsernameHash must delegate the clear to the accounts store for the same account.
@Test
void testClearUsernameHash() {
    when(accounts.clearUsernameHash(any())).thenReturn(CompletableFuture.completedFuture(null));

    Account account = AccountsHelper.generateTestAccount("+18005551234", UUID.randomUUID(), UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]);
    account.setUsernameHash(USERNAME_HASH_1);

    accountsManager.clearUsernameHash(account).join();

    verify(accounts).clearUsernameHash(eq(account));
}
/**
 * Opens an upload stream for the file, retrying once on two known failure
 * modes: 412 Precondition Failed with a stale lock token (retried without the
 * lock id) and 417 Expectation Failed (retried without Expect: 100-continue).
 */
@Override
public HttpResponseOutputStream<Void> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        return this.write(file, this.toHeaders(file, status, expect), status);
    }
    catch(ConflictException e) {
        if(expect) {
            if(null != status.getLockId()) {
                // Handle 412 Precondition Failed with expired token
                log.warn(String.format("Retry failure %s with lock id %s removed", e, status.getLockId()));
                return this.write(file, this.toHeaders(file, status.withLockId(null), expect), status);
            }
        }
        throw e;
    }
    catch(InteroperabilityException e) {
        if(expect) {
            // Handle 417 Expectation Failed
            log.warn(String.format("Retry failure %s with Expect: Continue removed", e));
            return this.write(file, this.toHeaders(file, status.withLockId(null), false), status);
        }
        throw e;
    }
}
// End-to-end DAV round trip: upload a small file, verify listing/append sizes,
// read the full content back, read a ranged slice, then delete.
@Test
public void testReadWrite() throws Exception {
    final TransferStatus status = new TransferStatus();
    final Local local = new Local(System.getProperty("java.io.tmpdir"), new AlphanumericRandomStringService().random());
    final byte[] content = "test".getBytes(StandardCharsets.UTF_8);
    final OutputStream out = local.getOutputStream(false);
    IOUtils.write(content, out);
    out.close();
    status.setLength(content.length);
    final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final HttpUploadFeature upload = new DAVUploadFeature(session);
    upload.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status, new DisabledConnectionCallback());
    assertTrue(session.getFeature(Find.class).find(test));
    assertEquals(content.length, new DAVListService(session).list(test.getParent(), new DisabledListProgressListener()).get(test).attributes().getSize(), 0L);
    assertEquals(content.length, new DAVUploadFeature(session).append(test, status.withRemote(new DAVAttributesFinderFeature(session).find(test))).offset, 0L);
    {
        // Full read: content must match what was uploaded.
        final byte[] buffer = new byte[content.length];
        IOUtils.readFully(new DAVReadFeature(session).read(test, new TransferStatus(), new DisabledConnectionCallback()), buffer);
        assertArrayEquals(content, buffer);
    }
    {
        // Ranged read: skip the first byte and compare against the tail slice.
        final byte[] buffer = new byte[content.length - 1];
        final InputStream in = new DAVReadFeature(session).read(test, new TransferStatus().withLength(content.length - 1L).append(true).withOffset(1L), new DisabledConnectionCallback());
        IOUtils.readFully(in, buffer);
        in.close();
        final byte[] reference = new byte[content.length - 1];
        System.arraycopy(content, 1, reference, 0, content.length - 1);
        assertArrayEquals(reference, buffer);
    }
    new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Validates the machine readable zone (MRZ) of a Dutch (BAP-configured)
 * driving licence.
 *
 * @param mrz the 30-character MRZ string to validate
 * @throws VerificationException when the MRZ is malformed, not Dutch, not BAP,
 *     or fails the check-digit verification
 */
public static void checkDrivingLicenceMrz(String mrz) {
    // Fix: verify the length before any positional access, so malformed short
    // input raises a VerificationException instead of leaking a
    // StringIndexOutOfBoundsException from charAt/substring.
    if (mrz.length() != 30) {
        throw new VerificationException("Dutch MRZ should have length of 30");
    }
    if (mrz.charAt(0) != 'D') {
        throw new VerificationException("MRZ should start with D");
    }
    if (mrz.charAt(1) != '1') {
        throw new VerificationException("Only BAP configuration is supported (1)");
    }
    if (!mrz.substring(2, 5).equals("NLD")) {
        throw new VerificationException("Only Dutch driving licence supported");
    }
    checkMrzCheckDigit(mrz);
}
// A length-30 MRZ that does not start with 'D' must be rejected.
@Test
public void checkDrivingLicenceMrzCheckBapWrong() {
    assertThrows(VerificationException.class, () -> {
        MrzUtils.checkDrivingLicenceMrz("PPPPPPPPPPPPPPPPPPPPPPPPPPPPPP");
    });
}
@DataPermission(enable = false) // 忽略数据权限,避免因为过滤,导致找不到候选人 public Set<Long> calculateUsers(DelegateExecution execution) { Integer strategy = BpmnModelUtils.parseCandidateStrategy(execution.getCurrentFlowElement()); String param = BpmnModelUtils.parseCandidateParam(execution.getCurrentFlowElement()); // 1.1 计算任务的候选人 Set<Long> userIds = getCandidateStrategy(strategy).calculateUsers(execution, param); // 1.2 移除被禁用的用户 removeDisableUsers(userIds); // 2. 校验是否有候选人 if (CollUtil.isEmpty(userIds)) { log.error("[calculateUsers][流程任务({}/{}/{}) 任务规则({}/{}) 找不到候选人]", execution.getId(), execution.getProcessDefinitionId(), execution.getCurrentActivityId(), strategy, param); throw exception(TASK_CREATE_FAIL_NO_CANDIDATE_USER); } return userIds; }
// With the USER strategy and param "1,2", both enabled users must be returned.
@Test
public void testCalculateUsers() {
    // prepare arguments
    String param = "1,2";
    DelegateExecution execution = mock(DelegateExecution.class);
    // mock behavior (DelegateExecution)
    UserTask userTask = mock(UserTask.class);
    when(execution.getCurrentFlowElement()).thenReturn(userTask);
    when(userTask.getAttributeValue(eq(BpmnModelConstants.NAMESPACE), eq(BpmnModelConstants.USER_TASK_CANDIDATE_STRATEGY)))
        .thenReturn(BpmTaskCandidateStrategyEnum.USER.getStrategy().toString());
    when(userTask.getAttributeValue(eq(BpmnModelConstants.NAMESPACE), eq(BpmnModelConstants.USER_TASK_CANDIDATE_PARAM)))
        .thenReturn(param);
    // mock behavior (adminUserApi)
    AdminUserRespDTO user1 = randomPojo(AdminUserRespDTO.class, o -> o.setId(1L)
        .setStatus(CommonStatusEnum.ENABLE.getStatus()));
    AdminUserRespDTO user2 = randomPojo(AdminUserRespDTO.class, o -> o.setId(2L)
        .setStatus(CommonStatusEnum.ENABLE.getStatus()));
    Map<Long, AdminUserRespDTO> userMap = MapUtil.builder(user1.getId(), user1)
        .put(user2.getId(), user2).build();
    when(adminUserApi.getUserMap(eq(asSet(1L, 2L)))).thenReturn(userMap);

    // invoke
    Set<Long> results = taskCandidateInvoker.calculateUsers(execution);
    // assert
    assertEquals(asSet(1L, 2L), results);
}
/**
 * Sets a complex (bean-valued) property on the wrapped object by name,
 * resolving the JavaBeans write method via introspection. Failures to resolve
 * or invoke the setter are reported through the context's status methods
 * rather than thrown.
 *
 * @param name the property name (camelCase or capitalized; decapitalized here)
 * @param complexProperty the value to assign via the property's setter
 */
public void setComplexProperty(String name, Object complexProperty) {
    String dName = Introspector.decapitalize(name);
    PropertyDescriptor propertyDescriptor = getPropertyDescriptor(dName);

    if (propertyDescriptor == null) {
        addWarn("Could not find PropertyDescriptor for [" + name + "] in " + objClass.getName());
        return;
    }

    Method setter = propertyDescriptor.getWriteMethod();
    if (setter == null) {
        // Fix: corrected "Not setter method" -> "No setter method".
        addWarn("No setter method for property [" + name + "] in " + obj.getClass().getName());
        return;
    }
    Class<?>[] paramTypes = setter.getParameterTypes();

    if (!isSanityCheckSuccessful(name, setter, paramTypes, complexProperty)) {
        return;
    }
    try {
        invokeMethodWithSingleParameterOnThisObject(setter, complexProperty);
    } catch (Exception e) {
        // Fix: the original message printed the parent object twice; report the
        // component that failed to be set, then the parent it was set on.
        addError("Could not set component " + complexProperty + " for parent component " + obj, e);
    }
}
@Test
public void testSetComplexWithCamelCaseName() {
    // A CamelCase property name must still resolve to the matching setter.
    final SwimmingPool swimmingPool = new SwimmingPoolImpl();
    setter.setComplexProperty("swimmingPool", swimmingPool);
    assertEquals(swimmingPool, house.getSwimmingPool());
}
@Override
public boolean trySet(V value, long timeToLive, TimeUnit timeUnit) {
    // Synchronous facade: block on the async variant and return its result.
    return get(trySetAsync(value, timeToLive, timeUnit));
}
@Test
public void testTrySet() {
    final RBucket<String> bucket = redisson.getBucket("testTrySet");
    // First conditional write wins; the second one must be rejected.
    assertThat(bucket.setIfAbsent("3")).isTrue();
    assertThat(bucket.setIfAbsent("4")).isFalse();
    // The stored value is still the one from the first write.
    assertThat(bucket.get()).isEqualTo("3");
}
@Override
public KeyValueIterator<Windowed<K>, V> fetchAll(final Instant timeFrom, final Instant timeTo) {
    // One lazy iterator per underlying store, concatenated and wrapped with peeking support.
    final NextIteratorFunction<Windowed<K>, V, ReadOnlyWindowStore<K, V>> fetchFunction =
        windowStore -> windowStore.fetchAll(timeFrom, timeTo);
    return new DelegatingPeekingKeyValueIterator<>(
        storeName,
        new CompositeKeyValueIterator<>(
            provider.stores(storeName, windowStoreType).iterator(),
            fetchFunction));
}
@Test
public void shouldFetchAllAcrossStores() {
    // Register a second underlying store so the composite store spans two providers.
    final ReadOnlyWindowStoreStub<String, String> secondUnderlying = new ReadOnlyWindowStoreStub<>(WINDOW_SIZE);
    stubProviderTwo.addStore(storeName, secondUnderlying);
    // One record per underlying store, at timestamps 0 and 10.
    underlyingWindowStore.put("a", "a", 0L);
    secondUnderlying.put("b", "b", 10L);
    final List<KeyValue<Windowed<String>, String>> results =
        StreamsTestUtils.toList(windowStore.fetchAll(ofEpochMilli(0), ofEpochMilli(10)));
    // fetchAll must surface both records, each in its own time window.
    assertThat(results, equalTo(Arrays.asList(
        KeyValue.pair(new Windowed<>("a", new TimeWindow(0, WINDOW_SIZE)), "a"),
        KeyValue.pair(new Windowed<>("b", new TimeWindow(10, 10 + WINDOW_SIZE)), "b"))));
}
/**
 * Replaces the labels on a single node by routing the request to the sub-cluster
 * that owns the node. Validates inputs, audits both success and failure, and
 * tracks latency/failure metrics.
 *
 * @param newNodeLabelsName the labels to place on the node; must be non-empty
 * @param hsr               incoming request; cloned before being forwarded
 * @param nodeId            target node id; must be non-blank
 * @return 200 OK with a per-subCluster success message
 * @throws IllegalArgumentException on missing nodeId or empty label set
 */
@Override
public Response replaceLabelsOnNode(Set<String> newNodeLabelsName,
    HttpServletRequest hsr, String nodeId) throws Exception {
    // Step1. Check the parameters to ensure that the parameters are not empty.
    if (StringUtils.isBlank(nodeId)) {
        routerMetrics.incrReplaceLabelsOnNodeFailedRetrieved();
        RouterAuditLogger.logFailure(getUser().getShortUserName(), REPLACE_LABELSONNODE, UNKNOWN,
            TARGET_WEB_SERVICE, "Parameter error, nodeId must not be null or empty.");
        throw new IllegalArgumentException("Parameter error, nodeId must not be null or empty.");
    }
    if (CollectionUtils.isEmpty(newNodeLabelsName)) {
        routerMetrics.incrReplaceLabelsOnNodeFailedRetrieved();
        RouterAuditLogger.logFailure(getUser().getShortUserName(), REPLACE_LABELSONNODE, UNKNOWN,
            TARGET_WEB_SERVICE, "Parameter error, newNodeLabelsName must not be empty.");
        throw new IllegalArgumentException("Parameter error, newNodeLabelsName must not be empty.");
    }
    try {
        // Step2. We find the subCluster according to the nodeId,
        // and then call the replaceLabelsOnNode of the subCluster.
        long startTime = clock.getTime();
        SubClusterInfo subClusterInfo = getNodeSubcluster(nodeId);
        DefaultRequestInterceptorREST interceptor = getOrCreateInterceptorByNodeId(nodeId);
        // Clone the request so the downstream interceptor gets its own copy.
        final HttpServletRequest hsrCopy = clone(hsr);
        interceptor.replaceLabelsOnNode(newNodeLabelsName, hsrCopy, nodeId);
        // Step3. Return the response result (audit + latency metric on success).
        long stopTime = clock.getTime();
        RouterAuditLogger.logSuccess(getUser().getShortUserName(), REPLACE_LABELSONNODE,
            TARGET_WEB_SERVICE);
        routerMetrics.succeededReplaceLabelsOnNodeRetrieved(stopTime - startTime);
        String msg = "subCluster#" + subClusterInfo.getSubClusterId().getId() + ":Success;";
        return Response.status(Status.OK).entity(msg).build();
    } catch (Exception e) {
        // Audit + count the failure, then rethrow to the caller unchanged.
        routerMetrics.incrReplaceLabelsOnNodeFailedRetrieved();
        RouterAuditLogger.logFailure(getUser().getShortUserName(), REPLACE_LABELSONNODE, UNKNOWN,
            TARGET_WEB_SERVICE, e.getLocalizedMessage());
        throw e;
    }
}
@Test
public void testReplaceLabelsOnNodeError() throws Exception {
    // Fixtures: a valid nodeId, a non-empty label set, and an empty label set.
    String nodeId = "node3:3";
    Set<String> labels = Collections.singleton("NodeLabel3");
    Set<String> labelsEmpty = new HashSet<>();
    // nodeId is null
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Parameter error, nodeId must not be null or empty.",
        () -> interceptor.replaceLabelsOnNode(labels, null, null));
    // labels is null
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Parameter error, newNodeLabelsName must not be empty.",
        () -> interceptor.replaceLabelsOnNode(null, null, nodeId));
    // labels is empty
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Parameter error, newNodeLabelsName must not be empty.",
        () -> interceptor.replaceLabelsOnNode(labelsEmpty, null, nodeId));
}
/**
 * Returns whether the two collections share at least one element.
 * Iterates the smaller collection and probes the larger one for membership.
 */
public static boolean containsAny(Collection<?> coll1, Collection<?> coll2) {
    // An empty collection can never share an element.
    if (isEmpty(coll1) || isEmpty(coll2)) {
        return false;
    }
    final Collection<?> smaller = coll1.size() < coll2.size() ? coll1 : coll2;
    final Collection<?> larger = smaller == coll1 ? coll2 : coll1;
    for (final Object element : smaller) {
        if (larger.contains(element)) {
            return true;
        }
    }
    return false;
}
@Test
public void containsAnyTest() {
    // The two lists overlap on 1, 3 and 5, so containsAny must report true.
    final ArrayList<Integer> first = CollUtil.newArrayList(1, 2, 3, 4, 5);
    final ArrayList<Integer> second = CollUtil.newArrayList(5, 3, 1, 9, 11);
    assertTrue(CollUtil.containsAny(first, second));
}
@Override
public PageResult<LoginLogDO> getLoginLogPage(LoginLogPageReqVO pageReqVO) {
    // Thin delegation: filter conditions and paging are built inside the mapper.
    return loginLogMapper.selectPage(pageReqVO);
}
@Test
public void testGetLoginLogPage() {
    // Mock data: the single row that matches every query condition
    LoginLogDO loginLogDO = randomPojo(LoginLogDO.class, o -> {
        o.setUserIp("192.168.199.16");
        o.setUsername("wang");
        o.setResult(SUCCESS.getResult());
        o.setCreateTime(buildTime(2021, 3, 6));
    });
    loginLogMapper.insert(loginLogDO);
    // Row with a non-matching result/status
    loginLogMapper.insert(cloneIgnoreId(loginLogDO, o -> o.setResult(CAPTCHA_CODE_ERROR.getResult())));
    // Row with a non-matching ip
    loginLogMapper.insert(cloneIgnoreId(loginLogDO, o -> o.setUserIp("192.168.128.18")));
    // Row with a non-matching username
    loginLogMapper.insert(cloneIgnoreId(loginLogDO, o -> o.setUsername("yunai")));
    // Row with a non-matching createTime
    loginLogMapper.insert(cloneIgnoreId(loginLogDO, o -> o.setCreateTime(buildTime(2021, 2, 6))));
    // Build the request parameters
    LoginLogPageReqVO reqVO = new LoginLogPageReqVO();
    reqVO.setUsername("wang");
    reqVO.setUserIp("192.168.199");
    reqVO.setStatus(true);
    reqVO.setCreateTime(buildBetweenTime(2021, 3, 5, 2021, 3, 7));
    // Invoke
    PageResult<LoginLogDO> pageResult = loginLogService.getLoginLogPage(reqVO);
    // Assert: exactly the one matching row is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(loginLogDO, pageResult.getList().get(0));
}
/**
 * Returns the earliest instant in the stream, or BoundedWindow.TIMESTAMP_MAX_VALUE
 * when the stream is empty.
 */
static Instant minTimestamp(Stream<Instant> timestamps) {
    // Fold with the "end of time" sentinel as identity: it never wins against a
    // real timestamp, and an empty stream yields the sentinel itself.
    return timestamps.reduce(
        BoundedWindow.TIMESTAMP_MAX_VALUE,
        (left, right) -> left.compareTo(right) <= 0 ? left : right);
}
@Test
public void shouldGiveMinTs() {
    // Empty stream: falls back to the TIMESTAMP_MAX_VALUE sentinel.
    assertThat(minTimestamp(ImmutableList.<org.joda.time.Instant>of().stream()))
        .isEqualTo(BoundedWindow.TIMESTAMP_MAX_VALUE);
    // Single element: returned unchanged.
    assertThat(minTimestamp(ImmutableList.of(org.joda.time.Instant.ofEpochMilli(1)).stream()))
        .isEqualTo(org.joda.time.Instant.ofEpochMilli(1));
    // Multiple elements: the earliest instant wins.
    assertThat(
        minTimestamp(
            ImmutableList.of(
                org.joda.time.Instant.ofEpochMilli(1),
                org.joda.time.Instant.ofEpochMilli(2))
                .stream()))
        .isEqualTo(org.joda.time.Instant.ofEpochMilli(1));
}
// Delegates to the wrapped group. The 'synchronized' modifier serializes this call
// with the other synchronized methods of this wrapper (monitor is this instance).
@Override
synchronized boolean readyToProcess(final long wallClockTime) {
    return wrapped.readyToProcess(wallClockTime);
}
@Test
public void testReadyToProcess() {
    final long wallClockTime = 0L;
    when(wrapped.readyToProcess(wallClockTime)).thenReturn(true);

    // FIX: the stubbed return value was previously discarded — the test only verified
    // the delegation call. Also assert that the wrapper propagates the result.
    final boolean result = synchronizedPartitionGroup.readyToProcess(wallClockTime);

    assertTrue(result);
    verify(wrapped, times(1)).readyToProcess(wallClockTime);
}
/**
 * Splits the job's Kafka partitions into {@code currentConcurrentTaskNum} tasks
 * (round-robin by partition index), registers them on the job, moves the job to
 * RUNNING, and queues the tasks on the global task scheduler. Only acts when the
 * job is in NEED_SCHEDULE state; everything runs under the job's write lock.
 */
@Override
public void divideRoutineLoadJob(int currentConcurrentTaskNum) throws UserException {
    List<RoutineLoadTaskInfo> result = new ArrayList<>();
    writeLock();
    try {
        if (state == JobState.NEED_SCHEDULE) {
            // divide kafkaPartitions into tasks
            for (int i = 0; i < currentConcurrentTaskNum; i++) {
                Map<Integer, Long> taskKafkaProgress = Maps.newHashMap();
                for (int j = 0; j < currentKafkaPartitions.size(); j++) {
                    // Round-robin assignment: partition index j goes to task (j % taskNum).
                    if (j % currentConcurrentTaskNum == i) {
                        int kafkaPartition = currentKafkaPartitions.get(j);
                        // Seed each partition with its current consumed offset.
                        taskKafkaProgress.put(kafkaPartition,
                                ((KafkaProgress) progress).getOffsetByPartition(kafkaPartition));
                    }
                }
                // First execution is delayed by one scheduling interval.
                long timeToExecuteMs = System.currentTimeMillis() + taskSchedIntervalS * 1000;
                KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), this, taskSchedIntervalS * 1000,
                        timeToExecuteMs, taskKafkaProgress, taskTimeoutSecond * 1000);
                kafkaTaskInfo.setWarehouseId(warehouseId);
                routineLoadTaskInfoList.add(kafkaTaskInfo);
                result.add(kafkaTaskInfo);
            }
            // change job state to running
            if (result.size() != 0) {
                unprotectUpdateState(JobState.RUNNING, null, false);
            }
        } else {
            LOG.debug("Ignore to divide routine load job while job state {}", state);
        }
        // save task into queue of needScheduleTasks
        GlobalStateMgr.getCurrentState().getRoutineLoadTaskScheduler().addTasksInQueue(result);
    } finally {
        writeUnlock();
    }
}
@Test
public void testDivideRoutineLoadJob(@Injectable RoutineLoadMgr routineLoadManager,
                                     @Mocked RoutineLoadDesc routineLoadDesc) throws UserException {
    GlobalStateMgr globalStateMgr = Deencapsulation.newInstance(GlobalStateMgr.class);
    RoutineLoadJob routineLoadJob =
            new KafkaRoutineLoadJob(1L, "kafka_routine_load_job", 1L, 1L, "127.0.0.1:9020", "topic1");
    new Expectations(globalStateMgr) {
        {
            globalStateMgr.getRoutineLoadMgr();
            minTimes = 0;
            result = routineLoadManager;
        }
    };
    RoutineLoadTaskScheduler routineLoadTaskScheduler = new RoutineLoadTaskScheduler(routineLoadManager);
    Deencapsulation.setField(globalStateMgr, "routineLoadTaskScheduler", routineLoadTaskScheduler);
    // Three partitions split across two concurrent tasks: round-robin gives {1, 6} and {4}.
    Deencapsulation.setField(routineLoadJob, "currentKafkaPartitions", Arrays.asList(1, 4, 6));
    routineLoadJob.divideRoutineLoadJob(2);
    // todo(ml): assert
    List<RoutineLoadTaskInfo> routineLoadTaskInfoList =
            Deencapsulation.getField(routineLoadJob, "routineLoadTaskInfoList");
    Assert.assertEquals(2, routineLoadTaskInfoList.size());
    for (RoutineLoadTaskInfo routineLoadTaskInfo : routineLoadTaskInfoList) {
        KafkaTaskInfo kafkaTaskInfo = (KafkaTaskInfo) routineLoadTaskInfo;
        // Newly divided tasks must not be running yet.
        Assert.assertEquals(false, kafkaTaskInfo.isRunning());
        if (kafkaTaskInfo.getPartitions().size() == 2) {
            Assert.assertTrue(kafkaTaskInfo.getPartitions().contains(1));
            Assert.assertTrue(kafkaTaskInfo.getPartitions().contains(6));
        } else if (kafkaTaskInfo.getPartitions().size() == 1) {
            Assert.assertTrue(kafkaTaskInfo.getPartitions().contains(4));
        } else {
            // Any other partition-count means the round-robin split is broken.
            Assert.fail();
        }
    }
}
/**
 * Parses a Record Access Control Point (RACP) indication.
 * Expected layout: [0] op code, [1] operator (must be NULL), [2..] operands.
 * Dispatches to the number-of-records or response-code callbacks; anything
 * malformed goes to onInvalidDataReceived.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Minimum RACP response: op code + operator + at least one operand byte.
    if (data.size() < 3) {
        onInvalidDataReceived(device, data);
        return;
    }

    final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);
    if (opCode != OP_CODE_NUMBER_OF_STORED_RECORDS_RESPONSE && opCode != OP_CODE_RESPONSE_CODE) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Responses always carry the NULL operator.
    final int operator = data.getIntValue(Data.FORMAT_UINT8, 1);
    if (operator != OPERATOR_NULL) {
        onInvalidDataReceived(device, data);
        return;
    }

    switch (opCode) {
        case OP_CODE_NUMBER_OF_STORED_RECORDS_RESPONSE -> {
            // Field size is defined per service
            int numberOfRecords;
            switch (data.size() - 2) {
                case 1 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT8, 2);
                case 2 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT16_LE, 2);
                case 4 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT32_LE, 2);
                default -> {
                    // Other field sizes are not supported
                    onInvalidDataReceived(device, data);
                    return;
                }
            }
            onNumberOfRecordsReceived(device, numberOfRecords);
        }
        case OP_CODE_RESPONSE_CODE -> {
            // Response Code payload is exactly: op code, operator, request op code, response code.
            if (data.size() != 4) {
                onInvalidDataReceived(device, data);
                return;
            }

            final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 2);
            final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 3);
            if (responseCode == RACP_RESPONSE_SUCCESS) {
                onRecordAccessOperationCompleted(device, requestCode);
            } else if (responseCode == RACP_ERROR_NO_RECORDS_FOUND) {
                // "No records" is reported separately from other errors.
                onRecordAccessOperationCompletedWithNoRecordsFound(device, requestCode);
            } else {
                onRecordAccessOperationError(device, requestCode, responseCode);
            }
        }
    }
}
@Test
public void onRecordAccessOperationError_operatorNotSupported() {
    // Bytes: op code 6 (Response Code), operator 0 (NULL), request op code 2, error code 4.
    final Data data = new Data(new byte[] { 6, 0, 2, 4 });
    callback.onDataReceived(null, data);
    // The error callback must capture both the error code and the original request code.
    assertEquals(4, error);
    assertEquals(2, requestCode);
}
/**
 * Shuts the engine down: closes (or detaches) registered queries, waits a bounded
 * time for the async cleanup service to finish, then closes metrics.
 *
 * @param closeQueries whether queries should be fully closed rather than detached
 */
public void close(final boolean closeQueries) {
    primaryContext.getQueryRegistry().close(closeQueries);
    try {
        // Bounded wait: a hung cleanup must not block engine shutdown forever.
        cleanupService.stopAsync().awaitTerminated(
            this.primaryContext.getKsqlConfig()
                .getLong(KsqlConfig.KSQL_QUERY_CLEANUP_SHUTDOWN_TIMEOUT_MS),
            TimeUnit.MILLISECONDS);
    } catch (final TimeoutException e) {
        // Best-effort: log what may have leaked and continue shutting down.
        log.warn("Timed out while closing cleanup service. "
                + "External resources for the following applications may be orphaned: {}",
            cleanupService.pendingApplicationIds()
        );
    }
    engineMetrics.close();
    aggregateMetricsCollector.shutdown();
}
@Test
public void shouldHardDeleteSchemaOnEngineCloseForTransientQueriesSharedRuntimes()
    throws IOException, RestClientException {
    // Given: a transient query on an engine with shared runtimes enabled.
    setupKsqlEngineWithSharedRuntimeEnabled();
    final QueryMetadata query = KsqlEngineTestUtil.executeQuery(
        serviceContext,
        ksqlEngine,
        "select * from test1 EMIT CHANGES;",
        ksqlConfig, Collections.emptyMap()
    );
    // Internal SR subjects: key and value subjects for one changelog and one
    // repartition topic, all scoped by the query's application id.
    final String internalTopic1Val = KsqlConstants.getSRSubject(
        query.getQueryApplicationId() + "-subject1" + KsqlConstants.STREAMS_CHANGELOG_TOPIC_SUFFIX, false);
    final String internalTopic2Val = KsqlConstants.getSRSubject(
        query.getQueryApplicationId() + "-subject3" + KsqlConstants.STREAMS_REPARTITION_TOPIC_SUFFIX, false);
    final String internalTopic1Key = KsqlConstants.getSRSubject(
        query.getQueryApplicationId() + "-subject1" + KsqlConstants.STREAMS_CHANGELOG_TOPIC_SUFFIX, true);
    final String internalTopic2Key = KsqlConstants.getSRSubject(
        query.getQueryApplicationId() + "-subject3" + KsqlConstants.STREAMS_REPARTITION_TOPIC_SUFFIX, true);

    // "subject2" is unrelated to the query and must survive cleanup.
    when(schemaRegistryClient.getAllSubjects()).thenReturn(
        Arrays.asList(
            internalTopic1Val, internalTopic1Key, "subject2", internalTopic2Val, internalTopic2Key));
    query.start();

    // When:
    query.close();

    // Then: all four internal subjects are hard-deleted (permanent=true); nothing else.
    awaitCleanupComplete();
    verify(schemaRegistryClient, times(4)).deleteSubject(any());
    verify(schemaRegistryClient).deleteSubject(internalTopic1Val, true);
    verify(schemaRegistryClient).deleteSubject(internalTopic2Val, true);
    verify(schemaRegistryClient).deleteSubject(internalTopic1Key, true);
    verify(schemaRegistryClient).deleteSubject(internalTopic2Key, true);
    verify(schemaRegistryClient, never()).deleteSubject("subject2");
}
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    // Split the paragraph into individual commands (dropping blank lines) and
    // delegate to the multi-line variant.
    final String[] commandLines = splitAndRemoveEmpty(st, "\n");
    return interpret(commandLines, context);
}
@Test
void loadFileTest() throws IOException, AlluxioException {
    // Create a 10-byte file written through to the under-store (CACHE_THROUGH).
    FileSystemTestUtils.createByteFile(fs, "/testFile", WritePType.CACHE_THROUGH, 10, 10);
    int memPercentage = fs.getStatus(new AlluxioURI("/testFile")).getInMemoryPercentage();
    // Precondition: at least part of the file is already in memory.
    assertNotEquals(0, memPercentage);

    // The "load" command must bring the whole file into Alluxio memory.
    alluxioInterpreter.interpret("load /testFile", null);
    memPercentage = fs.getStatus(new AlluxioURI("/testFile")).getInMemoryPercentage();
    assertEquals(100, memPercentage);
}
@Override
public Map<K, V> getCachedMap() {
    // Expose the local (near-cache) view of the entries, not the remote Redis state.
    final Map<K, V> cachedEntries = localCacheView.getCachedMap();
    return cachedEntries;
}
@Test
public void testPutIfAbsent() {
    RLocalCachedMap<SimpleKey, SimpleValue> map =
            redisson.getLocalCachedMap(LocalCachedMapOptions.name("test"));
    Map<SimpleKey, SimpleValue> cache = map.getCachedMap();

    // Existing key: putIfAbsent must return the current value and leave it unchanged.
    SimpleKey key = new SimpleKey("1");
    SimpleValue value = new SimpleValue("2");
    map.put(key, value);
    Assertions.assertEquals(value, map.putIfAbsent(key, new SimpleValue("3")));
    Assertions.assertEquals(value, map.get(key));

    // Absent key: putIfAbsent must return null and store the new value.
    SimpleKey key1 = new SimpleKey("2");
    SimpleValue value1 = new SimpleValue("4");
    Assertions.assertNull(map.putIfAbsent(key1, value1));
    Assertions.assertEquals(value1, map.get(key1));

    // Both entries must be present in the local cache view.
    assertThat(cache.size()).isEqualTo(2);
}