focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override public Footer readFooter(HiveWriterVersion hiveWriterVersion, InputStream inputStream, DwrfEncryptionProvider dwrfEncryptionProvider, DwrfKeyProvider dwrfKeyProvider, OrcDataSource orcDataSource, Optional<OrcDecompressor> decompressor) throws IOException { long cpuStart = THREAD_MX_BEAN.getCurrentThreadCpuTime(); CodedInputStream input = CodedInputStream.newInstance(inputStream); DwrfProto.Footer footer = DwrfProto.Footer.parseFrom(input); List<ColumnStatistics> fileStats = toColumnStatistics(hiveWriterVersion, footer.getStatisticsList(), false); List<StripeInformation> fileStripes = toStripeInformation(footer.getStripesList()); List<OrcType> types = toType(footer.getTypesList()); Optional<DwrfEncryption> encryption = footer.hasEncryption() ? Optional.of(toEncryption(footer.getEncryption())) : Optional.empty(); Optional<List<Integer>> stripeCacheOffsets = Optional.of(footer.getStripeCacheOffsetsList()); if (encryption.isPresent()) { Map<Integer, Slice> keys = dwrfKeyProvider.getIntermediateKeys(types); EncryptionLibrary encryptionLibrary = dwrfEncryptionProvider.getEncryptionLibrary(encryption.get().getKeyProvider()); fileStats = decryptAndCombineFileStatistics(hiveWriterVersion, encryption.get(), encryptionLibrary, fileStats, fileStripes, keys, orcDataSource, decompressor); } runtimeStats.addMetricValue("DwrfReadFooterTimeNanos", RuntimeUnit.NANO, THREAD_MX_BEAN.getCurrentThreadCpuTime() - cpuStart); OptionalLong rawSize = footer.hasRawDataSize() ? OptionalLong.of(footer.getRawDataSize()) : OptionalLong.empty(); return new Footer( footer.getNumberOfRows(), footer.getRowIndexStride(), rawSize, fileStripes, types, fileStats, toUserMetadata(footer.getMetadataList()), encryption, stripeCacheOffsets); }
@Test public void testReadFooter() throws IOException { long numberOfRows = 10; int rowIndexStride = 11; List<Integer> stripeCacheOffsets = ImmutableList.of(1, 2, 3); for (OptionalLong rawDataSize : ImmutableList.of(OptionalLong.empty(), OptionalLong.of(1_000_123))) { DwrfProto.Footer.Builder protoFooterBuilder = DwrfProto.Footer.newBuilder() .setNumberOfRows(numberOfRows) .setRowIndexStride(rowIndexStride) .addAllStripeCacheOffsets(stripeCacheOffsets); if (rawDataSize.isPresent()) { protoFooterBuilder.setRawDataSize(rawDataSize.getAsLong()); } DwrfProto.Footer protoFooter = protoFooterBuilder.build(); Footer footer = convertToFooter(protoFooter); assertEquals(footer.getNumberOfRows(), numberOfRows); assertEquals(footer.getRowsInRowGroup(), rowIndexStride); assertEquals(footer.getDwrfStripeCacheOffsets().get(), stripeCacheOffsets); assertEquals(footer.getRawSize(), rawDataSize); assertEquals(footer.getStripes(), Collections.emptyList()); } }
public static String getMethodKey(String interfaceName, String methodName) { return interfaceName + "#" + methodName; }
@Test public void getMethodKey() throws Exception { Assert.assertEquals(ClassUtils.getMethodKey("xxx", "yyy"), "xxx#yyy"); }
public BrokerReplicasInfo getInSyncStateData(final String controllerAddress, final List<String> brokers) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException, MQBrokerException, RemotingCommandException { // Get controller leader address. final GetMetaDataResponseHeader controllerMetaData = getControllerMetaData(controllerAddress); assert controllerMetaData != null; assert controllerMetaData.getControllerLeaderAddress() != null; final String leaderAddress = controllerMetaData.getControllerLeaderAddress(); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.CONTROLLER_GET_SYNC_STATE_DATA, null); final byte[] body = RemotingSerializable.encode(brokers); request.setBody(body); RemotingCommand response = this.remotingClient.invokeSync(leaderAddress, request, 3000); assert response != null; switch (response.getCode()) { case ResponseCode.SUCCESS: { return RemotingSerializable.decode(response.getBody(), BrokerReplicasInfo.class); } default: break; } throw new MQBrokerException(response.getCode(), response.getRemark()); }
@Test public void assertGetInSyncStateData() throws RemotingException, InterruptedException, MQBrokerException { mockInvokeSync(); BrokerReplicasInfo responseBody = new BrokerReplicasInfo(); BrokerReplicasInfo.ReplicasInfo replicasInfo = new BrokerReplicasInfo.ReplicasInfo(MixAll.MASTER_ID, defaultBrokerAddr, 1, 1, Collections.emptyList(), Collections.emptyList()); responseBody.getReplicasInfoTable().put("key", replicasInfo); GetMetaDataResponseHeader responseHeader = new GetMetaDataResponseHeader(); responseHeader.setControllerLeaderAddress(defaultBrokerAddr); setResponseHeader(responseHeader); setResponseBody(responseBody); BrokerReplicasInfo actual = mqClientAPI.getInSyncStateData(defaultBrokerAddr, Collections.singletonList(defaultBrokerAddr)); assertNotNull(actual); assertEquals(1L, actual.getReplicasInfoTable().size()); }
public String getQuery() throws Exception { return getQuery(weatherConfiguration.getLocation()); }
@Test public void testCurrentLocationQuery() throws Exception { WeatherConfiguration weatherConfiguration = new WeatherConfiguration(); weatherConfiguration.setMode(WeatherMode.XML); weatherConfiguration.setPeriod("3"); weatherConfiguration.setLanguage(WeatherLanguage.nl); weatherConfiguration.setUnits(WeatherUnits.IMPERIAL); weatherConfiguration.setAppid(APPID); WeatherQuery weatherQuery = new WeatherQuery(weatherConfiguration); weatherConfiguration.setGeoLocationProvider(geoLocationProvider); String query = weatherQuery.getQuery(); assertThat(query, is( "http://api.openweathermap.org/data/2.5/forecast/daily?lat=51.98&lon=4.13&lang=nl&cnt=3&units=imperial&mode=xml&APPID=9162755b2efa555823cfe0451d7fff38")); }
public long getBytesOutCounter() { return bytesOutFromRemovedSubscriptions.longValue() + sumSubscriptions(AbstractSubscription::getBytesOutCounter); }
@Test public void testGetBytesOutCounter() { topic.bytesOutFromRemovedSubscriptions.add(1L); when(subscription.getBytesOutCounter()).thenReturn(2L); assertEquals(topic.getBytesOutCounter(), 3L); }
public Map<TopicPartition, Long> retryEndOffsets(Set<TopicPartition> partitions, Duration timeoutDuration, long retryBackoffMs) { try { return RetryUtil.retryUntilTimeout( () -> endOffsets(partitions), () -> "list offsets for topic partitions", timeoutDuration, retryBackoffMs); } catch (UnsupportedVersionException e) { // Older brokers don't support this admin method, so rethrow it without wrapping it throw e; } catch (Exception e) { throw ConnectUtils.maybeWrap(e, "Failed to list offsets for topic partitions"); } }
@Test public void retryEndOffsetsShouldRethrowUnknownVersionException() { String topicName = "myTopic"; TopicPartition tp1 = new TopicPartition(topicName, 0); Set<TopicPartition> tps = Collections.singleton(tp1); Long offset = null; // response should use error Cluster cluster = createCluster(1, topicName, 1); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(new MockTime(), cluster)) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareResponse(prepareMetadataResponse(cluster, Errors.NONE)); // Expect the admin client list offsets will throw unsupported version, simulating older brokers env.kafkaClient().prepareResponse(listOffsetsResultWithUnsupportedVersion(tp1, offset)); TopicAdmin admin = new TopicAdmin(env.adminClient()); // The retryEndOffsets should catch and rethrow an unsupported version exception assertThrows(UnsupportedVersionException.class, () -> admin.retryEndOffsets(tps, Duration.ofMillis(100), 1)); } }
@VisibleForTesting static boolean areProxyPropertiesSet(String protocol) { return PROXY_PROPERTIES.stream() .anyMatch(property -> System.getProperty(protocol + "." + property) != null); }
@Test public void testAreProxyPropertiesSet_httpsHostSet() { System.setProperty("https.proxyHost", "host"); Assert.assertFalse(MavenSettingsProxyProvider.areProxyPropertiesSet("http")); Assert.assertTrue(MavenSettingsProxyProvider.areProxyPropertiesSet("https")); }
@Override public void logoutSuccess(HttpRequest request, @Nullable String login) { checkRequest(request); if (!LOGGER.isDebugEnabled()) { return; } LOGGER.debug("logout success [IP|{}|{}][login|{}]", request.getRemoteAddr(), getAllIps(request), preventLogFlood(emptyIfNull(login))); }
@Test public void logout_success_logs_remote_ip_from_request() { underTest.logoutSuccess(mockRequest("1.2.3.4"), "foo"); verifyLog("logout success [IP|1.2.3.4|][login|foo]", Set.of("login", "logout failure")); }
public static <T extends HoodieRecordPayload> T loadPayload(String recordPayloadClass, Object[] payloadArgs, Class<?>... constructorArgTypes) { try { return (T) ReflectionUtils.getClass(recordPayloadClass).getConstructor(constructorArgTypes) .newInstance(payloadArgs); } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { throw new HoodieException("Unable to instantiate payload class ", e); } }
@Test void loadPayload() { String payloadClassName = DefaultHoodieRecordPayload.class.getName(); HoodieRecordPayload payload = HoodieRecordUtils.loadPayload(payloadClassName, new Object[] {null, 0}, GenericRecord.class, Comparable.class); assertEquals(payload.getClass().getName(), payloadClassName); }
public String prettyPrint() { final String pretty = prettify(); System.out.println(pretty); return pretty; }
@Test public void pretty_printing_works() { // Given String json = "{\"data\": [{" + " \"uid\": 10,\n" + " \"name\": \"abc\"\n" + " }\n" + " ]\n" + "}"; // When final JsonPath jsonPath = new JsonPath(json); // Then final String string = jsonPath.prettyPrint(); assertThat(string, equalTo("{\n" + " \"data\": [\n" + " {\n" + " \"uid\": 10,\n" + " \"name\": \"abc\"\n" + " }\n" + " ]\n" + "}")); }
@Override public NSURL lock(final boolean interactive) throws AccessDeniedException { final NSURL resolved = resolver.resolve(this, interactive); if(null == resolved) { // Ignore failure resolving path return null; // NSURL.fileURLWithPath(this.getAbsolute()); } if(!resolved.startAccessingSecurityScopedResource()) { throw new LocalAccessDeniedException(String.format("Failure accessing security scoped resource for %s", this)); } return resolved; }
@Test public void testLockNoSuchFile() throws Exception { FinderLocal l = new FinderLocal(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString()); assertNull(l.lock(false)); }
public static <T> Write<T> write() { return new Write<>(); }
@Test public void testWrite() throws Exception { String tableName = DatabaseTestHelper.getTestTableName("UT_WRITE"); DatabaseTestHelper.createTable(DATA_SOURCE, tableName); try { ArrayList<KV<Integer, String>> data = getDataToWrite(EXPECTED_ROW_COUNT); pipeline.apply(Create.of(data)).apply(getJdbcWrite(tableName)); pipeline.run(); assertRowCount(DATA_SOURCE, tableName, EXPECTED_ROW_COUNT); } finally { DatabaseTestHelper.deleteTable(DATA_SOURCE, tableName); } }
public static Read read() { return new AutoValue_MongoDbIO_Read.Builder() .setMaxConnectionIdleTime(60000) .setNumSplits(0) .setBucketAuto(false) .setSslEnabled(false) .setIgnoreSSLCertificate(false) .setSslInvalidHostNameAllowed(false) .setQueryFn(FindQuery.create()) .build(); }
@Test public void testReadWithFilterAndLimit() throws Exception { PCollection<Document> output = pipeline.apply( MongoDbIO.read() .withUri("mongodb://localhost:" + port) .withDatabase(DATABASE_NAME) .withCollection(COLLECTION_NAME) .withNumSplits(10) .withQueryFn( FindQuery.create() .withFilters(Filters.eq("scientist", "Einstein")) .withLimit(5))); PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo(5L); pipeline.run(); }
public String[] getFields( String objectName ) throws KettleException { return getFields( getObjectFields( objectName ) ); }
@Test //PDI-16459 public void getFieldsTest() throws KettleException { String name = "name"; SalesforceConnection conn = new SalesforceConnection( null, "http://localhost:1234", "aUser", "aPass" ); Field[] fields = new Field[ 1 ]; Field field = new Field(); field.setRelationshipName( "Parent" ); field.setName( name ); fields[ 0 ] = field; String[] names = conn.getFields( fields ); Assert.assertEquals( name, names[ 0 ] ); }
public boolean submitProcessingErrors(Message message) { return submitProcessingErrorsInternal(message, message.processingErrors()); }
@Test public void submitProcessingErrors_processingErrorSubmittedToQueueAndMessageFilteredOut_ifSubmissionEnabledAndDuplicatesAreNotKept() throws Exception { // given final Message msg = Mockito.mock(Message.class); when(msg.getMessageId()).thenReturn("msg-x"); when(msg.supportsFailureHandling()).thenReturn(true); when(msg.processingErrors()).thenReturn(List.of( new Message.ProcessingError(() -> "Cause", "Message", "Details") )); when(failureHandlingConfiguration.submitProcessingFailures()).thenReturn(true); when(failureHandlingConfiguration.keepFailedMessageDuplicate()).thenReturn(false); // when final boolean notFilterOut = underTest.submitProcessingErrors(msg); // then assertThat(notFilterOut).isFalse(); verify(msg).setFilterOut(true); verify(failureSubmissionQueue, times(1)).submitBlocking(failureBatchCaptor.capture()); assertThat(failureBatchCaptor.getValue()).satisfies(fb -> { assertThat(fb.containsProcessingFailures()).isTrue(); assertThat(fb.size()).isEqualTo(1); assertThat(fb.getFailures().get(0)).satisfies(processingFailure -> { assertThat(processingFailure.failureType()).isEqualTo(FailureType.PROCESSING); assertThat(processingFailure.failureCause().label()).isEqualTo("Cause"); assertThat(processingFailure.message()).isEqualTo("Failed to process message with id 'msg-x': Message"); assertThat(processingFailure.failureDetails()).isEqualTo("Details"); assertThat(processingFailure.failureTimestamp()).isNotNull(); assertThat(processingFailure.failedMessage()).isEqualTo(msg); assertThat(processingFailure.targetIndex()).isNull(); assertThat(processingFailure.requiresAcknowledgement()).isTrue(); }); }); }
MethodSpec buildFunction(AbiDefinition functionDefinition) throws ClassNotFoundException { return buildFunction(functionDefinition, true); }
@Test public void testBuildingFunctionTransactionThatReturnsValueReportsWarning() throws Exception { AbiDefinition functionDefinition = new AbiDefinition( false, Arrays.asList(new NamedType("param", "uint8")), "functionName", Arrays.asList(new NamedType("result", "uint8")), "type", false); solidityFunctionWrapper.buildFunction(functionDefinition); verify(generationReporter) .report( "Definition of the function functionName returns a value but is not defined as a view function. " + "Please ensure it contains the view modifier if you want to read the return value"); }
public static Range<Comparable<?>> safeIntersection(final Range<Comparable<?>> range, final Range<Comparable<?>> connectedRange) { try { return range.intersection(connectedRange); } catch (final ClassCastException ex) { Class<?> clazz = getRangeTargetNumericType(range, connectedRange); if (null == clazz) { throw ex; } Range<Comparable<?>> newRange = createTargetNumericTypeRange(range, clazz); Range<Comparable<?>> newConnectedRange = createTargetNumericTypeRange(connectedRange, clazz); return newRange.intersection(newConnectedRange); } }
@Test void assertSafeIntersectionForLong() { Range<Comparable<?>> range = Range.upTo(3147483647L, BoundType.OPEN); Range<Comparable<?>> connectedRange = Range.downTo(3, BoundType.OPEN); Range<Comparable<?>> newRange = SafeNumberOperationUtils.safeIntersection(range, connectedRange); assertThat(newRange.lowerEndpoint(), is(3L)); assertThat(newRange.lowerBoundType(), is(BoundType.OPEN)); assertThat(newRange.upperEndpoint(), is(3147483647L)); assertThat(newRange.upperBoundType(), is(BoundType.OPEN)); }
public void addOtherTesseractConfig(String key, String value) { if (key == null) { throw new IllegalArgumentException("key must not be null"); } if (value == null) { throw new IllegalArgumentException("value must not be null"); } Matcher m = ALLOWABLE_OTHER_PARAMS_PATTERN.matcher(key); if (!m.find()) { throw new IllegalArgumentException("Key contains illegal characters: " + key); } m.reset(value); if (!m.find()) { throw new IllegalArgumentException("Value contains illegal characters: " + value); } otherTesseractConfig.put(key.trim(), value.trim()); userConfigured.add("otherTesseractConfig"); }
@Test public void testBadOtherValue() { TesseractOCRConfig config = new TesseractOCRConfig(); assertThrows(IllegalArgumentException.class, () -> { config.addOtherTesseractConfig("bad", "bad bad"); }); }
@Override public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException("LazySet is not modifiable"); }
@Test(expected = UnsupportedOperationException.class) public void testRemoveAll_throwsException() { set.removeAll(Collections.emptyList()); }
public static boolean isEcho(String parameterTypesDesc, String method) { return $ECHO.equals(method) && $ECHO_PARAMETER_DESC.equals(parameterTypesDesc); }
@Test void testIsEcho() { Assertions.assertTrue(RpcUtils.isEcho("Ljava/lang/Object;", "$echo")); Assertions.assertFalse(RpcUtils.isEcho("Ljava/lang/Object;", "testMethod")); Assertions.assertFalse(RpcUtils.isEcho("Ljava/lang/String;", "$echo")); }
@Override public Object decode(Response response, Type type) throws IOException { JsonAdapter<Object> jsonAdapter = moshi.adapter(type); if (response.status() == 404 || response.status() == 204) return Util.emptyValueOf(type); if (response.body() == null) return null; try (BufferedSource source = Okio.buffer(Okio.source(response.body().asInputStream()))) { if (source.exhausted()) { return null; // empty body } return jsonAdapter.fromJson(source); } catch (JsonDataException e) { if (e.getCause() != null && e.getCause() instanceof IOException) { throw (IOException) e.getCause(); } throw e; } }
@Test void notFoundDecodesToEmpty() throws Exception { Response response = Response.builder() .status(404) .reason("NOT FOUND") .headers(Collections.emptyMap()) .request(Request.create(Request.HttpMethod.GET, "/api", Collections.emptyMap(), null, Util.UTF_8)) .build(); assertThat((byte[]) new MoshiDecoder().decode(response, byte[].class)).isEmpty(); }
@Override public long getLongLE(int index) { checkIndex(index, 8); return _getLongLE(index); }
@Test public void testGetLongLEAfterRelease() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().getLongLE(0); } }); }
@Override public synchronized void onCompleted() { doneFuture.set(null); }
@Test public void onCompleted() { completeObserver.onCompleted(); ListenableFuture<Void> future = completeObserver.getCompletionFuture(); assertNotNull(future); }
public static Field p(String fieldName) { return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName); }
@Test void contains_sameElement() { String q = Q.p("f1").containsSameElement(Q.p("stime").le(1).and("etime").gt(2)) .build(); assertEquals(q, "yql=select * from sources * where f1 contains sameElement(stime <= 1, etime > 2)"); }
public static Method getApplyMethod(ScalarFn scalarFn) { Class<? extends ScalarFn> clazz = scalarFn.getClass(); Collection<Method> matches = ReflectHelpers.declaredMethodsWithAnnotation( ScalarFn.ApplyMethod.class, clazz, ScalarFn.class); if (matches.isEmpty()) { throw new IllegalArgumentException( String.format( "No method annotated with @%s found in class %s.", ScalarFn.ApplyMethod.class.getSimpleName(), clazz.getName())); } // If we have at least one match, then either it should be the only match // or it should be an extension of the other matches (which came from parent // classes). Method first = matches.iterator().next(); for (Method other : matches) { if (!first.getName().equals(other.getName()) || !Arrays.equals(first.getParameterTypes(), other.getParameterTypes())) { throw new IllegalArgumentException( String.format( "Found multiple methods annotated with @%s. [%s] and [%s]", ScalarFn.ApplyMethod.class.getSimpleName(), ReflectHelpers.formatMethod(first), ReflectHelpers.formatMethod(other))); } } // Method must be public. if ((first.getModifiers() & Modifier.PUBLIC) == 0) { throw new IllegalArgumentException( String.format("Method %s is not public.", ReflectHelpers.formatMethod(first))); } return first; }
@Test public void testDifferentMethodNameThrowsIllegalArgumentException() { thrown.expect(instanceOf(IllegalArgumentException.class)); thrown.expectMessage("Found multiple methods annotated with @ApplyMethod."); ScalarFnReflector.getApplyMethod(new IncrementFnDifferentMethodName()); }
@Override public String getName() { return FUNCTION_NAME; }
@Test public void testAdditionNullColumn() { ExpressionContext expression = RequestContextUtils.getExpression(String.format("add(%s,%s)", INT_SV_COLUMN, INT_SV_NULL_COLUMN)); TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap); Assert.assertTrue(transformFunction instanceof AdditionTransformFunction); Assert.assertEquals(transformFunction.getName(), AdditionTransformFunction.FUNCTION_NAME); double[] expectedValues = new double[NUM_ROWS]; RoaringBitmap roaringBitmap = new RoaringBitmap(); for (int i = 0; i < NUM_ROWS; i++) { if (isNullRow(i)) { expectedValues[i] = (double) Integer.MIN_VALUE + (double) _intSVValues[i]; roaringBitmap.add(i); } else { expectedValues[i] = (double) _intSVValues[i] * 2; } } testTransformFunctionWithNull(transformFunction, expectedValues, roaringBitmap); }
public static void validateFields(Object data) { validateFields(data, "Validation error: "); }
@Test void validatePerMinute() { StringDataEntry stringDataEntryValid = new StringDataEntry("key", "value"); long start = System.currentTimeMillis(); for (int i = 0; i < _1M; i++) { ConstraintValidator.validateFields(stringDataEntryValid); } long end = System.currentTimeMillis(); Assertions.assertTrue(MIN_IN_MS > end - start); }
public static Expression generateFilterExpression(SearchArgument sarg) { return translate(sarg.getExpression(), sarg.getLeaves()); }
@Test public void testDateType() { SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); Date gmtDate = Date.valueOf(LocalDate.of(2015, 11, 12)); SearchArgument arg = builder.startAnd().equals("date", PredicateLeaf.Type.DATE, gmtDate).end().build(); UnboundPredicate expected = Expressions.equal("date", Literal.of("2015-11-12").to(Types.DateType.get()).value()); UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg); assertPredicatesMatch(expected, actual); }
public static <T> Iterator<T> iterator(Class<T> expectedType, String factoryId, ClassLoader classLoader) throws Exception { Iterator<Class<T>> classIterator = classIterator(expectedType, factoryId, classLoader); return new NewInstanceIterator<>(classIterator); }
@Test public void loadServicesWithSpaceInURL() throws Exception { Class<ServiceLoaderSpecialCharsTestInterface> type = ServiceLoaderSpecialCharsTestInterface.class; String factoryId = "com.hazelcast.ServiceLoaderSpecialCharsTestInterface"; URL url = ClassLoader.getSystemResource("test with special chars^/"); ClassLoader given = new URLClassLoader(new URL[]{url}); Set<ServiceLoaderSpecialCharsTestInterface> implementations = new HashSet<>(); Iterator<ServiceLoaderSpecialCharsTestInterface> iterator = ServiceLoader.iterator(type, factoryId, given); while (iterator.hasNext()) { implementations.add(iterator.next()); } assertEquals(1, implementations.size()); }
@Override public Long sendSingleSmsToMember(String mobile, Long userId, String templateCode, Map<String, Object> templateParams) { // 如果 mobile 为空,则加载用户编号对应的手机号 if (StrUtil.isEmpty(mobile)) { mobile = memberService.getMemberUserMobile(userId); } // 执行发送 return sendSingleSms(mobile, userId, UserTypeEnum.MEMBER.getValue(), templateCode, templateParams); }
@Test public void testSendSingleSmsToUser() { // 准备参数 Long userId = randomLongId(); String templateCode = randomString(); Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234") .put("op", "login").build(); // mock memberService 的方法 String mobile = "15601691300"; when(memberService.getMemberUserMobile(eq(userId))).thenReturn(mobile); // mock SmsTemplateService 的方法 SmsTemplateDO template = randomPojo(SmsTemplateDO.class, o -> { o.setStatus(CommonStatusEnum.ENABLE.getStatus()); o.setContent("验证码为{code}, 操作为{op}"); o.setParams(Lists.newArrayList("code", "op")); }); when(smsTemplateService.getSmsTemplateByCodeFromCache(eq(templateCode))).thenReturn(template); String content = randomString(); when(smsTemplateService.formatSmsTemplateContent(eq(template.getContent()), eq(templateParams))) .thenReturn(content); // mock SmsChannelService 的方法 SmsChannelDO smsChannel = randomPojo(SmsChannelDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())); when(smsChannelService.getSmsChannel(eq(template.getChannelId()))).thenReturn(smsChannel); // mock SmsLogService 的方法 Long smsLogId = randomLongId(); when(smsLogService.createSmsLog(eq(mobile), eq(userId), eq(UserTypeEnum.MEMBER.getValue()), eq(Boolean.TRUE), eq(template), eq(content), eq(templateParams))).thenReturn(smsLogId); // 调用 Long resultSmsLogId = smsSendService.sendSingleSmsToMember(null, userId, templateCode, templateParams); // 断言 assertEquals(smsLogId, resultSmsLogId); // 断言调用 verify(smsProducer).sendSmsSendMessage(eq(smsLogId), eq(mobile), eq(template.getChannelId()), eq(template.getApiTemplateId()), eq(Lists.newArrayList(new KeyValue<>("code", "1234"), new KeyValue<>("op", "login")))); }
public static File[] getPathFiles(String path) throws FileNotFoundException { URL url = ResourceUtils.class.getClassLoader().getResource(path); if (url == null) { throw new FileNotFoundException("path not found: " + path); } return Arrays.stream(Objects.requireNonNull(new File(url.getPath()).listFiles(), "No files in " + path)) .filter(File::isFile).toArray(File[]::new); }
@Test public void testGetPathFilesSuccess() throws FileNotFoundException { final File[] files = ResourceUtils.getPathFiles("testdata"); assertNotNull(files); assertEquals(1, files.length); }
public static EnvVar createEnvVarFromFieldRef(String name, String field) { return new EnvVarBuilder() .withName(name) .withValueFrom(new EnvVarSourceBuilder() .withNewFieldRef() .withFieldPath(field) .endFieldRef() .build()) .build(); }
@Test public void testCreateEnvVarFromFieldRef() { EnvVar var = ContainerUtils.createEnvVarFromFieldRef("VAR_1", "spec.nodeName"); assertThat(var.getName(), is("VAR_1")); assertThat(var.getValueFrom().getFieldRef().getFieldPath(), is("spec.nodeName")); }
@Override protected Mono<Void> doFilter(final ServerWebExchange exchange) { ServerHttpResponse response = exchange.getResponse(); response.setStatusCode(HttpStatus.OK); return Mono.empty(); }
@Test public void testDoFilter() { ServerWebExchange webExchange = MockServerWebExchange.from(MockServerHttpRequest .post("http://localhost:8080/")); Mono<Void> filter = excludeFilter.doFilter(webExchange); StepVerifier.create(filter).verifyComplete(); }
@Override public InterpreterResult interpret(String st, InterpreterContext context) { return helper.interpret(session, st, context); }
@Test void should_extract_variable_from_statement() { // Given AngularObjectRegistry angularObjectRegistry = new AngularObjectRegistry("cassandra", null); GUI gui = new GUI(); gui.textbox("login", "hsue"); gui.textbox("age", "27"); InterpreterContext intrContext = InterpreterContext.builder() .setParagraphTitle("Paragraph1") .setAngularObjectRegistry(angularObjectRegistry) .setGUI(gui) .build(); String queries = "@prepare[test_insert_with_variable]=" + "INSERT INTO zeppelin.users(login,firstname,lastname,age) VALUES(?,?,?,?)\n" + "@bind[test_insert_with_variable]='{{login=hsue}}','Helen','SUE',{{age=27}}\n" + "SELECT firstname,lastname,age FROM zeppelin.users WHERE login='hsue';"; // When final InterpreterResult actual = interpreter.interpret(queries, intrContext); // Then assertEquals(Code.SUCCESS, actual.code()); assertEquals("firstname\tlastname\tage\nHelen\tSUE\t27\n", actual.message().get(0).getData()); }
@VisibleForTesting static SortedMap<OffsetRange, Integer> computeOverlappingRanges(Iterable<OffsetRange> ranges) { ImmutableSortedMap.Builder<OffsetRange, Integer> rval = ImmutableSortedMap.orderedBy(OffsetRangeComparator.INSTANCE); List<OffsetRange> sortedRanges = Lists.newArrayList(ranges); if (sortedRanges.isEmpty()) { return rval.build(); } Collections.sort(sortedRanges, OffsetRangeComparator.INSTANCE); // Stores ranges in smallest 'from' and then smallest 'to' order // e.g. [2, 7), [3, 4), [3, 5), [3, 5), [3, 6), [4, 0) PriorityQueue<OffsetRange> rangesWithSameFrom = new PriorityQueue<>(OffsetRangeComparator.INSTANCE); Iterator<OffsetRange> iterator = sortedRanges.iterator(); // Stored in reverse sorted order so that when we iterate and re-add them back to // overlappingRanges they are stored in sorted order from smallest to largest range.to List<OffsetRange> rangesToProcess = new ArrayList<>(); while (iterator.hasNext()) { OffsetRange current = iterator.next(); // Skip empty ranges if (current.getFrom() == current.getTo()) { continue; } // If the current range has a different 'from' then a prior range then we must produce // ranges in [rangesWithSameFrom.from, current.from) while (!rangesWithSameFrom.isEmpty() && rangesWithSameFrom.peek().getFrom() != current.getFrom()) { rangesToProcess.addAll(rangesWithSameFrom); Collections.sort(rangesToProcess, OffsetRangeComparator.INSTANCE); rangesWithSameFrom.clear(); int i = 0; long lastTo = rangesToProcess.get(i).getFrom(); // Output all the ranges that are strictly less then current.from // e.g. 
current.to := 7 for [3, 4), [3, 5), [3, 5), [3, 6) will produce // [3, 4) := 4 // [4, 5) := 3 // [5, 6) := 1 for (; i < rangesToProcess.size(); ++i) { if (rangesToProcess.get(i).getTo() > current.getFrom()) { break; } // Output only the first of any subsequent duplicate ranges if (i == 0 || rangesToProcess.get(i - 1).getTo() != rangesToProcess.get(i).getTo()) { rval.put( new OffsetRange(lastTo, rangesToProcess.get(i).getTo()), rangesToProcess.size() - i); lastTo = rangesToProcess.get(i).getTo(); } } // We exitted the loop with 'to' > current.from, we must add the range [lastTo, // current.from) if it is non-empty if (lastTo < current.getFrom() && i != rangesToProcess.size()) { rval.put(new OffsetRange(lastTo, current.getFrom()), rangesToProcess.size() - i); } // The remaining ranges have a 'to' that is greater then 'current.from' and will overlap // with current so add them back to rangesWithSameFrom with the updated 'from' for (; i < rangesToProcess.size(); ++i) { rangesWithSameFrom.add( new OffsetRange(current.getFrom(), rangesToProcess.get(i).getTo())); } rangesToProcess.clear(); } rangesWithSameFrom.add(current); } // Process the last chunk of overlapping ranges while (!rangesWithSameFrom.isEmpty()) { // This range always represents the range with with the smallest 'to' OffsetRange current = rangesWithSameFrom.remove(); rangesToProcess.addAll(rangesWithSameFrom); Collections.sort(rangesToProcess, OffsetRangeComparator.INSTANCE); rangesWithSameFrom.clear(); rval.put(current, rangesToProcess.size() + 1 /* include current */); // Shorten all the remaining ranges such that they start with current.to for (OffsetRange rangeWithDifferentFrom : rangesToProcess) { // Skip any duplicates of current if (rangeWithDifferentFrom.getTo() > current.getTo()) { rangesWithSameFrom.add(new OffsetRange(current.getTo(), rangeWithDifferentFrom.getTo())); } } rangesToProcess.clear(); } return rval.build(); }
@Test public void testIncreasingOverlaps() { Iterable<OffsetRange> ranges = Arrays.asList(range(0, 4), range(1, 5), range(2, 6), range(3, 7), range(4, 8), range(5, 9)); Map<OffsetRange, Integer> nonOverlappingRangesToNumElementsPerPosition = computeOverlappingRanges(ranges); assertEquals( ImmutableMap.builder() .put(range(0, 1), 1) .put(range(1, 2), 2) .put(range(2, 3), 3) .put(range(3, 4), 4) .put(range(4, 5), 4) .put(range(5, 6), 4) .put(range(6, 7), 3) .put(range(7, 8), 2) .put(range(8, 9), 1) .build(), nonOverlappingRangesToNumElementsPerPosition); assertNonEmptyRangesAndPositions(ranges, nonOverlappingRangesToNumElementsPerPosition); }
/** Returns the POJO types registered with this config, in registration order. */
public LinkedHashSet<Class<?>> getRegisteredPojoTypes() {
    return this.registeredPojoTypes;
}
@Test
void testLoadingPojoTypesFromSerializationConfig() {
    // Config string mapping three classes to the "pojo" serializer type; the
    // pieces are single-space separated, matching the original literal.
    final String serializationConfig =
            String.join(
                    " ",
                    "[org.apache.flink.api.common.serialization.SerializerConfigImplTest:",
                    "{type: pojo},",
                    "org.apache.flink.api.common.serialization.SerializerConfigImplTest$TestSerializer1:",
                    "{type: pojo},",
                    "org.apache.flink.api.common.serialization.SerializerConfigImplTest$TestSerializer2:",
                    "{type: pojo}]");
    final SerializerConfig serializerConfig = getConfiguredSerializerConfig(serializationConfig);
    // All three classes must come back as registered POJO types, in order.
    assertThat(serializerConfig.getRegisteredPojoTypes())
            .containsExactly(
                    SerializerConfigImplTest.class, TestSerializer1.class, TestSerializer2.class);
}
@Override public CompletableFuture<RegistrationResponse> registerTaskManager( final JobID jobId, final TaskManagerRegistrationInformation taskManagerRegistrationInformation, final Time timeout) { if (!jobGraph.getJobID().equals(jobId)) { log.debug( "Rejecting TaskManager registration attempt because of wrong job id {}.", jobId); return CompletableFuture.completedFuture( new JMTMRegistrationRejection( String.format( "The JobManager is not responsible for job %s. Maybe the TaskManager used outdated connection information.", jobId))); } final TaskManagerLocation taskManagerLocation; try { taskManagerLocation = resolveTaskManagerLocation( taskManagerRegistrationInformation.getUnresolvedTaskManagerLocation()); } catch (FlinkException exception) { log.error("Could not accept TaskManager registration.", exception); return CompletableFuture.completedFuture(new RegistrationResponse.Failure(exception)); } final ResourceID taskManagerId = taskManagerLocation.getResourceID(); final UUID sessionId = taskManagerRegistrationInformation.getTaskManagerSession(); final TaskManagerRegistration taskManagerRegistration = registeredTaskManagers.get(taskManagerId); if (taskManagerRegistration != null) { if (taskManagerRegistration.getSessionId().equals(sessionId)) { log.debug( "Ignoring registration attempt of TaskManager {} with the same session id {}.", taskManagerId, sessionId); final RegistrationResponse response = new JMTMRegistrationSuccess(resourceId); return CompletableFuture.completedFuture(response); } else { disconnectTaskManager( taskManagerId, new FlinkException( String.format( "A registered TaskManager %s re-registered with a new session id. This indicates a restart of the TaskManager. 
Closing the old connection.", taskManagerId))); } } CompletableFuture<RegistrationResponse> registrationResponseFuture = getRpcService() .connect( taskManagerRegistrationInformation.getTaskManagerRpcAddress(), TaskExecutorGateway.class) .handleAsync( (TaskExecutorGateway taskExecutorGateway, Throwable throwable) -> { if (throwable != null) { return new RegistrationResponse.Failure(throwable); } slotPoolService.registerTaskManager(taskManagerId); registeredTaskManagers.put( taskManagerId, TaskManagerRegistration.create( taskManagerLocation, taskExecutorGateway, sessionId)); // monitor the task manager as heartbeat target taskManagerHeartbeatManager.monitorTarget( taskManagerId, new TaskExecutorHeartbeatSender(taskExecutorGateway)); return new JMTMRegistrationSuccess(resourceId); }, getMainThreadExecutor()); if (fetchAndRetainPartitions) { registrationResponseFuture.whenComplete( (ignored, throwable) -> fetchAndRetainPartitionWithMetricsOnTaskManager(taskManagerId)); } return registrationResponseFuture; }
@Test
void testTaskManagerRegistrationTriggersHeartbeating() throws Exception {
    // Completed with the resource id the TaskExecutor receives a heartbeat from.
    final CompletableFuture<ResourceID> heartbeatResourceIdFuture = new CompletableFuture<>();
    final UnresolvedTaskManagerLocation unresolvedTaskManagerLocation =
            new LocalUnresolvedTaskManagerLocation();
    // Gateway whose heartbeat handler records the sender's resource id.
    final TestingTaskExecutorGateway taskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setHeartbeatJobManagerFunction(
                            (taskManagerId, ignored) -> {
                                heartbeatResourceIdFuture.complete(taskManagerId);
                                return FutureUtils.completedVoidFuture();
                            })
                    .createTestingTaskExecutorGateway();
    rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);

    // Aggressive 1ms heartbeat interval so a heartbeat fires promptly after
    // registration; long timeout so the test never sees a heartbeat timeout.
    try (final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withResourceId(jmResourceId)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(new HeartbeatServicesImpl(1L, 10000L))
                    .createJobMaster()) {

        jobMaster.start();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        // register task manager will trigger monitor heartbeat target, schedule heartbeat
        // request at interval time
        CompletableFuture<RegistrationResponse> registrationResponse =
                jobMasterGateway.registerTaskManager(
                        jobGraph.getJobID(),
                        TaskManagerRegistrationInformation.create(
                                taskExecutorGateway.getAddress(),
                                unresolvedTaskManagerLocation,
                                TestingUtils.zeroUUID()),
                        testingTimeout);

        // wait for the completion of the registration
        registrationResponse.get();

        // The heartbeat may carry either null or the JobMaster's resource id,
        // depending on timing — both are accepted here.
        assertThat(heartbeatResourceIdFuture.join())
                .satisfiesAnyOf(
                        resourceID -> assertThat(resourceID).isNull(),
                        resourceID -> assertThat(resourceID).isEqualTo(jmResourceId));
    }
}
@Override public synchronized boolean checkValid(String clientId, String username, byte[] password) { // Check Username / Password in DB using sqlQuery if (username == null || password == null) { LOG.info("username or password was null"); return false; } ResultSet resultSet = null; PreparedStatement preparedStatement = null; Connection conn = null; try { conn = this.dataSource.getConnection(); preparedStatement = conn.prepareStatement(this.sqlQuery); preparedStatement.setString(1, username); resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { final String foundPwq = resultSet.getString(1); messageDigest.update(password); byte[] digest = messageDigest.digest(); String encodedPasswd = new String(Hex.encodeHex(digest)); return foundPwq.equals(encodedPasswd); } } catch (SQLException sqlex) { LOG.error("Error quering DB for username: {}", username, sqlex); } finally { try { if (resultSet != null) { resultSet.close(); } if (preparedStatement != null) { preparedStatement.close(); } if (conn != null) { conn.close(); } } catch (SQLException e) { LOG.error("Error releasing connection to the datasource", username, e); } } return false; }
@Test
public void Db_verifyValid() {
    // clientId is irrelevant for DB-backed auth, so null is acceptable here.
    final DBAuthenticator dbAuthenticator =
            new DBAuthenticator(
                    ORG_H2_DRIVER,
                    JDBC_H2_MEM_TEST,
                    "SELECT PASSWORD FROM ACCOUNT WHERE LOGIN=?",
                    SHA_256);
    final boolean valid =
            dbAuthenticator.checkValid(null, "dbuser", "password".getBytes(UTF_8));
    assertTrue(valid);
}
/**
 * Maps any throwable to a JSON error payload. WebApplicationExceptions keep
 * their own HTTP status; everything else gets the default status.
 */
@Override
public Response toResponse(Throwable t) {
    final int status =
            (t instanceof WebApplicationException)
                    ? ((WebApplicationException) t).getResponse().getStatus()
                    : DEFAULT_STATUS;
    return Response.status(status)
            .entity(new SimpleHttpErrorInfo(status, t.getMessage()))
            .type(MediaType.APPLICATION_JSON)
            .build();
}
@Test public void testToResponseWithWebApplicationException() { // Arrange int status = 404; when(_webApplicationException.getResponse()).thenReturn(Response.status(status).build()); when(_webApplicationException.getMessage()).thenReturn("Not Found"); // Act Response response = _exceptionMapper.toResponse(_webApplicationException); // Assert assertEquals(response.getStatus(), status); SimpleHttpErrorInfo errorInfo = (SimpleHttpErrorInfo) response.getEntity(); assertEquals(errorInfo.getCode(), status); assertEquals(errorInfo.getError(), "Not Found"); }
/**
 * Extracts the name-server address from an endpoint string: everything after
 * the last '/'. When the input contains no '/', the whole string is returned.
 *
 * @param nameSrvEndpoint endpoint such as {@code "http://host:9876"}; may be null
 * @return the address portion, or {@code null} for a null or empty input
 */
public static String getNameSrvAddrFromNamesrvEndpoint(String nameSrvEndpoint) {
    // FIX: plain null/empty check instead of the third-party
    // StringUtils.isEmpty — same behavior, no external dependency.
    if (nameSrvEndpoint == null || nameSrvEndpoint.isEmpty()) {
        return null;
    }
    return nameSrvEndpoint.substring(nameSrvEndpoint.lastIndexOf('/') + 1);
}
@Test
public void testGetNameSrvAddrFromNamesrvEndpoint() {
    // Plain "host:port" strings and full URLs both resolve to the segment
    // after the last '/' (or the whole string when no '/' exists).
    assertThat(NameServerAddressUtils.getNameSrvAddrFromNamesrvEndpoint(endpoint1))
            .isEqualTo("127.0.0.1:9876");
    assertThat(NameServerAddressUtils.getNameSrvAddrFromNamesrvEndpoint(endpoint2))
            .isEqualTo("127.0.0.1:9876");
    // Instance-qualified endpoints keep their "MQ_INST_..." prefix intact.
    assertThat(NameServerAddressUtils.getNameSrvAddrFromNamesrvEndpoint(endpoint3))
            .isEqualTo("MQ_INST_123456789_BXXUzaee.xxx:80");
    assertThat(NameServerAddressUtils.getNameSrvAddrFromNamesrvEndpoint(endpoint4))
            .isEqualTo("MQ_INST_123456789_BXXUzaee.xxx:80");
}
/**
 * Downloads the YouTube player JavaScript code.
 *
 * <p>First tries the iframe-resource URL; if anything in that path fails
 * (including URL validation or the download itself), falls back to extracting
 * the URL from the embed watch page for the given video id. Note the broad
 * catch is deliberate: any failure on the primary path triggers the fallback.
 */
@Nonnull
static String extractJavaScriptPlayerCode(@Nonnull final String videoId)
        throws ParsingException {
    String url;
    try {
        url = YoutubeJavaScriptExtractor.extractJavaScriptUrlWithIframeResource();
        final String playerJsUrl = YoutubeJavaScriptExtractor.cleanJavaScriptUrl(url);

        // Assert that the URL we extracted and built is valid
        new URL(playerJsUrl);

        return YoutubeJavaScriptExtractor.downloadJavaScriptCode(playerJsUrl);
    } catch (final Exception e) {
        // Fallback path: derive the URL from the embed watch page instead.
        url = YoutubeJavaScriptExtractor.extractJavaScriptUrlWithEmbedWatchPage(videoId);
        final String playerJsUrl = YoutubeJavaScriptExtractor.cleanJavaScriptUrl(url);

        try {
            // Assert that the URL we extracted and built is valid
            new URL(playerJsUrl);
        } catch (final MalformedURLException exception) {
            // On the fallback path an invalid URL is fatal (no further fallback).
            throw new ParsingException(
                    "The extracted and built JavaScript URL is invalid", exception);
        }
        return YoutubeJavaScriptExtractor.downloadJavaScriptCode(playerJsUrl);
    }
}
@Test
public void testExtractJavaScript__success() {
    // Any public video id will do; we only check the player code looks sane.
    final String code = YoutubeJavaScriptExtractor.extractJavaScriptPlayerCode("d4IGg5dqeO8");
    assertPlayerJsCode(code);
}
/**
 * Looks up a converter for the given type, probing the preferred registry
 * first (custom or default, per {@code isCustomFirst}) and falling back to
 * the other one. Returns null when neither registry has a converter.
 */
public <T> Converter<T> getConverter(Type type, boolean isCustomFirst) {
    final Converter<T> preferred =
            isCustomFirst ? this.getCustomConverter(type) : this.getDefaultConverter(type);
    if (null != preferred) {
        return preferred;
    }
    return isCustomFirst ? this.getDefaultConverter(type) : this.getCustomConverter(type);
}
@Test
public void getConverterTest() {
    // Default-first (non-custom) lookup must yield a converter for CharSequence.
    final Converter<Object> found =
            ConverterRegistry.getInstance().getConverter(CharSequence.class, false);
    assertNotNull(found);
}
/**
 * Evaluates each row against the given column pairs and returns the first
 * non-empty result, if any.
 */
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap,
                                 final String outputColumn,
                                 final String regexField) {
    return rows.stream()
            .map(row -> row.evaluate(columnPairsMap, outputColumn, regexField))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .findFirst();
}
@Test
void evaluateKeyFoundMatching() {
    // A matching input pair resolves to the value of the requested output column.
    KiePMMLInlineTable inlineTable =
            new KiePMMLInlineTable("name", Collections.emptyList(), ROWS);
    Optional<Object> result =
            inlineTable.evaluate(Collections.singletonMap("KEY-1-1", "VALUE-1-1"), "KEY-1-2", null);
    assertThat(result).isPresent();
    assertThat(result.get()).isEqualTo("VALUE-1-2");
}
/**
 * Builds table statistics for an Iceberg table.
 *
 * <p>When a snapshot is available ({@code version.end()} present): optionally
 * reads NDVs from Puffin stats (if enabled on the session) and supplements
 * them with partition-value counts, then uses cached per-predicate file stats
 * for the row count. Without a snapshot, emits a placeholder row count of 1
 * and unknown column statistics.
 */
public Statistics getTableStatistics(IcebergTable icebergTable,
                                     Map<ColumnRefOperator, Column> colRefToColumnMetaMap,
                                     OptimizerContext session,
                                     ScalarOperator predicate,
                                     TableVersionRange version) {
    Table nativeTable = icebergTable.getNativeTable();
    Statistics.Builder statisticsBuilder = Statistics.builder();
    String uuid = icebergTable.getUUID();
    if (version.end().isPresent()) {
        // NDV stats only exist for primitive columns.
        Set<Integer> primitiveColumnsFieldIds = nativeTable.schema().columns().stream()
                .filter(column -> column.type().isPrimitiveType())
                .map(Types.NestedField::fieldId).collect(Collectors.toSet());
        Map<Integer, Long> colIdToNdvs = new HashMap<>();
        if (session != null && session.getSessionVariable().enableReadIcebergPuffinNdv()) {
            colIdToNdvs = readNumDistinctValues(icebergTable, primitiveColumnsFieldIds, version);
            if (uuidToPartitionFieldIdToValues.containsKey(uuid)
                    && !uuidToPartitionFieldIdToValues.get(uuid).isEmpty()) {
                // For partition source columns, the number of distinct observed
                // partition values is a direct NDV — it overrides Puffin NDVs.
                HashMultimap<Integer, Object> partitionFieldIdToValue =
                        uuidToPartitionFieldIdToValues.get(uuid);
                Map<Integer, Long> partitionSourceIdToNdv = new HashMap<>();
                for (PartitionField partitionField : nativeTable.spec().fields()) {
                    int sourceId = partitionField.sourceId();
                    int fieldId = partitionField.fieldId();
                    if (partitionFieldIdToValue.containsKey(fieldId)) {
                        partitionSourceIdToNdv.put(sourceId,
                                (long) partitionFieldIdToValue.get(fieldId).size());
                    }
                }
                colIdToNdvs.putAll(partitionSourceIdToNdv);
            }
        }

        // File stats are cached per (db, table, snapshot, predicate); a miss
        // falls back to a default single-record estimate.
        PredicateSearchKey key = PredicateSearchKey.of(icebergTable.getRemoteDbName(),
                icebergTable.getRemoteTableName(), version.end().get(), predicate);
        IcebergFileStats icebergFileStats;
        if (!icebergFileStatistics.containsKey(key)) {
            icebergFileStats = new IcebergFileStats(1);
        } else {
            icebergFileStats = icebergFileStatistics.get(key);
        }

        statisticsBuilder.setOutputRowCount(icebergFileStats.getRecordCount());
        statisticsBuilder.addColumnStatistics(buildColumnStatistics(
                nativeTable, colRefToColumnMetaMap, icebergFileStats, colIdToNdvs));
    } else {
        // empty table: placeholder row count and unknown column statistics
        statisticsBuilder.setOutputRowCount(1);
        statisticsBuilder.addColumnStatistics(
                buildUnknownColumnStatistics(colRefToColumnMetaMap.keySet()));
    }
    return statisticsBuilder.build();
}
@Test
public void testGetEmptyTableStatistics() {
    // An empty version range means no snapshot: the provider falls back to a
    // placeholder row count of 1 with unknown column statistics.
    IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider();
    IcebergTable icebergTable = new IcebergTable(1, "srTableName", "iceberg_catalog",
            "resource_name", "db_name", "table_name", "", Lists.newArrayList(),
            mockedNativeTableA, Maps.newHashMap());
    Map<ColumnRefOperator, Column> columnMetaByRef = new HashMap<>();
    columnMetaByRef.put(new ColumnRefOperator(3, Type.INT, "id", true),
            new Column("id", Type.INT));
    columnMetaByRef.put(new ColumnRefOperator(4, Type.STRING, "data", true),
            new Column("data", Type.STRING));
    Statistics statistics = statisticProvider.getTableStatistics(
            icebergTable, columnMetaByRef, null, null, TableVersionRange.empty());
    Assert.assertEquals(1.0, statistics.getOutputRowCount(), 0.001);
}
/**
 * Routes an application-priority update to the home sub-cluster of the
 * application and returns its response.
 *
 * @throws YarnException when the request is malformed, the application is not
 *         registered in the FederationStateStore, or the sub-cluster call fails
 */
@Override
public UpdateApplicationPriorityResponse updateApplicationPriority(
    UpdateApplicationPriorityRequest request) throws YarnException, IOException {

  // Validate the request up front; logAndThrowException always throws.
  if (request == null || request.getApplicationId() == null
      || request.getApplicationPriority() == null) {
    routerMetrics.incrUpdateAppPriorityFailedRetrieved();
    String msg = "Missing updateApplicationPriority request or applicationId "
        + "or applicationPriority information.";
    RouterAuditLogger.logFailure(user.getShortUserName(), UPDATE_APPLICATIONPRIORITY,
        UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg);
    RouterServerUtil.logAndThrowException(msg, null);
  }

  long startTime = clock.getTime();
  SubClusterId subClusterId = null;
  ApplicationId applicationId = request.getApplicationId();

  // Resolve the home sub-cluster for this application.
  try {
    subClusterId = getApplicationHomeSubCluster(applicationId);
  } catch (YarnException e) {
    routerMetrics.incrUpdateAppPriorityFailedRetrieved();
    String msg = "Application " + applicationId + " does not exist in FederationStateStore.";
    RouterAuditLogger.logFailure(user.getShortUserName(), UPDATE_APPLICATIONPRIORITY,
        UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg);
    RouterServerUtil.logAndThrowException(msg, e);
  }

  ApplicationClientProtocol clientRMProxy = getClientRMProxyForSubCluster(subClusterId);
  UpdateApplicationPriorityResponse response = null;
  try {
    response = clientRMProxy.updateApplicationPriority(request);
  } catch (Exception e) {
    // FIX: was incrFailAppAttemptFailedRetrieved() — a copy-paste from the
    // app-attempt path; this failure belongs to the update-priority metric.
    routerMetrics.incrUpdateAppPriorityFailedRetrieved();
    String msg = "Unable to update application priority for " + applicationId
        + " to SubCluster " + subClusterId;
    RouterAuditLogger.logFailure(user.getShortUserName(), UPDATE_APPLICATIONPRIORITY,
        UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg);
    RouterServerUtil.logAndThrowException(msg, e);
  }

  if (response == null) {
    LOG.error("No response when update application priority of "
        + "the applicationId {} to SubCluster {}.", applicationId, subClusterId.getId());
  }

  long stopTime = clock.getTime();
  routerMetrics.succeededUpdateAppPriorityRetrieved(stopTime - startTime);
  RouterAuditLogger.logSuccess(user.getShortUserName(), UPDATE_APPLICATIONPRIORITY,
      TARGET_CLIENT_RM_SERVICE, applicationId, subClusterId);
  return response;
}
@Test
public void testUpdateApplicationPriority() throws Exception {
  LOG.info("Test FederationClientInterceptor : Update Application Priority request.");

  // null request must be rejected with a validation error
  LambdaTestUtils.intercept(YarnException.class,
      "Missing updateApplicationPriority request "
          + "or applicationId or applicationPriority information.",
      () -> interceptor.updateApplicationPriority(null));

  // normal request
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);

  // Submit the application
  SubmitApplicationResponse response = interceptor.submitApplication(request);

  Assert.assertNotNull(response);
  Assert.assertNotNull(stateStoreUtil.queryApplicationHomeSC(appId));

  // The app must have a home sub-cluster whose RM has accepted and scheduled it.
  SubClusterId subClusterId = interceptor.getApplicationHomeSubCluster(appId);
  Assert.assertNotNull(subClusterId);

  MockRM mockRM = interceptor.getMockRMs().get(subClusterId);
  mockRM.waitForState(appId, RMAppState.ACCEPTED);
  RMApp rmApp = mockRM.getRMContext().getRMApps().get(appId);
  mockRM.waitForState(rmApp.getCurrentAppAttempt().getAppAttemptId(),
      RMAppAttemptState.SCHEDULED);

  // Call GetApplicationAttempts
  GetApplicationAttemptsRequest attemptsRequest =
      GetApplicationAttemptsRequest.newInstance(appId);
  GetApplicationAttemptsResponse attemptsResponse =
      interceptor.getApplicationAttempts(attemptsRequest);
  Assert.assertNotNull(attemptsResponse);

  // Update the priority and verify the new value is echoed back.
  Priority priority = Priority.newInstance(20);
  UpdateApplicationPriorityRequest requestUpdateAppPriority =
      UpdateApplicationPriorityRequest.newInstance(appId, priority);
  UpdateApplicationPriorityResponse responseAppPriority =
      interceptor.updateApplicationPriority(requestUpdateAppPriority);

  Assert.assertNotNull(responseAppPriority);
  Assert.assertEquals(20,
      responseAppPriority.getApplicationPriority().getPriority());
}
/**
 * Parses {@code num} as a decimal integer, returning {@code defaultInt} when
 * the input is null or not a valid integer.
 */
public static int parseInt(String num, int defaultInt) {
    if (num == null) {
        return defaultInt;
    }
    try {
        return Integer.parseInt(num);
    } catch (NumberFormatException e) {
        // FIX: catch the specific NumberFormatException instead of the overly
        // broad Exception; parseInt throws nothing else for non-null input.
        return defaultInt;
    }
}
@Test
public void testParseInt() {
    // Invalid or missing input falls back to the supplied default.
    for (String invalid : new String[] {"", "xxx", null}) {
        Assert.assertEquals(CommonUtils.parseInt(invalid, 123), 123);
    }
    // A well-formed number is parsed as-is.
    Assert.assertEquals(CommonUtils.parseInt("12345", 123), 12345);
}
/**
 * Rewrites the given expression tree, letting the operator plugin transform
 * each matching node; non-matching nodes are left unchanged.
 */
public Expression rewrite(final Expression expression) {
    return new ExpressionTreeRewriter<>(new OperatorPlugin()::process)
        .rewrite(expression, null);
}
@Test public void shouldReplaceComparisonOfWindowEndAndString() { // Given: final Expression predicate = getPredicate( "SELECT * FROM orders where WINDOWEND > '2017-01-01T00:00:00.000';"); // When: final Expression rewritten = rewriter.rewrite(predicate); // Then: assertThat(rewritten.toString(), is(String.format("(WINDOWEND > %d)", A_TIMESTAMP))); }
/**
 * Builds the "Search State" system-info section from the current
 * Elasticsearch cluster stats (health status and node count).
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    final ProtobufSystemInfo.Section.Builder section = ProtobufSystemInfo.Section.newBuilder();
    section.setName("Search State");
    final ClusterStatsResponse stats = esClient.clusterStats();
    setAttribute(section, "State", stats.getHealthStatus().name());
    setAttribute(section, "Nodes", stats.getNodeCount());
    return section.build();
}
@Test
public void test_attributes() {
    final ProtobufSystemInfo.Section section = underTest.toProtobuf();
    // Health state must be one of the three ES colors; node count positive.
    assertThat(attribute(section, "State").getStringValue()).isIn("RED", "YELLOW", "GREEN");
    assertThat(attribute(section, "Nodes").getLongValue()).isPositive();
}
/** Reads the current gauge value by delegating to the underlying gauge source. */
@Override
public long read() {
    return gaugeSource.read();
}
@Test
public void whenProbeThrowsException() {
    // A probe that blows up must not propagate; the gauge reads as 0 instead.
    metricsRegistry.registerStaticProbe(this, "foo", MANDATORY,
        (LongProbeFunction) o -> {
            throw new RuntimeException();
        });

    final LongGauge gauge = metricsRegistry.newLongGauge("foo");

    assertEquals(0, gauge.read());
}
/**
 * Asynchronously resets positions for any partitions that currently need an
 * offset reset; no-op when none do.
 */
public void resetPositionsIfNeeded() {
    final Map<TopicPartition, Long> resetTimestamps =
            offsetFetcherUtils.getOffsetResetTimestamp();
    if (!resetTimestamps.isEmpty()) {
        resetPositionsAsync(resetTimestamps);
    }
}
@Test
public void testResetOffsetsAuthorizationFailure() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, OffsetResetStrategy.LATEST);

    // First list-offsets request answers with TOPIC_AUTHORIZATION_FAILED
    // (final arg disconnected=false).
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
            validLeaderEpoch), listOffsetResponse(Errors.TOPIC_AUTHORIZATION_FAILED, -1, -1), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));

    // The buffered auth error is surfaced on the next reset attempt.
    try {
        offsetFetcher.resetPositionsIfNeeded();
        fail("Expected authorization error to be raised");
    } catch (TopicAuthorizationException e) {
        assertEquals(singleton(tp0.topic()), e.unauthorizedTopics());
    }

    // The exception should clear after being raised, but no retry until the backoff
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(client.hasInFlightRequests());
    assertFalse(subscriptions.hasValidPosition(tp0));

    // Next one succeeds
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
            listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();

    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
@Override public boolean deleteMaintenanceAssociation(MdId mdName, MaIdShort maName) throws CfmConfigException { log.info("Deleting MA {} of MD {} from distributed store", maName, mdName); MaintenanceDomain.MdBuilder builder = DefaultMaintenanceDomain .builder(store.getMaintenanceDomain(mdName) .orElseThrow(() -> new IllegalArgumentException("Unknown MD: " + mdName))); //Check the MA is present if (!builder.checkMaExists(maName)) { return false; } builder = builder.deleteFromMaList(maName); store.createUpdateMaintenanceDomain(builder.build()); return true; }
@Test
public void testDeleteMaintenanceAssociation() throws CfmConfigException {
    // Deleting an existing MA succeeds...
    assertTrue(service.deleteMaintenanceAssociation(
            MdIdCharStr.asMdId("test-md-1"),
            MaIdCharStr.asMaId("test-ma-1-2")));

    // Now check it has actually been removed
    Collection<MaintenanceAssociation> maListUpdated =
            service.getAllMaintenanceAssociation(MdIdCharStr.asMdId("test-md-1"));
    assertEquals(1, maListUpdated.size());
    maListUpdated.stream().findFirst().ifPresent(ma ->
            assertEquals("test-ma-1-1", ma.maId().maName())
    );

    // Now try with an invalid mdName — expected to raise IllegalArgumentException
    try {
        service.deleteMaintenanceAssociation(
                MdIdCharStr.asMdId("test-md-2"),
                MaIdCharStr.asMaId("test-ma-1-2"));
    } catch (IllegalArgumentException e) {
        assertEquals("Unknown MD: test-md-2", e.getMessage());
    }

    // Now try with an invalid maName — should return false, not throw
    try {
        assertFalse(service.deleteMaintenanceAssociation(
                MdIdCharStr.asMdId("test-md-1"),
                MaIdCharStr.asMaId("test-ma-1-3")));
    } catch (IllegalArgumentException e) {
        fail("Should not throw exception: " + e.getMessage());
    }
}
static void validate(KafkaConsumer<byte[], byte[]> consumer, byte[] message, ConsumerRecords<byte[], byte[]> records) { if (records.isEmpty()) { consumer.commitSync(); throw new RuntimeException("poll() timed out before finding a result (timeout:[" + POLL_TIMEOUT_MS + "])"); } //Check result matches the original record String sent = new String(message, StandardCharsets.UTF_8); String read = new String(records.iterator().next().value(), StandardCharsets.UTF_8); if (!read.equals(sent)) { consumer.commitSync(); throw new RuntimeException("The message read [" + read + "] did not match the message sent [" + sent + "]"); } //Check we only got the one message if (records.count() != 1) { int count = records.count(); consumer.commitSync(); throw new RuntimeException("Only one result was expected during this test. We found [" + count + "]"); } }
@Test
public void shouldFailWhenConsumerRecordsIsEmpty() {
    // An empty poll result must be reported as a failure.
    when(records.isEmpty()).thenReturn(true);

    assertThrows(
        RuntimeException.class,
        () -> EndToEndLatency.validate(consumer, new byte[0], records));
}
/**
 * Persists a new k8s API config. Rejects null input up front, stores the
 * config, then logs the endpoint that was added.
 */
@Override
public void createApiConfig(K8sApiConfig config) {
    checkNotNull(config, ERR_NULL_CONFIG);
    configStore.createApiConfig(config);
    log.info(String.format(MSG_CONFIG, endpoint(config), MSG_CREATED));
}
// A null config must be rejected by the checkNotNull precondition.
@Test(expected = NullPointerException.class)
public void testCreateNullConfig() {
    target.createApiConfig(null);
}
/**
 * Left-pads {@code input} with {@code padding} up to {@code targetLen}
 * characters. An over-long input is truncated to the target length. Returns
 * null for null input, null/empty padding, or a missing/negative length.
 */
@Udf
public String lpad(
    @UdfParameter(description = "String to be padded") final String input,
    @UdfParameter(description = "Target length") final Integer targetLen,
    @UdfParameter(description = "Padding string") final String padding) {
  if (input == null || padding == null || padding.isEmpty()
      || targetLen == null || targetLen < 0) {
    return null;
  }

  final int padChars = Math.max(targetLen - input.length(), 0);
  final StringBuilder result = new StringBuilder(targetLen + padding.length());
  while (result.length() < padChars) {
    result.append(padding);
  }
  // Trim any overshoot from the last padding chunk, attach the input, then
  // clamp to the target length (this is what truncates an over-long input).
  result.setLength(padChars);
  result.append(input);
  result.setLength(targetLen);
  return result.toString();
}
@Test
public void shouldReturnNullForNegativeLengthString() {
    // A negative target length is invalid input and must produce null.
    assertThat(udf.lpad("foo", -1, "bar"), is(nullValue()));
}
/**
 * Routes with shadow awareness: computes the shadow data-source mappings for
 * the rule, then rewrites the route context so matching units point at their
 * shadow data sources.
 */
@Override
public void route(final RouteContext routeContext, final ShadowRule rule) {
    decorateRouteContext(routeContext, rule, findShadowDataSourceMappings(rule));
}
@Test
void assertRoute() {
    RouteContext actual = createRouteContext();
    shadowRouteEngine.route(actual, new ShadowRule(createShadowRuleConfiguration()));
    // The first route unit must be remapped onto the shadow data source.
    RouteMapper actualDataSourceMapper =
            actual.getRouteUnits().iterator().next().getDataSourceMapper();
    assertThat(actualDataSourceMapper.getLogicName(), is("logic_db"));
    assertThat(actualDataSourceMapper.getActualName(), is("ds_shadow"));
}
/** Grants every configured user the all-permitted privilege set. */
@Override
public Map<Grantee, ShardingSpherePrivileges> build(final AuthorityRuleConfiguration ruleConfig) {
    return ruleConfig.getUsers().stream()
            .collect(Collectors.toMap(
                    ShardingSphereUser::getGrantee,
                    user -> new AllPermittedPrivileges()));
}
@Test
void assertBuild() {
    // A single configured user yields exactly one all-permitted privilege entry.
    AuthorityRuleConfiguration ruleConfig = new AuthorityRuleConfiguration(
            Collections.singleton(new ShardingSphereUser("root@%")),
            mock(AlgorithmConfiguration.class), Collections.emptyMap(), null);
    Map<Grantee, ShardingSpherePrivileges> built =
            TypedSPILoader.getService(PrivilegeProvider.class, "ALL_PERMITTED").build(ruleConfig);
    assertThat(built.size(), is(1));
    assertThat(built.get(new Grantee("root", "%")), instanceOf(AllPermittedPrivileges.class));
}
/**
 * Loads erasure-coding policies from the given policy file.
 *
 * @return the parsed policies, or an empty list when the file does not exist
 * @throws RuntimeException when the file cannot be parsed or read
 */
public List<ErasureCodingPolicy> loadPolicy(String policyFilePath) {
    try {
        File policyFile = getPolicyFile(policyFilePath);
        if (!policyFile.exists()) {
            LOG.warn("Not found any EC policy file");
            return Collections.emptyList();
        }
        return loadECPolicies(policyFile);
    } catch (ParserConfigurationException | IOException | SAXException e) {
        // FIX: keep the original exception as the cause instead of dropping it.
        throw new RuntimeException("Failed to load EC policy file: " + policyFilePath, e);
    }
}
@Test
public void testBadECLayoutVersion() throws Exception {
    // Write a policy file whose <layoutversion> (3) is not supported by the
    // loader; parsing it must fail with a layoutversion error.
    PrintWriter out = new PrintWriter(new FileWriter(POLICY_FILE));
    out.println("<?xml version=\"1.0\"?>");
    out.println("<configuration>");
    out.println("<layoutversion>3</layoutversion>");
    out.println("<schemas>");
    out.println("  <schema id=\"RSk12m4\">");
    out.println("    <codec>RS</codec>");
    out.println("    <k>12</k>");
    out.println("    <m>4</m>");
    out.println("  </schema>");
    out.println("  <schema id=\"RS-legacyk12m4\">");
    out.println("    <codec>RS-legacy</codec>");
    out.println("    <k>12</k>");
    out.println("    <m>4</m>");
    out.println("  </schema>");
    out.println("</schemas>");
    out.println("<policies>");
    out.println("  <policy>");
    out.println("    <schema>RSk12m4</schema>");
    out.println("    <cellsize>1024</cellsize>");
    out.println("  </policy>");
    out.println("</policies>");
    out.println("</configuration>");
    out.close();

    ECPolicyLoader ecPolicyLoader = new ECPolicyLoader();
    try {
        ecPolicyLoader.loadPolicy(POLICY_FILE);
        fail("RuntimeException should be thrown for bad layoutversion");
    } catch (RuntimeException e) {
        assertExceptionContains("The parse failed because of "
            + "bad layoutversion value", e);
    }
}
/**
 * Fetches the value stored under {@code key} from etcd, decoded as UTF-8.
 * Returns null when the key does not exist.
 */
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public String query(final String key) {
    final List<KeyValue> found =
            client.getKVClient().get(ByteSequence.from(key, StandardCharsets.UTF_8)).get().getKvs();
    if (found.isEmpty()) {
        return null;
    }
    return found.iterator().next().getValue().toString(StandardCharsets.UTF_8);
}
@Test
void assertGetKey() {
    repository.query("key");
    // The lookup must hit the KV client with the UTF-8 key bytes and read the
    // returned key-value list.
    verify(kv).get(ByteSequence.from("key", StandardCharsets.UTF_8));
    verify(getResponse).getKvs();
}
/**
 * Copies the values from {@code source} into {@code destination} by invoking
 * the builder's package-private {@code override(BUILDER)} method reflectively.
 *
 * @throws ConfigurationRuntimeException when the override method is missing
 *         or the invocation fails (original exception kept as the cause)
 */
public static <BUILDER extends ConfigBuilder> void setValues(BUILDER destination, BUILDER source) {
    try {
        Method setter = destination.getClass().getDeclaredMethod("override", destination.getClass());
        setter.setAccessible(true);
        try {
            setter.invoke(destination, source);
        } finally {
            // FIX: restore accessibility even when invoke() throws.
            setter.setAccessible(false);
        }
    } catch (Exception e) {
        // FIX: the message previously ran the class name straight into the
        // sentence ("...builder.com.example.Foo").
        throw new ConfigurationRuntimeException(
                "Could not set values on config builder " + destination.getClass().getName(), e);
    }
}
@Test
public void require_that_builder_values_can_be_overridden_by_another_builder() {
    // Start from a fully-populated builder, then overlay a sparse builder;
    // fields set in `source` must win, everything else keeps its old value.
    FunctionTestConfig.Builder destination = createVariableAccessBuilder();
    FunctionTestConfig.Builder source = new FunctionTestConfig.Builder()
        .int_val(-1)
        .intarr(0)
        .doublearr(0.0)
        .basicStruct(b -> b.bar(-1).intArr(0))
        .myarray(b -> b
            .intval(-1)
            .refval("")
            .fileVal("")
            .myStruct(bb -> bb.a(0)
            ));

    ConfigInstanceUtil.setValues(destination, source);

    FunctionTestConfig result = new FunctionTestConfig(destination);
    // Scalars: overridden by source.
    assertEquals(-1, result.int_val());
    assertEquals("foo", result.string_val());
    // Scalar arrays are replaced, not merged...
    assertEquals(1, result.intarr().size());
    assertEquals(0, result.intarr(0));
    assertEquals(2, result.longarr().size());
    // ...while doublearr and myarray show append semantics: source entries are
    // added after the destination's existing ones.
    assertEquals(3, result.doublearr().size());
    assertEquals(2344.0, result.doublearr(0), 0.01);
    assertEquals(123.0, result.doublearr(1), 0.01);
    assertEquals(0.0, result.doublearr(2), 0.01);
    // Struct fields: overridden where set, untouched otherwise.
    assertEquals(-1, result.basicStruct().bar());
    assertEquals("basicFoo", result.basicStruct().foo());
    assertEquals(3, result.basicStruct().intArr().size());
    assertEquals(310, result.basicStruct().intArr(0));
    assertEquals(311, result.basicStruct().intArr(1));
    assertEquals(0, result.basicStruct().intArr(2));
    assertEquals(3, result.myarray().size());
    assertEquals(-1, result.myarray(2).intval());
    assertTrue(result.myarray(2).refval().isEmpty());
    assertTrue(result.myarray(2).fileVal().value().isEmpty());
    assertEquals(0, result.myarray(2).myStruct().a());
}
/**
 * Parses a raw CSV cell into the Java value matching the field's schema type.
 *
 * @throws UnsupportedOperationException for schema types with no built-in parser
 * @throws IllegalArgumentException when the cell cannot be parsed as the
 *         field's type (original parse error kept as the cause)
 */
static Object parseCell(String cell, Schema.Field field) {
    Schema.FieldType fieldType = field.getType();
    try {
        switch (fieldType.getTypeName()) {
            case STRING:
                return cell;
            case INT16:
                return Short.parseShort(cell);
            case INT32:
                return Integer.parseInt(cell);
            case INT64:
                return Long.parseLong(cell);
            case BOOLEAN:
                return Boolean.parseBoolean(cell);
            case BYTE:
                return Byte.parseByte(cell);
            case DECIMAL:
                return new BigDecimal(cell);
            case DOUBLE:
                return Double.parseDouble(cell);
            case FLOAT:
                return Float.parseFloat(cell);
            case DATETIME:
                return Instant.parse(cell);
            default:
                throw new UnsupportedOperationException(
                    "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
        }
    } catch (IllegalArgumentException e) {
        // FIX: keep the original parse exception as the cause instead of
        // rethrowing with only its message.
        throw new IllegalArgumentException(
            e.getMessage() + " field " + field.getName() + " was received -- type mismatch", e);
    }
}
@Test
public void givenValidLongCell_parses() {
    // The cell text and the boxed Long it should parse to.
    final Long expected = Long.parseLong("1234567890");
    final DefaultMapEntry cellToExpectedValue = new DefaultMapEntry("1234567890", expected);
    final Schema schema =
        Schema.builder()
            .addInt32Field("an_integer")
            .addInt64Field("a_long")
            .addInt16Field("a_short")
            .build();

    final Object actual =
        CsvIOParseHelpers.parseCell(
            cellToExpectedValue.getKey().toString(), schema.getField("a_long"));

    assertEquals(cellToExpectedValue.getValue(), actual);
}
/**
 * Removes all elements contained in {@code c} and fires the removal callback
 * for each element that was actually removed.
 *
 * <p>FIX: previously the callback fired for every element of {@code c}, even
 * items this collection never contained. We now snapshot which requested
 * items are present before delegating, and only notify for those.
 * NOTE(review): listeners relying on the old over-notification would see a
 * behavior change — confirm no caller depends on it.
 */
@Override
public boolean removeAll(java.util.Collection<?> c) {
    java.util.List<Object> present = new java.util.ArrayList<>();
    for (Object item : c) {
        if (this.contains(item)) {
            present.add(item);
        }
    }
    if (super.removeAll(c)) {
        for (Object item : present) {
            this.collectionItemRemoved((E) item);
        }
        return true;
    }
    return false;
}
@Test
public void testRemoveAll() {
    final Collection<Object> collection = new Collection<>();
    final Object first = new Object();
    final Object second = new Object();
    final Object third = new Object();
    collection.add(first);
    collection.add(second);
    collection.add(third);

    collection.removeAll(Arrays.asList(first, second));

    // Only the items passed to removeAll are gone; the rest survive.
    assertFalse(collection.contains(first));
    assertFalse(collection.contains(second));
    assertTrue(collection.contains(third));
}
/**
 * Persists a new board for the given member, then uploads its images, and returns the saved
 * board's id.
 *
 * NOTE(review): the upload happens after the save -- presumably an uploader failure leaves the
 * board persisted unless a surrounding transaction rolls back; confirm transactional boundaries.
 */
public Long saveBoard(final Long memberId, final BoardCreateRequest request) { Board board = new Board(request.title(), request.content(), memberId, request.images(), imageConverter); Board savedBoard = boardRepository.save(board); imageUploader.upload(board.getImages(), request.images()); return savedBoard.getId(); }
// Saving a board through the service returns the id assigned by the repository (1L here).
@Test
void 게시글을_저장한다() {
    // given
    Long memberId = 1L;
    BoardCreateRequest req = new BoardCreateRequest("title", "content", new ArrayList<>());
    // when
    Long result = boardService.saveBoard(memberId, req);
    // then
    assertThat(result).isEqualTo(1L);
}
/**
 * Reports whether cert-based support is available: requires a cert manager instance that is
 * currently connected. The address argument is not consulted.
 */
@Override public boolean isSupport(URL address) { return dubboCertManager != null && dubboCertManager.isConnected(); }
// Installs a context ClassLoader that cannot load the BouncyCastle PKCS10 builder class and mocks
// DubboCertManager construction; isSupport must then report false (mocked manager is not
// connected). The original ClassLoader is restored afterwards.
@Test void testEnable2() { ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader(); ClassLoader newClassLoader = new ClassLoader(originClassLoader) { @Override public Class<?> loadClass(String name) throws ClassNotFoundException { if (name.startsWith("org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder")) { throw new ClassNotFoundException("Test"); } return super.loadClass(name); } }; Thread.currentThread().setContextClassLoader(newClassLoader); try (MockedConstruction<DubboCertManager> construction = Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> { // ignore
        })) { FrameworkModel frameworkModel = new FrameworkModel(); DubboCertProvider provider = new DubboCertProvider(frameworkModel); Assertions.assertFalse(provider.isSupport(null)); frameworkModel.destroy(); } Thread.currentThread().setContextClassLoader(originClassLoader); }
/** Returns the value for the given property key, or null if absent. */
public String property(String key) { return properties.get(key); }
// A "{b64}"-prefixed property value must be decoded transparently: "{b64}Zm9v" reads back as "foo".
@Test public void encryption_ok() { ImmutableMap<String, String> map = ImmutableMap.<String, String>builder() .put("prop-1", "{b64}Zm9v") .build(); ScannerProperties underTest = new ScannerProperties(map); assertThat(underTest.property("prop-1")).isEqualTo("foo"); }
/** Creates a fresh, default PipelineOptions instance via the builder proxy mechanism. */
public static PipelineOptions create() { return new Builder().as(PipelineOptions.class); }
// The options revision starts at 1 and increments once per effective change; setting the same
// job name twice in a row must bump the revision only once (idempotent updates), giving 11 after
// ten distinct names.
@Test public void testRevision() { PipelineOptions options = PipelineOptionsFactory.create(); assertEquals(1, options.revision()); for (int i = 0; i < 10; i++) { options.setJobName("other" + i); // updates are idempotent, the 2nd call won't increment the revision
      options.setJobName("other" + i); } assertEquals(11, options.revision()); }
/**
 * Escapes the HTML special characters {@code &}, {@code <} and {@code >} in the given string
 * so it can be embedded safely in HTML text content.
 *
 * @param str the raw string (must not be null)
 * @return the string with {@code &}, {@code <} and {@code >} replaced by their entities
 */
public static String htmlEntites(String str) {
    // '&' must be escaped first; otherwise the '&' characters produced by the
    // other replacements would themselves be double-escaped. The original
    // implementation skipped '&' entirely, yielding broken markup for inputs
    // containing ampersands.
    return str.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
}
// Angle brackets must be converted to &lt; / &gt; entities.
@Test public void testHtmlEntites() { assertEquals("&lt;test&gt;", StringUtils.htmlEntites("<test>")); }
/**
 * Kills containers one at a time until the memory cgroup no longer reports an under-OOM state.
 * Throws YarnRuntimeException if OOM persists but no killable container can be found;
 * ResourceHandlerException from the cgroup read is logged and swallowed (expected at shutdown).
 */
@Override
public void run() {
    try {
        // We kill containers until the kernel reports the OOM situation resolved
        // Note: If the kernel has a delay this may kill more than necessary
        while (true) {
            String status = cgroups.getCGroupParam(
                CGroupsHandler.CGroupController.MEMORY,
                "",
                CGROUP_PARAM_MEMORY_OOM_CONTROL);
            if (!status.contains(CGroupsHandler.UNDER_OOM)) {
                break;
            }
            boolean containerKilled = killContainer();
            if (!containerKilled) {
                // This can happen, if SIGKILL did not clean up
                // non-PGID or containers or containers launched by other users
                // or if a process was put to the root YARN cgroup.
                throw new YarnRuntimeException(
                    "Could not find any containers but CGroups " +
                    "reserved for containers ran out of memory. " +
                    "I am giving up");
            }
        }
    } catch (ResourceHandlerException ex) {
        LOG.warn("Could not fetch OOM status. " +
            "This is expected at shutdown. Exiting.", ex);
    }
}
// With two opportunistic containers and one OOM cycle ("under_oom 1" then "under_oom 0"), the
// handler must kill exactly the container that exceeds its limit: c2 uses 11 MB vs c1's 9 MB,
// so only c2's pid (1235) receives SIGKILL, and only one signal is sent in total.
@Test
public void testOneOpportunisticContainerOverLimitUponOOM() throws Exception {
    ConcurrentHashMap<ContainerId, Container> containers = new ConcurrentHashMap<>();
    Container c1 = createContainer(1, false, 2L, true);
    containers.put(c1.getContainerId(), c1);
    Container c2 = createContainer(2, false, 1L, true);
    containers.put(c2.getContainerId(), c2);
    ContainerExecutor ex = createContainerExecutor(containers);
    Context context = mock(Context.class);
    when(context.getContainers()).thenReturn(containers);
    when(context.getContainerExecutor()).thenReturn(ex);
    CGroupsHandler cGroupsHandler = mock(CGroupsHandler.class);
    when(cGroupsHandler.getCGroupParam(
        CGroupsHandler.CGroupController.MEMORY, "",
        CGROUP_PARAM_MEMORY_OOM_CONTROL))
        .thenReturn("under_oom 1").thenReturn("under_oom 0");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1234").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(),
        CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(9));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(),
        CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(9));
    // container c2 is out of its limit
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1235").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(),
        CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(11));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(),
        CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(11));
    DefaultOOMHandler handler = new DefaultOOMHandler(context, false) {
        @Override
        protected CGroupsHandler getCGroupsHandler() {
            return cGroupsHandler;
        }
    };
    handler.run();
    verify(ex, times(1)).signalContainer(
        new ContainerSignalContext.Builder()
            .setPid("1235")
            .setContainer(c2)
            .setSignal(ContainerExecutor.Signal.KILL)
            .build()
    );
    verify(ex, times(1)).signalContainer(any());
}
/** Returns the validation key: the name of the FLOAT property type ("FLOAT"). */
@Override public String key() { return PropertyType.FLOAT.name(); }
// The validation key must be the literal enum name "FLOAT".
@Test public void key() { assertThat(validation.key()).isEqualTo("FLOAT"); }
/**
 * Merges the desired status onto the resource: an exception (if supplied) takes priority and
 * produces a NotReady state condition; otherwise the desired status's own state condition is
 * applied. Pre-existing non-state conditions are re-appended. Returns the current status
 * unchanged when desiredStatus is null.
 *
 * NOTE(review): the `condition != cond` filter is a reference comparison -- it relies on `cond`
 * being the exact object taken from the desired status's condition list; confirm.
 */
private KafkaRebalanceStatus updateStatus(KafkaRebalance kafkaRebalance, KafkaRebalanceStatus desiredStatus, Throwable e) {
    // Leave the current status when the desired state is null
    if (desiredStatus != null) {
        Condition cond = KafkaRebalanceUtils.rebalanceStateCondition(desiredStatus);
        List<Condition> previous = Collections.emptyList();
        if (desiredStatus.getConditions() != null) {
            previous = desiredStatus.getConditions().stream().filter(condition -> condition != cond).collect(Collectors.toList());
        }
        // If a throwable is supplied, it is set in the status with priority
        if (e != null) {
            StatusUtils.setStatusConditionAndObservedGeneration(kafkaRebalance, desiredStatus, KafkaRebalanceState.NotReady.toString(), e);
            desiredStatus.setConditions(Stream.concat(desiredStatus.getConditions().stream(), previous.stream()).collect(Collectors.toList()));
        } else if (cond != null) {
            StatusUtils.setStatusConditionAndObservedGeneration(kafkaRebalance, desiredStatus, cond);
            desiredStatus.setConditions(Stream.concat(desiredStatus.getConditions().stream(), previous.stream()).collect(Collectors.toList()));
        } else {
            throw new IllegalArgumentException("Status related exception and the Status condition's type cannot both be null");
        }
        return desiredStatus;
    }
    return kafkaRebalance.getStatus();
}
// A KafkaRebalance already in Ready state must stay Ready across reconciliations, even after the
// Kafka cluster's status is patched to NotReady between the two reconcile calls.
@Test
public void testKafkaRebalanceStaysReadyWhenComplete(VertxTestContext context) {
    KafkaRebalance kr = new KafkaRebalanceBuilder(createKafkaRebalance(namespace, CLUSTER_NAME, RESOURCE_NAME, EMPTY_KAFKA_REBALANCE_SPEC, false))
        .withNewStatus()
        .withObservedGeneration(1L)
        .withConditions(new ConditionBuilder()
            .withType("Ready")
            .withStatus("True")
            .build())
        .endStatus()
        .build();
    Crds.kafkaRebalanceOperation(client).inNamespace(namespace).resource(kr).create();
    Crds.kafkaRebalanceOperation(client).inNamespace(namespace).resource(kr).updateStatus();
    crdCreateKafka();
    crdCreateCruiseControlSecrets();
    Checkpoint checkpoint = context.checkpoint();
    krao.reconcile(new Reconciliation("test-trigger", KafkaRebalance.RESOURCE_KIND, namespace, RESOURCE_NAME))
        .onComplete(context.succeeding(v -> context.verify(() -> assertState(context, client, namespace, kr.getMetadata().getName(), KafkaRebalanceState.Ready))))
        .compose(v -> {
            Kafka kafkaPatch = new KafkaBuilder(Crds.kafkaOperation(client).inNamespace(namespace).withName(CLUSTER_NAME).get())
                .withNewStatus()
                .withObservedGeneration(1L)
                .withConditions(new ConditionBuilder()
                    .withType("NotReady")
                    .withStatus("True")
                    .build())
                .endStatus()
                .build();
            Crds.kafkaOperation(client).inNamespace(namespace).resource(kafkaPatch).updateStatus();
            return krao.reconcile(new Reconciliation("test-trigger", KafkaRebalance.RESOURCE_KIND, namespace, RESOURCE_NAME));
        })
        .onComplete(context.succeeding(v -> {
            // the resource moved from ProposalReady to Rebalancing on approval
            assertState(context, client, namespace, RESOURCE_NAME, KafkaRebalanceState.Ready);
            checkpoint.flag();
        }));
}
/** Discards all registered trackers. */
@Override public void clear() { trackers.clear(); }
// clear() on a freshly constructed (empty) tracker must complete without throwing.
@Test void testClearDoesNotThrowException() { DefaultResourceTracker tracker = new DefaultResourceTracker(); tracker.clear(); }
/** Returns the configured value for the given key, or null if the key is absent. */
public String getProperty(String propertyKey) { return configs.get(propertyKey); }
// Round-trips a JDBCResource through Gson and checks that the name and every well-known property
// (driver url/class, uri, user, password) survive serialization unchanged.
@Test public void testSerialization() throws Exception { JDBCResource resource0 = new JDBCResource("jdbc_resource_test", getMockConfigs()); String json = GsonUtils.GSON.toJson(resource0); Resource resource1 = GsonUtils.GSON.fromJson(json, Resource.class); Assert.assertTrue(resource1 instanceof JDBCResource); Assert.assertEquals(resource0.getName(), resource1.getName()); Assert.assertEquals(resource0.getProperty(JDBCResource.DRIVER_URL), ((JDBCResource) resource1).getProperty(JDBCResource.DRIVER_URL)); Assert.assertEquals(resource0.getProperty(JDBCResource.DRIVER_CLASS), ((JDBCResource) resource1).getProperty(JDBCResource.DRIVER_CLASS)); Assert.assertEquals(resource0.getProperty(JDBCResource.URI), ((JDBCResource) resource1).getProperty(JDBCResource.URI)); Assert.assertEquals(resource0.getProperty(JDBCResource.USER), ((JDBCResource) resource1).getProperty(JDBCResource.USER)); Assert.assertEquals(resource0.getProperty(JDBCResource.PASSWORD), ((JDBCResource) resource1).getProperty(JDBCResource.PASSWORD)); }
/**
 * Runs the given database query against the Cosmos client and returns the results as a Flux.
 * Rejects a null or empty query string up front (IllegalArgumentException via the validator).
 */
public Flux<CosmosDatabaseProperties> queryDatabases( final String query, final CosmosQueryRequestOptions queryRequestOptions) { CosmosDbUtils.validateIfParameterIsNotEmpty(query, "query"); return CosmosDbUtils.convertCosmosPagedFluxToFluxResults(client.queryDatabases(query, queryRequestOptions)); }
// null and empty query strings must both be rejected with IllegalArgumentException.
@Test void testQueryDatabases() { final CosmosAsyncClientWrapper client = mock(CosmosAsyncClientWrapper.class); final CosmosAsyncDatabase database = mock(CosmosAsyncDatabase.class); when(client.getDatabase("test")).thenReturn(database); final CosmosDbClientOperations operations = CosmosDbClientOperations.withClient(client); CosmosDbTestUtils.assertIllegalArgumentException(() -> operations.queryDatabases(null, null)); CosmosDbTestUtils.assertIllegalArgumentException(() -> operations.queryDatabases("", null)); }
/**
 * Creates a permission for the given service via its registered factory.
 *
 * @throws IllegalArgumentException if no factory is registered for serviceName
 */
public static Permission getPermission(String name, String serviceName, String... actions) { PermissionFactory permissionFactory = PERMISSION_FACTORY_MAP.get(serviceName); if (permissionFactory == null) { throw new IllegalArgumentException("No permissions found for service: " + serviceName); } return permissionFactory.create(name, actions); }
// Requesting a permission for the List service must yield a non-null ListPermission instance.
@Test public void getPermission_List() { Permission permission = ActionConstants.getPermission("foo", ListService.SERVICE_NAME); assertNotNull(permission); assertTrue(permission instanceof ListPermission); }
/** Translates a Hive SearchArgument (root expression plus its leaves) into an Iceberg filter Expression. */
public static Expression generateFilterExpression(SearchArgument sarg) { return translate(sarg.getExpression(), sarg.getLeaves()); }
// A FLOAT-typed equality predicate on column "float" must translate to Expressions.equal("float", 1200D).
@Test public void testFloatType() { SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); SearchArgument arg = builder.startAnd().equals("float", PredicateLeaf.Type.FLOAT, 1200D).end().build(); UnboundPredicate expected = Expressions.equal("float", 1200D); UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg); assertPredicatesMatch(expected, actual); }
/**
 * Opens a JDBC connection through the real driver named by the "driver" property (or a
 * "driver=" suffix on the url) and wraps it in a javamelody monitoring proxy. Returns null
 * when this request is not for javamelody (javamelody=false) or when no driver is specified.
 *
 * @throws SQLException if the driver class cannot be loaded or the connection fails
 */
@Override
public Connection connect(String url, Properties info) throws SQLException {
    if ("false".equals(info.get("javamelody"))) {
        // if property javamelody=false then it's not for us
        // (we pass here from the DriverManager.getConnection below)
        return null;
    }
    String myUrl = url;
    // we load first the driver class from the info or the url, to be sure that it will be found
    String proxiedDriver = info.getProperty("driver");
    if (proxiedDriver == null && myUrl != null) {
        // if not in the info, the driver class could also be passed at the end of the url, for example ...?driver=org.h2.Driver
        final int index = myUrl.indexOf("driver=");
        if (index != -1) {
            proxiedDriver = myUrl.substring(index + "driver=".length());
            myUrl = myUrl.substring(0, index - 1);
        }
    }
    if (proxiedDriver == null) {
        // if the driver is not defined in the info or in the url
        // it could still be found automatically if the driver is in the classpath
        // or (in WEB-INF/lib and if the jdbc drivers are not loaded by the JDK before this webapp)
        // but we don't want to create proxies and increment counts for the connections inside datasources
        // so we only accept and go further if driver is defined in the info or in the url
        return null;
    }
    try {
        // we use Thread.currentThread().getContextClassLoader() because the driver may not be
        // in the same classLoader as the javamelody classes
        // Class driverClass =
        Class.forName(proxiedDriver, true, Thread.currentThread().getContextClassLoader());
        // and not Class.forName(proxiedDriver);
    } catch (final ClassNotFoundException e) {
        throw new SQLException(e.getMessage(), e);
    }
    final Properties myInfo = (Properties) info.clone();
    myInfo.remove("driver");
    myInfo.put("javamelody", "false");
    Parameters.initJdbcDriverParameters(myUrl, myInfo);
    // we could call driverClass.newInstance().connect(myUrl, myInfo)
    // possibly by looking the driver which accepts the url in DriverManager.getDrivers()
    // but we prefer calling the standard DriverManager.getConnection(myUrl, myInfo)
    return JdbcWrapper.SINGLETON
            .createConnectionProxy(DriverManager.getConnection(myUrl, myInfo));
}
// Exercises connect(): a null url raises SQLException; a driver given via the url suffix or the
// "driver" property connects; an unknown driver class raises SQLException (class not found).
@Test
public void testConnect() throws SQLException {
    final Properties info = new Properties();
    try {
        driver.connect(null, info);
    } catch (final SQLException e) {
        // expected SQLException: The url cannot be null
        assertNotNull("connect", e);
    }
    driver.connect("jdbc:h2:mem:?driver=org.h2.Driver", info);
    info.put("driver", "org.h2.Driver");
    driver.connect("jdbc:h2:mem:", info);
    info.put("driver", "nimporte.quoi");
    try {
        driver.connect(null, info);
    } catch (final SQLException e) {
        // expected SQLException: class not found
        assertNotNull("connect", e);
    }
}
/** Returns the immutable list of name components of this path. */
public ImmutableList<Name> names() { return names; }
// A four-segment relative path must decompose into exactly its four name components.
@Test public void testRelativePath_fourNames() { new PathTester(pathService, "foo/bar/baz/test") .names("foo", "bar", "baz", "test") .test("foo/bar/baz/test"); }
/**
 * Copies UTM values from the application's manifest meta-data into the given properties object:
 * for each known UTM key, a non-empty meta-data value is stored under the mapped property name.
 * A null properties argument is a no-op.
 *
 * @throws JSONException if putting a value into the JSON object fails
 */
public static void mergeUtmByMetaData(Context context, JSONObject properties) throws JSONException { if (properties == null) { return; } for (Map.Entry<String, String> entry : UTM_MAP.entrySet()) { if (entry != null) { String utmValue = getApplicationMetaData(context, entry.getKey()); if (!TextUtils.isEmpty(utmValue)) { properties.put(entry.getValue(), utmValue); } } } }
// Smoke test: merging into an empty JSONObject must not blow up (JSONException is caught and logged).
@Test public void mergeUtmByMetaData() { try { ChannelUtils.mergeUtmByMetaData(mApplication, new JSONObject()); } catch (JSONException e) { e.printStackTrace(); } }
/**
 * Refreshes the Kubernetes secret backing a function's auth token. Reuses the secret id from the
 * existing auth data when present (so the secret is updated in place), otherwise generates a new
 * random id. If no token can be extracted from the request, the existing auth data is cleaned up
 * and empty is returned (e.g. switching to an anonymous user).
 */
@Override
public Optional<FunctionAuthData> updateAuthData(Function.FunctionDetails funcDetails,
                                                 Optional<FunctionAuthData> existingFunctionAuthData,
                                                 AuthenticationDataSource authenticationDataSource) throws Exception {
    String secretId;
    secretId = existingFunctionAuthData.map(functionAuthData -> new String(functionAuthData.getData()))
            .orElseGet(() -> RandomStringUtils.random(5, true, true).toLowerCase());
    String token;
    try {
        token = getToken(authenticationDataSource);
    } catch (AuthenticationException e) {
        // No token is passed so delete the token. Might be trying to switch over to using anonymous user
        cleanUpAuthData( funcDetails, existingFunctionAuthData);
        return Optional.empty();
    }
    if (token != null) {
        upsertSecret(token, funcDetails, getSecretName(secretId));
        return Optional.of(FunctionAuthData.builder().data(secretId.getBytes()).build());
    }
    return existingFunctionAuthData;
}
// updateAuthData must mint a fresh non-blank secret id when no auth data exists, and must reuse
// the existing secret id ("pf-secret-z7mxx") when auth data is already present.
@Test
public void testUpdateAuthData() throws Exception {
    CoreV1Api coreV1Api = mock(CoreV1Api.class);
    KubernetesSecretsTokenAuthProvider kubernetesSecretsTokenAuthProvider = new KubernetesSecretsTokenAuthProvider();
    kubernetesSecretsTokenAuthProvider.initialize(coreV1Api, null, (fd) -> "default");
    // test when existingFunctionAuthData is empty
    Optional<FunctionAuthData> existingFunctionAuthData = Optional.empty();
    Function.FunctionDetails funcDetails = Function.FunctionDetails.newBuilder().setTenant("test-tenant").setNamespace("test-ns").setName("test-func").build();
    Optional<FunctionAuthData> functionAuthData = kubernetesSecretsTokenAuthProvider.updateAuthData(funcDetails, existingFunctionAuthData, new AuthenticationDataSource() {
        @Override
        public boolean hasDataFromCommand() {
            return true;
        }
        @Override
        public String getCommandData() {
            return "test-token";
        }
    });
    Assert.assertTrue(functionAuthData.isPresent());
    Assert.assertTrue(StringUtils.isNotBlank(new String(functionAuthData.get().getData())));
    // test when existingFunctionAuthData is NOT empty
    existingFunctionAuthData = Optional.of(new FunctionAuthData("pf-secret-z7mxx".getBytes(), null));
    functionAuthData = kubernetesSecretsTokenAuthProvider.updateAuthData(funcDetails, existingFunctionAuthData, new AuthenticationDataSource() {
        @Override
        public boolean hasDataFromCommand() {
            return true;
        }
        @Override
        public String getCommandData() {
            return "test-token";
        }
    });
    Assert.assertTrue(functionAuthData.isPresent());
    Assert.assertEquals(new String(functionAuthData.get().getData()), "pf-secret-z7mxx");
}
/**
 * Reads the bookmark file as a property-list dictionary and delegates to the dictionary parser.
 *
 * @throws LocalAccessDeniedException if the file cannot be read as a plist dictionary
 */
@Override protected void parse(final ProtocolFactory protocols, final Local file) throws AccessDeniedException { NSDictionary serialized = NSDictionary.dictionaryWithContentsOfFile(file.getAbsolute()); if(null == serialized) { throw new LocalAccessDeniedException(String.format("Invalid bookmark file %s", file)); } this.parse(protocols, serialized); }
// Parsing a non-existent bookmark file must raise AccessDeniedException.
@Test(expected = AccessDeniedException.class) public void testParseNotFound() throws Exception { new FlowBookmarkCollection().parse(new ProtocolFactory(Collections.emptySet()), new Local(System.getProperty("java.io.tmpdir"), "f")); }
/**
 * Binds a guard expression from the AST, failing the guarded-by check when binding yields
 * nothing or resolves to a raw type literal (which is not a valid lock expression).
 */
private static GuardedByExpression bind(JCTree.JCExpression exp, BinderContext context) { GuardedByExpression expr = BINDER.visit(exp, context); checkGuardedBy(expr != null, String.valueOf(exp)); checkGuardedBy(expr.kind() != Kind.TYPE_LITERAL, "Raw type literal: %s", exp); return expr; }
// When a field named "Other" shadows the class Other, binding the guard "Test" against
// "Other.lock" must resolve through the field select, yielding the shown SELECT expression.
@Test public void simpleName() { assertThat( bind( "Test", "Other.lock", forSourceLines( "threadsafety/Test.java", "package threadsafety;", "class Other {", " static final Object lock = new Object();", "}", "class Test {", " final Other Other = null;", "}"))) .isEqualTo("(SELECT (TYPE_LITERAL threadsafety.Other) lock)"); }
/**
 * CDI producer for the JobRequestScheduler, created only when the job scheduler is enabled in the
 * build-time configuration.
 *
 * NOTE(review): returns null when disabled -- presumably acceptable for a @DefaultBean producer
 * in this framework; confirm injection points tolerate it.
 */
@Produces @DefaultBean @Singleton public JobRequestScheduler jobRequestScheduler(StorageProvider storageProvider) { if (jobRunrBuildTimeConfiguration.jobScheduler().enabled()) { return new JobRequestScheduler(storageProvider, emptyList()); } return null; }
// With the scheduler enabled in configuration, the producer must return a non-null JobRequestScheduler.
@Test void jobRequestSchedulerIsSetupWhenConfigured() { when(jobSchedulerBuildTimeConfiguration.enabled()).thenReturn(true); assertThat(jobRunrProducer.jobRequestScheduler(storageProvider)).isNotNull(); }
/**
 * Visitor dispatch: only a HayesVisitor may visit this Hayes modem; any other visitor type is
 * logged and ignored.
 */
@Override public void accept(ModemVisitor modemVisitor) { if (modemVisitor instanceof HayesVisitor) { ((HayesVisitor) modemVisitor).visit(this); } else { LOGGER.info("Only HayesVisitor is allowed to visit Hayes modem"); } }
// A non-Hayes visitor (ConfigureForUnixVisitor) must be ignored: no visit call reaches the mock.
@Test void testAcceptForUnix() { var hayes = new Hayes(); var mockVisitor = mock(ConfigureForUnixVisitor.class); hayes.accept(mockVisitor); verifyNoMoreInteractions(mockVisitor); }
/**
 * Converts a DeleteRecordsResponse into per-partition results: successes become DeletedRecords
 * with the returned low watermark; errors are routed by handlePartitionError into failed,
 * unmapped (retry via lookup), or retriable. Any requested partition missing from the response
 * (with no unmapped partitions to explain it) is failed with an ApiException sanity-check error.
 */
@Override
public ApiResult<TopicPartition, DeletedRecords> handleResponse(
    Node broker,
    Set<TopicPartition> keys,
    AbstractResponse abstractResponse
) {
    DeleteRecordsResponse response = (DeleteRecordsResponse) abstractResponse;
    Map<TopicPartition, DeletedRecords> completed = new HashMap<>();
    Map<TopicPartition, Throwable> failed = new HashMap<>();
    List<TopicPartition> unmapped = new ArrayList<>();
    Set<TopicPartition> retriable = new HashSet<>();
    for (DeleteRecordsResponseData.DeleteRecordsTopicResult topicResult: response.data().topics()) {
        for (DeleteRecordsResponseData.DeleteRecordsPartitionResult partitionResult : topicResult.partitions()) {
            Errors error = Errors.forCode(partitionResult.errorCode());
            TopicPartition topicPartition = new TopicPartition(topicResult.name(), partitionResult.partitionIndex());
            if (error == Errors.NONE) {
                completed.put(topicPartition, new DeletedRecords(partitionResult.lowWatermark()));
            } else {
                handlePartitionError(topicPartition, error, failed, unmapped, retriable);
            }
        }
    }
    // Sanity-check if the current leader for these partitions returned results for all of them
    for (TopicPartition topicPartition : keys) {
        if (unmapped.isEmpty()
            && !completed.containsKey(topicPartition)
            && !failed.containsKey(topicPartition)
            && !retriable.contains(topicPartition)
        ) {
            ApiException sanityCheckException = new ApiException(
                "The response from broker " + broker.id() +
                    " did not contain a result for topic partition " + topicPartition);
            log.error(
                "DeleteRecords request for topic partition {} failed sanity check",
                topicPartition,
                sanityCheckException);
            failed.put(topicPartition, sanityCheckException);
        }
    }
    return new ApiResult<>(completed, failed, unmapped);
}
// A NOT_LEADER_OR_FOLLOWER error must route the partition to "unmapped" (re-lookup of leaders)
// while every other requested partition completes normally.
@Test
public void testHandleLookupRetriablePartitionInvalidMetadataResponse() {
    TopicPartition errorPartition = t0p0;
    Errors error = Errors.NOT_LEADER_OR_FOLLOWER;
    Map<TopicPartition, Short> errorsByPartition = new HashMap<>();
    errorsByPartition.put(errorPartition, error.code());
    AdminApiHandler.ApiResult<TopicPartition, DeletedRecords> result = handleResponse(createResponse(errorsByPartition));
    // Some invalid metadata errors should be retried from the lookup stage as the partition-to-leader
    // mappings should be recalculated.
    List<TopicPartition> unmapped = new ArrayList<>();
    unmapped.add(errorPartition);
    Set<TopicPartition> completed = new HashSet<>(recordsToDelete.keySet());
    completed.removeAll(unmapped);
    assertResult(result, completed, emptyMap(), unmapped, emptySet());
}
/**
 * Routes a remoting request to the broker named in the request's ext fields (field "bname", or
 * the V2 field for SEND_MESSAGE_V2). Returns a VERSION_NOT_SUPPORTED error when the broker-name
 * field is missing. Oneway RPCs are fired without a response; otherwise the async result (or
 * failure) is written back on the channel and this method itself returns null.
 */
protected RemotingCommand request(ChannelHandlerContext ctx, RemotingCommand request, ProxyContext context, long timeoutMillis) throws Exception { String brokerName; if (request.getCode() == RequestCode.SEND_MESSAGE_V2) { if (request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2) == null) { return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED, "Request doesn't have field bname"); } brokerName = request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2); } else { if (request.getExtFields().get(BROKER_NAME_FIELD) == null) { return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED, "Request doesn't have field bname"); } brokerName = request.getExtFields().get(BROKER_NAME_FIELD); } if (request.isOnewayRPC()) { messagingProcessor.requestOneway(context, brokerName, request, timeoutMillis); return null; } messagingProcessor.request(context, brokerName, request, timeoutMillis) .thenAccept(r -> writeResponse(ctx, context, request, r)) .exceptionally(t -> { writeErrResponse(ctx, context, request, t); return null; }); return null; }
// When the downstream future fails, request() must return null and write a SYSTEM_ERROR response
// back on the channel exactly once.
@Test public void testRequestDefaultException() throws Exception { ArgumentCaptor<RemotingCommand> captor = ArgumentCaptor.forClass(RemotingCommand.class); String brokerName = "broker"; String remark = "exception"; CompletableFuture<RemotingCommand> future = new CompletableFuture<>(); future.completeExceptionally(new Exception(remark)); when(messagingProcessorMock.request(any(), eq(brokerName), any(), anyLong())).thenReturn(future); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null); request.addExtField(AbstractRemotingActivity.BROKER_NAME_FIELD, brokerName); RemotingCommand remotingCommand = remotingActivity.request(ctx, request, null, 10000); assertThat(remotingCommand).isNull(); verify(ctx, times(1)).writeAndFlush(captor.capture()); assertThat(captor.getValue().getCode()).isEqualTo(ResponseCode.SYSTEM_ERROR); }
/** Closes the wrapped delegate. */
@Override public void close() { delegate.close(); }
// Closing the deserializer must close its delegate exactly once.
@Test
public void shouldCloseDelegate() {
    // When:
    deserializer.close();
    // Then:
    verify(delegate).close();
}
/**
 * Builds a one-line, human-readable summary of a CEF message in the form
 * "&lt;deviceProduct&gt;: [&lt;deviceEventClassId&gt;, &lt;severity&gt;] &lt;name&gt;".
 */
protected String buildMessageSummary(com.github.jcustenborder.cef.Message cef) {
    return String.format("%s: [%s, %s] %s",
            cef.deviceProduct(), cef.deviceEventClassId(), cef.severity(), cef.name());
}
// The summary must follow the "product: [event-class-id, severity] name" format.
@Test public void buildMessageSummary() throws Exception { final com.github.jcustenborder.cef.Message cefMessage = mock(com.github.jcustenborder.cef.Message.class); when(cefMessage.deviceProduct()).thenReturn("product"); when(cefMessage.deviceEventClassId()).thenReturn("event-class-id"); when(cefMessage.name()).thenReturn("name"); when(cefMessage.severity()).thenReturn("High"); assertEquals("product: [event-class-id, High] name", codec.buildMessageSummary(cefMessage)); }
/**
 * Initializes the pool with one ShardState (and subscriber) per checkpoint, keyed by shard id.
 * Fails fast (IllegalStateException) on duplicate shard ids in the checkpoint set.
 */
void start(Iterable<ShardCheckpoint> checkpoints) { LOG.info( "Pool {} - starting for stream {} consumer {}. Checkpoints = {}", poolId, read.getStreamName(), consumerArn, checkpoints); for (ShardCheckpoint shardCheckpoint : checkpoints) { checkState( !state.containsKey(shardCheckpoint.getShardId()), "Duplicate shard id %s", shardCheckpoint.getShardId()); ShardState shardState = new ShardState( initShardSubscriber(shardCheckpoint), shardCheckpoint, watermarkPolicyFactory); state.put(shardCheckpoint.getShardId(), shardState); } }
// If the very first subscribe fails with a recoverable ReadTimeoutException, the pool must
// re-subscribe from the initial (LATEST) position, then continue checkpointing by sequence
// number, delivering all stubbed records exactly once.
@Test public void poolReSubscribesFromInitialWhenRecoverableErrorOccursImmediately() throws Exception { kinesis = new EFOStubbedKinesisAsyncClient(10); kinesis.stubSubscribeToShard("shard-000").failWith(new ReadTimeoutException()); kinesis.stubSubscribeToShard("shard-000", eventWithRecords(550, 3)); kinesis.stubSubscribeToShard("shard-000", eventsWithoutRecords(553, 1)); KinesisReaderCheckpoint initialCheckpoint = initialLatestCheckpoint(ImmutableList.of("shard-000")); pool = new EFOShardSubscribersPool(readSpec, consumerArn, kinesis, 1); pool.start(initialCheckpoint); PoolAssertion.assertPool(pool) .givesCheckPointedRecords( ShardAssertion.shard("shard-000") .gives(KinesisRecordView.generate("shard-000", 550, 3)) .withLastCheckpointSequenceNumber(553)); assertThat(kinesis.subscribeRequestsSeen()) .containsExactlyInAnyOrder( subscribeLatest("shard-000"), subscribeLatest("shard-000"), subscribeAfterSeqNumber("shard-000", "552"), subscribeAfterSeqNumber("shard-000", "553")); }
/**
 * Returns the zero-based index of the given page within this page tree, or -1 if the page is
 * not part of the tree.
 */
public int indexOf(PDPage page)
{
    final SearchContext context = new SearchContext(page);
    return findPage(context, root) ? context.index : -1;
}
// A page that does not belong to the document's page tree must yield index -1.
@Test void negative() throws IOException { doc = Loader.loadPDF(RandomAccessReadBuffer.createBufferFromStream( TestPDPageTree.class.getResourceAsStream("with_outline.pdf"))); assertEquals(-1, doc.getPages().indexOf(new PDPage())); }
/** Decodes the given bytes as a UTF-8 string. */
public static String toStringUtf8(byte[] input) { return new String(input, Charsets.UTF_8); }
// ASCII bytes 0x48 0x69 0x21 must decode to "Hi!".
@Test public void toStringUtf8() { assertThat(Tools.toStringUtf8(new byte[] {0x48, 0x69, 0x21}), is(equalTo("Hi!"))); }
/**
 * Forwards a device-state message to the state service, first recording it in the stats
 * collector when stats are enabled.
 */
void forwardToStateService(DeviceStateServiceMsgProto deviceStateServiceMsg, TbCallback callback) { if (statsEnabled) { stats.log(deviceStateServiceMsg); } stateService.onQueueMsg(deviceStateServiceMsg, callback); }
// Forwarding a device-state proto must hand both the message and the callback to the state service.
@Test
public void givenProcessingSuccess_whenForwardingDeviceStateMsgToStateService_thenOnSuccessCallbackIsCalled() {
    // GIVEN
    var stateMsg = TransportProtos.DeviceStateServiceMsgProto.newBuilder()
        .setTenantIdMSB(tenantId.getId().getMostSignificantBits())
        .setTenantIdLSB(tenantId.getId().getLeastSignificantBits())
        .setDeviceIdMSB(deviceId.getId().getMostSignificantBits())
        .setDeviceIdLSB(deviceId.getId().getLeastSignificantBits())
        .setAdded(true)
        .setUpdated(false)
        .setDeleted(false)
        .build();
    doCallRealMethod().when(defaultTbCoreConsumerServiceMock).forwardToStateService(stateMsg, tbCallbackMock);
    // WHEN
    defaultTbCoreConsumerServiceMock.forwardToStateService(stateMsg, tbCallbackMock);
    // THEN
    then(stateServiceMock).should().onQueueMsg(stateMsg, tbCallbackMock);
}
/**
 * Revokes an access token on behalf of a client. Returns false when the token does not exist or
 * belongs to a different client; otherwise removes it and returns whether removal succeeded.
 */
@Override
public boolean revokeToken(String clientId, String accessToken) {
    // Look up first, to ensure the clientId matches
    OAuth2AccessTokenDO accessTokenDO = oauth2TokenService.getAccessToken(accessToken);
    if (accessTokenDO == null || ObjectUtil.notEqual(clientId, accessTokenDO.getClientId())) {
        return false;
    }
    // Then delete it
    return oauth2TokenService.removeAccessToken(accessToken) != null;
}
// When the stored token's clientId matches and removal succeeds, revokeToken must return true.
@Test
public void testRevokeToken_success() {
    // prepare parameters
    String clientId = randomString();
    String accessToken = randomString();
    // mock method (access token lookup)
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class).setClientId(clientId);
    when(oauth2TokenService.getAccessToken(eq(accessToken))).thenReturn(accessTokenDO);
    // mock method (removal)
    when(oauth2TokenService.removeAccessToken(eq(accessToken))).thenReturn(accessTokenDO);
    // invoke and assert
    assertTrue(oauth2GrantService.revokeToken(clientId, accessToken));
}
/**
 * Returns the step's property map, lazily creating an empty map and attaching it to the step
 * when none exists yet.
 */
private static Map<String, Object> getProperties(Step step) {
  Map<String, Object> existing = step.getProperties();
  if (existing != null) {
    return existing;
  }
  Map<String, Object> created = new HashMap<>();
  step.setProperties(created);
  return created;
}
// In a streaming-engine GroupIntoBatches job, the last step must declare USES_KEYED_STATE=true
// and PRESERVES_KEYS, but not ALLOWS_SHARDABLE_STATE (non-sharded variant).
@Test public void testStreamingGroupIntoBatchesTranslation() throws Exception { List<String> experiments = new ArrayList<>( ImmutableList.of( GcpOptions.STREAMING_ENGINE_EXPERIMENT, GcpOptions.WINDMILL_SERVICE_EXPERIMENT)); JobSpecification jobSpec = runStreamingGroupIntoBatchesAndGetJobSpec(false, experiments); List<Step> steps = jobSpec.getJob().getSteps(); Step shardedStateStep = steps.get(steps.size() - 1); Map<String, Object> properties = shardedStateStep.getProperties(); assertTrue(properties.containsKey(PropertyNames.USES_KEYED_STATE)); assertEquals("true", getString(properties, PropertyNames.USES_KEYED_STATE)); assertFalse(properties.containsKey(PropertyNames.ALLOWS_SHARDABLE_STATE)); assertTrue(properties.containsKey(PropertyNames.PRESERVES_KEYS)); }
/**
 * Sends a self-healing notification to the configured MS Teams webhook with facts about the
 * anomaly (type, details, self-healing enabled, auto-fix triggered, and -- when self healing is
 * enabled for the anomaly type -- the start time). Skips sending when no webhook is configured;
 * send failures are logged, not rethrown.
 */
@Override public void alert(Anomaly anomaly, boolean autoFixTriggered, long selfHealingStartTime, AnomalyType anomalyType) { super.alert(anomaly, autoFixTriggered, selfHealingStartTime, anomalyType); if (_msTeamsWebhook == null) { LOG.warn("MSTeams webhook is null, can't send MSTeams self healing notification"); return; } Map<String, String> facts = new HashMap<>(Map.of("Anomaly type", anomalyType.toString(), "Anomaly", anomaly.toString(), "Self Healing enabled", Boolean.toString(_selfHealingEnabled.get(anomalyType)), "Auto fix triggered", Boolean.toString(autoFixTriggered))); if (_selfHealingEnabled.get(anomalyType)) { facts.put("Self Healing start time", utcDateFor(selfHealingStartTime)); } try { sendMSTeamsMessage(new MSTeamsMessage(facts), _msTeamsWebhook); } catch (IOException e) { LOG.warn("ERROR sending alert to MSTeams", e); } }
// With a webhook configured, one broker-failure alert must produce exactly one MS Teams message
// whose facts include the anomaly, self-healing enabled=true and auto-fix triggered=false.
@Test public void testMSTeamsAlertWithDefaultOptions() { _notifier = new MockMSTeamsSelfHealingNotifier(mockTime); _notifier._msTeamsWebhook = "https://dummy.webhook.office.com/webhookb2"; _notifier.alert(failures, false, 1L, KafkaAnomalyType.BROKER_FAILURE); assertEquals(1, _notifier.getMSTeamsMessageList().size()); MSTeamsMessage message = _notifier.getMSTeamsMessageList().get(0); assertNotNull(message.getFacts().get("Anomaly")); assertEquals("true", message.getFacts().get("Self Healing enabled")); assertEquals("false", message.getFacts().get("Auto fix triggered")); }
/**
 * When FIPS mode is enabled, validates every FIPS-relevant setting (cipher suites/TLS versions,
 * broker security, SSL endpoint algorithm, schema registry URL, and all listeners) via the
 * Confluent FIPS validator; a no-op otherwise.
 */
@VisibleForTesting
static void validateFips(final KsqlConfig config, final KsqlRestConfig restConfig) {
    if (config.getBoolean(ConfluentConfigs.ENABLE_FIPS_CONFIG)) {
        final FipsValidator fipsValidator = ConfluentConfigs.buildFipsValidator();
        // validate cipher suites and TLS version
        validateCipherSuites(fipsValidator, restConfig);
        // validate broker
        validateBroker(fipsValidator, config);
        // validate ssl endpoint algorithm
        validateSslEndpointAlgo(fipsValidator, restConfig);
        // validate schema registry url
        validateSrUrl(fipsValidator, restConfig);
        // validate all listeners
        validateListeners(fipsValidator, restConfig);
        log.info("FIPS mode enabled for ksqlDB!");
    }
}
@Test public void shouldFailOnInvalidListenerProtocols() { // Given: final KsqlConfig config = configWith(ImmutableMap.of( ConfluentConfigs.ENABLE_FIPS_CONFIG, true, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name )); final KsqlRestConfig restConfig = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(KsqlRestConfig.SSL_CIPHER_SUITES_CONFIG, Collections.singletonList("TLS_RSA_WITH_AES_256_CCM")) .put(KsqlConfig.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "https") .build() ); // When: final Exception e = assertThrows( SecurityException.class, () -> KsqlServerMain.validateFips(config, restConfig) ); // Then: assertThat(e.getMessage(), containsString( "FIPS 140-2 Configuration Error, invalid rest protocol: http" + "\nInvalid rest protocol for listeners." + "\nMake sure that all listeners, listeners.proxy.protocol, ksql.advertised.listener, and ksql.internal.listener follow FIPS 140-2.")); }
/**
 * Serializes the given object to the underlying {@link java.io.ObjectOutputStream}
 * and flushes immediately so the bytes are pushed to the receiver without delay.
 *
 * @param object the object to serialize; must be serializable by the underlying stream
 * @throws IOException if serialization or flushing fails
 */
@Override
public void write(Object object) throws IOException {
    objectOutputStream.writeObject(object);
    // Flush right away so each written object is delivered promptly.
    objectOutputStream.flush();
    // ObjectOutputStream caches every written object for back-references; this hook
    // periodically resets the stream so that cache cannot grow without bound.
    preventMemoryLeak();
}
@Test public void writesToUnderlyingObjectOutputStream() throws IOException { // given ObjectWriter objectWriter = new AutoFlushingObjectWriter(objectOutputStream, 2); String object = "foo"; // when objectWriter.write(object); // then verify(objectOutputStream).writeObjectOverride(object); }
/**
 * Resolves the name-server address stream for {@code hostname} by walking up its
 * domain hierarchy: the full name is looked up first, then each parent domain
 * obtained by stripping the leading label. Falls back to the default name servers
 * once no further parent domain exists (no dot, or only a trailing dot).
 */
@Override
public DnsServerAddressStream nameServerAddressStream(String hostname) {
    String remaining = hostname;
    while (true) {
        // Start the search at index 1 so a leading dot is never treated as a label boundary.
        int dotIndex = remaining.indexOf('.', 1);
        if (dotIndex < 0 || dotIndex == remaining.length() - 1) {
            // No parent domain left to try — use the default servers.
            return defaultNameServerAddresses.stream();
        }
        DnsServerAddresses match = domainToNameServerStreamMap.get(remaining);
        if (match != null) {
            return match.stream();
        }
        // Strip the leading label and retry with the parent domain.
        remaining = remaining.substring(dotIndex + 1);
    }
}
/**
 * A hostname matching neither configured domain must fall back to the name servers
 * from the first (default) resolver file.
 */
@Test
public void defaultReturnedWhenNoBetterMatch(@TempDir Path tempDir) throws Exception {
    // First file supplies the default name servers.
    File defaultConf = buildFile(tempDir, "domain linecorp.local\n" +
            "nameserver 127.0.0.2\n" +
            "nameserver 127.0.0.3\n");
    // Second file covers a different domain that "somehost" does not match.
    File otherConf = buildFile(tempDir, "domain squarecorp.local\n" +
            "nameserver 127.0.0.4\n" +
            "nameserver 127.0.0.5\n");
    UnixResolverDnsServerAddressStreamProvider provider =
            new UnixResolverDnsServerAddressStreamProvider(defaultConf, otherConf);

    DnsServerAddressStream stream = provider.nameServerAddressStream("somehost");
    // Expect the defaults from the first file, in order.
    assertHostNameEquals("127.0.0.2", stream.next());
    assertHostNameEquals("127.0.0.3", stream.next());
}
/**
 * Generates all Java codec stubs for the IR: the package-info, type stubs, the
 * message header stub, and a decoder/encoder pair for every message. Each codec
 * pair is generated together with its field-access-order precedence model.
 *
 * @throws IOException if writing any generated source fails
 */
public void generate() throws IOException {
    packageNameByTypes.clear();

    generatePackageInfo();
    generateTypeStubs();
    generateMessageHeaderStub();

    for (final List<Token> tokens : ir.messages()) {
        final Token messageToken = tokens.get(0);
        final List<Token> messageBody = getMessageBody(tokens);
        final boolean hasVarData = findSignal(messageBody, Signal.BEGIN_VAR_DATA) != -1;

        // Partition the message body into fields, repeating groups, and var-data,
        // advancing a single cursor through the token list.
        final List<Token> fields = new ArrayList<>();
        final List<Token> groups = new ArrayList<>();
        final List<Token> varData = new ArrayList<>();
        int cursor = collectFields(messageBody, 0, fields);
        cursor = collectGroups(messageBody, cursor, groups);
        collectVarData(messageBody, cursor, varData);

        // Decoder: precedence model is keyed by the generated "#CodecStates" inner class.
        final String decoderClassName = formatClassName(decoderName(messageToken.name()));
        final FieldPrecedenceModel decoderPrecedenceModel =
            precedenceChecks.createDecoderModel(decoderClassName + "#CodecStates", tokens);
        generateDecoder(decoderClassName, messageToken, fields, groups, varData, hasVarData, decoderPrecedenceModel);

        // Encoder: same structure, mirrored for the encoding side.
        final String encoderClassName = formatClassName(encoderName(messageToken.name()));
        final FieldPrecedenceModel encoderPrecedenceModel =
            precedenceChecks.createEncoderModel(encoderClassName + "#CodecStates", tokens);
        generateEncoder(encoderClassName, messageToken, fields, groups, varData, hasVarData, encoderPrecedenceModel);
    }
}
/**
 * Smoke test: the generated Car encoder must compile, wrap a buffer, expose its
 * repeating fuel-figures group, and render a readable toString.
 */
@Test
void shouldGenerateBasicMessage() throws Exception {
    final UnsafeBuffer encodeBuffer = new UnsafeBuffer(new byte[4096]);
    generator().generate();

    // Compile the generated encoder and wrap it around the buffer.
    final Object carEncoder = wrap(encodeBuffer, compileCarEncoder().getConstructor().newInstance());

    // An empty repeating group must still be reachable.
    final Object fuelFiguresGroup = fuelFiguresCount(carEncoder, 0);
    assertNotNull(fuelFiguresGroup);
    assertThat(carEncoder.toString(), startsWith("[Car]"));
}
/**
 * Binds the server socket to the given address with the given backlog,
 * delegating to the full overload with no additional options.
 *
 * @param socket the socket to bind
 * @param address the local address to bind to
 * @param backlog the maximum length of the pending-connection queue
 * @throws IOException if the bind fails
 */
public static void bind(ServerSocket socket, InetSocketAddress address, int backlog) throws IOException {
    bind(socket, address, backlog, null, null);
}
/**
 * Binding to an ephemeral port on the wildcard address must leave the socket bound.
 * Uses try-with-resources so the socket is closed even if {@code Server.bind} throws
 * (the previous version created and bound the socket outside the try block, leaking
 * it on a bind failure).
 */
@Test
public void testBindSimple() throws Exception {
    InetSocketAddress address = new InetSocketAddress("0.0.0.0", 0);
    try (ServerSocket socket = new ServerSocket()) {
        Server.bind(socket, address, 10);
        assertTrue(socket.isBound());
    }
}
/**
 * Returns the alive compute nodes for the worker group backing the given warehouse,
 * or an empty (mutable) list when no worker group can be selected.
 *
 * @param warehouseId id of the warehouse to look up
 * @return alive compute nodes, never null
 */
public List<ComputeNode> getAliveComputeNodes(long warehouseId) {
    // Delegate to the (warehouseId, workerGroupId) overload when a group is available;
    // otherwise fall back to an empty mutable list, matching the overload's contract.
    return selectWorkerGroupInternal(warehouseId)
            .map(groupId -> getAliveComputeNodes(warehouseId, groupId))
            .orElseGet(ArrayList::new);
}
/**
 * Verifies that getAliveComputeNodes() filters out dead nodes: of the two workers
 * reported by the StarOS agent, node 10003 is mocked as not-alive, so only one
 * compute node should survive the filter.
 */
@Test
public void testGetAliveComputeNodes() throws UserException {
    // Route GlobalStateMgr -> NodeMgr -> SystemInfoService through our mocks.
    new MockUp<GlobalStateMgr>() {
        @Mock
        public NodeMgr getNodeMgr() {
            return nodeMgr;
        }
    };
    new MockUp<NodeMgr>() {
        @Mock
        public SystemInfoService getClusterInfo() {
            return systemInfo;
        }
    };
    new MockUp<SystemInfoService>() {
        @Mock
        public ComputeNode getBackendOrComputeNode(long nodeId) {
            // Node 10003 is reported dead; every other node is alive.
            if (nodeId == 10003L) {
                ComputeNode node = new ComputeNode();
                node.setAlive(false);
                return node;
            }
            ComputeNode node = new ComputeNode();
            node.setAlive(true);
            return node;
        }
    };
    new Expectations() {
        {
            // The default worker group contains exactly these two workers.
            GlobalStateMgr.getCurrentState().getStarOSAgent().getWorkersByWorkerGroup(StarOSAgent.DEFAULT_WORKER_GROUP_ID);
            minTimes = 0;
            result = Lists.newArrayList(10003L, 10004L);
        }
    };

    WarehouseManager mgr = new WarehouseManager();
    mgr.initDefaultWarehouse();

    // Both node ids are returned unfiltered...
    List<Long> nodeIds = mgr.getAllComputeNodeIds(WarehouseManager.DEFAULT_WAREHOUSE_ID);
    Assert.assertEquals(2, nodeIds.size());

    // ...but only the alive node (10004) passes the aliveness filter.
    List<ComputeNode> nodes = mgr.getAliveComputeNodes(WarehouseManager.DEFAULT_WAREHOUSE_ID);
    Assert.assertEquals(1, nodes.size());
}
/**
 * Checks whether data written with {@code writer} can be read using {@code reader},
 * returning the full compatibility result paired with a human-readable message.
 *
 * @param reader schema used to decode
 * @param writer schema used to encode
 * @return the compatibility of the pair, with an explanatory message
 */
public static SchemaPairCompatibility checkReaderWriterCompatibility(final Schema reader, final Schema writer) {
    final SchemaCompatibilityResult result =
        new ReaderWriterCompatibilityChecker().getCompatibility(reader, writer);

    final String description;
    switch (result.getCompatibility()) {
      case COMPATIBLE:
        description = READER_WRITER_COMPATIBLE_MESSAGE;
        break;
      case INCOMPATIBLE:
        // Pretty-print both schemas so the failure is diagnosable from the message alone.
        description = String.format(
            "Data encoded using writer schema:%n%s%n"
                + "will or may fail to decode using reader schema:%n%s%n",
            writer.toString(true), reader.toString(true));
        break;
      default:
        // Defensive: the enum should only have the two values handled above.
        throw new AvroRuntimeException("Unknown compatibility: " + result);
    }

    return new SchemaPairCompatibility(result, reader, writer, description);
}
/**
 * A reader schema that omits a field present in the writer schema must still be
 * fully compatible (extra writer fields are simply skipped on read).
 */
@Test
void validateSchemaPairMissingField() {
    // Reader keeps only one of the writer's fields.
    final List<Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);

    // Test omitting a field.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
}