Dataset columns:
  focal_method — string, length 13 to 60.9k characters
  test_case — string, length 25 to 109k characters
/**
 * Returns the weighted arithmetic mean of the snapshot values, using the
 * pre-normalized weights. An empty snapshot yields a mean of zero.
 */
@Override
public double getMean() {
    final int count = values.length;
    if (count == 0) {
        return 0;
    }
    double weightedSum = 0;
    for (int index = 0; index < count; index++) {
        weightedSum += values[index] * normWeights[index];
    }
    return weightedSum;
}
/** An empty weighted snapshot must report a mean of zero instead of dividing by zero. */
@Test
public void calculatesAMeanOfZeroForAnEmptySnapshot() {
    final long[] noValues = new long[]{};
    final double[] noWeights = new double[]{};
    final Snapshot emptySnapshot = new WeightedSnapshot(weightedArray(noValues, noWeights));
    assertThat(emptySnapshot.getMean()).isZero();
}
@ApiOperation(value = "List models", nickname= "listModels", tags = { "Models" }) @ApiImplicitParams({ @ApiImplicitParam(name = "id", dataType = "string", value = "Only return models with the given version.", paramType = "query"), @ApiImplicitParam(name = "category", dataType = "string", value = "Only return models with the given category.", paramType = "query"), @ApiImplicitParam(name = "categoryLike", dataType = "string", value = "Only return models with a category like the given name.", paramType = "query"), @ApiImplicitParam(name = "categoryNotEquals", dataType = "string", value = "Only return models which do not have the given category.", paramType = "query"), @ApiImplicitParam(name = "name", dataType = "string", value = "Only return models with the given name.", paramType = "query"), @ApiImplicitParam(name = "nameLike", dataType = "string", value = "Only return models with a name like the given name.", paramType = "query"), @ApiImplicitParam(name = "key", dataType = "string", value = "Only return models with the given key.", paramType = "query"), @ApiImplicitParam(name = "deploymentId", dataType = "string", value = "Only return models with the given category.", paramType = "query"), @ApiImplicitParam(name = "version", dataType = "integer", value = "Only return models with the given version.", paramType = "query"), @ApiImplicitParam(name = "latestVersion", dataType = "boolean", value = "If true, only return models which are the latest version. Best used in combination with key. If false is passed in as value, this is ignored and all versions are returned.", paramType = "query"), @ApiImplicitParam(name = "deployed", dataType = "boolean", value = "If true, only deployed models are returned. 
If false, only undeployed models are returned (deploymentId is null).", paramType = "query"), @ApiImplicitParam(name = "tenantId", dataType = "string", value = "Only return models with the given tenantId.", paramType = "query"), @ApiImplicitParam(name = "tenantIdLike", dataType = "string", value = "Only return models with a tenantId like the given value.", paramType = "query"), @ApiImplicitParam(name = "withoutTenantId", dataType = "boolean", value = "If true, only returns models without a tenantId set. If false, the withoutTenantId parameter is ignored.", paramType = "query"), @ApiImplicitParam(name = "sort", dataType = "string", value = "Property to sort on, to be used together with the order.", allowableValues = "id,category,createTime,key,lastUpdateTime,name,version,tenantId", paramType = "query"), }) @ApiResponses(value = { @ApiResponse(code = 200, message = "Indicates request was successful and the models are returned"), @ApiResponse(code = 400, message = "Indicates a parameter was passed in the wrong format. 
The status-message contains additional information.") }) @GetMapping(value = "/repository/models", produces = "application/json") public DataResponse<ModelResponse> getModels(@ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) { ModelQuery modelQuery = repositoryService.createModelQuery(); if (allRequestParams.containsKey("id")) { modelQuery.modelId(allRequestParams.get("id")); } if (allRequestParams.containsKey("category")) { modelQuery.modelCategory(allRequestParams.get("category")); } if (allRequestParams.containsKey("categoryLike")) { modelQuery.modelCategoryLike(allRequestParams.get("categoryLike")); } if (allRequestParams.containsKey("categoryNotEquals")) { modelQuery.modelCategoryNotEquals(allRequestParams.get("categoryNotEquals")); } if (allRequestParams.containsKey("name")) { modelQuery.modelName(allRequestParams.get("name")); } if (allRequestParams.containsKey("nameLike")) { modelQuery.modelNameLike(allRequestParams.get("nameLike")); } if (allRequestParams.containsKey("key")) { modelQuery.modelKey(allRequestParams.get("key")); } if (allRequestParams.containsKey("version")) { modelQuery.modelVersion(Integer.valueOf(allRequestParams.get("version"))); } if (allRequestParams.containsKey("latestVersion")) { boolean isLatestVersion = Boolean.parseBoolean(allRequestParams.get("latestVersion")); if (isLatestVersion) { modelQuery.latestVersion(); } } if (allRequestParams.containsKey("deploymentId")) { modelQuery.deploymentId(allRequestParams.get("deploymentId")); } if (allRequestParams.containsKey("deployed")) { boolean isDeployed = Boolean.parseBoolean(allRequestParams.get("deployed")); if (isDeployed) { modelQuery.deployed(); } else { modelQuery.notDeployed(); } } if (allRequestParams.containsKey("tenantId")) { modelQuery.modelTenantId(allRequestParams.get("tenantId")); } if (allRequestParams.containsKey("tenantIdLike")) { modelQuery.modelTenantIdLike(allRequestParams.get("tenantIdLike")); } if 
(allRequestParams.containsKey("withoutTenantId")) { boolean withoutTenantId = Boolean.parseBoolean(allRequestParams.get("withoutTenantId")); if (withoutTenantId) { modelQuery.modelWithoutTenantId(); } } if (restApiInterceptor != null) { restApiInterceptor.accessModelInfoWithQuery(modelQuery); } return paginateList(allRequestParams, modelQuery, "id", allowedSortProperties, restResponseFactory::createModelResponseList); }
/**
 * End-to-end test of GET /repository/models: creates two models, exercises every
 * supported query filter (id, category, categoryLike, categoryNotEquals, name,
 * nameLike, key, version, deploymentId, deployed, latestVersion, withoutTenantId,
 * tenantId, tenantIdLike), and always deletes both models in the finally block.
 */
@Test
@Deployment(resources = { "org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml" })
public void testGetModels() throws Exception {
    // Create 2 models
    Model model1 = null;
    Model model2 = null;
    try {
        model1 = repositoryService.newModel();
        model1.setCategory("Model category");
        model1.setKey("Model key");
        model1.setMetaInfo("Model metainfo");
        model1.setName("Model name");
        model1.setVersion(2);
        model1.setDeploymentId(deploymentId);
        repositoryService.saveModel(model1);
        model2 = repositoryService.newModel();
        model2.setCategory("Another category");
        model2.setKey("Another key");
        model2.setMetaInfo("Another metainfo");
        model2.setName("Another name");
        model2.setVersion(3);
        repositoryService.saveModel(model2);
        // Try filter-less, should return all models
        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION);
        assertResultsPresentInDataResponse(url, model1.getId(), model2.getId());
        // Filter based on id
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?id=" + model1.getId();
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on category
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?category=" + encode("Another category");
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter based on category like
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?categoryLike=" + encode("Mode%");
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on category not equals
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?categoryNotEquals=" + encode("Another category");
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on name
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?name=" + encode("Another name");
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter based on name like
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?nameLike=" + encode("%del name");
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on key
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?key=" + encode("Model key");
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on version
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?version=3";
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter based on deploymentId
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?deploymentId=" + deploymentId;
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on deployed=true
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?deployed=true";
        assertResultsPresentInDataResponse(url, model1.getId());
        // Filter based on deployed=false
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?deployed=false";
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter based on latestVersion
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?key=" + encode("Model key") + "&latestVersion=true";
        // Make sure both models have same key
        model2 = repositoryService.createModelQuery().modelId(model2.getId()).singleResult();
        model2.setKey("Model key");
        repositoryService.saveModel(model2);
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter without tenant ID, before tenant update
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?withoutTenantId=true";
        assertResultsPresentInDataResponse(url, model1.getId(), model2.getId());
        // Set tenant ID
        model1 = repositoryService.getModel(model1.getId());
        model1.setTenantId("myTenant");
        repositoryService.saveModel(model1);
        // Filter without tenant ID, after tenant update
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?withoutTenantId=true";
        assertResultsPresentInDataResponse(url, model2.getId());
        // Filter based on tenantId
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?tenantId=myTenant";
        assertResultsPresentInDataResponse(url, model1.getId());
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?tenantId=anotherTenant";
        assertResultsPresentInDataResponse(url);
        // Filter based on tenantId like
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?tenantIdLike=" + encode("%enant");
        assertResultsPresentInDataResponse(url, model1.getId());
        url = RestUrls.createRelativeResourceUrl(RestUrls.URL_MODEL_COLLECTION) + "?tenantIdLike=anotherTenant";
        assertResultsPresentInDataResponse(url);
    } finally {
        // Best-effort cleanup: failures here must not mask the test outcome
        if (model1 != null) {
            try {
                repositoryService.deleteModel(model1.getId());
            } catch (Throwable ignore) {
            }
        }
        if (model2 != null) {
            try {
                repositoryService.deleteModel(model2.getId());
            } catch (Throwable ignore) {
            }
        }
    }
}
/**
 * Prints the subscription-group (consumer) configuration of the group passed via
 * -g for every broker in the cluster. Uses the name-server address from -n when
 * present, collects one SubscriptionGroupConfig per broker, then dumps every
 * field of each config via reflection. The admin client is always shut down.
 */
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
    // unique instance name so concurrent tool invocations don't clash
    adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
    String groupName = commandLine.getOptionValue('g').trim();
    if (commandLine.hasOption('n')) {
        adminExt.setNamesrvAddr(commandLine.getOptionValue('n').trim());
    }
    try {
        adminExt.start();
        List<ConsumerConfigInfo> consumerConfigInfoList = new ArrayList<>();
        ClusterInfo clusterInfo = adminExt.examineBrokerClusterInfo();
        Map<String, Set<String>> clusterAddrTable = clusterInfo.getClusterAddrTable();
        for (Entry<String, BrokerData> brokerEntry : clusterInfo.getBrokerAddrTable().entrySet()) {
            String clusterName = this.getClusterName(brokerEntry.getKey(), clusterAddrTable);
            String brokerAddress = brokerEntry.getValue().selectBrokerAddr();
            SubscriptionGroupConfig subscriptionGroupConfig = adminExt.examineSubscriptionGroupConfig(brokerAddress, groupName);
            if (subscriptionGroupConfig == null) {
                // this broker has no configuration for the requested group
                continue;
            }
            consumerConfigInfoList.add(new ConsumerConfigInfo(clusterName, brokerEntry.getKey(), subscriptionGroupConfig));
        }
        if (CollectionUtils.isEmpty(consumerConfigInfoList)) {
            return;
        }
        for (ConsumerConfigInfo info : consumerConfigInfoList) {
            System.out.printf("=============================%s:%s=============================\n", info.getClusterName(), info.getBrokerName());
            SubscriptionGroupConfig config = info.getSubscriptionGroupConfig();
            // reflective dump so new config fields are printed without code changes
            Field[] fields = config.getClass().getDeclaredFields();
            for (Field field : fields) {
                field.setAccessible(true);
                if (field.get(config) != null) {
                    System.out.printf("%s%-40s= %s\n", "", field.getName(), field.get(config).toString());
                } else {
                    System.out.printf("%s%-40s= %s\n", "", field.getName(), "");
                }
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        adminExt.shutdown();
    }
}
/** Smoke test: the command must run end-to-end against the mocked name server without throwing. */
@Test
public void testExecute() throws SubCommandException {
    GetConsumerConfigSubCommand command = new GetConsumerConfigSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    String nameServerArg = String.format("-n localhost:%d", nameServerMocker.listenPort());
    String[] args = new String[] {"-g group_test", nameServerArg};
    CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + command.commandName(), args, command.buildCommandlineOptions(options), new DefaultParser());
    command.execute(commandLine, options, null);
}
/**
 * Resolves the mail template for the given code.
 *
 * @param templateCode code of the template to look up
 * @return the resolved template, never null
 * @throws the service exception MAIL_TEMPLATE_NOT_EXISTS when no template matches
 */
@VisibleForTesting
MailTemplateDO validateMailTemplate(String templateCode) {
    // Fetch the mail template; read from the cache for efficiency
    MailTemplateDO template = mailTemplateService.getMailTemplateByCodeFromCache(templateCode);
    // Mail template does not exist
    if (template == null) {
        throw exception(MAIL_TEMPLATE_NOT_EXISTS);
    }
    return template;
}
/** validateMailTemplate must raise MAIL_TEMPLATE_NOT_EXISTS for an unknown template code. */
@Test
public void testValidateMailTemplateValid_notExists() {
    // Prepare parameters
    String templateCode = RandomUtils.randomString();
    // mock methods
    // Invoke and assert the expected service exception
    assertServiceException(() -> mailSendService.validateMailTemplate(templateCode), MAIL_TEMPLATE_NOT_EXISTS);
}
/**
 * Executes ROLLBACK: aborts the session's active transaction asynchronously.
 * Fails with NOT_IN_TRANSACTION when the session has no transaction; otherwise
 * detaches the transaction from the query state machine before aborting it.
 */
@Override
public ListenableFuture<?> execute(Rollback statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters) {
    Session session = stateMachine.getSession();
    if (!session.getTransactionId().isPresent()) {
        throw new PrestoException(NOT_IN_TRANSACTION, "No transaction in progress");
    }
    TransactionId activeTransaction = session.getTransactionId().get();
    // clear the id first so the query no longer reports an active transaction
    stateMachine.clearTransactionId();
    transactionManager.asyncAbort(activeTransaction);
    return immediateFuture(null);
}
/**
 * A ROLLBACK on a session with one active transaction must clear the transaction
 * id on the query state machine and remove the transaction from the manager.
 */
@Test
public void testRollback() {
    TransactionManager transactionManager = createTestTransactionManager();
    Session session = sessionBuilder()
            .setTransactionId(transactionManager.beginTransaction(false))
            .build();
    QueryStateMachine stateMachine = createQueryStateMachine("ROLLBACK", session, true, transactionManager, executor, metadata);
    RollbackTask rollbackTask = new RollbackTask();
    // preconditions: the session carries the transaction and the manager tracks it
    assertTrue(stateMachine.getSession().getTransactionId().isPresent());
    assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
    getFutureValue(rollbackTask.execute(new Rollback(), transactionManager, metadata, new AllowAllAccessControl(), stateMachine, emptyList()));
    assertTrue(stateMachine.getQueryInfo(Optional.empty()).isClearTransactionId());
    assertFalse(stateMachine.getQueryInfo(Optional.empty()).getStartedTransactionId().isPresent());
    assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
}
/**
 * Builds the "System" section of the system-info protobuf: server identity,
 * version/edition, lines-of-code statistic, container flag, external
 * authentication/provisioning information, distribution checks, the main
 * filesystem paths and the processor count.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName("System");
    setAttribute(protobuf, "Server ID", server.getId());
    setAttribute(protobuf, "Version", getVersion());
    setAttribute(protobuf, "Edition", sonarRuntime.getEdition().getLabel());
    setAttribute(protobuf, NCLOC.getName(), statisticsSupport.getLinesOfCode());
    setAttribute(protobuf, "Container", containerSupport.isRunningInContainer());
    setAttribute(protobuf, "External Users and Groups Provisioning", commonSystemInformation.getManagedInstanceProviderName());
    setAttribute(protobuf, "External User Authentication", commonSystemInformation.getExternalUserAuthentication());
    // empty provider lists are omitted from the section
    addIfNotEmpty(protobuf, "Accepted external identity providers", commonSystemInformation.getEnabledIdentityProviders());
    addIfNotEmpty(protobuf, "External identity providers whose users are allowed to sign themselves up", commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders());
    // hardcoded to false in this implementation
    setAttribute(protobuf, "High Availability", false);
    setAttribute(protobuf, "Official Distribution", officialDistribution.check());
    setAttribute(protobuf, "Force authentication", commonSystemInformation.getForceAuthentication());
    setAttribute(protobuf, "Home Dir", config.get(PATH_HOME.getKey()).orElse(null));
    setAttribute(protobuf, "Data Dir", config.get(PATH_DATA.getKey()).orElse(null));
    setAttribute(protobuf, "Temp Dir", config.get(PATH_TEMP.getKey()).orElse(null));
    setAttribute(protobuf, "Processors", Runtime.getRuntime().availableProcessors());
    return protobuf.build();
}
/** The "Force authentication" attribute must be reported even when its value is false. */
@Test
public void toProtobuf_whenForceAuthentication_returnIt() {
    when(commonSystemInformation.getForceAuthentication()).thenReturn(false);
    ProtobufSystemInfo.Section section = underTest.toProtobuf();
    assertThatAttributeIs(section, "Force authentication", false);
}
/**
 * Convenience overload: delegates to {@link #containsType(String)} using the
 * fully qualified name of the given class.
 */
@PublicAPI(usage = ACCESS)
public boolean containsType(Class<?> type) {
    return containsType(type.getName());
}
/**
 * containsType must match the exact type associated with the imported method
 * (SQLDataException) and must not match a mere supertype (Exception).
 */
@Test
public void containsType() {
    JavaMethod method = importMethod(SomeClass.class, "method");
    assertAllTrue(contains(method, SQLDataException.class));
    assertAllFalse(contains(method, Exception.class));
}
/**
 * Translates partition predicates into a Glue partition-filter expression.
 * Columns whose names are reserved keywords of Glue's internal SQL parser are
 * skipped, as is any column whose rendered conjunct would push the total
 * expression past Glue's character limit; the surviving per-column conjuncts
 * are joined with the conjunct separator.
 */
public static String buildGlueExpression(Map<Column, Domain> partitionPredicates) {
    List<String> perColumnExpressions = new ArrayList<>();
    int expressionLength = 0;
    for (Map.Entry<Column, Domain> partitionPredicate : partitionPredicates.entrySet()) {
        String columnName = partitionPredicate.getKey().getName();
        if (JSQL_PARSER_RESERVED_KEYWORDS.contains(columnName.toUpperCase(ENGLISH))) {
            // The column name is a reserved keyword in the grammar of the SQL parser used internally by Glue API
            continue;
        }
        Domain domain = partitionPredicate.getValue();
        if (domain != null && !domain.isAll()) {
            Optional<String> columnExpression = buildGlueExpressionForSingleDomain(columnName, domain);
            if (columnExpression.isPresent()) {
                int newExpressionLength = expressionLength + columnExpression.get().length();
                if (expressionLength > 0) {
                    // account for the separator inserted before this conjunct when joining
                    newExpressionLength += CONJUNCT_SEPARATOR.length();
                }
                if (newExpressionLength > GLUE_EXPRESSION_CHAR_LIMIT) {
                    // skip this conjunct rather than exceed Glue's expression size limit
                    continue;
                }
                perColumnExpressions.add((columnExpression.get()));
                expressionLength = newExpressionLength;
            }
        }
    }
    return Joiner.on(CONJUNCT_SEPARATOR).join(perColumnExpressions);
}
/**
 * A tinyint-ranged value added as an integer predicate must render as a single
 * parenthesized equality conjunct.
 */
@Test
public void testTinyintConversion() {
    Map<Column, Domain> predicates = new PartitionFilterBuilder(HIVE_TYPE_TRANSLATOR)
            .addIntegerValues("col1", Long.valueOf(Byte.MAX_VALUE))
            .build();
    String expression = buildGlueExpression(predicates);
    assertEquals(expression, format("((col1 = %d))", Byte.MAX_VALUE));
}
/**
 * Aggregates decimal column statistics across partitions. When every partition
 * has stats and all NDV estimators are mutually mergeable, the estimators are
 * merged directly; otherwise NDV is estimated either from the value density
 * (when useDensityFunctionForNDVEstimation is set) or interpolated between the
 * lower/higher bounds via ndvTuner. When some partitions lack stats, the
 * available stats are extrapolated over all requested partitions. A merged KLL
 * histogram is attached when one can be built.
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType;
  String colName = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
  NumDistinctValueEstimator ndvEstimator = null;
  boolean areAllNDVEstimatorsMergeable = true;
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    if (statsObj == null) {
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
      LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName,
          doAllPartitionContainStats);
    }
    DecimalColumnStatsDataInspector columnStatsData = decimalInspectorFromStats(cso);
    // check if we can merge NDV estimators
    if (columnStatsData.getNdvEstimator() == null) {
      areAllNDVEstimatorsMergeable = false;
      break;
    } else {
      NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
      if (ndvEstimator == null) {
        ndvEstimator = estimator;
      } else {
        if (!ndvEstimator.canMerge(estimator)) {
          areAllNDVEstimatorsMergeable = false;
          break;
        }
      }
    }
  }
  if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
    // start from an empty estimator of the same kind before merging all partitions
    ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
  }
  LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
    // simple path: no extrapolation needed
    DecimalColumnStatsDataInspector aggregateData = null;
    long lowerBound = 0;
    long higherBound = 0;
    double densityAvgSum = 0.0;
    DecimalColumnStatsMerger merger = new DecimalColumnStatsMerger();
    for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
      ColumnStatisticsObj cso = csp.getColStatsObj();
      DecimalColumnStatsDataInspector newData = decimalInspectorFromStats(cso);
      lowerBound = Math.max(lowerBound, newData.getNumDVs());
      higherBound += newData.getNumDVs();
      if (newData.isSetLowValue() && newData.isSetHighValue()) {
        densityAvgSum += (MetaStoreServerUtils.decimalToDouble(newData.getHighValue()) - MetaStoreServerUtils
            .decimalToDouble(newData.getLowValue())) / newData.getNumDVs();
      }
      if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setLowValue(merger.mergeLowValue(
            merger.getLowValue(aggregateData), merger.getLowValue(newData)));
        aggregateData.setHighValue(merger.mergeHighValue(
            merger.getHighValue(aggregateData), merger.getHighValue(newData)));
        aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
        aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      long estimation;
      if (useDensityFunctionForNDVEstimation && aggregateData != null
          && aggregateData.isSetLowValue() && aggregateData.isSetHighValue()) {
        // We have estimation, lowerbound and higherbound. We use estimation
        // if it is between lowerbound and higherbound.
        double densityAvg = densityAvgSum / partNames.size();
        estimation = (long) ((MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
            .decimalToDouble(aggregateData.getLowValue())) / densityAvg);
        if (estimation < lowerBound) {
          estimation = lowerBound;
        } else if (estimation > higherBound) {
          estimation = higherBound;
        }
      } else {
        estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
      }
      aggregateData.setNumDVs(estimation);
    }
    columnStatisticsData.setDecimalStats(aggregateData);
  } else {
    // TODO: bail out if missing stats are over a certain threshold
    // we need extrapolation
    LOG.debug("start extrapolation for {}", colName);
    Map<String, Integer> indexMap = new HashMap<>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
    // while we scan the css, we also get the densityAvg, lowerbound and
    // higherbound when useDensityFunctionForNDVEstimation is true.
    double densityAvgSum = 0.0;
    if (!areAllNDVEstimatorsMergeable) {
      // if not every partition uses bitvector for ndv, we just fall back to
      // the traditional extrapolation methods.
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DecimalColumnStatsData newData = cso.getStatsData().getDecimalStats();
        if (useDensityFunctionForNDVEstimation && newData.isSetLowValue() && newData.isSetHighValue()) {
          densityAvgSum += (MetaStoreServerUtils.decimalToDouble(newData.getHighValue()) - MetaStoreServerUtils
              .decimalToDouble(newData.getLowValue())) / newData.getNumDVs();
        }
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      DecimalColumnStatsDataInspector aggregateData = null;
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DecimalColumnStatsDataInspector newData = decimalInspectorFromStats(cso);
        // newData.isSetBitVectors() should be true for sure because we
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setDecimalStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            if (useDensityFunctionForNDVEstimation) {
              densityAvgSum += (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
                  .decimalToDouble(aggregateData.getLowValue())) / aggregateData.getNumDVs();
            }
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
            ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          if (MetaStoreServerUtils.decimalToDouble(aggregateData.getLowValue()) < MetaStoreServerUtils
              .decimalToDouble(newData.getLowValue())) {
            aggregateData.setLowValue(aggregateData.getLowValue());
          } else {
            aggregateData.setLowValue(newData.getLowValue());
          }
          if (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) > MetaStoreServerUtils
              .decimalToDouble(newData.getHighValue())) {
            aggregateData.setHighValue(aggregateData.getHighValue());
          } else {
            aggregateData.setHighValue(newData.getHighValue());
          }
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (length > 0) {
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setDecimalStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += (MetaStoreServerUtils.decimalToDouble(aggregateData.getHighValue()) - MetaStoreServerUtils
              .decimalToDouble(aggregateData.getLowValue())) / aggregateData.getNumDVs();
        }
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
        adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
  }
  LOG.debug(
      "Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
      colName, columnStatisticsData.getDecimalStats().getNumDVs(), partNames.size(),
      colStatsWithSourceInfo.size());
  KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
  if (mergedKllHistogramEstimator != null) {
    columnStatisticsData.getDecimalStats().setHistogram(mergedKllHistogramEstimator.serialize());
  }
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
/**
 * Aggregation when only some partitions have stats and their NDV sketches are
 * not mergeable (FM sketch vs HLL): the aggregator must extrapolate, and
 * enabling the density function must change the NDV estimate.
 */
@Test
public void testAggregateMultiStatsOnlySomeAvailableButUnmergeableBitVector() throws MetaException {
    List<String> partitions = Arrays.asList("part1", "part2", "part3");
    ColumnStatisticsData data1 = new ColStatsBuilder<>(Decimal.class).numNulls(1).numDVs(3)
            .low(ONE).high(SIX).fmSketch(1, 2, 6).kll(1, 2, 6).build();
    ColumnStatisticsData data3 = new ColStatsBuilder<>(Decimal.class).numNulls(3).numDVs(1)
            .low(SEVEN).high(SEVEN).hll(7).kll(7).build();
    List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
            createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
            createStatsWithInfo(data3, TABLE, COL, partitions.get(2)));
    DecimalColumnStatsAggregator aggregator = new DecimalColumnStatsAggregator();
    ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false);
    // hll in case of missing stats is left as null, only numDVs is updated
    ColumnStatisticsData expectedStats = new ColStatsBuilder<>(Decimal.class).numNulls(6).numDVs(3)
            .low(ONE).high(DecimalUtils.createThriftDecimal("7.5")).kll(1, 2, 6, 7).build();
    assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
    aggregator.useDensityFunctionForNDVEstimation = true;
    computedStatsObj = aggregator.aggregate(statsList, partitions, false);
    // the use of the density function leads to a different estimation for numNDV
    expectedStats = new ColStatsBuilder<>(Decimal.class).numNulls(6).numDVs(4)
            .low(ONE).high(DecimalUtils.createThriftDecimal("7.5")).kll(1, 2, 6, 7).build();
    assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
}
/**
 * SQL scalar from_utf8(varbinary) -> varchar: decodes the slice as UTF-8,
 * repairing invalid byte sequences via SliceUtf8.fixInvalidUtf8.
 */
@Description("decodes the UTF-8 encoded string")
@ScalarFunction
@SqlType(StandardTypes.VARCHAR)
public static Slice fromUtf8(@SqlType(StandardTypes.VARBINARY) Slice slice) {
    return SliceUtf8.fixInvalidUtf8(slice);
}
/**
 * from_utf8 must pass valid UTF-8 through, substitute U+FFFD for invalid byte
 * sequences by default, accept a custom replacement given as a string or a
 * codepoint (including the empty string), and reject multi-character or
 * out-of-range replacements.
 */
@Test
public void testFromUtf8() {
    assertFunction("from_utf8(to_utf8('hello'))", VARCHAR, "hello");
    assertFunction("from_utf8(from_hex('58BF'))", VARCHAR, "X\uFFFD");
    assertFunction("from_utf8(from_hex('58DF'))", VARCHAR, "X\uFFFD");
    assertFunction("from_utf8(from_hex('58F7'))", VARCHAR, "X\uFFFD");
    assertFunction("from_utf8(from_hex('58BF'), '#')", VARCHAR, "X#");
    assertFunction("from_utf8(from_hex('58DF'), 35)", VARCHAR, "X#");
    assertFunction("from_utf8(from_hex('58BF'), '')", VARCHAR, "X");
    assertInvalidFunction("from_utf8(to_utf8('hello'), 'foo')", INVALID_FUNCTION_ARGUMENT);
    assertInvalidFunction("from_utf8(to_utf8('hello'), 1114112)", INVALID_FUNCTION_ARGUMENT);
}
/**
 * Returns the gross return of this position based on its exit price, or zero
 * while the position is still open.
 */
public Num getGrossReturn() {
    return isOpened() ? zero() : getGrossReturn(exit.getPricePerAsset());
}
/**
 * Gross return of a short position whose trades carry NaN prices must be
 * derived from the bar close prices of the given series (short 100 -> 95).
 */
@Test
public void testGetGrossReturnForShortPositionsUsingBarCloseOnNaN() {
    MockBarSeries series = new MockBarSeries(DoubleNum::valueOf, 100, 95);
    Position position = new Position(new Trade(0, TradeType.SELL, NaN, NaN), new Trade(1, TradeType.BUY, NaN, NaN));
    assertNumEquals(DoubleNum.valueOf(1.05), position.getGrossReturn(series));
}
/**
 * Serializes the given parameters as an application/x-www-form-urlencoded
 * string, percent-encoding spaces as "%20" rather than "+" (required by
 * consumers such as OAuth scope values, see issue #172).
 *
 * Fix: the '+' -> "%20" normalization was previously applied only to values;
 * keys are now encoded the same way for consistency.
 *
 * @param params the form parameters, emitted in the map's iteration order
 * @return the encoded "key=value&key=value" string; empty for an empty map
 * @throws UnsupportedEncodingException never in practice (UTF-8 is mandatory)
 */
public static String getFormDataString(Map<String, String> params) throws UnsupportedEncodingException {
    StringBuilder result = new StringBuilder();
    boolean first = true;
    for (Map.Entry<String, String> entry : params.entrySet()) {
        if (!first) {
            result.append("&");
        }
        first = false;
        // URLEncoder emits '+' for spaces; normalize both key and value to %20
        result.append(URLEncoder.encode(entry.getKey(), "UTF-8").replaceAll("\\+", "%20"));
        result.append("=");
        result.append(URLEncoder.encode(entry.getValue(), "UTF-8").replaceAll("\\+", "%20"));
    }
    return result.toString();
}
/** Spaces in form values must be percent-encoded as %20 rather than '+'. */
@Test
public void testGetFormDataString() throws UnsupportedEncodingException {
    // This is to reproduce and fix #172
    Map<String, String> params = new HashMap<>();
    params.put("scope", "a b c d");
    String s = Http2Client.getFormDataString(params);
    Assert.assertEquals("scope=a%20b%20c%20d", s);
}
public static DefaultFuture newFuture(Channel channel, Request request, int timeout, ExecutorService executor) { final DefaultFuture future = new DefaultFuture(channel, request, timeout); future.setExecutor(executor); // timeout check timeoutCheck(future); return future; }
/** newFuture must never hand back a null future. */
@Test
void newFuture() {
    DefaultFuture created = defaultFuture(3000);
    Assertions.assertNotNull(created, "new future return null");
}
/**
 * Imports a version-1.3 data dump: walks the top-level JSON object and
 * dispatches each known member (clients, grants, sites, tokens, scopes, ...)
 * to its reader, offering unknown members to registered extensions before
 * skipping them. Afterwards object references are fixed up and the temporary
 * id-mapping tables are cleared.
 */
@Override
public void importData(JsonReader reader) throws IOException {
    logger.info("Reading configuration for 1.3");
    // this *HAS* to start as an object
    reader.beginObject();
    while (reader.hasNext()) {
        JsonToken tok = reader.peek();
        switch (tok) {
            case NAME:
                String name = reader.nextName();
                // find out which member it is
                if (name.equals(CLIENTS)) {
                    readClients(reader);
                } else if (name.equals(GRANTS)) {
                    readGrants(reader);
                } else if (name.equals(WHITELISTEDSITES)) {
                    readWhitelistedSites(reader);
                } else if (name.equals(BLACKLISTEDSITES)) {
                    readBlacklistedSites(reader);
                } else if (name.equals(AUTHENTICATIONHOLDERS)) {
                    readAuthenticationHolders(reader);
                } else if (name.equals(ACCESSTOKENS)) {
                    readAccessTokens(reader);
                } else if (name.equals(REFRESHTOKENS)) {
                    readRefreshTokens(reader);
                } else if (name.equals(SYSTEMSCOPES)) {
                    readSystemScopes(reader);
                } else {
                    boolean processed = false;
                    for (MITREidDataServiceExtension extension : extensions) {
                        if (extension.supportsVersion(THIS_VERSION)) {
                            processed = extension.importExtensionData(name, reader);
                            if (processed) {
                                // if the extension processed data, break out of this inner loop
                                // (only the first extension to claim an extension point gets it)
                                break;
                            }
                        }
                    }
                    if (!processed) {
                        // unknown token, skip it
                        reader.skipValue();
                    }
                }
                break;
            case END_OBJECT:
                // the object ended, we're done here
                reader.endObject();
                continue;
            default:
                logger.debug("Found unexpected entry");
                reader.skipValue();
                continue;
        }
    }
    fixObjectReferences();
    for (MITREidDataServiceExtension extension : extensions) {
        if (extension.supportsVersion(THIS_VERSION)) {
            extension.fixExtensionObjectReferences(maps);
            break;
        }
    }
    maps.clearAll();
}
/** Importing three blacklisted sites must save each one with its original URI, in order. */
@Test public void testImportBlacklistedSites() throws IOException { BlacklistedSite site1 = new BlacklistedSite(); site1.setId(1L); site1.setUri("http://foo.com"); BlacklistedSite site2 = new BlacklistedSite(); site2.setId(2L); site2.setUri("http://bar.com"); BlacklistedSite site3 = new BlacklistedSite(); site3.setId(3L); site3.setUri("http://baz.com"); String configJson = "{" + "\"" + MITREidDataService.CLIENTS + "\": [], " + "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " + "\"" + MITREidDataService.REFRESHTOKENS + "\": [], " + "\"" + MITREidDataService.GRANTS + "\": [], " + "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " + "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " + "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [], " + "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [" + "{\"id\":1,\"uri\":\"http://foo.com\"}," + "{\"id\":2,\"uri\":\"http://bar.com\"}," + "{\"id\":3,\"uri\":\"http://baz.com\"}" + " ]" + "}"; logger.debug(configJson); JsonReader reader = new JsonReader(new StringReader(configJson)); dataService.importData(reader); verify(blSiteRepository, times(3)).save(capturedBlacklistedSites.capture()); List<BlacklistedSite> savedSites = capturedBlacklistedSites.getAllValues(); assertThat(savedSites.size(), is(3)); assertThat(savedSites.get(0).getUri(), equalTo(site1.getUri())); assertThat(savedSites.get(1).getUri(), equalTo(site2.getUri())); assertThat(savedSites.get(2).getUri(), equalTo(site3.getUri())); }
/**
 * Applies the configured narrowing/parsing conversion to {@code obj}.
 * Returns {@code null} for an unrecognised conversion kind.
 */
public Object convert(Object obj) {
    switch (conversion) {
        case NO_CONVERSION:
            return obj;
        case DOUBLE_TO_FLOAT:
            return ((Double) obj).floatValue();
        case INT_TO_SHORT:
            return ((Integer) obj).shortValue();
        case INT_TO_BYTE:
            return ((Integer) obj).byteValue();
        case STRING_TO_CHAR:
            // Only the first character of the sequence is kept.
            return ((CharSequence) obj).charAt(0);
        case NUM_TO_LONG:
            return Long.parseLong(obj.toString());
        default:
            return null;
    }
}
/** Default conversion must pass strings through unchanged and type-intact. */
@Test
public void testTypeConversion() {
    TypeConverter typeConverter = new TypeConverter();
    assertEquals("test", typeConverter.convert("test"));
    assertTrue(typeConverter.convert("test") instanceof String);
}
/**
 * Returns the project id this resource manager operates in.
 *
 * @return the configured project id
 */
public String getProjectId() {
    return projectId;
}
/** getProjectId must echo the project id supplied at construction. */
@Test public void testGetProjectIdReturnsCorrectValue() { assertThat(new BigQueryResourceManager(TEST_ID, PROJECT_ID, bigQuery).getProjectId()) .isEqualTo(PROJECT_ID); }
/**
 * Identifies the PMML model type handled by this provider.
 *
 * @return always {@code PMML_MODEL.CLUSTERING_MODEL}
 */
@Override
public PMML_MODEL getPMMLModelType() {
    logger.trace("getPMMLModelType");
    return PMML_MODEL.CLUSTERING_MODEL;
}
/** The provider must report the clustering model type. */
@Test void getPMMLModelType() { assertThat(PROVIDER.getPMMLModelType()).isEqualTo(PMML_MODEL.CLUSTERING_MODEL); }
/**
 * Creates an empty registry: no models, no column-name override rules, and no
 * registered listeners.
 */
public StorageModels() {
    this.listeners = new ArrayList<>();
    this.models = new ArrayList<>();
    this.columnNameOverrideRule = new HashMap<>();
}
/** Adding a model must expose its columns, storage-only flags, and composite index column order. */
@Test public void testStorageModels() throws StorageException { StorageModels models = new StorageModels(); models.add(TestModel.class, -1, new Storage("StorageModelsTest", false, DownSampling.Hour) ); final List<Model> allModules = models.allModels(); assertEquals(1, allModules.size()); final Model model = allModules.get(0); assertEquals(4, model.getColumns().size()); assertFalse(model.getColumns().get(0).isStorageOnly()); assertFalse(model.getColumns().get(1).isStorageOnly()); assertFalse(model.getColumns().get(2).isStorageOnly()); Assertions.assertTrue(model.getColumns().get(3).isStorageOnly()); assertArrayEquals(new String[] { "column2", "column" }, model.getColumns().get(2).getSqlDatabaseExtension().getIndices().get(1).getColumns()); }
/**
 * Registers a configuration key, rejecting duplicates and recording its group
 * (in first-seen order) if one is set.
 *
 * @return this ConfigDef, for chaining
 * @throws ConfigException if a key with the same name was already defined
 */
public ConfigDef define(ConfigKey key) {
    // Reject duplicate definitions up front.
    if (configKeys.containsKey(key.name)) {
        throw new ConfigException("Configuration " + key.name + " is defined twice.");
    }
    // Track each group the first time it is seen, preserving insertion order.
    String group = key.group;
    if (group != null && !groups.contains(group)) {
        groups.add(group);
    }
    configKeys.put(key.name, key);
    return this;
}
/** A list value under the ListSize validator's limit must define without error. */
@Test public void testNoExceptionIsThrownWhenListSizeIsBelowTheLimit() { assertDoesNotThrow(() -> new ConfigDef().define("lst", Type.LIST, asList("a", "b"), ListSize.atMostOfSize(3), Importance.HIGH, "lst doc")); }
/**
 * Evaluates this row against the given column/value pairs and, when every pair
 * matches, returns the value stored under {@code outputColumn}.
 *
 * @param columnPairsMap column name to expected value (or regex, in regex mode)
 * @param outputColumn   column whose value is returned on a full match
 * @param regexField     name of the boolean column that switches on regex mode
 * @return the output column's value when all pairs match, otherwise empty
 */
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap, final String outputColumn, final String regexField) {
    boolean matching = true;
    // Regex mode is active only when the flag column exists in this row and is true.
    boolean isRegex = regexField != null && columnValues.containsKey(regexField) && (boolean) columnValues.get(regexField);
    for (Map.Entry<String, Object> columnPairEntry : columnPairsMap.entrySet()) {
        Object value = columnValues.get(columnPairEntry.getKey());
        // NOTE(review): value is null when the key is absent from this row; the regex
        // branch would then NPE on toString() — confirm callers only pass known keys.
        matching = isRegex ? isRegexMatching(value.toString(), (String) columnPairEntry.getValue()) : isMatching(value, columnPairEntry.getValue());
        if (!matching) {
            break;
        }
    }
    // An empty columnPairsMap leaves matching == true, i.e. an unconditional match.
    return matching ? Optional.ofNullable(columnValues.get(outputColumn)) : Optional.empty();
}
/** In regex mode a matching pattern must yield the output column's value. */
@Test void evaluateKeyFoundMatchingRegex() { KiePMMLRow kiePMMLRow = new KiePMMLRow(COLUMN_VALUES); Optional<Object> retrieved = kiePMMLRow.evaluate(Collections.singletonMap("KEY-1", "[0-9]"), "KEY-0", REGEX_FIELD); assertThat(retrieved).isPresent(); assertThat(retrieved.get()).isEqualTo(COLUMN_VALUES.get("KEY-0")); }
/**
 * Fetches the file at {@code url} with conditional-request handling enabled,
 * so an unmodified resource yields an empty Optional.
 *
 * @throws IOException if the request fails
 */
public Optional<String> fetchFileIfNotModified(String url) throws IOException {
    return fetchFile(url, true);
}
/** An HTTP 500 from the server must surface as an IOException with the failure message. */
@Test public void unsuccessfulRetrieve() throws Exception { this.server.enqueue(new MockResponse() .setResponseCode(500) .setBody("Error!")); server.start(); final HTTPFileRetriever httpFileRetriever = new HTTPFileRetriever(new OkHttpClient()); expectedException.expect(IOException.class); expectedException.expectMessage("Request failed: Server Error"); final Optional<String> ignored = httpFileRetriever.fetchFileIfNotModified(server.url("/").toString()); }
/**
 * Checks whether {@code val} contains a time-with-seconds pattern.
 * Uses {@code find()}, so a match anywhere in the string counts.
 */
public static boolean timeStringWithSeconds(String val) {
    return timePattern.matcher(val).find();
}
/** Times carrying a seconds component match; hour:minute-only forms do not. */
@Test void timeStringWithSeconds() { assertTrue(TimeFunction.timeStringWithSeconds("10:10:00@Australia/Melbourne")); assertTrue(TimeFunction.timeStringWithSeconds("10:10:00+10:00")); assertTrue(TimeFunction.timeStringWithSeconds("10:10:00:123")); assertFalse(TimeFunction.timeStringWithSeconds("10:10@Australia/Melbourne")); assertFalse(TimeFunction.timeStringWithSeconds("10:10+10:00")); }
/**
 * Renders a byte count in human-readable form ("512 bytes", "1.5 KB", ...).
 * Units step in powers of 1024 through K, M, G, T, P, E, Z and Y.
 *
 * @param size byte count; expected to be non-negative
 * @return the formatted size, with one decimal place above 1023 bytes
 */
public static String byteCountToDisplaySize(long size) {
    if (size < 1024L) {
        // Fix: zero must read "0 bytes" — only a count of exactly 1 is singular.
        // The previous condition (size > 1) produced "0 byte".
        return size + (size == 1 ? " byte" : " bytes");
    }
    long exp = (long) (Math.log(size) / Math.log(1024));
    double value = size / Math.pow(1024, exp);
    char unit = "KMGTPEZY".charAt((int) exp - 1);
    // NOTE(review): String.format is locale-sensitive (decimal comma in some
    // locales); pass Locale.ROOT if byte-stable output is ever required.
    return String.format("%.1f %s%s", value, unit, "B");
}
/** 1536 bytes must render as "1.5 KB". */
@Test public void shouldConvertBytesToKilo() { assertThat(FileSizeUtils.byteCountToDisplaySize(1024 + 512), is("1.5 KB")); }
/**
 * Materialises a runtime {@code Process} from its YAML representation, wrapping
 * the mutable counters and flags in their atomic holders.
 */
@Override
public Process swapToObject(final YamlProcess yamlConfig) {
    AtomicInteger totalUnits = new AtomicInteger(yamlConfig.getTotalUnitCount());
    AtomicInteger completedUnits = new AtomicInteger(yamlConfig.getCompletedUnitCount());
    AtomicBoolean idleFlag = new AtomicBoolean(yamlConfig.isIdle());
    AtomicBoolean interruptedFlag = new AtomicBoolean(yamlConfig.isInterrupted());
    return new Process(yamlConfig.getId(), yamlConfig.getStartMillis(), yamlConfig.getSql(),
            yamlConfig.getDatabaseName(), yamlConfig.getUsername(), yamlConfig.getHostname(),
            totalUnits, completedUnits, idleFlag, interruptedFlag);
}
/** Every YAML field must carry over into the swapped Process verbatim. */
@Test void assertSwapToObject() { Process actual = new YamlProcessSwapper().swapToObject(createYamlProcess()); assertThat(actual.getId(), is("foo_id")); assertThat(actual.getStartMillis(), is(1000L)); assertThat(actual.getSql(), is("SELECT 1")); assertThat(actual.getDatabaseName(), is("foo_db")); assertThat(actual.getUsername(), is("root")); assertThat(actual.getHostname(), is("localhost")); assertThat(actual.getTotalUnitCount().get(), is(10)); assertThat(actual.getCompletedUnitCount().get(), is(5)); assertTrue(actual.isIdle()); }
/**
 * Returns the names of the properties this implementation supports. The set is
 * an {@code ImmutableSet}, so exposing the internal reference is safe — hence
 * the suppressed EI_EXPOSE_REP warning.
 */
@Override
@SuppressFBWarnings(value = "EI_EXPOSE_REP")
public ImmutableSet<String> getSupportedProperties() {
    return SUPPORTED_PROPERTIES;
}
/** The supported-properties set must be exposed unchanged. */
@Test
public void shouldGetSupportedProperties() {
    // Given:
    final ProtobufProperties properties = new ProtobufProperties(ImmutableMap.of());
    // When:
    final ImmutableSet<String> supportedProperties = properties.getSupportedProperties();
    // Then:
    assertThat(supportedProperties, is(ProtobufProperties.SUPPORTED_PROPERTIES));
}
/**
 * Reads exactly {@code destination.length} bytes into {@code destination}.
 *
 * @throws NullPointerException if destination is null
 * @throws EOFException if too few bytes remain in the buffer
 */
public void readFully(final byte[] destination) throws EOFException {
    if (null == destination) {
        throw new NullPointerException("Destination must not be null");
    }
    // Delegate to the ranged overload covering the whole array.
    readFully(destination, 0, destination.length);
}
/** Both readFully overloads must reject a null destination with an NPE. */
@Test void shouldThrowNPEIfDestinationIsNull() throws Throwable { final UnsafeBuffer buffer = toUnsafeBuffer((out) -> {}); final DirectBufferDataInput dataInput = new DirectBufferDataInput(buffer); assertThrows(NullPointerException.class, () -> dataInput.readFully(null)); assertThrows(NullPointerException.class, () -> dataInput.readFully(null, 0, 0)); }
/**
 * Records that {@code byPeer} announced the transaction {@code hash} and
 * returns the (possibly newly created) confidence object. Listener
 * notification happens outside the lock so callbacks never run while it is
 * held.
 */
public TransactionConfidence seen(Sha256Hash hash, PeerAddress byPeer) {
    TransactionConfidence confidence;
    boolean fresh = false;
    lock.lock();
    try {
        cleanTable();
        confidence = getOrCreate(hash);
        fresh = confidence.markBroadcastBy(byPeer);
    } finally {
        lock.unlock();
    }
    // Fire SEEN_PEERS only the first time this peer announces the hash.
    if (fresh) confidence.queueListeners(TransactionConfidence.Listener.ChangeReason.SEEN_PEERS);
    return confidence;
}
/**
 * seen() must not corrupt the table when markBroadcastBy throws for one hash:
 * a later call with a well-behaved hash still succeeds.
 */
@Test
public void testSeen() {
    PeerAddress peer = createMock(PeerAddress.class);
    Sha256Hash brokenHash = createMock(Sha256Hash.class);
    Sha256Hash correctHash = createMock(Sha256Hash.class);
    TransactionConfidence brokenConfidence = createMock(TransactionConfidence.class);
    expect(brokenConfidence.getTransactionHash()).andReturn(brokenHash);
    expect(brokenConfidence.markBroadcastBy(peer)).andThrow(new ArithmeticException("some error"));
    TransactionConfidence correctConfidence = createMock(TransactionConfidence.class);
    expect(correctConfidence.getTransactionHash()).andReturn(correctHash);
    expect(correctConfidence.markBroadcastBy(peer)).andReturn(true);
    correctConfidence.queueListeners(anyObject(TransactionConfidence.Listener.ChangeReason.class));
    expectLastCall();
    TransactionConfidence.Factory factory = createMock(TransactionConfidence.Factory.class);
    expect(factory.createConfidence(brokenHash)).andReturn(brokenConfidence);
    expect(factory.createConfidence(correctHash)).andReturn(correctConfidence);
    replay(factory, brokenConfidence, correctConfidence);
    TxConfidenceTable table = new TxConfidenceTable(1, factory);
    try {
        table.seen(brokenHash, peer);
    } catch (ArithmeticException expected) {
        // do nothing
    }
    assertNotNull(table.seen(correctHash, peer));
}
/**
 * Folds {@code mark} into the running maximum input watermark (watermarks must
 * be monotonic) and then triggers the no-arg emission path.
 */
public void emitWatermarkInsideMailbox(Watermark mark) throws Exception {
    long advanced = Math.max(maxInputWatermark.getTimestamp(), mark.getTimestamp());
    maxInputWatermark = new Watermark(advanced);
    emitWatermarkInsideMailbox();
}
/**
 * Watermarks are forwarded immediately while the mailbox is empty, deferred
 * while other mail is pending, and coalesced so only the latest deferred
 * watermark is emitted.
 */
@Test
void testEmitWatermarkInsideMailbox() throws Exception {
    int priority = 42;
    final List<StreamElement> emittedElements = new ArrayList<>();
    final TaskMailboxImpl mailbox = new TaskMailboxImpl();
    final InternalTimeServiceManager<?> timerService = new NoOpInternalTimeServiceManager();
    final MailboxWatermarkProcessor<StreamRecord<String>> watermarkProcessor =
        new MailboxWatermarkProcessor<>(
            new CollectorOutput<>(emittedElements),
            new MailboxExecutorImpl(mailbox, priority, StreamTaskActionExecutor.IMMEDIATE),
            timerService);
    final List<Watermark> expectedOutput = new ArrayList<>();
    watermarkProcessor.emitWatermarkInsideMailbox(new Watermark(1));
    watermarkProcessor.emitWatermarkInsideMailbox(new Watermark(2));
    watermarkProcessor.emitWatermarkInsideMailbox(new Watermark(3));
    expectedOutput.add(new Watermark(1));
    expectedOutput.add(new Watermark(2));
    expectedOutput.add(new Watermark(3));
    assertThat(emittedElements).containsExactlyElementsOf(expectedOutput);
    mailbox.put(new Mail(() -> {}, TaskMailbox.MIN_PRIORITY, "checkpoint mail"));
    watermarkProcessor.emitWatermarkInsideMailbox(new Watermark(4));
    watermarkProcessor.emitWatermarkInsideMailbox(new Watermark(5));
    assertThat(emittedElements).containsExactlyElementsOf(expectedOutput);
    // FLINK-35528: do not allow yielding to continuation mails
    assertThat(mailbox.tryTake(priority)).isEqualTo(Optional.empty());
    assertThat(emittedElements).containsExactlyElementsOf(expectedOutput);
    while (mailbox.hasMail()) {
        mailbox.take(TaskMailbox.MIN_PRIORITY).run();
    }
    // Watermark(4) is processed together with Watermark(5)
    expectedOutput.add(new Watermark(5));
    assertThat(emittedElements).containsExactlyElementsOf(expectedOutput);
}
/**
 * Starts the invocation and returns the future that completes with the
 * responding members' UUIDs.
 */
public InternalCompletableFuture<Collection<UUID>> invoke() {
    doInvoke();
    return future;
}
/** A parallel stable-cluster invocation must collect the UUIDs of all members. */
@Test public void testInvoke() { Node node = getNode(instance1); Collection<UUID> uuids = invokeOnStableClusterParallel( node.getNodeEngine(), NoOpOperation::new, 0 ).join(); Collection<UUID> expectedUuids = getUuidsOfInstances(instance1, instance2, instance3); assertThat(uuids) .containsExactlyInAnyOrderElementsOf(expectedUuids); }
/**
 * Creates a command for the statement, delegating with the execution context's
 * own service context.
 */
public Command create(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext context) {
    return create(statement, context.getServiceContext(), context);
}
/** ALTER SYSTEM on an unknown, unprefixed property must fail with a ConfigException. */
@Test public void shouldNotRaiseExceptionWhenPrefixIsAdded() { configuredStatement = configuredStatement("ALTER SYSTEM 'TEST'='TEST';" , alterSystemProperty); when(alterSystemProperty.getPropertyName()).thenReturn("TEST"); when(alterSystemProperty.getPropertyValue()).thenReturn("TEST"); when(config.getBoolean(KsqlConfig.KSQL_SHARED_RUNTIME_ENABLED)).thenReturn(true); assertThrows(ConfigException.class, () -> commandFactory.create(configuredStatement, executionContext)); }
/**
 * Unregisters a config listener for the given dataId/group under the current
 * tenant.
 */
@Override
public void removeListener(String dataId, String group, Listener listener) {
    worker.removeTenantListener(dataId, group, listener);
}
/** removeListener must delegate exactly once to the worker's tenant-scoped removal. */
@Test void testRemoveListener() { String dataId = "1"; String group = "2"; Listener listener = new Listener() { @Override public Executor getExecutor() { return null; } @Override public void receiveConfigInfo(String configInfo) { } }; nacosConfigService.removeListener(dataId, group, listener); Mockito.verify(mockWoker, Mockito.times(1)).removeTenantListener(dataId, group, listener); }
/**
 * Commits a backup-id credential request for the account, enforcing the
 * SET_BACKUP_ID rate limit unless the identical request is already stored.
 *
 * @return a future that completes once the request is persisted (or
 *         immediately for a repeat commit)
 * @throws io.grpc.StatusRuntimeException PERMISSION_DENIED when the account
 *         has no configured backup level
 */
public CompletableFuture<Void> commitBackupId(final Account account,
    final BackupAuthCredentialRequest backupAuthCredentialRequest) {
    if (configuredBackupLevel(account).isEmpty()) {
        throw Status.PERMISSION_DENIED.withDescription("Backups not allowed on account").asRuntimeException();
    }
    byte[] serializedRequest = backupAuthCredentialRequest.serialize();
    byte[] existingRequest = account.getBackupCredentialRequest();
    // Constant-time comparison; re-committing the identical request is a no-op.
    if (existingRequest != null && MessageDigest.isEqual(serializedRequest, existingRequest)) {
        // No need to update or enforce rate limits, this is the credential that the user has already
        // committed to.
        return CompletableFuture.completedFuture(null);
    }
    return rateLimiters.forDescriptor(RateLimiters.For.SET_BACKUP_ID)
        .validateAsync(account.getUuid())
        .thenCompose(ignored -> this.accountsManager
            .updateAsync(account, acc -> acc.setBackupCredentialRequest(serializedRequest))
            .thenRun(Util.NOOP))
        .toCompletableFuture();
}
/**
 * A fresh credential request hits the SET_BACKUP_ID rate limit, while
 * re-committing the already-stored request bypasses it entirely.
 */
@Test
void testRateLimits() {
    final AccountsManager accountsManager = mock(AccountsManager.class);
    final BackupAuthManager authManager = create(BackupLevel.MESSAGES, true);
    final BackupAuthCredentialRequest credentialRequest = backupAuthTestUtil.getRequest(backupKey, aci);
    final Account account = mock(Account.class);
    when(account.getUuid()).thenReturn(aci);
    when(accountsManager.updateAsync(any(), any())).thenReturn(CompletableFuture.completedFuture(account));
    // Should be rate limited
    final RateLimitExceededException ex = CompletableFutureTestUtil.assertFailsWithCause(
        RateLimitExceededException.class,
        authManager.commitBackupId(account, credentialRequest));
    // If we don't change the request, shouldn't be rate limited
    when(account.getBackupCredentialRequest()).thenReturn(credentialRequest.serialize());
    assertDoesNotThrow(() -> authManager.commitBackupId(account, credentialRequest).join());
}
/**
 * Validates that the configured git URL is reachable.
 *
 * @return a valid bean on success; otherwise a not-valid bean describing the
 *         failure, or ERR_GIT_NOT_FOUND when git itself is unavailable
 */
public ValidationBean checkConnection(final SubprocessExecutionContext execCtx) {
    GitCommand gitCommand = new GitCommand(null, null, refSpecOrBranch, false, secrets());
    try {
        gitCommand.checkConnection(new UrlArgument(urlForCommandLine()));
        return ValidationBean.valid();
    } catch (Exception e) {
        try {
            // Probe the git version to distinguish "repo unreachable" from "git not installed".
            return handleException(e, gitCommand.version());
        } catch (Exception notInstallGitException) {
            return ValidationBean.notValid(ERR_GIT_NOT_FOUND);
        }
    }
}
/** Checking a non-existent repository must yield an invalid ValidationBean. */
@Test void shouldReturnInValidBean() { GitMaterial git = new GitMaterial("http://0.0.0.0"); ValidationBean validationBean = git.checkConnection(new TestSubprocessExecutionContext()); assertThat(validationBean.isValid()).as("Repository should not exist").isEqualTo(false); }
/**
 * Returns the number of declarations in this throws clause.
 */
@PublicAPI(usage = ACCESS)
public int size() {
    return throwsDeclarations.size();
}
/** The imported method's throws clause must report its two declared exceptions. */
@Test public void size() { assertThat(importMethod(SomeClass.class, "method").getThrowsClause().size()) .as("size of throws clause").isEqualTo(2); }
/**
 * Persists a new version of a workflow definition in a single retryable
 * transaction: bumps the version id, writes the version row, merges property
 * changes into a snapshot, updates latest/default flags, maintains the
 * active-version bookkeeping and trigger subscriptions, logs to the timeline,
 * and publishes a definition-change job event.
 *
 * @param workflowDef definition to add; enriched in place (snapshot, flags,
 *                    modify time, internal id) and returned
 * @param changes     optional property changes to merge into the snapshot
 * @return the enriched workflow definition
 */
public WorkflowDefinition addWorkflowDefinition(
    WorkflowDefinition workflowDef, Properties changes) {
    LOG.info("Adding a new workflow definition with an id [{}]", workflowDef.getWorkflow().getId());
    final Workflow workflow = workflowDef.getWorkflow();
    final Metadata metadata = workflowDef.getMetadata();
    return withMetricLogError(
        () -> withRetryableTransaction(
            conn -> {
                WorkflowInfo workflowInfo = getWorkflowInfoForUpdate(conn, workflow.getId());
                final long nextVersionId = workflowInfo.getLatestVersionId() + 1;
                // update the metadata with version info and then metadata is complete.
                metadata.setWorkflowVersionId(nextVersionId);
                TriggerUuids triggerUuids = insertMaestroWorkflowVersion(conn, metadata, workflow);
                PropertiesSnapshot snapshot = updateWorkflowProps(
                    conn, workflow.getId(), metadata.getVersionAuthor(), metadata.getCreateTime(),
                    workflowInfo.getPrevPropertiesSnapshot(), changes,
                    new PropertiesUpdate(Type.ADD_WORKFLOW_DEFINITION));
                // add new snapshot to workflowDef
                if (snapshot != null) {
                    workflowDef.setPropertiesSnapshot(snapshot);
                } else {
                    workflowDef.setPropertiesSnapshot(workflowInfo.getPrevPropertiesSnapshot());
                }
                final long[] upsertRes = upsertMaestroWorkflow(conn, workflowDef);
                Checks.notNull(
                    upsertRes, "the upsert result should not be null for workflow [%s]", workflow.getId());
                workflowDef.setIsLatest(true); // a new version will always be latest
                // add default flag and modified_time and then workflowDef is complete
                workflowDef.setIsDefault(
                    workflowInfo.getPrevActiveVersionId() == Constants.INACTIVE_VERSION_ID
                        || workflowDef.getIsActive());
                workflowDef.setModifyTime(upsertRes[0]);
                workflowDef.setInternalId(upsertRes[1]);
                if (workflowDef.getIsActive()) {
                    workflowInfo.setNextActiveWorkflow(
                        MaestroWorkflowVersion.builder()
                            .definition(workflow)
                            .triggerUuids(triggerUuids)
                            .metadata(metadata)
                            .build(),
                        workflowDef.getPropertiesSnapshot());
                } else if (workflowInfo.getPrevActiveVersionId() != Constants.INACTIVE_VERSION_ID) {
                    // getting an inactive new version but having an active old version
                    updateWorkflowInfoForNextActiveWorkflow(
                        conn, workflow.getId(), workflowInfo.getPrevActiveVersionId(), workflowInfo,
                        workflowDef.getPropertiesSnapshot());
                }
                if (workflowInfo.withWorkflow()) {
                    addWorkflowTriggersIfNeeded(conn, workflowInfo);
                }
                MaestroJobEvent jobEvent = logToTimeline(
                    conn, workflowDef, snapshot, workflowInfo.getPrevActiveVersionId());
                publisher.publishOrThrow(
                    jobEvent, "Failed to publish maestro definition change job event.");
                return workflowDef;
            }),
        "addWorkflowDefinition",
        "Failed creating a new workflow definition {}",
        workflow.getId());
}
/** A trigger-subscription failure during addWorkflowDefinition must propagate as a MaestroRuntimeException. */
@Test public void testWorkflowWithWorkflowTimeSubscriptionsFailsOnCronService() throws Exception { WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID4); Mockito.doThrow( new MaestroRuntimeException( MaestroRuntimeException.Code.INTERNAL_ERROR, "test error message")) .when(triggerClient) .upsertTriggerSubscription(Mockito.eq(wfd.getWorkflow()), any(), any()); AssertHelper.assertThrows( "expects mockito test error", MaestroRuntimeException.class, "test error message", () -> workflowDao.addWorkflowDefinition( wfd, wfd.getPropertiesSnapshot().extractProperties())); }
@Override public ValidationResult validate(RuleBuilderStep step) { final RuleFragment ruleFragment = actions.get(step.function()); FunctionDescriptor<?> functionDescriptor = ruleFragment.descriptor(); Map<String, Object> stepParameters = step.parameters(); //Add output to map String outputvariable = step.outputvariable(); if (StringUtils.isNotBlank(outputvariable)) { if (functionDescriptor.returnType() == Void.class) { return new ValidationResult(true, f("Return type is void. No output variable allowed", functionDescriptor.name())); } storeVariable(outputvariable, functionDescriptor.returnType()); } ImmutableList<ParameterDescriptor> parameterDescriptors = functionDescriptor.params(); for (ParameterDescriptor parameterDescriptor : parameterDescriptors) { String parameterName = parameterDescriptor.name(); Object value = stepParameters.get(parameterName); Class<?> variableType = getVariableType(value); if (!parameterDescriptor.optional() && value == null) { return new ValidationResult(true, f("Missing parameter %s", parameterName)); } //$ means it is stored in another variable and we need to fetch and verify that type if (value instanceof String s && s.startsWith("$")) { String substring = s.substring(1); Class<?> passedVariableType = variables.get(substring); if (Objects.isNull(passedVariableType)) { return new ValidationResult(true, f("Could not find passed variable %s", value)); } variableType = passedVariableType; } //Check if variable type matches function expectation Class<?> paramType = parameterDescriptor.type(); if (value != null && paramType != Object.class && variableType != paramType) { String errorMsg = "Found a wrong parameter type for parameter %s"; return new ValidationResult(true, f(errorMsg, parameterName)); } } return new ValidationResult(false, ""); }
/** Referencing an undefined $variable must fail validation with an explanatory reason. */
@Test void failsWhenPassedParamMissing() { HashMap<String, Object> parameters = new HashMap<>(); parameters.put(INT_PARAM, "$fromOutput"); RuleBuilderStep stepWithValidNegation = RuleBuilderStep.builder().parameters(parameters).function(INTEGER_FUNCTION).build(); ValidationResult result = classUnderTest.validate(stepWithValidNegation); assertThat(result.failed()).isTrue(); assertThat(result.failureReason()).isEqualTo("Could not find passed variable $fromOutput"); }
/**
 * Evaluates the arithmetic {@code expression} and returns its numeric result.
 */
public static double conversion(String expression) {
    Calculator calculator = new Calculator();
    return calculator.calculate(expression);
}
/** Regression: operator precedence with a leading negative term inside nested parentheses. */
@Test
public void conversationTest7() {
    //https://gitee.com/dromara/hutool/issues/I4KONB
    final double conversion = Calculator.conversion("((-2395+0) * 0.3+140.24+35+90)/30");
    assertEquals(-15.11, conversion, 0.01);
}
/**
 * Removes the pipeconf binding for the given device, if any.
 */
@Override
public void removeBinding(DeviceId deviceId) {
    deviceToPipeconf.remove(deviceId);
}
/** Removing a binding must clear both the device map entry and the reverse pipeconf mapping. */
@Test public void clearDeviceToPipeconfBinding() throws Exception { clear(); createOrUpdatePipeconfToDeviceBinding(); store.removeBinding(DEVICE_ID); assertFalse("Unexpected DeviceId in the map", store.deviceToPipeconf.containsKey(DEVICE_ID)); assertTrue("No value should be in the map", store.pipeconfToDevices.get(PIPECONF_ID).isEmpty()); }
/**
 * Runs one iteration of the sender loop: transactional bookkeeping first
 * (sequence resolution, fatal/abortable error handling, idempotent epoch
 * bumps, any pending transactional request), then one round of producing data
 * and polling the network client.
 */
void runOnce() {
    if (transactionManager != null) {
        try {
            transactionManager.maybeResolveSequences();
            RuntimeException lastError = transactionManager.lastError();
            // do not continue sending if the transaction manager is in a failed state
            if (transactionManager.hasFatalError()) {
                if (lastError != null)
                    maybeAbortBatches(lastError);
                client.poll(retryBackoffMs, time.milliseconds());
                return;
            }
            if (transactionManager.hasAbortableError() && shouldHandleAuthorizationError(lastError)) {
                return;
            }
            // Check whether we need a new producerId. If so, we will enqueue an InitProducerId
            // request which will be sent below
            transactionManager.bumpIdempotentEpochAndResetIdIfNeeded();
            if (maybeSendAndPollTransactionalRequest()) {
                return;
            }
        } catch (AuthenticationException e) {
            // This is already logged as error, but propagated here to perform any clean ups.
            log.trace("Authentication exception while processing transactional request", e);
            transactionManager.authenticationFailed(e);
        }
    }
    long currentTimeMs = time.milliseconds();
    long pollTimeout = sendProducerData(currentTimeMs);
    client.poll(pollTimeout, currentTimeMs);
}
/**
 * Record batches must be down-converted to the oldest magic value across the
 * partitions of a produce request when leaders report mismatched API versions.
 */
@SuppressWarnings("deprecation")
@Test
public void testDownConversionForMismatchedMagicValues() throws Exception {
    // it can happen that we construct a record set with mismatching magic values (perhaps
    // because the partition leader changed after the record set was initially constructed)
    // in this case, we down-convert record sets with newer magic values to match the oldest
    // created record set
    long offset = 0;
    // start off support produce request v3
    apiVersions.update("0", NodeApiVersions.create());
    Future<RecordMetadata> future1 = appendToAccumulator(tp0, 0L, "key", "value");
    // now the partition leader supports only v2
    apiVersions.update("0", NodeApiVersions.create(ApiKeys.PRODUCE.id, (short) 0, (short) 2));
    Future<RecordMetadata> future2 = appendToAccumulator(tp1, 0L, "key", "value");
    // start off support produce request v3
    apiVersions.update("0", NodeApiVersions.create());
    ProduceResponse.PartitionResponse resp = new ProduceResponse.PartitionResponse(Errors.NONE, offset, RecordBatch.NO_TIMESTAMP, 100);
    Map<TopicPartition, ProduceResponse.PartitionResponse> partResp = new HashMap<>();
    partResp.put(tp0, resp);
    partResp.put(tp1, resp);
    ProduceResponse produceResponse = new ProduceResponse(partResp, 0);
    client.prepareResponse(body -> {
        ProduceRequest request = (ProduceRequest) body;
        if (request.version() != 2)
            return false;
        Map<TopicPartition, MemoryRecords> recordsMap = partitionRecords(request);
        if (recordsMap.size() != 2)
            return false;
        for (MemoryRecords records : recordsMap.values()) {
            if (records == null || records.sizeInBytes() == 0 || !records.hasMatchingMagic(RecordBatch.MAGIC_VALUE_V1))
                return false;
        }
        return true;
    }, produceResponse);
    sender.runOnce(); // connect
    sender.runOnce(); // send produce request
    assertTrue(future1.isDone(), "Request should be completed");
    assertTrue(future2.isDone(), "Request should be completed");
}
/**
 * Applies the pending forward-index/dictionary operations computed for each
 * column. Forward-index deletion is deferred (tracked in
 * {@code _tmpForwardIndexColumns}) so other index handlers that still need the
 * forward index can read it before cleanup.
 *
 * @throws Exception if an index rewrite fails or a post-condition is violated
 */
@Override
public void updateIndices(SegmentDirectory.Writer segmentWriter) throws Exception {
    Map<String, List<Operation>> columnOperationsMap = computeOperations(segmentWriter);
    if (columnOperationsMap.isEmpty()) {
        return;
    }
    for (Map.Entry<String, List<Operation>> entry : columnOperationsMap.entrySet()) {
        String column = entry.getKey();
        List<Operation> operations = entry.getValue();
        for (Operation operation : operations) {
            switch (operation) {
                case DISABLE_FORWARD_INDEX:
                    // Deletion of the forward index will be handled outside the index handler to ensure that other index
                    // handlers that need the forward index to construct their own indexes will have it available.
                    _tmpForwardIndexColumns.add(column);
                    break;
                case ENABLE_FORWARD_INDEX:
                    ColumnMetadata columnMetadata = createForwardIndexIfNeeded(segmentWriter, column, false);
                    if (columnMetadata.hasDictionary()) {
                        if (!segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(String.format(
                                "Dictionary should still exist after rebuilding forward index for dictionary column: %s", column));
                        }
                    } else {
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                String.format("Dictionary should not exist after rebuilding forward index for raw column: %s", column));
                        }
                    }
                    break;
                case DISABLE_DICTIONARY:
                    Set<String> newForwardIndexDisabledColumns =
                        FieldIndexConfigsUtil.columnsWithIndexDisabled(_fieldIndexConfigs.keySet(), StandardIndexes.forward(), _fieldIndexConfigs);
                    if (newForwardIndexDisabledColumns.contains(column)) {
                        removeDictionaryFromForwardIndexDisabledColumn(column, segmentWriter);
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                String.format("Dictionary should not exist after disabling dictionary for column: %s", column));
                        }
                    } else {
                        disableDictionaryAndCreateRawForwardIndex(column, segmentWriter);
                    }
                    break;
                case ENABLE_DICTIONARY:
                    createDictBasedForwardIndex(column, segmentWriter);
                    if (!segmentWriter.hasIndexFor(column, StandardIndexes.forward())) {
                        throw new IllegalStateException(String.format("Forward index was not created for column: %s", column));
                    }
                    break;
                case CHANGE_INDEX_COMPRESSION_TYPE:
                    rewriteForwardIndexForCompressionChange(column, segmentWriter);
                    break;
                default:
                    throw new IllegalStateException("Unsupported operation for column " + column);
            }
        }
    }
}
/**
 * After enabling a range index on a forward-index-disabled MV column, the
 * temporary forward index must be removed post-cleanup while the range index
 * and dictionary survive, and metadata reflects duplicate removal.
 */
@Test
public void testAddOtherIndexForForwardIndexDisabledColumn() throws Exception {
    SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
    SegmentDirectory segmentLocalFSDirectory =
        new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap);
    SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter();
    IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
    Random rand = new Random();
    // Add column to range index list. Must be a numerical type.
    String column;
    do {
        column = MV_FORWARD_INDEX_DISABLED_DUPLICATES_COLUMNS.get(
            rand.nextInt(MV_FORWARD_INDEX_DISABLED_DUPLICATES_COLUMNS.size()));
    } while (!column.equals(DIM_MV_FORWARD_INDEX_DISABLED_DUPLICATES_STRING) && !column.equals(
        DIM_MV_FORWARD_INDEX_DISABLED_DUPLICATES_BYTES));
    indexLoadingConfig.addRangeIndexColumns(column);
    RangeIndexHandler rangeIndexHandler = new RangeIndexHandler(segmentLocalFSDirectory, indexLoadingConfig);
    rangeIndexHandler.updateIndices(writer);
    // Validate forward index exists before calling post cleanup
    validateIndexMap(column, true, false);
    rangeIndexHandler.postUpdateIndicesCleanup(writer);
    // Tear down before validation. Because columns.psf and index map cleanup happens at segmentDirectory.close()
    segmentLocalFSDirectory.close();
    // Validate index map including range index. Forward index should not exist, range index and dictionary should
    validateIndexMap(column, true, true);
    SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
    String segmentDir = INDEX_DIR + "/" + SEGMENT_NAME + "/v3";
    File idxMapFile = new File(segmentDir, V1Constants.INDEX_MAP_FILE_NAME);
    String indexMapStr = FileUtils.readFileToString(idxMapFile, StandardCharsets.UTF_8);
    assertEquals(StringUtils.countMatches(indexMapStr, column + ".range_index" + ".startOffset"), 1, column);
    assertEquals(StringUtils.countMatches(indexMapStr, column + ".range_index" + ".size"), 1, column);
    // In column metadata, some values can change since MV columns with duplicates lose the duplicates on forward index
    // regeneration.
    ColumnMetadata metadata = existingSegmentMetadata.getColumnMetadataFor(column);
    validateMetadataProperties(column, true, 7, metadata.getCardinality(), metadata.getTotalDocs(),
        metadata.getDataType(), metadata.getFieldType(), metadata.isSorted(), metadata.isSingleValue(),
        metadata.getMaxNumberOfMultiValues(), metadata.getTotalNumberOfEntries(), metadata.isAutoGenerated(),
        metadata.getMinValue(), metadata.getMaxValue(), true);
    // Validate that expected metadata properties don't match. totalNumberOfEntries will definitely not match since
    // duplicates will be removed, but maxNumberOfMultiValues may still match if the row with max multi-values didn't
    // have any duplicates.
    segmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
    ColumnMetadata columnMetadata = segmentMetadata.getColumnMetadataFor(column);
    assertNotEquals(metadata.getTotalNumberOfEntries(), columnMetadata.getTotalNumberOfEntries());
}
/**
 * Performs a source split with the default bundle-count and API response-size
 * limits.
 */
public static SourceOperationResponse performSplit(
    SourceSplitRequest request, PipelineOptions options) throws Exception {
    return performSplitWithApiLimit(
        request, options, DEFAULT_NUM_BUNDLES_LIMIT, DATAFLOW_SPLIT_RESPONSE_API_SIZE_LIMIT);
}
/** Splitting a valid source into an invalid one must fail with a descriptive IllegalArgumentException. */
@Test public void testSplittingProducedInvalidSource() throws Exception { com.google.api.services.dataflow.model.Source cloudSource = translateIOToCloudSource(new SourceProducingInvalidSplits("original", null), options); expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage( allOf( containsString("Splitting a valid source produced an invalid source"), containsString("original"), containsString("badBundle"))); expectedException.expectCause(hasMessage(containsString("intentionally invalid"))); performSplit( cloudSource, options, null /*desiredBundleSizeBytes*/, null /* numBundles limit */, null /* API limit */); }
/**
 * Renders the given Iceberg expressions as one human-readable string, with the
 * individual descriptions separated by {@code ", "}. An empty list yields "".
 *
 * @param exprs the expressions to describe
 * @return the comma-separated descriptions
 */
public static String describe(List<org.apache.iceberg.expressions.Expression> exprs) {
  return exprs.stream()
      .map(Spark3Util::describe)
      .reduce((left, right) -> left + ", " + right)
      .orElse("");
}
/**
 * Verifies the human-readable rendering of Iceberg expressions: plain comparisons,
 * time/bucket/truncate transforms, IN / NOT IN, and AND composition.
 */
@Test
public void testDescribeExpression() {
  Expression refExpression = equal("id", 1);
  assertThat(Spark3Util.describe(refExpression)).isEqualTo("id = 1");

  Expression yearExpression = greaterThan(year("ts"), 10);
  assertThat(Spark3Util.describe(yearExpression)).isEqualTo("year(ts) > 10");

  Expression monthExpression = greaterThanOrEqual(month("ts"), 10);
  assertThat(Spark3Util.describe(monthExpression)).isEqualTo("month(ts) >= 10");

  Expression dayExpression = lessThan(day("ts"), 10);
  assertThat(Spark3Util.describe(dayExpression)).isEqualTo("day(ts) < 10");

  Expression hourExpression = lessThanOrEqual(hour("ts"), 10);
  assertThat(Spark3Util.describe(hourExpression)).isEqualTo("hour(ts) <= 10");

  Expression bucketExpression = in(bucket("id", 5), 3);
  assertThat(Spark3Util.describe(bucketExpression)).isEqualTo("bucket[5](id) IN (3)");

  Expression truncateExpression = notIn(truncate("name", 3), "abc");
  assertThat(Spark3Util.describe(truncateExpression))
      .isEqualTo("truncate[3](name) NOT IN ('abc')");

  Expression andExpression = and(refExpression, yearExpression);
  assertThat(Spark3Util.describe(andExpression)).isEqualTo("(id = 1 AND year(ts) > 10)");
}
Queue<String> prepareRollingOrder(List<String> podNamesToConsider, List<Pod> pods) { Deque<String> rollingOrder = new ArrayDeque<>(); for (String podName : podNamesToConsider) { Pod matchingPod = pods.stream().filter(pod -> podName.equals(pod.getMetadata().getName())).findFirst().orElse(null); if (matchingPod == null || !Readiness.isPodReady(matchingPod)) { // Non-existing or unready pods are handled first // This helps to avoid rolling all pods into some situation where they would be all failing rollingOrder.addFirst(podName); } else { // Ready pods are rolled only at the end rollingOrder.addLast(podName); } } return rollingOrder; }
/**
 * The missing pod (index 2) and the unready pod (index 1) must be rolled before
 * the ready pod (index 0).
 */
@Test
public void testRollingOrderWithUnreadyAndMissingPod() {
    List<Pod> pods = List.of(
            renamePod(READY_POD, "my-connect-connect-0"),
            renamePod(UNREADY_POD, "my-connect-connect-1")
    );

    KafkaConnectRoller roller = new KafkaConnectRoller(RECONCILIATION, CLUSTER, 1_000L, null);
    Queue<String> rollingOrder = roller.prepareRollingOrder(POD_NAMES, pods);

    assertThat(rollingOrder.size(), is(3));
    assertThat(rollingOrder.poll(), is("my-connect-connect-2"));
    assertThat(rollingOrder.poll(), is("my-connect-connect-1"));
    assertThat(rollingOrder.poll(), is("my-connect-connect-0"));
}
/**
 * Executes an interactive query by delegating directly to the wrapped store.
 *
 * @param query         the query to run
 * @param positionBound the position the store must have reached before answering
 * @param config        query execution configuration
 * @return the result produced by the underlying store
 */
@Override
public <R> QueryResult<R> query(final Query<R> query,
                                final PositionBound positionBound,
                                final QueryConfig config) {
    return store.query(query, positionBound, config);
}
/** The wrapper must forward query() calls to the wrapped versioned store verbatim. */
@SuppressWarnings("unchecked")
@Test
public void shouldQueryVersionedStore() {
    givenWrapperWithVersionedStore();
    when(versionedStore.query(query, positionBound, queryConfig)).thenReturn(result);

    assertThat(wrapper.query(query, positionBound, queryConfig), equalTo(result));
}
/**
 * Returns the configured server id, or {@code null} when the
 * {@code CoreProperties.SERVER_ID} property is not set.
 */
@Override
@CheckForNull
public String getId() {
    return config.get(CoreProperties.SERVER_ID).orElse(null);
}
/** getId() must return the value of the SERVER_ID property when it is set. */
@Test
public void test_id() {
    settings.setProperty(CoreProperties.SERVER_ID, "foo");
    assertThat(underTest.getId()).isEqualTo("foo");
}
/**
 * Loads the processed SAML metadata for the given result id.
 *
 * @param resultId id of the metadata processing result
 * @return the stored metadata, or {@code null} when no result with that id exists
 */
public String getProcessedMetadata(Long resultId) {
    return resultRepository.findById(resultId).map(SamlMetadataProcessResult::getMetadata).orElse(null);
}
/** When the repository holds a result, its metadata must be returned. */
@Test
void getProcessedMetadata() {
    SamlMetadataProcessResult processResult = new SamlMetadataProcessResult();
    processResult.setMetadata("metadata");
    Optional<SamlMetadataProcessResult> resultOptional = Optional.of(processResult);
    when(resultRepositoryMock.findById(anyLong())).thenReturn(resultOptional);

    String result = metadataRetrieverServiceMock.getProcessedMetadata(1L);

    verify(resultRepositoryMock, times(1)).findById(anyLong());
    assertNotNull(result);
}
/**
 * Static factory for a {@code ValueFormat}.
 *
 * @param format   the serialization format information
 * @param features the serde features enabled for the value
 * @return a new {@code ValueFormat} instance
 */
public static ValueFormat of(
    final FormatInfo format,
    final SerdeFeatures features
) {
  return new ValueFormat(format, features);
}
/**
 * Value formats are equal only when both the format info and the serde features
 * match; differing either component must break equality.
 */
@Test
public void shouldImplementEquals() {
  new EqualsTester()
      .addEqualityGroup(
          ValueFormat.of(FORMAT_INFO, SerdeFeatures.of()),
          ValueFormat.of(FORMAT_INFO, SerdeFeatures.of())
      )
      .addEqualityGroup(
          ValueFormat.of(FormatInfo.of(JSON.name()), SerdeFeatures.of())
      )
      .addEqualityGroup(
          ValueFormat.of(FORMAT_INFO, SerdeFeatures.of(WRAP_SINGLES))
      )
      .testEquals();
}
/**
 * Dissects an archive control-protocol request and appends a human-readable
 * description of it to the supplied builder.
 *
 * <p>The common log header and the SBE message header are decoded first; each
 * supported {@code ArchiveEventCode} then wraps its dedicated flyweight decoder
 * over the message body and delegates the field rendering to the matching
 * {@code append*} helper. Unknown codes are reported as {@code ": unknown command"}.
 *
 * @param eventCode event code identifying the request type
 * @param buffer    buffer containing the encoded request
 * @param offset    offset in the buffer at which the log header begins
 * @param builder   builder to which the description is appended
 */
@SuppressWarnings("MethodLength")
static void dissectControlRequest(
    final ArchiveEventCode eventCode,
    final MutableDirectBuffer buffer,
    final int offset,
    final StringBuilder builder)
{
    // Decode the common log header, then skip over the SBE message header.
    int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
    HEADER_DECODER.wrap(buffer, offset + encodedLength);
    encodedLength += MessageHeaderDecoder.ENCODED_LENGTH;

    switch (eventCode)
    {
        case CMD_IN_CONNECT:
            CONNECT_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendConnect(builder);
            break;

        case CMD_IN_CLOSE_SESSION:
            CLOSE_SESSION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendCloseSession(builder);
            break;

        case CMD_IN_START_RECORDING:
            START_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording(builder);
            break;

        case CMD_IN_STOP_RECORDING:
            STOP_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecording(builder);
            break;

        case CMD_IN_REPLAY:
            REPLAY_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplay(builder);
            break;

        case CMD_IN_STOP_REPLAY:
            STOP_REPLAY_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplay(builder);
            break;

        case CMD_IN_LIST_RECORDINGS:
            LIST_RECORDINGS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordings(builder);
            break;

        case CMD_IN_LIST_RECORDINGS_FOR_URI:
            LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingsForUri(builder);
            break;

        case CMD_IN_LIST_RECORDING:
            LIST_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecording(builder);
            break;

        case CMD_IN_EXTEND_RECORDING:
            EXTEND_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording(builder);
            break;

        case CMD_IN_RECORDING_POSITION:
            RECORDING_POSITION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendRecordingPosition(builder);
            break;

        case CMD_IN_TRUNCATE_RECORDING:
            TRUNCATE_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTruncateRecording(builder);
            break;

        case CMD_IN_STOP_RECORDING_SUBSCRIPTION:
            STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingSubscription(builder);
            break;

        case CMD_IN_STOP_POSITION:
            STOP_POSITION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopPosition(builder);
            break;

        case CMD_IN_FIND_LAST_MATCHING_RECORD:
            FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendFindLastMatchingRecord(builder);
            break;

        case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS:
            LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingSubscriptions(builder);
            break;

        case CMD_IN_START_BOUNDED_REPLAY:
            BOUNDED_REPLAY_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartBoundedReplay(builder);
            break;

        case CMD_IN_STOP_ALL_REPLAYS:
            STOP_ALL_REPLAYS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopAllReplays(builder);
            break;

        case CMD_IN_REPLICATE:
            REPLICATE_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate(builder);
            break;

        case CMD_IN_STOP_REPLICATION:
            STOP_REPLICATION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplication(builder);
            break;

        case CMD_IN_START_POSITION:
            START_POSITION_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartPosition(builder);
            break;

        case CMD_IN_DETACH_SEGMENTS:
            DETACH_SEGMENTS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDetachSegments(builder);
            break;

        case CMD_IN_DELETE_DETACHED_SEGMENTS:
            DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDeleteDetachedSegments(builder);
            break;

        case CMD_IN_PURGE_SEGMENTS:
            PURGE_SEGMENTS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeSegments(builder);
            break;

        case CMD_IN_ATTACH_SEGMENTS:
            ATTACH_SEGMENTS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAttachSegments(builder);
            break;

        case CMD_IN_MIGRATE_SEGMENTS:
            MIGRATE_SEGMENTS_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendMigrateSegments(builder);
            break;

        case CMD_IN_AUTH_CONNECT:
            AUTH_CONNECT_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAuthConnect(builder);
            break;

        case CMD_IN_KEEP_ALIVE:
            KEEP_ALIVE_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendKeepAlive(builder);
            break;

        case CMD_IN_TAGGED_REPLICATE:
            TAGGED_REPLICATE_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTaggedReplicate(builder);
            break;

        case CMD_IN_START_RECORDING2:
            START_RECORDING_REQUEST2_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording2(builder);
            break;

        case CMD_IN_EXTEND_RECORDING2:
            EXTEND_RECORDING_REQUEST2_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording2(builder);
            break;

        case CMD_IN_STOP_RECORDING_BY_IDENTITY:
            STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingByIdentity(builder);
            break;

        case CMD_IN_PURGE_RECORDING:
            PURGE_RECORDING_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeRecording(builder);
            break;

        case CMD_IN_REPLICATE2:
            REPLICATE_REQUEST2_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate2(builder);
            break;

        case CMD_IN_REQUEST_REPLAY_TOKEN:
            REPLAY_TOKEN_REQUEST_DECODER.wrap(
                buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplayToken(builder);
            break;

        default:
            builder.append(": unknown command");
    }
}
/**
 * Dissecting a CMD_IN_AUTH_CONNECT request must render all fields, reporting the
 * credentials only by their encoded length.
 */
@Test
void controlRequestAuthConnect() {
    internalEncodeLogHeader(buffer, 0, 3, 6, () -> 5_500_000_000L);
    final AuthConnectRequestEncoder requestEncoder = new AuthConnectRequestEncoder();
    requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
        .correlationId(16)
        .responseStreamId(19)
        .version(2)
        .responseChannel("English Channel")
        .putEncodedCredentials("hello".getBytes(US_ASCII), 0, 5);

    dissectControlRequest(CMD_IN_AUTH_CONNECT, buffer, 0, builder);

    assertEquals("[5.500000000] " + CONTEXT + ": " + CMD_IN_AUTH_CONNECT.name() + " [3/6]:" +
        " correlationId=16" +
        " responseStreamId=19" +
        " version=2" +
        " responseChannel=English Channel" +
        " encodedCredentialsLength=5",
        builder.toString());
}
/**
 * Resolves a SpEL expression against the annotated method invocation.
 *
 * <p>Resolution order:
 * <ol>
 *   <li>empty expressions are returned unchanged;</li>
 *   <li>property placeholders are resolved through the string value resolver, when
 *       one is available;</li>
 *   <li>method-based expressions are evaluated against the invocation;</li>
 *   <li>bean-referencing expressions are evaluated with bean resolution enabled;</li>
 *   <li>anything else is returned verbatim.</li>
 * </ol>
 *
 * @param method         the intercepted method
 * @param arguments      the actual invocation arguments
 * @param spelExpression the raw expression from the annotation
 * @return the resolved value, or the original expression when no rule matched
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }

    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        return stringValueResolver.resolveStringValue(spelExpression);
    }

    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        return evaluateAgainstInvocation(method, arguments, spelExpression, false);
    }

    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        return evaluateAgainstInvocation(method, arguments, spelExpression, true);
    }

    return spelExpression;
}

/**
 * Evaluates the expression in a method-based evaluation context, optionally
 * allowing bean references to be resolved from the bean factory. Shared by the
 * method-based and bean-based branches of {@link #resolve}.
 */
private String evaluateAgainstInvocation(Method method, Object[] arguments, String spelExpression, boolean resolveBeans) {
    SpelRootObject rootObject = new SpelRootObject(method, arguments);
    MethodBasedEvaluationContext evaluationContext =
            new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
    if (resolveBeans) {
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
    }
    Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
    return (String) evaluated;
}
/** A ${property:default} placeholder whose property is missing must resolve to its default. */
@Test
public void placeholderSpelTest() throws Exception {
    String testExpression = "${missingProperty:default}";
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);

    String result = sut.resolve(testMethod, new Object[]{}, testExpression);

    assertThat(result).isEqualTo("default");
}
/**
 * Indicates whether CORS support is enabled for this configuration.
 *
 * @return {@code true} if CORS support is enabled, otherwise {@code false}
 */
public boolean isCorsSupportEnabled() {
    return enabled;
}
/** disable() must turn CORS support off, even for a forAnyOrigin() builder. */
@Test
public void disabled() {
    final CorsConfig cors = forAnyOrigin().disable().build();
    assertThat(cors.isCorsSupportEnabled(), is(false));
}
public FEELFnResult<Boolean> invoke(@ParameterName( "point1" ) Comparable point1, @ParameterName( "point2" ) Comparable point2) { if ( point1 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point1", "cannot be null")); } if ( point2 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point2", "cannot be null")); } try { boolean result = point1.compareTo( point2 ) == 0; return FEELFnResult.ofResult( result ); } catch( Exception e ) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point1", "cannot be compared to point2")); } }
/** coincides() on scalar values: true only when the two points are equal. */
@Test
void invokeParamSingles() {
    FunctionTestUtil.assertResult( coincidesFunction.invoke( "a", "b" ), Boolean.FALSE );
    FunctionTestUtil.assertResult( coincidesFunction.invoke( "a", "a" ), Boolean.TRUE );
    FunctionTestUtil.assertResult( coincidesFunction.invoke( "b", "a" ), Boolean.FALSE );
    FunctionTestUtil.assertResult( coincidesFunction.invoke( BigDecimal.valueOf(2), BigDecimal.valueOf(1) ), Boolean.FALSE );
    FunctionTestUtil.assertResult( coincidesFunction.invoke( BigDecimal.valueOf(1), BigDecimal.valueOf(2) ), Boolean.FALSE );
    FunctionTestUtil.assertResult( coincidesFunction.invoke( BigDecimal.valueOf(1), BigDecimal.valueOf(1) ), Boolean.TRUE );
}
/**
 * Builds the JSON request body for a "check repository connection" plugin call.
 *
 * @param repositoryConfiguration the configured repository properties
 * @return JSON of the shape {@code {"repository-configuration": {...}}}
 */
@Override
public String requestMessageForCheckConnectionToRepository(RepositoryConfiguration repositoryConfiguration) {
    // Parameterized types instead of the previous raw Map/LinkedHashMap usage;
    // LinkedHashMap keeps key order stable so the serialized JSON is deterministic.
    Map<String, Object> configuredValues = new LinkedHashMap<>();
    configuredValues.put("repository-configuration", jsonResultMessageHandler.configurationToMap(repositoryConfiguration));
    return GSON.toJson(configuredValues);
}
/** The request body must wrap the repository configuration under "repository-configuration". */
@Test
public void shouldBuildRequestBodyForCheckRepositoryConnectionRequest() throws Exception {
    String requestMessage = messageHandler.requestMessageForCheckConnectionToRepository(repositoryConfiguration);
    assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}"));
}
/**
 * Computes the tiles obtained by shifting every source tile by {@code mZoomDelta}
 * zoom levels.
 *
 * <p>Zooming out (delta &lt;= 0) maps each tile to a single coarser tile; zooming in
 * (delta &gt; 0) expands each tile into a {@code 2^delta x 2^delta} square of finer
 * tiles. Tiles whose destination zoom falls outside the valid range are skipped.
 *
 * @param pSource the source tiles
 * @param pReuse  optional list to append into; a new list is created when null
 * @return the list containing the computed tiles
 */
@Override
public MapTileList computeFromSource(final MapTileList pSource, final MapTileList pReuse) {
    final MapTileList out = pReuse != null ? pReuse : new MapTileList();
    for (int i = 0; i < pSource.getSize(); i++) {
        final long sourceIndex = pSource.get(i);
        final int sourceZoom = MapTileIndex.getZoom(sourceIndex);
        final int destZoom = sourceZoom + mZoomDelta;
        if (destZoom < 0 || destZoom > MapTileIndex.mMaxZoomLevel) {
            // Destination zoom is out of range: this tile has no counterpart.
            continue;
        }
        final int sourceX = MapTileIndex.getX(sourceIndex);
        final int sourceY = MapTileIndex.getY(sourceIndex);
        if (mZoomDelta <= 0) {
            // Zooming out: shift coordinates right to find the single coarser tile.
            out.put(MapTileIndex.getTileIndex(destZoom, sourceX >> -mZoomDelta, sourceY >> -mZoomDelta));
            continue;
        }
        // Zooming in: enumerate the power x power square of finer tiles.
        final int power = 1 << mZoomDelta;
        final int destX = sourceX << mZoomDelta;
        final int destY = sourceY << mZoomDelta;
        for (int j = 0; j < power; j++) {
            for (int k = 0; k < power; k++) {
                out.put(MapTileIndex.getTileIndex(destZoom, destX + j, destY + k));
            }
        }
    }
    return out;
}
/**
 * Exercises MapTileListZoomComputer across a range of zoom deltas: first checks the
 * expected tile counts for every delta, then verifies the exact destination
 * coordinates for deltas -1 and +1.
 */
@Test
public void testComputeFromSource() {
    final MapTileList source = new MapTileList();
    final MapTileList dest = new MapTileList();
    final HashSet<Long> set = new HashSet<>();
    final int sourceZoom = 5;
    final int sourceXMin = 10;
    final int sourceXMax = 15;
    final int sourceYMin = 20;
    final int sourceYMax = 22;
    final int destMinus1XMin = sourceXMin >> 1;
    final int destMinus1XMax = sourceXMax >> 1;
    final int destMinus1YMin = sourceYMin >> 1;
    final int destMinus1YMax = sourceYMax >> 1;
    final int destPlus1XMin = sourceXMin << 1;
    final int destPlus1XMax = (sourceXMax << 1) + 1;
    final int destPlus1YMin = sourceYMin << 1;
    final int destPlus1YMax = (sourceYMax << 1) + 1;
    for (int i = sourceXMin; i <= sourceXMax; i++) {
        for (int j = sourceYMin; j <= sourceYMax; j++) {
            source.put(MapTileIndex.getTileIndex(sourceZoom, i, j));
        }
    }
    Assert.assertEquals((sourceXMax - sourceXMin + 1) * (sourceYMax - sourceYMin + 1), source.getSize());

    // count checking
    final int minMaxDelta = 4;
    for (int zoomDelta = -minMaxDelta; zoomDelta < minMaxDelta; zoomDelta++) {
        final MapTileListZoomComputer computer = new MapTileListZoomComputer(zoomDelta);
        dest.clear();
        computer.computeFromSource(source, dest);
        final String tag = "zoomDelta=" + zoomDelta;
        if (sourceZoom + zoomDelta < 0 || sourceZoom + zoomDelta > MapTileIndex.mMaxZoomLevel) {
            Assert.assertEquals(tag, 0, dest.getSize());
        } else if (zoomDelta <= 0) {
            Assert.assertEquals(tag, source.getSize(), dest.getSize());
        } else {
            Assert.assertEquals(tag, source.getSize() << (2 * zoomDelta), dest.getSize());
        }
    }
    MapTileListZoomComputer computer;

    // data checking for -1
    computer = new MapTileListZoomComputer(-1);
    dest.clear();
    computer.computeFromSource(source, dest);
    set.clear();
    populateSet(set, dest);
    check(set, sourceZoom + computer.getZoomDelta(), destMinus1XMin, destMinus1XMax, destMinus1YMin, destMinus1YMax);

    // data checking for +1
    computer = new MapTileListZoomComputer(1);
    dest.clear();
    computer.computeFromSource(source, dest);
    set.clear();
    populateSet(set, dest);
    check(set, sourceZoom + computer.getZoomDelta(), destPlus1XMin, destPlus1XMax, destPlus1YMin, destPlus1YMax);
}
/**
 * Returns the cache manager created by this factory bean.
 *
 * @return the {@code SpringEmbeddedCacheManager} managed by this factory
 * @throws Exception declared by the FactoryBean contract; not thrown here
 */
@Override
public SpringEmbeddedCacheManager getObject() throws Exception {
    return this.cacheManager;
}
/**
 * The factory bean must honor a custom configuration file: a cache declared there
 * with asynchronous replication must come back with that cache mode.
 */
@Test
public void testIfSpringEmbeddedCacheManagerFactoryBeanCreatesACustomizedCacheManagerIfGivenADefaultConfigurationLocation()
        throws Exception {
    objectUnderTest = SpringEmbeddedCacheManagerFactoryBeanBuilder
            .defaultBuilder().fromFile(NAMED_ASYNC_CACHE_CONFIG_LOCATION, getClass()).build();

    final SpringEmbeddedCacheManager springEmbeddedCacheManager = objectUnderTest.getObject();

    assertNotNull(
            "getObject() should have returned a valid SpringEmbeddedCacheManager, configured using the configuration file "
                    + "set on SpringEmbeddedCacheManagerFactoryBean. However, it returned null.",
            springEmbeddedCacheManager);
    final SpringCache cacheDefinedInCustomConfiguration = springEmbeddedCacheManager
            .getCache(CACHE_NAME_FROM_CONFIGURATION_FILE);
    final org.infinispan.configuration.cache.Configuration configuration =
            ((Cache) cacheDefinedInCustomConfiguration.getNativeCache())
                    .getCacheConfiguration();
    assertEquals(
            "The cache named [" + CACHE_NAME_FROM_CONFIGURATION_FILE
                    + "] is configured to have asynchonous replication cache mode. Yet, the cache returned from getCache("
                    + CACHE_NAME_FROM_CONFIGURATION_FILE
                    + ") has a different cache mode. Obviously, SpringEmbeddedCacheManagerFactoryBean did not use "
                    + "the configuration file when instantiating SpringEmbeddedCacheManager.",
            org.infinispan.configuration.cache.CacheMode.REPL_ASYNC, configuration.clustering().cacheMode());
}
/**
 * Two resource ids are equal exactly when they wrap the same GCS path.
 *
 * @param obj the object to compare against; may be null
 * @return whether the other object is a {@code GcsResourceId} with an equal path
 */
@Override
public boolean equals(@Nullable Object obj) {
    return obj instanceof GcsResourceId
        && gcsPath.equals(((GcsResourceId) obj).gcsPath);
}
/** Equality is based on the full GCS path; the trailing slash is significant. */
@Test
public void testEquals() {
    assertEquals(
        toResourceIdentifier("gs://my_bucket/tmp/"), toResourceIdentifier("gs://my_bucket/tmp/"));

    assertNotEquals(
        toResourceIdentifier("gs://my_bucket/tmp"), toResourceIdentifier("gs://my_bucket/tmp/"));
}
/**
 * Deletes the data source configuration with the given id.
 *
 * @param id id of the data source configuration to delete
 */
@Override
public void deleteDataSourceConfig(Long id) {
    // Validate that the configuration exists (throws otherwise).
    validateDataSourceConfigExists(id);
    // Delete the record.
    dataSourceConfigMapper.deleteById(id);
}
/** Deleting an existing data source configuration must remove it from the database. */
@Test
public void testDeleteDataSourceConfig_success() {
    // Mock data.
    DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class);
    dataSourceConfigMapper.insert(dbDataSourceConfig);// @Sql: insert an existing record first
    // Prepare the parameter.
    Long id = dbDataSourceConfig.getId();

    // Invoke the service.
    dataSourceConfigService.deleteDataSourceConfig(id);
    // Verify the record no longer exists.
    assertNull(dataSourceConfigMapper.selectById(id));
}
/**
 * Returns the configured hybrid-partition data consume constraint, deciding and
 * logging a default when none is configured.
 *
 * <p>The default is {@code ONLY_FINISHED_PRODUCERS} when speculative execution is
 * enabled and {@code UNFINISHED_PRODUCERS} otherwise. With speculative execution
 * enabled, an explicit {@code UNFINISHED_PRODUCERS} setting is rejected, since
 * speculation only supports consuming finished partitions.
 *
 * @param configuration the job configuration
 * @param enableSpeculativeExecution whether speculative execution is enabled
 * @return the constraint to use
 */
public static HybridPartitionDataConsumeConstraint getOrDecideHybridPartitionDataConsumeConstraint(
        Configuration configuration, boolean enableSpeculativeExecution) {
    final HybridPartitionDataConsumeConstraint hybridPartitionDataConsumeConstraint =
            configuration
                    .getOptional(JobManagerOptions.HYBRID_PARTITION_DATA_CONSUME_CONSTRAINT)
                    .orElseGet(
                            () -> {
                                HybridPartitionDataConsumeConstraint defaultConstraint =
                                        enableSpeculativeExecution
                                                ? ONLY_FINISHED_PRODUCERS
                                                : UNFINISHED_PRODUCERS;
                                LOG.info(
                                        "Set {} to {} as it is not configured",
                                        JobManagerOptions
                                                .HYBRID_PARTITION_DATA_CONSUME_CONSTRAINT
                                                .key(),
                                        defaultConstraint.name());
                                return defaultConstraint;
                            });
    if (enableSpeculativeExecution) {
        Preconditions.checkState(
                hybridPartitionDataConsumeConstraint != UNFINISHED_PRODUCERS,
                "For speculative execution, only supports consume finished partition now.");
    }
    return hybridPartitionDataConsumeConstraint;
}
/** With speculative execution enabled, UNFINISHED_PRODUCERS must be rejected. */
@Test
void testNotOnlyConsumeFinishedPartitionWithSpeculativeEnable() {
    Configuration configuration = new Configuration();
    configuration.set(
            JobManagerOptions.HYBRID_PARTITION_DATA_CONSUME_CONSTRAINT, UNFINISHED_PRODUCERS);
    assertThatThrownBy(
                    () -> getOrDecideHybridPartitionDataConsumeConstraint(configuration, true))
            .isInstanceOf(IllegalStateException.class);
}
/**
 * Fetches the value at the given column index from the merged result set and
 * converts it to a {@code BigDecimal}.
 *
 * @param columnIndex 1-based column index
 * @return the converted value
 * @throws SQLException if fetching the value fails
 */
@Override
public BigDecimal getBigDecimal(final int columnIndex) throws SQLException {
    return (BigDecimal) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, BigDecimal.class), BigDecimal.class);
}
/** getBigDecimal(int) must return the converted value from the merged result set. */
@Test
void assertGetBigDecimalWithColumnIndex() throws SQLException {
    when(mergeResultSet.getValue(1, BigDecimal.class)).thenReturn(new BigDecimal("1"));
    assertThat(shardingSphereResultSet.getBigDecimal(1), is(new BigDecimal("1")));
}
/**
 * Builds the paged "fetch rows" query for fuzzy config-info search (Derby dialect).
 *
 * <p>The tenant id is always matched with LIKE; data id, group and content are added
 * as LIKE filters only when present, while app name is an exact match and type is an
 * IN filter. Paging uses OFFSET/FETCH from the context.
 *
 * @param context carries the filter parameters and the paging window
 * @return the SQL and parameter list to execute
 */
@Override
public MapperResult findConfigInfoLike4PageFetchRows(MapperContext context) {
    final String tenantId = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
    final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
    final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
    final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
    final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);
    final String[] types = (String[]) context.getWhereParameter(FieldConstant.TYPE);

    WhereBuilder where = new WhereBuilder(
            "SELECT id,data_id,group_id,tenant_id,app_name,content,encrypted_data_key,type FROM config_info");
    // tenant_id is mandatory and always a LIKE match.
    where.like("tenant_id", tenantId);
    if (StringUtils.isNotBlank(dataId)) {
        where.and().like("data_id", dataId);
    }
    if (StringUtils.isNotBlank(group)) {
        where.and().like("group_id", group);
    }
    if (StringUtils.isNotBlank(appName)) {
        where.and().eq("app_name", appName);
    }
    if (StringUtils.isNotBlank(content)) {
        where.and().like("content", content);
    }
    if (!ArrayUtils.isEmpty(types)) {
        where.and().in("type", types);
    }
    where.offset(context.getStartRow(), context.getPageSize());
    return where.build();
}
/** Only tenant (LIKE) and app name (=) filters are present, plus the paging clause. */
@Test
void testFindConfigInfoLike4PageFetchRows() {
    MapperResult mapperResult = configInfoMapperByDerby.findConfigInfoLike4PageFetchRows(context);
    assertEquals(mapperResult.getSql(),
            "SELECT id,data_id,group_id,tenant_id,app_name,content,encrypted_data_key,type FROM config_info "
                    + "WHERE tenant_id LIKE ? AND app_name = ? OFFSET " + startRow + " ROWS FETCH NEXT " + pageSize
                    + " ROWS ONLY");
    assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
public static void checkNullOrNonNullEntries( @Nullable Collection<?> values, String propertyName) { if (values == null) { // pass return; } for (Object value : values) { Preconditions.checkNotNull( value, "Property '" + propertyName + "' cannot contain null entries"); } }
/** An empty collection has no entries and therefore passes the null-entry check. */
@Test
public void testCheckNullOrNonNullEntries_emptyPass() {
  Validator.checkNullOrNonNullEntries(ImmutableList.of(), "test"); // pass
}
/**
 * Sends the weekly coupons newsletter to every contact on the
 * "weekly-coupons-newsletter" contact list using the stored email template.
 *
 * <p>Failure handling: a missing contact list or a suspended account is reported on
 * stderr only, while delivery-blocking conditions (unverified domain, rejected
 * message, paused sending) are reported and rethrown.
 */
public void sendCouponNewsletter() {
    try {
        // Retrieve the list of contacts from the "weekly-coupons-newsletter" contact
        // list
        // snippet-start:[sesv2.java2.newsletter.ListContacts]
        ListContactsRequest contactListRequest = ListContactsRequest.builder()
                .contactListName(CONTACT_LIST_NAME)
                .build();
        List<String> contactEmails;
        try {
            ListContactsResponse contactListResponse = sesClient.listContacts(contactListRequest);
            contactEmails = contactListResponse.contacts().stream()
                    .map(Contact::emailAddress)
                    .toList();
        } catch (Exception e) {
            // TODO: Remove when listContacts's GET body issue is resolved.
            contactEmails = this.contacts;
        }
        // snippet-end:[sesv2.java2.newsletter.ListContacts]

        // Send an email using the "weekly-coupons" template to each contact in the list
        // snippet-start:[sesv2.java2.newsletter.SendEmail.template]
        String coupons = Files.readString(Paths.get("resources/coupon_newsletter/sample_coupons.json"));
        for (String emailAddress : contactEmails) {
            SendEmailRequest newsletterRequest = SendEmailRequest.builder()
                    .destination(Destination.builder().toAddresses(emailAddress).build())
                    .content(EmailContent.builder()
                            .template(Template.builder()
                                    .templateName(TEMPLATE_NAME)
                                    .templateData(coupons)
                                    .build())
                            .build())
                    .fromEmailAddress(this.verifiedEmail)
                    .listManagementOptions(ListManagementOptions.builder()
                            .contactListName(CONTACT_LIST_NAME)
                            .build())
                    .build();
            SendEmailResponse newsletterResponse = sesClient.sendEmail(newsletterRequest);
            System.out.println("Newsletter sent to " + emailAddress + ": " + newsletterResponse.messageId());
        }
        // snippet-end:[sesv2.java2.newsletter.SendEmail.template]
    } catch (NotFoundException e) {
        // If the contact list does not exist, fail the workflow and inform the user
        System.err.println("The contact list is missing. Please create the contact list and try again.");
    } catch (AccountSuspendedException e) {
        // If the account is suspended, fail the workflow and inform the user
        // NOTE(review): unlike the handlers below, this one does not rethrow, so the
        // caller cannot observe the failure — confirm whether that is intentional.
        System.err.println("Your account is suspended. Please resolve the issue and try again.");
    } catch (MailFromDomainNotVerifiedException e) {
        // If the sending domain is not verified, fail the workflow and inform the user
        System.err.println("The sending domain is not verified. Please verify your domain and try again.");
        throw e;
    } catch (MessageRejectedException e) {
        // If the message is rejected due to invalid content, fail the workflow and
        // inform the user
        System.err.println("The message content is invalid. Please check your template and try again.");
        throw e;
    } catch (SendingPausedException e) {
        // If sending is paused, fail the workflow and inform the user
        System.err.println("Sending is currently paused for your account. Please resolve the issue and try again.");
        throw e;
    } catch (Exception e) {
        System.err.println("Error occurred while sending the newsletter: " + e.getMessage());
        e.printStackTrace();
    }
}
/** When SES throws AccountSuspendedException, a clear suspension message must reach stderr. */
@Test
public void test_sendCouponNewsletter_error_accountSuspended() {
    // Mock the necessary AWS SDK calls and responses
    CreateEmailTemplateResponse templateResponse = CreateEmailTemplateResponse.builder().build();
    when(sesClient.createEmailTemplate(any(CreateEmailTemplateRequest.class))).thenReturn(templateResponse);

    ListContactsResponse contactListResponse = ListContactsResponse.builder()
            .contacts(Contact.builder().emailAddress("user@example.com").build())
            .build();
    when(sesClient.listContacts(any(ListContactsRequest.class))).thenReturn(
            contactListResponse);

    when(sesClient.sendEmail(any(SendEmailRequest.class))).thenThrow(
            AccountSuspendedException.class);

    try {
        scenario.sendCouponNewsletter();
    } catch (Exception e) {
        // ignored: the scenario is expected to handle the failure and report it on stderr
    }

    String errorOutput = errContent.toString();
    assertThat(errorOutput,
            containsString("Your account is suspended. Please resolve the issue and try again."));
}
/**
 * Converts an ODPS (MaxCompute) {@code TypeInfo} into the corresponding engine
 * {@code Type}.
 *
 * <p>Complex types (MAP, ARRAY, STRUCT) are converted recursively. STRING and JSON
 * both map to the default catalog string type, and any unrecognized ODPS type falls
 * back to {@code Type.VARCHAR}.
 *
 * @param typeInfo the ODPS type descriptor
 * @return the converted engine type
 */
public static Type convertType(TypeInfo typeInfo) {
    switch (typeInfo.getOdpsType()) {
        case BIGINT:
            return Type.BIGINT;
        case INT:
            return Type.INT;
        case SMALLINT:
            return Type.SMALLINT;
        case TINYINT:
            return Type.TINYINT;
        case FLOAT:
            return Type.FLOAT;
        case DECIMAL:
            // Precision and scale are carried over from the ODPS decimal.
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            return ScalarType.createUnifiedDecimalType(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
        case DOUBLE:
            return Type.DOUBLE;
        case CHAR:
            CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
            return ScalarType.createCharType(charTypeInfo.getLength());
        case VARCHAR:
            VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
            return ScalarType.createVarcharType(varcharTypeInfo.getLength());
        case STRING:
        case JSON:
            return ScalarType.createDefaultCatalogString();
        case BINARY:
            return Type.VARBINARY;
        case BOOLEAN:
            return Type.BOOLEAN;
        case DATE:
            return Type.DATE;
        case TIMESTAMP:
        case DATETIME:
            return Type.DATETIME;
        case MAP:
            // Recursively convert key and value types.
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return new MapType(convertType(mapTypeInfo.getKeyTypeInfo()),
                    convertType(mapTypeInfo.getValueTypeInfo()));
        case ARRAY:
            ArrayTypeInfo arrayTypeInfo = (ArrayTypeInfo) typeInfo;
            return new ArrayType(convertType(arrayTypeInfo.getElementTypeInfo()));
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            List<Type> fieldTypeList = structTypeInfo.getFieldTypeInfos().stream().map(EntityConvertUtils::convertType)
                    .collect(Collectors.toList());
            return new StructType(fieldTypeList);
        default:
            // Unknown ODPS types degrade to VARCHAR rather than failing.
            return Type.VARCHAR;
    }
}
/** Converting DECIMAL(5,2) must produce the expected decimal type for small precisions. */
@Test
public void testConvertTypeCaseDecimalLessThanOrEqualMaxDecimal32Precision() {
    DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(5, 2);
    Type result = EntityConvertUtils.convertType(decimalTypeInfo);
    Type expectedType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 5, 2);
    assertEquals(expectedType, result);
}
/**
 * Adds the given vertex to the graph with an empty adjacency list.
 * Adding a vertex that is already present is a no-op.
 *
 * @param vertex the vertex to add
 */
public void addVertex(V vertex) {
    if (!containsVertex(vertex)) {
        neighbors.put(vertex, new ArrayList<>());
    }
}
/** An added vertex must be visible via containsVertex. */
@Test
void addVertex() {
    graph.addVertex('H');
    assertTrue(graph.containsVertex('H'));
}
/**
 * Returns the file that stores the analysis context properties
 * ({@code context-props.pb}) inside the report directory.
 */
public File contextProperties() {
    return new File(dir, "context-props.pb");
}
/** contextProperties() must point at the context-props.pb file inside the directory. */
@Test
public void contextProperties_file() throws Exception {
    File dir = temp.newFolder();
    File file = new File(dir, "context-props.pb");
    FileUtils.write(file, "content");

    FileStructure structure = new FileStructure(dir);
    assertThat(structure.contextProperties()).exists().isFile().isEqualTo(file);
}
/**
 * Maps a client response to a short human-readable error label, or
 * {@code Optional.empty()} when the response is a usable
 * {@code AssignReplicasToDirsResponse} without a top-level error.
 *
 * <p>Checks are ordered from transport-level failures (absent response,
 * authentication, timeout, disconnect, version mismatch) to payload problems
 * (missing or wrong-typed body) to the response-level error code.
 *
 * @param response the response, or empty when the request timed out entirely
 * @return the error label, or empty when the response is error-free
 */
static Optional<String> globalResponseError(Optional<ClientResponse> response) {
    if (!response.isPresent()) {
        return Optional.of("Timeout");
    }
    if (response.get().authenticationException() != null) {
        return Optional.of("AuthenticationException");
    }
    if (response.get().wasTimedOut()) {
        // NOTE(review): "Disonnected" is a typo for "Disconnected", but the string is
        // asserted verbatim by tests (and may be scraped from logs) — fix only in
        // lockstep with those consumers.
        return Optional.of("Disonnected[Timeout]");
    }
    if (response.get().wasDisconnected()) {
        return Optional.of("Disconnected");
    }
    if (response.get().versionMismatch() != null) {
        return Optional.of("UnsupportedVersionException");
    }
    if (response.get().responseBody() == null) {
        return Optional.of("EmptyResponse");
    }
    if (!(response.get().responseBody() instanceof AssignReplicasToDirsResponse)) {
        return Optional.of("ClassCastException");
    }
    AssignReplicasToDirsResponseData data = ((AssignReplicasToDirsResponse)
        response.get().responseBody()).data();
    Errors error = Errors.forCode(data.errorCode());
    if (error != Errors.NONE) {
        return Optional.of("Response-level error: " + error.name());
    }
    return Optional.empty();
}
@Test public void testGlobalResponseErrorDisconnectedTimedOut() { assertEquals(Optional.of("Disonnected[Timeout]"), AssignmentsManager.globalResponseError(Optional.of( new ClientResponse(null, null, "", 0, 0, true, true, null, null, null)))); }
public Set<Long> findCmdIds(List<Status> statusList) throws JobDoesNotExistException { Set<Long> set = new HashSet<>(); for (Map.Entry<Long, CmdInfo> x : mInfoMap.entrySet()) { if (statusList.isEmpty() || statusList.contains(getCmdStatus( x.getValue().getJobControlId()))) { Long key = x.getKey(); set.add(key); } } return set; }
@Test public void testFindCmdIdsForMultipleCmds() throws Exception { long cancelId = generateMigrateCommandForStatus(Status.CANCELED); long runningIdA = generateMigrateCommandForStatus(Status.RUNNING); long runningIdB = generateMigrateCommandForStatus(Status.RUNNING); long failedId = generateMigrateCommandForStatus(Status.FAILED); long completedIdA = generateMigrateCommandForStatus(Status.COMPLETED); long completedIB = generateMigrateCommandForStatus(Status.COMPLETED); long createdId = generateMigrateCommandForStatus(Status.CREATED); // test cancel cmd ids. mSearchingCriteria.add(Status.CANCELED); Set<Long> cancelCmdIds = mCmdJobTracker.findCmdIds(mSearchingCriteria); Assert.assertEquals(cancelCmdIds.size(), 1); Assert.assertTrue(cancelCmdIds.contains(cancelId)); // test completed cmd ids. mSearchingCriteria.clear(); mSearchingCriteria.add(Status.COMPLETED); Set<Long> completedCmdIds = mCmdJobTracker.findCmdIds(mSearchingCriteria); Assert.assertEquals(completedCmdIds.size(), 2); Assert.assertTrue(completedCmdIds.contains(completedIdA)); Assert.assertTrue(completedCmdIds.contains(completedIB)); // test failed cmd ids. mSearchingCriteria.clear(); mSearchingCriteria.add(Status.FAILED); Set<Long> failCmdIds = mCmdJobTracker.findCmdIds(mSearchingCriteria); Assert.assertEquals(failCmdIds.size(), 1); Assert.assertTrue(failCmdIds.contains(failedId)); // test running cmd ids. mSearchingCriteria.clear(); mSearchingCriteria.add(Status.RUNNING); Set<Long> runningCmdIds = mCmdJobTracker.findCmdIds(mSearchingCriteria); Assert.assertEquals(runningCmdIds.size(), 3); // 2 running commands + 1 created command. Assert.assertTrue(runningCmdIds.contains(runningIdA)); Assert.assertTrue(runningCmdIds.contains(runningIdB)); Assert.assertTrue(runningCmdIds.contains(createdId)); // test running and completed cmd ids. 
mSearchingCriteria.clear(); mSearchingCriteria.add(Status.COMPLETED); mSearchingCriteria.add(Status.RUNNING); Set<Long> ids = mCmdJobTracker.findCmdIds(mSearchingCriteria); Assert.assertEquals(ids.size(), 5); Assert.assertTrue(ids.contains(completedIdA)); Assert.assertTrue(ids.contains(completedIB)); Assert.assertTrue(ids.contains(runningIdA)); Assert.assertTrue(ids.contains(runningIdB)); Assert.assertTrue(ids.contains(createdId)); }
public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties) { return buildCloudConfigurationForStorage(properties, false); }
@Test public void testAzureADLS2ManagedIdentity() { Map<String, String> map = new HashMap<>() { { put(CloudConfigurationConstants.AZURE_ADLS2_OAUTH2_CLIENT_ENDPOINT, "endpoint"); put(CloudConfigurationConstants.AZURE_ADLS2_OAUTH2_CLIENT_SECRET, "client-secret"); put(CloudConfigurationConstants.AZURE_ADLS2_OAUTH2_CLIENT_ID, "client-id"); } }; CloudConfiguration cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map); Assert.assertEquals(cc.getCloudType(), CloudType.AZURE); Configuration conf = new Configuration(); cc.applyToConfiguration(conf); Assert.assertEquals("OAuth", conf.get("fs.azure.account.auth.type")); Assert.assertEquals("org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider", conf.get("fs.azure.account.oauth.provider.type")); Assert.assertEquals("client-secret", conf.get("fs.azure.account.oauth2.client.secret")); Assert.assertEquals("client-id", conf.get("fs.azure.account.oauth2.client.id")); Assert.assertEquals("endpoint", conf.get("fs.azure.account.oauth2.client.endpoint")); }
public SystemPoller(List<VespaService> services, Duration interval) { this.services = services; this.interval = interval; systemPollTimer = new Timer("systemPollTimer", true); jiffiesInterface = new GetJiffies() { @Override public JiffiesAndCpus getTotalSystemJiffies() { return SystemPoller.getTotalSystemJiffies(); } @Override public long getJiffies(VespaService service) { return SystemPoller.getPidJiffies(service); } }; lastTotalCpuJiffies = jiffiesInterface.getTotalSystemJiffies(); for (VespaService s : services) { lastCpuJiffiesMetrics.put(s, jiffiesInterface.getJiffies(s)); } }
@Test public void testSystemPoller() { DummyService s = new DummyService(0, "id"); List<VespaService> services = new ArrayList<>(); services.add(s); assertFalse(s.isAlive()); long n = SystemPoller.getPidJiffies(s); assertEquals(0L, n); long[] memusage = SystemPoller.getMemoryUsage(s); assertEquals(0L, memusage[0]); assertEquals(0L, memusage[1]); }
@Override public boolean isCancelled() { if (delegate == null) { return isCancel; } return delegate.isCancelled(); }
@Test public void isCancelled() { final Future<HttpResponse> delegate = Mockito.mock(Future.class); FutureDecorator decorator = new FutureDecorator(null); ReflectUtils.setFieldValue(decorator, "delegate", delegate); decorator.isCancelled(); Mockito.verify(delegate, Mockito.times(1)).isCancelled(); }
@Override public boolean syncVerifyData(DistroData verifyData, String targetServer) { if (isNoExistTarget(targetServer)) { return true; } // replace target server as self server so that can callback. verifyData.getDistroKey().setTargetServer(memberManager.getSelf().getAddress()); DistroDataRequest request = new DistroDataRequest(verifyData, DataOperation.VERIFY); Member member = memberManager.find(targetServer); if (checkTargetServerStatusUnhealthy(member)) { Loggers.DISTRO .warn("[DISTRO] Cancel distro verify caused by target server {} unhealthy, key: {}", targetServer, verifyData.getDistroKey()); return false; } try { Response response = clusterRpcClientProxy.sendRequest(member, request); return checkResponse(response); } catch (NacosException e) { Loggers.DISTRO.error("[DISTRO-FAILED] Verify distro data failed! key: {} ", verifyData.getDistroKey(), e); } return false; }
@Test void testSyncVerifyDataForMemberUnhealthy() throws NacosException { DistroData verifyData = new DistroData(); verifyData.setDistroKey(new DistroKey()); when(memberManager.hasMember(member.getAddress())).thenReturn(true); when(memberManager.find(member.getAddress())).thenReturn(member); assertFalse(transportAgent.syncVerifyData(verifyData, member.getAddress())); verify(clusterRpcClientProxy, never()).sendRequest(any(Member.class), any()); }
@Override @SuppressWarnings("unchecked") public int run() throws IOException { Preconditions.checkArgument(targets != null && targets.size() >= 1, "A Parquet file is required."); Preconditions.checkArgument(targets.size() == 1, "Cannot process multiple Parquet files."); String source = targets.get(0); try (ParquetFileReader reader = ParquetFileReader.open(getConf(), qualifiedPath(source))) { MessageType schema = reader.getFileMetaData().getSchema(); ColumnDescriptor descriptor = Util.descriptor(column, schema); PrimitiveType type = Util.primitive(column, schema); Preconditions.checkNotNull(type); DictionaryPageReadStore dictionaryReader; int rowGroup = 0; while ((dictionaryReader = reader.getNextDictionaryReader()) != null) { DictionaryPage page = dictionaryReader.readDictionaryPage(descriptor); if (page != null) { console.info("\nRow group {} dictionary for \"{}\":", rowGroup, column); Dictionary dict = page.getEncoding().initDictionary(descriptor, page); printDictionary(dict, type); } else { console.info("\nRow group {} has no dictionary for \"{}\"", rowGroup, column); } reader.skipNextRowGroup(); rowGroup += 1; } } console.info(""); return 0; }
@Test public void testShowDirectoryCommandWithoutDictionaryEncoding() throws IOException { File file = parquetFile(); ShowDictionaryCommand command = new ShowDictionaryCommand(createLogger()); command.targets = Arrays.asList(file.getAbsolutePath()); // the 'double_field' column does not have dictionary encoding command.column = DOUBLE_FIELD; command.setConf(new Configuration()); Assert.assertEquals(0, command.run()); }
public RuntimeOptionsBuilder parse(String... args) { return parse(Arrays.asList(args)); }
@Test void order_type_default_none() { RuntimeOptions options = parser .parse() .build(); Pickle a = createPickle("file:path/file1.feature", "a"); Pickle b = createPickle("file:path/file2.feature", "b"); assertThat(options.getPickleOrder() .orderPickles(Arrays.asList(a, b)), contains(a, b)); }
@Override public boolean match(final String rule) { return rule.matches("^email$"); }
@Test public void match() { assertTrue(generator.match("email")); assertFalse(generator.match("mail")); assertFalse(generator.match("Email")); }
@Nullable public IssueStatus getIssueStatus() { return IssueStatus.of(status, resolution); }
@Test void getIssueStatus_shouldReturnOpen_whenStatusIsNull() { IssueDto dto = new IssueDto(); assertThat(dto.getIssueStatus()) .isEqualTo(IssueStatus.OPEN); }
public PropertyDto setKey(String key) { checkArgument(key.length() <= MAX_KEY_LENGTH, "Setting key length (%s) is longer than the maximum authorized (%s). '%s' was provided", key.length(), MAX_KEY_LENGTH, key); this.key = key; return this; }
@Test void fail_if_key_longer_than_512_characters() { String veryLongKey = Strings.repeat("a", 513); assertThatThrownBy(() -> underTest.setKey(veryLongKey)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Setting key length (513) is longer than the maximum authorized (512). '" + veryLongKey + "' was provided"); }
public void addCvssBelow(Double cvss) { this.cvssBelow.add(cvss); }
@Test @SuppressWarnings("squid:S2699") public void testAddCvssBelow() { //already tested, this is just left so the IDE doesn't recreate it. }
@Override public void unregister(URL url) { if (url == null) { throw new IllegalArgumentException("unregister url == null"); } if (url.getPort() != 0) { if (logger.isInfoEnabled()) { logger.info("Unregister: " + url); } } registered.remove(url); }
@Test void testUnregisterIfUrlNull() { Assertions.assertThrows(IllegalArgumentException.class, () -> { abstractRegistry.unregister(null); Assertions.fail("unregister url == null"); }); }
@Override public String toString() { return "EipAttribute{" + "id='" + id + '\'' + ", exchangesTotal=" + exchangesTotal + ", index=" + index + ", totalProcessingTime=" + totalProcessingTime + ", properties=" + properties + ", childEipMap=" + childEipMap + '}'; }
@Test public void testToString() { String toString = getInstance().toString(); assertNotNull(toString); assertTrue(toString.contains("EipAttribute")); }
@Override public Space get() throws BackgroundException { return new SMBAttributesFinderFeature(session).find(new DefaultHomeFinderService(session).find()).getQuota(); }
@Test public void testGet() throws Exception { final Quota.Space quota = new SMBQuotaFeature(session).get(); assertNotNull(quota.available); assertNotNull(quota.used); assertNotEquals(0L, quota.available, 0L); assertNotEquals(0L, quota.used, 0L); }
@Override public int hashCode() { return Objects.hash(memberId, groupInstanceId, clientId, host, assignment, targetAssignment); }
@Test public void testEqualsWithGroupInstanceId() { // Check self equality. assertEquals(STATIC_MEMBER_DESCRIPTION, STATIC_MEMBER_DESCRIPTION); MemberDescription identityDescription = new MemberDescription(MEMBER_ID, INSTANCE_ID, CLIENT_ID, HOST, ASSIGNMENT); assertEquals(STATIC_MEMBER_DESCRIPTION, identityDescription); assertEquals(STATIC_MEMBER_DESCRIPTION.hashCode(), identityDescription.hashCode()); }
@Override public void run() { if (processor != null) { processor.execute(); } else { if (!beforeHook()) { logger.info("before-feature hook returned [false], aborting: {}", this); } else { scenarios.forEachRemaining(this::processScenario); } afterFeature(); } }
@Test void testCallOnceJsFromFeatureUtilsDefinedInKarateConfig() { System.setProperty("karate.env", "callonce"); run("callonce-config.feature", "classpath:com/intuit/karate/core/"); matchContains(fr.result.getVariables(), "{ foo: 'hello foo' }"); System.clearProperty("karate.env"); }
public Map<Integer, List<Integer>> replicasAssignments() { return replicasAssignments; }
@Test public void testUnmodifiableReplicasAssignments() { Map<Integer, List<Integer>> replicasAssignments = new HashMap<>(); replicasAssignments.put(0, BROKER_IDS); NewTopic newTopic = new NewTopic(TEST_TOPIC, replicasAssignments); Map<Integer, List<Integer>> returnedAssignments = newTopic.replicasAssignments(); assertThrows(UnsupportedOperationException.class, () -> returnedAssignments.put(1, Arrays.asList(3, 4)) ); }
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) throws IOException { try { userSession.checkIsSystemAdministrator(); } catch (ForbiddenException e) { AuthenticationError.handleError(request, response, "User needs to be logged in as system administrator to access this page."); return; } String csrfState = oAuthCsrfVerifier.generateState(request, response); try { samlAuthenticator.initLogin(oAuth2ContextFactory.generateCallbackUrl(SamlIdentityProvider.KEY), VALIDATION_RELAY_STATE + "/" + csrfState, request, response); } catch (IllegalArgumentException | IllegalStateException e) { response.sendRedirect("/" + SAML_VALIDATION_CONTROLLER_CONTEXT + "/" + SAML_VALIDATION_KEY + "?CSRFToken=" + csrfState); } }
@Test public void do_filter_as_anonymous() throws IOException { userSession.anonymous(); HttpRequest servletRequest = mock(HttpRequest.class); HttpResponse servletResponse = mock(HttpResponse.class); FilterChain filterChain = mock(FilterChain.class); String callbackUrl = "http://localhost:9000/api/validation_test"; when(oAuth2ContextFactory.generateCallbackUrl(anyString())) .thenReturn(callbackUrl); underTest.doFilter(servletRequest, servletResponse, filterChain); verifyNoInteractions(samlAuthenticator); verify(servletResponse).sendRedirect(anyString()); }
public Status download(String remoteFilePath, String localFilePath) { // 0. list to get to full name(with checksum) List<RemoteFile> remoteFiles = Lists.newArrayList(); Status status = storage.list(remoteFilePath + "*", remoteFiles); if (!status.ok()) { return status; } if (remoteFiles.size() != 1) { return new Status(ErrCode.COMMON_ERROR, "Expected one file with path: " + remoteFilePath + ". get: " + remoteFiles.size()); } if (!remoteFiles.get(0).isFile()) { return new Status(ErrCode.COMMON_ERROR, "Expected file with path: " + remoteFilePath + ". but get dir"); } String remoteFilePathWithChecksum = replaceFileNameWithChecksumFileName(remoteFilePath, remoteFiles.get(0).getName()); LOG.debug("get download filename with checksum: " + remoteFilePathWithChecksum); // 1. get checksum from remote file name Pair<String, String> pair = decodeFileNameWithChecksum(remoteFilePathWithChecksum); if (pair == null) { return new Status(ErrCode.COMMON_ERROR, "file name should contains checksum: " + remoteFilePathWithChecksum); } if (!remoteFilePath.endsWith(pair.first)) { return new Status(ErrCode.COMMON_ERROR, "File does not exist: " + remoteFilePath); } String md5sum = pair.second; // 2. download status = storage.downloadWithFileSize(remoteFilePathWithChecksum, localFilePath, remoteFiles.get(0).getSize()); if (!status.ok()) { return status; } // 3. verify checksum String localMd5sum; try { localMd5sum = DigestUtils.md5Hex(new FileInputStream(localFilePath)); } catch (FileNotFoundException e) { return new Status(ErrCode.NOT_FOUND, "file " + localFilePath + " does not exist"); } catch (IOException e) { return new Status(ErrCode.COMMON_ERROR, "failed to get md5sum of file: " + localFilePath); } if (!localMd5sum.equals(md5sum)) { return new Status(ErrCode.BAD_FILE, "md5sum does not equal. local: " + localMd5sum + ", remote: " + md5sum); } return Status.OK; }
@Test public void testDownload() { String localFilePath = "./tmp_" + System.currentTimeMillis(); File localFile = new File(localFilePath); try { try (PrintWriter out = new PrintWriter(localFile)) { out.print("a"); } catch (FileNotFoundException e) { e.printStackTrace(); Assert.fail(); } new Expectations() { { storage.list(anyString, (List<RemoteFile>) any); minTimes = 0; result = new Delegate() { public Status list(String remotePath, List<RemoteFile> result) { result.add(new RemoteFile("remote_file.0cc175b9c0f1b6a831c399e269772661", true, 100)); return Status.OK; } }; storage.downloadWithFileSize(anyString, anyString, anyLong); minTimes = 0; result = Status.OK; } }; repo = new Repository(10000, "repo", false, location, storage); String remoteFilePath = location + "/remote_file"; Status st = repo.download(remoteFilePath, localFilePath); Assert.assertTrue(st.ok()); } finally { localFile.delete(); } }
public Object nextEntry() throws IOException { return null; }
@Test public void testNextEntry() throws IOException { assertNull( inStream.nextEntry() ); }
@Override @CacheEvict(cacheNames = RedisKeyConstants.DEPT_CHILDREN_ID_LIST, allEntries = true) // allEntries 清空所有缓存,因为操作一个部门,涉及到多个缓存 public void deleteDept(Long id) { // 校验是否存在 validateDeptExists(id); // 校验是否有子部门 if (deptMapper.selectCountByParentId(id) > 0) { throw exception(DEPT_EXITS_CHILDREN); } // 删除部门 deptMapper.deleteById(id); }
@Test public void testDeleteDept_exitsChildren() { // mock 数据 DeptDO parentDept = randomPojo(DeptDO.class); deptMapper.insert(parentDept);// @Sql: 先插入出一条存在的数据 // 准备参数 DeptDO childrenDeptDO = randomPojo(DeptDO.class, o -> { o.setParentId(parentDept.getId()); o.setStatus(randomCommonStatus()); }); // 插入子部门 deptMapper.insert(childrenDeptDO); // 调用, 并断言异常 assertServiceException(() -> deptService.deleteDept(parentDept.getId()), DEPT_EXITS_CHILDREN); }
public boolean overlap(final Window other) throws IllegalArgumentException { if (getClass() != other.getClass()) { throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type " + other.getClass() + "."); } final SessionWindow otherWindow = (SessionWindow) other; return !(otherWindow.endMs < startMs || endMs < otherWindow.startMs); }
@Test public void shouldOverlapIfOtherWindowEndIsWithinThisWindow() { /* * This: [-------] * Other: [---------] */ assertTrue(window.overlap(new SessionWindow(0, start))); assertTrue(window.overlap(new SessionWindow(0, start + 1))); assertTrue(window.overlap(new SessionWindow(0, 75))); assertTrue(window.overlap(new SessionWindow(0, end - 1))); assertTrue(window.overlap(new SessionWindow(0, end))); assertTrue(window.overlap(new SessionWindow(start - 1, start))); assertTrue(window.overlap(new SessionWindow(start - 1, start + 1))); assertTrue(window.overlap(new SessionWindow(start - 1, 75))); assertTrue(window.overlap(new SessionWindow(start - 1, end - 1))); assertTrue(window.overlap(new SessionWindow(start - 1, end))); }
@Override public boolean trySetComparator(Comparator<? super V> comparator) { String className = comparator.getClass().getName(); final String comparatorSign = className + ":" + calcClassSign(className); Boolean res = commandExecutor.get(commandExecutor.evalWriteAsync(list.getRawName(), StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN, "if redis.call('llen', KEYS[1]) == 0 then " + "redis.call('set', KEYS[2], ARGV[1]); " + "return 1; " + "else " + "return 0; " + "end", Arrays.asList(list.getRawName(), getComparatorKeyName()), comparatorSign)); if (res) { this.comparator = comparator; } return res; }
@Test public void testTrySetComparator() { RSortedSet<Integer> set = redisson.getSortedSet("set"); boolean setRes = set.trySetComparator(Collections.reverseOrder()); Assertions.assertTrue(setRes); Assertions.assertTrue(set.add(1)); Assertions.assertTrue(set.add(2)); Assertions.assertTrue(set.add(3)); Assertions.assertTrue(set.add(4)); Assertions.assertTrue(set.add(5)); assertThat(set).containsExactly(5, 4, 3, 2, 1); boolean setRes2 = set.trySetComparator(Collections.reverseOrder(Collections.reverseOrder())); Assertions.assertFalse(setRes2); assertThat(set).containsExactly(5, 4, 3, 2, 1); set.clear(); boolean setRes3 = set.trySetComparator(Collections.reverseOrder(Collections.reverseOrder())); Assertions.assertTrue(setRes3); set.add(3); set.add(1); set.add(2); assertThat(set).containsExactly(1, 2, 3); }
@Override public Batch read(@Nullable ShufflePosition startPosition, @Nullable ShufflePosition endPosition) throws IOException { final BatchRange range = new BatchRange(startPosition, endPosition); try { return cache.get(range); } catch (RuntimeException | ExecutionException e) { Throwables.propagateIfPossible(e, IOException.class); throw new RuntimeException("unexpected", e); } }
@Test public void readerShouldNotCacheExceptions() throws IOException { ShuffleBatchReader base = mock(ShuffleBatchReader.class); CachingShuffleBatchReader reader = new CachingShuffleBatchReader(base); when(base.read(null, null)).thenThrow(new IOException("test")).thenReturn(testBatch); try { reader.read(null, null); fail("expected an IOException"); } catch (Exception e) { // Nothing to do -- exception is expected. } assertThat(reader.read(null, null), equalTo(testBatch)); verify(base, times(2)).read(null, null); }
@Override public CiConfiguration loadConfiguration() { String revision = system.envVariable(PROPERTY_COMMIT); if (isEmpty(revision)) { LoggerFactory.getLogger(getClass()).warn("Missing environment variable " + PROPERTY_COMMIT); } return new CiConfigurationImpl(revision, getName()); }
@Test public void log_warning_if_missing_commit_variable() { setEnvVariable("CIRRUS_PR", "1234"); CiConfiguration configuration = underTest.loadConfiguration(); assertThat(configuration.getScmRevision()).isEmpty(); assertThat(logs.logs(Level.WARN)).contains("Missing environment variable CIRRUS_CHANGE_IN_REPO"); }
@Override public void processElement(StreamRecord<IN> element) throws Exception { output.collect(element.replace(userFunction.map(element.getValue()))); }
@Test void testMap() throws Exception { StreamMap<Integer, String> operator = new StreamMap<Integer, String>(new Map()); OneInputStreamOperatorTestHarness<Integer, String> testHarness = new OneInputStreamOperatorTestHarness<Integer, String>(operator); long initialTime = 0L; ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>(); testHarness.open(); testHarness.processElement(new StreamRecord<Integer>(1, initialTime + 1)); testHarness.processElement(new StreamRecord<Integer>(2, initialTime + 2)); testHarness.processWatermark(new Watermark(initialTime + 2)); testHarness.processElement(new StreamRecord<Integer>(3, initialTime + 3)); expectedOutput.add(new StreamRecord<String>("+2", initialTime + 1)); expectedOutput.add(new StreamRecord<String>("+3", initialTime + 2)); expectedOutput.add(new Watermark(initialTime + 2)); expectedOutput.add(new StreamRecord<String>("+4", initialTime + 3)); TestHarnessUtil.assertOutputEquals( "Output was not correct.", expectedOutput, testHarness.getOutput()); }
@Override public V getAndRemove(long timestamp) { return get(getAndRemoveAsync(timestamp)); }
@Test public void testGetAndRemove() { RTimeSeries<String, String> t = redisson.getTimeSeries("test"); t.add(1, "10", "100"); t.add(2, "20"); t.add(3, "30", "300", Duration.ofSeconds(2)); t.add(4, "40"); String s1 = t.getAndRemove(1); assertThat(s1).isEqualTo("10"); String s2 = t.getAndRemove(2); assertThat(s2).isEqualTo("20"); String s3 = t.getAndRemove(3); assertThat(s3).isEqualTo("30"); assertThat(t.size()).isEqualTo(1); }
@Override public void processWatermark(org.apache.flink.streaming.api.watermark.Watermark mark) throws Exception { // if we receive a Long.MAX_VALUE watermark we forward it since it is used // to signal the end of input and to not block watermark progress downstream if (mark.getTimestamp() == Long.MAX_VALUE) { wmOutput.emitWatermark(Watermark.MAX_WATERMARK); } }
@Test void inputWatermarksAreNotForwarded() throws Exception { OneInputStreamOperatorTestHarness<Long, Long> testHarness = createTestHarness( WatermarkStrategy.forGenerator((ctx) -> new PeriodicWatermarkGenerator()) .withTimestampAssigner((ctx) -> new LongExtractor())); testHarness.processWatermark(createLegacyWatermark(42L)); testHarness.setProcessingTime(AUTO_WATERMARK_INTERVAL); assertThat(testHarness.getOutput()).isEmpty(); }
public static CompactSerializationConfig newCompactSerializationConfig( List<String> serializerClassNames, List<String> compactSerializableClassNames ) { CompactSerializationConfig config = new CompactSerializationConfig(); for (String compactSerializableClassName : compactSerializableClassNames) { CompactSerializationConfigAccessor.registerClass(config, compactSerializableClassName); } for (String serializerClassName : serializerClassNames) { CompactSerializationConfigAccessor.registerSerializer(config, serializerClassName); } return config; }
@Test public void should_create_compact_serialization_config() { List<String> compactSerializerClassNames = new ArrayList<>(); List<String> compactSerializableClassNames = new ArrayList<>(); compactSerializerClassNames.add("a"); compactSerializableClassNames.add("b"); CompactSerializationConfig config = ConfigFactory.newCompactSerializationConfig( compactSerializerClassNames, compactSerializableClassNames ); assertThat(CompactSerializationConfigAccessor.getSerializerClassNames(config)) .isEqualTo(compactSerializerClassNames); assertThat(CompactSerializationConfigAccessor.getCompactSerializableClassNames(config)) .isEqualTo(compactSerializableClassNames); }
public String siteUrlFor(String givenUrl) throws URISyntaxException { return siteUrlFor(givenUrl, false); }
@Test public void shouldGenerateSiteUrlUsingConfiguredSiteUrlForAuth() throws URISyntaxException { ServerSiteUrlConfig url = new SiteUrl("http://someurl.com"); assertThat(url.siteUrlFor("http://admin:badger@test.host/foo"), is("http://admin:badger@someurl.com/foo")); assertThat(url.siteUrlFor("http://admin@test.host/foo"), is("http://admin@someurl.com/foo")); }
static boolean configsEqual(Config configs, Map<String, String> expectedValues) { for (Map.Entry<String, String> entry : expectedValues.entrySet()) { ConfigEntry configEntry = configs.get(entry.getKey()); if (configEntry == null || configEntry.value() == null || configEntry.value().isEmpty()) { if (entry.getValue() != null) { return false; } } else if (configEntry.source().equals(ConfigEntry.ConfigSource.STATIC_BROKER_CONFIG) && entry.getValue() == null) { LOG.debug("Found static broker config: {}, skipping comparison", configEntry); } else if (!Objects.equals(entry.getValue(), configEntry.value())) { return false; } } return true; }
@Test public void testConfigsEqual() { Map<String, String> expectedConfigs = new HashMap<>(); List<ConfigEntry> entries = new ArrayList<>(); assertTrue(ReplicationThrottleHelper.configsEqual(EMPTY_CONFIG, expectedConfigs)); expectedConfigs.put("name1", "value1"); assertFalse(ReplicationThrottleHelper.configsEqual(EMPTY_CONFIG, expectedConfigs)); entries.add(new ConfigEntry("name1", "value1")); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); expectedConfigs.put("name2", null); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); entries.add(new ConfigEntry("name2", "")); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); expectedConfigs.put("name3", null); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); entries.add(new ConfigEntry("name3", null)); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); expectedConfigs.put("name4", "value4"); assertFalse(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); entries.add(new ConfigEntry("name4", "other-value")); assertFalse(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); expectedConfigs.put("name4", "other-value"); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); // In the case that a dynamic config is deleted and a static config exists, the comparison is skipped ConfigEntry mockStaticConfig = mockConfigEntry("name5", "value5", ConfigEntry.ConfigSource.STATIC_BROKER_CONFIG); expectedConfigs.put("name5", null); entries.add(mockStaticConfig); assertTrue(ReplicationThrottleHelper.configsEqual(new Config(entries), expectedConfigs)); EasyMock.verify(mockStaticConfig); }
List<EpochEntry> getLeaderEpochEntries(UnifiedLog log, long startOffset, long endOffset) { if (log.leaderEpochCache().isDefined()) { return log.leaderEpochCache().get().epochEntriesInRange(startOffset, endOffset); } else { return Collections.emptyList(); } }
@Test void testGetLeaderEpochCheckpoint() { checkpoint.write(totalEpochEntries); LeaderEpochFileCache cache = new LeaderEpochFileCache(tp, checkpoint, scheduler); when(mockLog.leaderEpochCache()).thenReturn(Option.apply(cache)); assertEquals(totalEpochEntries, remoteLogManager.getLeaderEpochEntries(mockLog, 0, 300)); List<EpochEntry> epochEntries = remoteLogManager.getLeaderEpochEntries(mockLog, 100, 200); assertEquals(1, epochEntries.size()); assertEquals(epochEntry1, epochEntries.get(0)); }
@Override public void process(Exchange exchange) throws Exception { final SchematronProcessor schematronProcessor = SchematronProcessorFactory.newSchematronEngine(endpoint.getRules()); final Object payload = exchange.getIn().getBody(); final String report; if (payload instanceof Source) { LOG.debug("Applying schematron validation on payload: {}", payload); report = schematronProcessor.validate((Source) payload); } else if (payload instanceof String) { LOG.debug("Applying schematron validation on payload: {}", payload); report = schematronProcessor.validate((String) payload); } else { String stringPayload = exchange.getIn().getBody(String.class); LOG.debug("Applying schematron validation on payload: {}", stringPayload); report = schematronProcessor.validate(stringPayload); } LOG.debug("Schematron validation report \n {}", report); String status = getValidationStatus(report); LOG.info("Schematron validation status : {}", status); setValidationReport(exchange, report, status); }
@Test public void testProcessInValidXML() throws Exception { Exchange exc = new DefaultExchange(context, ExchangePattern.InOut); exc.getIn().setBody(ClassLoader.getSystemResourceAsStream("xml/article-2.xml")); // process xml payload producer.process(exc); // assert assertEquals(Constants.FAILED, exc.getMessage().getHeader(Constants.VALIDATION_STATUS)); }
public Map<String, Object> convertValue(final Object entity, final Class<?> entityClass) { if (entityClass.equals(String.class)) { return Collections.singletonMap("data", objectMapper.convertValue(entity, String.class)); } else if (!entityClass.equals(Void.class) && !entityClass.equals(void.class)) { final TypeReference<Map<String, Object>> typeRef = new TypeReference<>() { }; try { return objectMapper.convertValue(entity, typeRef); } catch (IllegalArgumentException e) { // Try to convert the response to a list if converting to a map failed. final TypeReference<List<Object>> arrayTypeRef = new TypeReference<>() { }; return Collections.singletonMap("data", objectMapper.convertValue(entity, arrayTypeRef)); } } return null; }
@Test public void convertsSingleEntity() { final Map<String, Object> result = toTest.convertValue(new SimpleEntity("Text", 1), SimpleEntity.class); assertNotNull(result); assertEquals("Text", result.get("text")); assertEquals(1, result.get("number")); }
@Udf(description = "Converts a string representation of a date in the given format" + " into the TIMESTAMP value." + " Single quotes in the timestamp format can be escaped with ''," + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'.") public Timestamp parseTimestamp( @UdfParameter( description = "The string representation of a date.") final String formattedTimestamp, @UdfParameter( description = "The format pattern should be in the format expected by" + " java.time.format.DateTimeFormatter.") final String formatPattern) { return parseTimestamp(formattedTimestamp, formatPattern, ZoneId.of("GMT").getId()); }
@Test public void shouldSupportUTCTimeZone() { // When: final Object result = udf.parseTimestamp("2018-08-15 17:10:43", "yyyy-MM-dd HH:mm:ss", "UTC"); // Then: assertThat(result, is(new Timestamp(1534353043000L))); }