focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override public List<ImportValidationFeedback> verifyRule( Object subject ) { List<ImportValidationFeedback> feedback = new ArrayList<>(); if ( !isEnabled() || !( subject instanceof JobMeta ) ) { return feedback; } JobMeta jobMeta = (JobMeta) subject; String description = jobMeta.getDescription(); if ( null != description && minLength <= description.length() ) { feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.APPROVAL, "A description is present" ) ); } else { feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.ERROR, "A description is not present or too short" ) ); } return feedback; }
@Test public void testVerifyRule_NullDescription_DisabledRule() { JobHasDescriptionImportRule importRule = getImportRule( 10, false ); JobMeta jobMeta = new JobMeta(); jobMeta.setDescription( null ); List<ImportValidationFeedback> feedbackList = importRule.verifyRule( null ); assertNotNull( feedbackList ); assertTrue( feedbackList.isEmpty() ); }
@DataPermission(enable = false) // 忽略数据权限,避免因为过滤,导致找不到候选人 public Set<Long> calculateUsers(DelegateExecution execution) { Integer strategy = BpmnModelUtils.parseCandidateStrategy(execution.getCurrentFlowElement()); String param = BpmnModelUtils.parseCandidateParam(execution.getCurrentFlowElement()); // 1.1 计算任务的候选人 Set<Long> userIds = getCandidateStrategy(strategy).calculateUsers(execution, param); // 1.2 移除被禁用的用户 removeDisableUsers(userIds); // 2. 校验是否有候选人 if (CollUtil.isEmpty(userIds)) { log.error("[calculateUsers][流程任务({}/{}/{}) 任务规则({}/{}) 找不到候选人]", execution.getId(), execution.getProcessDefinitionId(), execution.getCurrentActivityId(), strategy, param); throw exception(TASK_CREATE_FAIL_NO_CANDIDATE_USER); } return userIds; }
@Test public void testCalculateUsers() { // 准备参数 String param = "1,2"; DelegateExecution execution = mock(DelegateExecution.class); // mock 方法(DelegateExecution) UserTask userTask = mock(UserTask.class); when(execution.getCurrentFlowElement()).thenReturn(userTask); when(userTask.getAttributeValue(eq(BpmnModelConstants.NAMESPACE), eq(BpmnModelConstants.USER_TASK_CANDIDATE_STRATEGY))) .thenReturn(BpmTaskCandidateStrategyEnum.USER.getStrategy().toString()); when(userTask.getAttributeValue(eq(BpmnModelConstants.NAMESPACE), eq(BpmnModelConstants.USER_TASK_CANDIDATE_PARAM))) .thenReturn(param); // mock 方法(adminUserApi) AdminUserRespDTO user1 = randomPojo(AdminUserRespDTO.class, o -> o.setId(1L) .setStatus(CommonStatusEnum.ENABLE.getStatus())); AdminUserRespDTO user2 = randomPojo(AdminUserRespDTO.class, o -> o.setId(2L) .setStatus(CommonStatusEnum.ENABLE.getStatus())); Map<Long, AdminUserRespDTO> userMap = MapUtil.builder(user1.getId(), user1) .put(user2.getId(), user2).build(); when(adminUserApi.getUserMap(eq(asSet(1L, 2L)))).thenReturn(userMap); // 调用 Set<Long> results = taskCandidateInvoker.calculateUsers(execution); // 断言 assertEquals(asSet(1L, 2L), results); }
public void encryptColumns( String inputFile, String outputFile, List<String> paths, FileEncryptionProperties fileEncryptionProperties) throws IOException { Path inPath = new Path(inputFile); Path outPath = new Path(outputFile); RewriteOptions options = new RewriteOptions.Builder(conf, inPath, outPath) .encrypt(paths) .encryptionProperties(fileEncryptionProperties) .build(); ParquetRewriter rewriter = new ParquetRewriter(options); rewriter.processBlocks(); rewriter.close(); }
@Test public void testAesGcm() throws IOException { String[] encryptColumns = {"DocId"}; testSetup("GZIP"); columnEncryptor.encryptColumns( inputFile.getFileName(), outputFile, Arrays.asList(encryptColumns), EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_V1, true)); verifyResultDecryptionWithValidKey(); }
public static Set<Set<LogicalVertex>> computePipelinedRegions( final Iterable<? extends LogicalVertex> topologicallySortedVertices) { final Map<LogicalVertex, Set<LogicalVertex>> vertexToRegion = PipelinedRegionComputeUtil.buildRawRegions( topologicallySortedVertices, LogicalPipelinedRegionComputeUtil::getMustBePipelinedConsumedResults); // Since LogicalTopology is a DAG, there is no need to do cycle detection nor to merge // regions on cycles. return uniqueVertexGroups(vertexToRegion); }
@Test void testTwoInputsMergesIntoOne() { JobVertex v1 = new JobVertex("v1"); JobVertex v2 = new JobVertex("v2"); JobVertex v3 = new JobVertex("v3"); JobVertex v4 = new JobVertex("v4"); v3.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v3.connectNewDataSetAsInput( v2, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); Set<Set<LogicalVertex>> regions = computePipelinedRegions(v1, v2, v3, v4); checkRegionSize(regions, 2, 3, 1); }
@Override public Destination getDestination( JmsDelegate meta ) { checkNotNull( meta.destinationName, getString( JmsConstants.PKG, "JmsWebsphereMQ.DestinationNameRequired" ) ); try { String destName = meta.destinationName; return isQueue( meta ) ? new MQQueue( destName ) : new MQTopic( destName ); } catch ( JMSException e ) { throw new IllegalStateException( e ); } }
@Test public void getQueueDestination() { jmsDelegate.destinationType = QUEUE.name(); jmsDelegate.destinationName = "somename"; Destination dest = jmsProvider.getDestination( jmsDelegate ); assertTrue( dest instanceof Queue ); }
public static Optional<KiePMMLModel> getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCompiled(final CompilationDTO compilationDTO) { logger.trace("getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCompiled {}", compilationDTO); final Function<ModelImplementationProvider<Model, KiePMMLModel>, KiePMMLModel> modelFunction = implementation -> implementation.getKiePMMLModelWithSourcesCompiled(compilationDTO); return getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCommon(compilationDTO.getFields(), compilationDTO.getModel(), modelFunction); }
@Test void getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCompiledWithoutProvider() throws Exception { String fileName = MINING_MODEL_WITH_NESTED_REFERS_SOURCE.substring(0, MINING_MODEL_WITH_NESTED_REFERS_SOURCE.lastIndexOf('.')); pmml = KiePMMLUtil.load(getFileInputStream(MINING_MODEL_WITH_NESTED_REFERS_SOURCE), MINING_MODEL_WITH_NESTED_REFERS_SOURCE); MiningModel parentModel = (MiningModel) pmml.getModels().get(0); Model model = parentModel.getSegmentation().getSegments().get(0).getModel(); final CommonCompilationDTO compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, model, new PMMLCompilationContextMock(), fileName); final Optional<KiePMMLModel> retrieved = getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCompiled(compilationDTO); assertThat(retrieved).isNotNull(); assertThat(retrieved).isNotPresent(); }
public StreamDestinationFilterRuleDTO deleteFromStream(String streamId, String id) { final var dto = utils.getById(id) .orElseThrow(() -> new IllegalArgumentException(f("Couldn't find document with ID <%s> for deletion", id))); if (collection.deleteOne(and(eq(FIELD_STREAM_ID, streamId), idEq(id))).getDeletedCount() > 0) { clusterEventBus.post(StreamDestinationFilterDeletedEvent.of(id)); } return dto; }
@Test @MongoDBFixtures("StreamDestinationFilterServiceTest-2024-07-01-1.json") void deleteFromStream() { final var optionalDto = service.findByIdForStream("54e3deadbeefdeadbeef1000", "54e3deadbeefdeadbeef0000"); assertThat(optionalDto).isPresent(); final var deletedDto = service.deleteFromStream("54e3deadbeefdeadbeef1000", "54e3deadbeefdeadbeef0000"); assertThat(deletedDto.id()).isEqualTo("54e3deadbeefdeadbeef0000"); assertThat(service.findByIdForStream("54e3deadbeefdeadbeef1000", "54e3deadbeefdeadbeef0000")).isNotPresent(); }
public ApplicationBuilder readinessProbe(String readinessProbe) { this.readinessProbe = readinessProbe; return getThis(); }
@Test void readinessProbe() { ApplicationBuilder builder = new ApplicationBuilder(); builder.readinessProbe("TestProbe"); Assertions.assertEquals("TestProbe", builder.build().getReadinessProbe()); }
@Override public Num getValue(int index) { return values.get(index); }
@Test public void cashFlowBuyWithOnlyOnePosition() { BarSeries sampleBarSeries = new MockBarSeries(numFunction, 1d, 2d); TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, sampleBarSeries), Trade.sellAt(1, sampleBarSeries)); CashFlow cashFlow = new CashFlow(sampleBarSeries, tradingRecord); assertNumEquals(1, cashFlow.getValue(0)); assertNumEquals(2, cashFlow.getValue(1)); }
Optional<Integer> findOneStaleBroker() { BrokerHeartbeatStateIterator iterator = unfenced.iterator(); if (iterator.hasNext()) { BrokerHeartbeatState broker = iterator.next(); // The unfenced list is sorted on last contact time from each // broker. If the first broker is not stale, then none is. if (!hasValidSession(broker)) { return Optional.of(broker.id); } } return Optional.empty(); }
@Test public void testFindOneStaleBroker() { BrokerHeartbeatManager manager = newBrokerHeartbeatManager(); MockTime time = (MockTime) manager.time(); assertFalse(manager.hasValidSession(0)); for (int brokerId = 0; brokerId < 3; brokerId++) { manager.register(brokerId, true); } manager.touch(0, false, 0); time.sleep(5); manager.touch(1, false, 0); time.sleep(1); manager.touch(2, false, 0); Iterator<BrokerHeartbeatState> iter = manager.unfenced().iterator(); assertEquals(0, iter.next().id()); assertEquals(1, iter.next().id()); assertEquals(2, iter.next().id()); assertFalse(iter.hasNext()); assertEquals(Optional.empty(), manager.findOneStaleBroker()); time.sleep(5); assertEquals(Optional.of(0), manager.findOneStaleBroker()); manager.fence(0); assertEquals(Optional.empty(), manager.findOneStaleBroker()); iter = manager.unfenced().iterator(); assertEquals(1, iter.next().id()); assertEquals(2, iter.next().id()); assertFalse(iter.hasNext()); time.sleep(20); assertEquals(Optional.of(1), manager.findOneStaleBroker()); manager.fence(1); assertEquals(Optional.of(2), manager.findOneStaleBroker()); manager.fence(2); assertEquals(Optional.empty(), manager.findOneStaleBroker()); iter = manager.unfenced().iterator(); assertFalse(iter.hasNext()); }
@Override public void setValue(String value) throws IOException { checkValue(value); // if there are export values/an Opt entry there is a different // approach to setting the value if (!getExportValues().isEmpty()) { updateByOption(value); } else { updateByValue(value); } applyChange(); }
@Test void setAbstractedCheckboxGroupInvalidValue() throws IOException { PDField checkbox = acrobatAcroForm.getField("CheckboxGroup"); // Set a value which doesn't match the radio button list assertThrows(IllegalArgumentException.class, () -> checkbox.setValue("InvalidValue")); }
public void unlock() { if (lockFile != null) { try { lockFile.release(); lockFile = null; } catch (IOException e) { LOGGER.error("Error releasing lock", e); } } if (lockChannel != null) { try { lockChannel.close(); lockChannel = null; } catch (IOException e) { LOGGER.error("Error closing file channel", e); } } if (lockRandomAccessFile != null) { try { lockRandomAccessFile.close(); lockRandomAccessFile = null; } catch (IOException e) { LOGGER.error("Error closing file", e); } } }
@Test public void unlockWithoutLock() { lock.unlock(); }
@Override public void apply(ProcessRoutersRequest request, PolarisRouterContext routerContext) { //1. get feature env router label key String envLabelKey = routerContext.getLabel(LABEL_KEY_FEATURE_ENV_ROUTER_KEY); if (StringUtils.isBlank(envLabelKey)) { envLabelKey = DEFAULT_FEATURE_ENV_ROUTER_LABEL; } //2. get feature env router label value String envLabelValue = routerContext.getLabel(envLabelKey); if (envLabelValue == null) { // router to base env when not matched feature env envLabelValue = NOT_EXISTED_ENV; } //3. set env metadata to router request Set<RouteArgument> routeArguments = new HashSet<>(1); routeArguments.add(RouteArgument.buildCustom(envLabelKey, envLabelValue)); request.putRouterArgument(MetadataRouter.ROUTER_TYPE_METADATA, routeArguments); //4. set failover type to others request.setMetadataFailoverType(MetadataFailoverType.METADATAFAILOVERNOTKEY); }
@Test public void testDefaultRouterKey() { Map<String, String> labels = new HashMap<>(); labels.put("featureenv", "blue"); PolarisRouterContext routerContext = new PolarisRouterContext(); routerContext.putLabels(RouterConstant.ROUTER_LABELS, labels); ProcessRoutersRequest request = new ProcessRoutersRequest(); ServiceInstances serviceInstances = new DefaultServiceInstances(Mockito.mock(ServiceKey.class), new ArrayList<>()); request.setDstInstances(serviceInstances); FeatureEnvRouterRequestInterceptor interceptor = new FeatureEnvRouterRequestInterceptor(); interceptor.apply(request, routerContext); Map<String, String> metadataRouterLabels = request.getRouterMetadata().get(MetadataRouter.ROUTER_TYPE_METADATA); assertThat(metadataRouterLabels.size()).isEqualTo(1); assertThat(metadataRouterLabels.get("featureenv")).isEqualTo("blue"); }
public Hashlet<String,Integer> apply(Hashlet<String,Integer> input) { return mappings.computeIfAbsent(input, k -> filter(k)); }
@Test void testFiltering() { var h1 = makeHash(); var h2 = makeHash(); var h3 = makeHash(); var f1 = new MatchFeatureFilter(List.of("foo", "baz", "four")); var f2 = new MatchFeatureFilter(Set.of("bar", "five")); var f3 = new MatchFeatureFilter(List.of("not", "bar", "nope")); var fAll = new MatchFeatureFilter(Set.of("foo", "bar", "baz", "four", "five")); var h4 = f1.apply(h1); var h5 = f1.apply(h2); var h6 = f1.apply(h1); var h7 = f1.apply(h3); assertEquals(2, h4.size()); assertEquals(1, h4.get("bar")); assertEquals(5, h4.get("five")); assertEquals(h4, h5); assertEquals(h4, h6); assertEquals(h4, h7); // check that we get same instance out if we put the same instance in (only) assertFalse(h4 == h5); assertTrue(h4 == h6); assertFalse(h4 == h7); assertTrue(h5 == f1.apply(h2)); assertTrue(h7 == f1.apply(h3)); var h8 = f2.apply(h1); assertEquals(3, h8.size()); assertEquals(0, h8.get("foo")); assertEquals(2, h8.get("baz")); assertEquals(4, h8.get("four")); assertTrue(h8 == f2.apply(h1)); var h9 = f3.apply(h1); assertEquals(4, h9.size()); assertNull(h9.get("bar")); var empty = fAll.apply(h1); assertEquals(0, empty.size()); }
@Override public String toString() { return "DistCpOptions{" + "atomicCommit=" + atomicCommit + ", syncFolder=" + syncFolder + ", deleteMissing=" + deleteMissing + ", ignoreFailures=" + ignoreFailures + ", overwrite=" + overwrite + ", append=" + append + ", useDiff=" + useDiff + ", useRdiff=" + useRdiff + ", fromSnapshot=" + fromSnapshot + ", toSnapshot=" + toSnapshot + ", skipCRC=" + skipCRC + ", blocking=" + blocking + ", numListstatusThreads=" + numListstatusThreads + ", maxMaps=" + maxMaps + ", mapBandwidth=" + mapBandwidth + ", copyStrategy='" + copyStrategy + '\'' + ", preserveStatus=" + preserveStatus + ", atomicWorkPath=" + atomicWorkPath + ", logPath=" + logPath + ", sourceFileListing=" + sourceFileListing + ", sourcePaths=" + sourcePaths + ", targetPath=" + targetPath + ", filtersFile='" + filtersFile + '\'' + ", blocksPerChunk=" + blocksPerChunk + ", copyBufferSize=" + copyBufferSize + ", verboseLog=" + verboseLog + ", directWrite=" + directWrite + ", useiterator=" + useIterator + ", updateRoot=" + updateRoot + '}'; }
@Test public void testToString() { DistCpOptions option = new DistCpOptions.Builder(new Path("abc"), new Path("xyz")).build(); String val = "DistCpOptions{atomicCommit=false, syncFolder=false, " + "deleteMissing=false, ignoreFailures=false, overwrite=false, " + "append=false, useDiff=false, useRdiff=false, " + "fromSnapshot=null, toSnapshot=null, " + "skipCRC=false, blocking=true, numListstatusThreads=0, maxMaps=20, " + "mapBandwidth=0.0, copyStrategy='uniformsize', preserveStatus=[], " + "atomicWorkPath=null, logPath=null, sourceFileListing=abc, " + "sourcePaths=null, targetPath=xyz, filtersFile='null', " + "blocksPerChunk=0, copyBufferSize=8192, verboseLog=false, " + "directWrite=false, useiterator=false, updateRoot=false}"; String optionString = option.toString(); Assert.assertEquals(val, optionString); Assert.assertNotSame(DistCpOptionSwitch.ATOMIC_COMMIT.toString(), DistCpOptionSwitch.ATOMIC_COMMIT.name()); }
@Override public Db2ConnectorEmbeddedDebeziumConfiguration getConfiguration() { return configuration; }
@Test void testIfConnectorEndpointCreatedWithConfig() throws Exception { final Map<String, Object> params = new HashMap<>(); params.put("offsetStorageFileName", "/offset_test_file"); params.put("databaseHostname", "localhost"); params.put("databaseUser", "dbz"); params.put("databasePassword", "pwd"); params.put("topicPrefix", "test"); params.put("databaseServerId", 1234); params.put("schemaHistoryInternalFileFilename", "/db_history_file_test"); final String remaining = "test_name"; final String uri = "debezium?name=test_name&offsetStorageFileName=/test&" + "databaseHostname=localhost&databaseServerId=1234&databaseUser=dbz&databasePassword=pwd&" + "topicPrefix=test&schemaHistoryInternalFileFilename=/test"; try (final DebeziumComponent debeziumComponent = new DebeziumDb2Component(new DefaultCamelContext())) { debeziumComponent.start(); final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, remaining, params); assertNotNull(debeziumEndpoint); // test for config final Db2ConnectorEmbeddedDebeziumConfiguration configuration = (Db2ConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration(); assertEquals("test_name", configuration.getName()); assertEquals("/offset_test_file", configuration.getOffsetStorageFileName()); assertEquals("localhost", configuration.getDatabaseHostname()); assertEquals("dbz", configuration.getDatabaseUser()); assertEquals("pwd", configuration.getDatabasePassword()); assertEquals("test", configuration.getTopicPrefix()); assertEquals("/db_history_file_test", configuration.getSchemaHistoryInternalFileFilename()); } }
@Override public Map<String, Object> assembleFrom(OAuth2AccessTokenEntity accessToken, UserInfo userInfo, Set<String> authScopes) { Map<String, Object> result = newLinkedHashMap(); OAuth2Authentication authentication = accessToken.getAuthenticationHolder().getAuthentication(); result.put(ACTIVE, true); if (accessToken.getPermissions() != null && !accessToken.getPermissions().isEmpty()) { Set<Object> permissions = Sets.newHashSet(); for (Permission perm : accessToken.getPermissions()) { Map<String, Object> o = newLinkedHashMap(); o.put("resource_set_id", perm.getResourceSet().getId().toString()); Set<String> scopes = Sets.newHashSet(perm.getScopes()); o.put("scopes", scopes); permissions.add(o); } result.put("permissions", permissions); } else { Set<String> scopes = Sets.intersection(authScopes, accessToken.getScope()); result.put(SCOPE, Joiner.on(SCOPE_SEPARATOR).join(scopes)); } if (accessToken.getExpiration() != null) { try { result.put(EXPIRES_AT, dateFormat.valueToString(accessToken.getExpiration())); result.put(EXP, accessToken.getExpiration().getTime() / 1000L); } catch (ParseException e) { logger.error("Parse exception in token introspection", e); } } if (userInfo != null) { // if we have a UserInfo, use that for the subject result.put(SUB, userInfo.getSub()); } else { // otherwise, use the authentication's username result.put(SUB, authentication.getName()); } if(authentication.getUserAuthentication() != null) { result.put(USER_ID, authentication.getUserAuthentication().getName()); } result.put(CLIENT_ID, authentication.getOAuth2Request().getClientId()); result.put(TOKEN_TYPE, accessToken.getTokenType()); return result; }
@Test public void shouldAssembleExpectedResultForAccessTokenWithoutUserAuthentication() throws ParseException { // given OAuth2AccessTokenEntity accessToken = accessToken(new Date(123 * 1000L), scopes("foo", "bar"), null, "Bearer", oauth2Authentication(oauth2Request("clientId"), null)); Set<String> authScopes = scopes("foo", "bar", "baz"); // when Map<String, Object> result = assembler.assembleFrom(accessToken, null, authScopes); // then `user_id` should not be present Map<String, Object> expected = new ImmutableMap.Builder<String, Object>() .put("sub", "clientId") .put("exp", 123L) .put("expires_at", dateFormat.valueToString(new Date(123 * 1000L))) .put("scope", "bar foo") .put("active", Boolean.TRUE) .put("client_id", "clientId") .put("token_type", "Bearer") .build(); assertThat(result, is(equalTo(expected))); }
@Override public Optional<SimpleLock> lock(LockConfiguration lockConfiguration) { boolean lockObtained = doLock(lockConfiguration); if (lockObtained) { return Optional.of(new StorageLock(lockConfiguration, storageAccessor)); } else { return Optional.empty(); } }
@Test void updateOnDuplicateKey() { when(storageAccessor.insertRecord(LOCK_CONFIGURATION)).thenReturn(false); when(storageAccessor.updateRecord(LOCK_CONFIGURATION)).thenReturn(true); assertThat(lockProvider.lock(LOCK_CONFIGURATION)).isNotEmpty(); verify(storageAccessor).updateRecord(LOCK_CONFIGURATION); // Should update directly without insert reset(storageAccessor); when(storageAccessor.updateRecord(LOCK_CONFIGURATION)).thenReturn(true); assertThat(lockProvider.lock(LOCK_CONFIGURATION)).isNotEmpty(); verify(storageAccessor, never()).insertRecord(LOCK_CONFIGURATION); verify(storageAccessor).updateRecord(LOCK_CONFIGURATION); }
public static UBinary create(Kind binaryOp, UExpression lhs, UExpression rhs) { checkArgument( OP_CODES.containsKey(binaryOp), "%s is not a supported binary operation", binaryOp); return new AutoValue_UBinary(binaryOp, lhs, rhs); }
@Test public void greaterThan() { assertUnifiesAndInlines( "4 > 17", UBinary.create(Kind.GREATER_THAN, ULiteral.intLit(4), ULiteral.intLit(17))); }
@Override public void start() { Optional<String> passcodeOpt = configuration.get(WEB_SYSTEM_PASS_CODE.getKey()) // if present, result is never empty string .map(StringUtils::trimToNull); if (passcodeOpt.isPresent()) { logState("enabled"); configuredPasscode = passcodeOpt.get(); } else { logState("disabled"); configuredPasscode = null; } }
@Test public void startup_logs_show_that_feature_is_enabled() { configurePasscode("foo"); underTest.start(); assertThat(logTester.logs(Level.INFO)).contains("System authentication by passcode is enabled"); }
@Override public void onServerStart(Server server) { if (scheduler != null) { scheduler.startScheduling(); } }
@Test public void when_scheduler_should_start_on_server_start() { var scheduler = mock(TokenExpirationNotificationScheduler.class); var underTest = new TokenExpirationNotificationInitializer(scheduler); underTest.onServerStart(mock(Server.class)); verify(scheduler, times(1)).startScheduling(); }
@VisibleForTesting String getCacheStats() { StringJoiner cacheStats = new StringJoiner("\n"); cacheStats.add("========== CACHE STATS =========="); cacheStats.add(cache.describeStats()); return cacheStats.toString(); }
@Test public void testCacheStatsExist() { ManagedChannelFactory channelFactory = ManagedChannelFactory.createInProcess(); BeamFnStatusClient client = new BeamFnStatusClient( apiServiceDescriptor, channelFactory::forDescriptor, mock(BundleProcessorCache.class), PipelineOptionsFactory.create(), Caches.fromOptions( PipelineOptionsFactory.fromArgs("--maxCacheMemoryUsageMb=234").create())); assertThat(client.getCacheStats(), containsString("used/max 0/234 MB")); }
@Override public MetricsReporter createMetricsReporter(URL url) { try { return new PrometheusMetricsReporter(url, getApplicationModel()); } catch (NoClassDefFoundError ncde) { String msg = ncde.getMessage(); if (dependenciesNotFound(msg)) { logger.error( INTERNAL_ERROR, "", "", "Failed to load class \"org.apache.dubbo.metrics.prometheus.PrometheusMetricsReporter\".", ncde); logger.error( INTERNAL_ERROR, "", "", "Defaulting to no-operation (NOP) metricsReporter implementation", ncde); logger.error( INTERNAL_ERROR, "", "", "Introduce the micrometer-core package to use the ability of metrics", ncde); return new NopPrometheusMetricsReporter(); } else { logger.error(INTERNAL_ERROR, "", "", "Failed to instantiate PrometheusMetricsReporter", ncde); throw ncde; } } }
@Test void test() { ApplicationModel applicationModel = ApplicationModel.defaultModel(); PrometheusMetricsReporterFactory factory = new PrometheusMetricsReporterFactory(applicationModel); MetricsReporter reporter = factory.createMetricsReporter(URL.valueOf("prometheus://localhost:9090/")); Assertions.assertTrue(reporter instanceof PrometheusMetricsReporter); }
public static String getMethodResourceName(Invoker<?> invoker, Invocation invocation){ return getMethodResourceName(invoker, invocation, false); }
@Test public void testGetResourceNameWithGroupAndVersion() throws NoSuchMethodException { Invoker invoker = mock(Invoker.class); URL url = URL.valueOf("dubbo://127.0.0.1:2181") .addParameter(CommonConstants.VERSION_KEY, "1.0.0") .addParameter(CommonConstants.GROUP_KEY, "grp1") .addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName()); when(invoker.getUrl()).thenReturn(url); when(invoker.getInterface()).thenReturn(DemoService.class); Invocation invocation = mock(Invocation.class); Method method = DemoService.class.getDeclaredMethod("sayHello", String.class, int.class); when(invocation.getMethodName()).thenReturn(method.getName()); when(invocation.getParameterTypes()).thenReturn(method.getParameterTypes()); String resourceNameUseGroupAndVersion = DubboUtils.getMethodResourceName(invoker, invocation, true); assertEquals("com.alibaba.csp.sentinel.adapter.dubbo3.provider.DemoService:1.0.0:grp1:sayHello(java.lang.String,int)", resourceNameUseGroupAndVersion); }
public void setRemoveBrackets(boolean removeBrackets) { this.removeBrackets = removeBrackets; }
@Test public void testMethodRemoveBrackets() { SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("CURRENT_SCHEMA"); Assert.assertEquals("CURRENT_SCHEMA()", SQLUtils.toSQLString(methodInvokeExpr)); methodInvokeExpr.setRemoveBrackets(true); Assert.assertEquals("CURRENT_SCHEMA", SQLUtils.toSQLString(methodInvokeExpr)); }
@Override public void onMsg(TbContext ctx, TbMsg msg) { String serviceIdStr = msg.getMetaData().getValue(config.getServiceIdMetaDataAttribute()); String sessionIdStr = msg.getMetaData().getValue(config.getSessionIdMetaDataAttribute()); String requestIdStr = msg.getMetaData().getValue(config.getRequestIdMetaDataAttribute()); if (msg.getOriginator().getEntityType() != EntityType.DEVICE) { ctx.tellFailure(msg, new RuntimeException("Message originator is not a device entity!")); } else if (StringUtils.isEmpty(requestIdStr)) { ctx.tellFailure(msg, new RuntimeException("Request id is not present in the metadata!")); } else if (StringUtils.isEmpty(serviceIdStr)) { ctx.tellFailure(msg, new RuntimeException("Service id is not present in the metadata!")); } else if (StringUtils.isEmpty(sessionIdStr)) { ctx.tellFailure(msg, new RuntimeException("Session id is not present in the metadata!")); } else if (StringUtils.isEmpty(msg.getData())) { ctx.tellFailure(msg, new RuntimeException("Request body is empty!")); } else { if (StringUtils.isNotBlank(msg.getMetaData().getValue(DataConstants.EDGE_ID))) { saveRpcResponseToEdgeQueue(ctx, msg, serviceIdStr, sessionIdStr, requestIdStr); } else { ctx.getRpcService().sendRpcReplyToDevice(serviceIdStr, UUID.fromString(sessionIdStr), Integer.parseInt(requestIdStr), msg.getData()); ctx.tellSuccess(msg); } } }
@Test public void sendReplyToTransport() { when(ctx.getRpcService()).thenReturn(rpcService); TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, deviceId, getDefaultMetadata(), TbMsgDataType.JSON, DUMMY_DATA, null, null); node.onMsg(ctx, msg); verify(rpcService).sendRpcReplyToDevice(DUMMY_SERVICE_ID, DUMMY_SESSION_ID, DUMMY_REQUEST_ID, DUMMY_DATA); verify(edgeEventService, never()).saveAsync(any()); }
public boolean containsKey(final int key) { return initialValue != get(key); }
@Test void shouldNotContainKeyOfAMissingKey() { assertFalse(map.containsKey(1)); }
@Override public HttpAction restore(final CallContext ctx, final String defaultUrl) { val webContext = ctx.webContext(); val sessionStore = ctx.sessionStore(); val optRequestedUrl = sessionStore.get(webContext, Pac4jConstants.REQUESTED_URL); HttpAction requestedAction = null; if (optRequestedUrl.isPresent()) { sessionStore.set(webContext, Pac4jConstants.REQUESTED_URL, null); val requestedUrl = optRequestedUrl.get(); if (requestedUrl instanceof String) { requestedAction = new FoundAction((String) requestedUrl); } else if (requestedUrl instanceof RedirectionAction) { requestedAction = (RedirectionAction) requestedUrl; } } if (requestedAction == null) { requestedAction = new FoundAction(defaultUrl); } LOGGER.debug("requestedAction: {}", requestedAction.getMessage()); if (requestedAction instanceof FoundAction) { return HttpActionHelper.buildRedirectUrlAction(webContext, ((FoundAction) requestedAction).getLocation()); } else { return HttpActionHelper.buildFormPostContentAction(webContext, ((OkAction) requestedAction).getContent()); } }
@Test public void testRestoreOkAction() { val context = MockWebContext.create().setFullRequestURL(PAC4J_URL).addRequestParameter(KEY, VALUE); val formPost = HttpActionHelper.buildFormPostContent(context); val sessionStore = new MockSessionStore(); sessionStore.set(context, Pac4jConstants.REQUESTED_URL, new OkAction(formPost)); val action = handler.restore(new CallContext(context, sessionStore), LOGIN_URL); assertTrue(action instanceof OkAction); assertEquals(FORM_DATA, ((OkAction) action).getContent()); assertFalse(sessionStore.get(context, Pac4jConstants.REQUESTED_URL).isPresent()); }
@Override @Transactional(rollbackFor = Exception.class) @CacheEvict(value = RedisKeyConstants.ROLE, key = "#id") @LogRecord(type = SYSTEM_ROLE_TYPE, subType = SYSTEM_ROLE_DELETE_SUB_TYPE, bizNo = "{{#id}}", success = SYSTEM_ROLE_DELETE_SUCCESS) public void deleteRole(Long id) { // 1. 校验是否可以更新 RoleDO role = validateRoleForUpdate(id); // 2.1 标记删除 roleMapper.deleteById(id); // 2.2 删除相关数据 permissionService.processRoleDeleted(id); // 3. 记录操作日志上下文 LogRecordContext.putVariable(DiffParseFunction.OLD_OBJECT, BeanUtils.toBean(role, RoleSaveReqVO.class)); LogRecordContext.putVariable("role", role); }
@Test public void testDeleteRole() { // mock 数据 RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setType(RoleTypeEnum.CUSTOM.getType())); roleMapper.insert(roleDO); // 参数准备 Long id = roleDO.getId(); // 调用 roleService.deleteRole(id); // 断言 assertNull(roleMapper.selectById(id)); // verify 删除相关数据 verify(permissionService).processRoleDeleted(id); }
@Override public String getMessage() { if (!logPhi) { return super.getMessage(); } String answer; if (hasHl7MessageBytes() || hasHl7AcknowledgementBytes()) { String parentMessage = super.getMessage(); StringBuilder messageBuilder = new StringBuilder( parentMessage.length() + (hasHl7MessageBytes() ? hl7MessageBytes.length : 0) + (hasHl7AcknowledgementBytes() ? hl7AcknowledgementBytes.length : 0)); messageBuilder.append(parentMessage); if (hasHl7MessageBytes()) { messageBuilder.append("\n\t{hl7Message [") .append(hl7MessageBytes.length) .append("] = "); hl7Util.appendBytesAsPrintFriendlyString(messageBuilder, hl7MessageBytes, 0, hl7MessageBytes.length); messageBuilder.append('}'); } if (hasHl7AcknowledgementBytes()) { messageBuilder.append("\n\t{hl7Acknowledgement [") .append(hl7AcknowledgementBytes.length) .append("] = "); hl7Util.appendBytesAsPrintFriendlyString(messageBuilder, hl7AcknowledgementBytes, 0, hl7AcknowledgementBytes.length); messageBuilder.append('}'); } answer = messageBuilder.toString(); } else { answer = super.getMessage(); } return answer; }
@Test public void testNullHl7Message() { instance = new MllpException(EXCEPTION_MESSAGE, null, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE); assertEquals(expectedMessage(null, HL7_ACKNOWLEDGEMENT), instance.getMessage()); }
// Drains the tracked NamespaceWatcher set, asking ZooKeeper to remove each watcher server-side.
// Iterates over a snapshot (localEntries) while removing from the live set (entries) so that a
// watcher is only removed once even if this runs concurrently; removal failures are logged and
// skipped so one bad watcher does not abort the drain.
void removeWatchers() { List<NamespaceWatcher> localEntries = Lists.newArrayList(entries); while (localEntries.size() > 0) { NamespaceWatcher watcher = localEntries.remove(0); if (entries.remove(watcher)) { try { log.debug("Removing watcher for path: " + watcher.getUnfixedPath()); RemoveWatchesBuilderImpl builder = new RemoveWatchesBuilderImpl(client); builder.internalRemoval(watcher, watcher.getUnfixedPath()); } catch (Exception e) { log.error("Could not remove watcher for path: " + watcher.getUnfixedPath()); } } } }
// Verifies removeWatchers cleanly removes one Watcher instance registered for two different
// watch kinds (exists + data) on the same path; closeAndTestClean asserts no leftover watchers.
@Test public void testSameWatcherDifferentKinds() throws Exception { CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1)); try { client.start(); WatcherRemovalFacade removerClient = (WatcherRemovalFacade) client.newWatcherRemoveCuratorFramework(); Watcher watcher = new Watcher() { @Override public void process(WatchedEvent event) { // NOP } }; removerClient.create().creatingParentsIfNeeded().forPath("/a/b/c"); removerClient.checkExists().usingWatcher(watcher).forPath("/a/b/c"); removerClient.getData().usingWatcher(watcher).forPath("/a/b/c"); removerClient.removeWatchers(); } finally { TestCleanState.closeAndTestClean(client); } }
// Applies the single-string wrapText(String, int) overload to every line, returning a new list
// in the same order. Delegated overload is defined elsewhere in this class.
public static List<String> wrapText(List<String> list, int columnWidth) { return list.stream() .map(line -> wrapText(line, columnWidth)) .collect(toList()); }
// Verifies wrapText inserts <br/> at the column width, leaves short strings untouched, and does
// not split a surrogate pair (the \uD800\uDC01 cases).
@Test public void testWrapText_String_int() { assertEquals("te<br/>st", StringUtils.wrapText("test", 2)); assertEquals("tes<br/>t", StringUtils.wrapText("test", 3)); assertEquals("test", StringUtils.wrapText("test", 10)); assertEquals(".\uD800\uDC01<br/>.", StringUtils.wrapText(".\uD800\uDC01.", 2)); assertEquals("..<br/>\uD800\uDC01", StringUtils.wrapText("..\uD800\uDC01", 3)); }
// Deserializes the split: parent fields first, then this class's extra fields via readFromInput.
// Read order must mirror the corresponding write()/writeFields() order.
@Override public void readFields(DataInput in) throws IOException { super.readFields(in); readFromInput(in); }
// Verifies readFields by mocking DataInput: sequenced readInt/readFully answers replay the exact
// byte layout (lengths then UTF-8 payloads) that writeFields would have produced.
@Test public void testReadFields() throws IOException { // create a mock for DataOutput that will be used in the readFields method // this way we can capture and verify if correct arguments were passed DataInput in = mock(DataInput.class); // register the mock responses to be returned when particular method call happens // on the mocked object when(in.readByte()).thenReturn((byte) fileSplitName.length()); // Answer implementation is used to guarantee the response in sequence of the mock method calls // since the same method is called many times, we need to return the responses in proper sequence when(in.readInt()).thenAnswer(new Answer<Integer>() { private int count = 0; private int[] answers = new int[]{basePath.length(), maxCommitTime.length(), deltaLogPaths.size(), deltaLogPaths.get(0).length()}; @Override public Integer answer(InvocationOnMock invocationOnMock) throws Throwable { return answers[count++]; } }); Answer<Void> readFullyAnswer = new Answer<Void>() { private int count = 0; private byte[][] answers = new byte[][] { getUTF8Bytes(fileSplitName), getUTF8Bytes(basePath), getUTF8Bytes(maxCommitTime), getUTF8Bytes(deltaLogPaths.get(0)), }; @Override public Void answer(InvocationOnMock invocation) throws Throwable { byte[] bytes = invocation.getArgument(0); byte[] answer = answers[count++]; System.arraycopy(answer, 0, bytes, 0, answer.length); return null; } }; doAnswer(readFullyAnswer).when(in).readFully(any()); doAnswer(readFullyAnswer).when(in).readFully(any(), anyInt(), anyInt()); // call readFields with mocked object HoodieRealtimeFileSplit read = new HoodieRealtimeFileSplit(); read.readFields(in); // assert proper returns after reading from the mocked object assertEquals(basePath, read.getBasePath()); assertEquals(maxCommitTime, read.getMaxCommitTime()); assertEquals(deltaLogPaths, read.getDeltaLogPaths()); assertEquals(split.toString(), read.toString()); }
// Mutation is unsupported: this is a read-only headers view, so every add* method throws.
@Override public T addChar(K name, char value) { throw new UnsupportedOperationException("read only"); }
// Verifies the read-only headers reject addChar with UnsupportedOperationException.
@Test public void testAddChar() { assertThrows(UnsupportedOperationException.class, new Executable() { @Override public void execute() { HEADERS.addChar("name", 'a'); } }); }
// Returns the analyzer's display name constant.
@Override public String getName() { return ANALYZER_NAME; }
// Verifies ClassNameInformation keeps the raw class path and derives a lower-cased package
// structure without the leading "org" segment or the class segment casing.
@Test public void testClassInformation() { JarAnalyzer.ClassNameInformation instance = new JarAnalyzer.ClassNameInformation("org/owasp/dependencycheck/analyzer/JarAnalyzer"); assertEquals("org/owasp/dependencycheck/analyzer/JarAnalyzer", instance.getName()); List<String> expected = Arrays.asList("owasp", "dependencycheck", "analyzer", "jaranalyzer"); List<String> results = instance.getPackageStructure(); assertEquals(expected, results); }
// Looks up (or lazily creates) the state cell for the given namespace/tag, with a null context.
@Override public <T extends State> T state(StateNamespace namespace, StateTag<T> address) { return workItemState.get(namespace, address, StateContexts.nullContext()); }
// Verifies multimap entries()/get() iterate a huge (simulated 10GB) single-key result lazily:
// the value Iterable regenerates 1M random 10KB values on demand, so a non-lazy implementation
// would OOM. Only sizes are asserted, not contents, since values are random.
@Test public void testMultimapLazyIterateHugeEntriesResultSingleEntry() { // A multimap with 1 key and 1 million values and a total of 10GBs data final String tag = "multimap"; final Integer key = 100; StateTag<MultimapState<Integer, byte[]>> addr = StateTags.multimap(tag, VarIntCoder.of(), ByteArrayCoder.of()); MultimapState<Integer, byte[]> multimapState = underTest.state(NAMESPACE, addr); SettableFuture<Iterable<Map.Entry<ByteString, Iterable<byte[]>>>> entriesFuture = SettableFuture.create(); when(mockReader.multimapFetchAllFuture( false, key(NAMESPACE, tag), STATE_FAMILY, ByteArrayCoder.of())) .thenReturn(entriesFuture); SettableFuture<Iterable<byte[]>> getKeyFuture = SettableFuture.create(); when(mockReader.multimapFetchSingleEntryFuture( encodeWithCoder(key, VarIntCoder.of()), key(NAMESPACE, tag), STATE_FAMILY, ByteArrayCoder.of())) .thenReturn(getKeyFuture); // a not weighted iterators that returns tons of data Iterable<byte[]> values = () -> new Iterator<byte[]>() { final int targetValues = 1_000_000; // return 1 million values, which is 10 GBs final byte[] value = new byte[10_000]; // each value is 10KB final Random rand = new Random(); int returnedValues = 0; @Override public boolean hasNext() { return returnedValues < targetValues; } @Override public byte[] next() { returnedValues++; rand.nextBytes(value); return value; } }; waitAndSet( entriesFuture, Collections.singletonList( new SimpleEntry<>(encodeWithCoder(key, VarIntCoder.of()), values)), 200); waitAndSet(getKeyFuture, values, 200); Iterable<Map.Entry<Integer, byte[]>> entries = multimapState.entries().read(); assertEquals(1_000_000, Iterables.size(entries)); Iterable<byte[]> valueResult = multimapState.get(key).read(); assertEquals(1_000_000, Iterables.size(valueResult)); }
// Builds the struct type of the grouping key: the projection of partition fields that are active
// in every given spec and whose source columns exist in the (possibly projected) schema.
public static StructType groupingKeyType(Schema schema, Collection<PartitionSpec> specs) { return buildPartitionProjectionType("grouping key", specs, commonActiveFieldIds(schema, specs)); }
// Verifies the grouping key type is computed against a projected schema: only the projected
// partition source column ("data") appears in the resulting struct.
@Test public void testGroupingKeyTypeWithProjectedSchema() { TestTables.TestTable table = TestTables.create(tableDir, "test", SCHEMA, BY_CATEGORY_DATA_SPEC, V1_FORMAT_VERSION); Schema projectedSchema = table.schema().select("id", "data"); StructType expectedType = StructType.of(NestedField.optional(1001, "data", Types.StringType.get())); StructType actualType = Partitioning.groupingKeyType(projectedSchema, table.specs().values()); assertThat(actualType).isEqualTo(expectedType); }
// Converts the element and buffers it, then triggers a non-blocking flush. Before buffering,
// blocks the caller by flushing while the buffer is at capacity (backpressure).
// NOTE(review): the while-loop assumes flush() shrinks bufferedRequestEntries below
// maxBufferedRequests; if flush can complete without draining, this could spin — confirm.
@Override public void write(InputT element, Context context) throws IOException, InterruptedException { while (bufferedRequestEntries.size() >= maxBufferedRequests) { flush(); } addEntryToBuffer(elementConverter.apply(element, context), false); nonBlockingFlush(); }
// Verifies a flush fires when batch-count and batch-byte thresholds are crossed at once:
// 7 writes fill one batch (res grows to 7), and 7 more produce a second batch (res == 14).
@Test public void testThatWhenNumberOfItemAndSizeOfRecordThresholdsAreMetSimultaneouslyAFlushOccurs() throws IOException, InterruptedException { AsyncSinkWriterImpl sink = new AsyncSinkWriterImplBuilder() .context(sinkInitContext) .maxBatchSize(7) .maxBatchSizeInBytes(32) .maxRecordSizeInBytes(32) .build(); for (int i = 0; i < 7; i++) { sink.write(String.valueOf(i)); } assertThat(res.size()).isEqualTo(7); for (int i = 7; i < 14; i++) { sink.write(String.valueOf(i)); } assertThat(res.size()).isEqualTo(14); }
// Fetches every refresh token via the named JPA query; LinkedHashSet preserves query order
// while de-duplicating entities.
@Override public Set<OAuth2RefreshTokenEntity> getAllRefreshTokens() { TypedQuery<OAuth2RefreshTokenEntity> query = manager.createNamedQuery(OAuth2RefreshTokenEntity.QUERY_ALL, OAuth2RefreshTokenEntity.class); return new LinkedHashSet<>(query.getResultList()); }
// Verifies getAllRefreshTokens returns all seeded tokens (fixture seeds 5).
@Test public void testGetAllRefreshTokens(){ Set<OAuth2RefreshTokenEntity> tokens = repository.getAllRefreshTokens(); assertEquals(5, tokens.size()); }
// Provider-side auth filter: when service.auth is enabled on the URL, resolves the configured
// Authenticator extension and authenticates the invocation. An authentication failure is
// returned as an async error Result (not thrown), so the RPC completes with the auth exception.
@Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { URL url = invoker.getUrl(); boolean shouldAuth = url.getParameter(Constants.SERVICE_AUTH, false); if (shouldAuth) { Authenticator authenticator = applicationModel .getExtensionLoader(Authenticator.class) .getExtension(url.getParameter(Constants.AUTHENTICATOR, Constants.DEFAULT_AUTHENTICATOR)); try { authenticator.authenticate(invocation, url); } catch (Exception e) { return AsyncRpcResult.newDefaultAsyncResult(e, invocation); } } return invoker.invoke(invocation); }
// Verifies that with service.auth=true the filter actually consults the invocation's
// attachments (i.e. the authenticator ran) during invoke.
@Test void testAuthEnabled() { URL url = URL.valueOf("dubbo://10.10.10.10:2181") .addParameter(Constants.ACCESS_KEY_ID_KEY, "ak") .addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk") .addParameter(CommonConstants.APPLICATION_KEY, "test") .addParameter(Constants.SERVICE_AUTH, true); Invoker invoker = mock(Invoker.class); Invocation invocation = mock(RpcInvocation.class); when(invoker.getUrl()).thenReturn(url); ProviderAuthFilter providerAuthFilter = new ProviderAuthFilter(ApplicationModel.defaultModel()); providerAuthFilter.invoke(invoker, invocation); verify(invocation, atLeastOnce()).getAttachment(anyString()); }
// Advances the shared row view to the next row id and returns it; the reuse argument is ignored
// because the same row object is repositioned in place.
// NOTE(review): no end-of-input guard here — callers appear responsible for checking
// reachedEnd() before calling (see the split-reader loop in the tests); confirm that contract.
@Override public RowData nextRecord(RowData reuse) { // return the next row row.setRowId(this.nextRow++); return row; }
// Verifies reading the flat ORC file across 4 splits yields all 1,920,800 rows with non-null
// columns and the expected sum of column 0.
@Test void testReadFileInSplits() throws IOException { FileInputSplit[] splits = createSplits(testFileFlat, 4); long cnt = 0; long totalF0 = 0; // read all splits for (FileInputSplit split : splits) { try (OrcColumnarRowSplitReader reader = createReader(new int[] {0, 1}, testSchemaFlat, new HashMap<>(), split)) { // read and count all rows while (!reader.reachedEnd()) { RowData row = reader.nextRecord(null); assertThat(row.isNullAt(0)).isFalse(); assertThat(row.isNullAt(1)).isFalse(); totalF0 += row.getInt(0); assertThat(row.getString(1).toString()).isNotNull(); cnt++; } } } // check that all rows have been read assertThat(cnt).isEqualTo(1920800); assertThat(totalF0).isEqualTo(1844737280400L); }
// Validates a checkbox-style review answer: no free text allowed, the referenced question and
// its option group must exist, required questions must be answered, every checked item must be
// one the group provides, and the checked count must be within the group's min/max bounds.
public void validate(CreateReviewAnswerRequest request) { validateNotContainingText(request); Question question = questionRepository.findById(request.questionId()) .orElseThrow(() -> new SubmittedQuestionNotFoundException(request.questionId())); OptionGroup optionGroup = optionGroupRepository.findByQuestionId(question.getId()) .orElseThrow(() -> new OptionGroupNotFoundByQuestionIdException(question.getId())); validateRequiredQuestion(request, question); validateOnlyIncludingProvidedOptionItem(request, optionGroup); validateCheckedOptionItemCount(request, optionGroup); }
// Verifies that answering with an option-item id the group does not provide (saved id + 1)
// raises CheckBoxAnswerIncludedNotProvidedOptionItemException.
@Test void 옵션그룹에서_제공하지_않은_옵션아이템을_응답하면_예외가_발생한다() { // given OptionGroup savedOptionGroup = optionGroupRepository.save( new OptionGroup(savedQuestion.getId(), 1, 3) ); OptionItem savedOptionItem = optionItemRepository.save( new OptionItem("옵션", savedOptionGroup.getId(), 1, OptionType.KEYWORD) ); CreateReviewAnswerRequest request = new CreateReviewAnswerRequest( savedQuestion.getId(), List.of(savedOptionItem.getId() + 1L), null ); // when, then assertThatCode(() -> createCheckBoxAnswerRequestValidator.validate(request)) .isInstanceOf(CheckBoxAnswerIncludedNotProvidedOptionItemException.class); }
/**
 * Two {@code HeaderExchangeChannel} instances are equal iff they wrap the same underlying
 * channel. Uses a strict {@code getClass()} comparison (not {@code instanceof}) so equality
 * stays symmetric in the presence of subclasses.
 */
@Override
public boolean equals(Object obj) {
    // Identity short-circuit.
    if (this == obj) {
        return true;
    }
    // Null or different concrete type can never be equal.
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    // Delegate to the wrapped channel's equality.
    return channel.equals(((HeaderExchangeChannel) obj).channel);
}
// Verifies equals(): equal when wrapping the same channel, and constructing with a null channel
// throws IllegalArgumentException.
// NOTE(review): wrapping the equality assertions inside assertThrows means they only run until
// the first throw — assertions after `new HeaderExchangeChannel(null)` never execute; consider
// splitting the throw check from the equality checks.
@Test void equalsTest() { Assertions.assertThrows(IllegalArgumentException.class, () -> { Assertions.assertEquals(header, new HeaderExchangeChannel(channel)); header = new HeaderExchangeChannel(null); Assertions.assertNotEquals(header, new HeaderExchangeChannel(channel)); }); }
// Returns the previous/next representable value for an orderable discrete/floating type, or
// Optional.empty() when the type has no defined adjacent value (or the value is at the domain
// edge, handled in the per-type helpers). Throws for non-orderable types; value must be non-null.
public static Optional<Object> getAdjacentValue(Type type, Object value, boolean isPrevious) { if (!type.isOrderable()) { throw new IllegalStateException("Type is not orderable: " + type); } requireNonNull(value, "value is null"); if (type.equals(BIGINT) || type instanceof TimestampType) { return getBigintAdjacentValue(value, isPrevious); } if (type.equals(INTEGER) || type.equals(DATE)) { return getIntegerAdjacentValue(value, isPrevious); } if (type.equals(SMALLINT)) { return getSmallIntAdjacentValue(value, isPrevious); } if (type.equals(TINYINT)) { return getTinyIntAdjacentValue(value, isPrevious); } if (type.equals(DOUBLE)) { return getDoubleAdjacentValue(value, isPrevious); } if (type.equals(REAL)) { return getRealAdjacentValue(value, isPrevious); } return Optional.empty(); }
// Verifies next-value behavior for BIGINT across the whole range, including empty at MAX_VALUE.
@Test public void testNextValueForBigint() { long minValue = Long.MIN_VALUE; long maxValue = Long.MAX_VALUE; assertThat(getAdjacentValue(BIGINT, minValue, false)) .isEqualTo(Optional.of(minValue + 1)); assertThat(getAdjacentValue(BIGINT, minValue + 1, false)) .isEqualTo(Optional.of(minValue + 2)); assertThat(getAdjacentValue(BIGINT, 1234L, false)) .isEqualTo(Optional.of(1235L)); assertThat(getAdjacentValue(BIGINT, maxValue - 1, false)) .isEqualTo(Optional.of(maxValue)); assertThat(getAdjacentValue(BIGINT, maxValue, false)) .isEqualTo(Optional.empty()); }
// Divides this resource's value by the given factor, truncating (RoundingMode.DOWN) at
// MAX_VALUE_SCALE decimal places, and wraps the result in a new instance via create().
public T divide(BigDecimal by) { return create(value.divide(by, MAX_VALUE_SCALE, RoundingMode.DOWN)); }
// Verifies integer division of a resource value (0.12 / 4 == 0.03).
// NOTE(review): this exercises a divide(int) overload, not the divide(BigDecimal) shown above —
// presumably the int overload delegates to it; confirm.
@Test void testDivideInteger() { final Resource resource = new TestResource(0.12); final int by = 4; assertTestResourceValueEquals(0.03, resource.divide(by)); }
// Marks the given partition FINISHED inside a read-write transaction and returns the
// transaction's commit timestamp.
public Timestamp updateToFinished(String partitionToken) { final TransactionResult<Void> transactionResult = runInTransaction( transaction -> transaction.updateToFinished(partitionToken), "updateToFinished"); return transactionResult.getCommitTimestamp(); }
// Verifies that updateToFinished, executed inside the transaction context, buffers exactly one
// mutation that flips the partition row to FINISHED, and returns null.
// (Removed a leftover debug System.out.println with a misleading "update to scheduled" message.)
@Test
public void testInTransactionContextUpdateToFinished() {
    // Stub a single-row result set describing a RUNNING partition.
    ResultSet resultSet = mock(ResultSet.class);
    when(transaction.executeQuery(any())).thenReturn(resultSet);
    when(resultSet.next()).thenReturn(true).thenReturn(false);
    when(resultSet.getString(any())).thenReturn(State.RUNNING.toString());
    when(resultSet.getCurrentRowAsStruct()).thenReturn(Struct.newBuilder().build());

    // Capture the mutations buffered on the transaction so they can be inspected.
    ArgumentCaptor<ImmutableList<Mutation>> mutations = ArgumentCaptor.forClass(ImmutableList.class);
    doNothing().when(transaction).buffer(mutations.capture());

    // The in-transaction variant returns no commit timestamp (null).
    assertNull(inTransactionContext.updateToFinished(PARTITION_TOKEN));

    // Exactly one mutation, targeting the right token and moving state to FINISHED.
    assertEquals(1, mutations.getValue().size());
    Map<String, Value> mutationValueMap = mutations.getValue().iterator().next().asMap();
    assertEquals(
        PARTITION_TOKEN,
        mutationValueMap.get(PartitionMetadataAdminDao.COLUMN_PARTITION_TOKEN).getString());
    assertEquals(
        PartitionMetadata.State.FINISHED.toString(),
        mutationValueMap.get(PartitionMetadataAdminDao.COLUMN_STATE).getString());
}
// Computes the effective predicate of a plan subtree by visiting it with the internal Visitor.
public RowExpression extract(PlanNode node) { return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null); }
// Verifies a LimitNode passes its source's effective predicate through unchanged.
@Test public void testLimit() { PlanNode node = new LimitNode( Optional.empty(), newId(), filter(baseTableScan, and( equals(AV, BV), equals(BV, CV), lessThan(CV, bigintLiteral(10)))), 1, FINAL); RowExpression effectivePredicate = effectivePredicateExtractor.extract(node); // Pass through assertEquals(normalizeConjuncts(effectivePredicate), normalizeConjuncts( equals(AV, BV), equals(BV, CV), lessThan(CV, bigintLiteral(10)))); }
// Resolves and validates the user-declared mapping fields for the Compact format: an explicit
// column list is mandatory, the compact type name option must be present (checked via
// getCompactTypeName), top-level __key/this fields are rejected, and the generic OBJECT type is
// rejected because no Compact representation can be derived for it.
@Override public Stream<MappingField> resolveAndValidateFields( boolean isKey, List<MappingField> userFields, Map<String, String> options, InternalSerializationService serializationService ) { if (userFields.isEmpty()) { throw QueryException.error("Column list is required for Compact format"); } Map<QueryPath, MappingField> fieldsByPath = extractFields(userFields, isKey); // Check if the compact type name is specified getCompactTypeName(fieldsByPath, options, isKey); return fieldsByPath.entrySet().stream() .map(entry -> { QueryPath path = entry.getKey(); if (path.isTopLevel()) { throw QueryException.error("Cannot use the '" + path + "' field with Compact serialization"); } QueryDataType type = entry.getValue().type(); if (type == QueryDataType.OBJECT) { throw QueryException.error("Cannot derive Compact type for '" + type.getTypeFamily() + "'"); } return entry.getValue(); }); }
// Verifies (for both key and value sides) that declaring an OBJECT-typed field is rejected
// with a QueryException naming the OBJECT type family.
@Test @Parameters({ "true, __key", "false, this" }) public void test_objectIsForbiddenForCompact(boolean key, String prefix) { InternalSerializationService ss = createSerializationService(); Map<String, String> options = Map.of(key ? OPTION_KEY_COMPACT_TYPE_NAME : OPTION_VALUE_COMPACT_TYPE_NAME, "testAll"); List<MappingField> fields = List.of(field("object", QueryDataType.OBJECT, prefix + ".object")); // TODO: fix compact nested types support? assertThatThrownBy(() -> INSTANCE.resolveAndValidateFields(key, fields, options, ss).collect(toList())) .isInstanceOf(QueryException.class) .hasMessageContaining("Cannot derive Compact type for '" + QueryDataTypeFamily.OBJECT + "'"); }
// Parses a mapper interface once per resource: loads its XML counterpart, registers cache and
// cache-ref config, then for each statement-capable method builds result maps (for @Select
// without an explicit @ResultMap) and statements. Methods that reference not-yet-loaded
// elements are queued as incomplete and re-resolved later; MP mapper children additionally get
// the base-CRUD injector applied (deferred the same way on IncompleteElementException).
@Override public void parse() { String resource = type.toString(); if (!configuration.isResourceLoaded(resource)) { loadXmlResource(); configuration.addLoadedResource(resource); String mapperName = type.getName(); assistant.setCurrentNamespace(mapperName); parseCache(); parseCacheRef(); IgnoreStrategy ignoreStrategy = InterceptorIgnoreHelper.initSqlParserInfoCache(type); for (Method method : type.getMethods()) { if (!canHaveStatement(method)) { continue; } if (getAnnotationWrapper(method, false, Select.class, SelectProvider.class).isPresent() && method.getAnnotation(ResultMap.class) == null) { parseResultMap(method); } try { InterceptorIgnoreHelper.initSqlParserInfoCache(ignoreStrategy, mapperName, method); parseStatement(method); } catch (IncompleteElementException e) { configuration.addIncompleteMethod(new MybatisMethodResolver(this, method)); } } try { // https://github.com/baomidou/mybatis-plus/issues/3038 if (GlobalConfigUtils.isSupperMapperChildren(configuration, type)) { parserInjector(); } } catch (IncompleteElementException e) { configuration.addIncompleteMethod(new InjectorResolver(this)); } } configuration.parsePendingMethods(false); }
// Verifies parsing two mappers into one configuration registers AMapper's insert statement.
@Test void parse() { MybatisConfiguration configuration = new MybatisConfiguration(); MybatisMapperAnnotationBuilder a = new MybatisMapperAnnotationBuilder(configuration, AMapper.class); a.parse(); MybatisMapperAnnotationBuilder b = new MybatisMapperAnnotationBuilder(configuration, BMapper.class); b.parse(); configuration.getMappedStatement(AMapper.class.getName() + ".insert"); }
// Serializes a K8sHost to JSON: host IP, state name, and the node-name set as a JSON array.
// Throws NPE (checkNotNull) for a null entity.
@Override public ObjectNode encode(K8sHost entity, CodecContext context) { checkNotNull(entity, "Kubernetes host cannot be null"); ObjectNode result = context.mapper().createObjectNode() .put(HOST_IP, entity.hostIp().toString()) .put(STATE, entity.state().name()); ArrayNode nodes = context.mapper().createArrayNode(); entity.nodeNames().forEach(nodes::add); result.set(NODE_NAMES, nodes); return result; }
// Verifies the codec round-trips a fully-populated K8sHost via the matchesK8sHost matcher.
@Test public void testK8sHostEncode() { K8sHost host = DefaultK8sHost.builder() .hostIp(IpAddress.valueOf("192.168.200.10")) .state(INIT) .nodeNames(ImmutableSet.of("1", "2")) .build(); ObjectNode hostJson = k8sHostCodec.encode(host, context); assertThat(hostJson, matchesK8sHost(host)); }
// Builds a metric name rooted at the class's fully-qualified name; delegates to the
// String-based overload, which elides null/empty segments.
public static MetricName name(Class<?> klass, String... names) { return name(klass.getName(), names); }
// Verifies multiple null segments are elided, leaving just the base name.
@Test public void elidesNullValuesFromNamesWhenManyNullsPassedIn() throws Exception { assertThat(name("one", null, null)) .isEqualTo(MetricName.build("one")); }
@ConstantFunction(name = "weeks_sub", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true) public static ConstantOperator weeksSub(ConstantOperator date, ConstantOperator week) { return ConstantOperator.createDatetimeOrNull(date.getDatetime().minusWeeks(week.getInt())); }
// Verifies 2015-03-23T09:23:55 minus 10 weeks is 2015-01-12T09:23:55.
@Test public void weeksSub() { assertEquals("2015-01-12T09:23:55", ScalarOperatorFunctions.weeksSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString()); }
// Test-only accessor for the getResourceTypeInfo call counter metric.
@VisibleForTesting public int getGetResourceTypeInfoRetrieved() { return numGetResourceTypeInfo.value(); }
// Verifies a failed getResourceTypeInfo call still increments the retrieval counter by one.
@Test public void testGetResourceTypeInfoFailed() { long totalBadBefore = metrics.getGetResourceTypeInfoRetrieved(); badSubCluster.getResourceTypeInfo(); Assert.assertEquals(totalBadBefore + 1, metrics.getGetResourceTypeInfoRetrieved()); }
// Inserts a date/time stamp into a filename, before the extension. Returns null for an
// empty/null filename. Environment variables in the name are substituted first. When
// specifyFormat is set (with a non-empty pattern) the single datetimeFormat is appended with no
// separator; otherwise date and/or time are each appended with a leading underscore.
// Uses a fresh SimpleDateFormat per call (not thread-shared), formatting "now".
protected String addDatetimeToFilename( String filename, boolean addDate, String datePattern, boolean addTime, String timePattern, boolean specifyFormat, String datetimeFormat ) { if ( Utils.isEmpty( filename ) ) { return null; } // Replace possible environment variables... String realfilename = environmentSubstitute( filename ); String filenameNoExtension = FilenameUtils.removeExtension( realfilename ); String extension = FilenameUtils.getExtension( realfilename ); // If an extension exists, add the corresponding dot before if ( !StringUtil.isEmpty( extension ) ) { extension = '.' + extension; } final SimpleDateFormat sdf = new SimpleDateFormat(); Date now = new Date(); if ( specifyFormat && !Utils.isEmpty( datetimeFormat ) ) { sdf.applyPattern( datetimeFormat ); String dt = sdf.format( now ); filenameNoExtension += dt; } else { if ( addDate && null != datePattern ) { sdf.applyPattern( datePattern ); String d = sdf.format( now ); filenameNoExtension += '_' + d; } if ( addTime && null != timePattern ) { sdf.applyPattern( timePattern ); String t = sdf.format( now ); filenameNoExtension += '_' + t; } } return filenameNoExtension + extension; }
// Verifies that both null and empty filenames yield null, regardless of which date/time/format
// flags are set.
@Test public void testAddDatetimeToFilename_NoFilename() { JobEntryBase jobEntryBase = new JobEntryBase(); String fullFilename; String filename; // // null filename // filename = null; // add nothing fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, false, null, false, null ); // add date assertNull( fullFilename ); fullFilename = jobEntryBase.addDatetimeToFilename( filename, true, "yyyyMMdd", false, null, false, null ); assertNull( fullFilename ); // add time fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, true, "HHmmssSSS", false, null ); assertNull( fullFilename ); // add date and time fullFilename = jobEntryBase.addDatetimeToFilename( filename, true, "yyyyMMdd", true, "HHmmssSSS", false, null ); assertNull( fullFilename ); // add datetime fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, false, null, true, "(yyyyMMdd_HHmmssSSS)" ); assertNull( fullFilename ); // // empty filename // filename = StringUtil.EMPTY_STRING; // add nothing fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, false, null, false, null ); assertNull( fullFilename ); // add date fullFilename = jobEntryBase.addDatetimeToFilename( filename, true, "yyyyMMdd", false, null, false, null ); assertNull( fullFilename ); // add time fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, true, "HHmmssSSS", false, null ); assertNull( fullFilename ); // add date and time fullFilename = jobEntryBase.addDatetimeToFilename( filename, true, "yyyyMMdd", true, "HHmmssSSS", false, null ); assertNull( fullFilename ); // add datetime fullFilename = jobEntryBase.addDatetimeToFilename( filename, false, null, false, null, true, "(yyyyMMdd_HHmmssSSS)" ); assertNull( fullFilename ); }
// Computes the compensation predicate between two disjunctions (OR-lists).
// Returns null when src has a disjunct the target lacks (no compensation possible),
// TRUE when the disjunct sets are identical, and src itself when target strictly
// contains src's disjuncts (src is the tighter condition).
public static ScalarOperator getCompensationPredicateForDisjunctive(ScalarOperator src, ScalarOperator target) { List<ScalarOperator> srcItems = Utils.extractDisjunctive(src); List<ScalarOperator> targetItems = Utils.extractDisjunctive(target); if (!Sets.newHashSet(targetItems).containsAll(srcItems)) { return null; } targetItems.removeAll(srcItems); if (targetItems.isEmpty()) { // it is the same, so return true constant return ConstantOperator.createBoolean(true); } else { // the target has more or item, so return src return src; } }
// Verifies the subset/equal/superset cases of getCompensationPredicateForDisjunctive
// using TRUE, FALSE and (FALSE OR TRUE) operands.
@Test public void testGetCompensationPredicateForDisjunctive() { ConstantOperator alwaysTrue = ConstantOperator.TRUE; ConstantOperator alwaysFalse = ConstantOperator.createBoolean(false); CompoundPredicateOperator compound = new CompoundPredicateOperator( CompoundPredicateOperator.CompoundType.OR, alwaysFalse, alwaysTrue); Assert.assertEquals(alwaysTrue, MvUtils.getCompensationPredicateForDisjunctive(alwaysTrue, compound)); Assert.assertEquals(alwaysFalse, MvUtils.getCompensationPredicateForDisjunctive(alwaysFalse, compound)); Assert.assertEquals(null, MvUtils.getCompensationPredicateForDisjunctive(compound, alwaysFalse)); Assert.assertEquals(alwaysTrue, MvUtils.getCompensationPredicateForDisjunctive(compound, compound)); }
/**
 * Reports whether {@code descendant} resolves to a location at or below {@code root}.
 * Both paths are absolutized and normalized first, so {@code ..} segments cannot escape
 * the root — this is the path-traversal guard for fetched file paths.
 */
static boolean isDescendant(Path root, Path descendant) {
    Path normalizedRoot = root.toAbsolutePath().normalize();
    Path normalizedCandidate = descendant.toAbsolutePath().normalize();
    return normalizedCandidate.startsWith(normalizedRoot);
}
// Verifies isDescendant accepts a nested child, rejects a sibling tree, and rejects a
// ".."-escaping path.
@Test public void testDescendant() throws Exception { Path root = Paths.get("/ab/cd/"); Path descendant = root.resolve("ef/gh/ij.pdf"); assertTrue(FileSystemFetcher.isDescendant(root, descendant)); descendant = Paths.get("/cd/ef.pdf"); assertFalse(FileSystemFetcher.isDescendant(root, descendant)); descendant = root.resolve("../../ij.pdf"); assertFalse(FileSystemFetcher.isDescendant(root, descendant)); }
/**
 * Applies an {@link OpenAPISpecFilter} to a spec and returns a filtered clone.
 * Returns {@code null} when the filter hides the whole spec. Paths and webhooks are filtered
 * per path-item/operation; tags referenced only by removed operations are dropped; components
 * are copied with schemas additionally filtered; unreferenced definitions are optionally pruned.
 *
 * Fixes relative to the previous revision:
 *  - the webhooks loop now reads items from {@code getWebhooks()} (it previously read from
 *    {@code getPaths()}, yielding null/wrong items for webhook keys);
 *  - tags are cloned from the filtered spec, not the unfiltered input, so tags removed by the
 *    filter no longer reappear.
 */
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params,
        Map<String, String> cookies, Map<String, List<String>> headers) {
    // Let the filter veto/transform the whole spec first; null means "hide everything".
    OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers);
    if (filteredOpenAPI == null) {
        return null;
    }

    // Shallow-clone top-level metadata from the *filtered* spec.
    OpenAPI clone = new OpenAPI();
    clone.info(filteredOpenAPI.getInfo());
    clone.openapi(filteredOpenAPI.getOpenapi());
    clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect());
    clone.setSpecVersion(filteredOpenAPI.getSpecVersion());
    clone.setExtensions(filteredOpenAPI.getExtensions());
    clone.setExternalDocs(filteredOpenAPI.getExternalDocs());
    clone.setSecurity(filteredOpenAPI.getSecurity());
    clone.setServers(filteredOpenAPI.getServers());
    // FIX: copy tags from filteredOpenAPI (was openAPI), so filtered-away tags stay removed.
    clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(filteredOpenAPI.getTags()));

    // Tags seen on surviving operations vs. tags seen on filtered-out ones.
    final Set<String> allowedTags = new HashSet<>();
    final Set<String> filteredTags = new HashSet<>();

    // Filter each path item; keep it only if at least one operation survives.
    Paths clonedPaths = new Paths();
    if (filteredOpenAPI.getPaths() != null) {
        for (String resourcePath : filteredOpenAPI.getPaths().keySet()) {
            PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath,
                    params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null && !clonedPathItem.readOperations().isEmpty()) {
                clonedPaths.addPathItem(resourcePath, clonedPathItem);
            }
        }
        clone.paths(clonedPaths);
    }

    // Drop tags referenced only by removed operations; null out an emptied tag list.
    filteredTags.removeAll(allowedTags);
    final List<Tag> tags = clone.getTags();
    if (tags != null && !filteredTags.isEmpty()) {
        tags.removeIf(tag -> filteredTags.contains(tag.getName()));
        if (clone.getTags().isEmpty()) {
            clone.setTags(null);
        }
    }

    // Apply the same per-item filtering to webhooks.
    if (filteredOpenAPI.getWebhooks() != null) {
        for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) {
            // FIX: fetch from the webhooks map, not getPaths().
            PathItem pathItem = filteredOpenAPI.getWebhooks().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath,
                    params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null && !clonedPathItem.readOperations().isEmpty()) {
                clone.addWebhooks(resourcePath, clonedPathItem);
            }
        }
    }

    // Copy components; schemas are additionally filtered, the rest passed through as filtered.
    if (filteredOpenAPI.getComponents() != null) {
        clone.components(new Components());
        clone.getComponents().setSchemas(
                filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers));
        clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes());
        clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks());
        clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples());
        clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions());
        clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders());
        clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks());
        clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters());
        clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies());
        clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses());
        clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems());
    }

    // Optionally prune definitions no longer referenced after filtering.
    if (filter.isRemovingUnreferencedDefinitions()) {
        clone = removeBrokenReferenceDefinitions(clone);
    }
    return clone;
}
@Test(description = "it should filter any PathItem objects without Ref") public void filterAwayPathItemWithoutRef() throws IOException { final OpenAPI openAPI = getOpenAPI(RESOURCE_PATH); final OpenAPI filtered = new SpecFilter().filter(openAPI, new NoPathItemFilter(), null, null, null); assertEquals(0, filtered.getPaths().size()); }
// Logs an HTTP response at debug level: computes and stores the elapsed time on the response,
// writes status and (modifier-filtered) headers, and appends the body only for non-binary
// resource types; an unknown (null) resource type is treated as binary and not logged.
public void logResponse(Config config, HttpRequest request, Response response) { long startTime = request.getStartTime(); long elapsedTime = request.getEndTime() - startTime; response.setResponseTime(elapsedTime); StringBuilder sb = new StringBuilder(); String uri = request.getUrl(); HttpLogModifier responseModifier = logModifier(config, uri); sb.append("response time in milliseconds: ").append(elapsedTime).append('\n'); sb.append(requestCount).append(" < ").append(response.getStatus()); logHeaders(requestCount, " < ", sb, responseModifier, response.getHeaders()); ResourceType rt = response.getResourceType(); if ( rt == null || rt.isBinary()) { // don't log body } else { logBody(config, responseModifier, sb, uri, response.getBody(), false, rt); } sb.append('\n'); logger.debug("{}", sb); }
// Verifies that with logPrettyResponse enabled the JSON body is pretty-printed in the log and
// the Content-Type header is included.
@Test void testResponseLoggingJsonPretty() { config.configure("logPrettyResponse", new Variable(true)); setup("json", "{a: 1}", "application/json"); httpRequestBuilder.path("/json"); Response response = handle(); match(response.getBodyAsString(), "{a: 1}"); match(response.getContentType(), "application/json"); httpLogger.logResponse(config, request, response); String logs = logAppender.collect(); assertTrue(logs.contains("{\n  \"a\": 1\n}")); assertTrue(logs.contains("Content-Type: application/json")); }
// Configures the Prometheus sampler: base config, then sampling interval, the Prometheus
// adapter (endpoint), and the metric-to-query map — in that order, since the query map may
// depend on the configured scraping interval.
@Override public void configure(Map<String, ?> configs) { super.configure(configs); configureSamplingInterval(configs); configurePrometheusAdapter(configs); configureQueryMap(configs); }
// Verifies a 91s scraping interval is accepted and rounds the CPU-query irate window up to 4m.
@Test public void testConfigureWithPrometheusScrapingInterval91sDoesNotFail() throws Exception { Map<String, Object> config = new HashMap<>(); config.put(PROMETHEUS_SERVER_ENDPOINT_CONFIG, "kafka-cluster-1.org:9090"); config.put(DefaultPrometheusQuerySupplier.PROMETHEUS_BROKER_METRICS_SCRAPING_INTERVAL_SECONDS, "91"); addCapacityConfig(config); _prometheusMetricSampler.configure(config); String expectedQuery = "1 - avg by (instance) (irate(node_cpu_seconds_total{mode=\"idle\"}[4m]))"; assertEquals(expectedQuery, _prometheusMetricSampler._metricToPrometheusQueryMap.get(RawMetricType.BROKER_CPU_UTIL)); }
// Convenience overload: parses the entire CharSequence as an HTTP date (RFC 1123 et al.),
// delegating to the ranged overload over [0, length).
public static Date parseHttpDate(CharSequence txt) { return parseHttpDate(txt, 0, txt.length()); }
// Verifies parsing a standard RFC 1123 date with a zero-padded two-digit day.
@Test public void testParseWithDoubleDigitDay() { assertEquals(DATE, parseHttpDate("Sun, 06 Nov 1994 08:49:37 GMT")); }
/**
 * Builds a BlockStmt that declares a KiePMMLTextIndex variable named {@code variableName}.
 * The code is cloned from the GETKIEPMMLTEXTINDEX template method and its builder chain is
 * rewritten from the given TextIndex model. Statement order in the returned block matters:
 * the nested expression and normalization declarations precede the template body that
 * references them by name.
 */
static BlockStmt getTextIndexVariableDeclaration(final String variableName, final TextIndex textIndex) {
    final MethodDeclaration methodDeclaration =
            TEXTINDEX_TEMPLATE.getMethodsByName(GETKIEPMMLTEXTINDEX).get(0).clone();
    final BlockStmt textIndexBody = methodDeclaration.getBody()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final VariableDeclarator variableDeclarator = getVariableDeclarator(textIndexBody, TEXTINDEX)
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TEXTINDEX, textIndexBody)));
    variableDeclarator.setName(variableName);
    final BlockStmt toReturn = new BlockStmt();
    // Declare the nested expression first so the generated template body can reference it.
    String expressionVariableName = String.format("%s_Expression", variableName);
    final BlockStmt expressionBlockStatement =
            getKiePMMLExpressionBlockStmt(expressionVariableName, textIndex.getExpression());
    expressionBlockStatement.getStatements().forEach(toReturn::addStatement);
    int counter = 0;
    final NodeList<Expression> arguments = new NodeList<>();
    if (textIndex.hasTextIndexNormalizations()) {
        // One generated variable per normalization; names collected as arguments for asList(...).
        for (TextIndexNormalization textIndexNormalization : textIndex.getTextIndexNormalizations()) {
            String nestedVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, counter);
            arguments.add(new NameExpr(nestedVariableName));
            BlockStmt toAdd = getTextIndexNormalizationVariableDeclaration(nestedVariableName, textIndexNormalization);
            toAdd.getStatements().forEach(toReturn::addStatement);
            counter++;
        }
    }
    final MethodCallExpr initializer = variableDeclarator.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, TEXTINDEX, toReturn)))
            .asMethodCallExpr();
    // Rewrite the arguments of the cloned builder(...) call: field name and expression variable.
    final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
    final StringLiteralExpr nameExpr = new StringLiteralExpr(textIndex.getTextField());
    final NameExpr expressionExpr = new NameExpr(expressionVariableName);
    builder.setArgument(0, nameExpr);
    builder.setArgument(2, expressionExpr);
    // Optional enum-valued settings become a qualified enum reference or a null literal.
    Expression localTermWeightsExpression;
    if (textIndex.getLocalTermWeights() != null) {
        final LOCAL_TERM_WEIGHTS localTermWeights = LOCAL_TERM_WEIGHTS.byName(textIndex.getLocalTermWeights().value());
        localTermWeightsExpression = new NameExpr(LOCAL_TERM_WEIGHTS.class.getName() + "." + localTermWeights.name());
    } else {
        localTermWeightsExpression = new NullLiteralExpr();
    }
    getChainedMethodCallExprFrom("withLocalTermWeights", initializer).setArgument(0, localTermWeightsExpression);
    getChainedMethodCallExprFrom("withIsCaseSensitive", initializer).setArgument(0, getExpressionForObject(textIndex.isCaseSensitive()));
    getChainedMethodCallExprFrom("withMaxLevenshteinDistance", initializer).setArgument(0, getExpressionForObject(textIndex.getMaxLevenshteinDistance()));
    Expression countHitsExpression;
    if (textIndex.getCountHits() != null) {
        final COUNT_HITS countHits = COUNT_HITS.byName(textIndex.getCountHits().value());
        countHitsExpression = new NameExpr(COUNT_HITS.class.getName() + "." + countHits.name());
    } else {
        countHitsExpression = new NullLiteralExpr();
    }
    getChainedMethodCallExprFrom("withCountHits", initializer).setArgument(0, countHitsExpression);
    Expression wordSeparatorCharacterREExpression;
    if (textIndex.getWordSeparatorCharacterRE() != null) {
        // Escape the regex so it survives embedding in a generated Java string literal.
        String wordSeparatorCharacterRE = StringEscapeUtils.escapeJava(textIndex.getWordSeparatorCharacterRE());
        wordSeparatorCharacterREExpression = new StringLiteralExpr(wordSeparatorCharacterRE);
    } else {
        wordSeparatorCharacterREExpression = new NullLiteralExpr();
    }
    getChainedMethodCallExprFrom("withWordSeparatorCharacterRE", initializer).setArgument(0, wordSeparatorCharacterREExpression);
    getChainedMethodCallExprFrom("withTokenize", initializer).setArgument(0, getExpressionForObject(textIndex.isTokenize()));
    getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments);
    // Finally append the (rewritten) template body after all supporting declarations.
    textIndexBody.getStatements().forEach(toReturn::addStatement);
    return toReturn;
}
// The generated block must equal the expected source template (parameterized with the
// variable and field names) and compile with the listed imports.
@Test
void getTextIndexVariableDeclaration() throws IOException {
    String variableName = "variableName";
    BlockStmt retrieved = KiePMMLTextIndexFactory.getTextIndexVariableDeclaration(variableName, TEXTINDEX);
    String text = getFileContent(TEST_01_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName, TEXTINDEX.getTextField()));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Arrays.class, Collections.class, Collectors.class,
            KiePMMLFieldRef.class, KiePMMLInlineTable.class, KiePMMLTextIndex.class,
            KiePMMLTextIndexNormalization.class, KiePMMLRow.class, Map.class, Stream.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
protected String resolveOffchain( String lookupData, OffchainResolverContract resolver, int lookupCounter) throws Exception { if (EnsUtils.isEIP3668(lookupData)) { OffchainLookup offchainLookup = OffchainLookup.build(Numeric.hexStringToByteArray(lookupData.substring(10))); if (!resolver.getContractAddress().equals(offchainLookup.getSender())) { throw new EnsResolutionException( "Cannot handle OffchainLookup raised inside nested call"); } String gatewayResult = ccipReadFetch( offchainLookup.getUrls(), offchainLookup.getSender(), Numeric.toHexString(offchainLookup.getCallData())); if (gatewayResult == null) { throw new EnsResolutionException("CCIP Read disabled or provided no URLs."); } ObjectMapper objectMapper = ObjectMapperFactory.getObjectMapper(); EnsGatewayResponseDTO gatewayResponseDTO = objectMapper.readValue(gatewayResult, EnsGatewayResponseDTO.class); String resolvedNameHex = resolver.resolveWithProof( Numeric.hexStringToByteArray(gatewayResponseDTO.getData()), offchainLookup.getExtraData()) .send(); // This protocol can result in multiple lookups being requested by the same contract. if (EnsUtils.isEIP3668(resolvedNameHex)) { if (lookupCounter <= 0) { throw new EnsResolutionException("Lookup calls is out of limit."); } return resolveOffchain(lookupData, resolver, --lookupCounter); } else { byte[] resolvedNameBytes = DefaultFunctionReturnDecoder.decodeDynamicBytes(resolvedNameHex); return DefaultFunctionReturnDecoder.decodeAddress( Numeric.toHexString(resolvedNameBytes)); } } return lookupData; }
// An offchain lookup whose sender differs from the resolver's contract address must be
// rejected with EnsResolutionException (nested-call lookups are not supported).
@Test
void resolveOffchainWhenContractAddressNotEq() {
    OffchainResolverContract resolver = mock(OffchainResolverContract.class);
    when(resolver.getContractAddress()).thenReturn("0x123456");
    assertThrows(
            EnsResolutionException.class,
            () -> ensResolver.resolveOffchain(LOOKUP_HEX, resolver, 4));
}
/**
 * Scans the given handler (a bean name or an instance) for request-mapping methods,
 * derives a plugin-scoped RequestMappingInfo for each, registers them, and records the
 * mappings in {@code pluginMappingInfo} keyed by plugin id.
 *
 * @param pluginId id used to scope the generated mappings
 * @param handler  a bean name (resolved through the application context) or handler instance
 */
public void registerHandlerMethods(String pluginId, Object handler) {
    Class<?> handlerType = (handler instanceof String beanName
            ? obtainApplicationContext().getType(beanName)
            : handler.getClass());
    if (handlerType != null) {
        // Work on the user class in case the bean is a CGLIB/AOP proxy.
        final Class<?> userType = ClassUtils.getUserClass(handlerType);
        Map<Method, RequestMappingInfo> methods = MethodIntrospector.selectMethods(userType,
                (MethodIntrospector.MetadataLookup<RequestMappingInfo>) method ->
                        getPluginMappingForMethod(pluginId, method, userType));
        if (logger.isTraceEnabled()) {
            logger.trace(formatMappings(userType, methods));
        } else if (mappingsLogger.isDebugEnabled()) {
            mappingsLogger.debug(formatMappings(userType, methods));
        }
        methods.forEach((method, mapping) -> {
            Method invocableMethod = AopUtils.selectInvocableMethod(method, userType);
            registerHandlerMethod(handler, invocableMethod, mapping);
            // Remember which mappings belong to this plugin.
            pluginMappingInfo.add(pluginId, mapping);
        });
    }
}
// When several handler methods could match, the best match (the one whose params
// condition matches the query string) must be selected.
@Test
public void getHandlerBestMatch() {
    // register handler methods first
    handlerMapping.registerHandlerMethods("fakePlugin", new TestController());
    Method expected = ResolvableMethod.on(TestController.class).annot(getMapping("/foo").params("p")).build();
    String requestPath = "/apis/api.plugin.halo.run/v1alpha1/plugins/fakePlugin/foo?p=anything";
    ServerWebExchange exchange = MockServerWebExchange.from(get(requestPath));
    HandlerMethod hm = (HandlerMethod) this.handlerMapping.getHandler(exchange).block();
    assertThat(hm).isNotNull();
    assertThat(hm.getMethod()).isEqualTo(expected);
}
/**
 * Tells whether the given client version string is at least 1.5.0.
 * Any conversion failure (malformed or empty version) is logged and reported as false.
 *
 * @param version client version string, e.g. "1.5.0"
 * @return true when the version compares greater than or equal to 1.5.0
 */
public static boolean isAboveOrEqualVersion150(String version) {
    try {
        return convertVersion(version) >= convertVersion(VERSION_1_5_0);
    } catch (Exception e) {
        LOGGER.error("convert version error, clientVersion:{}", version, e);
        return false;
    }
}
// Two-segment "1.5" still counts as >= 1.5.0; empty or malformed strings resolve to false.
@Test
public void isAboveOrEqualVersion150() {
    Assertions.assertTrue(Version.isAboveOrEqualVersion150("2.0.2"));
    Assertions.assertTrue(Version.isAboveOrEqualVersion150("1.5"));
    Assertions.assertFalse(Version.isAboveOrEqualVersion150("1.4.9"));
    Assertions.assertFalse(Version.isAboveOrEqualVersion150(""));
    // Invalid version code will always return false.
    Assertions.assertFalse(Version.isAboveOrEqualVersion150("abd"));
}
/**
 * Static factory returning a new {@code Flatten.PCollections} transform that merges the
 * elements of a PCollectionList into a single PCollection.
 */
public static <T> PCollections<T> pCollections() {
    PCollections<T> flatten = new PCollections<>();
    return flatten;
}
// Flattening two inputs that share an identical fixed-window strategy must propagate a
// compatible WindowFn to the output.
@Test
@Category(NeedsRunner.class)
public void testEqualWindowFnPropagation() {
    PCollection<String> input1 =
        p.apply("CreateInput1", Create.of("Input1"))
            .apply("Window1", Window.into(FixedWindows.of(Duration.standardMinutes(1))));
    PCollection<String> input2 =
        p.apply("CreateInput2", Create.of("Input2"))
            .apply("Window2", Window.into(FixedWindows.of(Duration.standardMinutes(1))));
    PCollection<String> output = PCollectionList.of(input1).and(input2).apply(Flatten.pCollections());
    p.run();
    Assert.assertTrue(
        output
            .getWindowingStrategy()
            .getWindowFn()
            .isCompatible(FixedWindows.of(Duration.standardMinutes(1))));
}
/**
 * Accumulates one sample: merges the incoming values into the running summation and,
 * for every key present in this sample, bumps that key's sample count by one.
 */
@Override
public void accept(final MeterEntity entity, final DataTable value) {
    this.entityId = entity.id();
    this.serviceId = entity.serviceId();
    this.summation.append(value);
    // Each key observed in this sample contributes exactly one occurrence.
    final DataTable occurrences = new DataTable();
    for (final String key : value.keys()) {
        occurrences.put(key, 1L);
    }
    this.count.append(occurrences);
}
// Two accept() calls must merge the per-key summation and increment the per-key count
// only for keys present in each individual sample.
@Test
public void testAccept() {
    function.accept(
        MeterEntity.newService("request_count", Layer.GENERAL),
        build(asList("200", "404"), asList(10L, 2L)));
    assertResult(asList("200", "404"), asList(10L, 2L), asList(1L, 1L));
    function.accept(
        MeterEntity.newService("request_count", Layer.GENERAL),
        build(asList("200", "500"), asList(2L, 3L)));
    assertResult(asList("200", "404", "500"), asList(12L, 2L, 3L), asList(2L, 1L, 1L));
}
// Revokes (deletes) an OAuth2 access token. The caller authenticates as an OAuth2 client
// via HTTP Basic credentials; the token is then revoked on behalf of that client.
@DeleteMapping("/token")
@PermitAll
@Operation(summary = "删除访问令牌")
@Parameter(name = "token", required = true, description = "访问令牌", example = "biu")
public CommonResult<Boolean> revokeToken(HttpServletRequest request,
                                         @RequestParam("token") String token) {
    // validate the client (Basic auth: clientId + clientSecret)
    String[] clientIdAndSecret = obtainBasicAuthorization(request);
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0],
            clientIdAndSecret[1], null, null, null);
    // revoke the access token
    return success(oauth2GrantService.revokeToken(client.getClientId(), token));
}
// A valid client (Basic auth) revoking an existing token must get a success result with data=true.
@Test
public void testRevokeToken() {
    // prepare arguments
    HttpServletRequest request = mockRequest("demo_client_id", "demo_client_secret");
    String token = randomString();
    // mock method (client)
    OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("demo_client_id");
    when(oauth2ClientService.validOAuthClientFromCache(eq("demo_client_id"), eq("demo_client_secret"),
            isNull(), isNull(), isNull())).thenReturn(client);
    // mock method (revoke)
    when(oauth2GrantService.revokeToken(eq("demo_client_id"), eq(token))).thenReturn(true);
    // invoke
    CommonResult<Boolean> result = oauth2OpenController.revokeToken(request, token);
    // assert
    assertEquals(0, result.getCode());
    assertTrue(result.getData());
}
/**
 * Configures the converter. The key/value role comes from {@code ConverterType}; both
 * schema caches are LRU caches (wrapped for synchronized access) sized from the
 * configured schema cache size.
 */
@Override
public void configure(Map<String, ?> configs) {
    config = new JsonConverterConfig(configs);
    // ConverterType.KEY selects key-converter behavior for the underlying (de)serializer.
    serializer.configure(configs, config.type() == ConverterType.KEY);
    deserializer.configure(configs, config.type() == ConverterType.KEY);
    fromConnectSchemaCache = new SynchronizedCache<>(new LRUCache<>(config.schemaCacheSize()));
    toConnectSchemaCache = new SynchronizedCache<>(new LRUCache<>(config.schemaCacheSize()));
}
// Configuring the converter from a worker properties file (which includes the schema
// cache size setting) must not throw.
@Test
public void testJsonSchemaCacheSizeFromConfigFile() throws URISyntaxException, IOException {
    URL url = Objects.requireNonNull(getClass().getResource("/connect-test.properties"));
    File propFile = new File(url.toURI());
    String workerPropsFile = propFile.getAbsolutePath();
    Map<String, String> workerProps = !workerPropsFile.isEmpty()
        ? Utils.propsToStringMap(Utils.loadProps(workerPropsFile))
        : Collections.emptyMap();
    JsonConverter rc = new JsonConverter();
    rc.configure(workerProps, false);
}
/**
 * Abbreviates a fully-qualified class name toward {@code targetLength} by reducing
 * left-hand package segments to their first character, left to right, until enough
 * characters have been trimmed. The final class-name segment is never shortened.
 *
 * @param fqClassName fully-qualified class name; must not be null
 * @return the (possibly) abbreviated name
 * @throws IllegalArgumentException if {@code fqClassName} is null
 */
public String abbreviate(String fqClassName) {
    if (fqClassName == null) {
        throw new IllegalArgumentException("Class name may not be null");
    }
    int inLen = fqClassName.length();
    // Already short enough: return unchanged.
    if (inLen < targetLength) {
        return fqClassName;
    }
    StringBuilder buf = new StringBuilder(inLen);
    int rightMostDotIndex = fqClassName.lastIndexOf(DOT);
    // No package part at all: nothing to abbreviate.
    if (rightMostDotIndex == -1)
        return fqClassName;
    // length of last segment including the dot
    int lastSegmentLength = inLen - rightMostDotIndex;
    int leftSegments_TargetLen = targetLength - lastSegmentLength;
    if (leftSegments_TargetLen < 0)
        leftSegments_TargetLen = 0;
    int leftSegmentsLen = inLen - lastSegmentLength;
    // maxPossibleTrim denotes the maximum number of characters we aim to trim
    // the actual number of character trimmed may be higher since segments, when
    // reduced, are reduced to just one character
    int maxPossibleTrim = leftSegmentsLen - leftSegments_TargetLen;
    int trimmed = 0;
    // inDotState == true right after a dot: the next char starts a segment and is kept
    boolean inDotState = true;
    int i = 0;
    for (; i < rightMostDotIndex; i++) {
        char c = fqClassName.charAt(i);
        if (c == DOT) {
            // if trimmed too many characters, let us stop
            if (trimmed >= maxPossibleTrim)
                break;
            buf.append(c);
            inDotState = true;
        } else {
            if (inDotState) {
                buf.append(c);
                inDotState = false;
            } else {
                trimmed++;
            }
        }
    }
    // append from the position of i which may include the last seen DOT
    buf.append(fqClassName.substring(i));
    return buf.toString();
}
// Three-segment package names at various target lengths: shorter targets collapse more
// segments; the class name itself is never shortened.
@Test
public void test3Dot() {
    {
        TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(1);
        String name = "com.logback.xyz.Foobar";
        assertEquals("c.l.x.Foobar", abbreviator.abbreviate(name));
    }
    {
        TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(13);
        String name = "com.logback.xyz.Foobar";
        assertEquals("c.l.x.Foobar", abbreviator.abbreviate(name));
    }
    {
        // Target 14 leaves room to keep the "xyz" segment intact.
        TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(14);
        String name = "com.logback.xyz.Foobar";
        assertEquals("c.l.xyz.Foobar", abbreviator.abbreviate(name));
    }
    {
        TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(15);
        String name = "com.logback.alligator.Foobar";
        assertEquals("c.l.a.Foobar", abbreviator.abbreviate(name));
    }
}
/**
 * Returns the geographic center of this bounding box: the midpoint of each axis,
 * computed as the minimum plus half the span.
 */
public LatLong getCenterPoint() {
    final double halfLatitudeSpan = (this.maxLatitude - this.minLatitude) / 2;
    final double halfLongitudeSpan = (this.maxLongitude - this.minLongitude) / 2;
    return new LatLong(this.minLatitude + halfLatitudeSpan, this.minLongitude + halfLongitudeSpan);
}
// The center must be the exact arithmetic midpoint of the bounding box corners (delta 0).
@Test
public void getCenterPointTest() {
    BoundingBox boundingBox = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    LatLong centerPoint = boundingBox.getCenterPoint();
    Assert.assertEquals((MIN_LATITUDE + MAX_LATITUDE) / 2, centerPoint.latitude, 0);
    Assert.assertEquals((MIN_LONGITUDE + MAX_LONGITUDE) / 2, centerPoint.longitude, 0);
}
/**
 * Computes the default storage location for a new table: {@code <dbLocation>/<tableName>}.
 * Fails fast with StarRocksConnectorException if the database is missing, has no location,
 * its location does not exist or is not a directory, or the target table directory already exists.
 */
public Path getDefaultLocation(String dbName, String tableName) {
    Database database = getDb(dbName);
    if (database == null) {
        throw new StarRocksConnectorException("Database '%s' not found", dbName);
    }
    if (Strings.isNullOrEmpty(database.getLocation())) {
        throw new StarRocksConnectorException("Failed to find location in database '%s'. Please define the location" +
                " when you create table or recreate another database with location." +
                " You could execute the SQL command like 'CREATE TABLE <table_name> <columns> " +
                "PROPERTIES('location' = '<location>')", dbName);
    }
    String dbLocation = database.getLocation();
    Path databasePath = new Path(dbLocation);
    // The database directory itself must exist and be a directory before we derive a child path.
    if (!pathExists(databasePath, hadoopConf)) {
        throw new StarRocksConnectorException("Database '%s' location does not exist: %s", dbName, databasePath);
    }
    if (!isDirectory(databasePath, hadoopConf)) {
        throw new StarRocksConnectorException("Database '%s' location is not a directory: %s", dbName, databasePath);
    }
    Path targetPath = new Path(databasePath, tableName);
    // Refuse to reuse an existing table directory.
    if (pathExists(targetPath, hadoopConf)) {
        throw new StarRocksConnectorException("Target directory for table '%s.%s' already exists: %s",
                dbName, tableName, targetPath);
    }
    return targetPath;
}
// Exercises every failure branch of getDefaultLocation: missing db location, non-existent
// location, location that is not a directory, and a table directory that already exists.
@Test
public void testGetDefaultLocation() {
    // Database without a location -> "Failed to find location" error.
    class MockedTestMetaClient1 extends HiveMetastoreTest.MockedHiveMetaClient {
        public org.apache.hadoop.hive.metastore.api.Database getDb(String dbName) throws RuntimeException {
            org.apache.hadoop.hive.metastore.api.Database database = new org.apache.hadoop.hive.metastore.api.Database();
            database.setName("db");
            return database;
        }
    }
    HiveMetaClient client = new MockedTestMetaClient1();
    HiveMetastore metastore = new HiveMetastore(client, "hive_catalog", MetastoreType.HMS);
    ExecutorService executor = Executors.newFixedThreadPool(5);
    CachingHiveMetastore cachingHiveMetastore = new CachingHiveMetastore(
            metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
    HiveMetastoreOperations hmsOps = new HiveMetastoreOperations(cachingHiveMetastore, true,
            new Configuration(), MetastoreType.HMS, "hive_catalog");
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Failed to find location in database 'db'",
            () -> hmsOps.getDefaultLocation("db", "table"));
    // Location set but path does not exist.
    new MockUp<HiveWriteUtils>() {
        @Mock
        public boolean pathExists(Path path, Configuration conf) {
            return false;
        }
    };
    class MockedTestMetaClient2 extends HiveMetastoreTest.MockedHiveMetaClient {
        public org.apache.hadoop.hive.metastore.api.Database getDb(String dbName) throws RuntimeException {
            org.apache.hadoop.hive.metastore.api.Database database = new org.apache.hadoop.hive.metastore.api.Database();
            database.setName("db");
            database.setLocationUri("my_location");
            return database;
        }
    }
    HiveMetaClient client2 = new MockedTestMetaClient2();
    HiveMetastore metastore2 = new HiveMetastore(client2, "hive_catalog", MetastoreType.HMS);
    CachingHiveMetastore cachingHiveMetastore2 = new CachingHiveMetastore(
            metastore2, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
    HiveMetastoreOperations hmsOps2 = new HiveMetastoreOperations(cachingHiveMetastore2, true,
            new Configuration(), MetastoreType.HMS, "hive_catalog");
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Database 'db' location does not exist: my_location",
            () -> hmsOps2.getDefaultLocation("db", "table"));
    // Location exists but is not a directory.
    new MockUp<HiveWriteUtils>() {
        @Mock
        public boolean pathExists(Path path, Configuration conf) {
            return true;
        }

        @Mock
        public boolean isDirectory(Path path, Configuration conf) {
            return false;
        }
    };
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Database 'db' location is not a directory: my_location",
            () -> hmsOps2.getDefaultLocation("db", "table"));
    // Everything valid, but the target table directory already exists.
    new MockUp<HiveWriteUtils>() {
        @Mock
        public boolean pathExists(Path path, Configuration conf) {
            return true;
        }

        @Mock
        public boolean isDirectory(Path path, Configuration conf) {
            return true;
        }
    };
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Target directory for table 'db.table' already exists: my_location/table",
            () -> hmsOps2.getDefaultLocation("db", "table"));
}
/**
 * Fetches the SCM configuration and view from the given plugin and caches both in the
 * SCM metadata store. A null configuration or view is treated as a plugin programming
 * error and raised as RuntimeException (not caught here); only plugin-framework failures
 * are logged and swallowed.
 */
void fetchSCMMetaData(GoPluginDescriptor pluginDescriptor) {
    try {
        SCMPropertyConfiguration scmConfiguration = scmExtension.getSCMConfiguration(pluginDescriptor.id());
        if (scmConfiguration == null) {
            throw new RuntimeException(format("Plugin[%s] returned null SCM configuration", pluginDescriptor.id()));
        }
        SCMView scmView = scmExtension.getSCMView(pluginDescriptor.id());
        if (scmView == null) {
            throw new RuntimeException(format("Plugin[%s] returned null SCM view", pluginDescriptor.id()));
        }
        scmMetadataStore.addMetadataFor(pluginDescriptor.id(), new SCMConfigurations(scmConfiguration), scmView);
    } catch (GoPluginFrameworkException e) {
        // Framework-level failures are non-fatal: log and leave the store untouched.
        LOGGER.error("Failed to fetch SCM metadata for plugin : {}", pluginDescriptor.id(), e);
    }
}
// A null SCM configuration must surface as an exception carrying the plugin id, and
// nothing may be cached in the metadata store.
@Test
public void shouldThrowExceptionWhenNullSCMConfigurationReturned() {
    when(scmExtension.getSCMConfiguration(pluginDescriptor.id())).thenReturn(null);
    boolean exceptionThrown = false;
    try {
        metadataLoader.fetchSCMMetaData(pluginDescriptor);
    } catch (Exception e) {
        exceptionThrown = true;
        assertThat(e.getMessage()).isEqualTo("Plugin[plugin-id] returned null SCM configuration");
    }
    // BUG FIX: previously the test passed vacuously when no exception was thrown at all.
    assertThat(exceptionThrown).isTrue();
    assertThat(SCMMetadataStore.getInstance().getConfigurationMetadata(pluginDescriptor.id())).isNull();
}
/**
 * Checks that the subject contains at least one of the given elements.
 * The two required leading parameters prevent accidental single-array invocation of the
 * varargs overload; all values are accumulated and delegated to {@code containsAnyIn}.
 */
public final void containsAnyOf(
    @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
  containsAnyIn(accumulate(first, second, rest));
}
// When expected Integers and actual Longs share the same toString, the failure message
// must disambiguate them by appending the element types.
@Test
public void iterableContainsAnyOfFailsWithSameToStringAndHomogeneousList() {
    expectFailureWhenTestingThat(asList(1L, 2L, 3L)).containsAnyOf(2, 3);
    assertFailureKeys(
        "expected to contain any of", "but did not", "though it did contain", "full contents");
    assertFailureValue("expected to contain any of", "[2, 3] (java.lang.Integer)");
    assertFailureValue("though it did contain", "[2, 3] (java.lang.Long)");
    assertFailureValue("full contents", "[1, 2, 3]");
}
static Optional<String> ensurePropertyAndGetWarning( Map<String, ? super String> props, String key, String expectedValue, String justification, boolean caseSensitive) { if (!props.containsKey(key)) { // Insert the expected value props.put(key, expectedValue); // But don't issue a warning to the user return Optional.empty(); } String value = Objects.toString(props.get(key)); boolean matchesExpectedValue = caseSensitive ? expectedValue.equals(value) : expectedValue.equalsIgnoreCase(value); if (matchesExpectedValue) { return Optional.empty(); } // Insert the expected value props.put(key, expectedValue); justification = justification != null ? " " + justification : ""; // And issue a warning to the user return Optional.of(String.format( "The value '%s' for the '%s' property will be ignored as it cannot be overridden%s. " + "The value '%s' will be used instead.", value, key, justification, expectedValue )); }
// No warning is expected when the key is absent, or already present with the expected
// value (case-insensitively when caseSensitive=false — the user's casing is then kept).
@Test
public void testNoOverrideWarning() {
    Map<String, ? super String> props = new HashMap<>();
    // Absent key: value is inserted silently.
    assertEquals(
            Optional.empty(),
            ConnectUtils.ensurePropertyAndGetWarning(props, "key", "value", "because i say so", true)
    );
    assertEquals("value", props.get("key"));
    props.clear();
    assertEquals(
            Optional.empty(),
            ConnectUtils.ensurePropertyAndGetWarning(props, "key", "value", "because i say so", false)
    );
    assertEquals("value", props.get("key"));
    props.clear();
    // Present with exact match.
    props.put("key", "value");
    assertEquals(
            Optional.empty(),
            ConnectUtils.ensurePropertyAndGetWarning(props, "key", "value", "because i say so", true)
    );
    assertEquals("value", props.get("key"));
    props.clear();
    // Present with case-insensitive match: existing casing is preserved.
    props.put("key", "VALUE");
    assertEquals(
            Optional.empty(),
            ConnectUtils.ensurePropertyAndGetWarning(props, "key", "value", "because i say so", false)
    );
    assertEquals("VALUE", props.get("key"));
}
/**
 * Adds a mapping from the given hostname (normalized via {@code normalizeHostname}) to
 * the output value. Both arguments must be non-null.
 *
 * @return this mapping, for chaining
 * @deprecated retained for backward compatibility; the class offers this mutable API only
 *     through this deprecated method.
 */
@Deprecated
public DomainNameMapping<V> add(String hostname, V output) {
    map.put(normalizeHostname(checkNotNull(hostname, "hostname")), checkNotNull(output, "output"));
    return this;
}
// The deprecated add() must reject a null hostname with NullPointerException.
@Test
public void testNullDomainNamePatternsAreForbiddenInDeprecatedApi() {
    assertThrows(NullPointerException.class, new Executable() {
        @Override
        public void execute() {
            new DomainNameMapping<String>("NotFound").add(null, "Some value");
        }
    });
}
/**
 * Creates a MapElements transform from an InferableFunction, inferring the input and
 * output type descriptors from the function itself.
 */
public static <InputT, OutputT> MapElements<InputT, OutputT> via(
    final InferableFunction<InputT, OutputT> fn) {
  return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor());
}
// Display data registered by the SimpleFunction (its class and custom items) must be
// propagated through the MapElements transform.
@Test
public void testSimpleFunctionDisplayData() {
    SimpleFunction<Integer, ?> simpleFn =
        new SimpleFunction<Integer, Integer>() {
            @Override
            public Integer apply(Integer input) {
                return input;
            }

            @Override
            public void populateDisplayData(DisplayData.Builder builder) {
                builder.add(DisplayData.item("foo", "baz"));
            }
        };
    MapElements<?, ?> simpleMap = MapElements.via(simpleFn);
    assertThat(DisplayData.from(simpleMap), hasDisplayItem("class", simpleFn.getClass()));
    assertThat(DisplayData.from(simpleMap), hasDisplayItem("foo", "baz"));
}
/**
 * Projects the given RowType onto the fields selected by {@code projection}
 * (source field indexes, in output order).
 *
 * @param rowType    source row type
 * @param projection indexes into {@code rowType}'s fields
 * @return a RowType containing the projected types and field names
 * @throws IllegalArgumentException if an index is out of range for the source type
 */
public static RowType projectRowType(@Nonnull RowType rowType, @Nonnull int[] projection)
        throws IllegalArgumentException {
    final int fieldCnt = rowType.getFieldCount();
    // Hoisted: field-name list is invariant across projection indexes.
    final List<String> fieldNames = rowType.getFieldNames();
    // Single validated pass instead of two separate stream traversals.
    final LogicalType[] projectedTypes = new LogicalType[projection.length];
    final String[] projectedNames = new String[projection.length];
    for (int i = 0; i < projection.length; i++) {
        final int index = projection[i];
        if (index >= fieldCnt) {
            throw new IllegalArgumentException(
                    String.format(
                            "Invalid projection index: %d of source rowType size: %d",
                            index, fieldCnt));
        }
        projectedTypes[i] = rowType.getTypeAt(index);
        projectedNames[i] = fieldNames.get(index);
    }
    // RowType.of still enforces field-name uniqueness (ValidationException on duplicates).
    return RowType.of(projectedTypes, projectedNames);
}
// Out-of-range indexes fail with IllegalArgumentException; duplicate projected field
// names fail later inside RowType.of with ValidationException.
@Test
void testInvalidProjectRowType() {
    assertThatThrownBy(() -> RowTypeUtils.projectRowType(srcType, new int[] {0, 1, 2, 3}))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("Invalid projection index: 3");
    assertThatThrownBy(() -> RowTypeUtils.projectRowType(srcType, new int[] {0, 1, 3}))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("Invalid projection index: 3");
    assertThatThrownBy(() -> RowTypeUtils.projectRowType(srcType, new int[] {0, 0, 0, 0}))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("Field names must be unique. Found duplicates");
}
/**
 * Applies each selector in turn: the first selector runs against the raw text, and every
 * subsequent selector runs against each result of the previous stage (results are the
 * union over those inputs). Returns early as soon as a stage yields no results.
 */
@Override
public List<String> selectList(String text) {
    List<String> results = new ArrayList<>();
    boolean first = true;
    for (Selector selector : selectors) {
        if (first) {
            results = selector.selectList(text);
            first = false;
        } else {
            List<String> narrowed = new ArrayList<>();
            for (String result : results) {
                narrowed.addAll(selector.selectList(result));
            }
            results = narrowed;
            // FIX: removed the dead null check — a freshly created ArrayList is never null.
            if (results.isEmpty()) {
                return results;
            }
        }
    }
    return results;
}
// Chained selection: the CSS selector picks all divs, then the XPath selector narrows
// those results down to the div with class "item1".
@Test
public void testSelectList() {
    String htmlContent = "<!DOCTYPE html>\n" +
            "<html lang=\"en\">\n" +
            "<head>\n" +
            " <meta charset=\"UTF-8\">\n" +
            " <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n" +
            " <title>HTML with XPath</title>\n" +
            "</head>\n" +
            "<body>\n" +
            " <div class=\"container\">\n" +
            " <div class=\"item1\">Item 1</div>\n" +
            " <div class=\"item2\">Item 2</div>\n" +
            " </div>\n" +
            "</body>\n" +
            "</html>";
    List<Selector> selectors = new ArrayList<Selector>();
    selectors.add(new CssSelector("div"));
    selectors.add(new XpathSelector("//div[@class='item1']"));
    AndSelector andSelector = new AndSelector(selectors);
    List<String> result = andSelector.selectList(htmlContent);
    assertEquals("<div class=\"item1\">\n Item 1\n</div>", result.get(0));
}
// Returns role detail by id. The id is pre-validated for existence via @Existed; a null
// service result is still mapped to an explicit DETAIL_FAILED response.
@GetMapping("/{id}")
@RequiresPermissions("system:role:edit")
public ShenyuAdminResult detailRole(@PathVariable("id") @Valid
                                    @Existed(provider = RoleMapper.class,
                                            message = "role is not existed") final String id) {
    RoleEditVO roleEditVO = roleService.findById(id);
    return Optional.ofNullable(roleEditVO)
            .map(item -> ShenyuAdminResult.success(ShenyuResultMessage.DETAIL_SUCCESS, item))
            .orElse(ShenyuAdminResult.error(ShenyuResultMessage.DETAIL_FAILED));
}
// A null lookup yields DETAIL_FAILED; a found role yields DETAIL_SUCCESS — both HTTP 200.
@Test
public void testDetailRole() throws Exception {
    given(roleService.findById(anyString())).willReturn(null);
    String urlTemplate = "/role/{id}";
    this.mockMvc.perform(MockMvcRequestBuilders.get(urlTemplate, "test_id"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.DETAIL_FAILED)))
            .andReturn();
    RoleEditVO roleEditVO = buildRoleEditVO();
    given(roleService.findById(roleEditVO.getSysRole().getId())).willReturn(roleEditVO);
    urlTemplate = "/role/{id}";
    this.mockMvc.perform(MockMvcRequestBuilders.get(urlTemplate, roleEditVO.getSysRole().getId()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.DETAIL_SUCCESS)))
            .andReturn();
}
/**
 * Converts a successful GHResponse into a Mapbox-navigation-style JSON document with
 * "routes", "waypoints", "code" and "uuid" fields.
 *
 * @throws IllegalStateException if the response carries errors (use the error converter instead)
 */
public static ObjectNode convertFromGHResponse(GHResponse ghResponse, TranslationMap translationMap,
                                               Locale locale, DistanceConfig distanceConfig) {
    ObjectNode json = JsonNodeFactory.instance.objectNode();
    if (ghResponse.hasErrors())
        throw new IllegalStateException(
                "If the response has errors, you should use the method NavigateResponseConverter#convertFromGHResponseError");
    PointList waypoints = ghResponse.getBest().getWaypoints();
    final ArrayNode routesJson = json.putArray("routes");
    // One route entry per alternative path.
    List<ResponsePath> paths = ghResponse.getAll();
    for (int i = 0; i < paths.size(); i++) {
        ResponsePath path = paths.get(i);
        ObjectNode pathJson = routesJson.addObject();
        putRouteInformation(pathJson, path, i, translationMap, locale, distanceConfig);
    }
    final ArrayNode waypointsJson = json.putArray("waypoints");
    for (int i = 0; i < waypoints.size(); i++) {
        ObjectNode waypointJson = waypointsJson.addObject();
        // TODO get names
        waypointJson.put("name", "");
        putLocation(waypoints.getLat(i), waypoints.getLon(i), waypointJson);
    }
    json.put("code", "Ok");
    // TODO: Maybe we need a different format... uuid: "cji4ja4f8004o6xrsta8w4p4h"
    json.put("uuid", UUID.randomUUID().toString().replaceAll("-", ""));
    return json;
}
// Voice instructions with imperial units: distances are announced in feet/miles and the
// number of instructions per step depends on the step length.
@Test
public void voiceInstructionsImperialTest() {
    GHResponse rsp = hopper.route(new GHRequest(42.554851, 1.536198, 42.510071, 1.548128).setProfile(profile));
    ObjectNode json = NavigateResponseConverter.convertFromGHResponse(rsp, trMap, Locale.ENGLISH,
            new DistanceConfig(DistanceUtils.Unit.IMPERIAL, trMap, Locale.ENGLISH));
    JsonNode steps = json.get("routes").get(0).get("legs").get(0).get("steps");
    // Step 4 is about 240m long
    JsonNode step = steps.get(4);
    JsonNode maneuver = step.get("maneuver");
    JsonNode voiceInstructions = step.get("voiceInstructions");
    assertEquals(2, voiceInstructions.size());
    JsonNode voiceInstruction = voiceInstructions.get(0);
    assertEquals(200, voiceInstruction.get("distanceAlongGeometry").asDouble(), 1);
    assertEquals("In 600 feet At roundabout, take exit 2 onto CS-340, then At roundabout, take exit 2 onto CG-3",
            voiceInstruction.get("announcement").asText());
    // Step 14 is over 3km long
    step = steps.get(14);
    maneuver = step.get("maneuver");
    voiceInstructions = step.get("voiceInstructions");
    assertEquals(4, voiceInstructions.size());
    voiceInstruction = voiceInstructions.get(0);
    assertEquals(3220, voiceInstruction.get("distanceAlongGeometry").asDouble(), 1);
    assertEquals("In 2 miles keep right", voiceInstruction.get("announcement").asText());
    voiceInstruction = voiceInstructions.get(3);
    assertEquals("keep right", voiceInstruction.get("announcement").asText());
}
/**
 * Builds the spec for a distributed metadata scan of an Iceberg table: a serialized table
 * handle plus one remote split per manifest file matching the (optional) predicate.
 *
 * @param snapshotId          snapshot to read, or -1 for the current snapshot
 * @param serializedPredicate base64-serialized Iceberg {@code Expression}, may be null/empty
 * @param metadataTableType   which metadata table is being scanned; controls split layout
 */
@Override
public SerializedMetaSpec getSerializedMetaSpec(String dbName, String tableName, long snapshotId, String serializedPredicate,
                                                MetadataTableType metadataTableType) {
    List<RemoteMetaSplit> remoteMetaSplits = new ArrayList<>();
    IcebergTable icebergTable = (IcebergTable) getTable(dbName, tableName);
    org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable();
    // -1 means "use the current snapshot"; an empty table has no snapshot at all.
    if (snapshotId == -1) {
        Snapshot currentSnapshot = nativeTable.currentSnapshot();
        if (currentSnapshot == null) {
            return IcebergMetaSpec.EMPTY;
        } else {
            snapshotId = nativeTable.currentSnapshot().snapshotId();
        }
    }
    Snapshot snapshot = nativeTable.snapshot(snapshotId);
    // Deserialize the pushdown predicate if one was supplied; default is "match everything".
    Expression predicate = Expressions.alwaysTrue();
    if (!Strings.isNullOrEmpty(serializedPredicate)) {
        predicate = SerializationUtil.deserializeFromBase64(serializedPredicate);
    }
    // Unwrap the caching FileIO so the serialized table carries the underlying IO
    // (presumably the cache wrapper should not be shipped to remote workers — confirm).
    FileIO fileIO = nativeTable.io();
    if (fileIO instanceof IcebergCachingFileIO) {
        fileIO = ((IcebergCachingFileIO) fileIO).getWrappedIO();
    }
    String serializedTable = SerializationUtil.serializeToBase64(new SerializableTable(nativeTable, fileIO));
    // Some metadata table types are served by a single placeholder split.
    if (IcebergMetaSplit.onlyNeedSingleSplit(metadataTableType)) {
        return new IcebergMetaSpec(serializedTable, List.of(IcebergMetaSplit.placeholderSplit()), false);
    }
    // One split per data manifest that can match the predicate.
    List<ManifestFile> dataManifests = snapshot.dataManifests(nativeTable.io());
    List<ManifestFile> matchingDataManifests = filterManifests(dataManifests, nativeTable, predicate);
    for (ManifestFile file : matchingDataManifests) {
        remoteMetaSplits.add(IcebergMetaSplit.from(file));
    }
    List<ManifestFile> deleteManifests = snapshot.deleteManifests(nativeTable.io());
    List<ManifestFile> matchingDeleteManifests = filterManifests(deleteManifests, nativeTable, predicate);
    // FILES/PARTITIONS metadata tables also scan delete manifests and never load column stats.
    if (metadataTableType == MetadataTableType.FILES || metadataTableType == MetadataTableType.PARTITIONS) {
        for (ManifestFile file : matchingDeleteManifests) {
            remoteMetaSplits.add(IcebergMetaSplit.from(file));
        }
        return new IcebergMetaSpec(serializedTable, remoteMetaSplits, false);
    }
    // Column stats are loaded when explicitly requested by the session, or when equality
    // deletes may be present and the catalog opted in for that case.
    boolean loadColumnStats = enableCollectColumnStatistics(ConnectContext.get())
            || (!matchingDeleteManifests.isEmpty() && mayHaveEqualityDeletes(snapshot)
            && catalogProperties.enableDistributedPlanLoadColumnStatsWithEqDelete());
    return new IcebergMetaSpec(serializedTable, remoteMetaSplits, loadColumnStats);
}
/**
 * Verifies that when equality deletes are present but the catalog disables
 * column-stats loading for that case, the produced meta spec has loadColumnStats == false.
 */
@Test
public void testGetMetaSpecWithDeleteFile(@Mocked LocalMetastore localMetastore, @Mocked TemporaryTableMgr temporaryTableMgr) {
    // Commit one data file, then one positional and one equality delete file.
    mockedNativeTableA.newAppend().appendFile(FILE_A).commit();
    // FILE_A_DELETES = positionalDelete / FILE_A2_DELETES = equalityDelete
    mockedNativeTableA.newRowDelta().addDeletes(FILE_A_DELETES).addDeletes(FILE_A2_DELETES).commit();
    // Any table lookup resolves to the prepared native table.
    new MockUp<IcebergHiveCatalog>() {
        @Mock
        org.apache.iceberg.Table getTable(String dbName, String tableName) throws StarRocksConnectorException {
            return mockedNativeTableA;
        }
    };
    // Disable distributed-plan column stats with equality deletes.
    Map<String, String> copiedMap = new HashMap<>(DEFAULT_CONFIG);
    copiedMap.put(ENABLE_DISTRIBUTED_PLAN_LOAD_DATA_FILE_COLUMN_STATISTICS_WITH_EQ_DELETE, "false");
    IcebergCatalogProperties catalogProperties = new IcebergCatalogProperties(copiedMap);
    IcebergHiveCatalog icebergHiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), DEFAULT_CONFIG);
    CachingIcebergCatalog cachingIcebergCatalog = new CachingIcebergCatalog(
            CATALOG_NAME, icebergHiveCatalog, DEFAULT_CATALOG_PROPERTIES, Executors.newSingleThreadExecutor());
    IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, cachingIcebergCatalog,
            Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), catalogProperties);
    MetadataMgr metadataMgr = new MetadataMgr(localMetastore, temporaryTableMgr, null, null);
    // Route metadata lookups through the metadata instance built above.
    new MockUp<MetadataMgr>() {
        @Mock
        public Optional<ConnectorMetadata> getOptionalMetadata(String catalogName) {
            return Optional.of(metadata);
        }
    };
    // snapshotId == -1 resolves to the current snapshot.
    SerializedMetaSpec metaSpec = metadataMgr.getSerializedMetaSpec(
            "catalog", "db", "tg", -1, null, MetadataTableType.LOGICAL_ICEBERG_METADATA);
    Assert.assertTrue(metaSpec instanceof IcebergMetaSpec);
    IcebergMetaSpec icebergMetaSpec = metaSpec.cast();
    Assert.assertFalse(icebergMetaSpec.loadColumnStats());
}
/**
 * Reports aggregate readiness over all registered module health checkers.
 * Returns "ok" on success, otherwise a failure result carrying the checker's message.
 */
@GetMapping("/readiness")
public Result<String> readiness(HttpServletRequest request) {
    final ReadinessResult readinessResult = ModuleHealthCheckerHolder.getInstance().checkReadiness();
    return readinessResult.isSuccess()
            ? Result.success("ok")
            : Result.failure(readinessResult.getResultMessage());
}
@Test void testReadinessBothFailure() { // Config and Naming are not in readiness Mockito.when(configInfoPersistService.configInfoCount(any(String.class))) .thenThrow(new RuntimeException("HealthControllerV2Test.testReadiness")); Mockito.when(serverStatusManager.getServerStatus()).thenThrow(new RuntimeException("HealthControllerV2Test.testReadiness")); Result<String> result = healthControllerV2.readiness(null); assertEquals(30000, result.getCode().intValue()); assertEquals("naming and config not in readiness", result.getMessage()); }
/**
 * Tries to satisfy each job's missing resource requirements, first from registered
 * (non-blocked) task managers, then from pending ones, allocating new pending task
 * managers through the result builder when neither suffices.
 *
 * @param missingResources per-job requirements that are not yet fulfilled
 * @param taskManagerResourceInfoProvider source of registered/pending task manager state
 * @param blockedTaskManagerChecker filters out blocked task managers from consideration
 */
@Override
public ResourceAllocationResult tryFulfillRequirements(
        Map<JobID, Collection<ResourceRequirement>> missingResources,
        TaskManagerResourceInfoProvider taskManagerResourceInfoProvider,
        BlockedTaskManagerChecker blockedTaskManagerChecker) {
    final ResourceAllocationResult.Builder resultBuilder = ResourceAllocationResult.builder();
    final List<InternalResourceInfo> registeredResources =
            getAvailableResources(
                    taskManagerResourceInfoProvider, resultBuilder, blockedTaskManagerChecker);
    final List<InternalResourceInfo> pendingResources =
            getPendingResources(taskManagerResourceInfoProvider, resultBuilder);
    // Sum of everything currently available or pending; grown below as new pending
    // task managers are allocated, and used for the final redundancy top-up.
    ResourceProfile totalCurrentResources =
            Stream.concat(registeredResources.stream(), pendingResources.stream())
                    .map(internalResourceInfo -> internalResourceInfo.totalProfile)
                    .reduce(ResourceProfile.ZERO, ResourceProfile::merge);
    for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements :
            missingResources.entrySet()) {
        final JobID jobId = resourceRequirements.getKey();
        // First pass: satisfy from already-registered task managers.
        final Collection<ResourceRequirement> unfulfilledJobRequirements =
                tryFulfillRequirementsForJobWithResources(
                        jobId, resourceRequirements.getValue(), registeredResources);
        // Second pass: whatever is left goes to pending (possibly newly created) task
        // managers; the returned profile is added to the running total.
        if (!unfulfilledJobRequirements.isEmpty()) {
            totalCurrentResources =
                    totalCurrentResources.merge(
                            tryFulfillRequirementsForJobWithPendingResources(
                                    jobId, unfulfilledJobRequirements, pendingResources, resultBuilder));
        }
    }
    // Unlike tryFulfillRequirementsForJobWithPendingResources, which updates pendingResources
    // to the latest state after a new PendingTaskManager is created,
    // tryFulFillRequiredResources will not update pendingResources even after new
    // PendingTaskManagers are created.
    // This is because the pendingResources are no longer needed afterward.
    tryFulFillRequiredResources(
            registeredResources, pendingResources, totalCurrentResources, resultBuilder);
    return resultBuilder.build();
}
@Test
void testBlockedTaskManagerCannotFulfillRequirements() {
    final TaskManagerInfo blockedTaskManager =
            new TestingTaskManagerInfo(
                    DEFAULT_SLOT_RESOURCE.multiply(NUM_OF_SLOTS),
                    DEFAULT_SLOT_RESOURCE.multiply(NUM_OF_SLOTS),
                    DEFAULT_SLOT_RESOURCE);
    final JobID jobId = new JobID();
    final TaskManagerResourceInfoProvider resourceInfoProvider =
            TestingTaskManagerResourceInfoProvider.newBuilder()
                    .setRegisteredTaskManagersSupplier(
                            () -> Collections.singleton(blockedTaskManager))
                    .build();
    final List<ResourceRequirement> requirements = new ArrayList<>();
    requirements.add(ResourceRequirement.create(ResourceProfile.UNKNOWN, 2 * NUM_OF_SLOTS));

    // The only registered task manager is reported as blocked, so everything must be
    // fulfilled by newly allocated pending task managers.
    final ResourceAllocationResult result =
            ANY_MATCHING_STRATEGY.tryFulfillRequirements(
                    Collections.singletonMap(jobId, requirements),
                    resourceInfoProvider,
                    blockedTaskManager.getTaskExecutorConnection().getResourceID()::equals);

    assertThat(result.getUnfulfillableJobs()).isEmpty();
    assertThat(result.getAllocationsOnRegisteredResources()).isEmpty();
    assertThat(result.getPendingTaskManagersToAllocate()).hasSize(2);
}
@Override public Iterable<WindowedValue<InputT>> processElementInReadyWindows(WindowedValue<InputT> elem) { if (views.isEmpty()) { // When there are no side inputs, we can preserve the compressed representation. underlying.processElement(elem); return Collections.emptyList(); } ImmutableList.Builder<WindowedValue<InputT>> pushedBack = ImmutableList.builder(); for (WindowedValue<InputT> windowElem : elem.explodeWindows()) { BoundedWindow mainInputWindow = Iterables.getOnlyElement(windowElem.getWindows()); if (isReady(mainInputWindow)) { // When there are any side inputs, we have to process the element in each window // individually, to disambiguate access to per-window side inputs. underlying.processElement(windowElem); } else { notReadyWindows.add(mainInputWindow); pushedBack.add(windowElem); } } return pushedBack.build(); }
@Test public void processElementNoSideInputs() { SimplePushbackSideInputDoFnRunner<Integer, Integer> runner = createRunner(ImmutableList.of()); WindowedValue<Integer> multiWindow = WindowedValue.of( 2, new Instant(-2), ImmutableList.of( new IntervalWindow(new Instant(-500L), new Instant(0L)), new IntervalWindow(BoundedWindow.TIMESTAMP_MIN_VALUE, new Instant(250L)), GlobalWindow.INSTANCE), PaneInfo.ON_TIME_AND_ONLY_FIRING); Iterable<WindowedValue<Integer>> multiWindowPushback = runner.processElementInReadyWindows(multiWindow); assertThat(multiWindowPushback, emptyIterable()); // Should preserve the compressed representation when there's no side inputs. assertThat(underlying.inputElems, containsInAnyOrder(multiWindow)); }
/**
 * Returns whether the given physical table name is one of this rule's actual tables.
 *
 * @param actualTableName actual table name to look up
 * @return true if the name is configured as an actual table of this sharding table
 */
public boolean isExisted(final String actualTableName) {
    return actualTables.contains(actualTableName);
}
@Test
void assertActualTableNameNotExisted() {
    // table_3 lies outside the configured inline range table_${0..2}.
    ShardingTableRuleConfiguration ruleConfig = new ShardingTableRuleConfiguration("LOGIC_TABLE", "ds${0..1}.table_${0..2}");
    ShardingTable shardingTable = new ShardingTable(ruleConfig, Arrays.asList("ds0", "ds1"), null);
    assertFalse(shardingTable.isExisted("table_3"));
}
/**
 * Returns whether the text is a number: after the start position determined by
 * findStartPosition (negative means "not a number"), every character must be a digit.
 */
public static boolean isNumber(String text) {
    final int startPos = findStartPosition(text);
    if (startPos < 0) {
        return false;
    }
    for (int pos = startPos; pos < text.length(); pos++) {
        if (!Character.isDigit(text.charAt(pos))) {
            return false;
        }
    }
    return true;
}
@Test
@DisplayName("Tests that isNumber returns true for integers")
void isNumberIntegers() {
    // Zero, single-digit, multi-digit and negative values are all integers.
    assertTrue(ObjectHelper.isNumber("0"));
    assertTrue(ObjectHelper.isNumber("1"));
    assertTrue(ObjectHelper.isNumber("1234"));
    assertTrue(ObjectHelper.isNumber("-1234"));
}
/**
 * Attempts a simple LDAP bind with the given DN and decrypted password.
 *
 * @param connection active LDAP connection to (re-)bind on
 * @param bindDn     distinguished name to bind as; must be non-empty
 * @param password   encrypted password; must be non-null and set
 * @return true if the bind succeeded and the connection's last bind request is ours;
 *         false if the server rejected the credentials
 * @throws LDAPException on LDAP errors other than a failed bind
 */
public boolean authenticate(LDAPConnection connection, String bindDn, EncryptedValue password) throws LDAPException {
    // Reject empty principals/credentials up front instead of attempting the bind.
    checkArgument(!isNullOrEmpty(bindDn), "Binding with empty principal is forbidden.");
    checkArgument(password != null, "Binding with null credentials is forbidden.");
    checkArgument(password.isSet(), "Binding with empty credentials is forbidden.");
    final SimpleBindRequest bindRequest = new SimpleBindRequest(bindDn, encryptedValueService.decrypt(password));
    LOG.trace("Re-binding with DN <{}> using password", bindDn);
    try {
        final BindResult bind = connection.bind(bindRequest);
        // Non-SUCCESS results that did not raise LDAPBindException are treated as fatal.
        if (!bind.getResultCode().equals(ResultCode.SUCCESS)) {
            LOG.trace("Re-binding DN <{}> failed", bindDn);
            throw new RuntimeException(bind.toString());
        }
        // Double-check the connection's last bind really is the request we just issued.
        final boolean authenticated = connection.getLastBindRequest().equals(bindRequest);
        LOG.trace("Binding DN <{}> did not throw, connection authenticated: {}", bindDn, authenticated);
        return authenticated;
    } catch (LDAPBindException e) {
        // Wrong credentials: report unauthenticated rather than propagating.
        LOG.trace("Re-binding DN <{}> failed", bindDn);
        return false;
    }
}
@Test
public void authenticateThrowsIllegalArgumentExceptionIfCredentialsAreEmpty() throws LDAPException {
    // An unset EncryptedValue must be rejected before any bind is attempted.
    final EncryptedValue emptyCredentials = EncryptedValue.createUnset();
    assertThatThrownBy(() -> connector.authenticate(connection, "principal", emptyCredentials))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("Binding with empty credentials is forbidden.");
}
/**
 * Removes every line-feed character ('\n') from the given string.
 *
 * @param in the string to strip (null handling is delegated to removeChar)
 * @return the input without any line-feed characters
 */
public static String removeLF( String in ) {
  return removeChar( in, '\n' );
}
@Test
public void testRemoveLF() {
    // Null, empty and LF-only inputs all collapse to the empty string.
    assertEquals( "", Const.removeLF( null ) );
    assertEquals( "", Const.removeLF( "" ) );
    assertEquals( "", Const.removeLF( "\n" ) );
    // Carriage returns are kept; only '\n' characters are stripped.
    assertEquals( "foo\r\tbar", Const.removeLF( "foo\r\n\tbar" ) );
    assertEquals( "\r\r", Const.removeLF( "\r\n\r" ) );
    assertEquals( "This \ris \ra test \rof \rthe \remergency \rbroadcast system\r",
        Const.removeLF( "This \r\nis \ra \ntest \rof \n\rthe \r\nemergency \rbroadcast \nsystem\r\n" ) );
}
@Override public void onIssueChanges(QGChangeEvent qualityGateEvent, Set<ChangedIssue> changedIssues) { Optional<EvaluatedQualityGate> evaluatedQualityGate = qualityGateEvent.getQualityGateSupplier().get(); Optional<Metric.Level> previousStatusOptional = qualityGateEvent.getPreviousStatus(); if (evaluatedQualityGate.isEmpty() || previousStatusOptional.isEmpty()) { return; } Metric.Level currentStatus = evaluatedQualityGate.get().getStatus(); Metric.Level previousStatus = previousStatusOptional.get(); if (previousStatus.getColorName().equals(currentStatus.getColorName())) { // QG status didn't change - no action return; } addQualityGateEventToProject(qualityGateEvent, currentStatus); }
@Test
public void onIssueChanges_givenEmptyEvent_doNotInteractWithDatabase() {
    // An event whose quality gate supplier yields nothing must be ignored entirely.
    QGChangeEvent emptyEvent =
            new QGChangeEvent(project, branch, analysis, projectConfiguration, Metric.Level.OK, Optional::empty);

    underTest.onIssueChanges(emptyEvent, Set.of());

    verifyNoInteractions(dbClient);
}
/**
 * Starts this component by delegating to {@code start(boolean)} with {@code true}.
 * NOTE(review): the flag presumably selects the default startup behavior —
 * confirm against the start(boolean) overload.
 */
@Override
public void start() {
    start(true);
}
/**
 * After starting the server, a success info message must eventually be logged
 * and no error messages must appear.
 */
@Test
void ifAnnouncingBackgroundSucceedsStartupMessageIsLogged() {
    backgroundJobServer.start();
    // Startup is asynchronous; poll up to 10 seconds for the expected log output.
    await().atMost(10, SECONDS)
            .untilAsserted(() -> assertThat(logger)
                    .hasInfoMessageContaining("BackgroundJobPerformers started successfully")
                    .hasNoErrorLogMessages());
}
/**
 * Reacts to varbit/varp changes by creating, updating or removing the matching infobox
 * timers and counters. Each block below handles one (var, config toggle) pair; blocks
 * are independent except for the divine-potion section, which uses early returns to let
 * combination potions supersede their component timers.
 */
@Subscribe
public void onVarbitChanged(VarbitChanged event) {
    // Entering/leaving a raid invalidates raid-scoped timers.
    if (event.getVarbitId() == Varbits.IN_RAID) {
        removeVarTimer(OVERLOAD_RAID);
        removeGameTimer(PRAYER_ENHANCE);
    }
    if (event.getVarbitId() == Varbits.VENGEANCE_COOLDOWN && config.showVengeance()) {
        if (event.getValue() == 1) {
            createGameTimer(VENGEANCE);
        } else {
            removeGameTimer(VENGEANCE);
        }
    }
    if (event.getVarbitId() == Varbits.SPELLBOOK_SWAP && config.showSpellbookSwap()) {
        if (event.getValue() == 1) {
            createGameTimer(SPELLBOOK_SWAP);
        } else {
            removeGameTimer(SPELLBOOK_SWAP);
        }
    }
    if (event.getVarbitId() == Varbits.HEAL_GROUP_COOLDOWN && config.showHealGroup()) {
        if (event.getValue() == 1) {
            createGameTimer(HEAL_GROUP);
        } else {
            removeGameTimer(HEAL_GROUP);
        }
    }
    // The Arceuus spell cooldowns below all share one config toggle.
    if (event.getVarbitId() == Varbits.DEATH_CHARGE_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(DEATH_CHARGE_COOLDOWN);
        } else {
            removeGameTimer(DEATH_CHARGE_COOLDOWN);
        }
    }
    if (event.getVarbitId() == Varbits.CORRUPTION_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(CORRUPTION_COOLDOWN);
        } else {
            removeGameTimer(CORRUPTION_COOLDOWN);
        }
    }
    if (event.getVarbitId() == Varbits.RESURRECT_THRALL_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(RESURRECT_THRALL_COOLDOWN);
        } else {
            removeGameTimer(RESURRECT_THRALL_COOLDOWN);
        }
    }
    if (event.getVarbitId() == Varbits.SHADOW_VEIL_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(SHADOW_VEIL_COOLDOWN);
        } else {
            removeGameTimer(SHADOW_VEIL_COOLDOWN);
        }
    }
    if (event.getVarbitId() == Varbits.WARD_OF_ARCEUUS_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
        } else {
            removeGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
        }
    }
    if (event.getVarbitId() == Varbits.VENGEANCE_ACTIVE && config.showVengeanceActive()) {
        updateVarCounter(VENGEANCE_ACTIVE, event.getValue());
    }
    // Active Arceuus effects; death charge duration is derived from the Magic level.
    if (event.getVarbitId() == Varbits.DEATH_CHARGE && config.showArceuus()) {
        if (event.getValue() == 1) {
            createGameTimer(DEATH_CHARGE, Duration.of(client.getRealSkillLevel(Skill.MAGIC), RSTimeUnit.GAME_TICKS));
        } else {
            removeGameTimer(DEATH_CHARGE);
        }
    }
    if (event.getVarbitId() == Varbits.RESURRECT_THRALL && event.getValue() == 0 && config.showArceuus()) {
        removeGameTimer(RESURRECT_THRALL);
    }
    if (event.getVarbitId() == Varbits.SHADOW_VEIL && event.getValue() == 0 && config.showArceuus()) {
        removeGameTimer(SHADOW_VEIL);
    }
    // Poison/venom: track the next damage tick so the timer stays aligned with it.
    if (event.getVarpId() == VarPlayer.POISON && config.showAntiPoison()) {
        final int poisonVarp = event.getValue();
        final int tickCount = client.getTickCount();
        if (poisonVarp == 0) {
            nextPoisonTick = -1;
        } else if (nextPoisonTick - tickCount <= 0) {
            nextPoisonTick = tickCount + POISON_TICK_LENGTH;
        }
        // Values at or below VENOM_VALUE_CUTOFF indicate venom, above it poison —
        // TODO(review): confirm cutoff semantics against the VarPlayer.POISON encoding.
        updateVarTimer(ANTIPOISON, event.getValue(), i -> i >= 0 || i < VENOM_VALUE_CUTOFF,
                i -> nextPoisonTick - tickCount + Math.abs((i + 1) * POISON_TICK_LENGTH));
        updateVarTimer(ANTIVENOM, event.getValue(), i -> i >= VENOM_VALUE_CUTOFF,
                i -> nextPoisonTick - tickCount + Math.abs((i + 1 - VENOM_VALUE_CUTOFF) * POISON_TICK_LENGTH));
    }
    // Overload (NMZ or CoX variant depending on raid state), aligned to its refresh tick.
    if ((event.getVarbitId() == Varbits.NMZ_OVERLOAD_REFRESHES_REMAINING
            || event.getVarbitId() == Varbits.COX_OVERLOAD_REFRESHES_REMAINING) && config.showOverload()) {
        final int overloadVarb = event.getValue();
        final int tickCount = client.getTickCount();
        if (overloadVarb <= 0) {
            nextOverloadRefreshTick = -1;
        } else if (nextOverloadRefreshTick - tickCount <= 0) {
            nextOverloadRefreshTick = tickCount + OVERLOAD_TICK_LENGTH;
        }
        GameTimer overloadTimer = client.getVarbitValue(Varbits.IN_RAID) == 1 ? OVERLOAD_RAID : OVERLOAD;
        updateVarTimer(overloadTimer, overloadVarb, i -> nextOverloadRefreshTick - tickCount + (i - 1) * OVERLOAD_TICK_LENGTH);
    }
    if (event.getVarbitId() == Varbits.TELEBLOCK && config.showTeleblock()) {
        // The raw value carries a +100 offset; the timer only shows while the result is positive.
        updateVarTimer(TELEBLOCK, event.getValue() - 100, i -> i <= 0, IntUnaryOperator.identity());
    }
    if (event.getVarpId() == VarPlayer.CHARGE_GOD_SPELL && config.showCharge()) {
        updateVarTimer(CHARGE, event.getValue(), i -> i * 2);
    }
    if (event.getVarbitId() == Varbits.IMBUED_HEART_COOLDOWN && config.showImbuedHeart()) {
        updateVarTimer(IMBUEDHEART, event.getValue(), i -> i * 10);
    }
    if (event.getVarbitId() == Varbits.DRAGONFIRE_SHIELD_COOLDOWN && config.showDFSSpecial()) {
        updateVarTimer(DRAGON_FIRE_SHIELD, event.getValue(), i -> i * 8);
    }
    if (event.getVarpId() == LAST_HOME_TELEPORT && config.showHomeMinigameTeleports()) {
        checkTeleport(LAST_HOME_TELEPORT);
    }
    if (event.getVarpId() == LAST_MINIGAME_TELEPORT && config.showHomeMinigameTeleports()) {
        checkTeleport(LAST_MINIGAME_TELEPORT);
    }
    // Stamina: combined potion + endurance-ring effect, only while depletion is slowed.
    if (event.getVarbitId() == Varbits.RUN_SLOWED_DEPLETION_ACTIVE
            || event.getVarbitId() == Varbits.STAMINA_EFFECT
            || event.getVarbitId() == Varbits.RING_OF_ENDURANCE_EFFECT) {
        // staminaEffectActive is checked to match https://github.com/Joshua-F/cs2-scripts/blob/741271f0c3395048c1bad4af7881a13734516adf/scripts/%5Bproc%2Cbuff_bar_get_value%5D.cs2#L25
        int staminaEffectActive = client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE);
        int staminaPotionEffectVarb = client.getVarbitValue(Varbits.STAMINA_EFFECT);
        int enduranceRingEffectVarb = client.getVarbitValue(Varbits.RING_OF_ENDURANCE_EFFECT);
        final int totalStaminaEffect = staminaPotionEffectVarb + enduranceRingEffectVarb;
        if (staminaEffectActive == 1 && config.showStamina()) {
            updateVarTimer(STAMINA, totalStaminaEffect, i -> i * 10);
        }
    }
    // Antifire variants, each aligned to its own periodic tick.
    if (event.getVarbitId() == Varbits.ANTIFIRE && config.showAntiFire()) {
        final int antifireVarb = event.getValue();
        final int tickCount = client.getTickCount();
        if (antifireVarb == 0) {
            nextAntifireTick = -1;
        } else if (nextAntifireTick - tickCount <= 0) {
            nextAntifireTick = tickCount + ANTIFIRE_TICK_LENGTH;
        }
        updateVarTimer(ANTIFIRE, antifireVarb, i -> nextAntifireTick - tickCount + (i - 1) * ANTIFIRE_TICK_LENGTH);
    }
    if (event.getVarbitId() == Varbits.SUPER_ANTIFIRE && config.showAntiFire()) {
        final int superAntifireVarb = event.getValue();
        final int tickCount = client.getTickCount();
        if (superAntifireVarb == 0) {
            nextSuperAntifireTick = -1;
        } else if (nextSuperAntifireTick - tickCount <= 0) {
            nextSuperAntifireTick = tickCount + SUPERANTIFIRE_TICK_LENGTH;
        }
        updateVarTimer(SUPERANTIFIRE, event.getValue(), i -> nextSuperAntifireTick - tickCount + (i - 1) * SUPERANTIFIRE_TICK_LENGTH);
    }
    if (event.getVarbitId() == Varbits.MAGIC_IMBUE && config.showMagicImbue()) {
        updateVarTimer(MAGICIMBUE, event.getValue(), i -> i * 10);
    }
    // Divine potions: a combination potion (super combat / bastion / battlemage) with a
    // longer remaining duration supersedes its components, so those updates early-return.
    if (event.getVarbitId() == Varbits.DIVINE_SUPER_ATTACK && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()) {
            return;
        }
        updateVarTimer(DIVINE_SUPER_ATTACK, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_SUPER_STRENGTH && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()) {
            return;
        }
        updateVarTimer(DIVINE_SUPER_STRENGTH, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_SUPER_DEFENCE && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()
                || client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue()
                || client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue()
                // When drinking a dose of moonlight potion while already under its effects, desync between
                // Varbits.MOONLIGHT_POTION and Varbits.DIVINE_SUPER_DEFENCE can occur, with the latter being 1 tick
                // greater
                || client.getVarbitValue(Varbits.MOONLIGHT_POTION) >= event.getValue()) {
            return;
        }
        if (client.getVarbitValue(Varbits.MOONLIGHT_POTION) < event.getValue()) {
            removeVarTimer(MOONLIGHT_POTION);
        }
        updateVarTimer(DIVINE_SUPER_DEFENCE, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_RANGING && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue()) {
            return;
        }
        updateVarTimer(DIVINE_RANGING, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_MAGIC && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue()) {
            return;
        }
        updateVarTimer(DIVINE_MAGIC, event.getValue(), IntUnaryOperator.identity());
    }
    // A combination potion replaces any component timer expiring at the same value.
    if (event.getVarbitId() == Varbits.DIVINE_SUPER_COMBAT && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_ATTACK) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_ATTACK);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_STRENGTH) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_STRENGTH);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }
        updateVarTimer(DIVINE_SUPER_COMBAT, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_BASTION && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_RANGING) == event.getValue()) {
            removeVarTimer(DIVINE_RANGING);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }
        updateVarTimer(DIVINE_BASTION, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.DIVINE_BATTLEMAGE && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_MAGIC) == event.getValue()) {
            removeVarTimer(DIVINE_MAGIC);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }
        updateVarTimer(DIVINE_BATTLEMAGE, event.getValue(), IntUnaryOperator.identity());
    }
    if (event.getVarbitId() == Varbits.BUFF_STAT_BOOST && config.showOverload()) {
        updateVarTimer(SMELLING_SALTS, event.getValue(), i -> i * 25);
    }
    if (event.getVarbitId() == Varbits.MENAPHITE_REMEDY && config.showMenaphiteRemedy()) {
        updateVarTimer(MENAPHITE_REMEDY, event.getValue(), i -> i * 25);
    }
    if (event.getVarbitId() == Varbits.LIQUID_ADERNALINE_ACTIVE && event.getValue() == 0 && config.showLiquidAdrenaline()) {
        removeGameTimer(LIQUID_ADRENALINE);
    }
    if (event.getVarbitId() == Varbits.FARMERS_AFFINITY && config.showFarmersAffinity()) {
        updateVarTimer(FARMERS_AFFINITY, event.getValue(), i -> i * 20);
    }
    if (event.getVarbitId() == Varbits.GOD_WARS_ALTAR_COOLDOWN && config.showGodWarsAltar()) {
        updateVarTimer(GOD_WARS_ALTAR, event.getValue(), i -> i * 100);
    }
    // Curse of the Moons shows a different counter depending on the player's region.
    if (event.getVarbitId() == Varbits.CURSE_OF_THE_MOONS && config.showCurseOfTheMoons()) {
        final int regionID = WorldPoint.fromLocal(client, client.getLocalPlayer().getLocalLocation()).getRegionID();
        if (regionID == ECLIPSE_MOON_REGION_ID) {
            updateVarCounter(CURSE_OF_THE_MOONS_ECLIPSE, event.getValue());
        } else {
            updateVarCounter(CURSE_OF_THE_MOONS_BLUE, event.getValue());
        }
    }
    if (event.getVarbitId() == Varbits.COLOSSEUM_DOOM && config.showColosseumDoom()) {
        updateVarCounter(COLOSSEUM_DOOM, event.getValue());
    }
    if (event.getVarbitId() == Varbits.MOONLIGHT_POTION && config.showMoonlightPotion()) {
        int moonlightValue = event.getValue();
        // Increase the timer by 1 tick in case of desync due to drinking a dose of moonlight potion while already
        // under its effects. Otherwise, the timer would be 1 tick shorter than it is meant to be.
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == moonlightValue + 1) {
            moonlightValue++;
        }
        updateVarTimer(MOONLIGHT_POTION, moonlightValue, IntUnaryOperator.identity());
    }
}
@Test
public void testDivineRangingAfterBastion() {
    when(timersAndBuffsConfig.showDivine()).thenReturn(true);
    // The bastion varbit (400) is below the incoming ranging value (500),
    // so the divine ranging timer must still be created.
    when(client.getVarbitValue(Varbits.DIVINE_BASTION)).thenReturn(400);

    VarbitChanged rangingChange = new VarbitChanged();
    rangingChange.setVarbitId(Varbits.DIVINE_RANGING);
    rangingChange.setValue(500);
    timersAndBuffsPlugin.onVarbitChanged(rangingChange);

    ArgumentCaptor<InfoBox> infoBoxCaptor = ArgumentCaptor.forClass(InfoBox.class);
    verify(infoBoxManager).addInfoBox(infoBoxCaptor.capture());
    TimerTimer createdTimer = (TimerTimer) infoBoxCaptor.getValue();
    assertEquals(GameTimer.DIVINE_RANGING, createdTimer.getTimer());
}
/**
 * Returns the hexadecimal digits of pi starting at position d, via the
 * Bailey-Borwein-Plouffe formula. Floating-point operation order is significant
 * and is preserved exactly.
 *
 * @throws IllegalArgumentException if d is negative or exceeds IMPLEMENTATION_LIMIT
 */
static long hexDigits(final long d) {
    if (d < 0) {
        throw new IllegalArgumentException("d = " + d + " < 0");
    }
    if (d > IMPLEMENTATION_LIMIT) {
        throw new IllegalArgumentException("d = " + d + " > IMPLEMENTATION_LIMIT = " + IMPLEMENTATION_LIMIT);
    }

    // Partial sums of the four BBP series terms at position d.
    final double sum1 = sum(1, d);
    final double sum4 = sum(4, d);
    final double sum5 = sum(5, d);
    final double sum6 = sum(6, d);

    // Combine as 4*S1 - 2*S4 - S5 - S6, renormalizing each step into [0, 1).
    double fraction = sum1 + sum1;
    if (fraction >= 1) {
        fraction--;
    }
    fraction *= 2;
    if (fraction >= 1) {
        fraction--;
    }
    fraction -= sum4;
    if (fraction < 0) {
        fraction++;
    }
    fraction -= sum4;
    if (fraction < 0) {
        fraction++;
    }
    fraction -= sum5;
    if (fraction < 0) {
        fraction++;
    }
    fraction -= sum6;
    if (fraction < 0) {
        fraction++;
    }
    return (long) (fraction * BBP_MULTIPLIER);
}
@Test
void testHexDigit() {
    // Position 0 has its own well-known value.
    assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));

    // Check positions 1, 10, 100, ... against precomputed answers.
    final long[] expected = {0x43F6, 0xA308, 0x29B7, 0x49F1, 0x8AC8, 0x35EA};
    long position = 1;
    for (long answer : expected) {
        assertEquals(answer, BaileyBorweinPlouffe.hexDigits(position), "d=" + position);
        position *= 10;
    }
}
/**
 * Creates the sharding strategy matching the given configuration/algorithm pair,
 * falling back to the no-op strategy when nothing matches (including nulls).
 */
@SuppressWarnings("rawtypes")
public static ShardingStrategy newInstance(final ShardingStrategyConfiguration shardingStrategyConfig,
                                           final ShardingAlgorithm shardingAlgorithm, final String defaultShardingColumn) {
    // Standard strategy: single sharding column, defaulting when unset.
    if (shardingStrategyConfig instanceof StandardShardingStrategyConfiguration && shardingAlgorithm instanceof StandardShardingAlgorithm) {
        String configuredColumn = ((StandardShardingStrategyConfiguration) shardingStrategyConfig).getShardingColumn();
        String shardingColumn = null == configuredColumn ? defaultShardingColumn : configuredColumn;
        return new StandardShardingStrategy(shardingColumn, (StandardShardingAlgorithm) shardingAlgorithm);
    }
    // Complex strategy: multiple sharding columns.
    if (shardingStrategyConfig instanceof ComplexShardingStrategyConfiguration && shardingAlgorithm instanceof ComplexKeysShardingAlgorithm) {
        String shardingColumns = ((ComplexShardingStrategyConfiguration) shardingStrategyConfig).getShardingColumns();
        return new ComplexShardingStrategy(shardingColumns, (ComplexKeysShardingAlgorithm) shardingAlgorithm);
    }
    // Hint strategy: sharding values supplied externally, no columns involved.
    if (shardingStrategyConfig instanceof HintShardingStrategyConfiguration && shardingAlgorithm instanceof HintShardingAlgorithm) {
        return new HintShardingStrategy((HintShardingAlgorithm) shardingAlgorithm);
    }
    return new NoneShardingStrategy();
}
/**
 * A null configuration and algorithm must fall through to the no-op strategy.
 */
@Test
void assertNewInstanceForNoneShardingStrategy() {
    assertThat(ShardingStrategyFactory.newInstance(null, null, null), instanceOf(NoneShardingStrategy.class));
}
/**
 * Returns the logging configuration held by this instance.
 *
 * @return the current logging configuration
 */
public LoggingConfiguration getLoggingConfiguration() {
    return loggingConfig;
}
@Test
public void loggingConfigurationShouldBeEnabledByDefault() {
    // A freshly created batch must always expose a logging configuration.
    LoggingConfiguration loggingConfiguration = newBatch().getLoggingConfiguration();
    assertNotNull(loggingConfiguration);
}
@Override public void checkClientTrusted( X509Certificate[] chain, String authType ) throws CertificateException { // Find and use the end entity as the selector for verification. final X509Certificate endEntityCert = CertificateUtils.identifyEndEntityCertificate( Arrays.asList( chain ) ); final X509CertSelector selector = new X509CertSelector(); selector.setCertificate( endEntityCert ); try { checkChainTrusted( selector, chain ); } catch ( InvalidAlgorithmParameterException | NoSuchAlgorithmException | CertPathBuilderException ex ) { throw new CertificateException( ex ); } }
/**
 * A chain containing an expired intermediate certificate must be rejected.
 */
@Test
public void testInvalidChainExpiredIntermediate() throws Exception
{
    // Setup fixture.
    // Execute system under test.
    assertThrows(CertificateException.class, () -> systemUnderTest.checkClientTrusted( expiredIntChain, "RSA" ) );
}
/**
 * FEEL string() built-in: null maps to a null result, anything else to its
 * formatted string representation (unmasked).
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    return val == null
            ? FEELFnResult.ofResult( null )
            : FEELFnResult.ofResult( TypeUtil.formatValue(val, false) );
}
/**
 * A null first argument (presumably the mask/format string — confirm against the
 * two-argument invoke overload) must produce an InvalidParametersEvent regardless
 * of the parameters array.
 */
@Test
void invokeMaskNull() {
    FunctionTestUtil.assertResultError(stringFunction.invoke((String) null, null), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(stringFunction.invoke((String) null, new Object[]{}), InvalidParametersEvent.class);
}
/**
 * Creates a pagination builder positioned at the given page.
 *
 * @param page the page number to paginate from
 * @return a new Builder for the given page
 */
public static Builder forPage(int page) {
    return new Builder(page);
}
@Test
void andSize_fails_with_IAE_if_size_is_less_than_0() {
    Pagination.Builder builder = forPage(1);
    // Any strictly negative size must be rejected.
    int negativeSize = -Math.abs(new Random().nextInt()) - 1;
    assertThatThrownBy(() -> builder.andSize(negativeSize))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("page size must be >= 1");
}
/**
 * Views the array under test as a List so iterable assertions can be applied.
 * Fails the assertion if the actual array is null.
 */
public IterableSubject asList() {
    return checkNoNeedToDisplayBothValues("asList()").that(Arrays.asList(checkNotNull(actual)));
}
@Test
public void multiDimensionalStringArrayAsList() {
    // Elements of the outer array are the inner arrays themselves (reference semantics).
    String[] firstRow = {"A", "B"};
    String[][] grid = {firstRow, {"C"}};
    assertThat(grid).asList().contains(firstRow);
}
/**
 * Looks up the value stored under the given key.
 *
 * @param key the key to look up
 * @return the stored value, or null when the key is absent
 * @throws KettleValueException declared for interface compatibility
 */
public Long get( long key ) throws KettleValueException {
    // Walk the collision chain of the bucket selected by the key's hash.
    final int hashCode = generateHashCode( key );
    final int indexPointer = indexFor( hashCode, index.length );
    for ( LongHashIndexEntry entry = index[indexPointer]; entry != null; entry = entry.nextEntry ) {
        if ( entry.hashCode == hashCode && entry.equalsKey( key ) ) {
            return entry.value;
        }
    }
    return null;
}
@Test
public void testGet() throws KettleValueException {
    // A stored key must be retrievable with its original value.
    LongHashIndex index = new LongHashIndex();
    index.put( 1L, 1L );
    // Fixed typo in the assertion message ("uncorrect" -> "incorrect").
    assertThat( "Element has incorrect value.", index.get( 1L ), equalTo( 1L ) );
}
/**
 * Builds the Netty client-side SslContext with the configured provider,
 * cipher suites/filter and protocols.
 *
 * @throws RuntimeException wrapping any failure while building the context
 */
public SslContext getClientSslContext() {
    try {
        SslContextBuilder contextBuilder = SslContextBuilder.forClient()
                .sslProvider(chooseSslProvider())
                .ciphers(getCiphers(), getCiphersFilter())
                .protocols(getProtocols());
        return contextBuilder.build();
    } catch (Exception e) {
        // Log and rethrow with the cause preserved for diagnosis.
        log.error("Error loading SslContext client request.", e);
        throw new RuntimeException("Error configuring SslContext for client request!", e);
    }
}
@Test
void testGetSslContext() {
    ClientSslContextFactory factory = new ClientSslContextFactory(new DefaultRegistry());

    SslContext clientContext = factory.getClientSslContext();

    // Must be an OpenSSL-backed, client-side (not server-side) context.
    assertThat(clientContext).isInstanceOf(OpenSslClientContext.class);
    assertThat(clientContext.isServer()).isFalse();
    assertThat(clientContext.isClient()).isTrue();

    // Session cache defaults.
    SSLSessionContext sessions = clientContext.sessionContext();
    assertThat(sessions.getSessionTimeout()).isEqualTo(300);
    assertThat(sessions.getSessionCacheSize()).isEqualTo(20480);
}