Dataset columns: `focal_method` — strings of length 13 to 60.9k characters; `test_case` — strings of length 25 to 109k characters.
// Returns the configured connect timeout, falling back to DEFAULT_TIMEOUT when the
// CONNECT_TIMEOUT_PROPERTY is absent or unparseable (safelyParseLongValue yields empty then).
@Override public long getConnectTimeout() { return safelyParseLongValue(CONNECT_TIMEOUT_PROPERTY).orElse(DEFAULT_TIMEOUT); }
// Verifies the timeout property is read back verbatim; uses a random value so the test
// cannot accidentally pass by matching the default.
@Test public void getConnectTimeout_returns_value_of_property() { long expected = new Random().nextInt(9_456_789); settings.setProperty("sonar.alm.timeout.connect", expected); assertThat(underTest.getConnectTimeout()).isEqualTo(expected); }
/**
 * Looks up the pluggable artifact config whose id equals {@code artifactId}.
 *
 * @param artifactId the id to search for
 * @return the first matching config, or {@code null} when no config carries that id
 */
public PluggableArtifactConfig findByArtifactId(String artifactId) {
    return getPluggableArtifactConfigs().stream()
            .filter(candidate -> candidate.getId().equals(artifactId))
            .findFirst()
            .orElse(null);
}
@Test
public void findByArtifactId_shouldReturnNullWhenPluggableArtifactConfigNotExistWithGivenId() {
    final ArtifactTypeConfigs configs = new ArtifactTypeConfigs();
    configs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
    configs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
    // "foo" matches neither registered artifact id, so the lookup must miss.
    assertNull(configs.findByArtifactId("foo"));
}
// Annotates the generated field with @Pattern(regexp=<schema pattern>) when JSR-303 annotations
// are enabled and the field type is applicable; picks the Jakarta or javax variant based on config.
// Returns the (possibly annotated) field unchanged otherwise.
@Override public JFieldVar apply(String nodeName, JsonNode node, JsonNode parent, JFieldVar field, Schema currentSchema) { if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations() && isApplicableType(field)) { final Class<? extends Annotation> patternClass = ruleFactory.getGenerationConfig().isUseJakartaValidation() ? Pattern.class : javax.validation.constraints.Pattern.class; JAnnotationUse annotation = field.annotate(patternClass); annotation.param("regexp", node.asText()); } return field; }
// Parameterized test: patternClass/fieldClass/isApplicable are presumably fixture fields set per
// parameter set (not visible here — TODO confirm). Verifies the @Pattern annotation is added with
// the schema's regexp exactly when the field type is applicable, and never otherwise.
@Test public void testRegex() { when(config.isIncludeJsr303Annotations()).thenReturn(true); final String patternValue = "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"; when(node.asText()).thenReturn(patternValue); when(fieldVar.annotate(patternClass)).thenReturn(annotation); when(fieldVar.type().boxify().fullName()).thenReturn(fieldClass.getTypeName()); JFieldVar result = rule.apply("node", node, null, fieldVar, null); assertSame(fieldVar, result); verify(fieldVar, times(isApplicable ? 1 : 0)).annotate(patternClass); verify(annotation, times(isApplicable ? 1 : 0)).param("regexp", patternValue); }
// Grows the internal buffer so at least `len` bytes are writable. Existing buffer: new capacity is
// max(double current, current+len, firstGrowthSize), preserving contents via Arrays.copyOf.
// No buffer yet: allocate initialSize, or len*2 when len already exceeds half of initialSize.
// NOTE(review): `len * 2` / `length << 1` can overflow int for very large requests — presumably
// callers never request near-2GB buffers; confirm before relying on it.
final void ensureAvailable(int len) { if (available() < len) { if (buffer != null) { int newCap = Math.max(Math.max(buffer.length << 1, buffer.length + len), firstGrowthSize); buffer = Arrays.copyOf(buffer, newCap); } else { buffer = new byte[len > initialSize / 2 ? len * 2 : initialSize]; } } }
// With no buffer and a small request, allocation takes the initialSize path; the fixture's
// initialSize is presumably 10 (asserted length) — confirm against the fixture's constructor.
@Test public void testEnsureAvailable_smallLen() { out.buffer = null; out.ensureAvailable(1); assertEquals(10, out.buffer.length); }
/**
 * Decorates the context for the divide (plain HTTP) plugin: the request path doubles as both
 * the logical method and the real URL, and the module falls back to "divide-http" when the
 * metadata carries no app name.
 */
@Override
public ShenyuContext decorator(final ShenyuContext shenyuContext, final MetaData metaData) {
    final String requestPath = shenyuContext.getPath();
    shenyuContext.setMethod(requestPath);
    shenyuContext.setRealUrl(requestPath);
    shenyuContext.setRpcType(RpcTypeEnum.HTTP.getName());
    final String module = Optional.ofNullable(metaData)
            .map(MetaData::getAppName)
            .orElse(String.format("%s-%s", PluginEnum.DIVIDE.getName(), shenyuContext.getRpcType()));
    shenyuContext.setModule(module);
    return shenyuContext;
}
// Verifies the divide decorator copies the path into both method and realUrl.
// NOTE(review): bare `assert` statements are silently skipped unless the JVM runs with -ea;
// consider the test framework's assertEquals instead — confirm the build enables assertions.
@Test public void decoratorTest() { MetaData metaData = new MetaData(); ShenyuContext shenyuContext = new ShenyuContext(); shenyuContext.setPath(MOCK_CONTEXT_PATH); shenyuContext.setHttpMethod(MOCK_CONTEXT_PATH); ShenyuContext decorator = divideShenyuContextDecorator.decorator(shenyuContext, metaData); assert MOCK_CONTEXT_PATH.equals(decorator.getMethod()); assert MOCK_CONTEXT_PATH.equals(decorator.getRealUrl()); }
/**
 * Closes every element of the collection, suppressing individual close failures via
 * {@code closeQuietly}. A {@code null} collection is treated as "nothing to close".
 *
 * @param collection the resources to close; may be {@code null}
 */
public static void closeAllQuietly(Collection<? extends AutoCloseable> collection) {
    if (collection != null) {
        collection.forEach(resource -> closeQuietly(resource));
    }
}
// Null-tolerance check: passing a null collection must be a silent no-op (no NPE thrown).
@Test public void test_closeAllQuietly_whenNullCollection() { closeAllQuietly(null); }
// Convenience overload: parses with an effectively unbounded missing-resource default
// (Long.MAX_VALUE) by delegating to the two-argument variant.
public static ConfigurableResource parseResourceConfigValue(String value) throws AllocationConfigurationException { return parseResourceConfigValue(value, Long.MAX_VALUE); }
// Negative memory-mb must be rejected; expectNegativeValueOfResource presumably arms an
// expected-exception rule for the "memory" resource — confirm against the test fixture.
@Test public void testParseNewStyleResourceWithCustomResourceMemoryNegative() throws Exception { expectNegativeValueOfResource("memory"); parseResourceConfigValue("vcores=2,memory-mb=-5120,test1=4"); }
public static int getInt(String key, int def) { String value = get(key); if (value == null) { return def; } value = value.trim(); try { return Integer.parseInt(value); } catch (Exception e) { // Ignore } logger.warn( "Unable to parse the integer system property '{}':{} - using the default value: {}", key, value, def ); return def; }
// A non-numeric property value must fall back to the supplied default.
// Fix: the original left the "key" system property set after the test, leaking state into
// any later test reading the same property; clear it in a finally block.
@Test public void getIntDefaultValueWithPropertValueIsNotInt() {
    System.setProperty("key", "NotInt");
    try {
        assertEquals(1, SystemPropertyUtil.getInt("key", 1));
    } finally {
        System.clearProperty("key");
    }
}
/**
 * Copies both config maps into {@code props}. Entries from {@code clientConf} are applied
 * second, so client-level settings overwrite common settings with the same key.
 *
 * @param props      destination properties (mutated in place)
 * @param commonConf shared configuration applied first
 * @param clientConf client-specific configuration applied last (wins on key collisions)
 */
public static void addConfigsToProperties(
    Properties props, Map<String, String> commonConf, Map<String, String> clientConf) {
    commonConf.forEach(props::setProperty);
    clientConf.forEach(props::setProperty);
}
// Precedence check: the client-level acks=0 must override both the pre-existing acks=all
// and the common-level acks=1, while unrelated keys (bootstrap.servers) survive untouched.
@Test public void testClientConfigOverwritesBothDefaultAndCommonConfigs() { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.ACKS_CONFIG, "all"); Properties resultProps = new Properties(); resultProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); resultProps.put(ProducerConfig.ACKS_CONFIG, "0"); WorkerUtils.addConfigsToProperties( props, Collections.singletonMap(ProducerConfig.ACKS_CONFIG, "1"), Collections.singletonMap(ProducerConfig.ACKS_CONFIG, "0")); assertEquals(resultProps, props); }
// Imports CloudMounter bookmarks from a plist file. Flow: deserialize the plist (throwing when
// the file is unreadable/invalid), walk the "CustomPluginSettings" array, map each entry's
// "MountFSClassName" to a known protocol type (unknown classes are logged and skipped), then
// build a Host from the "MountFSOptions" sub-dict (host/login/remotePath) and the bookmark label.
// Entries with missing options or a blank hostname are silently skipped.
@Override protected void parse(final ProtocolFactory protocols, final Local file) throws AccessDeniedException { final NSDictionary serialized = NSDictionary.dictionaryWithContentsOfFile(file.getAbsolute()); if(null == serialized) { throw new LocalAccessDeniedException(String.format("Invalid bookmark file %s", file)); } final List<NSDictionary> array = new PlistDeserializer(serialized).listForKey("CustomPluginSettings"); if(null == array) { log.warn("Missing key CustomPluginSettings"); return; } for(NSDictionary dict : array) { final PlistDeserializer bookmark = new PlistDeserializer(dict); final String identifier = bookmark.stringForKey("MountFSClassName"); if(StringUtils.isBlank(identifier)) { log.warn("Missing key MountFSClassName"); continue; } final Protocol protocol; switch(identifier) { case "FtpConnection": protocol = protocols.forType(Protocol.Type.ftp); break; case "WebDAVConnection": protocol = protocols.forType(Protocol.Type.dav); break; case "OpenStackConnection": protocol = protocols.forType(Protocol.Type.swift); break; case "BBConnection": protocol = protocols.forType(Protocol.Type.b2); break; case "S3Connection": protocol = protocols.forType(Protocol.Type.s3); break; case "DropboxConnection": protocol = protocols.forType(Protocol.Type.dropbox); break; case "GDriveConnection": protocol = protocols.forType(Protocol.Type.googledrive); break; default: protocol = null; break; } if(null == protocol) { log.warn(String.format("Unable to determine protocol for %s", identifier)); continue; } final NSDictionary details = bookmark.objectForKey("MountFSOptions"); if(null == details) { continue; } final PlistDeserializer options = new PlistDeserializer(details); final String hostname = options.stringForKey("host"); if(StringUtils.isBlank(hostname)) { continue; } final Host host = new Host(protocol, hostname, new Credentials(options.stringForKey("login"))); host.setNickname(bookmark.stringForKey("MountFSLabel")); 
host.setDefaultPath(options.stringForKey("remotePath")); this.add(host); } }
// A non-existent bookmark file must surface as AccessDeniedException (invalid plist path).
@Test(expected = AccessDeniedException.class) public void testParseNotFound() throws Exception { new CloudMounterBookmarkCollection().parse(new ProtocolFactory(Collections.emptySet()), new Local(System.getProperty("java.io.tmpdir"), "f")); }
/**
 * Converts the rule configuration into its YAML representation: tables are copied over and
 * the default data source is set only when present (left null otherwise).
 */
@Override
public YamlSingleRuleConfiguration swapToYamlConfiguration(final SingleRuleConfiguration data) {
    final YamlSingleRuleConfiguration yamlConfig = new YamlSingleRuleConfiguration();
    yamlConfig.getTables().addAll(data.getTables());
    data.getDefaultDataSource().ifPresent(yamlConfig::setDefaultDataSource);
    return yamlConfig;
}
// With no default data source configured, the swapped YAML config must leave the field null.
@Test void assertSwapToYamlWithoutDataSource() { assertNull(new YamlSingleRuleConfigurationSwapper().swapToYamlConfiguration(new SingleRuleConfiguration()).getDefaultDataSource()); }
/**
 * Parses a metric name of the form {@code base} or {@code base<delim>k=v<label-delim>k=v...}.
 *
 * @param metricName the raw metric name; empty or blank-base names yield {@code empty()}
 * @return the parsed name; when the label section is present but malformed
 *         (IllegalArgumentException from the map splitter), the labels are dropped and
 *         only the base name is returned
 */
public static Optional<ParsedMetricName> parseMetricName(String metricName) {
    if (metricName.isEmpty()) {
        return Optional.empty();
    }
    // limit(2) keeps everything after the first delimiter intact as the label section.
    List<String> metricNameSplit = Splitter.on(METRIC_NAME_DELIMITER).limit(2).splitToList(metricName);
    // isEmpty() over size() == 0 (idiomatic); the empty-list arm is defensive — a non-empty
    // input always yields at least one segment.
    if (metricNameSplit.isEmpty() || metricNameSplit.get(0).isEmpty()) {
        return Optional.empty();
    }
    if (metricNameSplit.size() == 1) {
        return Optional.of(ParsedMetricName.create(metricNameSplit.get(0)));
    }
    Splitter.MapSplitter splitter =
        Splitter.on(LABEL_DELIMITER).omitEmptyStrings().withKeyValueSeparator(METRIC_KV_DELIMITER);
    try {
        Map<String, String> labels = splitter.split(metricNameSplit.get(1));
        return Optional.of(ParsedMetricName.create(metricNameSplit.get(0), labels));
    } catch (IllegalArgumentException e) {
        // Malformed label section: fall back to the bare base name rather than failing.
        return Optional.of(ParsedMetricName.create(metricNameSplit.get(0)));
    }
}
// Round-trip: a builder-produced name with no labels parses back to just the base name.
@Test public void testParseMetricName_noLabels() { String baseMetricName = "baseMetricName"; LabeledMetricNameUtils.MetricNameBuilder builder = LabeledMetricNameUtils.MetricNameBuilder.baseNameBuilder(baseMetricName); String metricName = builder.build("namespace").getName(); Optional<LabeledMetricNameUtils.ParsedMetricName> parsedName = LabeledMetricNameUtils.parseMetricName(metricName); LabeledMetricNameUtils.ParsedMetricName expectedParsedName = LabeledMetricNameUtils.ParsedMetricName.create(baseMetricName); assertThat(parsedName.isPresent(), equalTo(true)); assertThat(parsedName.get(), equalTo(expectedParsedName)); assertThat(parsedName.get().getBaseName(), equalTo(baseMetricName)); }
// Convenience overload: delegates to the four-argument variant with a null final argument
// (presumably an optional expiry/claims parameter — confirm against the full signature).
public static String generateToken(final String userName, final String key, final String clientId) { return generateToken(userName, key, clientId, null); }
// Generated token must be non-null and carry the issuer (userName) and clientId claims back out.
@Test public void testGenerateToken() { String token = JwtUtils.generateToken("userName", KEY, "clientId"); assertThat(token, notNullValue()); assertThat(JwtUtils.getIssuer(token), is("userName")); assertThat(JwtUtils.getClientId(token), is("clientId")); }
// Decides whether the SELECT must go through SQL federation. Ordering matters:
// 1) no sharded tables involved -> false (and no data nodes recorded);
// 2) record the involved data nodes;
// 3) subquery/HAVING/COMBINE/partial-distinct-aggregation always federate;
// 4) no join, or all tables co-located in one data source -> no federation;
// 5) joins that are not fully binding-table-related federate.
// NOTE(review): the `1 == tableNames.size()` arm is subsumed by the final `size() > 1` check only
// for multi-table cases; the single-table join case is handled explicitly — preserve the order.
@Override public boolean decide(final SelectStatementContext selectStatementContext, final List<Object> parameters, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final ShardingRule rule, final Collection<DataNode> includedDataNodes) { Collection<String> tableNames = rule.getShardingLogicTableNames(selectStatementContext.getTablesContext().getTableNames()); if (tableNames.isEmpty()) { return false; } includedDataNodes.addAll(getTableDataNodes(rule, tableNames, database)); if (selectStatementContext.isContainsSubquery() || selectStatementContext.isContainsHaving() || selectStatementContext.isContainsCombine() || selectStatementContext.isContainsPartialDistinctAggregation()) { return true; } if (!selectStatementContext.isContainsJoinQuery() || rule.isAllTablesInSameDataSource(tableNames)) { return false; } if (1 == tableNames.size() && selectStatementContext.isContainsJoinQuery() && !rule.isAllBindingTables(database, selectStatementContext, tableNames)) { return true; } return tableNames.size() > 1 && !rule.isAllBindingTables(database, selectStatementContext, tableNames); }
// Joined, non-binding tables (with pagination) must trigger federation and collect the
// 4 data nodes of the two sharded tables from the mocked rule/database fixture.
@Test void assertDecideWhenAllTablesIsNotBindingTablesAndContainsPagination() { SelectStatementContext select = createStatementContext(); when(select.isContainsJoinQuery()).thenReturn(true); when(select.getPaginationContext().isHasPagination()).thenReturn(true); ShardingRule shardingRule = createShardingRule(); ShardingSphereDatabase database = createDatabase(shardingRule); when(shardingRule.isAllBindingTables(database, select, Arrays.asList("t_order", "t_order_item"))).thenReturn(false); Collection<DataNode> includedDataNodes = new HashSet<>(); assertTrue(new ShardingSQLFederationDecider().decide(select, Collections.emptyList(), mock(RuleMetaData.class), database, shardingRule, includedDataNodes)); assertThat(includedDataNodes.size(), is(4)); }
// Creates all ACL bindings a producer needs on the given cluster: resolves the admin client,
// derives the bindings from the request (createProducerBindings), applies them with logging,
// and completes empty.
public Mono<Void> createProducerAcl(KafkaCluster cluster, CreateProducerAclDTO request) { return adminClientService.get(cluster) .flatMap(ac -> createAclsWithLogging(ac, createProducerBindings(request))) .then(); }
// Prefixed topic + transactional-id request with idempotent=false must yield exactly five
// bindings: WRITE/DESCRIBE/CREATE on the topic prefix and WRITE/DESCRIBE on the tx-id prefix
// (no cluster-level IDEMPOTENT_WRITE). Reformatted so the inline comments terminate correctly.
@Test
void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() {
    ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
    when(adminClientMock.createAcls(createdCaptor.capture()))
        .thenReturn(Mono.empty());
    var principal = UUID.randomUUID().toString();
    var host = UUID.randomUUID().toString();
    aclsService.createProducerAcl(
        CLUSTER,
        new CreateProducerAclDTO()
            .principal(principal)
            .host(host)
            .topicsPrefix("topicPref")
            .transactionsIdPrefix("txIdPref")
            .idempotent(false)
    ).block();
    //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
    //IDEMPOTENT_WRITE on cluster if idempotent is enabled (false)
    Collection<AclBinding> createdBindings = createdCaptor.getValue();
    assertThat(createdBindings)
        .hasSize(5)
        .contains(new AclBinding(
            new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
            new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
        .contains(new AclBinding(
            new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
            new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
        .contains(new AclBinding(
            new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
            new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
        .contains(new AclBinding(
            new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
            new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
        .contains(new AclBinding(
            new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
            new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)));
}
/**
 * Resolves the numeric assurance level for the given key.
 *
 * @param key the assurance level name
 * @return the mapped numeric level
 * @throws SamlSessionException when the key has no mapping
 */
public static int getAssuranceLevel(String key) throws SamlSessionException {
    // Single lookup instead of containsKey + get (avoids hashing the key twice).
    // Assumes numberMap never stores null values (the original would have NPE'd on unboxing then).
    Integer level = numberMap.get(key);
    if (level == null) {
        throw new SamlSessionException("Assurance level not found");
    }
    return level;
}
@Test
void invalidAssuranceLevelName() {
    // An unmapped name must raise SamlSessionException with the canonical message.
    final SamlSessionException thrown = assertThrows(SamlSessionException.class,
            () -> LevelOfAssurance.getAssuranceLevel("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"));
    assertEquals("Assurance level not found", thrown.getMessage());
}
// Decodes a Bigtable cell's raw bytes into the Java value for the given Beam schema field type.
// Fixed-width numeric types are validated against their exact byte length (big-endian per the
// Guava fromByteArray contract); FLOAT/DOUBLE go through their IEEE-754 bit patterns; DATETIME
// and STRING are UTF-8 text; BYTES passes through; logical/unsupported types throw.
Object getCellValue(Cell cell, Schema.FieldType type) { ByteString cellValue = cell.getValue(); int valueSize = cellValue.size(); switch (type.getTypeName()) { case BOOLEAN: checkArgument(valueSize == 1, message("Boolean", 1)); return cellValue.toByteArray()[0] != 0; case BYTE: checkArgument(valueSize == 1, message("Byte", 1)); return cellValue.toByteArray()[0]; case INT16: checkArgument(valueSize == 2, message("Int16", 2)); return Shorts.fromByteArray(cellValue.toByteArray()); case INT32: checkArgument(valueSize == 4, message("Int32", 4)); return Ints.fromByteArray(cellValue.toByteArray()); case INT64: checkArgument(valueSize == 8, message("Int64", 8)); return Longs.fromByteArray(cellValue.toByteArray()); case FLOAT: checkArgument(valueSize == 4, message("Float", 4)); return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray())); case DOUBLE: checkArgument(valueSize == 8, message("Double", 8)); return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray())); case DATETIME: return DateTime.parse(cellValue.toStringUtf8()); case STRING: return cellValue.toStringUtf8(); case BYTES: return cellValue.toByteArray(); case LOGICAL_TYPE: String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier(); throw new IllegalStateException("Unsupported logical type: " + identifier); default: throw new IllegalArgumentException( String.format("Unsupported cell value type '%s'.", type.getTypeName())); } }
@Test
public void shouldParseFloatType() {
    // 0x40000000 is the big-endian IEEE-754 encoding of 2.0f.
    final byte[] encoded = new byte[] {64, 0, 0, 0};
    assertEquals(2.0f, (Float) PARSER.getCellValue(cell(encoded), FLOAT), 0.001);
}
// Returns the next byte as an unsigned int (0-255), refilling the buffer when exhausted.
// nextChar == UNSET marks "no data buffered"; after fill() it staying UNSET means end of stream.
// Assumes fill() resets nextChar to a valid index (< buf.length) whenever data remains — confirm
// against fill()'s contract.
@Override public int read() { if (nextChar == UNSET || nextChar >= buf.length) { fill(); if (nextChar == UNSET) { return END_OF_STREAM; } } byte signedByte = buf[nextChar]; nextChar++; return signedByte & 0xFF; }
// Single-line stream round-trip: reading back "line1" must reproduce it exactly.
@Test void read_from_ClosableIterator_with_single_line() throws IOException { assertThat(read(create("line1"))).isEqualTo("line1"); }
// Factory: wraps an InferableFunction in a MapElements transform, taking the input/output
// type descriptors straight from the function itself.
public static <InputT, OutputT> MapElements<InputT, OutputT> via( final InferableFunction<InputT, OutputT> fn) { return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor()); }
// The transform's display data must expose the concrete class of the wrapped SimpleFunction
// (here an anonymous subclass) under the "class" key.
@Test public void testSimpleFunctionClassDisplayData() { SimpleFunction<?, ?> simpleFn = new SimpleFunction<Integer, Integer>() { @Override public Integer apply(Integer input) { return input; } }; MapElements<?, ?> simpleMap = MapElements.via(simpleFn); assertThat(DisplayData.from(simpleMap), hasDisplayItem("class", simpleFn.getClass())); }
/**
 * Registers one more compilation unit (shader stage) on this shader.
 *
 * @param type the GL shader type constant
 * @param name the resource name of the shader source
 * @return this shader, to allow fluent chaining
 */
public Shader add(int type, String name) {
    final Unit unit = new Unit(type, name);
    units.add(unit);
    return this;
}
// Cross-product compile check: every GPU shader program is verified against every template
// using the external glslang validator; skipped entirely when glslang.path is not configured.
// The template supplies the Windows GL version header and a fixed 512x1 compute thread config.
@Test public void testShaders() throws Exception { String verifier = System.getProperty("glslang.path"); Assume.assumeFalse("glslang.path is not set", Strings.isNullOrEmpty(verifier)); Template[] templates = { new Template() .addInclude(GpuPlugin.class) .add(key -> { if ("version_header".equals(key)) { return GpuPlugin.WINDOWS_VERSION_HEADER; } if ("thread_config".equals(key)) { int threadCount = 512; int facesPerThread = 1; return "#define THREAD_COUNT " + threadCount + "\n" + "#define FACES_PER_THREAD " + facesPerThread + "\n"; } return null; }), }; Shader[] shaders = { GpuPlugin.PROGRAM, GpuPlugin.COMPUTE_PROGRAM, GpuPlugin.UNORDERED_COMPUTE_PROGRAM, GpuPlugin.UI_PROGRAM, }; for (Template t : templates) { for (Shader s : shaders) { verify(t, s); } } }
// Thin public factory: delegates straight to createBackendHandler for the given statement
// and connection session.
public static ProxyBackendHandler newInstance(final SQLStatement sqlStatement, final ConnectionSession connectionSession) { return createBackendHandler(sqlStatement, connectionSession); }
// An unsupported statement type (ALTER DATABASE) must surface as UnsupportedSQLOperationException.
@Test void assertDatabaseOperateBackendHandlerFactoryThrowUnsupportedOperationException() { assertThrows(UnsupportedSQLOperationException.class, () -> DatabaseOperateBackendHandlerFactory.newInstance(mock(AlterDatabaseStatement.class), mock(ConnectionSession.class))); }
// Authenticates via Shiro and creates a session. A blank current session id is normalized to
// null so Shiro builds a fresh session. On service unavailability the failure is audited (with
// message context) and rethrown so callers can distinguish outage from bad credentials; plain
// AuthenticationException is audited and mapped to empty() (invalid credentials).
public Optional<Session> login(@Nullable String currentSessionId, String host, ActorAwareAuthenticationToken authToken) throws AuthenticationServiceUnavailableException { final String previousSessionId = StringUtils.defaultIfBlank(currentSessionId, null); final Subject subject = new Subject.Builder().sessionId(previousSessionId).host(host).buildSubject(); ThreadContext.bind(subject); try { final Session session = subject.getSession(); subject.login(authToken); return createSession(subject, session, host); } catch (AuthenticationServiceUnavailableException e) { log.info("Session creation failed due to authentication service being unavailable. Actor: \"{}\"", authToken.getActor().urn()); final Map<String, Object> auditEventContext = ImmutableMap.of( "remote_address", host, "message", "Authentication service unavailable: " + e.getMessage() ); auditEventSender.failure(authToken.getActor(), SESSION_CREATE, auditEventContext); throw e; } catch (AuthenticationException e) { log.info("Invalid credentials in session create request. Actor: \"{}\"", authToken.getActor().urn()); final Map<String, Object> auditEventContext = ImmutableMap.of( "remote_address", host ); auditEventSender.failure(authToken.getActor(), SESSION_CREATE, auditEventContext); return Optional.empty(); } }
// Happy path: a valid token yields a present session with the expected timeout, flips the
// subject to authenticated, and records a success audit event for the user.
@Test public void validAuthToken() { setUpUserMock(); assertFalse(SecurityUtils.getSubject().isAuthenticated()); Optional<Session> session = sessionCreator.login(null, "host", validToken); assertTrue(session.isPresent()); assertEquals(SESSION_TIMEOUT, session.get().getTimeout()); assertTrue(SecurityUtils.getSubject().isAuthenticated()); verify(auditEventSender).success(eq(AuditActor.user("username")), anyString(), anyMap()); }
/**
 * Ensures a config row exists for the given id.
 *
 * @param id the config id; a null id is treated as "nothing to validate" and returns null
 * @return the existing config, or null when id is null
 * @throws the CONFIG_NOT_EXISTS service exception when the id resolves to no row
 */
@VisibleForTesting
public ConfigDO validateConfigExists(Long id) {
    if (id == null) {
        return null;
    }
    final ConfigDO existing = configMapper.selectById(id);
    if (existing == null) {
        throw exception(CONFIG_NOT_EXISTS);
    }
    return existing;
}
// Validation of an existing id must complete without throwing. Comments translated to English
// and the collapsed line re-broken so the inline comments terminate correctly.
@Test
public void testValidateConfigExists_success() {
    // mock data
    ConfigDO dbConfigDO = randomConfigDO();
    configMapper.insert(dbConfigDO); // @Sql: insert an existing row first
    // the call succeeds (no exception for an existing id)
    configService.validateConfigExists(dbConfigDO.getId());
}
// Imports every configured SARIF report. Per-report failures are isolated: a missing file is
// fatal (MessageException aborts the analysis), but any other processing error is only logged
// as a warning so remaining reports still import; results are displayed at the end.
@Override public void execute(SensorContext context) { Set<String> reportPaths = loadReportPaths(); Map<String, SarifImportResults> filePathToImportResults = new HashMap<>(); for (String reportPath : reportPaths) { try { SarifImportResults sarifImportResults = processReport(context, reportPath); filePathToImportResults.put(reportPath, sarifImportResults); } catch (NoSuchFileException e) { throw MessageException.of(format("SARIF report file not found: %s", e.getFile())); } catch (Exception exception) { LOG.warn("Failed to process SARIF report from file '{}', error: '{}'", reportPath, exception.getMessage()); } } filePathToImportResults.forEach(SarifIssuesImportSensor::displayResults); }
// A NoSuchFileException during deserialization must be rethrown as a MessageException that
// names the missing file (fatal, unlike other per-report errors which are only logged).
@Test public void execute_whenDeserializationThrowsMessageException_shouldRethrow() throws NoSuchFileException { sensorSettings.setProperty("sonar.sarifReportPaths", FILE_1); NoSuchFileException e = new NoSuchFileException("non-existent"); failDeserializingReportWithException(FILE_1, e); SarifIssuesImportSensor sensor = new SarifIssuesImportSensor(sarifSerializer, sarifImporter, sensorSettings.asConfig()); assertThatThrownBy(() -> sensor.execute(sensorContext)) .isInstanceOf(MessageException.class) .hasMessage("SARIF report file not found: non-existent"); }
// Dispatches a push notification. Non-urgent APN pushes are deferred to the scheduler (APNs
// rate-limits background pushes per device) and resolve to empty(); everything else is sent
// immediately via the FCM or APN sender. On success, metrics are tagged with token type,
// notification type, urgency, acceptance and unregistered state; unregistered devices are
// cleaned up, and accepted APN_VOIP notifications schedule a recurring VOIP retry. Failures
// log and count by exception class. Reformatted so the inline comments terminate correctly.
@VisibleForTesting
CompletableFuture<Optional<SendPushNotificationResult>> sendNotification(final PushNotification pushNotification) {
    if (pushNotification.tokenType() == PushNotification.TokenType.APN && !pushNotification.urgent()) {
        // APNs imposes a per-device limit on background push notifications; schedule a notification for some time in the
        // future (possibly even now!) rather than sending a notification directly
        return pushNotificationScheduler
            .scheduleBackgroundApnsNotification(pushNotification.destination(), pushNotification.destinationDevice())
            .whenComplete(logErrors())
            .thenApply(ignored -> Optional.<SendPushNotificationResult>empty())
            .toCompletableFuture();
    }
    final PushNotificationSender sender = switch (pushNotification.tokenType()) {
        case FCM -> fcmSender;
        case APN, APN_VOIP -> apnSender;
    };
    return sender.sendNotification(pushNotification).whenComplete((result, throwable) -> {
        if (throwable == null) {
            Tags tags = Tags.of("tokenType", pushNotification.tokenType().name(),
                "notificationType", pushNotification.notificationType().name(),
                "urgent", String.valueOf(pushNotification.urgent()),
                "accepted", String.valueOf(result.accepted()),
                "unregistered", String.valueOf(result.unregistered()));
            if (result.errorCode().isPresent()) {
                tags = tags.and("errorCode", result.errorCode().get());
            }
            Metrics.counter(SENT_NOTIFICATION_COUNTER_NAME, tags).increment();
            if (result.unregistered() && pushNotification.destination() != null
                && pushNotification.destinationDevice() != null) {
                handleDeviceUnregistered(pushNotification.destination(), pushNotification.destinationDevice(),
                    pushNotification.tokenType(), result.errorCode(), result.unregisteredTimestamp());
            }
            if (result.accepted()
                && pushNotification.tokenType() == PushNotification.TokenType.APN_VOIP
                && pushNotification.notificationType() == PushNotification.NotificationType.NOTIFICATION
                && pushNotification.destination() != null
                && pushNotification.destinationDevice() != null) {
                pushNotificationScheduler.scheduleRecurringApnsVoipNotification(
                        pushNotification.destination(),
                        pushNotification.destinationDevice())
                    .whenComplete(logErrors());
            }
        } else {
            logger.debug("Failed to deliver {} push notification to {} ({})",
                pushNotification.notificationType(), pushNotification.deviceToken(),
                pushNotification.tokenType(), throwable);
            Metrics.counter(FAILED_NOTIFICATION_COUNTER_NAME,
                "cause", throwable.getClass().getSimpleName()).increment();
        }
    })
    .thenApply(Optional::of);
}
// An APN_VOIP send whose result reports "unregistered" must trigger device cleanup: the VOIP
// token is cleared (the regular APN token untouched), the device record is updated, scheduled
// notifications are cancelled, and the FCM sender is never involved.
@Test void testSendNotificationUnregisteredApn() { final Account account = mock(Account.class); final Device device = mock(Device.class); final UUID aci = UUID.randomUUID(); when(device.getId()).thenReturn(Device.PRIMARY_ID); when(device.getApnId()).thenReturn("apns-token"); when(device.getVoipApnId()).thenReturn("apns-voip-token"); when(account.getDevice(Device.PRIMARY_ID)).thenReturn(Optional.of(device)); when(account.getUuid()).thenReturn(aci); when(accountsManager.getByAccountIdentifier(aci)).thenReturn(Optional.of(account)); final PushNotification pushNotification = new PushNotification( "token", PushNotification.TokenType.APN_VOIP, PushNotification.NotificationType.NOTIFICATION, null, account, device, true); when(apnSender.sendNotification(pushNotification)) .thenReturn(CompletableFuture.completedFuture(new SendPushNotificationResult(false, Optional.empty(), true, Optional.empty()))); when(pushNotificationScheduler.cancelScheduledNotifications(account, device)) .thenReturn(CompletableFuture.completedFuture(null)); pushNotificationManager.sendNotification(pushNotification); verifyNoInteractions(fcmSender); verify(accountsManager).updateDevice(eq(account), eq(Device.PRIMARY_ID), any()); verify(device).setVoipApnId(null); verify(device, never()).setApnId(any()); verify(pushNotificationScheduler).cancelScheduledNotifications(account, device); }
// Maps a JCA algorithm string to an asset node. Delegates to registered JCA-specific mappers
// first; otherwise matches well-known names case-insensitively (PBE/DH/RSA/EC). Any other
// string becomes an Algorithm tagged Unknown rather than empty, so unrecognized names are
// still captured in the model. Null input yields empty.
@Nonnull @Override public Optional<? extends INode> parse( @Nullable final String str, @Nonnull DetectionLocation detectionLocation) { if (str == null) { return Optional.empty(); } for (IMapper mapper : jcaSpecificAlgorithmMappers) { Optional<? extends INode> asset = mapper.parse(str, detectionLocation); if (asset.isPresent()) { return asset; } } return switch (str.toUpperCase().trim()) { case "PBE", "PBES2" -> Optional.of(new PasswordBasedEncryption(detectionLocation)); case "DH", "DIFFIEHELLMAN" -> Optional.of(new DH(detectionLocation)); case "RSA" -> Optional.of(new RSA(detectionLocation)); case "EC" -> Optional.of(new Algorithm(str, PublicKeyEncryption.class, detectionLocation)); default -> { final Algorithm algorithm = new Algorithm(str, Unknown.class, detectionLocation); algorithm.put(new Unknown(detectionLocation)); yield Optional.of(algorithm); } }; }
// "X448" must resolve (presumably via one of the JCA-specific mappers — it is not in the
// switch fallback) to a node recognized as a KeyAgreement.
@Test void keyAgreement() { DetectionLocation testDetectionLocation = new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL"); JcaAlgorithmMapper jcaAlgorithmMapper = new JcaAlgorithmMapper(); Optional<? extends INode> assetOptional = jcaAlgorithmMapper.parse("X448", testDetectionLocation); assertThat(assetOptional).isPresent(); assertThat(assetOptional.get().is(KeyAgreement.class)).isTrue(); }
// FEEL overlaps(range1, range2): true when the ranges share at least one point. Each side of
// the conjunction checks one direction of overlap: strict endpoint inequality, or equality
// with both touching boundaries CLOSED. Null ranges and non-comparable endpoint types are
// reported as InvalidParametersEvent errors. Reformatted so the inline comment terminates
// correctly; code tokens unchanged.
public FEELFnResult<Boolean> invoke(@ParameterName("range1") Range range1, @ParameterName("range2") Range range2) {
    if (range1 == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be null"));
    }
    if (range2 == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range2", "cannot be null"));
    }
    try {
        boolean result = (range1.getHighEndPoint().compareTo(range2.getLowEndPoint()) > 0
                || (range1.getHighEndPoint().compareTo(range2.getLowEndPoint()) == 0
                    && range1.getHighBoundary() == RangeBoundary.CLOSED
                    && range2.getLowBoundary() == RangeBoundary.CLOSED))
            && (range1.getLowEndPoint().compareTo(range2.getHighEndPoint()) < 0
                || (range1.getLowEndPoint().compareTo(range2.getHighEndPoint()) == 0
                    && range1.getLowBoundary() == RangeBoundary.CLOSED
                    && range2.getHighBoundary() == RangeBoundary.CLOSED));
        return FEELFnResult.ofResult(result);
    } catch (Exception e) {
        // points are not comparable
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be compared to range2"));
    }
}
// String-endpoint overlap cases: identical ranges, partially overlapping ranges (both orders),
// and ranges touching through an open/closed boundary combination must all report TRUE.
@Test void invokeParamRangeAndRange() { FunctionTestUtil.assertResult( overlapsFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( overlapsFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "c", "k", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( overlapsFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "c", "k", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( overlapsFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.OPEN, "a", "k", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); }
// Exception path of the interceptor: clear the thread-local request data so it cannot leak
// into the next request handled on this thread, then pass the context through unchanged.
@Override public ExecuteContext onThrow(ExecuteContext context) { ThreadLocalUtils.removeRequestData(); return context; }
// After onThrow, previously-stored thread-local request data must be gone.
@Test public void testOnThrow() { ThreadLocalUtils.setRequestData(new RequestData(null, "", "")); interceptor.onThrow(context); Assert.assertNull(ThreadLocalUtils.getRequestData()); }
// Fluent setter for the provider charset; returns the builder (via getThis) for chaining.
public ProviderBuilder charset(String charset) { this.charset = charset; return getThis(); }
@Test
void charset() {
    // The charset set on the builder must survive into the built provider config.
    final ProviderBuilder providerBuilder = ProviderBuilder.newBuilder().charset("utf-8");
    Assertions.assertEquals("utf-8", providerBuilder.build().getCharset());
}
// Fluent setter: validates the sync backup count against the current async backup count
// (checkBackupCount enforces the combined limit) before storing it.
public RingbufferConfig setBackupCount(int backupCount) { this.backupCount = checkBackupCount(backupCount, asyncBackupCount); return this; }
@Test
public void setBackupCount() {
    // Round-trip through the fluent setter: the stored backup count must be read back verbatim.
    final RingbufferConfig config = new RingbufferConfig(NAME).setBackupCount(4);
    assertEquals(4, config.getBackupCount());
}
/**
 * Checks whether the caller may perform the given job operation. When no ACL is configured
 * for the operation, access is open to everyone; otherwise the decision is delegated to the
 * ACLs manager with the job owner's user name.
 */
@Override
public boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation) {
    final AccessControlList jobACL = jobACLs.get(jobOperation);
    return jobACL == null
        || aclsManager.checkAccess(callerUGI, jobOperation, userName, jobACL);
}
@Test public void testCheckAccess() { // Create two unique users String user1 = System.getProperty("user.name"); String user2 = user1 + "1234"; UserGroupInformation ugi1 = UserGroupInformation.createRemoteUser(user1); UserGroupInformation ugi2 = UserGroupInformation.createRemoteUser(user2); // Create the job JobID jobID = JobID.forName("job_1234567890000_0001"); JobId jobId = TypeConverter.toYarn(jobID); // Setup configuration access only to user1 (owner) Configuration conf1 = new Configuration(); conf1.setBoolean(MRConfig.MR_ACLS_ENABLED, true); conf1.set(MRJobConfig.JOB_ACL_VIEW_JOB, ""); // Verify access JobImpl job1 = new JobImpl(jobId, null, conf1, null, null, null, null, null, null, null, null, true, user1, 0, null, null, null, null); Assert.assertTrue(job1.checkAccess(ugi1, JobACL.VIEW_JOB)); Assert.assertFalse(job1.checkAccess(ugi2, JobACL.VIEW_JOB)); // Setup configuration access to the user1 (owner) and user2 Configuration conf2 = new Configuration(); conf2.setBoolean(MRConfig.MR_ACLS_ENABLED, true); conf2.set(MRJobConfig.JOB_ACL_VIEW_JOB, user2); // Verify access JobImpl job2 = new JobImpl(jobId, null, conf2, null, null, null, null, null, null, null, null, true, user1, 0, null, null, null, null); Assert.assertTrue(job2.checkAccess(ugi1, JobACL.VIEW_JOB)); Assert.assertTrue(job2.checkAccess(ugi2, JobACL.VIEW_JOB)); // Setup configuration access with security enabled and access to all Configuration conf3 = new Configuration(); conf3.setBoolean(MRConfig.MR_ACLS_ENABLED, true); conf3.set(MRJobConfig.JOB_ACL_VIEW_JOB, "*"); // Verify access JobImpl job3 = new JobImpl(jobId, null, conf3, null, null, null, null, null, null, null, null, true, user1, 0, null, null, null, null); Assert.assertTrue(job3.checkAccess(ugi1, JobACL.VIEW_JOB)); Assert.assertTrue(job3.checkAccess(ugi2, JobACL.VIEW_JOB)); // Setup configuration access without security enabled Configuration conf4 = new Configuration(); conf4.setBoolean(MRConfig.MR_ACLS_ENABLED, false); 
conf4.set(MRJobConfig.JOB_ACL_VIEW_JOB, ""); // Verify access JobImpl job4 = new JobImpl(jobId, null, conf4, null, null, null, null, null, null, null, null, true, user1, 0, null, null, null, null); Assert.assertTrue(job4.checkAccess(ugi1, JobACL.VIEW_JOB)); Assert.assertTrue(job4.checkAccess(ugi2, JobACL.VIEW_JOB)); // Setup configuration access without security enabled Configuration conf5 = new Configuration(); conf5.setBoolean(MRConfig.MR_ACLS_ENABLED, true); conf5.set(MRJobConfig.JOB_ACL_VIEW_JOB, ""); // Verify access JobImpl job5 = new JobImpl(jobId, null, conf5, null, null, null, null, null, null, null, null, true, user1, 0, null, null, null, null); Assert.assertTrue(job5.checkAccess(ugi1, null)); Assert.assertTrue(job5.checkAccess(ugi2, null)); }
/**
 * Builds one tri-state selection per resource, reflecting which of the given
 * agents currently have that resource assigned.
 */
public static List<TriStateSelection> forAgentsResources(Set<ResourceConfig> resourceConfigs, Agents agents) {
    return convert(resourceConfigs, agents, new Assigner<>() {
        // An agent is associated with a resource iff the resource name appears in its list.
        @Override
        public boolean shouldAssociate(Agent agent, ResourceConfig resourceConfig) {
            return agent.getResourcesAsList().contains(resourceConfig.getName());
        }

        @Override
        public String identifier(ResourceConfig resourceConfig) {
            return resourceConfig.getName();
        }

        // Resource selections are always enabled, regardless of agent state.
        @Override
        public boolean shouldEnable(Agent agent, ResourceConfig resourceConfig) {
            return true;
        }
    });
}
@Test
public void shouldHaveActionRemoveIfThereAreNoAgents() {
    List<TriStateSelection> selections = TriStateSelection.forAgentsResources(resourceConfigs, agents);

    // With no agents associated, every resource defaults to the "remove" action.
    assertThat(selections.size(), is(2));
    assertThat(selections, hasItem(new TriStateSelection("one", TriStateSelection.Action.remove)));
    assertThat(selections, hasItem(new TriStateSelection("two", TriStateSelection.Action.remove)));
}
/**
 * Registers a glue class with this factory. Idempotent: a class that is
 * already registered is accepted again without re-validation.
 *
 * @return always {@code true}
 */
@Override
public boolean addClass(final Class<?> stepClass) {
    if (stepClasses.contains(stepClass)) {
        return true;
    }
    // Glue classes must not themselves be Spring components.
    checkNoComponentAnnotations(stepClass);
    // At most one registered class may carry @CucumberContextConfiguration;
    // remember it so the Spring context can be built from it later.
    if (hasCucumberContextConfiguration(stepClass)) {
        checkOnlyOneClassHasCucumberContextConfiguration(stepClass);
        withCucumberContextConfiguration = stepClass;
    }
    stepClasses.add(stepClass);
    return true;
}
@Test
void shouldNotFailWithCucumberContextConfigurationInheritedAnnotation() {
    final ObjectFactory factory = new SpringFactory();
    // The @CucumberContextConfiguration annotation is inherited from a superclass;
    // registering such a class must still allow the factory to start.
    factory.addClass(WithInheritedAnnotation.class);
    assertDoesNotThrow(factory::start);
}
/**
 * Walks the aggregated-log directory tree of the requested user and collects
 * log metadata from every node file matching the request's filters
 * (application id, node id, container id). Unreadable files are skipped
 * with a warning rather than aborting the whole collection.
 */
public List<ContainerLogMeta> collect(
    LogAggregationFileController fileController) throws IOException {
  List<ContainerLogMeta> containersLogMeta = new ArrayList<>();
  RemoteIterator<FileStatus> appDirs = fileController.
      getApplicationDirectoriesOfUser(logsRequest.getUser());
  while (appDirs.hasNext()) {
    FileStatus currentAppDir = appDirs.next();
    // A null appId in the request means "all applications".
    if (logsRequest.getAppId() == null ||
        logsRequest.getAppId().equals(currentAppDir.getPath().getName())) {
      ApplicationId appId = ApplicationId.fromString(
          currentAppDir.getPath().getName());
      RemoteIterator<FileStatus> nodeFiles = fileController
          .getNodeFilesOfApplicationDirectory(currentAppDir);
      while (nodeFiles.hasNext()) {
        FileStatus currentNodeFile = nodeFiles.next();
        if (!logsRequest.getNodeId().match(currentNodeFile.getPath()
            .getName())) {
          continue;
        }
        // A Hadoop archive of the app's logs: replace the iterator with
        // one over the archive's contents and keep scanning.
        if (currentNodeFile.getPath().getName().equals(
            logsRequest.getAppId() + ".har")) {
          Path p = new Path("har:///" +
              currentNodeFile.getPath().toUri().getRawPath());
          nodeFiles = HarFs.get(p.toUri(), conf).listStatusIterator(p);
          continue;
        }
        try {
          Map<String, List<ContainerLogFileInfo>> metaFiles = fileController
              .getLogMetaFilesOfNode(logsRequest, currentNodeFile, appId);
          if (metaFiles == null) {
            continue;
          }
          // Keep only the requested container (null means all containers).
          metaFiles.entrySet().removeIf(
              entry -> !(logsRequest.getContainerId() == null ||
                  logsRequest.getContainerId().equals(entry.getKey())));
          containersLogMeta.addAll(createContainerLogMetas(
              currentNodeFile.getPath().getName(), metaFiles));
        } catch (IOException ioe) {
          // Best-effort: a single broken log file must not fail the request.
          LOG.warn("Can not get log meta from the log file:"
              + currentNodeFile.getPath() + "\n" + ioe.getMessage());
        }
      }
    }
  }
  return containersLogMeta;
}
@Test
void testMultipleFileBetweenSize() throws IOException {
    // Only files strictly larger than 50 and strictly smaller than 101 bytes should match.
    ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder builder =
            new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
    Set<String> sizeFilters = new HashSet<>();
    sizeFilters.add(">50");
    sizeFilters.add("<101");
    builder.setUser(null);
    builder.setAppId(null);
    builder.setContainerId(null);
    builder.setNodeId(null);
    builder.setFileName(null);
    builder.setFileSize(sizeFilters);
    builder.setModificationTime(null);

    LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
            builder.build(), new YarnConfiguration());
    List<ContainerLogFileInfo> matchedFiles = collector.collect(fileController).stream()
            .flatMap(meta -> meta.getContainerLogMeta().stream())
            .collect(Collectors.toList());

    assertEquals(4, matchedFiles.size());
    assertTrue(matchedFiles.stream().allMatch(
            f -> f.getFileSize().equals("100")));
}
public static String getPartitionColumn(TableConfig tableConfig) { // check InstanceAssignmentConfigMap is null or empty, if (!MapUtils.isEmpty(tableConfig.getInstanceAssignmentConfigMap())) { for (InstanceAssignmentConfig instanceAssignmentConfig : tableConfig.getInstanceAssignmentConfigMap().values()) { //check InstanceAssignmentConfig has the InstanceReplicaGroupPartitionConfig with non-empty partitionColumn if (StringUtils.isNotEmpty(instanceAssignmentConfig.getReplicaGroupPartitionConfig().getPartitionColumn())) { return instanceAssignmentConfig.getReplicaGroupPartitionConfig().getPartitionColumn(); } } } // for backward-compatibility, If partitionColumn value isn't there in InstanceReplicaGroupPartitionConfig // check ReplicaGroupStrategyConfig for partitionColumn ReplicaGroupStrategyConfig replicaGroupStrategyConfig = tableConfig.getValidationConfig().getReplicaGroupStrategyConfig(); return replicaGroupStrategyConfig != null ? replicaGroupStrategyConfig.getPartitionColumn() : null; }
@Test public void testGetPartitionColumnWithReplicaGroupConfig() { ReplicaGroupStrategyConfig replicaGroupStrategyConfig = new ReplicaGroupStrategyConfig(PARTITION_COLUMN, 1); TableConfig tableConfig = new TableConfigBuilder(TableType.REALTIME).setTableName(TABLE_NAME).build(); // setting up ReplicaGroupStrategyConfig for backward compatibility test. SegmentsValidationAndRetentionConfig validationConfig = new SegmentsValidationAndRetentionConfig(); validationConfig.setReplicaGroupStrategyConfig(replicaGroupStrategyConfig); tableConfig.setValidationConfig(validationConfig); Assert.assertEquals(PARTITION_COLUMN, TableConfigUtils.getPartitionColumn(tableConfig)); }
/**
 * Pages roles matching the request's filters (name, code, status, create time).
 */
@Override
public PageResult<RoleDO> getRolePage(RolePageReqVO reqVO) {
    return roleMapper.selectPage(reqVO);
}
@Test
public void testGetRolePage() {
    // mock data
    RoleDO dbRole = randomPojo(RoleDO.class, o -> { // record matched by the query below
        o.setName("土豆");
        o.setCode("tudou");
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setCreateTime(buildTime(2022, 2, 8));
    });
    roleMapper.insert(dbRole);
    // record whose name does not match
    roleMapper.insert(cloneIgnoreId(dbRole, o -> o.setName("红薯")));
    // record whose code does not match
    roleMapper.insert(cloneIgnoreId(dbRole, o -> o.setCode("hong")));
    // record whose createTime does not match
    roleMapper.insert(cloneIgnoreId(dbRole, o -> o.setCreateTime(buildTime(2022, 2, 16))));
    // prepare query parameters
    RolePageReqVO reqVO = new RolePageReqVO();
    reqVO.setName("土豆");
    reqVO.setCode("tu");
    reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    reqVO.setCreateTime(buildBetweenTime(2022, 2, 1, 2022, 2, 12));
    // invoke
    PageResult<RoleDO> pageResult = roleService.getRolePage(reqVO);
    // assert: only the first record matches all filters
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbRole, pageResult.getList().get(0));
}
/**
 * Builds the PDF report file name for an application:
 * {@code JavaMelody_<application>_<date>.pdf}, with spaces replaced by
 * underscores and slashes stripped so the result is a safe file name.
 */
public static String getFileName(String application) {
    final String sanitizedApplication = application.replace(' ', '_').replace("/", "");
    // The localized date may contain '/' separators; they would break the file name.
    final String date = I18N.getCurrentDate().replace('/', '_');
    return "JavaMelody_" + sanitizedApplication + '_' + date + ".pdf";
}
@Test
public void testGetFileName() {
    // Smoke test: a file name is always produced for a non-null application.
    assertNotNull("filename", PdfReport.getFileName("test"));
}
/**
 * Prepares fetch requests for the fetchable partitions and forwards them,
 * wiring the standard success and failure handlers.
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
            prepareFetchRequests(),
            this::handleFetchSuccess,
            this::handleFetchFailure
    );
}
@Test
public void testLeaderEpochInConsumerRecord() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);

    // Build three batches, each with a different partition leader epoch;
    // every record's value carries the epoch it was written with.
    int partitionLeaderEpoch = 1;
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE,
            TimestampType.CREATE_TIME, 0L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    partitionLeaderEpoch += 7;
    builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE,
            TimestampType.CREATE_TIME, 2L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    partitionLeaderEpoch += 5;
    builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE,
            TimestampType.CREATE_TIME, 3L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    buffer.flip();
    MemoryRecords records = MemoryRecords.readableRecords(buffer);

    // Send the fetch and complete the response with the prepared batches.
    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0));
    networkClientDelegate.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());

    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetchRecords();
    assertTrue(partitionRecords.containsKey(tp0));
    assertEquals(6, partitionRecords.get(tp0).size());

    // Every consumed record must expose the leader epoch of the batch it came from.
    for (ConsumerRecord<byte[], byte[]> record : partitionRecords.get(tp0)) {
        int expectedLeaderEpoch = Integer.parseInt(Utils.utf8(record.value()));
        assertEquals(Optional.of(expectedLeaderEpoch), record.leaderEpoch());
    }
}
/**
 * Parses a CGM Specific Ops Control Point response packet.
 * Layout: Op Code (UINT8), an operand whose size depends on the op code,
 * and an optional trailing CRC16 (MCRF4XX) over everything before it.
 * Invalid packets are reported via {@code onInvalidDataReceived}; packets with
 * a bad CRC via {@code onCGMSpecificOpsResponseReceivedWithCrcError}.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // A packet needs at least an op code and one operand byte.
    if (data.size() < 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Read the Op Code
    final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);

    // Estimate the expected operand size based on the Op Code
    int expectedOperandSize;
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE ->
                // UINT8
                expectedOperandSize = 1;
        case OP_CODE_CALIBRATION_VALUE_RESPONSE ->
                // Calibration Value
                expectedOperandSize = 10;
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE,
             OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPO_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPER_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
                // SFLOAT
                expectedOperandSize = 2;
        case OP_CODE_RESPONSE_CODE ->
                // Request Op Code (UINT8), Response Code Value (UINT8)
                expectedOperandSize = 2;
        default -> {
            onInvalidDataReceived(device, data);
            return;
        }
    }

    // Verify packet length: either exactly op code + operand, or that plus a 2-byte CRC.
    if (data.size() != 1 + expectedOperandSize
            && data.size() != 1 + expectedOperandSize + 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Verify CRC if present
    final boolean crcPresent = data.size() == 1 + expectedOperandSize + 2; // opCode + expected operand + CRC
    if (crcPresent) {
        final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 1 + expectedOperandSize);
        final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 1 + expectedOperandSize);
        if (expectedCrc != actualCrc) {
            onCGMSpecificOpsResponseReceivedWithCrcError(device, data);
            return;
        }
    }

    // Op codes with a dedicated operand layout are handled here and return early.
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> {
            final int interval = data.getIntValue(Data.FORMAT_UINT8, 1);
            onContinuousGlucoseCommunicationIntervalReceived(device, interval, crcPresent);
            return;
        }
        case OP_CODE_CALIBRATION_VALUE_RESPONSE -> {
            // Operand bytes 1..10: SFLOAT concentration, UINT16 time,
            // packed type/location nibbles, UINT16 next time, UINT16 record no, UINT8 status.
            final float glucoseConcentrationOfCalibration = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
            final int calibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 3);
            final int calibrationTypeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 5);
            @SuppressLint("WrongConstant")
            // Low nibble: calibration type; high nibble: sample location.
            final int calibrationType = calibrationTypeAndSampleLocation & 0x0F;
            final int calibrationSampleLocation = calibrationTypeAndSampleLocation >> 4;
            final int nextCalibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 6);
            final int calibrationDataRecordNumber = data.getIntValue(Data.FORMAT_UINT16_LE, 8);
            final int calibrationStatus = data.getIntValue(Data.FORMAT_UINT8, 10);
            onContinuousGlucoseCalibrationValueReceived(device, glucoseConcentrationOfCalibration,
                    calibrationTime, nextCalibrationTime, calibrationType, calibrationSampleLocation,
                    calibrationDataRecordNumber, new CGMCalibrationStatus(calibrationStatus), crcPresent);
            return;
        }
        case OP_CODE_RESPONSE_CODE -> {
            final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1); // ignore
            final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 2);
            if (responseCode == CGM_RESPONSE_SUCCESS) {
                onCGMSpecificOpsOperationCompleted(device, requestCode, crcPresent);
            } else {
                onCGMSpecificOpsOperationError(device, requestCode, responseCode, crcPresent);
            }
            return;
        }
    }

    // Read SFLOAT value — all remaining op codes carry a single SFLOAT alert level.
    final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
    switch (opCode) {
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucosePatientHighAlertReceived(device, value, crcPresent);
        case OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucosePatientLowAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPO_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseHypoAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPER_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseHyperAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseRateOfDecreaseAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseRateOfIncreaseAlertReceived(device, value, crcPresent);
    }
}
@Test
public void onContinuousGlucoseCalibrationValueReceived_withCrc() {
    // Calibration Value response (op code 6) followed by a 2-byte CRC.
    final MutableData data = new MutableData(new byte[13]);
    data.setValue(6, Data.FORMAT_UINT8, 0);            // op code: calibration value response
    data.setValue(1, 2, Data.FORMAT_SFLOAT, 1);        // glucose concentration (SFLOAT)
    data.setValue(10, Data.FORMAT_UINT16_LE, 3);       // calibration time
    data.setValue(0x32, Data.FORMAT_UINT8, 5);         // type (low nibble) + sample location (high nibble)
    data.setValue(20, Data.FORMAT_UINT16_LE, 6);       // next calibration time
    data.setValue(1, Data.FORMAT_UINT16_LE, 8);        // calibration data record number
    data.setValue(0b100, Data.FORMAT_UINT8, 10);       // calibration status
    data.setValue(0xB2BF, Data.FORMAT_UINT16_LE, 11);  // CRC16 (MCRF4XX) over bytes 0..10
    callback.onDataReceived(null, data);
    assertTrue(valueReceived);
    assertTrue(secured);
}
/**
 * Sets the active farming contract, persists it, and refreshes the derived
 * contract state.
 *
 * @param contract the new contract produce, or null to clear the contract
 */
public void setContract(@Nullable Produce contract) {
    this.contract = contract;
    setStoredContract(contract);
    handleContractState();
}
@Test
public void cabbageContractOnionHarvestableAndCabbageGrowing() {
    final long unixNow = Instant.now().getEpochSecond();
    final long expectedTime = unixNow + 60;

    // Get the two allotment patches
    final FarmingPatch patch1 = farmingGuildPatches.get(Varbits.FARMING_4773);
    final FarmingPatch patch2 = farmingGuildPatches.get(Varbits.FARMING_4774);
    assertNotNull(patch1);
    assertNotNull(patch2);

    // Specify the two allotment patches: one holds harvestable onions,
    // the other the contract crop (cabbage) that is still growing.
    when(farmingTracker.predictPatch(patch1))
        .thenReturn(new PatchPrediction(Produce.ONION, CropState.HARVESTABLE, unixNow, 3, 3));
    when(farmingTracker.predictPatch(patch2))
        .thenReturn(new PatchPrediction(
            Produce.CABBAGE,
            CropState.GROWING,
            expectedTime,
            2,
            3));

    farmingContractManager.setContract(Produce.CABBAGE);

    // The growing cabbage patch drives the summary, crop state and completion time.
    assertEquals(SummaryState.IN_PROGRESS, farmingContractManager.getSummary());
    assertEquals(CropState.GROWING, farmingContractManager.getContractCropState());
    assertEquals(expectedTime, farmingContractManager.getCompletionTime());
}
/**
 * Truncates the given string to at most {@code maxLength} characters,
 * appending a suffix of the form {@code ...[truncated n symbols]} where n is
 * the number of removed characters. Delegates to the three-argument overload
 * (not shown here) which handles null input and non-truncating lengths.
 */
public static String truncate(String string, int maxLength) {
    return truncate(string, maxLength, n -> "...[truncated " + n + " symbols]");
}
@Test
void testTruncate() {
    int maxLength = 5;
    // null and inputs no longer than maxLength pass through unchanged.
    assertThat(StringUtils.truncate(null, maxLength)).isNull();
    assertThat(StringUtils.truncate("", maxLength)).isEmpty();
    assertThat(StringUtils.truncate("123", maxLength)).isEqualTo("123");
    // Longer input is cut to maxLength and the removed-symbol count is appended.
    assertThat(StringUtils.truncate("1234567", maxLength)).isEqualTo("12345...[truncated 2 symbols]");
    // A maxLength of 0 disables truncation entirely.
    assertThat(StringUtils.truncate("1234567", 0)).isEqualTo("1234567");
}
/**
 * Transaction callback fired after a routine-load task's transaction has been
 * committed. Updates the job bookkeeping and metrics; if anything fails here,
 * the job is paused rather than rolled back, because the transaction itself
 * is already committed. NOTE: the job's write lock is acquired earlier (in
 * executeBeforeCheck) and is released in the finally block below.
 */
@Override
public void afterCommitted(TransactionState txnState, boolean txnOperated) throws UserException {
    long taskBeId = -1L;
    try {
        if (txnOperated) {
            // find task in job
            Optional<RoutineLoadTaskInfo> routineLoadTaskInfoOptional = routineLoadTaskInfoList.stream().filter(
                    entity -> entity.getTxnId() == txnState.getTransactionId()).findFirst();
            if (routineLoadTaskInfoOptional.isPresent()) {
                RoutineLoadTaskInfo routineLoadTaskInfo = routineLoadTaskInfoOptional.get();
                taskBeId = routineLoadTaskInfo.getBeId();
                executeTaskOnTxnStatusChanged(routineLoadTaskInfo, txnState, TransactionStatus.COMMITTED, null);
                routineLoadTaskInfo.afterCommitted(txnState, txnOperated);
            }
            ++committedTaskNum;
            TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tableId);
            entity.counterRoutineLoadCommittedTasksTotal.increase(1L);
            LOG.debug("routine load task committed. task id: {}, job id: {}", txnState.getLabel(), id);

            // Propagate the commit to an attached synchronous stream-load task, if any.
            StreamLoadTask streamLoadTask = GlobalStateMgr.getCurrentState().getStreamLoadMgr().
                    getSyncSteamLoadTaskByTxnId(txnState.getTransactionId());
            if (streamLoadTask != null) {
                streamLoadTask.afterCommitted(txnState, txnOperated);
            }
        }
    } catch (Throwable e) {
        // The transaction is already committed: pause the job instead of failing it.
        LOG.warn("after committed failed", e);
        String errmsg = "be " + taskBeId + " commit task failed " + txnState.getLabel()
                + " with error " + e.getMessage()
                + " while transaction " + txnState.getTransactionId() + " has been committed";
        updateState(JobState.PAUSED, new ErrorReason(InternalErrorCode.INTERNAL_ERR, errmsg),
                false /* not replay */);
    } finally {
        // this lock is locked in executeBeforeCheck function
        writeUnlock();
        LOG.debug("unlock write lock of routine load job after committed: {}", id);
    }
}
@Test
public void testAfterCommitted(@Mocked RoutineLoadMgr routineLoadMgr,
                               @Injectable TransactionState transactionState,
                               @Injectable KafkaTaskInfo routineLoadTaskInfo) throws UserException {
    // Wire the mocked routine-load manager into the mocked task.
    Deencapsulation.setField(routineLoadTaskInfo, "routineLoadManager", routineLoadMgr);
    List<RoutineLoadTaskInfo> routineLoadTaskInfoList = Lists.newArrayList();
    routineLoadTaskInfoList.add(routineLoadTaskInfo);
    long txnId = 1L;

    // Commit attachment carrying an (empty) Kafka progress.
    RLTaskTxnCommitAttachment attachment = new RLTaskTxnCommitAttachment();
    TKafkaRLTaskProgress tKafkaRLTaskProgress = new TKafkaRLTaskProgress();
    tKafkaRLTaskProgress.partitionCmtOffset = Maps.newHashMap();
    KafkaProgress kafkaProgress = new KafkaProgress(tKafkaRLTaskProgress.getPartitionCmtOffset());
    Deencapsulation.setField(attachment, "progress", kafkaProgress);

    KafkaProgress currentProgress = new KafkaProgress(tKafkaRLTaskProgress.getPartitionCmtOffset());

    RoutineLoadJob routineLoadJob = new KafkaRoutineLoadJob();
    new Expectations() {
        {
            transactionState.getTransactionId();
            minTimes = 0;
            result = txnId;
            routineLoadTaskInfo.getTxnId();
            minTimes = 0;
            result = txnId;
            transactionState.getTxnCommitAttachment();
            minTimes = 0;
            result = attachment;
            routineLoadTaskInfo.getPartitions();
            minTimes = 0;
            result = Lists.newArrayList();
            routineLoadTaskInfo.getId();
            minTimes = 0;
            result = UUID.randomUUID();
            routineLoadMgr.getJob(anyLong);
            minTimes = 0;
            result = routineLoadJob;
        }
    };

    // afterCommitted releases a write lock taken elsewhere; stub the unlock.
    new MockUp<RoutineLoadJob>() {
        @Mock
        void writeUnlock() {
        }
    };

    Deencapsulation.setField(routineLoadJob, "state", RoutineLoadJob.JobState.RUNNING);
    Deencapsulation.setField(routineLoadJob, "routineLoadTaskInfoList", routineLoadTaskInfoList);
    Deencapsulation.setField(routineLoadJob, "progress", currentProgress);

    TableMetricsEntity entity =
            TableMetricsRegistry.getInstance().getMetricsEntity(routineLoadTaskInfo.getJob().tableId);
    long prevValue = entity.counterRoutineLoadCommittedTasksTotal.getValue();

    routineLoadJob.afterCommitted(transactionState, true);

    // The job stays RUNNING; the committed-task counter and metric are bumped by one.
    Assert.assertEquals(RoutineLoadJob.JobState.RUNNING, routineLoadJob.getState());
    Assert.assertEquals(new Long(1), Deencapsulation.getField(routineLoadJob, "committedTaskNum"));
    Assert.assertEquals(new Long(prevValue + 1), entity.counterRoutineLoadCommittedTasksTotal.getValue());
}
/**
 * Runs validation against the given context and reports whether this config
 * ended up free of recorded errors.
 */
@Override
public boolean validateTree(ValidationContext validationContext) {
    validate(validationContext);
    return !hasErrors();
}
@Test
public void validateTree_shouldValidateNullId() {
    final ArtifactStores artifactStores = new ArtifactStores(new ArtifactStore("s3", "cd.go.s3"));
    PluggableArtifactConfig artifactConfig = new PluggableArtifactConfig(null, "s3");

    // A null artifact id must fail validation.
    assertFalse(artifactConfig.validateTree(ValidationContextMother.validationContext(artifactStores)));
}
/**
 * Decides whether this read must park until data becomes available.
 * Waits when the container does not exist yet or when the requested sequence
 * is one past the tail; lets stale/too-large sequences proceed so they fail
 * with a proper exception in beforeRun.
 */
@Override
public boolean shouldWait() {
    RingbufferContainer ringbuffer = getRingBufferContainerOrNull();
    // Container not created yet: wait until something is written to it.
    if (ringbuffer == null) {
        return true;
    }
    if (ringbuffer.isTooLargeSequence(sequence) || ringbuffer.isStaleSequence(sequence)) {
        //no need to wait, let the operation continue and fail in beforeRun
        return false;
    }
    // the sequence is not readable only when it is exactly one past the tail
    return sequence == ringbuffer.tailSequence() + 1;
}
@Test
public void whenOnTailAndBufferEmpty() {
    ReadOneOperation op = getReadOneOperation(ringbuffer.tailSequence());
    // since there is an item, we don't need to wait
    // NOTE(review): the return value of shouldWait() is not asserted here;
    // presumably it should be false for this stale sequence — confirm intent.
    op.shouldWait();
    // Reading a stale sequence must fail when the operation actually runs.
    assertThrows(StaleSequenceException.class, op::beforeRun);
}
/**
 * Creates the single group used by this graph: all samples map to one series
 * named after the configured sample variable, and values are averaged.
 */
@Override
protected Map<String, GroupInfo> createGroupInfos() {
    // Every sample contributes to the same series: the configured variable name.
    AbstractSeriesSelector seriesSelector = new AbstractSeriesSelector() {
        @Override
        public Iterable<String> select(Sample sample) {
            return Collections.singletonList(sampleVariableName);
        }
    };

    GraphValueSelector graphValueSelector = (series, sample) -> {
        String value;
        // Native CSV columns are looked up directly; custom sample variables
        // are stored with quote characters around their names.
        if (isNativeSampleVariableName) {
            value = sample.getData(sampleVariableName);
        } else {
            value = sample.getData(CSVSaveService.VARIABLE_NAME_QUOTE_CHAR
                    + sampleVariableName + CSVSaveService.VARIABLE_NAME_QUOTE_CHAR);
        }
        // Missing values and the literal "null" produce no data point.
        if (StringUtils.isEmpty(value) || "null".equals(value)) {
            return null;
        }
        try {
            return Converters.convert(Double.class, value);
        } catch (ConvertException e) {
            throw new IllegalArgumentException("Double converter failed", e);
        }
    };

    return Collections.singletonMap(
            AbstractGraphConsumer.DEFAULT_GROUP,
            new GroupInfo(
                    new MeanAggregatorFactory(),
                    seriesSelector,
                    // We ignore Transaction Controller results
                    graphValueSelector, false, false));
}
@Test
public void testCreateGroupInfos() {
    // Testing defaults values
    assertThat(map.containsKey("Generic group"), equalTo(true));
    assertThat(map.containsKey("foo"), equalTo(false));
    assertThat(map.get("Generic group").getAggregatorFactory().getClass(),
            equalTo(org.apache.jmeter.report.processor.MeanAggregatorFactory.class));
    GroupData groupData = map.get("Generic group").getGroupData();
    assertThat(groupData.getOverallSeries(), equalTo(null));
    assertThat(groupData.getSeriesInfo(), equalTo(new HashMap<String, SeriesData>()));

    // Testing native sample variable
    customGraphConsumer.setSampleVariableName("bytes");
    Sample sample = new Sample(0, sampleMetaData, data);
    Double testedValue = map.get("Generic group").getValueSelector().select("bytes", sample);
    assertThat(testedValue, equalTo(492.0));

    // Testing non-native sample variable (value is the literal "null" -> no data point)
    customGraphConsumer.setSampleVariableName("mm-miss");
    testedValue = map.get("Generic group").getValueSelector().select("mm-miss", sample);
    assertThat(testedValue, equalTo(null));

    // Testing empty data value, the change between data and data2
    // is on the last value that switchs from "null" to ""
    String[] data2 = {"1527089951383", "0", "Read-compute", "200", "OK",
            "setupRegion 1-1", "true", "", "492", "0", "1", "1", "null", "0",
            "0", "0", "/stream1a/master.m3u8?e=0&h=56345c61b7b415e0260c19963a153092",
            "null", "5500000", "null", "null", "null", "null", "null", "null",
            "null", "null", "null", "null", "null", "null", "null", "null", ""};
    sample = new Sample(0, sampleMetaData, data2);
    testedValue = map.get("Generic group").getValueSelector().select("mm-miss", sample);
    assertThat(testedValue, equalTo(null));
}
/**
 * Returns a copy of the given string with its first character converted to
 * upper case using {@link java.util.Locale#ROOT} (locale-independent).
 *
 * @param string the string to capitalize; may be null
 * @return the capitalized string; null for null input, and the empty string
 *         unchanged (the previous implementation threw
 *         StringIndexOutOfBoundsException for "")
 */
public static String capitalize(String string) {
    // Guard both null and "" — substring( 0, 1 ) would throw on an empty string.
    if ( string == null || string.isEmpty() ) {
        return string;
    }
    return string.substring( 0, 1 ).toUpperCase( Locale.ROOT ) + string.substring( 1 );
}
@Test
public void testCapitalize() {
    assertThat( Strings.capitalize( null ) ).isNull();
    assertThat( Strings.capitalize( "c" ) ).isEqualTo( "C" );
    assertThat( Strings.capitalize( "capitalize" ) ).isEqualTo( "Capitalize" );
    // Already-capitalized input is returned unchanged.
    assertThat( Strings.capitalize( "AlreadyCapitalized" ) ).isEqualTo( "AlreadyCapitalized" );
    assertThat( Strings.capitalize( "notCapitalized" ) ).isEqualTo( "NotCapitalized" );
}
/**
 * CLI entry point: initializes the timeline web-service client, runs the
 * requested command, and always releases both the YARN client and the
 * web-service client before returning.
 */
@Override
public int run(String[] args) throws Exception {
    try {
        webServiceClient = WebServiceClient.getWebServiceClient().createClient();
        return runCommand(args);
    } finally {
        // Null checks guard the path where client creation itself failed.
        if (yarnClient != null) {
            yarnClient.close();
        }
        if (webServiceClient != null) {
            webServiceClient.destroy();
        }
    }
}
@Test (timeout = 5000)
public void testWithNonMatchingEntityIds() throws Exception {
    // Each mismatched id pair must fail with exit code -1 and a clear error message.
    ApplicationId appId1 = ApplicationId.newInstance(0, 1);
    ApplicationId appId2 = ApplicationId.newInstance(0, 2);
    ApplicationAttemptId appAttemptId1 = ApplicationAttemptId.newInstance(appId1, 1);
    ApplicationAttemptId appAttemptId2 = ApplicationAttemptId.newInstance(appId2, 1);
    ContainerId containerId0 = ContainerId.newContainerId(appAttemptId1, 0);
    LogsCLI cli = createCli();

    // Non-matching applicationId and applicationAttemptId
    int exitCode = cli.run(new String[] {"-applicationId", appId2.toString(),
        "-applicationAttemptId", appAttemptId1.toString()});
    assertTrue(exitCode == -1);
    assertTrue(sysErrStream.toString().contains(
        "The Application:" + appId2.toString()
        + " does not have the AppAttempt:" + appAttemptId1.toString()));
    sysErrStream.reset();

    // Non-matching applicationId and containerId
    exitCode = cli.run(new String[] {"-applicationId", appId2.toString(),
        "-containerId", containerId0.toString()});
    assertTrue(exitCode == -1);
    assertTrue(sysErrStream.toString().contains(
        "The Application:" + appId2.toString()
        + " does not have the container:" + containerId0.toString()));
    sysErrStream.reset();

    // Non-matching applicationAttemptId and containerId
    exitCode = cli.run(new String[] {"-applicationAttemptId",
        appAttemptId2.toString(), "-containerId", containerId0.toString()});
    assertTrue(exitCode == -1);
    assertTrue(sysErrStream.toString().contains(
        "The AppAttempt:" + appAttemptId2.toString()
        + " does not have the container:" + containerId0.toString()));
    sysErrStream.reset();
}
/**
 * Opens a download stream for the given file (and, if set, its specific
 * revision). For resumed transfers only the remaining byte range is requested.
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        final DownloadBuilder builder = new DbxUserFilesRequests(session.getClient(file))
                .downloadBuilder(containerService.getKey(file)).withRev(file.attributes().getVersionId());
        if(status.isAppend()) {
            // Resume: request only the remaining byte range.
            final HttpRange range = HttpRange.withStatus(status);
            builder.range(range.getStart());
        }
        final DbxDownloader<FileMetadata> downloader = builder.start();
        return downloader.getInputStream();
    }
    catch(DbxException e) {
        throw new DropboxExceptionMappingService().map("Download {0} failed", e, file);
    }
}
@Test
public void testReadInterrupt() throws Exception {
    // Upload a random file so there is something to read.
    final DropboxWriteFeature write = new DropboxWriteFeature(session);
    final TransferStatus writeStatus = new TransferStatus();
    final byte[] content = RandomUtils.nextBytes(66800);
    writeStatus.setLength(content.length);
    final Path test = new Path(new DefaultHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final OutputStream out = write.write(test, writeStatus, new DisabledConnectionCallback());
    assertNotNull(out);
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    // Unknown length in status
    final TransferStatus readStatus = new TransferStatus();
    // Read a single byte, then close early — the stream must not break the session.
    {
        final InputStream in = new DropboxReadFeature(session).read(test, readStatus, new DisabledConnectionCallback());
        assertNotNull(in.read());
        in.close();
    }
    // A subsequent read on the same session must still succeed.
    {
        final InputStream in = new DropboxReadFeature(session).read(test, readStatus, new DisabledConnectionCallback());
        assertNotNull(in);
        in.close();
    }
    new DropboxDeleteFeature(session).delete(Collections.singletonList(test),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Sends the Redis ECHO command and returns the message bytes unchanged.
 */
@Override
public byte[] echo(byte[] message) {
    return read(null, ByteArrayCodec.INSTANCE, ECHO, message);
}
@Test
public void testEcho() {
    // ECHO must return exactly the bytes that were sent.
    assertThat(connection.echo("test".getBytes())).isEqualTo("test".getBytes());
}
/**
 * Resolves the approver users: the leader of the department {@code level}
 * levels above the process starter's department. Climbing stops early when a
 * department has no parent (high-ranking users may have fewer ancestor levels).
 *
 * @param execution the current process execution
 * @param level how many department levels to climb; must be &gt; 0
 * @return the leader's user id as a singleton set, or an empty set when no
 *         department/leader can be resolved
 */
public Set<Long> calculateUsers(DelegateExecution execution, int level) {
    Assert.isTrue(level > 0, "level 必须大于 0");
    // Resolve the process starter.
    ProcessInstance processInstance = processInstanceService.getProcessInstance(execution.getProcessInstanceId());
    Long startUserId = NumberUtils.parseLong(processInstance.getStartUserId());
    // Climb up to the department at the requested level.
    DeptRespDTO dept = null;
    for (int i = 0; i < level; i++) {
        // Resolve the department for this iteration's level.
        if (dept == null) {
            dept = getStartUserDept(startUserId);
            if (dept == null) {
                // The starter has no department, so this rule cannot be applied.
                return emptySet();
            }
        } else {
            DeptRespDTO parentDept = deptApi.getDept(dept.getParentId()).getCheckedData();
            if (parentDept == null) {
                // No parent department: stop climbing. Rationale: higher-ranking
                // users may sit in departments with fewer ancestor levels.
                break;
            }
            dept = parentDept;
        }
    }
    return dept.getLeaderUserId() != null ? asSet(dept.getLeaderUserId()) : emptySet();
}
@Test
public void testCalculateUsers_noParentDept() {
    // Prepare arguments.
    DelegateExecution execution = mockDelegateExecution(1L);
    // Mock the starter user.
    AdminUserRespDTO startUser = randomPojo(AdminUserRespDTO.class, o -> o.setDeptId(10L));
    when(adminUserApi.getUser(eq(1L))).thenReturn(success(startUser));
    DeptRespDTO startUserDept = randomPojo(DeptRespDTO.class, o -> o.setId(10L).setParentId(100L)
            .setLeaderUserId(20L));
    // Mock getDept: the starter's department has no resolvable parent.
    when(deptApi.getDept(eq(10L))).thenReturn(success(startUserDept));
    when(deptApi.getDept(eq(100L))).thenReturn(success(null));
    // Invoke.
    Set<Long> result = expression.calculateUsers(execution, 2);
    // Assert: falls back to the leader of the starter's own department.
    assertEquals(asSet(20L), result);
}
// Returns a duplicate of the held serializer on every call — presumably so stateful
// serializers are never shared between callers (confirm against TypeSerializer contract).
@Nonnull public TypeSerializer<T> getSerializer() { return typeSerializer.duplicate(); }
// Verifies getSerializer() hands out distinct serializer instances on successive calls,
// using Kryo (via GenericTypeInfo) because it actually duplicates stateful serializers.
@Test void testSerializerDuplication() { // we need a serializer that actually duplicates for testing (a stateful one) // we use Kryo here, because it meets these conditions TestStateDescriptor<String> descr = new TestStateDescriptor<>("foobar", new GenericTypeInfo<>(String.class)); TypeSerializer<String> serializerA = descr.getSerializer(); TypeSerializer<String> serializerB = descr.getSerializer(); // check that the retrieved serializers are not the same assertThat(serializerB).isNotSameAs(serializerA); }
// Returns the cached entry list directly (no defensive copy — callers share the cache).
public List<Entry> entries() { return entriesCache; }
// Verifies a freshly created RecordingLog starts with zero entries.
@Test void shouldCreateNewIndex() { try (RecordingLog recordingLog = new RecordingLog(tempDir, true)) { assertEquals(0, recordingLog.entries().size()); } }
// Generates the next monotonically increasing GTID: 42-bit timestamp (relative to EPOCH)
// combined with the cluster id and a per-millisecond sequence. Clock regressions are
// clamped to lastTimestamp; sequence exhaustion rolls the timestamp forward by 1ms.
// Throws IllegalStateException once the 42-bit timestamp space is exhausted.
public synchronized long nextGtid() { long timestamp = timeGen(); if (timestamp < lastTimestamp) { timestamp = lastTimestamp; } if (lastTimestamp == timestamp) { sequence = (sequence + 1) & MAX_SEQUENCE; if (sequence == 0) { timestamp += 1; } } else { sequence = 0L; } if (timestamp - EPOCH >= (1L << 42)) { throw new IllegalStateException("Timestamp overflow"); } lastTimestamp = timestamp; return ((timestamp - EPOCH) << TIMESTAMP_SHIFT) | (CLUSTER_ID << CLUSTER_ID_SHIFT) | sequence; }
// Verifies the cluster-id bit field extracted from a generated GTID matches CLUSTER_ID.
@Test public void testClusterIdInGtid() { long gtid = gtidGenerator.nextGtid(); long clusterId = (gtid >> GtidGenerator.CLUSTER_ID_SHIFT) & GtidGenerator.MAX_CLUSTER_ID; Assertions.assertEquals(GtidGenerator.CLUSTER_ID, clusterId, "Cluster ID should be correctly set in GTID"); }
/**
 * Tells whether the given command part is the end marker.
 *
 * @param commandPart the part to inspect; must not be null (a null argument
 *                    raises NullPointerException, exactly as before)
 * @return true iff the part is exactly the one-character string "e"
 */
public final static boolean isEnd(String commandPart) {
    // Dereferencing commandPart preserves the original NPE-on-null behavior.
    return commandPart.equals("e");
}
// Verifies isEnd accepts exactly "e", rejects other strings, and throws NPE on null.
@Test public void testEnd() { assertTrue(Protocol.isEnd("e")); assertFalse(Protocol.isEnd("")); assertFalse(Protocol.isEnd("btrue")); try { Protocol.isEnd(null); fail(); } catch (NullPointerException e) { assertTrue(true); } }
// Writes the value through the wrapped expression against the given evaluation context.
// Note the argument order flips: this API takes (value, context), the delegate (context, value).
@Override public void setValue(Object value, Object elContext) { expression.setValue(elContext, value); }
// Verifies setValue writes through a SpEL "name" expression into the target object.
@Test void testSetValue() { ExpressionParser parser = new SpelExpressionParser(); String expression = "name"; SpringELExpressionObject springELExpressionObject = new SpringELExpressionObject(); Expression defaultExpression = parser.parseExpression(expression); SpringELExpression springELExpression = new SpringELExpression(defaultExpression); springELExpression.setValue("test", springELExpressionObject); Assertions.assertEquals(springELExpressionObject.getName(), "test"); }
// Convenience overload: delegates to the four-argument variant with a fresh RequestContext.
public static void validateRequestHeadersAndUpdateResourceContext(final Map<String, String> headers, final Set<String> customMimeTypesSupported, ServerResourceContext resourceContext) { validateRequestHeadersAndUpdateResourceContext(headers, customMimeTypesSupported, resourceContext, new RequestContext()); }
// Verifies that for in-process resolution the headers map is never read (the stub map
// throws if touched) and the response mime type defaults to JSON.
@Test() public void testValidateRequestHeadersForInProcessRequest() throws Exception { Map<String, String> headers = new AbstractMap<String, String>() { @Override public Set<Entry<String, String>> entrySet() { throw new IllegalStateException("Didn't expect headers to be accessed."); } }; RequestContext requestContext = new RequestContext(); requestContext.putLocalAttr(ServerResourceContext.CONTEXT_IN_PROCESS_RESOLUTION_KEY, true); ServerResourceContext resourceContext = new ResourceContextImpl(); RestUtils.validateRequestHeadersAndUpdateResourceContext(headers, Collections.emptySet(), resourceContext, requestContext); Assert.assertEquals(resourceContext.getResponseMimeType(), ContentType.JSON.getHeaderKey()); }
public static int getIndexCharsCount( int MaxIndex ) { // int CharsCount = 1; // if ( MaxIndex <= 0 ) return 1; // CharsCount = (int)Math.log10( MaxIndex ) + 1; // return CharsCount; }
// Verifies digit-count behavior: negatives map to 1, 5 -> 1, 345 -> 3.
@Test public void testgetIndexCharsCount() throws Exception { // assertEquals( 1, BTools.getIndexCharsCount( -5 ) ); assertEquals( 1, BTools.getIndexCharsCount( 5 ) ); assertEquals( 3, BTools.getIndexCharsCount( 345 ) ); // }
// Maps an upstream topic-partition into the downstream namespace: topics that originated
// from the target cluster are stripped back to their original name (us-west.topic1 -> topic1);
// everything else gets the source-cluster prefix (topic1 -> us-west.topic1). Partition is kept.
TopicPartition renameTopicPartition(TopicPartition upstreamTopicPartition) { if (targetClusterAlias.equals(replicationPolicy.topicSource(upstreamTopicPartition.topic()))) { // this topic came from the target cluster, so we rename like us-west.topic1 -> topic1 return new TopicPartition(replicationPolicy.originalTopic(upstreamTopicPartition.topic()), upstreamTopicPartition.partition()); } else { // rename like topic1 -> us-west.topic1 return new TopicPartition(replicationPolicy.formatRemoteTopic(sourceClusterAlias, upstreamTopicPartition.topic()), upstreamTopicPartition.partition()); } }
// Verifies renameTopicPartition prefixes plain topics with the source alias, strips the
// target alias, and double-prefixes already-remote topics from a third cluster.
@Test public void testDownstreamTopicRenaming() { MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", new DefaultReplicationPolicy(), null, Collections.emptySet(), Collections.emptyMap(), new CheckpointStore(Collections.emptyMap())); assertEquals(new TopicPartition("source1.topic3", 4), mirrorCheckpointTask.renameTopicPartition(new TopicPartition("topic3", 4)), "Renaming source1.topic3 failed"); assertEquals(new TopicPartition("topic3", 5), mirrorCheckpointTask.renameTopicPartition(new TopicPartition("target2.topic3", 5)), "Renaming target2.topic3 failed"); assertEquals(new TopicPartition("source1.source6.topic7", 8), mirrorCheckpointTask.renameTopicPartition(new TopicPartition("source6.topic7", 8)), "Renaming source1.source6.topic7 failed"); }
/**
 * Strips "class" keys from a map-shaped payload: removes the top-level "class"
 * entry and, when a nested map is stored under "result", its "class" entry too.
 * Non-map arguments are returned untouched.
 *
 * @param object the payload to scrub (mutated in place when it is a Map)
 * @return the same object reference that was passed in
 */
public static Object removeClass(final Object object) {
    if (!(object instanceof Map)) {
        return object;
    }
    final Map<?, ?> map = (Map<?, ?>) object;
    final Object nested = map.get("result");
    if (nested instanceof Map) {
        ((Map<?, ?>) nested).remove("class");
    }
    map.remove("class");
    return object;
}
// Verifies removeClass drops the top-level "class" key, keeps other keys, and scrubs the
// nested map under "result" without removing the "result" entry itself.
@Test public void removeClass() { Map<String, Map<String, String>> testMap = new HashMap<>(); Map<String, String> testSubMap = new HashMap<>(); testSubMap.put("class", "NullPointerException.class"); testSubMap.put("not_class", "ClassNotFoundException.class"); testMap.put("class", testSubMap); testMap.put("not_class", testSubMap); JsonUtils.removeClass(testMap); assertNull(testMap.getOrDefault("class", null)); assertEquals(testMap.get("not_class").get("not_class"), "ClassNotFoundException.class"); testMap = new HashMap<>(); testMap.put("result", testSubMap); JsonUtils.removeClass(testMap); assertNotNull(testMap.getOrDefault("result", null)); assertEquals(testMap.get("result").get("not_class"), "ClassNotFoundException.class"); }
// Convenience overload: delegates to the four-argument range with the final flag set to
// true (presumably "to" bound inclusive — confirm against the overload's signature).
public MemoryLRUCacheBytesIterator range(final String namespace, final Bytes from, final Bytes to) { return range(namespace, from, to, true); }
// Verifies the range iterator of an empty/zero-capacity cache throws on peekNextKey.
@Test public void shouldThrowIfNoPeekNextKeyRange() { final ThreadCache cache = setupThreadCache(0, 0, 10000L, false); shouldThrowIfNoPeekNextKey(() -> cache.range(namespace, Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{1}))); }
// Deletes a notice after first validating it exists (validateNoticeExists throws otherwise).
@Override public void deleteNotice(Long id) { // 校验是否存在 validateNoticeExists(id); // 删除通知公告 noticeMapper.deleteById(id); }
// Verifies deleteNotice removes an existing row so a subsequent selectById returns null.
@Test public void testDeleteNotice_success() { // 插入前置数据 NoticeDO dbNotice = randomPojo(NoticeDO.class); noticeMapper.insert(dbNotice); // 删除 noticeService.deleteNotice(dbNotice.getId()); // 检查是否删除成功 assertNull(noticeMapper.selectById(dbNotice.getId())); }
// Walks the AST with the internal Visitor (which records sources as a side effect) and
// then returns everything collected so far via getAllSources().
public Set<Analysis.AliasedDataSource> extractDataSources(final AstNode node) { new Visitor().process(node, null); return getAllSources(); }
// Verifies an aliased FROM clause ("TEST1 t") registers the alias T as a source.
@Test public void shouldHandleAliasedDataSources() { // Given: final AstNode stmt = givenQuery("SELECT * FROM TEST1 t;"); // When: extractor.extractDataSources(stmt); // Then: assertContainsAlias(SourceName.of("T")); }
// Convenience overload: delegates to the three-argument variant with a fresh RequestContext.
@Override public void streamRequest(StreamRequest request, Callback<StreamResponse> callback) { streamRequest(request, new RequestContext(), callback); }
// Verifies that with stream-retry disabled the RetryClient surfaces the failure
// ("Data not available") instead of retrying the stream request.
@Test(retryAnalyzer = ThreeRetries.class) // Known to be flaky in CI public void testIgnoreStreamRetry() throws Exception { SimpleLoadBalancer balancer = prepareLoadBalancer(Arrays.asList("http://test.linkedin.com/retry1", "http://test.linkedin.com/good"), HttpClientFactory.UNLIMITED_CLIENT_REQUEST_RETRY_RATIO); DynamicClient dynamicClient = new DynamicClient(balancer, null); RetryClient client = new RetryClient( dynamicClient, balancer, D2ClientConfig.DEFAULT_RETRY_LIMIT, RetryClient.DEFAULT_UPDATE_INTERVAL_MS, RetryClient.DEFAULT_AGGREGATED_INTERVAL_NUM, SystemClock.instance(), true, false); URI uri = URI.create("d2://retryService?arg1arg2"); StreamRequest streamRequest = new StreamRequestBuilder(uri).build(EntityStreams.newEntityStream(new ByteStringWriter(CONTENT))); DegraderTrackerClientTest.TestCallback<StreamResponse> restCallback = new DegraderTrackerClientTest.TestCallback<>(); client.streamRequest(streamRequest, restCallback); assertNull(restCallback.t); assertNotNull(restCallback.e); assertTrue(restCallback.e.getMessage().contains("Data not available")); }
// Fetches the public Eclipse profile for the given person id via a JSON GET. A 404 is
// translated into an ErrorResultException ("no profile data"); any other RestClientException
// is logged with the resolved URL and rethrown as a 500-level ErrorResultException.
public EclipseProfile getPublicProfile(String personId) { checkApiUrl(); var urlTemplate = eclipseApiUrl + "account/profile/{personId}"; var uriVariables = Map.of("personId", personId); var headers = new HttpHeaders(); headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON)); var request = new HttpEntity<Void>(headers); try { var response = restTemplate.exchange(urlTemplate, HttpMethod.GET, request, String.class, uriVariables); return parseEclipseProfile(response); } catch (RestClientException exc) { if (exc instanceof HttpStatusCodeException) { var status = ((HttpStatusCodeException) exc).getStatusCode(); if (status == HttpStatus.NOT_FOUND) throw new ErrorResultException("No Eclipse profile data available for user: " + personId); } var url = UriComponentsBuilder.fromUriString(urlTemplate).build(uriVariables); logger.error("Get request failed with URL: " + url, exc); throw new ErrorResultException("Request for retrieving user profile failed: " + exc.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR); } }
// Verifies getPublicProfile parses a mocked JSON response into name, GitHub handle and
// publisher-agreement fields.
@Test public void testGetPublicProfile() throws Exception { var urlTemplate = "https://test.openvsx.eclipse.org/account/profile/{personId}"; Mockito.when(restTemplate.exchange(eq(urlTemplate), eq(HttpMethod.GET), any(HttpEntity.class), eq(String.class), eq(Map.of("personId", "test")))) .thenReturn(mockProfileResponse()); var profile = eclipse.getPublicProfile("test"); assertThat(profile).isNotNull(); assertThat(profile.name).isEqualTo("test"); assertThat(profile.githubHandle).isEqualTo("test"); assertThat(profile.publisherAgreements).isNotNull(); assertThat(profile.publisherAgreements.openVsx).isNotNull(); assertThat(profile.publisherAgreements.openVsx.version).isEqualTo("1"); }
// Convenience overload: wraps the partial XML string as a UTF-8 stream and delegates to
// the InputStream-based variant.
public <T> T fromXmlPartial(String partial, Class<T> o) throws Exception { return fromXmlPartial(toInputStream(partial, UTF_8), o); }
// Verifies fromXmlPartial parses a <p4> snippet and materializes its ignore filter.
@Test void shouldLoadIgnoresFromP4Partial() throws Exception { String gitPartial = """ <p4 port="localhost:8080"> <filter> <ignore pattern="x"/> </filter> <view></view> </p4>"""; MaterialConfig p4Material = xmlLoader.fromXmlPartial(gitPartial, P4MaterialConfig.class); Filter parsedFilter = p4Material.filter(); Filter expectedFilter = new Filter(); expectedFilter.add(new IgnoredFiles("x")); assertThat(parsedFilter).isEqualTo(expectedFilter); }
// Builds "<snapshottableDir>/.snapshot/<snapshotRelativePath>", appending a separator to
// the base directory only when it does not already end with one.
public static String getSnapshotPath(String snapshottableDir, String snapshotRelativePath) { final StringBuilder b = new StringBuilder(snapshottableDir); if (b.charAt(b.length() - 1) != Path.SEPARATOR_CHAR) { b.append(Path.SEPARATOR); } return b.append(HdfsConstants.DOT_SNAPSHOT_DIR) .append(Path.SEPARATOR) .append(snapshotRelativePath) .toString(); }
// Verifies that changing a directory's times after snapshotting does not alter the
// snapshot view: the snapshot path still reports the pre-change mod/access times.
@Test (timeout=60000) public void testUpdateDirectory() throws Exception { Path dir = new Path("/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); FileStatus oldStatus = hdfs.getFileStatus(sub); hdfs.allowSnapshot(dir); hdfs.createSnapshot(dir, "s1"); hdfs.setTimes(sub, 100L, 100L); Path snapshotPath = SnapshotTestHelper.getSnapshotPath(dir, "s1", "sub"); FileStatus snapshotStatus = hdfs.getFileStatus(snapshotPath); assertEquals(oldStatus.getModificationTime(), snapshotStatus.getModificationTime()); assertEquals(oldStatus.getAccessTime(), snapshotStatus.getAccessTime()); }
/**
 * Tells whether a character sequence is empty.
 *
 * @param cs the sequence to test, may be null
 * @return true when cs is null or has length zero; whitespace-only input is NOT empty
 */
public static boolean isEmpty(final CharSequence cs) {
    if (cs == null) {
        return true;
    }
    return cs.length() == 0;
}
// Verifies isEmpty treats null and "" as empty but not whitespace or non-blank strings.
@Test void testIsEmpty() { assertTrue(StringUtils.isEmpty(null)); assertTrue(StringUtils.isEmpty("")); assertFalse(StringUtils.isEmpty(" ")); assertFalse(StringUtils.isEmpty("bob")); assertFalse(StringUtils.isEmpty(" bob ")); }
/**
 * Matches a domain name against a dotted pattern where "*" segments act as
 * single-label wildcards. Without any '*' the comparison is plain equality.
 * Segments are aligned from the right, so a pattern with fewer labels than
 * the name matches its trailing labels (e.g. "*.com" matches "a.b.com" only
 * when each pattern label lines up); a pattern with more labels never matches.
 *
 * @param name    the domain name to test
 * @param pattern the dotted pattern, possibly containing "*" labels
 * @return true when the name matches the pattern
 */
public static boolean matchDomain(String name, String pattern) {
    // No wildcard: fall back to exact string equality.
    if (pattern.indexOf('*') < 0) {
        return name.equals(pattern);
    }
    final String[] nameParts = name.split("\\.");
    final String[] patternParts = pattern.split("\\.");
    if (patternParts.length > nameParts.length) {
        return false;
    }
    // Right-align the pattern labels against the name labels.
    final int offset = nameParts.length - patternParts.length;
    for (int i = 0; i < patternParts.length; i++) {
        final String label = patternParts[i];
        if (!"*".equals(label) && !label.equals(nameParts[i + offset])) {
            return false;
        }
    }
    return true;
}
// Verifies wildcard domain matching: right-aligned '*' labels match, longer patterns and
// mismatched labels do not.
@Test public void testMatchDomain() { assertTrue(AddressUtil.matchDomain("hazelcast.com", "hazelcast.com")); assertTrue(AddressUtil.matchDomain("hazelcast.com", "*.com")); assertTrue(AddressUtil.matchDomain("jobs.hazelcast.com", "*.hazelcast.com")); assertTrue(AddressUtil.matchDomain("download.hazelcast.org", "*.hazelcast.*")); assertTrue(AddressUtil.matchDomain("download.hazelcast.org", "*.hazelcast.org")); assertFalse(AddressUtil.matchDomain("hazelcast.com", "abc.com")); assertFalse(AddressUtil.matchDomain("hazelcast.com", "*.hazelcast.com")); assertFalse(AddressUtil.matchDomain("hazelcast.com", "hazelcast.com.tr")); assertFalse(AddressUtil.matchDomain("hazelcast.com", "*.com.tr")); assertFalse(AddressUtil.matchDomain("www.hazelcast.com", "www.hazelcast.com.tr")); }
// Writes a string value into the in-memory JSON tree; a null argument is forwarded to
// nullValue() so it serializes as JSON null. Returns this for call chaining.
@CanIgnoreReturnValue @Override public JsonWriter value(String value) throws IOException { if (value == null) { return nullValue(); } put(new JsonPrimitive(value)); return this; }
// Verifies value(Boolean) returns the same writer instance (fluent API contract).
@Test public void testBoolMaisValue() throws Exception { JsonTreeWriter writer = new JsonTreeWriter(); Boolean bool = true; assertThat(writer.value(bool)).isEqualTo(writer); }
// Identity conversion: the displayed text is already the value, returned unchanged
// (including null).
@Override public String getValueFromText(String text) { return text; }
// Verifies getValueFromText returns the exact same reference for every input, null included.
@Test public void getValueFromText() { for (String text : Arrays.asList("a", null, "b", "")) { assertSame(text, render.getValueFromText(text)); } }
// Hides a model by delegating to showModel with show=false.
protected void hideModel(EpoxyModel<?> model) { showModel(model, false); }
// Verifies hiding a model notifies an item-range change and flips its shown state.
@Test public void testHideModel() { TestModel testModel = new TestModel(); testAdapter.addModels(testModel); testAdapter.hideModel(testModel); verify(observer).onItemRangeChanged(0, 1, null); assertFalse(testModel.isShown()); checkDifferState(); }
// Creates an RM delegation token for the kerberos-authenticated caller. Null token data
// or request is rejected (audited + IllegalArgumentException); a YarnException during
// creation is audited, logged, and mapped to HTTP 403.
@Override public Response postDelegationToken(DelegationToken tokenData, HttpServletRequest hsr) throws AuthorizationException, IOException, InterruptedException, Exception { if (tokenData == null || hsr == null) { RouterAuditLogger.logFailure(getUser().getShortUserName(), POST_DELEGATION_TOKEN, UNKNOWN, TARGET_WEB_SERVICE, "Parameter error, the tokenData or hsr is null."); throw new IllegalArgumentException("Parameter error, the tokenData or hsr is null."); } try { // get Caller UserGroupInformation Configuration conf = federationFacade.getConf(); UserGroupInformation callerUGI = getKerberosUserGroupInformation(conf, hsr); // create a delegation token return createDelegationToken(tokenData, callerUGI); } catch (YarnException e) { LOG.error("Create delegation token request failed.", e); RouterAuditLogger.logFailure(getUser().getShortUserName(), POST_DELEGATION_TOKEN, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); return Response.status(Status.FORBIDDEN).entity(e.getMessage()).build(); } }
// Verifies a kerberos-authenticated request yields an RM_DELEGATION_TOKEN whose renewer,
// owner, token body and future expiration are populated.
@Test public void testPostDelegationToken() throws Exception { Long now = Time.now(); DelegationToken token = new DelegationToken(); token.setRenewer(TEST_RENEWER); Principal principal = mock(Principal.class); when(principal.getName()).thenReturn(TEST_RENEWER); HttpServletRequest request = mock(HttpServletRequest.class); when(request.getRemoteUser()).thenReturn(TEST_RENEWER); when(request.getUserPrincipal()).thenReturn(principal); when(request.getAuthType()).thenReturn("kerberos"); Response response = interceptor.postDelegationToken(token, request); Assert.assertNotNull(response); Object entity = response.getEntity(); Assert.assertNotNull(entity); Assert.assertTrue(entity instanceof DelegationToken); DelegationToken dtoken = (DelegationToken) entity; Assert.assertEquals(TEST_RENEWER, dtoken.getRenewer()); Assert.assertEquals(TEST_RENEWER, dtoken.getOwner()); Assert.assertEquals("RM_DELEGATION_TOKEN", dtoken.getKind()); Assert.assertNotNull(dtoken.getToken()); Assert.assertTrue(dtoken.getNextExpirationTime() > now); }
// Parses the persisted tabs JSON into Tab objects. Falls back to the default tab list for
// null/empty input or when parsing yields no valid tabs; malformed JSON or a missing tabs
// array raises InvalidJsonException. Non-object array elements are silently skipped.
public static List<Tab> getTabsFromJson(@Nullable final String tabsJson) throws InvalidJsonException { if (tabsJson == null || tabsJson.isEmpty()) { return getDefaultTabs(); } final List<Tab> returnTabs = new ArrayList<>(); final JsonObject outerJsonObject; try { outerJsonObject = JsonParser.object().from(tabsJson); if (!outerJsonObject.has(JSON_TABS_ARRAY_KEY)) { throw new InvalidJsonException("JSON doesn't contain \"" + JSON_TABS_ARRAY_KEY + "\" array"); } final JsonArray tabsArray = outerJsonObject.getArray(JSON_TABS_ARRAY_KEY); for (final Object o : tabsArray) { if (!(o instanceof JsonObject)) { continue; } final Tab tab = Tab.from((JsonObject) o); if (tab != null) { returnTabs.add(tab); } } } catch (final JsonParserException e) { throw new InvalidJsonException(e); } if (returnTabs.isEmpty()) { return getDefaultTabs(); } return returnTabs; }
// Verifies that a missing tabs array or malformed JSON raises InvalidJsonException
// (note "{}" is also expected to fail here).
@Test public void testInvalidRead() { final List<String> invalidList = Arrays.asList( "{\"notTabsArray\":[]}", "{invalidJSON]}", "{}" ); for (final String invalidContent : invalidList) { try { TabsJsonHelper.getTabsFromJson(invalidContent); fail("didn't throw exception"); } catch (final Exception e) { final boolean isExpectedException = e instanceof TabsJsonHelper.InvalidJsonException; assertTrue("\"" + e.getClass().getSimpleName() + "\" is not the expected exception", isExpectedException); } } }
// Computes table statistics from a sample of partition statistics. Returns empty stats
// when statistics are disabled, zero stats when there are no partitions, and — when
// configured to ignore corrupted column statistics — logs and returns empty instead of
// propagating HIVE_CORRUPTED_COLUMN_STATISTICS errors.
@Override public TableStatistics getTableStatistics( ConnectorSession session, SchemaTableName table, Map<String, ColumnHandle> columns, Map<String, Type> columnTypes, List<HivePartition> partitions) { if (!isStatisticsEnabled(session)) { return TableStatistics.empty(); } if (partitions.isEmpty()) { return createZeroStatistics(columns, columnTypes); } int sampleSize = getPartitionStatisticsSampleSize(session); List<HivePartition> partitionsSample = getPartitionsSample(partitions, sampleSize); try { Map<String, PartitionStatistics> statisticsSample = statisticsProvider.getPartitionsStatistics(session, table, partitionsSample); validatePartitionStatistics(table, statisticsSample); return getTableStatistics(columns, columnTypes, partitions, statisticsSample); } catch (PrestoException e) { if (e.getErrorCode().equals(HIVE_CORRUPTED_COLUMN_STATISTICS.toErrorCode()) && isIgnoreCorruptedStatistics(session)) { log.error(e); return TableStatistics.empty(); } throw e; } }
// Verifies that a single partition's basic + integer column statistics are projected into
// row count, total size, partition-column stats and regular-column stats.
@Test public void testGetTableStatistics() { String partitionName = "p1=string1/p2=1234"; PartitionStatistics statistics = PartitionStatistics.builder() .setBasicStatistics(new HiveBasicStatistics(OptionalLong.empty(), OptionalLong.of(1000), OptionalLong.of(5000), OptionalLong.empty())) .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300)))) .build(); MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(partitionName, statistics), quickStatsProvider); TestingConnectorSession session = new TestingConnectorSession(new HiveSessionProperties( new HiveClientConfig(), new OrcFileWriterConfig(), new ParquetFileWriterConfig(), new CacheConfig()).getSessionProperties()); HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT.getTypeSignature(), 2, REGULAR, Optional.empty(), Optional.empty()); TableStatistics expected = TableStatistics.builder() .setRowCount(Estimate.of(1000)) .setTotalSize(Estimate.of(5000)) .setColumnStatistics( PARTITION_COLUMN_1, ColumnStatistics.builder() .setDataSize(Estimate.of(7000)) .setNullsFraction(Estimate.of(0)) .setDistinctValuesCount(Estimate.of(1)) .build()) .setColumnStatistics( PARTITION_COLUMN_2, ColumnStatistics.builder() .setRange(new DoubleRange(1234, 1234)) .setNullsFraction(Estimate.of(0)) .setDistinctValuesCount(Estimate.of(1)) .build()) .setColumnStatistics( columnHandle, ColumnStatistics.builder() .setRange(new DoubleRange(-100, 100)) .setNullsFraction(Estimate.of(0.5)) .setDistinctValuesCount(Estimate.of(300)) .build()) .build(); assertEquals( statisticsProvider.getTableStatistics( session, TABLE, ImmutableMap.of( "p1", PARTITION_COLUMN_1, "p2", PARTITION_COLUMN_2, COLUMN, columnHandle), ImmutableMap.of( "p1", VARCHAR, "p2", BIGINT, COLUMN, BIGINT), ImmutableList.of(partition(partitionName))), expected); }
// Initializes the sink from metrics2 properties: base path, source tag, error/append
// flags, roll intervals (offset must be non-negative), then loads Hadoop config. When
// security is enabled, validates keytab/principal properties and performs kerberos login,
// wrapping login failures in MetricsException.
@Override public void init(SubsetConfiguration metrics2Properties) { properties = metrics2Properties; basePath = new Path(properties.getString(BASEPATH_KEY, BASEPATH_DEFAULT)); source = properties.getString(SOURCE_KEY, SOURCE_DEFAULT); ignoreError = properties.getBoolean(IGNORE_ERROR_KEY, DEFAULT_IGNORE_ERROR); allowAppend = properties.getBoolean(ALLOW_APPEND_KEY, DEFAULT_ALLOW_APPEND); rollOffsetIntervalMillis = getNonNegative(ROLL_OFFSET_INTERVAL_MILLIS_KEY, DEFAULT_ROLL_OFFSET_INTERVAL_MILLIS); rollIntervalMillis = getRollInterval(); conf = loadConf(); UserGroupInformation.setConfiguration(conf); // Don't do secure setup if it's not needed. if (UserGroupInformation.isSecurityEnabled()) { // Validate config so that we don't get an NPE checkIfPropertyExists(KEYTAB_PROPERTY_KEY); checkIfPropertyExists(USERNAME_PROPERTY_KEY); try { // Login as whoever we're supposed to be and let the hostname be pulled // from localhost. If security isn't enabled, this does nothing. SecurityUtil.login(conf, properties.getString(KEYTAB_PROPERTY_KEY), properties.getString(USERNAME_PROPERTY_KEY)); } catch (IOException ex) { throw new MetricsException("Error logging in securely: [" + ex.toString() + "]", ex); } } }
// Verifies init() maps each sink.* property onto the corresponding field, including the
// "10m" roll interval becoming 600000 ms.
@Test public void testInit() { ConfigBuilder builder = new ConfigBuilder(); SubsetConfiguration conf = builder.add("sink.roll-interval", "10m") .add("sink.roll-offset-interval-millis", "1") .add("sink.basepath", "path") .add("sink.ignore-error", "true") .add("sink.allow-append", "true") .add("sink.source", "src") .subset("sink"); RollingFileSystemSink sink = new RollingFileSystemSink(); sink.init(conf); assertEquals("The roll interval was not set correctly", sink.rollIntervalMillis, 600000); assertEquals("The roll offset interval was not set correctly", sink.rollOffsetIntervalMillis, 1); assertEquals("The base path was not set correctly", sink.basePath, new Path("path")); assertEquals("ignore-error was not set correctly", sink.ignoreError, true); assertEquals("allow-append was not set correctly", sink.allowAppend, true); assertEquals("The source was not set correctly", sink.source, "src"); }
// When HTTPS is enabled, ensures the configured keystore file exists and is readable;
// otherwise throws TelegramApiException. No-op when HTTPS is disabled.
public void validate() throws TelegramApiException { if (useHttps) { File file = new File(keyStorePath); if (!file.exists() || !file.canRead()) { throw new TelegramApiException("Can't find or access server keystore file."); } } }
// Verifies validate() throws when HTTPS is on but the keystore path does not exist.
@Test public void testWhenHttpsEnabledAndKeyStoreFileNotPresentExceptionIsRaised() { WebhookOptions webhookOptions = new WebhookOptions(); webhookOptions.setUseHttps(true); webhookOptions.setKeyStorePath("/Random/path"); try { webhookOptions.validate(); fail("Exception should have been raised during Https webhook options validation"); } catch (TelegramApiException e) { // Ignore } }
// Deserializes one delimited (CSV) record into a list of column values using the
// per-column parsers. Null payload -> null; empty record, missing fields, or a column
// count mismatch -> SerializationException; empty string fields parse to null. All
// failures are wrapped in SerializationException("Error deserializing delimited").
@Override public List<?> deserialize(final String topic, final byte[] bytes) { if (bytes == null) { return null; } try { final String recordCsvString = new String(bytes, StandardCharsets.UTF_8); final List<CSVRecord> csvRecords = CSVParser.parse(recordCsvString, csvFormat) .getRecords(); if (csvRecords.isEmpty()) { throw new SerializationException("No fields in record"); } final CSVRecord csvRecord = csvRecords.get(0); if (csvRecord == null || csvRecord.size() == 0) { throw new SerializationException("No fields in record."); } SerdeUtils.throwOnColumnCountMismatch(parsers.size(), csvRecord.size(), false, topic); final List<Object> values = new ArrayList<>(parsers.size()); final Iterator<Parser> pIt = parsers.iterator(); for (int i = 0; i < csvRecord.size(); i++) { final String value = csvRecord.get(i); final Parser parser = pIt.next(); final Object parsed = value == null || value.isEmpty() ? null : parser.parse(value); values.add(parsed); } return values; } catch (final Exception e) { throw new SerializationException("Error deserializing delimited", e); } }
// Verifies a bare negative number string deserializes into the expected BigDecimal for a
// decimal(4,2) column.
@Test public void shouldDeserializeNegativeDecimalSerializedAsNumber() { // Given: final PersistenceSchema schema = persistenceSchema( column("cost", SqlTypes.decimal(4, 2)) ); final KsqlDelimitedDeserializer deserializer = createDeserializer(schema); final byte[] bytes = "-1.12".getBytes(StandardCharsets.UTF_8); // When: final List<?> result = deserializer.deserialize("", bytes); // Then: assertThat(result, contains(new BigDecimal("-1.12"))); }
// Positional read. With buffered pread enabled it defers to the shared-buffer superclass
// implementation; with it disabled it reads directly from the remote at the given offset
// (no shared cursor/buffer — intentionally unsynchronized beyond the initial close check,
// which benefits concurrent random reads such as HBase-style access). Updates stream and
// filesystem statistics when present.
@Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { // When bufferedPreadDisabled = true, this API does not use any shared buffer, // cursor position etc. So this is implemented as NOT synchronized. HBase // kind of random reads on a shared file input stream will greatly get // benefited by such implementation. // Strict close check at the begin of the API only not for the entire flow. synchronized (this) { if (closed) { throw new IOException(FSExceptionMessages.STREAM_IS_CLOSED); } } LOG.debug("pread requested offset = {} len = {} bufferedPreadDisabled = {}", offset, length, bufferedPreadDisabled); if (!bufferedPreadDisabled) { return super.read(position, buffer, offset, length); } validatePositionedReadArgs(position, buffer, offset, length); if (length == 0) { return 0; } if (streamStatistics != null) { streamStatistics.readOperationStarted(); } int bytesRead = readRemote(position, buffer, offset, length, tracingContext); if (statistics != null) { statistics.incrementBytesRead(bytesRead); } if (streamStatistics != null) { streamStatistics.bytesRead(bytesRead); } return bytesRead; }
// Verifies that when all three read-ahead threads fail, getBlock surfaces the failure as
// an IOException, exactly three client reads occurred, and the failed buffers are evicted.
@Test public void testReadAheadManagerForFailedReadAhead() throws Exception { AbfsClient client = getMockAbfsClient(); AbfsRestOperation successOp = getMockRestOp(); // Stub : // Read request leads to 3 readahead calls: Fail all 3 readahead-client.read() // Actual read request fails with the failure in readahead thread doThrow(new TimeoutException("Internal Server error for RAH-Thread-X")) .doThrow(new TimeoutException("Internal Server error for RAH-Thread-Y")) .doThrow(new TimeoutException("Internal Server error RAH-Thread-Z")) .doReturn(successOp) // Any extra calls to read, pass it. .when(client) .read(any(String.class), any(Long.class), any(byte[].class), any(Integer.class), any(Integer.class), any(String.class), any(String.class), any(), any(TracingContext.class)); AbfsInputStream inputStream = getAbfsInputStream(client, "testReadAheadManagerForFailedReadAhead.txt"); queueReadAheads(inputStream); // AbfsInputStream Read would have waited for the read-ahead for the requested offset // as we are testing from ReadAheadManager directly, sleep for a sec to // get the read ahead threads to complete Thread.sleep(1000); // if readAhead failed for specific offset, getBlock should // throw exception from the ReadBuffer that failed within last thresholdAgeMilliseconds sec intercept(IOException.class, () -> ReadBufferManager.getBufferManager().getBlock( inputStream, 0, ONE_KB, new byte[ONE_KB])); // Only the 3 readAhead threads should have triggered client.read verifyReadCallCount(client, 3); // Stub returns success for the 4th read request, if ReadBuffers still // persisted, ReadAheadManager getBlock would have returned exception. checkEvictedStatus(inputStream, 0, false); }
// Creates a new ZFrame constructed from this frame's data field. Note the same `data`
// reference is passed to the constructor — whether the bytes are copied depends on the
// ZFrame(byte[]) constructor (not visible here).
public ZFrame duplicate() { return new ZFrame(this.data); }
// Verifies a duplicated frame compares equal to the original.
@Test public void testZFrameEquals() { ZFrame f = new ZFrame("Hello".getBytes()); ZFrame clone = f.duplicate(); assertThat(clone, is(f)); }
// Returns the processor count configured via system property / environment variable,
// falling back to Runtime.availableProcessors() when the value is absent, non-numeric,
// or not positive.
public static int getProcessorsCount() { int processorsCount = 0; String processorsCountPreSet = getProperty(PROCESSORS_PROP_NAME, PROCESSORS_ENV_NAME); if (processorsCountPreSet != null) { try { processorsCount = Integer.parseInt(processorsCountPreSet); } catch (NumberFormatException ignored) { } } if (processorsCount <= 0) { processorsCount = Runtime.getRuntime().availableProcessors(); } return processorsCount; }
// Smoke test: getProcessorsCount returns a value (autoboxed for the null check).
@Test void getProcessorsCount() { int processorsCount = PropertyUtils.getProcessorsCount(); assertNotNull(processorsCount); }
// When admin-service access control is enabled, validates the Authorization header token
// and rejects the request with 401 on failure (logging the token and URI); otherwise, or
// on success, continues the filter chain.
@Override public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException { if (bizConfig.isAdminServiceAccessControlEnabled()) { HttpServletRequest request = (HttpServletRequest) req; HttpServletResponse response = (HttpServletResponse) resp; String token = request.getHeader(HttpHeaders.AUTHORIZATION); if (!checkAccessToken(token)) { logger.warn("Invalid access token: {} for uri: {}", token, request.getRequestURI()); response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized"); return; } } chain.doFilter(req, resp); }
// Verifies that with access control disabled the filter passes straight through without
// reading tokens or headers and never sends an error.
@Test public void testWithAccessControlDisabled() throws Exception { when(bizConfig.isAdminServiceAccessControlEnabled()).thenReturn(false); authenticationFilter.doFilter(servletRequest, servletResponse, filterChain); verify(bizConfig, times(1)).isAdminServiceAccessControlEnabled(); verify(filterChain, times(1)).doFilter(servletRequest, servletResponse); verify(bizConfig, never()).getAdminServiceAccessTokens(); verify(servletRequest, never()).getHeader(HttpHeaders.AUTHORIZATION); verify(servletResponse, never()).sendError(anyInt(), anyString()); }
// Health endpoint: returns 200 with a JSON message containing the service count and
// local port.
@RequestMapping("/server") public ResponseEntity server() { ObjectNode result = JacksonUtils.createEmptyJsonNode(); result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: " + MetricsMonitor.getDomCountMonitor() + ", local port:" + EnvUtil.getPort()); return ResponseEntity.ok(result); }
// Smoke test: the /server health endpoint responds with HTTP 200.
@Test void testServer() { ResponseEntity responseEntity = healthController.server(); assertEquals(200, responseEntity.getStatusCodeValue()); }
// Returns true only when the persistent query count is STRICTLY greater than the
// configured limit (reaching the limit exactly is not "exceeded").
public static boolean exceedsPersistentQueryCapacity( final KsqlExecutionContext executionContext, final KsqlConfig ksqlConfig ) { return executionContext.getPersistentQueries().size() > getQueryLimit(ksqlConfig); }
// Verifies the strict-greater semantics: count == limit is not reported as exceeded.
@Test public void shouldNotReportCapacityExceededIfReached() { // Given: givenActivePersistentQueries(2); givenQueryLimit(2); // Then: assertThat(QueryCapacityUtil.exceedsPersistentQueryCapacity(ksqlEngine, ksqlConfig), equalTo(false)); }
// Inserts a step breakpoint row (workflow id revised via getRevisedWorkflowId) inside a
// retryable transaction and returns the created breakpoint from the statement's result
// set. An empty result set means the breakpoint could not be set and raises
// MaestroBadRequestException; failures are metric-logged.
public StepBreakpoint addStepBreakpoint( String workflowId, long version, long instanceId, long runId, String stepId, long stepAttemptId, User user) { final String revisedWorkflowId = getRevisedWorkflowId(workflowId, stepId, true); return withMetricLogError( () -> withRetryableTransaction( conn -> { try (PreparedStatement stmt = conn.prepareStatement(ADD_STEP_BREAKPOINT)) { int idx = 0; stmt.setString(++idx, revisedWorkflowId); stmt.setLong(++idx, version); stmt.setLong(++idx, instanceId); stmt.setLong(++idx, runId); stmt.setString(++idx, stepId); stmt.setLong(++idx, stepAttemptId); stmt.setString(++idx, toJson(user)); try (ResultSet rs = stmt.executeQuery()) { if (rs.next()) { return stepBreakpointFromResultSet(rs); } else { throw new MaestroBadRequestException( Collections.emptyList(), "Breakpoint could not be set with identifier [%s][%d][%d][%d][%s][%d]", workflowId, version, instanceId, runId, stepId, stepAttemptId); } } } }), "addStepBreakpointForStepIdentifier", "Failed to addStepBreakpointForStepIdentifier [{}][{}][{}][{}][{}][{}]", workflowId, version, instanceId, runId, stepId, stepAttemptId); }
// Verifies addStepBreakpoint echoes back concrete identifiers and leaves MATCH_ALL
// wildcard dimensions as nulls on the returned breakpoint.
@Test public void testAddBreakpoint() { when(workflowDao.getWorkflowDefinition(anyString(), anyString())).thenReturn(wfd); StepBreakpoint bp = maestroStepBreakpointDao.addStepBreakpoint( TEST_WORKFLOW_ID2, TEST_WORKFLOW_VERSION1, TEST_WORKFLOW_INSTANCE1, Constants.MATCH_ALL_RUNS, TEST_STEP_ID1, Constants.MATCH_ALL_STEP_ATTEMPTS, TEST_USER); assertEquals(TEST_WORKFLOW_ID2, bp.getWorkflowId()); assertEquals(TEST_STEP_ID1, bp.getStepId()); assertEquals(TEST_WORKFLOW_INSTANCE1, bp.getWorkflowInstanceId().longValue()); assertEquals(TEST_WORKFLOW_VERSION1, bp.getWorkflowVersionId().longValue()); assertNull(bp.getWorkflowRunId()); assertNull(bp.getStepAttemptId()); bp = maestroStepBreakpointDao.addStepBreakpoint( TEST_WORKFLOW_ID1, Constants.MATCH_ALL_WORKFLOW_VERSIONS, Constants.MATCH_ALL_WORKFLOW_INSTANCES, Constants.MATCH_ALL_RUNS, TEST_STEP_ID1, Constants.MATCH_ALL_STEP_ATTEMPTS, TEST_USER); assertEquals(TEST_WORKFLOW_ID1, bp.getWorkflowId()); assertEquals(TEST_STEP_ID1, bp.getStepId()); assertNull(bp.getWorkflowVersionId()); assertNull(bp.getWorkflowInstanceId()); assertNull(bp.getWorkflowRunId()); assertNull(bp.getStepAttemptId()); }
public void validate(OptionRule rule) { List<RequiredOption> requiredOptions = rule.getRequiredOptions(); for (RequiredOption requiredOption : requiredOptions) { validate(requiredOption); for (Option<?> option : requiredOption.getOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { // is required option and not match condition, skip validate if (isConditionOption(requiredOption) && !matchCondition( (RequiredOption.ConditionalRequiredOptions) requiredOption)) { continue; } validateSingleChoice(option); } } } for (Option option : rule.getOptionalOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { validateSingleChoice(option); } } }
/**
 * Verifies that unconditionally required options are reported as missing one by one,
 * and that validation passes once all of them are configured.
 */
@Test
public void testAbsolutelyRequiredOption() {
    OptionRule rule = OptionRule.builder().required(TEST_PORTS, KEY_USERNAME, KEY_PASSWORD).build();
    Map<String, Object> config = new HashMap<>();
    // The executable captures config by reference, so later puts affect re-runs.
    Executable executable = () -> validate(config, rule);
    // absent
    config.put(TEST_PORTS.key(), "[9090]");
    assertEquals(
            "ErrorCode:[API-02], ErrorDescription:[Option item validate failed] - There are unconfigured options, the options('username', 'password') are required.",
            assertThrows(OptionValidationException.class, executable).getMessage());
    config.put(KEY_USERNAME.key(), "asuka");
    assertEquals(
            "ErrorCode:[API-02], ErrorDescription:[Option item validate failed] - There are unconfigured options, the options('password') are required.",
            assertThrows(OptionValidationException.class, executable).getMessage());
    // all present
    config.put(KEY_PASSWORD.key(), "saitou");
    Assertions.assertDoesNotThrow(executable);
}
/**
 * Creates a handler addressing a scheduled task by member UUID, scheduler and task name.
 *
 * <p>NOTE(review): the partition id is fixed to -1 here — presumably the sentinel for a
 * member-addressed (non-partition) task; confirm against {@code ScheduledTaskHandlerImpl}.
 *
 * @param uuid owning member UUID
 * @param schedulerName name of the scheduler holding the task
 * @param taskName name of the task
 * @return a handler for the described task
 */
public static ScheduledTaskHandler of(UUID uuid, String schedulerName, String taskName) {
    final int noPartitionId = -1;
    return new ScheduledTaskHandlerImpl(uuid, noPartitionId, schedulerName, taskName);
}
/**
 * Verifies that parsing a handler URN yields a non-null handler.
 * NOTE(review): the test name suggests an equality/null comparison, but only
 * non-nullness is asserted — confirm whether an equality assertion was intended.
 */
@Test
public void of_equalityNull() {
    String urnA = "urn:hzScheduledTaskHandler:39ffc539-a356-444c-bec7-6f644462c208-1SchedulerTask";
    assertNotNull(ScheduledTaskHandler.of(urnA));
}
/**
 * Builds a failed remove-config response carrying the given error message.
 *
 * @param errorMsg human-readable failure reason
 * @return a response with result code {@code ResponseCode.FAIL} and the message set
 */
public static ConfigRemoveResponse buildFailResponse(String errorMsg) {
    final ConfigRemoveResponse response = new ConfigRemoveResponse();
    response.setResultCode(ResponseCode.FAIL.getCode());
    response.setMessage(errorMsg);
    return response;
}
/** Verifies the JSON shape of a failure response: FAIL result code, message, success=false. */
@Override
@Test
public void testSerializeFailResponse() throws JsonProcessingException {
    ConfigRemoveResponse configRemoveResponse = ConfigRemoveResponse.buildFailResponse("Fail");
    String json = mapper.writeValueAsString(configRemoveResponse);
    assertTrue(json.contains("\"resultCode\":" + ResponseCode.FAIL.getCode()));
    assertTrue(json.contains("\"errorCode\":0"));
    assertTrue(json.contains("\"message\":\"Fail\""));
    assertTrue(json.contains("\"success\":false"));
}
/**
 * Registers the listener notified on credential updates.
 * NOTE(review): silently replaces any previously registered listener — confirm
 * single-listener semantics are intended; no null check is performed.
 *
 * @param listener listener to be invoked when credentials change
 */
public void registerCredentialListener(CredentialListener listener) {
    this.listener = listener;
}
/**
 * Verifies that a registered listener is notified exactly once when the
 * credential service receives new credentials.
 */
@Test
void testRegisterCredentialListener() {
    CredentialListener expect = mock(CredentialListener.class);
    CredentialService credentialService1 = CredentialService.getInstance();
    credentialService1.registerCredentialListener(expect);
    Credentials newCredentials = new Credentials();
    newCredentials.setAccessKey("ak");
    // Setting credentials should trigger the update callback on the listener.
    credentialService1.setCredential(newCredentials);
    verify(expect, times(1)).onUpdateCredential();
}
/**
 * Creates a new folder under the currently selected repository directory.
 *
 * <p>Prompts the user for a name via a modal dialog; the dialog callback stores the
 * result in the {@code newName} field (shared mutable state — the prompt is presumably
 * modal so {@code newName} is set before {@code open()} returns; confirm). Rejects
 * "." / ".." and names already present in the UI; warns (but proceeds with the
 * repository's name) if the name only exists in the repository (PDI-5202). On success
 * the new directory is registered in {@code dirMap} and both bindings are refreshed.
 * Any failure is surfaced through a confirm dialog unless the repository connection
 * was lost.
 *
 * @throws Exception never propagated directly; errors are handled in the catch block
 */
public void createFolder() throws Exception {
  try {
    Collection<UIRepositoryDirectory> directories = folderTree.getSelectedItems();
    if ( directories == null || directories.size() == 0 ) {
      // Nothing selected: silently do nothing.
      return;
    }
    UIRepositoryDirectory selectedFolder = directories.iterator().next();

    // First, ask for a name for the folder
    XulPromptBox prompt = promptForName( null );
    prompt.addDialogCallback( new XulDialogCallback<String>() {
      public void onClose( XulComponent component, Status status, String value ) {
        // ACCEPT captures the entered name; any other status cancels the operation.
        if ( status == Status.ACCEPT ) {
          newName = value;
        } else {
          newName = null;
        }
      }

      public void onError( XulComponent component, Throwable err ) {
        throw new RuntimeException( err );
      }
    } );
    prompt.open();

    if ( newName != null ) {
      if ( selectedFolder == null ) {
        // Fall back to the repository root when no folder object is available.
        selectedFolder = repositoryDirectory;
      }
      if ( newName.equals( "." ) || newName.equals( ".." ) ) {
        throw new Exception( BaseMessages.getString( PKG, "BrowserController.InvalidFolderName" ) );
      }
      //Do an explicit check here to see if the folder already exists in the ui
      //This is to prevent a double message being sent in case the folder does
      //not exist in the ui but does exist in the repo (PDI-5202)
      boolean folderExistsInUI = selectedFolder.contains( newName );
      if ( folderExistsInUI ) {
        throw new Exception( BaseMessages.getString( PKG, "BrowserController.DirAlreadyExistsInUI", newName ) );
      }
      //PDI-5202
      String newNameInRepo = selectedFolder.checkDirNameExistsInRepo( newName );
      if ( newNameInRepo != null ) {
        // Name exists only in the repository: warn the user and reuse the repo's casing/name.
        messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Warning" ) );
        messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
        messageBox.setMessage( BaseMessages.getString( PKG,
            "BrowserController.DirAlreadyExistsInRepository", newNameInRepo ) );
        messageBox.open();
        newName = newNameInRepo;
      }
      UIRepositoryDirectory newDir = selectedFolder.createFolder( newName );
      // Track the new directory and refresh both UI bindings.
      dirMap.put( newDir.getObjectId(), newDir );
      directoryBinding.fireSourceChanged();
      selectedItemsBinding.fireSourceChanged();
      this.folderTree.setSelectedItems( Collections.singletonList( selectedFolder ) );
    }
    // Reset shared dialog state for the next invocation.
    newName = null;
  } catch ( Exception e ) {
    if ( mainController == null || !mainController.handleLostRepository( e ) ) {
      confirm( BaseMessages.getString( PKG, "Dialog.Error" ), e.getLocalizedMessage() );
    }
  }
}
/**
 * Verifies that cancelling the folder-name prompt aborts creation: no folder is
 * created, no map entry is added and no bindings fire.
 */
@Test
public void shouldNotCreateFolderOnCloseCreationDialog() throws Exception {
    // The mock prompt immediately reports CANCEL, simulating the user closing the dialog.
    XulPromptBox prompt = new XulPromptBoxMock( XulDialogCallback.Status.CANCEL );
    when( document.createElement( PROMPTBOX ) ).thenReturn( prompt );
    controller.createFolder();
    assertTrue( directoryMap.isEmpty() );
    verify( selectedFolder, never() ).createFolder( anyString() );
    verify( directoryBinding, never() ).fireSourceChanged();
    verify( selectedItemsBinding, never() ).fireSourceChanged();
}
/**
 * Creates a {@link Write} transform with declarations disabled by default:
 * neither the exchange nor the queue is declared unless explicitly enabled.
 *
 * @return a new {@code Write} with default settings
 */
public static Write write() {
  final AutoValue_RabbitMqIO_Write.Builder builder = new AutoValue_RabbitMqIO_Write.Builder();
  builder.setExchangeDeclare(false);
  builder.setQueueDeclare(false);
  return builder.build();
}
/**
 * End-to-end write test: publishes {@code maxNumRecords} messages through
 * {@code RabbitMqIO.write()} to queue {@code TEST} and consumes them back,
 * asserting count and payloads.
 */
@Test
public void testWriteQueue() throws Exception {
  final int maxNumRecords = 1000;
  List<RabbitMqMessage> data =
      RabbitMqTestUtils.generateRecords(maxNumRecords).stream()
          .map(RabbitMqMessage::new)
          .collect(Collectors.toList());
  p.apply(Create.of(data))
      .apply(
          RabbitMqIO.write().withUri("amqp://guest:guest@localhost:" + port).withQueue("TEST"));

  ConnectionFactory connectionFactory = new ConnectionFactory();
  connectionFactory.setUri("amqp://guest:guest@localhost:" + port);
  Connection connection = null;
  Channel channel = null;
  try {
    connection = connectionFactory.newConnection();
    channel = connection.createChannel();
    channel.queueDeclare("TEST", true, false, false, null);
    RabbitMqTestUtils.TestConsumer consumer = new RabbitMqTestUtils.TestConsumer(channel);
    channel.basicConsume("TEST", true, consumer);
    p.run();
    // Bounded poll: the original loop waited forever, hanging the build if the
    // pipeline dropped messages. Fail with a diagnostic after a generous deadline.
    final long deadline = System.currentTimeMillis() + 60_000L;
    while (consumer.getReceived().size() < maxNumRecords) {
      if (System.currentTimeMillis() > deadline) {
        throw new AssertionError(
            "Timed out waiting for "
                + maxNumRecords
                + " messages; received "
                + consumer.getReceived().size());
      }
      Thread.sleep(500);
    }
    assertEquals(maxNumRecords, consumer.getReceived().size());
    for (int i = 0; i < maxNumRecords; i++) {
      assertTrue(consumer.getReceived().contains("Test " + i));
    }
  } finally {
    if (channel != null) {
      channel.close();
    }
    if (connection != null) {
      connection.close();
    }
  }
}
/**
 * Handles a constructor-invocation command: reads the fully qualified class name
 * and the argument list from the protocol stream, invokes the constructor, and
 * writes the encoded result back.
 *
 * @param commandName the command token (unused beyond dispatch)
 * @param reader protocol input
 * @param writer protocol output
 * @throws Py4JException if the protocol exchange fails
 * @throws IOException if reading or writing the streams fails
 */
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
    throws Py4JException, IOException {
  final String className = reader.readLine();
  final List<Object> constructorArgs = getArguments(reader);
  final ReturnObject result = invokeConstructor(className, constructorArgs);
  final String response = Protocol.getOutputCommand(result);
  logger.finest("Returning command: " + response);
  writer.write(response);
  writer.flush();
}
/**
 * Verifies that invoking a constructor with a non-matching argument
 * (int 5 for py4j.examples.Stack) yields an error response ("!x" prefix).
 */
@Test
public void testWrongConstructor() {
    // Protocol payload: class name, one int argument (i5), end marker (e).
    String inputCommand = "py4j.examples.Stack\ni5\ne\n";
    try {
        command.execute("i", new BufferedReader(new StringReader(inputCommand)), writer);
        assertTrue(sWriter.toString().startsWith("!x"));
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
/**
 * Dispatches a task event through the state machine under the write lock.
 *
 * <p>Invalid transitions are logged and routed to {@link #internalError}; a state
 * change is logged at INFO level.
 *
 * @param event the task event to process
 */
@Override
public void handle(TaskEvent event) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Processing " + event.getTaskID() + " of type " + event.getType());
  }
  // Acquire the lock BEFORE the try: the original locked inside the try, so a
  // failing lock() would have triggered unlock() on an unheld lock in finally.
  writeLock.lock();
  try {
    TaskStateInternal oldState = getInternalState();
    try {
      stateMachine.doTransition(event.getType(), event);
    } catch (InvalidStateTransitionException e) {
      LOG.error("Can't handle this event at current state for " + this.taskId, e);
      internalError(event.getType());
    }
    if (oldState != getInternalState()) {
      LOG.info(taskId + " Task Transitioned from " + oldState + " to "
          + getInternalState());
    }
  } finally {
    writeLock.unlock();
  }
}
/**
 * Verifies that a T_KILL event delivered after a task has succeeded is ignored:
 * the task remains in the SUCCEEDED state.
 */
@Test
public void testKillSuccessfulTask() {
    LOG.info("--- START: testKillSuccesfulTask ---");
    mockTask = createMockTask(TaskType.MAP);
    TaskId taskId = getNewTaskID();
    // Drive the task through schedule -> launch -> commit -> succeed.
    scheduleTaskAttempt(taskId);
    launchTaskAttempt(getLastAttempt().getAttemptId());
    commitTaskAttempt(getLastAttempt().getAttemptId());
    mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
        TaskEventType.T_ATTEMPT_SUCCEEDED));
    assertTaskSucceededState();
    // A kill after success must not change the terminal state.
    mockTask.handle(new TaskEvent(taskId, TaskEventType.T_KILL));
    assertTaskSucceededState();
}
/**
 * Resolves the given expression to a concrete Java value by walking it with the
 * internal {@code Visitor} (presumably coercing to the target SQL type configured
 * on this resolver — confirm against the Visitor implementation).
 *
 * @param expression the expression to evaluate
 * @return the resolved value
 */
public Object resolve(final Expression expression) {
  return new Visitor().process(expression, null);
}
/** Verifies that resolving a non-date string against the DATE type fails with a helpful message. */
@Test
public void shouldThrowIfCannotParseDate() {
    // Given:
    final SqlType type = SqlTypes.DATE;
    final Expression exp = new StringLiteral("abc");

    // When:
    final KsqlException e = assertThrows(
        KsqlException.class,
        () -> new GenericExpressionResolver(type, FIELD_NAME, registry, config, "insert value", false).resolve(exp));

    // Then:
    assertThat(e.getMessage(), containsString("Date format must be yyyy-mm-dd"));
}
/**
 * Returns the backing byte array.
 * NOTE(review): exposes the internal array directly (no defensive copy) —
 * callers can mutate internal state; confirm this is intended for performance.
 *
 * @return the internal byte buffer
 */
public byte[] getBytes() {
  return bytes;
}
/**
 * Round-trip check: random strings encoded by UTF8.getBytes must decode back
 * identically via Modified-UTF8 (the DataInput/DataOutput string format).
 */
@Test
public void testGetBytes() throws Exception {
    for (int i = 0; i < 10000; i++) {
        // generate a random string
        String before = getTestString();

        // Check that the bytes are stored correctly in Modified-UTF8 format.
        // Note that the DataInput and DataOutput interfaces convert between
        // bytes and Strings using the Modified-UTF8 format.
        assertEquals(before, readModifiedUTF(UTF8.getBytes(before)));
    }
}
/**
 * Parses a DICOM TM (time) string into a {@link Date} in the given time zone.
 * Delegates with {@code false} for the third flag — presumably "ceil"/end-of-range
 * rounding disabled; confirm against the 4-arg overload.
 *
 * @param tz time zone used for interpretation
 * @param s the TM string to parse
 * @param precision receives the precision (last calendar field) actually parsed
 * @return the parsed date
 */
public static Date parseTM(TimeZone tz, String s, DatePrecision precision) {
  return parseTM(tz, s, false, precision);
}
/**
 * Verifies parsing of the colon-separated ACR/NEMA time format ("HH:MM:SS"),
 * including that the reported precision reaches the seconds field.
 */
@Test
public void testParseTMacrnema() {
    DatePrecision precision = new DatePrecision();
    // "02:00:00" in the test's fixed time zone corresponds to epoch millisecond 0.
    assertEquals(0, DateUtils.parseTM(tz, "02:00:00", precision).getTime());
    assertEquals(Calendar.SECOND, precision.lastField);
}
/**
 * Returns this bin's value when the number falls inside the bin's interval,
 * otherwise an empty {@link Optional}.
 *
 * @param toEvaluate the number to test against the interval
 * @return the bin value if the number is in range, otherwise empty
 */
public Optional<String> evaluate(Number toEvaluate) {
    if (interval.isIn(toEvaluate)) {
        return Optional.of(binValue);
    }
    return Optional.empty();
}
/**
 * Verifies OPEN_CLOSED interval semantics of a discretize bin: the lower bound is
 * excluded, the upper bound included, for left-open, right-open and bounded intervals.
 */
@Test
void evaluateOpenClosed() {
    // (-inf, 20]: 10 and 20 match, 30 does not.
    KiePMMLDiscretizeBin kiePMMLDiscretizeBin =
            getKiePMMLDiscretizeBin(new KiePMMLInterval(null, 20, CLOSURE.OPEN_CLOSED));
    Optional<String> retrieved = kiePMMLDiscretizeBin.evaluate(10);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(BINVALUE);
    retrieved = kiePMMLDiscretizeBin.evaluate(20);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(BINVALUE);
    retrieved = kiePMMLDiscretizeBin.evaluate(30);
    assertThat(retrieved).isNotPresent();
    // (20, +inf): lower bound 20 excluded.
    kiePMMLDiscretizeBin = getKiePMMLDiscretizeBin(new KiePMMLInterval(20, null, CLOSURE.OPEN_CLOSED));
    retrieved = kiePMMLDiscretizeBin.evaluate(30);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(BINVALUE);
    retrieved = kiePMMLDiscretizeBin.evaluate(20);
    assertThat(retrieved).isNotPresent();
    retrieved = kiePMMLDiscretizeBin.evaluate(10);
    assertThat(retrieved).isNotPresent();
    // (20, 40]: 20 excluded, 40 included.
    kiePMMLDiscretizeBin = getKiePMMLDiscretizeBin(new KiePMMLInterval(20, 40, CLOSURE.OPEN_CLOSED));
    retrieved = kiePMMLDiscretizeBin.evaluate(30);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(BINVALUE);
    retrieved = kiePMMLDiscretizeBin.evaluate(10);
    assertThat(retrieved).isNotPresent();
    retrieved = kiePMMLDiscretizeBin.evaluate(20);
    assertThat(retrieved).isNotPresent();
    retrieved = kiePMMLDiscretizeBin.evaluate(40);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(BINVALUE);
    retrieved = kiePMMLDiscretizeBin.evaluate(50);
    assertThat(retrieved).isNotPresent();
}
/**
 * Returns the base URI the embedded server is configured to serve from.
 *
 * @return the server's configured base URI
 */
public URI baseUri() {
  return server.configuration().baseUri();
}
/**
 * Verifies that posting an empty identity-provider selection yields an HTML 400
 * response containing the German "no identity provider selected" message.
 */
@Test
void run_selectIdp() {
    var baseUri = application.baseUri();
    var sessionID = UUID.randomUUID().toString();
    // Submit the IdP selection form with an empty provider value.
    var response =
        given()
            .log()
            .all()
            .cookie("session_id", sessionID)
            .formParam("identityProvider", "")
            .when()
            .post(baseUri.resolve(IDP_PATH))
            .then()
            .contentType(ContentType.HTML)
            .statusCode(400)
            .extract()
            .response();
    var responseBody = response.getBody().asString();
    assertTrue(responseBody.contains("de-DE"));
    assertTrue(responseBody.contains("Kein Identitätsanbieter ausgewählt. Bitte zurückgehen."));
}
/**
 * Returns the directory Elasticsearch writes its logs to.
 *
 * @return the configured log directory
 */
public File getLogDirectory() {
  return logDirectory;
}
/**
 * Verifies that the ES installation's log directory comes from the PATH_LOGS
 * property when all mandatory path properties are set.
 */
@Test
public void getLogDirectory_is_configured_with_non_nullable_PATH_LOG_variable() throws IOException {
    File sqHomeDir = temp.newFolder();
    File logDir = temp.newFolder();
    // All four path properties are mandatory for constructing an EsInstallation.
    Props props = new Props(new Properties());
    props.set(PATH_DATA.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_HOME.getKey(), sqHomeDir.getAbsolutePath());
    props.set(PATH_TEMP.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_LOGS.getKey(), logDir.getAbsolutePath());
    EsInstallation underTest = new EsInstallation(props);

    assertThat(underTest.getLogDirectory()).isEqualTo(logDir);
}
@Override public BackgroundException map(final SardineException failure) { final StringBuilder buffer = new StringBuilder(); switch(failure.getStatusCode()) { case HttpStatus.SC_OK: case HttpStatus.SC_MULTI_STATUS: // HTTP method status this.append(buffer, failure.getMessage()); // Failure unmarshalling XML response return new InteroperabilityException(buffer.toString(), failure); } this.append(buffer, String.format("%s (%d %s)", failure.getReasonPhrase(), failure.getStatusCode(), failure.getResponsePhrase())); return super.map(failure, buffer, failure.getStatusCode()); }
/**
 * Verifies the HTTP-status-to-exception mapping for WebDAV failures:
 * 401 to login failure, 403 to access denied, 404 to not found.
 */
@Test
public void testMap() {
    // Use the statically imported assertEquals consistently (the first assertion
    // previously used the qualified Assert.assertEquals, unlike the rest).
    assertEquals(LoginFailureException.class,
        new DAVExceptionMappingService().map(new SardineException("m", 401, "r")).getClass());
    assertEquals(AccessDeniedException.class,
        new DAVExceptionMappingService().map(new SardineException("m", 403, "r")).getClass());
    assertEquals(NotfoundException.class,
        new DAVExceptionMappingService().map(new SardineException("m", 404, "r")).getClass());
}
/**
 * Looks up the leader information registered for the given component id,
 * falling back to {@link LeaderInformation#empty()} when none is registered.
 *
 * @param componentId the component to look up
 * @return the registered leader information, never {@code null}
 */
public LeaderInformation forComponentIdOrEmpty(String componentId) {
    final Optional<LeaderInformation> leaderInformation = forComponentId(componentId);
    if (leaderInformation.isPresent()) {
        return leaderInformation.get();
    }
    return LeaderInformation.empty();
}
/**
 * Verifies that forComponentIdOrEmpty returns the registered leader information
 * for a known component id (rather than the empty fallback).
 */
@Test
void testForComponentIdOrEmpty() {
    final String componentId = "component-id";
    final LeaderInformation leaderInformation = LeaderInformation.known(UUID.randomUUID(), "address");
    assertThat(
            LeaderInformationRegister.of(componentId, leaderInformation)
                    .forComponentIdOrEmpty(componentId))
            .isEqualTo(leaderInformation);
}