focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Server-side copy of a file or folder via the Box REST API. When the target already
// exists it is deleted first (replace semantics). Directories go through the Folders
// copy endpoint, regular files through the Files copy endpoint; in both cases the
// returned attributes are attached to the target Path. ApiException is mapped to a
// BackgroundException via BoxExceptionMappingService.
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", target, file)); } new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(target), callback, new Delete.DisabledCallback()); } if(file.isDirectory()) { return target.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes( new FoldersApi(new BoxApiClient(session.getClient())).postFoldersIdCopy( fileid.getFileId(file), new FolderIdCopyBody().name(target.getName()).parent(new FoldersfolderIdcopyParent().id(fileid.getFileId(target.getParent()))), BoxAttributesFinderFeature.DEFAULT_FIELDS) )); } return target.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes( new FilesApi(new BoxApiClient(session.getClient())).postFilesIdCopy( fileid.getFileId(file), new FileIdCopyBody() .name(target.getName()) .parent(new FilesfileIdcopyParent().id(fileid.getFileId(target.getParent()))), null, BoxAttributesFinderFeature.DEFAULT_FIELDS) )); } catch(ApiException e) { throw new BoxExceptionMappingService(fileid).map("Cannot copy {0}", e, file); } }
// Expects NotfoundException when copying from a randomly-named (hence non-existent)
// source path. NOTE(review): the method name says "Move" but the feature under test
// is BoxCopyFeature — consider renaming to testCopyNotFound for clarity.
@Test(expected = NotfoundException.class) public void testMoveNotFound() throws Exception { final BoxFileidProvider fileid = new BoxFileidProvider(session); final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new BoxCopyFeature(session, fileid).copy(test, new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new DisabledLoginCallback(), new DisabledStreamListener()); }
// Executes an INSERT VALUES statement: resolves the target data source, validates the
// column list, builds a serialized ProducerRecord and sends it to Kafka. The catch
// chain normalizes authorization failures: TopicAuthorizationException gains an ACL-style
// root cause; ClusterAuthorizationException (raw, or wrapped in KafkaException — see
// KAFKA-14138) is translated to an idempotent-write-specific root cause; anything else
// is wrapped in a KsqlException with a uniform "insert failed" message.
@SuppressWarnings("unused") // Part of required API. public void execute( final ConfiguredStatement<InsertValues> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { final InsertValues insertValues = statement.getStatement(); final MetaStore metaStore = executionContext.getMetaStore(); final KsqlConfig config = statement.getSessionConfig().getConfig(true); final DataSource dataSource = getDataSource(config, metaStore, insertValues); validateInsert(insertValues.getColumns(), dataSource); final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext); try { producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps()); } catch (final TopicAuthorizationException e) { // TopicAuthorizationException does not give much detailed information about why it failed, // except which topics are denied. Here we just add the ACL to make the error message // consistent with other authorization error messages. final Exception rootCause = new KsqlTopicAuthorizationException( AclOperation.WRITE, e.unauthorizedTopics() ); throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause); } catch (final ClusterAuthorizationException e) { // ClusterAuthorizationException is thrown when using idempotent producers // and either a topic write permission or a cluster-level idempotent write // permission (only applicable for broker versions no later than 2.8) is // missing. In this case, we include additional context to help the user // distinguish this type of failure from other permissions exceptions // such as the ones thrown above when TopicAuthorizationException is caught. 
throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } catch (final KafkaException e) { if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) { // The error message thrown when an idempotent producer is missing permissions // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException, // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException. // ksqlDB handles these two the same way, accordingly. // See https://issues.apache.org/jira/browse/KAFKA-14138 for more. throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } else { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } } catch (final Exception e) { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } }
// Happy-path schema-inference test: Schema Registry mocks return a matching Avro key
// schema, the INSERT statement supplies key and one value column, and the test verifies
// the exact key/value serializations and the ProducerRecord handed to the producer.
@Test public void shouldSupportInsertIntoWithSchemaInferenceMatch() throws Exception { // Given: when(srClient.getLatestSchemaMetadata(Mockito.any())) .thenReturn(new SchemaMetadata(1, 1, "")); when(srClient.getSchemaById(1)) .thenReturn(new AvroSchema(AVRO_RAW_ONE_KEY_SCHEMA)); givenDataSourceWithSchema( TOPIC_NAME, SCHEMA, SerdeFeatures.of(SerdeFeature.SCHEMA_INFERENCE, SerdeFeature.WRAP_SINGLES), SerdeFeatures.of(), FormatInfo.of(FormatFactory.AVRO.name()), FormatInfo.of(FormatFactory.AVRO.name()), false, false); final ConfiguredStatement<InsertValues> statement = givenInsertValues( ImmutableList.of(K0, COL0), ImmutableList.of( new StringLiteral("foo"), new StringLiteral("bar")) ); // When: executor.execute(statement, mock(SessionProperties.class), engine, serviceContext); // Then: verify(keySerializer).serialize(TOPIC_NAME, genericKey("foo")); verify(valueSerializer).serialize(TOPIC_NAME, genericRow("bar", null)); verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE)); }
// Periodic monitoring task: refreshes the Elasticsearch health status first, then the
// file-system metrics (both delegated to private helpers of the enclosing class).
@Override public void run() { updateElasticSearchHealthStatus(); updateFileSystemMetrics(); }
// Stubs the ES client to report GREEN cluster health and verifies exactly one call to
// setElasticSearchStatusToGreen with no other metric interactions.
@Test public void when_elasticsearch_up_status_is_updated_to_green() { ClusterHealthResponse clusterHealthResponse = new ClusterHealthResponse(); clusterHealthResponse.setStatus(ClusterHealthStatus.GREEN); when(esClient.clusterHealth(any())).thenReturn(clusterHealthResponse); underTest.run(); verify(serverMonitoringMetrics, times(1)).setElasticSearchStatusToGreen(); verifyNoMoreInteractions(serverMonitoringMetrics); }
// Translates a ksql Struct into a Struct conforming to this translator's target Connect
// schema. Non-Struct inputs pass through untouched. When the source schema is unnamed
// but the target is named, the source is first rewritten to an Avro-compatible schema
// (and the data re-wrapped) before validation and the field-by-field copy.
@Override public Object toConnectRow(final Object ksqlData) { if (!(ksqlData instanceof Struct)) { return ksqlData; } final Schema schema = getSchema(); final Struct struct = new Struct(schema); Struct originalData = (Struct) ksqlData; Schema originalSchema = originalData.schema(); if (originalSchema.name() == null && schema.name() != null) { originalSchema = AvroSchemas.getAvroCompatibleConnectSchema( originalSchema, schema.name() ); originalData = ConnectSchemas.withCompatibleRowSchema(originalData, originalSchema); } validate(originalSchema, schema); copyStruct(originalData, originalSchema, struct, schema); return struct; }
// The target schema lacks field f2 that the source struct carries; translation must
// fail with a KsqlException naming the missing field.
@Test public void shouldThrowIfMissingField() { // Given: final Schema schema = SchemaBuilder.struct() .field("f1", SchemaBuilder.OPTIONAL_STRING_SCHEMA) .field("f3", SchemaBuilder.OPTIONAL_INT64_SCHEMA) .build(); final Struct struct = new Struct(ORIGINAL_SCHEMA) .put("f1", "abc") .put("f2", 12); // When: final Exception e = assertThrows( KsqlException.class, () -> new AvroSRSchemaDataTranslator(schema).toConnectRow(struct) ); // Then: assertThat(e.getMessage(), is("Schema from Schema Registry misses field with name: f2")); }
// Resolves the SpecificData model for a generated Avro class by reflecting on its
// MODEL$ field. Only RECORD and UNION schemas are eligible; returns null (caller falls
// back to the default SpecificData) when the class is absent from the classpath, the
// field is missing or inaccessible, or logical-type conversions cannot be attached.
// Avro 1.7/1.8 do not populate conversions in MODEL$, so they are added explicitly there.
static SpecificData getModelForSchema(Schema schema) { final Class<?> clazz; if (schema != null && (schema.getType() == Schema.Type.RECORD || schema.getType() == Schema.Type.UNION)) { clazz = SpecificData.get().getClass(schema); } else { return null; } // If clazz == null, the underlying Avro class for the schema is not on the classpath if (clazz == null) { return null; } final SpecificData model; try { final Field modelField = clazz.getDeclaredField("MODEL$"); modelField.setAccessible(true); model = (SpecificData) modelField.get(null); } catch (NoSuchFieldException e) { LOG.info(String.format("Generated Avro class %s did not contain a MODEL$ field. ", clazz) + "Parquet will use default SpecificData model for reading and writing."); return null; } catch (IllegalAccessException e) { LOG.warn( String.format("Field `MODEL$` in class %s was inaccessible. ", clazz) + "Parquet will use default SpecificData model for reading and writing.", e); return null; } final String avroVersion = getRuntimeAvroVersion(); // Avro 1.7 and 1.8 don't include conversions in the MODEL$ field by default if (avroVersion != null && (avroVersion.startsWith("1.8.") || avroVersion.startsWith("1.7."))) { try { addLogicalTypeConversion(model, schema, new HashSet<>()); } catch (IllegalAccessException e) { LOG.warn( String.format("Logical-type conversions were inaccessible for %s", clazz) + "Parquet will use default SpecificData model for reading and writing.", e); return null; } } return model; }
// A record schema with no logical types must yield a model with no conversions.
@Test public void testModelForSpecificRecordWithoutLogicalTypes() { SpecificData model = AvroRecordConverter.getModelForSchema(Car.SCHEMA$); assertTrue(model.getConversions().isEmpty()); }
// Convenience overload: serializes the attribute map using the default XML_TAG wrapper.
public static String getAttributesXml( Map<String, Map<String, String>> attributesMap ) { return getAttributesXml( attributesMap, XML_TAG ); }
// Static-mocks AttributesUtil so both overloads call through, then checks an empty map
// still produces a non-null fragment containing the default XML tag.
// NOTE(review): method name says "NullParameter" but an empty HashMap is passed — verify intent.
@Test public void testGetAttributesXml_DefaultTag_NullParameter() { try ( MockedStatic<AttributesUtil> attributesUtilMockedStatic = mockStatic( AttributesUtil.class ) ) { attributesUtilMockedStatic.when( () -> AttributesUtil.getAttributesXml( anyMap() ) ).thenCallRealMethod(); attributesUtilMockedStatic.when( () -> AttributesUtil.getAttributesXml( anyMap(), anyString() ) ) .thenCallRealMethod(); String attributesXml = AttributesUtil.getAttributesXml( new HashMap<>() ); assertNotNull( attributesXml ); // Check that it's not an empty XML fragment assertTrue( attributesXml.contains( AttributesUtil.XML_TAG ) ); } }
// Configures exponential backoff parameters. initialInterval is capped at maxInterval;
// expMax is the exponent at which the geometric progression reaches maxInterval (0 when
// maxInterval <= initialInterval, i.e. no growth). Math.max(initialInterval, 1) guards
// the log argument against a zero initial interval.
// NOTE(review): a multiplier of 1 makes Math.log(multiplier) zero, yielding an
// infinite expMax — presumably callers always pass multiplier >= 2; verify.
public ExponentialBackoff(long initialInterval, int multiplier, long maxInterval, double jitter) { this.initialInterval = Math.min(maxInterval, initialInterval); this.multiplier = multiplier; this.maxInterval = maxInterval; this.jitter = jitter; this.expMax = maxInterval > initialInterval ? Math.log(maxInterval / (double) Math.max(initialInterval, 1)) / Math.log(multiplier) : 0; }
// For low attempt counts the backoff must stay within jitter of the exact exponential
// value; beyond that it must be capped near backoffMax. Repeated 100x because jitter
// is randomized per call.
@Test public void testExponentialBackoff() { long scaleFactor = 100; int ratio = 2; long backoffMax = 2000; double jitter = 0.2; ExponentialBackoff exponentialBackoff = new ExponentialBackoff( scaleFactor, ratio, backoffMax, jitter ); for (int i = 0; i <= 100; i++) { for (int attempts = 0; attempts <= 10; attempts++) { if (attempts <= 4) { assertEquals(scaleFactor * Math.pow(ratio, attempts), exponentialBackoff.backoff(attempts), scaleFactor * Math.pow(ratio, attempts) * jitter); } else { assertTrue(exponentialBackoff.backoff(attempts) <= backoffMax * (1 + jitter)); } } } }
// Two-argument sublist overload: delegates with a null length, i.e. take everything
// from the start position to the end of the list.
public FEELFnResult<List> invoke(@ParameterName("list") List list, @ParameterName("start position") BigDecimal start) { return invoke( list, start, null ); }
// Positive 1-based start positions: two-arg form returns the tail from that position;
// three-arg form limits the result to the requested length.
@Test void invokeStartPositive() { FunctionTestUtil.assertResult(sublistFunction.invoke(Arrays.asList(1, 2, 3), BigDecimal.valueOf(2)), Arrays.asList(2, 3)); FunctionTestUtil.assertResult(sublistFunction.invoke(Arrays.asList(1, "test", 3), BigDecimal.valueOf(2)), Arrays.asList("test", 3)); FunctionTestUtil.assertResult(sublistFunction.invoke(Arrays.asList(1, "test", 3), BigDecimal.valueOf(2), BigDecimal.ONE), Collections.singletonList("test")); }
// Parses a boolean expression over Predicate class names (operators and parentheses)
// using the shunting-yard algorithm with two stacks: predicates (operands) and
// operators. Operand tokens are fully-qualified class names instantiated reflectively
// and required to implement Predicate. isTokenMode temporarily replays a popped
// operator when a higher-precedence operator on the stack forces early evaluation.
// Throws RuntimeException for non-Predicate classes, reflection failures, or a
// malformed expression leaving more than one predicate on the stack.
public static Predicate parse(String expression) { final Stack<Predicate> predicateStack = new Stack<>(); final Stack<Character> operatorStack = new Stack<>(); final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll(""); final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true); boolean isTokenMode = true; while (true) { final Character operator; final String token; if (isTokenMode) { if (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } else { break; } if (OPERATORS.contains(token)) { operator = token.charAt(0); } else { operator = null; } } else { operator = operatorStack.pop(); token = null; } isTokenMode = true; if (operator == null) { try { predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance()); } catch (ClassCastException e) { throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e); } catch (Exception e) { throw new RuntimeException(e); } } else { if (operatorStack.empty() || operator == '(') { operatorStack.push(operator); } else if (operator == ')') { while (operatorStack.peek() != '(') { evaluate(predicateStack, operatorStack); } operatorStack.pop(); } else { if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) { evaluate(predicateStack, operatorStack); isTokenMode = false; } operatorStack.push(operator); } } } while (!operatorStack.empty()) { evaluate(predicateStack, operatorStack); } if (predicateStack.size() > 1) { throw new RuntimeException("Invalid logical expression"); } return predicateStack.pop(); }
// "!" prefix must produce a NotPredicate wrapping the named predicate class.
@Test public void testNot() { final Predicate parsed = PredicateExpressionParser.parse("!com.linkedin.data.it.AlwaysTruePredicate"); Assert.assertEquals(parsed.getClass(), NotPredicate.class); Assert.assertEquals(((NotPredicate) parsed).getChildPredicate().getClass(), AlwaysTruePredicate.class); }
// Opens a tracking stream positioned at the split offset and wraps the underlying
// StreamFormat reader. doWithCleanupOnException guarantees the stream is closed if
// reader construction throws, avoiding a leak.
@Override public BulkFormat.Reader<T> createReader( final Configuration config, final FileSourceSplit split) throws IOException { final TrackingFsDataInputStream trackingStream = openStream(split.path(), config, split.offset()); final long splitEnd = split.offset() + split.length(); return doWithCleanupOnException( trackingStream, () -> { final StreamFormat.Reader<T> streamReader = streamFormat.createReader( config, trackingStream, trackingStream.getFileLength(), splitEnd); return new Reader<>( streamReader, trackingStream, CheckpointedPosition.NO_OFFSET, 0L); }); }
// Reading a zero-length file through the adapter must yield no records and no error.
@Test void testReadEmptyFile() throws IOException { final StreamFormatAdapter<Integer> format = new StreamFormatAdapter<>(new CheckpointedIntFormat()); final File emptyFile = new File(tmpDir.toFile(), "testFile-empty"); emptyFile.createNewFile(); Path emptyFilePath = Path.fromLocalFile(emptyFile); final BulkFormat.Reader<Integer> reader = format.createReader( new Configuration(), new FileSourceSplit("test-id", emptyFilePath, 0L, 0, 0L, 0)); final List<Integer> result = new ArrayList<>(); readNumbers(reader, result, 0); assertThat(result).isEmpty(); }
@Override public List<String> validateText(String text, List<String> tags) { Assert.isTrue(ENABLED, "敏感词功能未开启,请将 ENABLED 设置为 true"); // 无标签时,默认所有 if (CollUtil.isEmpty(tags)) { return defaultSensitiveWordTrie.validate(text); } // 有标签的情况 Set<String> result = new HashSet<>(); tags.forEach(tag -> { SimpleTrie trie = tagSensitiveWordTries.get(tag); if (trie == null) { return; } result.addAll(trie.validate(text)); }); return new ArrayList<>(result); }
// Tag-scoped validation: the "论坛" trie matches 傻瓜 only, and the "测试" trie matches 白.
@Test public void testValidateText_hasTag() { testInitLocalCache(); // 准备参数 (prepare arguments) String text = "你是傻瓜,你是笨蛋"; // 调用 (invoke) List<String> result = sensitiveWordService.validateText(text, singletonList("论坛")); // 断言 (assert) assertEquals(singletonList("傻瓜"), result); // 准备参数 (prepare arguments) String text2 = "你是白"; // 调用 (invoke) List<String> result2 = sensitiveWordService.validateText(text2, singletonList("测试")); // 断言 (assert) assertEquals(singletonList("白"), result2); }
// Read transform that decodes each Pub/Sub message payload as a UTF-8 string.
public static Read<String> readStrings() { return Read.newBuilder( (PubsubMessage message) -> new String(message.getPayload(), StandardCharsets.UTF_8)) .setCoder(StringUtf8Coder.of()) .build(); }
// Verifies topic, timestamp attribute and id attribute all surface in DisplayData.
@Test public void testReadTopicDisplayData() { String topic = "projects/project/topics/topic"; PubsubIO.Read<String> read = PubsubIO.readStrings() .fromTopic(StaticValueProvider.of(topic)) .withTimestampAttribute("myTimestamp") .withIdAttribute("myId"); DisplayData displayData = DisplayData.from(read); assertThat(displayData, hasDisplayItem("topic", topic)); assertThat(displayData, hasDisplayItem("timestampAttribute", "myTimestamp")); assertThat(displayData, hasDisplayItem("idAttribute", "myId")); }
// Renders an AST back to SQL text via the Formatter visitor, stripping trailing newlines.
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
// SELECT * round-trip: names are upper-cased, the source gets an alias, and EMIT CHANGES
// is made explicit by the formatter.
@Test public void shouldFormatSelectStarCorrectly() { final String statementString = "CREATE STREAM S AS SELECT * FROM address;"; final Statement statement = parseSingle(statementString); assertThat(SqlFormatter.formatSql(statement), equalTo("CREATE STREAM S AS SELECT *\n" + "FROM ADDRESS ADDRESS\nEMIT CHANGES")); }
// Accessor for the context's query-cache scheduler.
@Override public QueryCacheScheduler getQueryCacheScheduler() { return queryCacheScheduler; }
// Exercises the scheduler end-to-end: one-shot execute runs, repetition task fires more
// than once (asserted eventually), then the scheduler is shut down.
@Test public void testGetQueryCacheScheduler() { QueryCacheScheduler scheduler = context.getQueryCacheScheduler(); assertNotNull(scheduler); final QuerySchedulerTask task = new QuerySchedulerTask(); scheduler.execute(task); final QuerySchedulerRepetitionTask repetitionTask = new QuerySchedulerRepetitionTask(); scheduler.scheduleWithRepetition(repetitionTask, 1); assertTrueEventually(() -> { assertTrue(task.executed); assertTrue(repetitionTask.counter.get() > 1); }); scheduler.shutdown(); }
// Extracts the substring of src between the first 'start' and the following 'to'
// markers (via subString) and parses it as an Integer (via stringToInteger);
// presumably returns null when the markers or digits are absent — behavior of the
// helpers is defined elsewhere.
public static Integer subStringToInteger(String src, String start, String to) { return stringToInteger(subString(src, start, to)); }
// Verifies subStringToInteger: no digits between the markers yields null; "1234"
// between "1" and "3" yields the integer 2. Integer.valueOf replaces the deprecated
// (and removal-flagged) new Integer(int) boxing constructor.
@Test public void testSubStringToInteger() { Assert.assertNull(StringUtils.subStringToInteger("foobar", "1", "3")); Assert.assertEquals(Integer.valueOf(2), StringUtils.subStringToInteger("1234", "1", "3")); }
// Denies file creation at the root, DeepBox or Box container levels outright. Otherwise
// checks the working directory's ACL for the CANADDCHILDREN role; an EMPTY ACL is treated
// as "not yet initialized" and allowed with a warning rather than rejected.
@Override public void preflight(final Path workdir, final String filename) throws BackgroundException { if(workdir.isRoot() || new DeepboxPathContainerService(session).isDeepbox(workdir) || new DeepboxPathContainerService(session).isBox(workdir)) { throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir); } final Acl acl = workdir.attributes().getAcl(); if(Acl.EMPTY == acl) { // Missing initialization log.warn(String.format("Unknown ACLs on %s", workdir)); return; } if(!acl.get(new Acl.CanonicalUser()).contains(CANADDCHILDREN)) { if(log.isWarnEnabled()) { log.warn(String.format("ACL %s for %s does not include %s", acl, workdir, CANADDCHILDREN)); } throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir); } }
// A Documents folder whose box policy forbids adding root files must fail preflight for
// both touch and directory creation with AccessDeniedException.
@Test public void testNoAddChildrenDocuments() throws Exception { final DeepboxIdProvider nodeid = new DeepboxIdProvider(session); final Path folder = new Path("/ORG 1 - DeepBox Desktop App/ORG1:Box1/Documents/", EnumSet.of(Path.Type.directory, Path.Type.volume)); final PathAttributes attributes = new DeepboxAttributesFinderFeature(session, nodeid).find(folder); assertFalse(new BoxRestControllerApi(session.getClient()).getBox(ORG1, ORG1_BOX1).getBoxPolicy().isCanAddFilesRoot()); assertFalse(attributes.getAcl().get(new Acl.CanonicalUser()).contains(CANADDCHILDREN)); assertThrows(AccessDeniedException.class, () -> new DeepboxTouchFeature(session, nodeid).preflight(folder.withAttributes(attributes), new AlphanumericRandomStringService().random())); assertThrows(AccessDeniedException.class, () -> new DeepboxDirectoryFeature(session, nodeid).preflight(folder.withAttributes(attributes), new AlphanumericRandomStringService().random())); }
// Routes a statement for single (non-sharded) tables. Short-circuits when the database
// has exactly one storage unit; otherwise validates the statement (if a validator
// exists), collects the single tables involved, and lets the route engine populate the
// result.
@Override public RouteContext createRouteContext(final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final SingleRule rule, final ConfigurationProperties props, final ConnectionContext connectionContext) { if (1 == database.getResourceMetaData().getStorageUnits().size()) { return createSingleDataSourceRouteContext(rule, database, queryContext); } RouteContext result = new RouteContext(); SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext(); SingleMetaDataValidatorFactory.newInstance(sqlStatementContext.getSqlStatement()).ifPresent(optional -> optional.validate(rule, sqlStatementContext, database)); Collection<QualifiedTable> singleTables = getSingleTables(database, rule, result, sqlStatementContext); SingleRouteEngineFactory.newInstance(singleTables, sqlStatementContext.getSqlStatement()).ifPresent(optional -> optional.route(result, rule)); return result; }
// Multi-datasource routing: expects exactly one route unit whose logical and actual
// datasource names coincide and whose single table mapper targets t_order.
@Test void assertCreateRouteContextWithMultiDataSource() throws SQLException { SingleRule rule = new SingleRule(new SingleRuleConfiguration(), DefaultDatabase.LOGIC_NAME, new H2DatabaseType(), createMultiDataSourceMap(), Collections.emptyList()); ShardingSphereDatabase database = mockDatabaseWithMultipleResources(); RouteContext actual = new SingleSQLRouter().createRouteContext( createQueryContext(), mock(RuleMetaData.class), database, rule, new ConfigurationProperties(new Properties()), new ConnectionContext(Collections::emptySet)); List<RouteUnit> routeUnits = new ArrayList<>(actual.getRouteUnits()); assertThat(actual.getRouteUnits().size(), is(1)); assertThat(routeUnits.get(0).getDataSourceMapper().getLogicName(), is(routeUnits.get(0).getDataSourceMapper().getActualName())); assertThat(routeUnits.get(0).getTableMappers().size(), is(1)); RouteMapper tableMapper = routeUnits.get(0).getTableMappers().iterator().next(); assertThat(tableMapper.getActualName(), is("t_order")); assertThat(tableMapper.getLogicName(), is("t_order")); }
// Coordinates installation/uninstallation of a pair of Intent versions. Installable
// intents are grouped per IntentInstaller; each installer gets one IntentOperationContext
// (holding both its uninstall and install lists) registered as pending on the shared
// IntentInstallationContext before apply() is invoked. No-op when both Optionals are empty.
public void installIntents(Optional<IntentData> toUninstall, Optional<IntentData> toInstall) { // If no any Intents to be uninstalled or installed, ignore it. if (!toUninstall.isPresent() && !toInstall.isPresent()) { return; } // Classify installable Intents to different installers. ArrayListMultimap<IntentInstaller, Intent> uninstallInstallers; ArrayListMultimap<IntentInstaller, Intent> installInstallers; Set<IntentInstaller> allInstallers = Sets.newHashSet(); if (toUninstall.isPresent()) { uninstallInstallers = getInstallers(toUninstall.get()); allInstallers.addAll(uninstallInstallers.keySet()); } else { uninstallInstallers = ArrayListMultimap.create(); } if (toInstall.isPresent()) { installInstallers = getInstallers(toInstall.get()); allInstallers.addAll(installInstallers.keySet()); } else { installInstallers = ArrayListMultimap.create(); } // Generates an installation context for the high level Intent. IntentInstallationContext installationContext = new IntentInstallationContext(toUninstall.orElse(null), toInstall.orElse(null)); //Generates different operation context for different installable Intents. Map<IntentInstaller, IntentOperationContext> contexts = Maps.newHashMap(); allInstallers.forEach(installer -> { List<Intent> intentsToUninstall = uninstallInstallers.get(installer); List<Intent> intentsToInstall = installInstallers.get(installer); // Connect context to high level installation context IntentOperationContext context = new IntentOperationContext(intentsToUninstall, intentsToInstall, installationContext); installationContext.addPendingContext(context); contexts.put(installer, context); }); // Apply contexts to installers contexts.forEach((installer, context) -> { installer.apply(context); }); }
// Swaps in a failing installer and asserts the coordinator marks the Intent CORRUPT
// while retaining the previous (to-uninstall) installables in the stored data.
@Test public void testInstallFailed() { installerRegistry.unregisterInstaller(TestInstallableIntent.class); installerRegistry.registerInstaller(TestInstallableIntent.class, new TestFailedIntentInstaller()); IntentData toUninstall = new IntentData(createTestIntent(), IntentState.INSTALLED, new WallClockTimestamp()); IntentData toInstall = new IntentData(createTestIntent(), IntentState.INSTALLING, new WallClockTimestamp()); List<Intent> intentsToUninstall = Lists.newArrayList(); List<Intent> intentsToInstall = Lists.newArrayList(); IntStream.range(0, 10).forEach(val -> { intentsToUninstall.add(new TestInstallableIntent(val)); }); IntStream.range(10, 20).forEach(val -> { intentsToInstall.add(new TestInstallableIntent(val)); }); toUninstall = IntentData.compiled(toUninstall, intentsToUninstall); toInstall = IntentData.compiled(toInstall, intentsToInstall); installCoordinator.installIntents(Optional.of(toUninstall), Optional.of(toInstall)); Intent toUninstallIntent = toUninstall.intent(); TestTools.assertAfter(INSTALL_DELAY, INSTALL_DURATION, () -> { IntentData newData = intentStore.newData; assertEquals(toUninstallIntent, newData.intent()); assertEquals(IntentState.CORRUPT, newData.state()); assertEquals(intentsToUninstall, newData.installables()); }); }
/**
 * Renders {@code element} via toString and truncates it to at most {@code maxLen}
 * characters, appending "..." when truncation occurred. A null element yields "".
 */
public static String leftTruncate(@Nullable Object element, int maxLen) {
    if (element == null) {
        return "";
    }
    final String rendered = element.toString();
    return rendered.length() <= maxLen ? rendered : rendered.substring(0, maxLen) + "...";
}
// null and empty inputs yield ""; over-length input is cut and suffixed with "...".
@Test public void testLeftTruncate() { assertEquals("", StringUtils.leftTruncate(null, 3)); assertEquals("", StringUtils.leftTruncate("", 3)); assertEquals("abc...", StringUtils.leftTruncate("abcd", 3)); }
// Integer overload of the ACOS UDF: null-safely widens to Double and delegates to the
// Double overload (defined elsewhere in this UDF class).
@Udf(description = "Returns the inverse (arc) cosine of an INT value") public Double acos( @UdfParameter( value = "value", description = "The value to get the inverse cosine of." ) final Integer value ) { return acos(value == null ? null : value.doubleValue()); }
// acos(0) must be pi/2 for the double, int and long overloads alike.
@Test public void shouldHandleZero() { assertThat(udf.acos(0.0), closeTo(1.5707963267948966, 0.000000000000001)); assertThat(udf.acos(0), closeTo(1.5707963267948966, 0.000000000000001)); assertThat(udf.acos(0L), closeTo(1.5707963267948966, 0.000000000000001)); }
// Refreshes the activity state's metadata from the live session (null when the session
// is gone). If the session belongs to a gateway session configured to overwrite
// activity time, the state's last-recorded time is raised to the gateway's last
// recorded time; otherwise the state is returned as-is.
@Override protected ActivityState<TransportProtos.SessionInfoProto> updateState(UUID sessionId, ActivityState<TransportProtos.SessionInfoProto> state) { SessionMetaData session = sessions.get(sessionId); if (session == null) { return null; } state.setMetadata(session.getSessionInfo()); var sessionInfo = state.getMetadata(); if (sessionInfo.getGwSessionIdMSB() == 0L || sessionInfo.getGwSessionIdLSB() == 0L) { return state; } var gwSessionId = new UUID(sessionInfo.getGwSessionIdMSB(), sessionInfo.getGwSessionIdLSB()); SessionMetaData gwSession = sessions.get(gwSessionId); if (gwSession == null || !gwSession.isOverwriteActivityTime()) { return state; } long lastRecordedTime = state.getLastRecordedTime(); long gwLastRecordedTime = getLastRecordedTime(gwSessionId); log.debug("Session with id: [{}] has gateway session with id: [{}] with overwrite activity time enabled. " + "Updating last activity time. Session last recorded time: [{}], gateway session last recorded time: [{}].", sessionId, gwSessionId, lastRecordedTime, gwLastRecordedTime); state.setLastRecordedTime(Math.max(lastRecordedTime, gwLastRecordedTime)); return state; }
// Session references a gateway id, but no gateway session is registered: the same state
// instance must come back with refreshed metadata, untouched last-recorded time, and no
// lookup of the gateway's last recorded time.
@Test void givenHasGwSessionIdButGwSessionIsNotNull_whenUpdatingActivityState_thenShouldReturnSameInstanceWithUpdatedSessionInfo() { // GIVEN var gwSessionId = UUID.fromString("19864038-9b48-11ee-b9d1-0242ac120002"); TransportProtos.SessionInfoProto sessionInfo = TransportProtos.SessionInfoProto.newBuilder() .setSessionIdMSB(SESSION_ID.getMostSignificantBits()) .setSessionIdLSB(SESSION_ID.getLeastSignificantBits()) .setGwSessionIdMSB(gwSessionId.getMostSignificantBits()) .setGwSessionIdLSB(gwSessionId.getLeastSignificantBits()) .build(); SessionMsgListener listenerMock = mock(SessionMsgListener.class); sessions.put(SESSION_ID, new SessionMetaData(sessionInfo, TransportProtos.SessionType.ASYNC, listenerMock)); long lastRecordedTime = 123L; ActivityState<TransportProtos.SessionInfoProto> state = new ActivityState<>(); state.setLastRecordedTime(lastRecordedTime); state.setMetadata(TransportProtos.SessionInfoProto.getDefaultInstance()); when(transportServiceMock.updateState(SESSION_ID, state)).thenCallRealMethod(); // WHEN ActivityState<TransportProtos.SessionInfoProto> updatedState = transportServiceMock.updateState(SESSION_ID, state); // THEN assertThat(updatedState).isSameAs(state); assertThat(updatedState.getLastRecordedTime()).isEqualTo(lastRecordedTime); assertThat(updatedState.getMetadata()).isEqualTo(sessionInfo); verify(transportServiceMock, never()).getLastRecordedTime(gwSessionId); }
// Submits POP-consumed messages to the consume thread pool. Small batches go as one
// ConsumeRequest; larger lists are chunked to consumeBatchSize. If the executor rejects
// a chunk, all remaining messages are folded into that chunk and the request is
// rescheduled via submitConsumeRequestLater (note the remainder loop drains `total`,
// which also terminates the outer loop).
@Override public void submitPopConsumeRequest( final List<MessageExt> msgs, final PopProcessQueue processQueue, final MessageQueue messageQueue) { final int consumeBatchSize = this.defaultMQPushConsumer.getConsumeMessageBatchMaxSize(); if (msgs.size() <= consumeBatchSize) { ConsumeRequest consumeRequest = new ConsumeRequest(msgs, processQueue, messageQueue); try { this.consumeExecutor.submit(consumeRequest); } catch (RejectedExecutionException e) { this.submitConsumeRequestLater(consumeRequest); } } else { for (int total = 0; total < msgs.size(); ) { List<MessageExt> msgThis = new ArrayList<>(consumeBatchSize); for (int i = 0; i < consumeBatchSize; i++, total++) { if (total < msgs.size()) { msgThis.add(msgs.get(total)); } else { break; } } ConsumeRequest consumeRequest = new ConsumeRequest(msgThis, processQueue, messageQueue); try { this.consumeExecutor.submit(consumeRequest); } catch (RejectedExecutionException e) { for (; total < msgs.size(); total++) { msgThis.add(msgs.get(total)); } this.submitConsumeRequestLater(consumeRequest); } } } }
// Single-message list must result in exactly one executor submission (small-batch path).
// The executor is injected by reflection to capture the submit call.
@Test public void testSubmitPopConsumeRequest() throws IllegalAccessException { List<MessageExt> msgs = Collections.singletonList(createMessageExt()); PopProcessQueue processQueue = mock(PopProcessQueue.class); MessageQueue messageQueue = mock(MessageQueue.class); ThreadPoolExecutor consumeExecutor = mock(ThreadPoolExecutor.class); FieldUtils.writeDeclaredField(popService, "consumeExecutor", consumeExecutor, true); popService.submitPopConsumeRequest(msgs, processQueue, messageQueue); verify(consumeExecutor, times(1)).submit(any(Runnable.class)); }
// Test-only convenience factory: builds a JobGraph with the current thread's context
// classloader, no job id, and a same-thread (Runnable::run) executor.
@VisibleForTesting public static JobGraph createJobGraph(StreamGraph streamGraph) { return new StreamingJobGraphGenerator( Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run) .createJobGraph(); }
// Builds an iterative topology and asserts: all vertices share one slot sharing group;
// only the generated iteration source/sink vertices carry a co-location group, and it
// is the same group for both.
@Test void testIteration() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream<Integer> source = env.fromData(1, 2, 3).name("source"); IterativeStream<Integer> iteration = source.iterate(3000); iteration.name("iteration").setParallelism(2); DataStream<Integer> map = iteration.map(x -> x + 1).name("map").setParallelism(2); DataStream<Integer> filter = map.filter((x) -> false).name("filter").setParallelism(2); iteration.closeWith(filter).print(); JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph()); SlotSharingGroup slotSharingGroup = jobGraph.getVerticesAsArray()[0].getSlotSharingGroup(); assertThat(slotSharingGroup).isNotNull(); CoLocationGroup iterationSourceCoLocationGroup = null; CoLocationGroup iterationSinkCoLocationGroup = null; for (JobVertex jobVertex : jobGraph.getVertices()) { // all vertices have same slot sharing group by default assertThat(jobVertex.getSlotSharingGroup()).isEqualTo(slotSharingGroup); // all iteration vertices have same co-location group, // others have no co-location group by default if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SOURCE_NAME_PREFIX)) { iterationSourceCoLocationGroup = jobVertex.getCoLocationGroup(); assertThat(iterationSourceCoLocationGroup.getVertexIds()) .contains(jobVertex.getID()); } else if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SINK_NAME_PREFIX)) { iterationSinkCoLocationGroup = jobVertex.getCoLocationGroup(); assertThat(iterationSinkCoLocationGroup.getVertexIds()).contains(jobVertex.getID()); } else { assertThat(jobVertex.getCoLocationGroup()).isNull(); } } assertThat(iterationSourceCoLocationGroup).isNotNull(); assertThat(iterationSinkCoLocationGroup).isNotNull(); assertThat(iterationSinkCoLocationGroup).isEqualTo(iterationSourceCoLocationGroup); }
// Deprecated shim: extracts the principal name and delegates to the name-based
// createToken(String) overload. Prefer calling that overload directly.
@Deprecated public String createToken(Authentication authentication) { return createToken(authentication.getName()); }
// Token creation with an alphanumeric-only secret key must succeed (no AccessException).
@Test void testCreateTokenAndSecretKeyWithoutSpecialSymbol() throws AccessException { createToken("SecretKey0123567890234567890123456789012345678901234567890123456789"); }
/**
 * Display name of the map type, e.g. {@code map(bigint, varchar(42))}, composed from
 * the key and value types' own display names.
 */
@Override
public String getDisplayName() {
    return String.format("map(%s, %s)", keyType.getDisplayName(), valueType.getDisplayName());
}
// Display name must embed the value type's own rendering: bounded varchar keeps its
// length, unbounded varchar renders bare.
@Test public void testMapDisplayName() { MapType mapType = new MapType( BIGINT, createVarcharType(42), MethodHandleUtil.methodHandle(TestMapType.class, "throwUnsupportedOperation"), MethodHandleUtil.methodHandle(TestMapType.class, "throwUnsupportedOperation")); assertEquals(mapType.getDisplayName(), "map(bigint, varchar(42))"); mapType = new MapType( BIGINT, VARCHAR, MethodHandleUtil.methodHandle(TestMapType.class, "throwUnsupportedOperation"), MethodHandleUtil.methodHandle(TestMapType.class, "throwUnsupportedOperation")); assertEquals(mapType.getDisplayName(), "map(bigint, varchar)"); }
// Builds a flyweight channel consumer from the consumer template; the unchecked
// cast is inherent to the template-based flyweight API.
@SuppressWarnings("unchecked") private SpscChannelConsumer<E> newConsumer(Object... args) { return mapper.newFlyweight(SpscChannelConsumer.class, "ChannelConsumerTemplate.java", Template.fromFile(Channel.class, "ChannelConsumerTemplate.java"), args); }
// A claimed-but-uncommitted element must not be visible to the consumer:
// the producer claims a slot and writes, but never commits, so read() returns false.
@Test public void shouldNotReadUnCommittedMessages() { ChannelConsumer consumer = newConsumer(); assertTrue(producer.claim()); Example writer = producer.currentElement(); writer.setBar(10L); assertFalse(consumer.read()); }
/**
 * Appends a length-prefixed payload to the queue head.
 *
 * Fast path: if the current head segment has room for header + payload, write it in
 * place and advance the head pointer. Slow path: materialize header + payload into
 * one buffer, fill the remainder of the current head segment, then keep acquiring
 * fresh segments from the pool until the buffer is fully written.
 *
 * NOTE(review): the slow path assumes segment acquisition happens under a global
 * lock (per the inline comment) — confirm the caller/queuePool guarantees this.
 *
 * @param payload the message body; fully consumed by this call
 * @throws QueueException on segment allocation/write failures
 */
public void enqueue(ByteBuffer payload) throws QueueException { final int messageSize = LENGTH_HEADER_SIZE + payload.remaining(); if (headSegment.hasSpace(currentHeadPtr, messageSize)) { LOG.debug("Head segment has sufficient space for message length {}", LENGTH_HEADER_SIZE + payload.remaining()); writeData(headSegment, currentHeadPtr.plus(1), payload); // move head segment currentHeadPtr = currentHeadPtr.moveForward(messageSize); return; } LOG.debug("Head segment doesn't have enough space"); // the payload can't be fully contained into the current head segment and needs to be splitted // with another segment. final int dataSize = payload.remaining(); final ByteBuffer rawData = (ByteBuffer) ByteBuffer.allocate(LENGTH_HEADER_SIZE + dataSize) .putInt(dataSize) .put(payload) .flip(); // the bytes written from the payload input long bytesRemainingInHeaderSegment = Math.min(rawData.remaining(), headSegment.bytesAfter(currentHeadPtr)); LOG.trace("Writing partial payload to offset {} for {} bytes", currentHeadPtr, bytesRemainingInHeaderSegment); if (bytesRemainingInHeaderSegment > 0) { int copySize = (int) bytesRemainingInHeaderSegment; ByteBuffer slice = rawData.slice(); slice.limit(copySize); writeDataNoHeader(headSegment, currentHeadPtr.plus(1), slice); currentHeadPtr = currentHeadPtr.moveForward(bytesRemainingInHeaderSegment); // No need to move newSegmentPointer the pointer because the last spinningMove has already moved it // shift forward the consumption point rawData.position(rawData.position() + copySize); } Segment newSegment = null; // till the payload is not completely stored, // save the remaining part into a new segment. while (rawData.hasRemaining()) { // To request the next segment, it's needed to be done in global lock. newSegment = queuePool.nextFreeSegment(); //notify segment creation for queue in queue pool allocationListener.segmentedCreated(name, newSegment); int copySize = (int) Math.min(rawData.remaining(), allocator.getSegmentSize()); ByteBuffer slice = rawData.slice(); slice.limit(copySize); currentHeadPtr = currentHeadPtr.moveForward(copySize); writeDataNoHeader(newSegment, newSegment.begin, slice); headSegment = newSegment; // shift forward the consumption point rawData.position(rawData.position() + copySize); } }
// Enqueues a 128-byte payload into a fresh queue whose head segment (1024 bytes)
// has ample room, exercising only the non-splitting fast path of enqueue().
@Test public void basicNoBlockEnqueue() throws QueueException, IOException { final MappedByteBuffer pageBuffer = Utils.createPageFile(); final Segment head = new Segment(pageBuffer, new SegmentPointer(0, 0), new SegmentPointer(0, 1024)); final VirtualPointer currentHead = VirtualPointer.buildUntouched(); final Queue queue = new Queue("test", head, currentHead, head, currentHead, new DummySegmentAllocator(), (name, segment) -> { // NOOP }, null); // generate byte array to insert. ByteBuffer payload = randomPayload(128); queue.enqueue(payload); }
// Package-private accessor for the configured retry modes (exposed for tests).
Set<String> getRetry() { return retry; }
// Verifies that configuring retry mode "retryable" surfaces as exactly that single
// element in the helper's retry set.
@Test
public void determineRetryWhenSetToRetryable() {
    Athena2QueryHelper helper = athena2QueryHelperWithRetry("retryable");
    // Collections.singleton is the idiomatic single-element immutable Set
    // (Set.equals holds across implementations, so the assertion is unchanged).
    assertEquals(Collections.singleton("retryable"), helper.getRetry());
}
/**
 * Shuts the timer service down, retrying until the deadline even if the calling
 * thread is interrupted. Interrupts are swallowed during the wait and the thread's
 * interrupted status is restored before returning (standard InterruptedException
 * etiquette).
 *
 * @param timeoutMs maximum total time to wait for pending timers
 * @return true if shutdown completed within the timeout
 */
@Override public boolean shutdownServiceUninterruptible(long timeoutMs) { final Deadline deadline = Deadline.fromNow(Duration.ofMillis(timeoutMs)); boolean shutdownComplete = false; boolean receivedInterrupt = false; do { try { // wait for a reasonable time for all pending timer threads to finish shutdownComplete = shutdownAndAwaitPending( deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException iex) { receivedInterrupt = true; LOG.trace("Intercepted attempt to interrupt timer service shutdown.", iex); } } while (deadline.hasTimeLeft() && !shutdownComplete); if (receivedInterrupt) { Thread.currentThread().interrupt(); } return shutdownComplete; }
// Hammers the shutting-down thread with interrupts from a helper thread while a timer
// blocks, verifying shutdownServiceUninterruptible still waits out its full timeout,
// reports failure while the timer is stuck, and succeeds once the timer is released.
@Test void testShutdownServiceUninterruptible() { final OneShotLatch blockUntilTriggered = new OneShotLatch(); final AtomicBoolean timerFinished = new AtomicBoolean(false); final SystemProcessingTimeService timeService = createBlockingSystemProcessingTimeService(blockUntilTriggered, timerFinished); assertThat(timeService.isTerminated()).isFalse(); final Thread interruptTarget = Thread.currentThread(); final AtomicBoolean runInterrupts = new AtomicBoolean(true); final Thread interruptCallerThread = new Thread( () -> { while (runInterrupts.get()) { interruptTarget.interrupt(); try { Thread.sleep(1); } catch (InterruptedException ignore) { } } }); interruptCallerThread.start(); final long timeoutMs = 50L; final long startTime = System.nanoTime(); assertThat(timeService.isTerminated()).isFalse(); // check that termination did not succeed (because of blocking timer execution) assertThat(timeService.shutdownServiceUninterruptible(timeoutMs)).isFalse(); // check that termination flag was set. assertThat(timeService.isTerminated()).isTrue(); // check that the blocked timer is still in flight. assertThat(timerFinished).isFalse(); // check that we waited until timeout assertThat(System.nanoTime() - startTime).isGreaterThanOrEqualTo(1_000_000L * timeoutMs); runInterrupts.set(false); do { try { interruptCallerThread.join(); } catch (InterruptedException ignore) { } } while (interruptCallerThread.isAlive()); // clear the interrupted flag in case join didn't do it final boolean ignored = Thread.interrupted(); blockUntilTriggered.trigger(); assertThat(timeService.shutdownServiceUninterruptible(timeoutMs)).isTrue(); assertThat(timerFinished).isTrue(); }
// Refreshes the cached partition info for the table and returns a snapshot of the
// partition names. The version argument is currently unused here.
@Override public List<String> listPartitionNames(String databaseName, String tableName, TableVersionRange version) { updatePartitionInfo(databaseName, tableName); return new ArrayList<>(this.partitionInfos.keySet()); }
// Mocks a Paimon table with partition keys (year, month) and a $partitions system
// table yielding two partition rows, then verifies listPartitionNames renders them
// as Hive-style "year=.../month=..." names.
@Test public void testListPartitionNames(@Mocked FileStoreTable mockPaimonTable, @Mocked PartitionsTable mockPartitionTable, @Mocked RecordReader<InternalRow> mockRecordReader) throws Catalog.TableNotExistException, IOException { RowType tblRowType = RowType.of( new DataType[] { new IntType(true), new IntType(true) }, new String[] {"year", "month"}); List<String> partitionNames = Lists.newArrayList("year", "month"); Identifier tblIdentifier = new Identifier("db1", "tbl1"); Identifier partitionTblIdentifier = new Identifier("db1", "tbl1$partitions"); RowType partitionRowType = new RowType( Arrays.asList( new DataField(0, "partition", SerializationUtils.newStringType(true)), new DataField(1, "record_count", new BigIntType(false)), new DataField(2, "file_size_in_bytes", new BigIntType(false)), new DataField(3, "file_count", new BigIntType(false)), new DataField(4, "last_update_time", DataTypes.TIMESTAMP_MILLIS()) )); GenericRow row1 = new GenericRow(2); row1.setField(0, BinaryString.fromString("[2020, 1]")); row1.setField(1, Timestamp.fromLocalDateTime(LocalDateTime.of(2023, 1, 1, 0, 0, 0, 0))); GenericRow row2 = new GenericRow(2); row2.setField(0, BinaryString.fromString("[2020, 2]")); row2.setField(1, Timestamp.fromLocalDateTime(LocalDateTime.of(2023, 2, 1, 0, 0, 0, 0))); new MockUp<RecordReaderIterator>() { private int callCount; private final GenericRow[] elements = {row1, row2}; private final boolean[] hasNextOutputs = {true, true, false}; @Mock public boolean hasNext() { if (callCount < hasNextOutputs.length) { return hasNextOutputs[callCount]; } return false; } @Mock public InternalRow next() { if (callCount < elements.length) { return elements[callCount++]; } return null; } }; new Expectations() { { paimonNativeCatalog.getTable(tblIdentifier); result = mockPaimonTable; mockPaimonTable.partitionKeys(); result = partitionNames; mockPaimonTable.rowType(); result = tblRowType; paimonNativeCatalog.getTable(partitionTblIdentifier); result = mockPartitionTable; mockPartitionTable.rowType(); result = partitionRowType; mockPartitionTable.newReadBuilder().withProjection((int[]) any).newRead().createReader((TableScan.Plan) any); result = mockRecordReader; } }; List<String> result = metadata.listPartitionNames("db1", "tbl1", TableVersionRange.empty()); Assert.assertEquals(2, result.size()); List<String> expections = Lists.newArrayList("year=2020/month=1", "year=2020/month=2"); Assertions.assertThat(result).hasSameElementsAs(expections); }
// Returns the absolute URI of the application root ("/") built from this provider's base.
public URI rootUrl() { return this.build("/"); }
// The root URL must include the configured host and sub-path.
@Test void root() { assertThat(uriProvider.rootUrl().toString(), containsString("mysuperhost.com/subpath/")); }
/**
 * Removes the problem tracked at the given line, notifying invalidation listeners
 * only when something was actually removed.
 *
 * @param line line number whose problem should be dropped
 * @return true if a problem existed at that line and was removed
 */
public boolean removeByLine(int line) { boolean updated = problems.remove(line) != null; if (updated) Unchecked.checkedForEach(listeners, ProblemInvalidationListener::onProblemInvalidation, (listener, t) -> logger.error("Exception thrown when removing problem from tracking", t)); return updated; }
// Adding two problems then removing one by line should leave exactly one tracked problem.
@Test void removeByLine() { ProblemTracking tracking = new ProblemTracking(); tracking.add(new Problem(0, 0, ERROR, LINT, "message")); tracking.add(new Problem(1, 0, ERROR, LINT, "message")); assertEquals(2, tracking.getProblems().size()); assertTrue(tracking.removeByLine(1)); assertEquals(1, tracking.getProblems().size()); }
// Convenience overload: parses a config-qualifier string with defaults applied (third arg true).
public static boolean parse(final String str, ResTable_config out) { return parse(str, out, true); }
// Parsing the "nonav" qualifier must set the navigation field to NAVIGATION_NONAV.
@Test public void parse_navigation_nonav() { ResTable_config config = new ResTable_config(); ConfigDescription.parse("nonav", config); assertThat(config.navigation).isEqualTo(NAVIGATION_NONAV); }
/**
 * Registers the RocksDB value providers (db handle, cache, statistics) for a segment store.
 * The first registration also attaches this recorder to the metrics recording trigger;
 * a duplicate segment name is a Streams bug and fails fast.
 *
 * @throws IllegalStateException if providers for {@code segmentName} were already added
 */
public void addValueProviders(final String segmentName, final RocksDB db, final Cache cache, final Statistics statistics) { if (storeToValueProviders.isEmpty()) { logger.debug("Adding metrics recorder of task {} to metrics recording trigger", taskId); streamsMetrics.rocksDBMetricsRecordingTrigger().addMetricsRecorder(this); } else if (storeToValueProviders.containsKey(segmentName)) { throw new IllegalStateException("Value providers for store " + segmentName + " of task " + taskId + " has been already added. This is a bug in Kafka Streams. " + "Please open a bug report under https://issues.apache.org/jira/projects/KAFKA/issues"); } verifyDbAndCacheAndStatistics(segmentName, db, cache, statistics); logger.debug("Adding value providers for store {} of task {}", segmentName, taskId); storeToValueProviders.put(segmentName, new DbAndCacheAndStatistics(db, cache, statistics)); }
// Two segments sharing cacheToAdd1 then a third with a different cache must trigger
// the "not all distinct / not same cache" consistency check.
@Test public void shouldThrowIfCacheToAddIsNotSameAsAllExistingCaches() { recorder.addValueProviders(SEGMENT_STORE_NAME_1, dbToAdd1, cacheToAdd1, statisticsToAdd1); recorder.addValueProviders(SEGMENT_STORE_NAME_2, dbToAdd2, cacheToAdd1, statisticsToAdd2); final Throwable exception = assertThrows( IllegalStateException.class, () -> recorder.addValueProviders(SEGMENT_STORE_NAME_3, dbToAdd3, cacheToAdd2, statisticsToAdd3) ); assertThat( exception.getMessage(), is("Caches for store " + STORE_NAME + " of task " + TASK_ID1 + " are either not all distinct or do not all refer to the same cache. This is a bug in Kafka Streams. " + "Please open a bug report under https://issues.apache.org/jira/projects/KAFKA/issues") ); }
/**
 * Builds a Hoodie table sink from the catalog table: validates the required [path]
 * option, derives table/conf/sort options from the context, and sanity-checks the
 * resolved schema before constructing the sink.
 */
@Override public DynamicTableSink createDynamicTableSink(Context context) { Configuration conf = FlinkOptions.fromMap(context.getCatalogTable().getOptions()); checkArgument(!StringUtils.isNullOrEmpty(conf.getString(FlinkOptions.PATH)), "Option [path] should not be empty."); setupTableOptions(conf.getString(FlinkOptions.PATH), conf); ResolvedSchema schema = context.getCatalogTable().getResolvedSchema(); sanityCheck(conf, schema); setupConfOptions(conf, context.getObjectIdentifier(), context.getCatalogTable(), schema); setupSortOptions(conf, context.getConfiguration()); return new HoodieTableSink(conf, schema); }
// Verifies derived sink options: PRE_COMBINE defaults to true (upsert), database/table
// names are inferred automatically, and switching OPERATION to 'insert' flips PRE_COMBINE off.
@Test void testSetupWriteOptionsForSink() { final HoodieTableSink tableSink1 = (HoodieTableSink) new HoodieTableFactory().createDynamicTableSink(MockContext.getInstance(this.conf)); final Configuration conf1 = tableSink1.getConf(); assertThat(conf1.get(FlinkOptions.PRE_COMBINE), is(true)); // check setup database name and table name automatically assertThat(conf1.get(FlinkOptions.TABLE_NAME), is("t1")); assertThat(conf1.get(FlinkOptions.DATABASE_NAME), is("db1")); // set up operation as 'insert' this.conf.setString(FlinkOptions.OPERATION, "insert"); HoodieTableSink tableSink2 = (HoodieTableSink) new HoodieTableFactory().createDynamicTableSink(MockContext.getInstance(this.conf)); Configuration conf2 = tableSink2.getConf(); assertThat(conf2.get(FlinkOptions.PRE_COMBINE), is(false)); }
// Returns the signature bytes.
// NOTE(review): this exposes the internal array directly (callers could mutate it);
// a defensive clone may be preferable — confirm no caller relies on identity.
public byte[] getSignature() { return signature; }
// The control-data signature length must match the expected UTF-8 signature constant.
// (Renamed from "testGetSignaure" to fix the typo.)
@Test
public void testGetSignature() {
    assertEquals(TestParameters.VP_CONTROL_DATA_SIGNATURE.getBytes(UTF_8).length, chmLzxcControlData.getSignature().length);
}
// Returns this object's name.
public String name() { return name; }
// The constructed instance must report the name it was created with.
@Test public void testConstruction() { assertThat(name1.name(), is(NAME1)); }
// Iterates over the backing map's entries; changes made through the iterator affect the map.
@Override public Iterator<Map.Entry<String, Object>> iterator() { return map.entrySet().iterator(); }
// A single map entry should be surfaced unchanged through the adapter's iterator.
@Test public void oneProperty() { map.put("key", "value"); CamelMessagingHeadersExtractAdapter adapter = new CamelMessagingHeadersExtractAdapter(map, true); Iterator<Map.Entry<String, Object>> iterator = adapter.iterator(); Map.Entry<String, Object> entry = iterator.next(); assertEquals("key", entry.getKey()); assertEquals("value", entry.getValue()); }
/**
 * Maps a filename back to an id. This base implementation is the identity mapping;
 * strategies that encode ids into filenames override it.
 *
 * @deprecated retained for compatibility; see overriding strategies for real decoding.
 */
@Deprecated public String idFromFilename(@NonNull String filename) { return filename; }
// Exhaustively exercises CaseSensitive filename->id decoding: restricted names,
// $XXXX hex escapes (lowercase hex only), ~x uppercase escapes, and pass-through of
// sequences that are not valid escapes.
// NOTE(review): the final assertion's expected literal was split by extraction and
// has been rejoined as "iiii _-@$007Ea" — confirm against the original source.
@SuppressWarnings("deprecation") @Test public void caseSensitive() { IdStrategy idStrategy = new IdStrategy.CaseSensitive(); assertRestrictedNames(idStrategy); assertThat(idStrategy.idFromFilename("foo"), is("foo")); assertThat(idStrategy.idFromFilename("~foo"), is("Foo")); assertThat(idStrategy.idFromFilename("foo$002fbar"), is("foo/bar")); assertThat(idStrategy.idFromFilename("~foo$002f~bar"), is("Foo/Bar")); assertThat(idStrategy.idFromFilename("..$002ftest"), is("../test")); assertThat(idStrategy.idFromFilename("..$002f~test"), is("../Test")); assertThat(idStrategy.idFromFilename("0123 _-@$007ea"), is("0123 _-@~a")); assertThat(idStrategy.idFromFilename("0123 _-@~a"), is("0123 _-@A")); assertThat(idStrategy.idFromFilename("foo$002e"), is("foo.")); assertThat(idStrategy.idFromFilename("$002dfoo"), is("-foo")); assertThat(idStrategy.idFromFilename("~con"), is("Con")); assertThat(idStrategy.idFromFilename("~prn"), is("Prn")); assertThat(idStrategy.idFromFilename("~aux"), is("Aux")); assertThat(idStrategy.idFromFilename("~nul"), is("Nul")); assertThat(idStrategy.idFromFilename("~com1"), is("Com1")); assertThat(idStrategy.idFromFilename("~lpt1"), is("Lpt1")); assertThat(idStrategy.idFromFilename("big$money"), is("big$money")); assertThat(idStrategy.idFromFilename("$00c1aaa"), is("\u00c1aaa")); assertThat(idStrategy.idFromFilename("$00e1aaa"), is("\u00e1aaa")); assertThat(idStrategy.idFromFilename("aaaa$00e1"), is("aaaa\u00e1")); assertThat(idStrategy.idFromFilename("aaaa$00e1kkkk"), is("aaaa\u00e1kkkk")); assertThat(idStrategy.idFromFilename("aa$00e1zz$00e9pp"), is("aa\u00e1zz\u00e9pp")); assertThat(idStrategy.idFromFilename("$306f$56fd$5185$3067$6700$5927"), is("\u306f\u56fd\u5185\u3067\u6700\u5927")); assertThat(idStrategy.idFromFilename("$00E1aaa"), is("$00E1aaa")); assertThat(idStrategy.idFromFilename("$001gggg"), is("$001gggg")); assertThat(idStrategy.idFromFilename("rRr$t123"), is("rRr$t123")); assertThat(idStrategy.idFromFilename("iiii _-@$007~ea"), is("iiii _-@$007Ea")); }
/**
 * Provides the Compute Engine temp folder: ensures {@code <serverTmp>/ce} exists,
 * then hands out a fresh sub-directory wrapped in a TempFolder that cleans up on close
 * (second constructor arg true).
 *
 * @throws IllegalStateException if the base temp directory cannot be created
 */
@Bean("ComputationTempFolder") public TempFolder provide(ServerFileSystem fs) { File tempDir = new File(fs.getTempDir(), "ce"); try { FileUtils.forceMkdir(tempDir); } catch (IOException e) { throw new IllegalStateException("Unable to create computation temp directory " + tempDir, e); } File computationDir = new DefaultTempFolder(tempDir).newDir(); return new DefaultTempFolder(computationDir, true); }
// Directories created through the provided TempFolder must live under the server temp dir.
@Test public void existing_temp_dir() throws Exception { ServerFileSystem fs = mock(ServerFileSystem.class); File tmpDir = temp.newFolder(); when(fs.getTempDir()).thenReturn(tmpDir); TempFolder folder = underTest.provide(fs); assertThat(folder).isNotNull(); File newDir = folder.newDir(); assertThat(newDir).exists().isDirectory(); assertThat(newDir.getParentFile().getCanonicalPath()).startsWith(tmpDir.getCanonicalPath()); }
// Whether this event belongs to a retry branch.
public boolean isRetryBranch() { return retryBranch; }
@Test public void testIsRetryBranch() { // retryBranch flag defaults to false on a freshly built event assertFalse(event.isRetryBranch()); }
// Convenience overload: proxies a class whose constructor takes no arguments.
public <T> T create(Class<T> clazz) { return create(clazz, new Class<?>[]{}, new Object[]{}); }
// The unit-of-work proxy built around OAuthAuthenticator must behave like the real
// object: accept a known token and reject an unknown one.
@Test void testProxyWorks() throws Exception { final SessionDao sessionDao = new SessionDao(sessionFactory); final UnitOfWorkAwareProxyFactory unitOfWorkAwareProxyFactory = new UnitOfWorkAwareProxyFactory("default", sessionFactory); final OAuthAuthenticator oAuthAuthenticator = unitOfWorkAwareProxyFactory .create(OAuthAuthenticator.class, SessionDao.class, sessionDao); assertThat(oAuthAuthenticator.authenticate("67ab89d")).isTrue(); assertThat(oAuthAuthenticator.authenticate("bd1e23a")).isFalse(); }
/**
 * Sets this logger's level. A null level means "inherit from parent" and is illegal
 * on the root logger. Recomputes the effective level, propagates the change to all
 * children, and notifies context listeners.
 *
 * @param newLevel the new level, or null to inherit from the parent
 * @throws IllegalArgumentException if null is set on the root logger
 */
public synchronized void setLevel(Level newLevel) { if (level == newLevel) { // nothing to do; return; } if (newLevel == null && isRootLogger()) { throw new IllegalArgumentException( "The level of the root logger cannot be set to null"); } level = newLevel; if (newLevel == null) { effectiveLevelInt = parent.effectiveLevelInt; newLevel = parent.getEffectiveLevel(); } else { effectiveLevelInt = newLevel.levelInt; } if (childrenList != null) { int len = childrenList.size(); for (int i = 0; i < len; i++) { Logger child = (Logger) childrenList.get(i); // tell child to handle parent levelInt change child.handleParentLevelChange(effectiveLevelInt); } } // inform listeners loggerContext.fireOnLevelChange(this, newLevel); }
// Setting the root logger's level to null must be rejected with IllegalArgumentException.
@Test public void setRootLevelToNull() { try { root.setLevel(null); fail("The level of the root logger should not be settable to null"); } catch (IllegalArgumentException e) { } }
@Description("round to integer by dropping digits after decimal point")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double truncate(@SqlType(StandardTypes.DOUBLE) double num)
{
    // Truncate toward zero: ceil for negatives, floor otherwise.
    // Matches signum(num) * floor(abs(num)) for all inputs, including
    // NaN (propagated), +/-infinity, and +/-0.0 (sign preserved).
    return num < 0 ? Math.ceil(num) : Math.floor(num);
}
// Exercises truncate() across DOUBLE, REAL, short/long DECIMAAL widths and the
// two-argument TRUNCATE_N form (positive and negative digit counts), plus NULL
// propagation for every signature. Extraction line-splits inside comments/calls
// have been rejoined; no tokens changed.
@Test public void testTruncate() { // DOUBLE final String maxDouble = Double.toString(Double.MAX_VALUE); final String minDouble = Double.toString(-Double.MAX_VALUE); assertFunction("truncate(17.18E0)", DOUBLE, 17.0); assertFunction("truncate(-17.18E0)", DOUBLE, -17.0); assertFunction("truncate(17.88E0)", DOUBLE, 17.0); assertFunction("truncate(-17.88E0)", DOUBLE, -17.0); assertFunction("truncate(REAL '17.18')", REAL, 17.0f); assertFunction("truncate(REAL '-17.18')", REAL, -17.0f); assertFunction("truncate(REAL '17.88')", REAL, 17.0f); assertFunction("truncate(REAL '-17.88')", REAL, -17.0f); assertFunction("truncate(DOUBLE '" + maxDouble + "')", DOUBLE, Double.MAX_VALUE); assertFunction("truncate(DOUBLE '" + minDouble + "')", DOUBLE, -Double.MAX_VALUE); // TRUNCATE short DECIMAL -> short DECIMAL assertFunction("truncate(DECIMAL '1234')", createDecimalType(4, 0), SqlDecimal.of("1234")); assertFunction("truncate(DECIMAL '-1234')", createDecimalType(4, 0), SqlDecimal.of("-1234")); assertFunction("truncate(DECIMAL '1234.56')", createDecimalType(4, 0), SqlDecimal.of("1234")); assertFunction("truncate(DECIMAL '-1234.56')", createDecimalType(4, 0), SqlDecimal.of("-1234")); assertFunction("truncate(DECIMAL '123456789123456.999')", createDecimalType(15, 0), SqlDecimal.of("123456789123456")); assertFunction("truncate(DECIMAL '-123456789123456.999')", createDecimalType(15, 0), SqlDecimal.of("-123456789123456")); // TRUNCATE long DECIMAL -> short DECIMAL assertFunction("truncate(DECIMAL '1.99999999999999999999999999')", createDecimalType(1, 0), SqlDecimal.of("1")); assertFunction("truncate(DECIMAL '-1.99999999999999999999999999')", createDecimalType(1, 0), SqlDecimal.of("-1")); // TRUNCATE long DECIMAL -> long DECIMAL assertFunction("truncate(DECIMAL '1234567890123456789012')", createDecimalType(22, 0), SqlDecimal.of("1234567890123456789012")); assertFunction("truncate(DECIMAL '-1234567890123456789012')", createDecimalType(22, 0), SqlDecimal.of("-1234567890123456789012")); assertFunction("truncate(DECIMAL '1234567890123456789012.999')", createDecimalType(22, 0), SqlDecimal.of("1234567890123456789012")); assertFunction("truncate(DECIMAL '-1234567890123456789012.999')", createDecimalType(22, 0), SqlDecimal.of("-1234567890123456789012")); // TRUNCATE_N short DECIMAL -> short DECIMAL assertFunction("truncate(DECIMAL '1234', 1)", createDecimalType(4, 0), SqlDecimal.of("1234")); assertFunction("truncate(DECIMAL '1234', -1)", createDecimalType(4, 0), SqlDecimal.of("1230")); assertFunction("truncate(DECIMAL '1234.56', 1)", createDecimalType(6, 2), SqlDecimal.of("1234.50")); assertFunction("truncate(DECIMAL '1234.56', -1)", createDecimalType(6, 2), SqlDecimal.of("1230.00")); assertFunction("truncate(DECIMAL '-1234.56', 1)", createDecimalType(6, 2), SqlDecimal.of("-1234.50")); assertFunction("truncate(DECIMAL '1239.99', 1)", createDecimalType(6, 2), SqlDecimal.of("1239.90")); assertFunction("truncate(DECIMAL '-1239.99', 1)", createDecimalType(6, 2), SqlDecimal.of("-1239.90")); assertFunction("truncate(DECIMAL '1239.999', 2)", createDecimalType(7, 3), SqlDecimal.of("1239.990")); assertFunction("truncate(DECIMAL '1239.999', -2)", createDecimalType(7, 3), SqlDecimal.of("1200.000")); assertFunction("truncate(DECIMAL '123456789123456.999', 2)", createDecimalType(18, 3), SqlDecimal.of("123456789123456.990")); assertFunction("truncate(DECIMAL '123456789123456.999', -2)", createDecimalType(18, 3), SqlDecimal.of("123456789123400.000")); assertFunction("truncate(DECIMAL '1234', -4)", createDecimalType(4, 0), SqlDecimal.of("0000")); assertFunction("truncate(DECIMAL '1234.56', -4)", createDecimalType(6, 2), SqlDecimal.of("0000.00")); assertFunction("truncate(DECIMAL '-1234.56', -4)", createDecimalType(6, 2), SqlDecimal.of("0000.00")); assertFunction("truncate(DECIMAL '1234.56', 3)", createDecimalType(6, 2), SqlDecimal.of("1234.56")); assertFunction("truncate(DECIMAL '-1234.56', 3)", createDecimalType(6, 2), SqlDecimal.of("-1234.56")); // TRUNCATE_N long DECIMAL -> long DECIMAL assertFunction("truncate(DECIMAL '1234567890123456789012', 1)", createDecimalType(22, 0), SqlDecimal.of("1234567890123456789012")); assertFunction("truncate(DECIMAL '1234567890123456789012', -1)", createDecimalType(22, 0), SqlDecimal.of("1234567890123456789010")); assertFunction("truncate(DECIMAL '1234567890123456789012.23', 1)", createDecimalType(24, 2), SqlDecimal.of("1234567890123456789012.20")); assertFunction("truncate(DECIMAL '1234567890123456789012.23', -1)", createDecimalType(24, 2), SqlDecimal.of("1234567890123456789010.00")); assertFunction("truncate(DECIMAL '123456789012345678999.99', -1)", createDecimalType(23, 2), SqlDecimal.of("123456789012345678990.00")); assertFunction("truncate(DECIMAL '-123456789012345678999.99', -1)", createDecimalType(23, 2), SqlDecimal.of("-123456789012345678990.00")); assertFunction("truncate(DECIMAL '123456789012345678999.999', 2)", createDecimalType(24, 3), SqlDecimal.of("123456789012345678999.990")); assertFunction("truncate(DECIMAL '123456789012345678999.999', -2)", createDecimalType(24, 3), SqlDecimal.of("123456789012345678900.000")); assertFunction("truncate(DECIMAL '123456789012345678901', -21)", createDecimalType(21, 0), SqlDecimal.of("000000000000000000000")); assertFunction("truncate(DECIMAL '123456789012345678901.23', -21)", createDecimalType(23, 2), SqlDecimal.of("000000000000000000000.00")); assertFunction("truncate(DECIMAL '123456789012345678901.23', 3)", createDecimalType(23, 2), SqlDecimal.of("123456789012345678901.23")); assertFunction("truncate(DECIMAL '-123456789012345678901.23', 3)", createDecimalType(23, 2), SqlDecimal.of("-123456789012345678901.23")); // NULL assertFunction("truncate(CAST(NULL AS DOUBLE))", DOUBLE, null); assertFunction("truncate(CAST(NULL AS DECIMAL(1,0)), -1)", createDecimalType(1, 0), null); assertFunction("truncate(CAST(NULL AS DECIMAL(1,0)))", createDecimalType(1, 0), null); assertFunction("truncate(CAST(NULL AS DECIMAL(18,5)))", createDecimalType(13, 0), null); assertFunction("truncate(CAST(NULL AS DECIMAL(25,2)))", createDecimalType(23, 0), null); assertFunction("truncate(NULL, NULL)", createDecimalType(1, 0), null); }
// Convenience overload: resolves the interface name without the group/version suffix.
public static String getInterfaceName(Invoker invoker) { return getInterfaceName(invoker, false); }
// With the interface-group-version switch enabled, the resource name must be
// "interface:version:group".
@Test public void testGetInterfaceNameWithGroupAndVersion() throws NoSuchMethodException { URL url = URL.valueOf("dubbo://127.0.0.1:2181") .addParameter(CommonConstants.VERSION_KEY, "1.0.0") .addParameter(CommonConstants.GROUP_KEY, "grp1") .addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName()); Invoker invoker = mock(Invoker.class); when(invoker.getUrl()).thenReturn(url); when(invoker.getInterface()).thenReturn(DemoService.class); SentinelConfig.setConfig(DubboAdapterGlobalConfig.DUBBO_INTERFACE_GROUP_VERSION_ENABLED, "true"); assertEquals("com.alibaba.csp.sentinel.adapter.dubbo3.provider.DemoService:1.0.0:grp1", DubboUtils.getInterfaceName(invoker, true)); }
/**
 * Merges buffer2 into buffer1 in place by taking, register by register, the maximum
 * of the two HLL registers. Registers are bit-packed REGISTER_SIZE bits wide,
 * REGISTERS_PER_WORD per long word; idx bounds the total register count m.
 *
 * NOTE(review): the max is computed on masked signed longs — correct only while no
 * register field includes the sign bit; confirm REGISTER_SIZE * REGISTERS_PER_WORD
 * layout guarantees this.
 */
public void merge(HllBuffer buffer1, HllBuffer buffer2) { int idx = 0; int wordOffset = 0; while (wordOffset < numWords) { long word1 = buffer1.array[wordOffset]; long word2 = buffer2.array[wordOffset]; long word = 0L; int i = 0; long mask = REGISTER_WORD_MASK; while (idx < m && i < REGISTERS_PER_WORD) { word |= Math.max(word1 & mask, word2 & mask); mask <<= REGISTER_SIZE; i += 1; idx += 1; } buffer1.array[wordOffset] = word; wordOffset += 1; } }
// Merging two half-range sketches must equal a single sketch built from the full
// range (order-independence of HLL updates): lower+upper halves merged == reverse-order full scan.
@Test public void testMerge() { HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(0.05); HllBuffer buffer1a = createHllBuffer(hll); HllBuffer buffer1b = createHllBuffer(hll); HllBuffer buffer2 = createHllBuffer(hll); // Create the // Add the lower half int i = 0; while (i < 500000) { hll.updateByHashcode(buffer1a, hashInt(i, DEFAULT_SEED)); i += 1; } // Add the upper half i = 500000; while (i < 1000000) { hll.updateByHashcode(buffer1b, hashInt(i, DEFAULT_SEED)); i += 1; } // Merge the lower and upper halves. hll.merge(buffer1a, buffer1b); // Create the other buffer in reverse i = 999999; while (i >= 0) { hll.updateByHashcode(buffer2, hashInt(i, DEFAULT_SEED)); i -= 1; } assertThat(buffer2.array).isEqualTo(buffer1a.array); }
/**
 * Normalizes user info in place by trimming surrounding whitespace from the
 * login and display name, then returns the same (mutated) instance.
 *
 * @param user the user to normalize; must have non-null login and name
 * @return the same user instance, with login and name trimmed
 */
public static IUser normalizeUserInfo( IUser user ) {
  final String trimmedLogin = user.getLogin().trim();
  final String trimmedName = user.getName().trim();
  user.setLogin( trimmedLogin );
  user.setName( trimmedName );
  return user;
}
// normalizeUserInfo trims whitespace but never copies login into name, so the
// name assertion must expect "name" (the previous expectation of "login"
// contradicted the method under test).
@Test
public void normalizeUserInfo_WithSpaces() {
  IUser normalized = RepositoryCommonValidations.normalizeUserInfo( user( " login \t\n ", "name" ) );
  assertEquals( "login", normalized.getLogin() );
  assertEquals( "name", normalized.getName() );
}
/**
 * Derives a predicate Domain for a column from ORC-style column statistics:
 * none for empty stripes, all when stats are missing, onlyNull when the column has
 * zero non-null values, and otherwise a type-specific range built from min/max
 * (booleans, short/long decimals, char/varchar, date, integral, double, real).
 * Falls back to "all values, possibly null" for unsupported type/stat combinations.
 *
 * @param type the SQL type of the column
 * @param rowCount number of rows covered by the statistics
 * @param columnStatistics may be null when statistics are unavailable
 */
@VisibleForTesting public static Domain getDomain(Type type, long rowCount, ColumnStatistics columnStatistics) { if (rowCount == 0) { return Domain.none(type); } if (columnStatistics == null) { return Domain.all(type); } if (columnStatistics.hasNumberOfValues() && columnStatistics.getNumberOfValues() == 0) { return Domain.onlyNull(type); } boolean hasNullValue = columnStatistics.getNumberOfValues() != rowCount; if (type.getJavaType() == boolean.class && columnStatistics.getBooleanStatistics() != null) { BooleanStatistics booleanStatistics = columnStatistics.getBooleanStatistics(); boolean hasTrueValues = (booleanStatistics.getTrueValueCount() != 0); boolean hasFalseValues = (columnStatistics.getNumberOfValues() != booleanStatistics.getTrueValueCount()); if (hasTrueValues && hasFalseValues) { return Domain.all(BOOLEAN); } if (hasTrueValues) { return Domain.create(ValueSet.of(BOOLEAN, true), hasNullValue); } if (hasFalseValues) { return Domain.create(ValueSet.of(BOOLEAN, false), hasNullValue); } } else if (isShortDecimal(type) && columnStatistics.getDecimalStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getDecimalStatistics(), value -> rescale(value, (DecimalType) type).unscaledValue().longValue()); } else if (isLongDecimal(type) && columnStatistics.getDecimalStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getDecimalStatistics(), value -> encodeUnscaledValue(rescale(value, (DecimalType) type).unscaledValue())); } else if (isCharType(type) && columnStatistics.getStringStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getStringStatistics(), value -> truncateToLengthAndTrimSpaces(value, type)); } else if (isVarcharType(type) && columnStatistics.getStringStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getStringStatistics()); } else if (type.getTypeSignature().getBase().equals(StandardTypes.DATE) && columnStatistics.getDateStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getDateStatistics(), value -> (long) value); } else if (type.getJavaType() == long.class && columnStatistics.getIntegerStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getIntegerStatistics()); } else if (type.getJavaType() == double.class && columnStatistics.getDoubleStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getDoubleStatistics()); } else if (REAL.equals(type) && columnStatistics.getDoubleStatistics() != null) { return createDomain(type, hasNullValue, columnStatistics.getDoubleStatistics(), value -> (long) floatToRawIntBits(value.floatValue())); } return Domain.create(ValueSet.all(type), hasNullValue); }
// Verifies getDomain for CHAR columns: trailing-space trimming of min/max bounds,
// none/all/onlyNull/notNull cases, single-value vs range domains, open-ended bounds,
// and nullability when value count < row count. (Extraction line-split rejoined; no tokens changed.)
@Test public void testChar() { assertEquals(getDomain(CHAR, 0, null), Domain.none(CHAR)); assertEquals(getDomain(CHAR, 10, null), Domain.all(CHAR)); assertEquals(getDomain(CHAR, 0, stringColumnStats(null, null, null)), Domain.none(CHAR)); assertEquals(getDomain(CHAR, 0, stringColumnStats(0L, null, null)), Domain.none(CHAR)); assertEquals(getDomain(CHAR, 0, stringColumnStats(0L, "taco  ", "taco  ")), Domain.none(CHAR)); assertEquals(getDomain(CHAR, 0, stringColumnStats(0L, "taco", "taco  ")), Domain.none(CHAR)); assertEquals(getDomain(CHAR, 10, stringColumnStats(0L, null, null)), onlyNull(CHAR)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, null, null)), notNull(CHAR)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "taco  ", "taco  ")), singleValue(CHAR, utf8Slice("taco"))); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "taco", "taco  ")), singleValue(CHAR, utf8Slice("taco"))); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "apple  ", "taco  ")), create(ValueSet.ofRanges(range(CHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "apple  ", "taco")), create(ValueSet.ofRanges(range(CHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, null, "taco  ")), create(ValueSet.ofRanges(lessThanOrEqual(CHAR, utf8Slice("taco"))), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, null, "taco")), create(ValueSet.ofRanges(lessThanOrEqual(CHAR, utf8Slice("taco"))), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "apple  ", null)), create(ValueSet.ofRanges(greaterThanOrEqual(CHAR, utf8Slice("apple"))), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "apple", null)), create(ValueSet.ofRanges(greaterThanOrEqual(CHAR, utf8Slice("apple"))), false)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, "apple  ", "taco  ")), create(ValueSet.ofRanges(range(CHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, "apple  ", "taco")), create(ValueSet.ofRanges(range(CHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, null, "taco  ")), create(ValueSet.ofRanges(lessThanOrEqual(CHAR, utf8Slice("taco"))), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, null, "taco")), create(ValueSet.ofRanges(lessThanOrEqual(CHAR, utf8Slice("taco"))), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, "apple  ", null)), create(ValueSet.ofRanges(greaterThanOrEqual(CHAR, utf8Slice("apple"))), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(5L, "apple", null)), create(ValueSet.ofRanges(greaterThanOrEqual(CHAR, utf8Slice("apple"))), true)); assertEquals(getDomain(CHAR, 10, stringColumnStats(10L, "\0 ", " ")), create(ValueSet.ofRanges(range(CHAR, utf8Slice("\0"), true, utf8Slice(""), true)), false)); }
/**
 * Parses a Chinese numeral string (e.g. "一百零二") into its int value.
 * <p>
 * The parser accumulates a per-section total ({@code section}); a "section unit"
 * (e.g. 万/亿, flagged by {@code ChineseUnit.secUnit}) closes the section and folds
 * it into {@code result}, while a regular unit (十/百/千) multiplies the single digit
 * that precedes it.
 *
 * @param chinese Chinese numeral text; must contain only digit and unit characters
 * @return the parsed integer value
 * @throws IllegalArgumentException on two adjacent digits or an unknown character
 */
public static int chineseToNumber(String chinese) {
	final int length = chinese.length();
	int result = 0;
	// Running total of the current section (a section is closed by 万/亿).
	int section = 0;
	int number = 0;
	ChineseUnit unit = null;
	char c;
	for (int i = 0; i < length; i++) {
		c = chinese.charAt(i);
		final int num = chineseToNumber(c);
		if (num >= 0) {
			if (num == 0) {
				// A zero ends the pending digit+unit pairing and invalidates the
				// current unit, e.g. 两万二零一十.
				if (number > 0 && null != unit) {
					section += number * (unit.value / 10);
				}
				unit = null;
			} else if (number > 0) {
				// Two adjacent digits with no unit between them are malformed.
				throw new IllegalArgumentException(StrUtil.format("Bad number '{}{}' at: {}", chinese.charAt(i - 1), c, i));
			}
			// Plain digit: remember it until the next unit/zero decides its weight.
			number = num;
		} else {
			unit = chineseToUnit(c);
			if (null == unit) {
				// Illegal (non-digit, non-unit) character.
				throw new IllegalArgumentException(StrUtil.format("Unknown unit '{}' at: {}", c, i));
			}
			// Unit character.
			if (unit.secUnit) {
				// Section unit (万/亿): close the current section and fold into result.
				section = (section + number) * unit.value;
				result += section;
				section = 0;
			} else {
				// Regular unit: combine with the single digit that precedes it.
				int unitNumber = number;
				if (0 == number && 0 == i) {
					// issue#1726: a string starting with a unit implies a leading 1,
					// 十二 -> 一十二
					// 百二 -> 一百二
					unitNumber = 1;
				}
				section += (unitNumber * unit.value);
			}
			number = 0;
		}
	}

	// A trailing bare digit after a unit takes one tenth of that unit's weight
	// (e.g. the 二 in 一十二 ... handled via unit.value / 10).
	if (number > 0 && null != unit) {
		number = number * (unit.value / 10);
	}

	return result + section + number;
}
/** Verifies Chinese-numeral parsing for zero, mixed digit/unit forms, and multi-section numbers. */
@Test public void chineseToNumberTest() { assertEquals(0, NumberChineseFormatter.chineseToNumber("零")); assertEquals(102, NumberChineseFormatter.chineseToNumber("一百零二")); assertEquals(112, NumberChineseFormatter.chineseToNumber("一百一十二")); assertEquals(1012, NumberChineseFormatter.chineseToNumber("一千零一十二")); assertEquals(1000000, NumberChineseFormatter.chineseToNumber("一百万")); assertEquals(2000100112, NumberChineseFormatter.chineseToNumber("二十亿零一十万零一百一十二")); }
/**
 * Returns the symmetric ENCRYPT_DECRYPT CryptoKey with the given name in the given keyring,
 * creating the keyring and/or the key if they do not exist yet.
 * <p>
 * Synchronized so that concurrent callers do not race on keyring/key creation.
 * NOTE(review): the {@code kRing} lambda parameter in the listCryptoKeys filter actually
 * iterates CryptoKey entries, not keyrings — the name is misleading but the logic is correct.
 */
public synchronized CryptoKey getOrCreateCryptoKey(String keyRingId, String keyName) { // Get the keyring, creating it if it does not already exist if (keyRing == null) { maybeCreateKeyRing(keyRingId); } try (KeyManagementServiceClient client = clientFactory.getKMSClient()) { // Build the symmetric key to create. CryptoKey keyToCreate = CryptoKey.newBuilder() .setPurpose(CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT) .setVersionTemplate( CryptoKeyVersionTemplate.newBuilder() .setAlgorithm( CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION)) .build(); LOG.info("Checking if symmetric key {} already exists in KMS.", keyName); // Loop through the existing keys in the given keyring to see if the // key already exists. String newKeyName = CryptoKeyName.of(projectId, region, keyRingId, keyName).toString(); Optional<CryptoKey> existingKey = StreamSupport.stream( client.listCryptoKeys(keyRing.getName()).iterateAll().spliterator(), false) .filter(kRing -> kRing.getName().equals(newKeyName)) .findFirst(); // Create the symmetric key if it does not exist, otherwise, return the found key. CryptoKey cryptoKey; if (!existingKey.isPresent()) { LOG.info("Symmetric key {} does not exist. Creating the key in KMS.", keyName); cryptoKey = client.createCryptoKey(keyRing.getName(), keyName, keyToCreate); LOG.info("Created symmetric key {}.", cryptoKey.getName()); } else { LOG.info("Symmetric key {} already exists. Retrieving the key from KMS.", keyName); cryptoKey = existingKey.get(); LOG.info("Retrieved symmetric key {}.", cryptoKey.getName()); } return cryptoKey; } }
/** When the key is already listed in the keyring, createCryptoKey must never be invoked. */
@Test public void testGetOrCreateCryptoKeyShouldNotCreateCryptoKeyWhenItAlreadyExists() { String keyRingName = KeyRingName.of(PROJECT_ID, REGION, KEYRING_ID).toString(); KeyRing keyRing = KeyRing.newBuilder().setName(keyRingName).build(); CryptoKey cryptoKey = CryptoKey.newBuilder() .setName(CryptoKeyName.of(PROJECT_ID, REGION, KEYRING_ID, KEY_ID).toString()) .build(); when(kmsClientFactory.getKMSClient()).thenReturn(serviceClient); when(serviceClient.createKeyRing(any(LocationName.class), anyString(), any(KeyRing.class))) .thenReturn(keyRing); when(serviceClient.listCryptoKeys(keyRingName).iterateAll()) .thenReturn(ImmutableList.of(cryptoKey)); testManager.getOrCreateCryptoKey(KEYRING_ID, KEY_ID); verify(serviceClient, never()).createCryptoKey(anyString(), anyString(), any(CryptoKey.class)); }
/**
 * Resolves a schema-qualified table name to a handle.
 *
 * @return the handle, or {@code null} when either the schema or the table is unknown
 */
@Override
public ExampleTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
{
    String schema = tableName.getSchemaName();
    // Unknown schema: no handle.
    if (!listSchemaNames(session).contains(schema)) {
        return null;
    }
    // Known schema but unknown table: no handle either.
    if (exampleClient.getTable(schema, tableName.getTableName()) == null) {
        return null;
    }
    return new ExampleTableHandle(connectorId, schema, tableName.getTableName());
}
/** Known schema+table yields the expected handle; any unknown component yields null. */
@Test public void testGetTableHandle() { assertEquals(metadata.getTableHandle(SESSION, new SchemaTableName("example", "numbers")), NUMBERS_TABLE_HANDLE); assertNull(metadata.getTableHandle(SESSION, new SchemaTableName("example", "unknown"))); assertNull(metadata.getTableHandle(SESSION, new SchemaTableName("unknown", "numbers"))); assertNull(metadata.getTableHandle(SESSION, new SchemaTableName("unknown", "unknown"))); }
/**
 * Asserts that {@code text} contains at least one non-whitespace character.
 *
 * @param text    the string to check
 * @param message exception message used when the check fails
 * @throws IllegalArgumentException if {@code text} has no text per {@link StringUtil#hasText}
 */
public static void hasText(String text, String message) {
    if (StringUtil.hasText(text)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/** A whitespace-only string must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class) public void assertHasText() { Assert.hasText(" ", "text is null"); }
/**
 * Acquires one lease, blocking indefinitely until it is available (timeout of -1).
 *
 * @return the acquired lease; close it to release
 * @throws Exception on ZooKeeper/interruption errors
 */
public Lease acquire() throws Exception { String path = internals.attemptLock(-1, null, null); return makeLease(path); }
/** Closing a client must release its held leases so another client can acquire. */
@Test public void testClientClose() throws Exception { final Timing timing = new Timing(); CuratorFramework client1 = null; CuratorFramework client2 = null; InterProcessSemaphoreV2 semaphore1; InterProcessSemaphoreV2 semaphore2; try { client1 = CuratorFrameworkFactory.newClient( server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1)); client2 = CuratorFrameworkFactory.newClient( server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1)); client1.start(); client2.start(); semaphore1 = new InterProcessSemaphoreV2(client1, "/test", 1); semaphore2 = new InterProcessSemaphoreV2(client2, "/test", 1); Lease lease = semaphore2.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS); assertNotNull(lease); lease.close(); lease = semaphore1.acquire(10, TimeUnit.SECONDS); assertNotNull(lease); client1.close(); // should release any held leases client1 = null; assertNotNull(semaphore2.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS)); } finally { TestCleanState.closeAndTestClean(client1); TestCleanState.closeAndTestClean(client2); } }
/**
 * Asserts that the given class is a properly defined utility class
 * (as judged by {@link UtilityClassChecker}); throws AssertionError with a
 * Hamcrest-style expected/actual message otherwise.
 */
public static void assertThatClassIsUtility(Class<?> clazz) {
    final UtilityClassChecker checker = new UtilityClassChecker();
    if (checker.isProperlyDefinedUtilityClass(clazz)) {
        return;
    }
    // Collect the checker's expectation and mismatch descriptions for the failure message.
    final Description expected = new StringDescription();
    final Description mismatch = new StringDescription();
    checker.describeTo(expected);
    checker.describeMismatch(mismatch);
    throw new AssertionError(
            "\nExpected: is \"" + expected.toString() + "\"\n but : was \"" + mismatch.toString() + "\"");
}
/** A final class lacking a private constructor must fail the utility-class assertion with a descriptive message. */
@Test public void testFinalNoConstructorClass() throws Exception { boolean gotException = false; try { assertThatClassIsUtility(FinalNoConstructor.class); } catch (AssertionError assertion) { assertThat(assertion.getMessage(), containsString("class with a default constructor that " + "is not private")); gotException = true; } assertThat(gotException, is(true)); }
/**
 * Dispatches options-menu selections: navigation for about/tweaks,
 * dialogs for backup/restore, otherwise defers to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();
    if (itemId == R.id.about_menu_option) {
        Navigation.findNavController(requireView())
                .navigate(MainFragmentDirections.actionMainFragmentToAboutAnySoftKeyboardFragment());
        return true;
    } else if (itemId == R.id.tweaks_menu_option) {
        Navigation.findNavController(requireView())
                .navigate(MainFragmentDirections.actionMainFragmentToMainTweaksFragment());
        return true;
    } else if (itemId == R.id.backup_prefs) {
        mDialogController.showDialog(R.id.backup_prefs);
        return true;
    } else if (itemId == R.id.restore_prefs) {
        mDialogController.showDialog(R.id.restore_prefs);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/** Selecting the backup menu item shows the provider-picker dialog listing all prefs providers. */
@Test public void testBackupMenuItem() throws Exception { final MainFragment fragment = startFragment(); final FragmentActivity activity = fragment.getActivity(); Menu menu = Shadows.shadowOf(activity).getOptionsMenu(); Assert.assertNotNull(menu); final MenuItem item = menu.findItem(R.id.backup_prefs); Assert.assertNotNull(item); fragment.onOptionsItemSelected(item); TestRxSchedulers.foregroundFlushAllJobs(); final AlertDialog dialog = GeneralDialogTestUtil.getLatestShownDialog(); Assert.assertNotSame(GeneralDialogTestUtil.NO_DIALOG, dialog); Assert.assertEquals( getApplicationContext().getText(R.string.pick_prefs_providers_to_backup), GeneralDialogTestUtil.getTitleFromDialog(dialog)); final ListView dialogListView = dialog.getListView(); Assert.assertNotNull(dialogListView); Assert.assertEquals(View.VISIBLE, dialogListView.getVisibility()); final List<GlobalPrefsBackup.ProviderDetails> allPrefsProviders = GlobalPrefsBackup.getAllPrefsProviders(getApplicationContext()); Assert.assertEquals(allPrefsProviders.size(), dialogListView.getCount()); // everything is checked at first for (int providerIndex = 0; providerIndex < allPrefsProviders.size(); providerIndex++) { Assert.assertEquals( activity.getText(allPrefsProviders.get(providerIndex).providerTitle), dialogListView.getItemAtPosition(providerIndex)); } Assert.assertTrue(dialog.getButton(DialogInterface.BUTTON_NEGATIVE).callOnClick()); // no dialog here Assert.assertSame( GeneralDialogTestUtil.NO_DIALOG, GeneralDialogTestUtil.getLatestShownDialog()); }
/**
 * Parses a mode string, first as numeric (octal, e.g. "0755"), falling back to
 * symbolic form (e.g. "u=rwx") when the numeric parse fails.
 *
 * @param value the mode string; must not be blank
 * @throws IllegalArgumentException if {@code value} is blank or unparsable
 */
public static Mode parse(String value) { if (StringUtils.isBlank(value)) { throw new IllegalArgumentException(ExceptionMessage.INVALID_MODE.getMessage(value)); } try { return parseNumeric(value); } catch (NumberFormatException e) { // Treat as symbolic return parseSymbolic(value); } }
/** Partial symbolic specs set only the named classes; unnamed classes default to NONE. */
@Test public void symbolicsPartial() { Mode parsed = ModeParser.parse("u=rwx"); assertEquals(Mode.Bits.ALL, parsed.getOwnerBits()); assertEquals(Mode.Bits.NONE, parsed.getGroupBits()); assertEquals(Mode.Bits.NONE, parsed.getOtherBits()); parsed = ModeParser.parse("go=rw"); assertEquals(Mode.Bits.NONE, parsed.getOwnerBits()); assertEquals(Mode.Bits.READ_WRITE, parsed.getGroupBits()); assertEquals(Mode.Bits.READ_WRITE, parsed.getOtherBits()); parsed = ModeParser.parse("o=x"); assertEquals(Mode.Bits.NONE, parsed.getOwnerBits()); assertEquals(Mode.Bits.NONE, parsed.getGroupBits()); assertEquals(Mode.Bits.EXECUTE, parsed.getOtherBits()); }
/**
 * Convenience overload: extracts the output type of a MapFunction without a
 * function name and without allowing missing type info (delegates with null/false).
 */
@PublicEvolving public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes( MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) { return getMapReturnTypes(mapInterface, inType, null, false); }
/** Identity-mapping a Boolean[] must yield BasicArrayTypeInfo with a BOOLEAN component type. */
@Test void testBasicArray2() { RichMapFunction<Boolean[], ?> function = new IdentityMapper<Boolean[]>(); TypeInformation<?> ti = TypeExtractor.getMapReturnTypes( function, BasicArrayTypeInfo.BOOLEAN_ARRAY_TYPE_INFO); assertThat(ti).isInstanceOf(BasicArrayTypeInfo.class); BasicArrayTypeInfo<?, ?> bati = (BasicArrayTypeInfo<?, ?>) ti; assertThat(bati.getComponentInfo().isBasicType()).isTrue(); assertThat(bati.getComponentInfo()).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO); }
/**
 * Renders as "-Dkey=value ..." the configuration entries of {@code targetConfig} that are
 * new or changed relative to {@code baseConfig}. Values are quoted/escaped per OS by
 * {@code escapeForDifferentOS}.
 *
 * @param baseConfig   the baseline configuration
 * @param targetConfig the configuration whose additions/overrides should be emitted
 * @return a space-separated dynamic-property string (empty when nothing differs)
 */
public static String getDynamicPropertiesAsString(
        Configuration baseConfig, Configuration targetConfig) {
    String[] newAddedConfigs =
            targetConfig.keySet().stream()
                    .flatMap(
                            (String key) -> {
                                final String baseValue =
                                        baseConfig.get(
                                                ConfigOptions.key(key)
                                                        .stringType()
                                                        .noDefaultValue());
                                final String targetValue =
                                        targetConfig.get(
                                                ConfigOptions.key(key)
                                                        .stringType()
                                                        .noDefaultValue());
                                // Emit when the key is new, or its value changed. Null-safe
                                // comparison: previously baseValue.equals(...) could NPE if a
                                // key was present with no resolvable string value.
                                final boolean changed =
                                        !baseConfig.keySet().contains(key)
                                                || (baseValue == null
                                                        ? targetValue != null
                                                        : !baseValue.equals(targetValue));
                                if (changed) {
                                    return Stream.of(
                                            "-"
                                                    + CommandLineOptions.DYNAMIC_PROPERTY_OPTION
                                                            .getOpt()
                                                    + key
                                                    + CommandLineOptions.DYNAMIC_PROPERTY_OPTION
                                                            .getValueSeparator()
                                                    + escapeForDifferentOS(targetValue));
                                }
                                return Stream.empty();
                            })
                    .toArray(String[]::new);
    return String.join(" ", newAddedConfigs);
}
/** Only changed (key.b) and new (key.c) entries are emitted, quoted per operating system. */
@Test void testGetDynamicPropertiesAsString() { final Configuration baseConfig = new Configuration(); baseConfig.setString("key.a", "a"); baseConfig.setString("key.b", "b1"); final Configuration targetConfig = new Configuration(); targetConfig.setString("key.b", "b2"); targetConfig.setString("key.c", "c"); final String dynamicProperties = BootstrapTools.getDynamicPropertiesAsString(baseConfig, targetConfig); if (OperatingSystem.isWindows()) { assertThat(dynamicProperties).isEqualTo("-Dkey.b=\"b2\" -Dkey.c=\"c\""); } else { assertThat(dynamicProperties).isEqualTo("-Dkey.b='b2' -Dkey.c='c'"); } }
/**
 * Strips enclosing square brackets from a bracketed IPv6 literal (e.g. "[::1]" -> "::1").
 * If the input is not bracketed, or the bracketed content is not a valid IPv6 address,
 * the input is returned unchanged.
 */
@Nonnull public static String removeBracketsFromIpv6Address(@Nonnull final String address) { final String result; if (address.startsWith("[") && address.endsWith("]")) { result = address.substring(1, address.length()-1); try { Ipv6.parse(result); // The remainder is a valid IPv6 address: return the unwrapped (bracket-stripped) value. return result; } catch (IllegalArgumentException e) { // The remainder isn't a valid IPv6 address. Return the original value. return address; } } // Not a bracket-enclosed string. Return the original input. return address; }
/** A valid bracketed IPv6 literal has its brackets removed. */
@Test public void stripBracketsIpv6() throws Exception { // Setup test fixture. final String input = "[0:0:0:0:0:0:0:1]"; // Execute system under test. final String result = AuthCheckFilter.removeBracketsFromIpv6Address(input); // Verify result. assertEquals("0:0:0:0:0:0:0:1", result); }
/**
 * Deserializes an ASN.1 SET into a new instance of {@code type}, using the field set
 * declared by the type's {@code @Asn1Entity} annotation (honouring its partial flag).
 */
@Override public Object deserialize(Asn1ObjectInputStream in, Class<? extends Object> type, Asn1ObjectMapper mapper) { final Asn1Entity entity = type.getAnnotation(Asn1Entity.class); final Fields fields = new FieldSet(entity.partial(), mapper.getFields(type)); return readFields(mapper, in, fields, ObjectUtils.newInstance(type)); }
/** Two context-tagged integer fields (0x81, 0x82) deserialize into Set(1, 2). */
@Test public void shouldDeserialize() { assertEquals(new Set(1, 2), deserialize( new SetConverter(), Set.class, new byte[] { (byte) 0x81, 1, 0x01, (byte) 0x82, 1, 0x02 } )); }
/**
 * Parses the token list into a node tree.
 *
 * @return the root node, or {@code null} when there are no tokens
 * @throws ScanException on malformed input
 */
public Node parse() throws ScanException {
    if (tokenList != null && !tokenList.isEmpty()) {
        return E();
    }
    return null;
}
/** "${b:-}" parses to a VARIABLE node for "b" with an empty-string LITERAL default. */
@Test public void withEmptryDefault() throws ScanException { Tokenizer tokenizer = new Tokenizer("${b:-}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); Node witness = new Node(Node.Type.VARIABLE, new Node(Node.Type.LITERAL, "b")); witness.defaultPart = new Node(Node.Type.LITERAL, ""); assertEquals(witness, node); }
/**
 * Composes this function with a preceding one: the returned function first applies
 * {@code function} to its input, then applies this function to the result
 * (i.e. {@code this.apply(function.apply(input))}).
 *
 * @param <E> the new input type
 * @param function the function to run before this one
 */
public <E> ChainableFunction<E, T> after(Function<? super E, ? extends F> function) { return new ChainableFunction<E, T>() { @Override public T apply(E input) { return ChainableFunction.this.apply(function.apply(input)); } }; }
/** plus(7).after(parseInteger) applied to "11" is parse-then-add: 18. */
@Test public void after() { Integer result = plus(7).after(parseInteger()).apply("11"); assertThat(result).as("Adding 7 after parseInt('11')").isEqualTo(18); }
@Override public Column convert(BasicTypeDefine typeDefine) { try { return super.convert(typeDefine); } catch (SeaTunnelRuntimeException e) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String kingbaseDataType = typeDefine.getDataType().toUpperCase(); switch (kingbaseDataType) { case KB_TINYINT: builder.dataType(BasicType.BYTE_TYPE); break; case KB_MONEY: builder.dataType(new DecimalType(38, 18)); builder.columnLength(38L); builder.scale(18); break; case KB_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_CLOB: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(typeDefine.getLength()); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_BIT: builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0; builder.columnLength(byteLength); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.KINGBASE, typeDefine.getDataType(), typeDefine.getName()); } return builder.build(); } }
/** BLOB converts to a byte[] column with a fixed 1 GiB length. */
@Test public void testConvertBlob() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder().name("test").columnType("BLOB").dataType("BLOB").build(); Column column = KingbaseTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals(1024 * 1024 * 1024, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toUpperCase()); }
/**
 * Creates an address sequence whose streams rotate the starting server on each call
 * (round-robin across streams). Addresses are sanitized (null/duplicate checks) first.
 */
public static DnsServerAddresses rotational(Iterable<? extends InetSocketAddress> addresses) { return rotational0(sanitize(addresses)); }
/** Each new stream starts one server later than the previous; the fourth wraps back to the first. */
@Test public void testRotational() { DnsServerAddresses seq = DnsServerAddresses.rotational(ADDR1, ADDR2, ADDR3); DnsServerAddressStream i = seq.stream(); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); i = seq.stream(); assertNext(i, ADDR2); assertNext(i, ADDR3); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); assertNext(i, ADDR1); i = seq.stream(); assertNext(i, ADDR3); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); assertNext(i, ADDR1); assertNext(i, ADDR2); i = seq.stream(); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); assertNext(i, ADDR1); assertNext(i, ADDR2); assertNext(i, ADDR3); }
/**
 * Assigns preferred executor locations to each task group, sharing one partition-hash
 * cache across all groups so equal partitions hash consistently.
 *
 * @return one String[] of locations per task group, in input order
 */
public static String[][] assignExecutors(
    List<? extends ScanTaskGroup<?>> taskGroups, List<String> executorLocations) {
  Map<Integer, JavaHash<StructLike>> partitionHashes = Maps.newHashMap();
  String[][] locations = new String[taskGroups.size()][];
  int index = 0;
  for (ScanTaskGroup<?> taskGroup : taskGroups) {
    locations[index++] = assign(taskGroup, executorLocations, partitionHashes);
  }
  return locations;
}
/** Data tasks (non-file tasks) must receive an empty executor-location assignment. */
@TestTemplate public void testDataTasks() { List<ScanTask> tasks = ImmutableList.of( new MockDataTask(mockDataFile(Row.of(1, "a"))), new MockDataTask(mockDataFile(Row.of(2, "b"))), new MockDataTask(mockDataFile(Row.of(3, "c")))); ScanTaskGroup<ScanTask> taskGroup = new BaseScanTaskGroup<>(tasks); List<ScanTaskGroup<ScanTask>> taskGroups = ImmutableList.of(taskGroup); String[][] locations = SparkPlanningUtil.assignExecutors(taskGroups, EXECUTOR_LOCATIONS); // should not assign executors for data tasks assertThat(locations.length).isEqualTo(1); assertThat(locations[0]).isEmpty(); }
/**
 * Returns the codes of the supported field-format types.
 * NOTE(review): this returns the internal static array directly, so callers could mutate
 * shared state; consider returning a defensive copy — confirm no caller relies on identity.
 */
public static String[] getFieldFormatTypeCodes() { return fieldFormatTypeCodes; }
/**
 * Serialization round-trip for MySQLBulkLoaderMeta: maps every XML attribute to its
 * getter/setter, installs array validators for the per-field attributes, and delegates
 * to LoadSaveTester.
 */
@Test public void testRoundTrip() throws KettleException { List<String> attributes = Arrays.asList( /*"connection",*/ "schema", "table", "encoding", "delimiter", "enclosure", "escape_char", "replace", "ignore", "local", "fifo_file_name", "bulk_size", "stream_name", "field_name", "field_format_ok" ); Map<String, String> getterMap = new HashMap<String, String>(); //getterMap.put( "connection", "" ); getterMap.put( "schema", "getSchemaName" ); getterMap.put( "table", "getTableName" ); getterMap.put( "encoding", "getEncoding" ); getterMap.put( "delimiter", "getDelimiter" ); getterMap.put( "enclosure", "getEnclosure" ); getterMap.put( "escape_char", "getEscapeChar" ); getterMap.put( "replace", "isReplacingData" ); getterMap.put( "ignore", "isIgnoringErrors" ); getterMap.put( "local", "isLocalFile" ); getterMap.put( "fifo_file_name", "getFifoFileName" ); getterMap.put( "bulk_size", "getBulkSize" ); getterMap.put( "stream_name", "getFieldTable" ); getterMap.put( "field_name", "getFieldStream" ); getterMap.put( "field_format_ok", "getFieldFormatType" ); Map<String, String> setterMap = new HashMap<String, String>(); //setterMap.put( "connection", "" ); setterMap.put( "schema", "setSchemaName" ); setterMap.put( "table", "setTableName" ); setterMap.put( "encoding", "setEncoding" ); setterMap.put( "delimiter", "setDelimiter" ); setterMap.put( "enclosure", "setEnclosure" ); setterMap.put( "escape_char", "setEscapeChar" ); setterMap.put( "replace", "setReplacingData" ); setterMap.put( "ignore", "setIgnoringErrors" ); setterMap.put( "local", "setLocalFile" ); setterMap.put( "fifo_file_name", "setFifoFileName" ); setterMap.put( "bulk_size", "setBulkSize" ); setterMap.put( "stream_name", "setFieldTable" ); setterMap.put( "field_name", "setFieldStream" ); setterMap.put( "field_format_ok", "setFieldFormatType" ); Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidatorAttributeMap = new HashMap<String, FieldLoadSaveValidator<?>>(); FieldLoadSaveValidator<String[]> 
stringArrayLoadSaveValidator = new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 25 ); FieldLoadSaveValidator<int[]> fieldFormatTypeArrayLoadSaveValidator = new PrimitiveIntArrayLoadSaveValidator( new IntLoadSaveValidator( MySQLBulkLoaderMeta.getFieldFormatTypeCodes().length ), 25 ); fieldLoadSaveValidatorAttributeMap.put( "stream_name", stringArrayLoadSaveValidator ); fieldLoadSaveValidatorAttributeMap.put( "field_name", stringArrayLoadSaveValidator ); fieldLoadSaveValidatorAttributeMap.put( "field_format_ok", fieldFormatTypeArrayLoadSaveValidator ); LoadSaveTester loadSaveTester = new LoadSaveTester( MySQLBulkLoaderMeta.class, attributes, getterMap, setterMap, fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() ); loadSaveTester.testSerialization(); }
/**
 * Returns a Launcher that injects the given environment variables into every launch:
 * process launches merge the decoration env first, then the starter's own env (so the
 * starter's values win on conflict); channel launches merge similarly. kill() is delegated
 * unchanged. The input map is copied, so later mutation of {@code _env} has no effect.
 */
@NonNull public final Launcher decorateByEnv(@NonNull EnvVars _env) { final EnvVars env = new EnvVars(_env); final Launcher outer = this; return new Launcher(outer) { @Override public boolean isUnix() { return outer.isUnix(); } @Override public Proc launch(ProcStarter starter) throws IOException { EnvVars e = new EnvVars(env); if (starter.envs != null) { for (String env : starter.envs) { e.addLine(env); } } starter.envs = Util.mapToEnv(e); return outer.launch(starter); } @Override public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException { EnvVars e = new EnvVars(env); e.putAll(envVars); return outer.launchChannel(cmd, out, workDir, e); } @Override public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException { outer.kill(modelEnvVars); } }; }
/** Both the decoration env (key1) and the per-launch env (key2) must reach the child process. */
@Issue("JENKINS-15733") @Test public void decorateByEnv() throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); TaskListener l = new StreamBuildListener(baos, Charset.defaultCharset()); Launcher base = new Launcher.LocalLauncher(l); EnvVars env = new EnvVars("key1", "val1"); Launcher decorated = base.decorateByEnv(env); int res = decorated.launch().envs("key2=val2").cmds(Functions.isWindows() ? new String[] {"cmd", "/q", "/c", "echo %key1% %key2%"} : new String[] {"sh", "-c", "echo $key1 $key2"}).stdout(l).join(); String log = baos.toString(Charset.defaultCharset()); assertEquals(log, 0, res); assertTrue(log, log.contains("val1 val2")); }
/**
 * Returns the complementary value set: "all" becomes "none" and vice versa,
 * keeping the same type.
 */
@Override public ValueSet complement() { return new AllOrNoneValueSet(type, !all); }
/** complement() is an involution mapping all &lt;-&gt; none. */
@Test public void testComplement() { AllOrNoneValueSet all = AllOrNoneValueSet.all(HYPER_LOG_LOG); AllOrNoneValueSet none = AllOrNoneValueSet.none(HYPER_LOG_LOG); assertEquals(all.complement(), none); assertEquals(none.complement(), all); }
/**
 * Aborts a hanging transaction on the single partition named in {@code spec} by driving
 * an AbortTransactionHandler through the admin API driver, honouring the options timeout.
 */
@Override public AbortTransactionResult abortTransaction(AbortTransactionSpec spec, AbortTransactionOptions options) { AdminApiFuture.SimpleAdminApiFuture<TopicPartition, Void> future = AbortTransactionHandler.newFuture(Collections.singleton(spec.topicPartition())); AbortTransactionHandler handler = new AbortTransactionHandler(spec, logContext); invokeDriver(handler, future, options.timeoutMs); return new AbortTransactionResult(future.all()); }
/** A successful WriteTxnMarkers response from the partition leader completes the abort future. */
@Test public void testAbortTransaction() throws Exception { try (AdminClientUnitTestEnv env = mockClientEnv()) { TopicPartition topicPartition = new TopicPartition("foo", 13); AbortTransactionSpec abortSpec = new AbortTransactionSpec( topicPartition, 12345L, (short) 15, 200); Node leader = env.cluster().nodes().iterator().next(); expectMetadataRequest(env, topicPartition, leader); env.kafkaClient().prepareResponseFrom( request -> request instanceof WriteTxnMarkersRequest, writeTxnMarkersResponse(abortSpec, Errors.NONE), leader ); AbortTransactionResult result = env.adminClient().abortTransaction(abortSpec); assertNull(result.all().get()); } }
/**
 * Case-sensitive path match: true when wrapping the candidate in a
 * CaseSensitivePathPredicate yields an equal predicate (same path, same type).
 */
@Override public boolean test(final Path test) { return this.equals(new CaseSensitivePathPredicate(test)); }
/** Same path/type matches; differing case or path type does not, and hash codes agree with equality. */
@Test public void testPredicateTest() { final Path t = new Path("/f", EnumSet.of(Path.Type.file)); assertTrue(new CaseSensitivePathPredicate(t).test(new Path("/f", EnumSet.of(Path.Type.file)))); assertEquals(new CaseSensitivePathPredicate(t).hashCode(), new CaseSensitivePathPredicate(new Path("/f", EnumSet.of(Path.Type.file))).hashCode()); assertFalse(new CaseSensitivePathPredicate(t).test(new Path("/F", EnumSet.of(Path.Type.file)))); assertNotEquals(new CaseSensitivePathPredicate(t).hashCode(), new CaseSensitivePathPredicate(new Path("/F", EnumSet.of(Path.Type.file))).hashCode()); assertFalse(new CaseSensitivePathPredicate(t).test(new Path("/f", EnumSet.of(Path.Type.directory)))); }
/**
 * 3D distance: combines the normalized (squared) planar distance with the normalized
 * squared height difference, then takes the square root.
 */
@Override public double calcDist3D(double fromY, double fromX, double fromHeight, double toY, double toX, double toHeight) { return sqrt(calcNormalizedDist(fromY, fromX, toY, toX) + calcNormalizedDist(toHeight - fromHeight)); }
/** Purely vertical separations of 1 and 10 yield 3D distances of 1 and 10. */
@Test public void testDistance3dEuclidean() { DistanceCalcEuclidean distCalc = new DistanceCalcEuclidean(); assertEquals(1, distCalc.calcDist3D( 0, 0, 0, 0, 0, 1 ), 1e-6); assertEquals(10, distCalc.calcDist3D( 0, 0, 0, 0, 0, 10 ), 1e-6); }
/**
 * Looks up a decorator by its id.
 *
 * @return the stored decorator
 * @throws NotFoundException when no decorator with that id exists
 */
@Override
public Decorator findById(String decoratorId) throws NotFoundException {
    final Decorator decorator = coll.findOneById(decoratorId);
    if (decorator != null) {
        return decorator;
    }
    throw new NotFoundException("Decorator with id " + decoratorId + " not found.");
}
/** A non-hex id fails ObjectId validation before lookup, raising IllegalArgumentException. */
@Test public void findByIdThrowsIllegalArgumentExceptionForInvalidObjectId() throws NotFoundException { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("state should be: hexString has 24 characters"); decoratorService.findById("NOPE"); }
/**
 * Starts the wrapped load balancer, then kicks off warm-up on the executor.
 * Warm-up preparation failures never fail the caller: a TimeoutException completes the
 * callback successfully and lets warm-up continue in the background; any other error is
 * logged and warm-up continues. Only a failure of the underlying balancer start is
 * propagated to {@code callback.onError}.
 */
@Override public void start(Callback<None> callback) { LOG.info("{} enabled", _printName); Callback<None> prepareWarmUpCallback = new Callback<None>() { @Override public void onError(Throwable e) { if (e instanceof TimeoutException) { LOG.info("{} hit timeout: {}ms. The WarmUp will continue in background", _printName, _warmUpTimeoutMillis); callback.onSuccess(None.none()); } else { LOG.error("{} failed to fetch dual read mode, continuing warmup.", _printName, e); } continueWarmUp(callback); } @Override public void onSuccess(None result) { continueWarmUp(callback); } }; _loadBalancer.start(new Callback<None>() { @Override public void onError(Throwable e) { callback.onError(e); } @Override public void onSuccess(None result) { _allStartTime = _timeSupplier.get(); _executorService.submit(() -> prepareWarmUp(prepareWarmUpCallback)); } }); }
/** With warm-up disabled, starting the balancer must issue zero warm-up requests. */
@Test(retryAnalyzer = ThreeRetries.class) public void testNoMakingWarmUpRequestsWithoutWarmUp() throws URISyntaxException, InterruptedException, ExecutionException, TimeoutException { createDefaultServicesIniFiles(); TestLoadBalancer balancer = new TestLoadBalancer(); AtomicInteger requestCount = balancer.getRequestCount(); FutureCallback<None> callback = new FutureCallback<>(); balancer.start(callback); callback.get(5000, TimeUnit.MILLISECONDS); Assert.assertEquals(0, requestCount.get()); }
/**
 * Merges multiple check results into one: success when every input succeeded,
 * otherwise an error whose message joins the failing messages with commas.
 */
public static CheckResult mergeCheckResults(CheckResult... checkResults) {
    final List<CheckResult> failures =
            Arrays.stream(checkResults)
                    .filter(result -> !result.isSuccess())
                    .collect(Collectors.toList());
    if (failures.isEmpty()) {
        return CheckResult.success();
    }
    final String joinedMessages =
            failures.stream().map(CheckResult::getMsg).collect(Collectors.joining(","));
    return CheckResult.error(joinedMessages);
}
/** All-success merges to success; failures merge to an error joining their messages with commas. */
@Test public void testMergeCheckResults() { Config config = getConfig(); CheckResult checkResult1 = checkAllExists(config, "k0", "k1"); CheckResult checkResult2 = checkAtLeastOneExists(config, "k1", "k3"); CheckResult checkResult3 = checkAllExists(config, "k0", "k3"); CheckResult checkResult4 = checkAtLeastOneExists(config, "k2", "k3"); CheckResult finalResult = mergeCheckResults(checkResult1, checkResult2); Assertions.assertTrue(finalResult.isSuccess()); String errorMsg1 = "please specify [%s] as non-empty"; String errorMsg2 = "please specify at least one config of [%s] as non-empty"; finalResult = mergeCheckResults(checkResult3, checkResult2); Assertions.assertEquals(String.format(errorMsg1, "k3"), finalResult.getMsg()); finalResult = mergeCheckResults(checkResult3, checkResult4); Assertions.assertEquals( String.format(errorMsg1 + "," + errorMsg2, "k3", "k2,k3"), finalResult.getMsg()); }
/**
 * Parses a compact "id,type,latOrY,longOrX" string into a LayoutLocation.
 *
 * @throws IllegalArgumentException when the token count is not 4, the id is empty,
 *         or either coordinate is not a parsable double
 */
public static LayoutLocation fromCompactString(String s) { String[] tokens = s.split(COMMA); if (tokens.length != 4) { throw new IllegalArgumentException(E_BAD_COMPACT + s); } String id = tokens[0]; String type = tokens[1]; String latY = tokens[2]; String longX = tokens[3]; if (Strings.isNullOrEmpty(id)) { throw new IllegalArgumentException(E_BAD_COMPACT + E_EMPTY_ID); } double latOrY; double longOrX; try { latOrY = Double.parseDouble(latY); longOrX = Double.parseDouble(longX); } catch (NumberFormatException nfe) { throw new IllegalArgumentException(E_BAD_COMPACT + E_BAD_DOUBLE); } return LayoutLocation.layoutLocation(id, type, latOrY, longOrX); }
/** A non-numeric Y coordinate must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class) public void badCompactUnparsableY() { fromCompactString("foo,GEO,yyy,2.3"); }
/**
 * Builds the fixed message headers for a BRPAfnemersberichtAanDGL message, validates the
 * supplied additional headers, and lets the additional headers override the defaults.
 *
 * @param additionalHeaders caller-supplied headers; validated by {@code validateHeaders}
 */
public static MessageHeaders createAfnemersberichtAanDGLHeaders(Map<String, Object> additionalHeaders) {
    validateHeaders(additionalHeaders);
    final Map<String, Object> headers = createBasicHeaderMap();
    headers.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTION, "BRPAfnemersberichtAanDGL");
    headers.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTIVITY, "dgl:objecten:1.0");
    headers.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_TYPE, "dgl:objecten:1.0");
    headers.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_VERSION, "1.0");
    // Additional headers are applied last so they win over the defaults above.
    headers.putAll(additionalHeaders);
    return new MessageHeaders(headers);
}
/** Header creation must reject additional headers missing the mandatory sender id. */
@Test public void testSenderHeaderPresent() { Map<String, Object> map = new HashMap<>(); map.put(Headers.X_AUX_RECEIVER_ID, "receiverId"); assertThrows(IllegalArgumentException.class, () -> HeaderUtil.createAfnemersberichtAanDGLHeaders(map), "x_aux_sender_id sender header is mandatory"); }
/** Returns whether duplicate model filtering is currently enabled. */
public boolean isDuplicateFilteringEnabled() { return filterDuplicates; }
/** A freshly constructed controller has duplicate filtering off. */
@Test public void testDuplicateFilteringDisabledByDefault() { EpoxyController controller = new EpoxyController() { @Override protected void buildModels() { } }; assertFalse(controller.isDuplicateFilteringEnabled()); }
/** Acknowledges the alarm at the current wall-clock time on behalf of the given user. */
@Override public AlarmInfo ack(Alarm alarm, User user) throws ThingsboardException { return ack(alarm, System.currentTimeMillis(), user); }
/** A successful ack must save a comment, log the ALARM_ACK action, and hit the subscription service once. */
@Test public void testAck() throws ThingsboardException { var alarm = new Alarm(); when(alarmSubscriptionService.acknowledgeAlarm(any(), any(), anyLong())) .thenReturn(AlarmApiCallResult.builder().successful(true).modified(true).alarm(new AlarmInfo()).build()); service.ack(alarm, new User(new UserId(UUID.randomUUID()))); verify(alarmCommentService, times(1)).saveAlarmComment(any(), any(), any()); verify(logEntityActionService, times(1)).logEntityAction(any(), any(), any(), any(), eq(ActionType.ALARM_ACK), any()); verify(alarmSubscriptionService, times(1)).acknowledgeAlarm(any(), any(), anyLong()); }
/**
 * Convenience overload that builds the aggregate table using a freshly created
 * default AggregateParamsFactory.
 */
public static KTableHolder<GenericKey> build( final KGroupedStreamHolder groupedStream, final StreamAggregate aggregate, final RuntimeBuildContext buildContext, final MaterializedFactory materializedFactory) { return build( groupedStream, aggregate, buildContext, materializedFactory, new AggregateParamsFactory() ); }
/** An unwindowed aggregate must build its value serde with the physical aggregate schema and context. */
@Test public void shouldBuildValueSerdeCorrectlyForUnwindowedAggregate() { // Given: givenUnwindowedAggregate(); // When: aggregate.build(planBuilder, planInfo); // Then: verify(buildContext).buildValueSerde( VALUE_FORMAT, PHYSICAL_AGGREGATE_SCHEMA, MATERIALIZE_CTX ); }
/**
 * Converts a scaled {@link BigDecimal} into a {@link Timestamp} by splitting it into
 * whole seconds (offset by {@code MIN_SECONDS}) and a sub-second nanosecond remainder.
 *
 * @param bigDecimal the scaled value to convert
 * @return the corresponding {@code Timestamp}
 */
public static Timestamp toTimestamp(BigDecimal bigDecimal) {
  // Sub-second part: remainder modulo 10^9 nanoseconds.
  final BigDecimal nanoPart = bigDecimal.remainder(BigDecimal.ONE.scaleByPowerOfTen(9));
  // Whole-second part, re-based onto MIN_SECONDS.
  final BigDecimal secondsPart = bigDecimal.subtract(nanoPart).scaleByPowerOfTen(-9).add(MIN_SECONDS);
  return Timestamp.ofTimeSecondsAndNanos(secondsPart.longValue(), nanoPart.intValue());
}
@Test(expected = IllegalArgumentException.class)
public void testToTimestampThrowsExceptionWhenThereIsAnUnderflow() {
  // A negative input underflows the representable range and must be rejected.
  TimestampUtils.toTimestamp(BigDecimal.ONE.negate());
}
/**
 * Returns a copy of the array with {@code null} and empty elements removed.
 * Whitespace-only elements are retained (emptiness, not blankness, is tested).
 *
 * @param array the input array
 * @param <T>   element type
 * @return the filtered array
 */
public static <T extends CharSequence> T[] removeEmpty(T[] array) {
	return filter(array, element -> StrUtil.isNotEmpty(element));
}
@Test
public void removeEmptyTest() {
	String[] input = {"a", "b", "", null, " ", "c"};
	// Blank (whitespace-only) entries are preserved; only empty strings and nulls are removed.
	String[] expected = {"a", "b", " ", "c"};
	// JUnit convention is assertArrayEquals(expected, actual); the original call had the
	// arguments reversed, which produces misleading failure messages.
	assertArrayEquals(expected, ArrayUtil.removeEmpty(input));
}
@Override public void validateJoinRequest(JoinMessage joinMessage) { // check joining member's major.minor version is same as current cluster version's major.minor numbers MemberVersion memberVersion = joinMessage.getMemberVersion(); Version clusterVersion = node.getClusterService().getClusterVersion(); if (!memberVersion.asVersion().equals(clusterVersion)) { String msg = "Joining node's version " + memberVersion + " is not compatible with cluster version " + clusterVersion; if (clusterVersion.getMajor() != memberVersion.getMajor()) { msg += " (Rolling Member Upgrades are only supported for the same major version)"; } if (clusterVersion.getMinor() > memberVersion.getMinor()) { msg += " (Rolling Member Upgrades are only supported for the next minor version)"; } if (!BuildInfoProvider.getBuildInfo().isEnterprise()) { msg += " (Rolling Member Upgrades are only supported in Hazelcast Enterprise)"; } throw new VersionMismatchException(msg); } }
@Test
public void test_joinRequestAllowed_whenNextPatchVersion() {
    // A member one patch level ahead shares major.minor with the cluster, so the join must pass.
    MemberVersion bumpedPatchVersion = MemberVersion.of(nodeVersion.getMajor(), nodeVersion.getMinor(),
            nodeVersion.getPatch() + 1);
    JoinRequest request = new JoinRequest(Packet.VERSION, buildNumber, bumpedPatchVersion, joinAddress,
            newUnsecureUUID(), false, null, null, null, null, null);

    // Must complete without throwing VersionMismatchException.
    nodeExtension.validateJoinRequest(request);
}
/**
 * REST endpoint that lists jobs, optionally filtered by the query parameters documented
 * in the Swagger annotations below, and returns a paginated response.
 * Responds 400 when 'timersOnly' and 'messagesOnly' are combined.
 */
@ApiOperation(value = "List jobs", tags = { "Jobs" }, nickname = "listJobs")
@ApiImplicitParams({
        @ApiImplicitParam(name = "id", dataType = "string", value = "Only return job with the given id", paramType = "query"),
        @ApiImplicitParam(name = "processInstanceId", dataType = "string", value = "Only return jobs part of a process with the given id", paramType = "query"),
        @ApiImplicitParam(name = "withoutProcessInstanceId", dataType = "boolean", value = "If true, only returns jobs without a process instance id set. If false, the withoutProcessInstanceId parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "executionId", dataType = "string", value = "Only return jobs part of an execution with the given id", paramType = "query"),
        @ApiImplicitParam(name = "processDefinitionId", dataType = "string", value = "Only return jobs with the given process definition id", paramType = "query"),
        @ApiImplicitParam(name = "elementId", dataType = "string", value = "Only return jobs with the given element id", paramType = "query"),
        @ApiImplicitParam(name = "elementName", dataType = "string", value = "Only return jobs with the given element name", paramType = "query"),
        @ApiImplicitParam(name = "handlerType", dataType = "string", value = "Only return jobs with the given handler type", paramType = "query"),
        @ApiImplicitParam(name = "handlerTypes", dataType = "string", value = "Only return jobs which have one of the given job handler type", paramType = "query"),
        @ApiImplicitParam(name = "timersOnly", dataType = "boolean", value = "If true, only return jobs which are timers. If false, this parameter is ignored. Cannot be used together with 'messagesOnly'.", paramType = "query"),
        @ApiImplicitParam(name = "messagesOnly", dataType = "boolean", value = "If true, only return jobs which are messages. If false, this parameter is ignored. Cannot be used together with 'timersOnly'", paramType = "query"),
        @ApiImplicitParam(name = "withException", dataType = "boolean", value = "If true, only return jobs for which an exception occurred while executing it. If false, this parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "dueBefore", dataType = "string", format="date-time", value = "Only return jobs which are due to be executed before the given date. Jobs without duedate are never returned using this parameter.", paramType = "query"),
        @ApiImplicitParam(name = "dueAfter", dataType = "string", format="date-time", value = "Only return jobs which are due to be executed after the given date. Jobs without duedate are never returned using this parameter.", paramType = "query"),
        @ApiImplicitParam(name = "exceptionMessage", dataType = "string", value = "Only return jobs with the given exception message", paramType = "query"),
        @ApiImplicitParam(name = "tenantId", dataType = "string", value = "Only return jobs with the given tenantId.", paramType = "query"),
        @ApiImplicitParam(name = "tenantIdLike", dataType = "string", value = "Only return jobs with a tenantId like the given value.", paramType = "query"),
        @ApiImplicitParam(name = "withoutTenantId", dataType = "boolean", value = "If true, only returns jobs without a tenantId set. If false, the withoutTenantId parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "locked", dataType = "boolean", value = "If true, only return jobs which are locked. If false, this parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "unlocked", dataType = "boolean", value = "If true, only return jobs which are unlocked. If false, this parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "withoutScopeId", dataType = "boolean", value = "If true, only returns jobs without a scope id set. If false, the withoutScopeId parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "withoutScopeType", dataType = "boolean", value = "If true, only returns jobs without a scope type set. If false, the withoutScopeType parameter is ignored.", paramType = "query"),
        @ApiImplicitParam(name = "sort", dataType = "string", value = "Property to sort on, to be used together with the order.", allowableValues = "id,dueDate,executionId,processInstanceId,retries,tenantId", paramType = "query")
})
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "Indicates the requested jobs were returned."),
        @ApiResponse(code = 400, message = "Indicates an illegal value has been used in a url query parameter or the both 'messagesOnly' and 'timersOnly' are used as parameters. Status description contains additional details about the error.")
})
@GetMapping(value = "/management/jobs", produces = "application/json")
public DataResponse<JobResponse> getJobs(@ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) {
    JobQuery query = managementService.createJobQuery();

    // Translate each recognized request parameter into a query restriction;
    // absent parameters leave that dimension of the query unrestricted.
    if (allRequestParams.containsKey("id")) {
        query.jobId(allRequestParams.get("id"));
    }
    if (allRequestParams.containsKey("processInstanceId")) {
        query.processInstanceId(allRequestParams.get("processInstanceId"));
    }
    if (allRequestParams.containsKey("withoutProcessInstanceId") && Boolean.parseBoolean(allRequestParams.get("withoutProcessInstanceId"))) {
        query.withoutProcessInstanceId();
    }
    if (allRequestParams.containsKey("executionId")) {
        query.executionId(allRequestParams.get("executionId"));
    }
    if (allRequestParams.containsKey("processDefinitionId")) {
        query.processDefinitionId(allRequestParams.get("processDefinitionId"));
    }
    if (allRequestParams.containsKey("elementId")) {
        query.elementId(allRequestParams.get("elementId"));
    }
    if (allRequestParams.containsKey("elementName")) {
        query.elementName(allRequestParams.get("elementName"));
    }
    if (allRequestParams.containsKey("handlerType")) {
        query.handlerType(allRequestParams.get("handlerType"));
    }
    if (allRequestParams.containsKey("handlerTypes")) {
        // Comma-separated list of handler types.
        query.handlerTypes(Arrays.asList(allRequestParams.get("handlerTypes").split(",")));
    }
    // 'timersOnly' and 'messagesOnly' are mutually exclusive; combining them is a client error.
    if (allRequestParams.containsKey("timersOnly")) {
        if (allRequestParams.containsKey("messagesOnly")) {
            throw new FlowableIllegalArgumentException("Only one of 'timersOnly' or 'messagesOnly' can be provided.");
        }
        if (Boolean.parseBoolean(allRequestParams.get("timersOnly"))) {
            query.timers();
        }
    }
    if (allRequestParams.containsKey("messagesOnly") && Boolean.parseBoolean(allRequestParams.get("messagesOnly"))) {
        query.messages();
    }
    if (allRequestParams.containsKey("dueBefore")) {
        query.duedateLowerThan(RequestUtil.getDate(allRequestParams, "dueBefore"));
    }
    if (allRequestParams.containsKey("dueAfter")) {
        query.duedateHigherThan(RequestUtil.getDate(allRequestParams, "dueAfter"));
    }
    if (allRequestParams.containsKey("withException") && Boolean.parseBoolean(allRequestParams.get("withException"))) {
        query.withException();
    }
    if (allRequestParams.containsKey("exceptionMessage")) {
        query.exceptionMessage(allRequestParams.get("exceptionMessage"));
    }
    if (allRequestParams.containsKey("tenantId")) {
        query.jobTenantId(allRequestParams.get("tenantId"));
    }
    if (allRequestParams.containsKey("tenantIdLike")) {
        query.jobTenantIdLike(allRequestParams.get("tenantIdLike"));
    }
    if (allRequestParams.containsKey("withoutTenantId") && Boolean.parseBoolean(allRequestParams.get("withoutTenantId"))) {
        query.jobWithoutTenantId();
    }
    if (allRequestParams.containsKey("locked") && Boolean.parseBoolean(allRequestParams.get("locked"))) {
        query.locked();
    }
    if (allRequestParams.containsKey("unlocked") && Boolean.parseBoolean(allRequestParams.get("unlocked"))) {
        query.unlocked();
    }
    if (allRequestParams.containsKey("withoutScopeType") && Boolean.parseBoolean(allRequestParams.get("withoutScopeType"))) {
        query.withoutScopeType();
    }
    if (allRequestParams.containsKey("withoutScopeId") && Boolean.parseBoolean(allRequestParams.get("withoutScopeId"))) {
        query.withoutScopeId();
    }

    // Allow an installed interceptor to further restrict or veto the query.
    if (restApiInterceptor != null) {
        restApiInterceptor.accessJobInfoWithQuery(query);
    }

    return paginateList(allRequestParams, query, "id", JobQueryProperties.PROPERTIES, restResponseFactory::createJobResponseList);
}
// End-to-end test for the job collection REST resources: timer jobs, async jobs
// and dead-letter jobs, exercising every supported query parameter.
@Test
@Deployment(resources = { "org/flowable/rest/service/api/management/JobCollectionResourceTest.testTimerProcess.bpmn20.xml" })
public void testGetJobs() throws Exception {
    Calendar hourAgo = Calendar.getInstance();
    hourAgo.add(Calendar.HOUR, -1);

    Calendar inAnHour = Calendar.getInstance();
    inAnHour.add(Calendar.HOUR, 1);

    // Start process, forcing error on job-execution
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerProcess", Collections.singletonMap("error", (Object) Boolean.TRUE));

    Job timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).timers().singleResult();
    assertThat(timerJob).isNotNull();

    // Raw HTTP fetch by id: response payload must echo the job id and a timer-job URL.
    CloseableHttpResponse response = executeRequest(
            new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?id=" + timerJob.getId()),
            HttpStatus.SC_OK);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    JsonNode timerJobNode = responseNode.get("data").get(0);
    assertThat(timerJobNode.get("id").asText()).isEqualTo(timerJob.getId());
    assertThat(timerJobNode.get("url").asText()).contains("management/timer-jobs/" + timerJob.getId());

    String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION);
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch using id
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?id=" + timerJob.getId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?id=" + timerJob.getId() + "xyzzy";
    assertResultsPresentInDataResponse(url);

    // Fetch using processInstanceId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?processInstanceId=" + processInstance.getId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?processInstanceId=" + processInstance.getId() + "xyzzy";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?withoutProcessInstanceId=true";
    assertResultsPresentInDataResponse(url);

    // Fetch using executionId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?executionId=" + timerJob.getExecutionId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?executionId=" + timerJob.getExecutionId() + "xyzzy";
    assertResultsPresentInDataResponse(url);

    // Fetch using processDefinitionId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?processDefinitionId=" + timerJob.getProcessDefinitionId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?processDefinitionId=" + timerJob.getProcessDefinitionId() + "xyzzy";
    assertResultsPresentInDataResponse(url);

    // Fetch using dueBefore
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?dueBefore=" + getISODateString(inAnHour.getTime());
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?dueBefore=" + getISODateString(hourAgo.getTime());
    assertResultsPresentInDataResponse(url);

    // Fetch using dueAfter
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?dueAfter=" + getISODateString(hourAgo.getTime());
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?dueAfter=" + getISODateString(inAnHour.getTime());
    assertResultsPresentInDataResponse(url);

    // Fetch using element id and name
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?elementId=escalationTimer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?elementId=unknown";
    assertEmptyResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?elementName=Escalation";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?elementName=unknown";
    assertEmptyResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?executable";
    assertEmptyResultsPresentInDataResponse(url);

    // Tenant filtering (no tenant set yet, so tenant-specific queries come back empty)
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?tenantId=xyzzy";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?tenantIdLike=xyzzy";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?withoutTenantId=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?timersOnly=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?messagesOnly=true";
    assertResultsPresentInDataResponse(url);

    // Combining messagesOnly with timersOnly should result in exception
    closeResponse(executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION)
            + "?timersOnly=true&messagesOnly=true"), HttpStatus.SC_BAD_REQUEST));

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?withException=true";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?exceptionMessage=";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?exceptionMessage=FlowableException";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?withoutScopeId=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?handlerType=trigger-timer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?handlerType=unknown-type";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?handlerTypes=unknown-type,trigger-timer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB_COLLECTION) + "?handlerTypes=unknown-type";
    assertResultsPresentInDataResponse(url);

    // Exhaust the timer job's retries so it moves to the dead-letter queue.
    Job timerJob2 = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).timers().singleResult();
    for (int i = 0; i < timerJob2.getRetries(); i++) {
        // Force execution of job until retries are exhausted
        assertThatThrownBy(() -> {
            managementService.moveTimerToExecutableJob(timerJob2.getId());
            managementService.executeJob(timerJob2.getId());
        })
                .isExactlyInstanceOf(FlowableException.class);
    }
    timerJob = managementService.createDeadLetterJobQuery().processInstanceId(processInstance.getId()).timers().singleResult();
    assertThat(timerJob.getRetries()).isZero();

    // Fetch the async-job (which has retries left)
    Job asyncJob = managementService.createJobQuery().processInstanceId(processInstance.getId()).singleResult();

    // Test fetching all jobs
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION);
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION);
    response = executeRequest(
            new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION)),
            HttpStatus.SC_OK);
    responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    JsonNode deadletterJobNode = responseNode.get("data").get(0);
    assertThat(deadletterJobNode.get("id").asText()).isEqualTo(timerJob.getId());
    assertThat(deadletterJobNode.get("url").asText()).contains("management/deadletter-jobs/" + timerJob.getId());

    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch using job-id
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?id=" + asyncJob.getId();
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    // Fetch using processInstanceId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?processInstanceId=" + processInstance.getId();
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?processInstanceId=unexisting";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?withoutProcessInstanceId=true";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?withoutProcessInstanceId=true";
    assertResultsPresentInDataResponse(url);

    // Fetch using executionId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?executionId=" + asyncJob.getExecutionId();
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?executionId=" + timerJob.getExecutionId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch using processDefinitionId
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?processDefinitionId=" + processInstance.getProcessDefinitionId();
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?processDefinitionId=" + processInstance.getProcessDefinitionId();
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?processDefinitionId=unexisting";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?processDefinitionId=unexisting";
    assertResultsPresentInDataResponse(url);

    // Fetch using element id and name
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?elementId=escalationTimer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?elementId=unexisting";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?elementName=Escalation";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?elementName=unexisting";
    assertResultsPresentInDataResponse(url);

    // Fetch using withRetriesLeft
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?withRetriesLeft=true";
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?withoutScopeId=true";
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?withoutScopeId=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch using executable
    // url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION)
    // + "?executable=true";
    // assertResultsPresentInDataResponse(url, asyncJob.getId());

    // Fetch using timers only
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?timersOnly=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Combining messagesOnly with timersOnly should result in exception
    closeResponse(executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION)
            + "?timersOnly=true&messagesOnly=true"), HttpStatus.SC_BAD_REQUEST));

    // Fetch using withException
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?withException=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch with exceptionMessage
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?exceptionMessage=" + encode(timerJob.getExceptionMessage());
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Fetch with empty exceptionMessage
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?exceptionMessage=";
    assertResultsPresentInDataResponse(url);

    // Without tenant id, before tenant update
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?withoutTenantId=true";
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?withoutTenantId=true";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    // Set tenant on deployment
    managementService.executeCommand(new ChangeDeploymentTenantIdCmd(deploymentId, "myTenant"));

    // Without tenant id, after tenant update
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?withoutTenantId=true";
    assertResultsPresentInDataResponse(url);

    // Tenant id
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?tenantId=myTenant";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?tenantId=anotherTenant";
    assertResultsPresentInDataResponse(url);

    // Tenant id like
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?tenantIdLike=" + encode("%enant");
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?tenantIdLike=" + encode("%enant");
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?tenantIdLike=anotherTenant";
    assertResultsPresentInDataResponse(url);

    // Handler type(s)
    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?handlerType=async-continuation";
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?handlerType=unknown-type";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?handlerType=trigger-timer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?handlerType=unknown-type";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?handlerTypes=unknown-type,async-continuation";
    assertResultsPresentInDataResponse(url, asyncJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_JOB_COLLECTION) + "?handlerTypes=unknown-type";
    assertResultsPresentInDataResponse(url);

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?handlerTypes=unknown-type,trigger-timer";
    assertResultsPresentInDataResponse(url, timerJob.getId());

    url = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB_COLLECTION) + "?handlerTypes=unknown-type";
    assertResultsPresentInDataResponse(url);
}
@Override public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) { for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) { Map<String, ?> sourceOffset = offsetEntry.getValue(); if (sourceOffset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } Map<String, ?> sourcePartition = offsetEntry.getKey(); if (sourcePartition == null) { throw new ConnectException("Source partitions may not be null"); } MirrorUtils.validateSourcePartitionString(sourcePartition, SOURCE_CLUSTER_KEY); MirrorUtils.validateSourcePartitionString(sourcePartition, TOPIC_KEY); MirrorUtils.validateSourcePartitionPartition(sourcePartition); MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, false); } // We never commit offsets with our source consumer, so no additional effort is required beyond just validating // the format of the user-supplied offsets return true; }
@Test
public void testAlterOffsetsOffsetValues() {
    MirrorSourceConnector connector = new MirrorSourceConnector();
    Function<Object, Boolean> validate = offset -> connector.alterOffsets(null, Collections.singletonMap(
            sourcePartition("t", 5, "backup"),
            Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset)
    ));

    // Null, non-integer types, negatives, and string values (even numeric ones) are rejected.
    assertThrows(ConnectException.class, () -> validate.apply("nan"));
    assertThrows(ConnectException.class, () -> validate.apply(null));
    assertThrows(ConnectException.class, () -> validate.apply(new Object()));
    assertThrows(ConnectException.class, () -> validate.apply(3.14));
    assertThrows(ConnectException.class, () -> validate.apply(-420));
    assertThrows(ConnectException.class, () -> validate.apply("-420"));
    assertThrows(ConnectException.class, () -> validate.apply("10"));

    // Non-negative integral values are accepted, including values beyond the int range.
    assertTrue(() -> validate.apply(0));
    assertTrue(() -> validate.apply(10));
    assertTrue(() -> validate.apply(((long) Integer.MAX_VALUE) + 1));
}
/**
 * Schedules {@code command} for one-shot execution after {@code delay},
 * delegating directly to the backing executor.
 */
@Override
public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
    final ScheduledFuture<?> future = internalExecutor.schedule(command, delay, unit);
    return future;
}
@Test
public void testSchedule() throws Exception {
    TestRunnable task = new TestRunnable();

    // Schedule with zero delay and block until completion.
    ScheduledFuture<?> scheduled = executionService.schedule(task, 0, SECONDS);
    Object outcome = scheduled.get();

    assertTrue(task.isExecuted());
    // Runnable-based scheduling carries no result value.
    assertNull(outcome);
}
/**
 * Builds a Metadata request covering every distinct topic among the given partitions.
 * Auto topic creation is disabled: lookups must never create missing topics.
 */
@Override
public MetadataRequest.Builder buildRequest(Set<TopicPartition> partitions) {
    MetadataRequestData request = new MetadataRequestData();
    request.setAllowAutoTopicCreation(false);
    // Partitions of the same topic collapse into a single request topic.
    partitions.stream()
        .map(partition -> partition.topic())
        .distinct()
        .forEach(topicName -> {
            MetadataRequestData.MetadataRequestTopic topic =
                new MetadataRequestData.MetadataRequestTopic().setName(topicName);
            request.topics().add(topic);
        });
    return new MetadataRequest.Builder(request);
}
@Test
public void testBuildLookupRequest() {
    Set<TopicPartition> partitions = mkSet(
        new TopicPartition("foo", 0),
        new TopicPartition("bar", 0),
        new TopicPartition("foo", 1),
        new TopicPartition("baz", 0)
    );

    PartitionLeaderStrategy strategy = newStrategy();

    // A request over every partition contains each distinct topic exactly once.
    MetadataRequest fullRequest = strategy.buildRequest(partitions).build();
    assertEquals(mkSet("foo", "bar", "baz"), new HashSet<>(fullRequest.topics()));
    assertFalse(fullRequest.allowAutoTopicCreation());

    // Restricting the partitions to one topic shrinks the request accordingly.
    Set<TopicPartition> fooPartitions = partitions.stream()
        .filter(tp -> tp.topic().equals("foo"))
        .collect(Collectors.toSet());
    MetadataRequest partialRequest = strategy.buildRequest(fooPartitions).build();
    assertEquals(mkSet("foo"), new HashSet<>(partialRequest.topics()));
    assertFalse(partialRequest.allowAutoTopicCreation());
}
/**
 * Returns a transform that groups the entire input collection globally.
 *
 * @param <T> element type of the input collection
 */
public static <T> Global<T> globally() {
  final Global<T> globalGrouping = new Global<>();
  return globalGrouping;
}
// Verifies that a global aggregation over a logical (enumeration) field samples
// both enum values from the input elements.
@Test
@Category(NeedsRunner.class)
public void testAggregateLogicalValuesGlobally() {
  // Two elements with the same key but different enum values.
  Collection<BasicEnum> elements =
      Lists.newArrayList(
          BasicEnum.of("a", BasicEnum.TestEnum.ONE), BasicEnum.of("a", BasicEnum.TestEnum.TWO));

  // Sample up to 100 values; with two inputs, both values must survive.
  CombineFn<EnumerationType.Value, ?, Iterable<EnumerationType.Value>> sampleAnyCombineFn =
      Sample.anyCombineFn(100);
  Field aggField =
      Field.of("sampleList", FieldType.array(FieldType.logicalType(BASIC_ENUM_ENUMERATION)));
  pipeline
      .apply(Create.of(elements))
      .apply(
          Group.<BasicEnum>globally().aggregateField("enumeration", sampleAnyCombineFn, aggField))
      .apply(
          ParDo.of(
              new DoFn<Row, List<Integer>>() {
                @ProcessElement
                // TODO: List<enum> doesn't get converted properly by ConvertHelpers, so the
                // following line does not work. To fix this we need to move logical-type
                // conversion out of RowWithGetters and into the actual getters.
                // public void process(@FieldAccess("sampleList") List<BasicEnum.Test> values)
                // {
                public void process(@Element Row value) {
                  // The sampled array must contain both enumeration values, order-independent.
                  assertThat(
                      value.getArray(0),
                      containsInAnyOrder(
                          BASIC_ENUM_ENUMERATION.valueOf(1), BASIC_ENUM_ENUMERATION.valueOf(2)));
                }
              }));
  pipeline.run();
}
/**
 * Configures the cache to hold values with weak references, letting them be
 * garbage-collected when no longer strongly reachable elsewhere.
 *
 * @return this builder, for chaining
 */
@CanIgnoreReturnValue
public Caffeine<K, V> weakValues() {
  // A value strength (weak/soft) may only be configured once per builder.
  requireState(valueStrength == null, "Value strength was already set to %s", valueStrength);
  valueStrength = Strength.WEAK;
  return this;
}
@Test
public void weakValues() {
  // Building a cache with weak values must succeed and yield a usable instance.
  var weakValueCache = Caffeine.newBuilder().weakValues().build();
  assertThat(weakValueCache).isNotNull();
}
/**
 * Returns the type adapter for {@code type}, resolving it through the registered
 * factories on first use and caching the result for subsequent calls.
 * Handles cyclic type graphs via a per-thread map of in-progress resolutions.
 *
 * @throws NullPointerException if {@code type} is null
 * @throws IllegalArgumentException if no factory can handle {@code type}
 */
public <T> TypeAdapter<T> getAdapter(TypeToken<T> type) {
  Objects.requireNonNull(type, "type must not be null");

  // Fast path: adapter already fully resolved and published to the shared cache.
  TypeAdapter<?> cached = typeTokenCache.get(type);
  if (cached != null) {
    @SuppressWarnings("unchecked")
    TypeAdapter<T> adapter = (TypeAdapter<T>) cached;
    return adapter;
  }

  Map<TypeToken<?>, TypeAdapter<?>> threadCalls = threadLocalAdapterResults.get();
  boolean isInitialAdapterRequest = false;
  if (threadCalls == null) {
    // First getAdapter call on this thread's current resolution chain.
    threadCalls = new HashMap<>();
    threadLocalAdapterResults.set(threadCalls);
    isInitialAdapterRequest = true;
  } else {
    // Re-entrant call for a type already being resolved on this thread
    // (cyclic dependency): return the in-progress adapter to break the cycle.
    // the key and value type parameters always agree
    @SuppressWarnings("unchecked")
    TypeAdapter<T> ongoingCall = (TypeAdapter<T>) threadCalls.get(type);
    if (ongoingCall != null) {
      return ongoingCall;
    }
  }

  TypeAdapter<T> candidate = null;
  try {
    // Register a placeholder so recursive lookups for the same type resolve
    // to the future adapter instead of recursing forever.
    FutureTypeAdapter<T> call = new FutureTypeAdapter<>();
    threadCalls.put(type, call);

    // First factory that can handle the type wins.
    for (TypeAdapterFactory factory : factories) {
      candidate = factory.create(this, type);
      if (candidate != null) {
        call.setDelegate(candidate);
        // Replace future adapter with actual adapter
        threadCalls.put(type, candidate);
        break;
      }
    }
  } finally {
    // Only the outermost call clears the per-thread resolution map.
    if (isInitialAdapterRequest) {
      threadLocalAdapterResults.remove();
    }
  }

  if (candidate == null) {
    throw new IllegalArgumentException(
        "GSON (" + GsonBuildConfig.VERSION + ") cannot handle " + type);
  }

  if (isInitialAdapterRequest) {
    /*
     * Publish resolved adapters to all threads
     * Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA
     * would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA
     * See https://github.com/google/gson/issues/625
     */
    typeTokenCache.putAll(threadCalls);
  }
  return candidate;
}
/** getAdapter must reject a null type token with a descriptive NullPointerException. */
@Test
public void testGetAdapter_Null() {
  Gson gson = new Gson();
  NullPointerException thrown =
      assertThrows(NullPointerException.class, () -> gson.getAdapter((TypeToken<?>) null));
  assertThat(thrown).hasMessageThat().isEqualTo("type must not be null");
}
/**
 * Returns a typed view of the MongoDB collection with the given name.
 *
 * @param collectionName name of the underlying MongoDB collection
 * @param valueType      entity class the documents are mapped to
 * @param <T>            entity type stored in the collection
 * @return the collection, with documents codec-mapped to {@code valueType}
 */
public <T extends MongoEntity> MongoCollection<T> collection(String collectionName, Class<T> valueType) {
  return getCollection(collectionName, valueType);
}
// Round-trips a Person document and verifies that each basic field type is persisted
// with the expected BSON type and can be queried back both by type and by value.
@Test
void testBasicTypes() {
  final MongoCollection<Person> collection = collections.collection("people", Person.class);
  // Nanos/millis are truncated so the stored values compare equal after the round-trip
  // (BSON date-time precision is milliseconds).
  final Person person = new Person(
      "000000000000000000000001",
      "000000000000000000000002",
      new ObjectId("000000000000000000000003"),
      "Gary",
      ZonedDateTime.now(ZoneOffset.UTC).withNano(0),
      DateTime.now(DateTimeZone.UTC).withMillisOfSecond(0));
  final InsertOneResult insertOneResult = collection.insertOne(person);
  // The generated _id must match the String id declared on the entity.
  assertThat(insertOneResult.getInsertedId()).isNotNull().satisfies(bson ->
      assertThat(bson.asObjectId().getValue().toHexString()).isEqualTo(person.id()));
  assertThat(collection.find()).hasSize(1).allMatch(person::equals);
  // String ids are stored as ObjectId.
  assertThat(collection.find(Filters.eq("_id", new ObjectId(person.id())))).hasSize(1);
  assertThat(collection.find(Filters.type("_id", BsonType.OBJECT_ID))).hasSize(1);
  // Secondary String-typed id fields are also stored as ObjectId.
  assertThat(collection.find(Filters.type("external_id", BsonType.OBJECT_ID))).hasSize(1);
  assertThat(collection.find(Filters.eq("external_id", new ObjectId(person.externalId())))).hasSize(1);
  // Native ObjectId fields.
  assertThat(collection.find(Filters.type("object_id", BsonType.OBJECT_ID))).hasSize(1);
  assertThat(collection.find(Filters.eq("object_id", person.objectId()))).hasSize(1);
  // Plain strings.
  assertThat(collection.find(Filters.type("first_name", BsonType.STRING))).hasSize(1);
  assertThat(collection.find(Filters.eq("first_name", person.firstName()))).hasSize(1);
  // java.time dates map to BSON date-time.
  assertThat(collection.find(Filters.type("created_at", BsonType.DATE_TIME))).hasSize(1);
  assertThat(collection.find(Filters.eq("created_at", person.createdAt()))).hasSize(1);
  // Joda-Time dates map to BSON date-time as well.
  assertThat(collection.find(Filters.type("last_modified_at", BsonType.DATE_TIME))).hasSize(1);
  assertThat(collection.find(Filters.eq("last_modified_at", person.lastModifiedAt()))).hasSize(1);
}
/**
 * Converts the string form of a property value to its native type, special-casing
 * Lucene-indexed {@code Date} fields, which are encoded with {@link DateTools}.
 * All other fields fall through to the default conversion.
 */
@Override
public Object convertToPropertyType(Class<?> entityType, String[] propertyPath, String value) {
  IndexValueFieldDescriptor descriptor = getValueFieldDescriptor(entityType, propertyPath);
  boolean isIndexedDateField =
      descriptor != null && Date.class == descriptor.type().dslArgumentClass();
  if (!isIndexedDateField) {
    // Unknown field or non-Date field: use the default conversion.
    return super.convertToPropertyType(entityType, propertyPath, value);
  }
  try {
    // Dates are stored in Lucene's string encoding.
    return DateTools.stringToDate(value);
  } catch (ParseException e) {
    throw new ParsingException(e);
  }
}
/** A long-typed property given as a string should convert to the boxed Long value. */
@Test
public void testConvertLongProperty() {
  assertThat(convertToPropertyType(TestEntity.class, "l", "42")).isEqualTo(42L);
}
/**
 * Returns the number of elements in the JSON array located at the given path,
 * blocking until the asynchronous lookup completes.
 *
 * @param path path to the JSON array
 * @return the array size
 */
@Override
public long arraySize(String path) {
  // Synchronous facade over the async variant.
  return get(arraySizeAsync(path));
}
/** arraySize should report the element count of a nested JSON array, in both path syntaxes. */
@Test
public void testArraySize() {
  RJsonBucket<TestType> bucket = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class));
  // Store an object whose nested list has exactly three entries.
  TestType stored = new TestType();
  NestedType nested = new NestedType();
  nested.setValues(Arrays.asList("t1", "t2", "t4"));
  stored.setType(nested);
  bucket.set(stored);
  // Legacy dotted path returns a single size.
  long size = bucket.arraySize("type.values");
  assertThat(size).isEqualTo(3);
  // JSONPath syntax returns one size per matched array.
  List<Long> sizes = bucket.arraySizeMulti("$.type.values");
  assertThat(sizes).containsExactly(3L);
}
/**
 * Creates an image XObject from the image file at the given path, dispatching to the
 * appropriate factory based on the file extension.
 *
 * @param imagePath path of the image file
 * @param doc       the document the image will belong to
 * @return the created image XObject
 * @throws IOException if the file cannot be read or its contents cannot be decoded
 */
public static PDImageXObject createFromFile(String imagePath, PDDocument doc) throws IOException {
  return createFromFileByExtension(new File(imagePath), doc);
}
/** createFromFile should route each image format to the matching specialized factory. */
@Test
void testCreateFromFile() throws IOException, URISyntaxException {
  // Group-4 TIFF is handled by the CCITT factory.
  testCompareCreatedFileWithCreatedByCCITTFactory("ccittg4.tif");
  // JPEGs (including CMYK) are handled by the JPEG factory.
  for (String jpegName : new String[] {"jpeg.jpg", "jpegcmyk.jpg"}) {
    testCompareCreatedFileWithCreatedByJPEGFactory(jpegName);
  }
  // Everything else goes through the lossless factory.
  for (String losslessName : new String[] {
      "gif.gif", "gif-1bit-transparent.gif", "png_indexed_8bit_alpha.png", "png.png", "lzw.tif"}) {
    testCompareCreatedFileWithCreatedByLosslessFactory(losslessName);
  }
}
/**
 * Returns whether this result represents a successful outcome.
 *
 * @return {@code true} if the operation succeeded, {@code false} otherwise
 */
public boolean isSuccess() {
  return isSuccess;
}
/** The success flag set on the result should be reflected by isSuccess(). */
@Test
public void testIsSuccess() {
  result.setSuccess(true);
  assertTrue(result.isSuccess());
}
/**
 * Returns a transform that redistributes elements arbitrarily across workers,
 * with no sharding hint and without preserving element ordering.
 *
 * @param <T> element type of the input collection
 * @return the redistribute transform
 */
public static <T> RedistributeArbitrarily<T> arbitrarily() {
  // null num-buckets = let the runner decide; false = ordering need not be preserved.
  return new RedistributeArbitrarily<>(null, false);
}
/** Redistribute must preserve contents and windowing after session-windowed GroupByKey. */
@Test
@Category(ValidatesRunner.class)
public void testRedistributeAfterSessionsAndGroupByKey() {
  // Group keyed test data inside 10-minute session windows.
  PCollection<KV<String, Iterable<Integer>>> grouped =
      pipeline
          .apply(
              Create.of(GBK_TESTABLE_KVS)
                  .withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())))
          .apply(Window.into(Sessions.withGapDuration(Duration.standardMinutes(10))))
          .apply(GroupByKey.create());
  PCollection<KV<String, Iterable<Integer>>> redistributed =
      grouped.apply(Redistribute.arbitrarily());
  // Redistribution is a no-op semantically: same contents, same windowing strategy.
  PAssert.that(redistributed).satisfies(new AssertThatHasExpectedContents());
  assertEquals(grouped.getWindowingStrategy(), redistributed.getWindowingStrategy());
  pipeline.run();
}
@Override @Nullable public User load(final String username) { LOG.debug("Loading user {}", username); // special case for the locally defined user, we don't store that in MongoDB. if (!configuration.isRootUserDisabled() && configuration.getRootUsername().equals(username)) { LOG.debug("User {} is the built-in admin user", username); return userFactory.createLocalAdminUser(roleService.getAdminRoleObjectId()); } final DBObject query = new BasicDBObject(); query.put(UserImpl.USERNAME, username); final List<DBObject> result = query(UserImpl.class, query); if (result == null || result.isEmpty()) { return null; } if (result.size() > 1) { final String msg = "There was more than one matching user for username " + username + ". This should never happen."; LOG.error(msg); throw new DuplicateUserException(msg); } final DBObject userObject = result.get(0); final Object userId = userObject.get("_id"); LOG.debug("Loaded user {}/{} from MongoDB", username, userId); return userFactory.create((ObjectId) userId, userObject.toMap()); }
// Loading a username that matches more than one persisted user must fail.
// NOTE(review): the thrown type is presumably DuplicateUserException (an unchecked
// exception), hence expecting RuntimeException here — confirm against UserServiceImpl#load.
@Test(expected = RuntimeException.class)
@MongoDBFixtures("UserServiceImplTest.json")
public void testLoadDuplicateUser() throws Exception {
  userService.load("user-duplicate");
}
/**
 * Performs one duty cycle of the conductor: timers, client commands, the internal command
 * queue, stream-position tracking, name resolution, and end-of-life resource cleanup.
 *
 * @return the amount of work done this cycle (0 allows the idle strategy to back off)
 */
public int doWork() {
  final long nowNs = nanoClock.nanoTime();
  trackTime(nowNs);
  int workCount = 0;
  workCount += processTimers(nowNs);
  // Skip polling for new client commands while an async command is still in flight.
  if (!asyncClientCommandInFlight) {
    workCount += clientCommandAdapter.receive();
  }
  workCount += drainCommandQueue();
  // Position tracking is given the work done so far this cycle along with the cycle timestamp.
  workCount += trackStreamPositions(workCount, nowNs);
  workCount += nameResolver.doWork(cachedEpochClock.time());
  // Free retired resources, bounded per cycle by the configured limit.
  workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());
  return workCount;
}
/** Session-scoped unreliable subscriptions must coexist with a reliable wildcard one. */
@Test
void shouldNotErrorWhenConflictingUnreliableSessionSpecificSubscriptionAddedToDifferentSessionsVsWildcard() {
  // Unreliable subscription scoped to session 1024.
  final long sessionScopedId = driverProxy.addSubscription(CHANNEL_4000 + "|session-id=1024|reliable=false", STREAM_ID_1);
  driverConductor.doWork();
  // Reliable wildcard (no session) subscription on the same channel and stream.
  final long wildcardId = driverProxy.addSubscription(CHANNEL_4000 + "|reliable=true", STREAM_ID_1);
  driverConductor.doWork();
  // Unreliable subscription scoped to a different session, 1025.
  final long otherSessionScopedId = driverProxy.addSubscription(CHANNEL_4000 + "|session-id=1025|reliable=false", STREAM_ID_1);
  driverConductor.doWork();
  // All three must be acknowledged as ready rather than rejected as conflicting.
  verify(mockClientProxy).onSubscriptionReady(eq(sessionScopedId), anyInt());
  verify(mockClientProxy).onSubscriptionReady(eq(wildcardId), anyInt());
  verify(mockClientProxy).onSubscriptionReady(eq(otherSessionScopedId), anyInt());
}
/**
 * Validates the step configuration, appending a remark for every JMS producer option
 * whose value cannot be parsed as the expected type.
 */
@Override
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
                   RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info,
                   VariableSpace space, Repository repository, IMetaStore metaStore ) {
  // Run the standard base-class checks first.
  super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );
  // Resolve the localized option labels used in validation messages.
  final String disableMessageIdLabel = getString( PKG, "JmsDialog.Options.DISABLE_MESSAGE_ID" );
  final String disableTimestampLabel = getString( PKG, "JmsDialog.Options.DISABLE_MESSAGE_TIMESTAMP" );
  final String deliveryModeLabel = getString( PKG, "JmsDialog.Options.DELIVERY_MODE" );
  final String priorityLabel = getString( PKG, "JmsDialog.Options.PRIORITY" );
  final String timeToLiveLabel = getString( PKG, "JmsDialog.Options.TIME_TO_LIVE" );
  final String deliveryDelayLabel = getString( PKG, "JmsDialog.Options.DELIVERY_DELAY" );
  // Each option must parse as the type the JMS API expects.
  StepOption.checkBoolean( remarks, stepMeta, space, disableMessageIdLabel, disableMessageId );
  StepOption.checkBoolean( remarks, stepMeta, space, disableTimestampLabel, disableMessageTimestamp );
  StepOption.checkInteger( remarks, stepMeta, space, deliveryModeLabel, deliveryMode );
  StepOption.checkInteger( remarks, stepMeta, space, priorityLabel, prio );
  StepOption.checkLong( remarks, stepMeta, space, timeToLiveLabel, timeToLive );
  StepOption.checkLong( remarks, stepMeta, space, deliveryDelayLabel, deliveryDelay );
}
// check() should emit one remark per unparseable option (naming the offending option),
// and no remarks when every option parses as its expected type.
@Test
public void testCheck() {
  List<CheckResultInterface> remarks = new ArrayList<>();
  JmsProducerMeta jmsProducerMeta = new JmsProducerMeta();
  // All six options set to values that fail boolean/int/long parsing.
  jmsProducerMeta.setDisableMessageId( "asdf" );
  jmsProducerMeta.setDisableMessageTimestamp( "asdf" );
  jmsProducerMeta.setDeliveryMode( "asdf" );
  jmsProducerMeta.setPriority( "asdf" );
  jmsProducerMeta.setTimeToLive( "asdf" );
  jmsProducerMeta.setDeliveryDelay( "asdf" );
  jmsProducerMeta.check( remarks, null, null, null, null, null, null, new Variables(), null, null );
  assertEquals( 6, remarks.size() );
  // Each remark must mention the localized label of the option it flags.
  assertTrue( remarks.get( 0 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.DISABLE_MESSAGE_ID" ) ) );
  assertTrue( remarks.get( 1 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.DISABLE_MESSAGE_TIMESTAMP" ) ) );
  assertTrue( remarks.get( 2 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.DELIVERY_MODE" ) ) );
  assertTrue( remarks.get( 3 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.PRIORITY" ) ) );
  assertTrue( remarks.get( 4 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.TIME_TO_LIVE" ) ) );
  assertTrue( remarks.get( 5 ).getText()
    .contains( BaseMessages.getString( JmsProducerMeta.class, "JmsDialog.Options.DELIVERY_DELAY" ) ) );
  // Well-formed values must produce no remarks at all.
  remarks = new ArrayList<>();
  jmsProducerMeta.setDisableMessageId( "true" );
  jmsProducerMeta.setDisableMessageTimestamp( "false" );
  jmsProducerMeta.setDeliveryMode( "1" );
  jmsProducerMeta.setPriority( "2" );
  jmsProducerMeta.setTimeToLive( "3" );
  jmsProducerMeta.setDeliveryDelay( "4" );
  jmsProducerMeta.check( remarks, null, null, null, null, null, null, new Variables(), null, null );
  assertEquals( 0, remarks.size() );
}
/**
 * Returns the future's value if it has completed successfully; otherwise returns
 * {@code null}. A null future, a cancelled future, and an exceptionally completed
 * future are all treated as "no value". Blocks (via {@code join}) if the future is
 * still in flight.
 */
static @Nullable <V> V getWhenSuccessful(@Nullable CompletableFuture<V> future) {
  if (future == null) {
    return null;
  }
  try {
    return future.join();
  } catch (CancellationException | CompletionException ignored) {
    // Failed or cancelled futures are reported the same as an absent value.
    return null;
  }
}
// Verifies that getWhenSuccessful blocks on an incomplete future and returns the value
// once the future is completed from another thread.
@Test
public void getWhenSuccessful_success_async() {
  var future = new CompletableFuture<Integer>();
  var result = new AtomicInteger();
  ConcurrentTestHarness.execute(() -> {
    result.set(1); // signal that the worker thread has started
    result.set(Async.getWhenSuccessful(future)); // blocks until the future completes
  });
  await().untilAtomic(result, is(1)); // wait until the worker is parked on join()
  future.complete(2);
  await().untilAtomic(result, is(2)); // worker observed the completed value
}
/**
 * Loads bundled and external plugins and installs any freshly downloaded ones,
 * failing with a user-facing {@code MessageException} on key conflicts, then
 * unloads plugins whose requirements are not met.
 *
 * @return the plugins to load (external plugins shadowed by bundled ones with the same key)
 */
public Collection<ServerPluginInfo> loadPlugins() {
  // Phase 1: bundled plugins. Two jars with the same key is a packaging error.
  Map<String, ServerPluginInfo> bundledPluginsByKey = new LinkedHashMap<>();
  for (ServerPluginInfo bundled : getBundledPluginsMetadata()) {
    failIfContains(bundledPluginsByKey, bundled,
      plugin -> MessageException.of(format("Found two versions of the plugin %s [%s] in the directory %s. Please remove one of %s or %s.",
        bundled.getName(), bundled.getKey(), getRelativeDir(fs.getInstalledBundledPluginsDir()), bundled.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
    bundledPluginsByKey.put(bundled.getKey(), bundled);
  }
  // Phase 2: external plugins. They may neither shadow a bundled plugin nor duplicate each other.
  Map<String, ServerPluginInfo> externalPluginsByKey = new LinkedHashMap<>();
  for (ServerPluginInfo external : getExternalPluginsMetadata()) {
    failIfContains(bundledPluginsByKey, external,
      plugin -> MessageException.of(format("Found a plugin '%s' in the directory '%s' with the same key [%s] as a built-in feature '%s'. Please remove '%s'.",
        external.getName(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getKey(), plugin.getName(),
        new File(getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName()))));
    failIfContains(externalPluginsByKey, external,
      plugin -> MessageException.of(format("Found two versions of the plugin '%s' [%s] in the directory '%s'. Please remove %s or %s.",
        external.getName(), external.getKey(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
    externalPluginsByKey.put(external.getKey(), external);
  }
  // Phase 3: downloaded plugins are promoted into the external directory,
  // replacing an existing external plugin with the same key (an update).
  for (PluginInfo downloaded : getDownloadedPluginsMetadata()) {
    failIfContains(bundledPluginsByKey, downloaded,
      plugin -> MessageException.of(format("Fail to update plugin: %s. Built-in feature with same key already exists: %s. 
Move or delete plugin from %s directory",
        plugin.getName(), plugin.getKey(), getRelativeDir(fs.getDownloadedPluginsDir()))));
    ServerPluginInfo installedPlugin;
    if (externalPluginsByKey.containsKey(downloaded.getKey())) {
      // Update: remove the old jar before moving the new one into place.
      deleteQuietly(externalPluginsByKey.get(downloaded.getKey()).getNonNullJarFile());
      installedPlugin = moveDownloadedPluginToExtensions(downloaded);
      LOG.info("Plugin {} [{}] updated to version {}", installedPlugin.getName(), installedPlugin.getKey(), installedPlugin.getVersion());
    } else {
      installedPlugin = moveDownloadedPluginToExtensions(downloaded);
      LOG.info("Plugin {} [{}] installed", installedPlugin.getName(), installedPlugin.getKey());
    }
    externalPluginsByKey.put(downloaded.getKey(), installedPlugin);
  }
  // Merge: bundled entries are put last so they win over external ones with the same key.
  Map<String, ServerPluginInfo> plugins = new HashMap<>(externalPluginsByKey.size() + bundledPluginsByKey.size());
  plugins.putAll(externalPluginsByKey);
  plugins.putAll(bundledPluginsByKey);
  // Drop plugins whose runtime requirements are not satisfied.
  PluginRequirementsValidator.unloadIncompatiblePlugins(plugins);
  return plugins.values();
}
/** Two bundled jars sharing a plugin key must abort loading with a message naming both jars. */
@Test
public void fail_if_bundled_plugins_have_same_key() throws IOException {
  File firstJar = createJar(fs.getInstalledBundledPluginsDir(), "plugin1", "main", null);
  File secondJar = createJar(fs.getInstalledBundledPluginsDir(), "plugin1", "main", null);
  String bundledDir = getDirName(fs.getInstalledBundledPluginsDir());
  assertThatThrownBy(() -> underTest.loadPlugins())
    .isInstanceOf(MessageException.class)
    .hasMessageContaining("Found two versions of the plugin plugin1 [plugin1] in the directory " + bundledDir + ". Please remove one of ")
    .hasMessageContaining(secondJar.getName())
    .hasMessageContaining(firstJar.getName());
}