focal_method: string (length 13 – 60.9k)
test_case: string (length 25 – 109k)
/**
 * Returns the plugins whose keywords match every whitespace-split, lower-cased term of
 * {@code query}, ordered by the relevance comparator built from the original query.
 */
public static <T extends SearchablePlugin> List<T> search(Collection<T> searchablePlugins, String query) {
    // Lower-case once; the (lazy) SPLITTER view over it is re-iterated per plugin.
    final String lowered = query.toLowerCase();
    return searchablePlugins.stream()
            .filter(candidate -> Text.matchesSearchTerms(SPLITTER.split(lowered), candidate.getKeywords()))
            .sorted(comparator(query))
            .collect(Collectors.toList());
}
// Mixed-case query must match keyword search case-insensitively and return both matching plugins.
@Test public void searchReturnsMatchingPlugins() { List<SearchablePlugin> results = PluginSearch.search(plugins.values(), "sTATus"); assertThat(results, hasSize(2)); assertThat(results, containsInAnyOrder(plugins.get("Discord"), plugins.get("Status Bars"))); }
/**
 * Parses "logicTable:dataNode1,dataNode2,..." into a {@link JobDataNodeEntry}.
 * Each comma-separated segment (empty ones skipped) is parsed as a schema-qualified data node.
 */
public static JobDataNodeEntry unmarshal(final String text) {
    final List<String> parts = Splitter.on(":").splitToList(text);
    final List<DataNode> nodes = new LinkedList<>();
    Splitter.on(",").omitEmptyStrings().splitToList(parts.get(1))
            .forEach(node -> nodes.add(DataNodeUtils.parseWithSchema(node)));
    return new JobDataNodeEntry(parts.get(0), nodes);
}
// Round-trip parse of a schema-qualified entry: both data nodes keep datasource, schema and table.
@Test void assertUnmarshalWithSchema() { JobDataNodeEntry actual = JobDataNodeEntry.unmarshal("t_order:ds_0.public.t_order_0,ds_1.test.t_order_1"); assertThat(actual.getLogicTableName(), is("t_order")); assertNotNull(actual.getDataNodes()); assertThat(actual.getDataNodes().size(), is(2)); DataNode first = actual.getDataNodes().get(0); assertThat(first.getDataSourceName(), is("ds_0")); assertThat(first.getSchemaName(), is("public")); assertThat(first.getTableName(), is("t_order_0")); DataNode second = actual.getDataNodes().get(1); assertThat(second.getDataSourceName(), is("ds_1")); assertThat(second.getSchemaName(), is("test")); assertThat(second.getTableName(), is("t_order_1")); }
// Loads the SPI implementation of {@code service} via the given class loader,
// delegating to the per-service cached loader (second arg {@code true}: see loader impl).
public static <S> S load(Class<S> service, ClassLoader loader) throws EnhancedServiceNotFoundException { return InnerEnhancedServiceLoader.getServiceLoader(service).load(loader, true); }
// Loading by activate-name must pick the EnglishHello implementation.
@Test public void testLoadByClassAndActivateName() { Hello englishHello = EnhancedServiceLoader.load(Hello.class, "EnglishHello"); assertThat(englishHello.say()).isEqualTo("hello!"); }
// Validates then sets the collection name; returns this for fluent chaining.
// validateName throws IllegalArgumentException for names outside the allowed charset.
@Override public VectorCollectionConfig setName(String name) { validateName(name); this.name = name; return this; }
// A name containing disallowed characters must be rejected with the documented message.
@Test public void setNameValidation_failed() { assertThatThrownBy(() -> new VectorCollectionConfig().setName("asd*^")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("The name of the vector collection should " + "only consist of letters, numbers, and the symbols \"-\", \"_\" or \"*\"."); }
// Convenience overload: analyze without the extra flag (false — meaning defined by the two-arg overload).
public Analysis analyze(Statement statement) { return analyze(statement, false); }
// IS NOT NULL in a JOIN ON clause must analyze without error (no assertion needed beyond no-throw).
@Test public void testNotNullInJoinClause() { analyze("SELECT * FROM (VALUES (1)) a (x) JOIN (VALUES (2)) b ON a.x IS NOT NULL"); }
// Formats with default options (the predicate marks no identifiers for special handling).
public static String formatExpression(final Expression expression) { return formatExpression(expression, FormatOptions.of(s -> false)); }
// Simple CASE with an ELSE branch must render as the parenthesized SQL CASE form.
@Test public void shouldFormatSimpleCaseExpressionWithDefaultValue() { final SimpleCaseExpression expression = new SimpleCaseExpression( new StringLiteral("operand"), Collections.singletonList( new WhenClause(new StringLiteral("foo"), new LongLiteral(1))), Optional.of(new LongLiteral(2))); assertThat(ExpressionFormatter.formatExpression(expression), equalTo("(CASE 'operand' WHEN 'foo' THEN 1 ELSE 2 END)")); }
// Infers a Beam Schema from a Java bean class; field types come from the supplied strategy
// (e.g. getters vs. fields) via the shared static-inference helper.
public static Schema schemaFromJavaBeanClass( TypeDescriptor<?> typeDescriptor, FieldValueTypeSupplier fieldValueTypeSupplier) { return StaticSchemaInference.schemaFromClass(typeDescriptor, fieldValueTypeSupplier); }
// Bean with a nested collection must infer a schema equivalent to the expected fixture schema.
@Test public void testNestedCollection() { Schema schema = JavaBeanUtils.schemaFromJavaBeanClass( new TypeDescriptor<NestedCollectionBean>() {}, GetterTypeSupplier.INSTANCE); SchemaTestUtils.assertSchemaEquivalent(NESTED_COLLECTION_BEAN_SCHEMA, schema); }
// Condition holds only when external (non-embedded) storage is configured.
@Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { return !DatasourceConfiguration.isEmbeddedStorage(); }
// Condition must be the negation of isEmbeddedStorage().
// Fix: close the static mock via try-with-resources so it is deregistered even when an
// assertion fails; a leaked MockedStatic poisons every later test that stubs the same class.
@Test void testMatches() {
    try (MockedStatic<DatasourceConfiguration> mockedStatic = Mockito.mockStatic(DatasourceConfiguration.class)) {
        mockedStatic.when(DatasourceConfiguration::isEmbeddedStorage).thenReturn(true);
        assertFalse(conditionOnExternalStorage.matches(context, metadata));
        mockedStatic.when(DatasourceConfiguration::isEmbeddedStorage).thenReturn(false);
        assertTrue(conditionOnExternalStorage.matches(context, metadata));
    }
}
// Returns the calling thread's BufferPool. The thread-local holds only a WeakReference;
// the strongReferences map (keyed by thread) keeps the pool alive while the owner lives.
// A non-null ref whose referent was collected means the owner was shut down — then the
// configured not-active exception is thrown instead of silently recreating a pool.
public BufferPool get() { WeakReference<BufferPool> ref = threadLocal.get(); if (ref == null) { BufferPool pool = bufferPoolFactory.create(serializationService); ref = new WeakReference<>(pool); strongReferences.put(Thread.currentThread(), pool); threadLocal.set(ref); return pool; } else { BufferPool pool = ref.get(); if (pool == null) { throw notActiveExceptionSupplier.get(); } return pool; } }
// Two independent BufferPoolThreadLocal instances must hand out distinct pools, even on the same thread.
@Test public void get_whenDifferentThreadLocals_thenDifferentInstances() { BufferPoolThreadLocal bufferPoolThreadLocal2 = new BufferPoolThreadLocal( serializationService, new BufferPoolFactoryImpl(), null); BufferPool pool1 = bufferPoolThreadLocal.get(); BufferPool pool2 = bufferPoolThreadLocal2.get(); assertNotSame(pool1, pool2); }
// Resolves the nameservice id for this NameNode from the RPC address configuration key.
public static String getNamenodeNameServiceId(Configuration conf) { return getNameServiceId(conf, DFS_NAMENODE_RPC_ADDRESS_KEY); }
// Config with the RPC address set for nn1 must resolve to nameservice id "nn1".
@Test public void getNameNodeNameServiceId() { Configuration conf = setupAddress(DFS_NAMENODE_RPC_ADDRESS_KEY); assertEquals("nn1", DFSUtil.getNamenodeNameServiceId(conf)); }
// Static factory for a RowCoder over the given schema.
public static RowCoder of(Schema schema) { return new RowCoder(schema); }
// If the coder claims consistentWithEquals, two rows with equal byte[] content must encode equal.
// Assume (not assert) the claim so the test is skipped when the coder doesn't make it.
@Test public void testConsistentWithEqualsBytesField() throws Exception { Schema schema = Schema.of(Schema.Field.of("f1", FieldType.BYTES)); Row row1 = Row.withSchema(schema).addValue(new byte[] {1, 2, 3, 4}).build(); Row row2 = Row.withSchema(schema).addValue(new byte[] {1, 2, 3, 4}).build(); RowCoder coder = RowCoder.of(schema); Assume.assumeTrue(coder.consistentWithEquals()); CoderProperties.coderConsistentWithEquals(coder, row1, row2); }
// Starts a file-watch service over the ACL directory/file: single-file changes reload that
// file, a change in the number of files triggers a full reload. isWatchStart is only set
// when the service started successfully; startup failure is logged, not rethrown (best-effort).
private void watch() { try { AclFileWatchService aclFileWatchService = new AclFileWatchService(defaultAclDir, defaultAclFile, new AclFileWatchService.Listener() { @Override public void onFileChanged(String aclFileName) { load(aclFileName); } @Override public void onFileNumChanged(String path) { load(); } }); aclFileWatchService.start(); log.info("Succeed to start AclFileWatchService"); this.isWatchStart = true; } catch (Exception e) { log.error("Failed to start AclWatcherService", e); } }
// Integration test: write a plain ACL yaml under rocketmq.home.dir, wait for the watch
// service to load it, then rewrite the account on disk and verify the in-memory tables
// are reloaded with the new credentials. Sleeps give the watcher time to fire.
@Test public void testWatch() throws IOException, IllegalAccessException, InterruptedException {
    String fileName = Joiner.on(File.separator).join(new String[]{System.getProperty("rocketmq.home.dir"), "conf", "acl", "plain_acl_test.yml"});
    File transport = new File(fileName);
    transport.delete();
    transport.createNewFile();
    FileWriter writer = new FileWriter(transport);
    writer.write("accounts:\r\n");
    writer.write("- accessKey: watchrocketmqx\r\n");
    writer.write(" secretKey: 12345678\r\n");
    writer.write(" whiteRemoteAddress: 127.0.0.1\r\n");
    writer.write(" admin: true\r\n");
    writer.flush();
    writer.close();
    Thread.sleep(1000);
    PlainPermissionManager plainPermissionManager = new PlainPermissionManager();
    Assert.assertTrue(plainPermissionManager.isWatchStart());
    Map<String, String> accessKeyTable = (Map<String, String>) FieldUtils.readDeclaredField(plainPermissionManager, "accessKeyTable", true);
    String aclFileName = accessKeyTable.get("watchrocketmqx");
    {
        Map<String, Map<String, PlainAccessResource>> plainAccessResourceMap = (Map<String, Map<String, PlainAccessResource>>) FieldUtils.readDeclaredField(plainPermissionManager, "aclPlainAccessResourceMap", true);
        PlainAccessResource accessResource = plainAccessResourceMap.get(aclFileName).get("watchrocketmqx");
        Assert.assertNotNull(accessResource);
        Assert.assertEquals(accessResource.getSecretKey(), "12345678");
        Assert.assertTrue(accessResource.isAdmin());
    }
    PlainAccessData updatedMap = AclUtils.getYamlDataObject(fileName, PlainAccessData.class);
    List<PlainAccessConfig> accounts = updatedMap.getAccounts();
    accounts.get(0).setAccessKey("watchrocketmq1y");
    accounts.get(0).setSecretKey("88888888");
    accounts.get(0).setAdmin(false);
    // Update file and flush to yaml file
    AclUtils.writeDataObject(fileName, updatedMap);
    Thread.sleep(10000);
    {
        Map<String, Map<String, PlainAccessResource>> plainAccessResourceMap = (Map<String, Map<String, PlainAccessResource>>) FieldUtils.readDeclaredField(plainPermissionManager, "aclPlainAccessResourceMap", true);
        PlainAccessResource accessResource = plainAccessResourceMap.get(aclFileName).get("watchrocketmq1y");
        Assert.assertNotNull(accessResource);
        Assert.assertEquals(accessResource.getSecretKey(), "88888888");
        Assert.assertFalse(accessResource.isAdmin());
    }
    transport.delete();
}
// Builder factory: cpuCores as a CPUResource, task heap given in mebibytes.
public static Builder newBuilder(double cpuCores, int taskHeapMemoryMB) { return new Builder(new CPUResource(cpuCores), MemorySize.ofMebiBytes(taskHeapMemoryMB)); }
// A ResourceSpec with an extended resource must survive Java serialization round-trip with equality.
@Test void testSerializable() throws Exception { ResourceSpec rs1 = ResourceSpec.newBuilder(1.0, 100) .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.1)) .build(); ResourceSpec rs2 = CommonTestUtils.createCopySerializable(rs1); assertThat(rs2).isEqualTo(rs1); }
/**
 * Returns the changelog partition registered for {@code storeName}.
 *
 * @throws IllegalStateException if the store was never registered, or was registered
 *         without a changelog partition (e.g. logging disabled for the store)
 */
public TopicPartition registeredChangelogPartitionFor(final String storeName) {
    final StateStoreMetadata metadata = stores.get(storeName);
    if (metadata == null) {
        throw new IllegalStateException("State store " + storeName
            + " for which the registered changelog partition should be retrieved has not been registered");
    }
    final TopicPartition changelog = metadata.changelogPartition;
    if (changelog == null) {
        throw new IllegalStateException("Registered state store " + storeName
            + " does not have a registered changelog partition."
            + " This may happen if logging is disabled for the state store.");
    }
    return changelog;
}
// Looking up the changelog partition of an unregistered store must throw IllegalStateException.
@Test public void shouldThrowIfStateStoreIsNotRegistered() { final ProcessorStateManager stateMgr = getStateManager(Task.TaskType.ACTIVE); assertThrows(IllegalStateException.class, () -> stateMgr.registeredChangelogPartitionFor(persistentStoreName), "State store " + persistentStoreName + " for which the registered changelog partition should be" + " retrieved has not been registered" ); }
// POSTs a "description" tag for the file at {@code path} via the tags/add endpoint.
// URISyntaxException is a programming error here (fixed path), hence IllegalStateException.
// Non-2xx responses fail, except 409 which is tolerated — presumably "tag already exists";
// TODO(review): confirm the 409 semantics against the Koofr API.
public void addDescription(String path, String description) throws IOException, InvalidTokenException { Map<String, String[]> tags = new LinkedHashMap<>(); tags.put("description", new String[] {description}); Map<String, Object> body = new LinkedHashMap<>(); body.put("tags", tags); String url; try { url = getUriBuilder() .setPath(API_PATH_PREFIX + "/mounts/primary/files/tags/add") .setParameter("path", path) .build() .toString(); } catch (URISyntaxException e) { throw new IllegalStateException("Could not produce url.", e); } Request.Builder requestBuilder = getRequestBuilder(url); requestBuilder.post( RequestBody.create( MediaType.parse("application/json"), objectMapper.writeValueAsString(body))); try (Response response = getResponse(requestBuilder)) { int code = response.code(); if ((code < 200 || code > 299) && code != 409) { throw new KoofrClientIOException(response); } } }
// A 500 from the server must surface as KoofrClientIOException carrying code and body.
@Test public void testAddDescriptionError() { server.enqueue(new MockResponse().setResponseCode(500).setBody("Internal error")); Exception caughtExc = assertThrows( KoofrClientIOException.class, () -> client.addDescription("/path/to/folder", "Test description")); assertNotNull(caughtExc); assertEquals( "Got error code: 500 message: Server Error body: Internal error", caughtExc.getMessage()); assertEquals(1, server.getRequestCount()); }
/**
 * Builds an HTTP Basic auth header value: "Basic " + base64("username:password").
 */
public static String buildBasicAuthValue(String username, String password) {
    final String encoded = Util.encodeToBase64(join(":", username, password));
    return format("Basic %s", encoded);
}
// Known credentials must produce the known RFC 7617 Basic header value.
@Test public void testBuildBasicAuthValue() { String username = "testUser"; String password = "testPassword"; String expectedHeaderValue = "Basic dGVzdFVzZXI6dGVzdFBhc3N3b3Jk"; String actualHeaderValue = CruiseControlUtil.buildBasicAuthValue(username, password); assertEquals(expectedHeaderValue, actualHeaderValue); }
// Fix: user-facing @UdfParameter description had a typo ("natual") and a grammar error
// ("the value get"); corrected to "the value to get the natural logarithm of."
@Udf(description = "Returns the natural logarithm (base e) of an INT value.")
public Double ln(
    @UdfParameter(
        value = "value",
        description = "the value to get the natural logarithm of."
    ) final Integer value
) {
    // Delegate to the DOUBLE overload; a null input stays null so SQL NULL propagates.
    return ln(value == null ? null : value.doubleValue());
}
// ln of a negative number must be NaN (per Math.log semantics), not an exception.
@Test public void shouldHandleNegative() { assertThat(Double.isNaN(udf.ln(-1.0)), is(true)); }
// Case-insensitive lookup of the overwrite flag for a header name.
// NOTE(review): get(...) unboxes a Boolean — an unknown header name would yield null and NPE;
// presumably headersToOverwrite is pre-populated with defaults for every queried header — confirm.
public boolean shouldOverwriteHeaderWithName(String headerName) { notNull(headerName, "Header name"); return headersToOverwrite.get(headerName.toUpperCase()); }
// Default config: content-type (lower-case lookup) must be overwritable.
@Test public void content_type_header_is_overwritable_by_default() { HeaderConfig headerConfig = new HeaderConfig(); assertThat(headerConfig.shouldOverwriteHeaderWithName("content-type"), is(true)); }
// Thin delegate to the Hive metastore thrift client for a database lookup.
Database getDatabase(@NotNull String dbName) throws TException { return client.get_database(dbName); }
// Fetched database must carry the fixture's name (case-insensitive), description, params and location.
@Test public void getDatabase() throws TException { Database db = client.getDatabase(TEST_DATABASE); MatcherAssert.assertThat(db.getName(), Matchers.equalToIgnoringCase(TEST_DATABASE)); MatcherAssert.assertThat(db.getDescription(), Matchers.equalTo(TEST_DATABASE_DESCRIPTION)); MatcherAssert.assertThat(db.getParameters(), Matchers.equalTo(TEST_DATABASE_PARAMS)); MatcherAssert.assertThat(db.getLocationUri(), Matchers.containsString(TEST_DATABASE.toLowerCase())); }
// Publishes pluggable artifacts first, then built-in artifacts (prepending the pluggable
// metadata folder as an artifact plan when it is non-empty). Individual upload failures are
// collected and reported in one RuntimeException after all plans were attempted; the metadata
// folder is always deleted in the finally block.
// NOTE(review): failedArtifact is an instance field and is never cleared here — a second call
// on the same instance would re-report earlier failures; confirm single-use semantics.
public void publishArtifacts(List<ArtifactPlan> artifactPlans, EnvironmentVariableContext environmentVariableContext) { final File pluggableArtifactFolder = publishPluggableArtifacts(artifactPlans, environmentVariableContext); try { final List<ArtifactPlan> mergedPlans = artifactPlanFilter.getBuiltInMergedArtifactPlans(artifactPlans); if (isMetadataFolderEmpty(pluggableArtifactFolder)) { LOGGER.info("Pluggable metadata folder is empty."); } else if (pluggableArtifactFolder != null) { mergedPlans.add(0, new ArtifactPlan(ArtifactPlanType.file, format("%s%s*", pluggableArtifactFolder.getName(), File.separator), PLUGGABLE_ARTIFACT_METADATA_FOLDER)); } for (ArtifactPlan artifactPlan : mergedPlans) { try { artifactPlan.publishBuiltInArtifacts(goPublisher, workingDirectory); } catch (Exception e) { failedArtifact.add(artifactPlan); } } if (!failedArtifact.isEmpty()) { StringBuilder builder = new StringBuilder(); for (ArtifactPlan artifactPlan : failedArtifact) { artifactPlan.printArtifactInfo(builder); } throw new RuntimeException(format("[%s] Uploading finished. Failed to upload %s.", PRODUCT_NAME, builder)); } } finally { FileUtils.deleteQuietly(pluggableArtifactFolder); } }
// One plugin's publishArtifact throwing must not stop the other plugin: the docker metadata
// is still uploaded, the failure is reported in the final exception and in the console log.
@Test public void shouldContinueWithOtherPluginWhenPublishArtifactCallFailsForOnePlugin() { final ArtifactStore s3ArtifactStore = new ArtifactStore("s3", "cd.go.s3", create("access_key", false, "some-key")); final ArtifactStore dockerArtifactStore = new ArtifactStore("docker", "cd.go.docker", create("registry-url", false, "docker.io")); final ArtifactStores artifactStores = new ArtifactStores(s3ArtifactStore, dockerArtifactStore); final ArtifactPlan s3ArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car"))); final ArtifactPlan dockerArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("test-reports", "docker", create("junit", false, "junit.xml"))); when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(s3ArtifactPlan), eq(s3ArtifactStore), anyString(), eq(env))) .thenThrow(new RuntimeException("Interaction with plugin `cd.go.s3` failed.")); when(artifactExtension.publishArtifact(eq("cd.go.docker"), eq(dockerArtifactPlan), eq(dockerArtifactStore), anyString(), eq(env))) .thenReturn(new PublishArtifactResponse(Collections.singletonMap("tag", "10.12.0"))); assertThatThrownBy(() -> new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder) .publishArtifacts(Arrays.asList(s3ArtifactPlan, dockerArtifactPlan), env)) .hasMessageContaining("[go] Uploading finished. Failed to upload [installers]."); assertThat(uploadedPluggableMetadataFiles(publisher.publishedFiles())).containsExactly("cd.go.docker.json"); assertThat(publisher.getMessage()).contains("[go] Interaction with plugin `cd.go.s3` failed"); }
/**
 * Returns a new slice equal to {@code decimal} shifted left by {@code leftShifts} bits.
 */
static Slice shiftLeft(Slice decimal, int leftShifts) {
    // Copy first so the destructive in-place shift leaves the caller's slice untouched.
    final Slice shifted = Slices.copyOf(decimal);
    shiftLeftDestructive(shifted, leftShifts);
    return shifted;
}
// Bit-shift cases: identity (0), sub-byte, byte-, word- and limb-aligned shifts, including
// a full 64-bit limb carry and the maximal in-range shift (64 + 63).
@Test public void testShiftLeft() { assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0xEFDCBA0987654321L), 0), wrappedLongArray(0x1234567890ABCDEFL, 0xEFDCBA0987654321L)); assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0xEFDCBA0987654321L), 1), wrappedLongArray(0x2468ACF121579BDEL, 0xDFB974130ECA8642L)); assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0x00DCBA0987654321L), 8), wrappedLongArray(0x34567890ABCDEF00L, 0xDCBA098765432112L)); assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0x0000BA0987654321L), 16), wrappedLongArray(0x567890ABCDEF0000L, 0xBA09876543211234L)); assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0x0000000087654321L), 32), wrappedLongArray(0x90ABCDEF00000000L, 0x8765432112345678L)); assertEquals(shiftLeft(wrappedLongArray(0x1234567890ABCDEFL, 0L), 64), wrappedLongArray(0x0000000000000000L, 0x1234567890ABCDEFL)); assertEquals(shiftLeft(wrappedLongArray(0x0034567890ABCDEFL, 0L), 64 + 8), wrappedLongArray(0x0000000000000000L, 0x34567890ABCDEF00L)); assertEquals(shiftLeft(wrappedLongArray(0x000000000000CDEFL, 0L), 64 + 48), wrappedLongArray(0x0000000000000000L, 0xCDEF000000000000L)); assertEquals(shiftLeft(wrappedLongArray(0x1L, 0L), 64 + 63), wrappedLongArray(0x0000000000000000L, 0x8000000000000000L)); }
/**
 * Loads the SMS channel and validates it is usable.
 *
 * @throws exception SMS_CHANNEL_NOT_EXISTS when no channel has the id,
 *         SMS_CHANNEL_DISABLE when the channel exists but is disabled
 */
@VisibleForTesting
public SmsChannelDO validateSmsChannel(Long channelId) {
    SmsChannelDO channel = smsChannelService.getSmsChannel(channelId);
    if (channel == null) {
        throw exception(SMS_CHANNEL_NOT_EXISTS);
    }
    if (CommonStatusEnum.isDisable(channel.getStatus())) {
        throw exception(SMS_CHANNEL_DISABLE);
    }
    return channel;
}
// A disabled channel must make validation raise SMS_CHANNEL_DISABLE.
@Test public void testValidateSmsChannel_disable() {
    // Prepare parameters.
    Long channelId = randomLongId();
    // Mock: return a channel whose status is DISABLE so validation fails.
    SmsChannelDO channelDO = randomPojo(SmsChannelDO.class, o -> {
        o.setId(channelId);
        o.setStatus(CommonStatusEnum.DISABLE.getStatus());
    });
    when(smsChannelService.getSmsChannel(eq(channelId))).thenReturn(channelDO);
    // Invoke and expect the service exception.
    assertServiceException(() -> smsTemplateService.validateSmsChannel(channelId), SMS_CHANNEL_DISABLE);
}
/**
 * Returns {@code true} iff {@code string} is neither {@code null} nor the empty string.
 */
public static boolean isNotEmpty(@Nullable String string) {
    if (string == null) {
        return false;
    }
    return !string.isEmpty();
}
// null input must be reported as empty (false), not throw.
@Test public void testNull() { assertThat(StringUtils.isNotEmpty(null)).isFalse(); }
// Straight delegation of a warn-level message to the wrapped SLF4J logger.
@Override public void warn(String msg) { logger.warn(msg); }
// warn(msg) must delegate verbatim; getName() is invoked by the wrapper's constructor.
@Test public void testWarn() { Logger mockLogger = mock(Logger.class); when(mockLogger.getName()).thenReturn("foo"); InternalLogger logger = new Slf4JLogger(mockLogger); logger.warn("a"); verify(mockLogger).getName(); verify(mockLogger).warn("a"); }
// Completes the future with a value. RuntimeException instances are rejected as values
// (they must go through the failure path) and completing twice is an error — the CAS
// against the INCOMPLETE_SENTINEL enforces single completion. The latch is counted down
// in finally so waiters wake even when an IllegalArgument/IllegalStateException is thrown.
public void complete(T value) { try { if (value instanceof RuntimeException) throw new IllegalArgumentException("The argument to complete can not be an instance of RuntimeException"); if (!result.compareAndSet(INCOMPLETE_SENTINEL, value)) throw new IllegalStateException("Invalid attempt to complete a request future which is already complete"); fireSuccess(); } finally { completedLatch.countDown(); } }
// Asking a successfully-completed future for its exception must throw IllegalStateException.
@Test public void invokeExceptionAfterSuccess() { RequestFuture<Void> future = new RequestFuture<>(); future.complete(null); assertThrows(IllegalStateException.class, future::exception); }
// Archives source (file or directory) into a gzip-compressed tar at dest.
// Preconditions: source must exist, dest must not. On an IOException mid-write the
// partially written archive is deleted before rethrowing.
public static void tar(@NotNull File source, @NotNull File dest) throws IOException {
    if (!source.exists()) {
        throw new IllegalArgumentException("No source file or folder exists: " + source.getAbsolutePath());
    }
    if (dest.exists()) {
        throw new IllegalArgumentException("Destination refers to existing file or folder: " + dest.getAbsolutePath());
    }
    try (TarArchiveOutputStream tarOut = new TarArchiveOutputStream(new GZIPOutputStream(
        new BufferedOutputStream(Files.newOutputStream(dest.toPath())), 0x1000))) {
        doTar("", source, tarOut);
    } catch (IOException e) {
        // operation failed, let's remove the destination archive
        IOUtil.deleteFile(dest);
        throw e;
    }
}
// Missing source must raise IllegalArgumentException; the early return is the pass path.
@Test public void testNoSource() throws Exception { try { CompressBackupUtil.tar(new File(randName + ".txt"), dest); } catch (IllegalArgumentException e) { return; } Assert.fail("No source file/folder exists. Should have thrown an exception."); }
/**
 * Handles a newly added plugin jar/bundle: builds its descriptor, validates bundled-plugin
 * conflicts, OS compatibility and target GoCD version, then registers it.
 * The closing log line is emitted in a finally block so it appears even on failure.
 */
@Override
public void pluginJarAdded(BundleOrPluginFileDetails bundleOrPluginFileDetails) {
    final GoPluginBundleDescriptor descriptor = goPluginBundleDescriptorBuilder.build(bundleOrPluginFileDetails);
    try {
        LOGGER.info("Plugin load starting: {}", bundleOrPluginFileDetails.file());
        validateIfExternalPluginRemovingBundledPlugin(descriptor);
        validatePluginCompatibilityWithCurrentOS(descriptor);
        validatePluginCompatibilityWithGoCD(descriptor);
        addPlugin(bundleOrPluginFileDetails, descriptor);
    } finally {
        LOGGER.info("Plugin load finished: {}", bundleOrPluginFileDetails.file());
    }
}
// A bundle whose plugins target an invalid/unsupported GoCD version must be marked invalid:
// it is still registered (for status reporting) but never handed to the plugin loader,
// and both descriptors carry the combined invalid-version message.
@Test void shouldNotLoadAPluginWhenTargetedGocdVersionIsIncorrect() throws Exception { File pluginJarFile = new File(pluginWorkDir, PLUGIN_JAR_FILE_NAME); copyPluginToTheDirectory(pluginWorkDir, PLUGIN_JAR_FILE_NAME); final GoPluginDescriptor pluginDescriptor1 = getPluginDescriptor("some.old.id.1", "1.0", pluginJarFile.getAbsolutePath(), new File(PLUGIN_JAR_FILE_NAME), false, "17.5.0", "Linux", "Mac OS X"); final GoPluginDescriptor pluginDescriptor2 = getPluginDescriptor("some.old.id.2", "1.0", pluginJarFile.getAbsolutePath(), new File(PLUGIN_JAR_FILE_NAME), false, "9999.0.0.1.2", "Linux", "Mac OS X"); GoPluginBundleDescriptor bundleDescriptor = new GoPluginBundleDescriptor(pluginDescriptor1, pluginDescriptor2); when(goPluginBundleDescriptorBuilder.build(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir))).thenReturn(bundleDescriptor); listener = new DefaultPluginJarChangeListener(registry, osgiManifestGenerator, pluginLoader, goPluginBundleDescriptorBuilder, systemEnvironment); listener.pluginJarAdded(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir)); verify(registry, times(1)).loadPlugin(bundleDescriptor); verifyNoMoreInteractions(pluginLoader); assertThat(pluginDescriptor1.getStatus().getMessages().size()).isEqualTo(1); assertThat(pluginDescriptor1.getStatus().getMessages().get(0)).isEqualTo("Plugins with IDs ([some.old.id.1, some.old.id.2]) are not valid: Incorrect target GoCD version (17.5.0 & 9999.0.0.1.2) specified."); assertThat(pluginDescriptor2.getStatus().getMessages().size()).isEqualTo(1); assertThat(pluginDescriptor2.getStatus().getMessages().get(0)).isEqualTo("Plugins with IDs ([some.old.id.1, some.old.id.2]) are not valid: Incorrect target GoCD version (17.5.0 & 9999.0.0.1.2) specified."); }
// Applies the per-column forward-index/dictionary operations computed from the segment's
// current state vs. its target config, asserting after each mutation that the expected
// index files exist (or don't) in the segment directory.
@Override
public void updateIndices(SegmentDirectory.Writer segmentWriter) throws Exception {
    Map<String, List<Operation>> columnOperationsMap = computeOperations(segmentWriter);
    if (columnOperationsMap.isEmpty()) {
        return;
    }
    for (Map.Entry<String, List<Operation>> entry : columnOperationsMap.entrySet()) {
        String column = entry.getKey();
        List<Operation> operations = entry.getValue();
        for (Operation operation : operations) {
            switch (operation) {
                case DISABLE_FORWARD_INDEX:
                    // Deletion of the forward index will be handled outside the index handler to ensure that other index
                    // handlers that need the forward index to construct their own indexes will have it available.
                    _tmpForwardIndexColumns.add(column);
                    break;
                case ENABLE_FORWARD_INDEX:
                    ColumnMetadata columnMetadata = createForwardIndexIfNeeded(segmentWriter, column, false);
                    if (columnMetadata.hasDictionary()) {
                        if (!segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(String.format(
                                "Dictionary should still exist after rebuilding forward index for dictionary column: %s", column));
                        }
                    } else {
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                String.format("Dictionary should not exist after rebuilding forward index for raw column: %s", column));
                        }
                    }
                    break;
                case DISABLE_DICTIONARY:
                    Set<String> newForwardIndexDisabledColumns =
                        FieldIndexConfigsUtil.columnsWithIndexDisabled(_fieldIndexConfigs.keySet(), StandardIndexes.forward(), _fieldIndexConfigs);
                    if (newForwardIndexDisabledColumns.contains(column)) {
                        removeDictionaryFromForwardIndexDisabledColumn(column, segmentWriter);
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                String.format("Dictionary should not exist after disabling dictionary for column: %s", column));
                        }
                    } else {
                        disableDictionaryAndCreateRawForwardIndex(column, segmentWriter);
                    }
                    break;
                case ENABLE_DICTIONARY:
                    createDictBasedForwardIndex(column, segmentWriter);
                    if (!segmentWriter.hasIndexFor(column, StandardIndexes.forward())) {
                        throw new IllegalStateException(String.format("Forward index was not created for column: %s", column));
                    }
                    break;
                case CHANGE_INDEX_COMPRESSION_TYPE:
                    rewriteForwardIndexForCompressionChange(column, segmentWriter);
                    break;
                default:
                    throw new IllegalStateException("Unsupported operation for column " + column);
            }
        }
    }
}
// For each raw (no-dictionary) column: disable its forward index without an inverted index
// and verify the index map, disabled-column invariants and that column metadata is unchanged
// except for hasDictionary/dictionaryElementSize.
@Test
public void testDisableForwardIndexForRawAndInvertedIndexDisabledColumns() throws Exception {
    Set<String> forwardIndexDisabledColumns = new HashSet<>(SV_FORWARD_INDEX_DISABLED_COLUMNS);
    forwardIndexDisabledColumns.addAll(MV_FORWARD_INDEX_DISABLED_COLUMNS);
    forwardIndexDisabledColumns.addAll(MV_FORWARD_INDEX_DISABLED_DUPLICATES_COLUMNS);
    forwardIndexDisabledColumns.addAll(FORWARD_INDEX_DISABLED_RAW_COLUMNS);
    forwardIndexDisabledColumns.add(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX);
    forwardIndexDisabledColumns.add(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX);
    for (String column : _noDictionaryColumns) {
        if (FORWARD_INDEX_DISABLED_RAW_COLUMNS.contains(column) || RAW_SORTED_INDEX_COLUMNS.contains(column)) {
            // Forward index already disabled for these columns, skip them
            continue;
        }
        SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
        SegmentDirectory segmentLocalFSDirectory = new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap);
        SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter();
        IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
        forwardIndexDisabledColumns.add(column);
        indexLoadingConfig.setForwardIndexDisabledColumns(forwardIndexDisabledColumns);
        Set<String> invertedIndexColumns = new HashSet<>(forwardIndexDisabledColumns);
        invertedIndexColumns.removeAll(FORWARD_INDEX_DISABLED_RAW_COLUMNS);
        invertedIndexColumns.remove(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX);
        invertedIndexColumns.remove(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX);
        invertedIndexColumns.remove(column);
        indexLoadingConfig.setInvertedIndexColumns(invertedIndexColumns);
        ForwardIndexHandler fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
        fwdIndexHandler.updateIndices(writer);
        fwdIndexHandler.postUpdateIndicesCleanup(writer);
        // Tear down before validation. Because columns.psf and index map cleanup happens at segmentDirectory.close()
        segmentLocalFSDirectory.close();
        validateIndexMap(column, false, true);
        validateIndexesForForwardIndexDisabledColumns(column);
        // In column metadata, nothing other than hasDictionary and dictionaryElementSize should change.
        ColumnMetadata metadata = existingSegmentMetadata.getColumnMetadataFor(column);
        FieldSpec.DataType dataType = metadata.getDataType();
        validateMetadataProperties(column, false, 0, metadata.getCardinality(), metadata.getTotalDocs(), dataType, metadata.getFieldType(), metadata.isSorted(), metadata.isSingleValue(), metadata.getMaxNumberOfMultiValues(), metadata.getTotalNumberOfEntries(), metadata.isAutoGenerated(), metadata.getMinValue(), metadata.getMaxValue(), false);
    }
}
/**
 * String-overload convenience: parses the topic name and delegates to the
 * {@code TopicName}-based overload.
 */
public CompletableFuture<Boolean> isAllowAutoTopicCreationAsync(final String topic) {
    return isAllowAutoTopicCreationAsync(TopicName.get(topic));
}
// The service-unit-state channel topic must not be auto-creatable, while an ordinary
// topic under the system namespace must be.
@Test public void testIsSystemTopicAllowAutoTopicCreationAsync() throws Exception { BrokerService brokerService = pulsar.getBrokerService(); assertFalse(brokerService.isAllowAutoTopicCreationAsync( ServiceUnitStateChannelImpl.TOPIC).get()); assertTrue(brokerService.isAllowAutoTopicCreationAsync( "persistent://pulsar/system/my-system-topic").get()); }
// Thin delegate: paging query is implemented entirely in the mapper.
@Override public PageResult<SmsTemplateDO> getSmsTemplatePage(SmsTemplatePageReqVO pageReqVO) { return smsTemplateMapper.selectPage(pageReqVO); }
// Page query must return only the row matching every filter; each inserted near-miss row
// differs in exactly one field and must be excluded.
@Test public void testGetSmsTemplatePage() {
    // Mock data: the row the page query is expected to return.
    SmsTemplateDO dbSmsTemplate = randomPojo(SmsTemplateDO.class, o -> {
        o.setType(SmsTemplateTypeEnum.PROMOTION.getType());
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setCode("tudou");
        o.setContent("芋道源码");
        o.setApiTemplateId("yunai");
        o.setChannelId(1L);
        o.setCreateTime(buildTime(2021, 11, 11));
    });
    smsTemplateMapper.insert(dbSmsTemplate);
    // type does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setType(SmsTemplateTypeEnum.VERIFICATION_CODE.getType())));
    // status does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // code does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setCode("yuanma")));
    // content does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setContent("源码")));
    // apiTemplateId does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setApiTemplateId("nai")));
    // channelId does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setChannelId(2L)));
    // createTime does not match
    smsTemplateMapper.insert(ObjectUtils.cloneIgnoreId(dbSmsTemplate, o -> o.setCreateTime(buildTime(2021, 12, 12))));
    // Prepare query parameters.
    SmsTemplatePageReqVO reqVO = new SmsTemplatePageReqVO();
    reqVO.setType(SmsTemplateTypeEnum.PROMOTION.getType());
    reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    reqVO.setCode("tu");
    reqVO.setContent("芋道");
    reqVO.setApiTemplateId("yu");
    reqVO.setChannelId(1L);
    reqVO.setCreateTime(buildBetweenTime(2021, 11, 1, 2021, 12, 1));
    // Invoke.
    PageResult<SmsTemplateDO> pageResult = smsTemplateService.getSmsTemplatePage(reqVO);
    // Assert: only the fully-matching row is returned.
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbSmsTemplate, pageResult.getList().get(0));
}
/**
 * Allocates a direct {@link ByteBuffer} of the given size, configured with the
 * platform's native byte order.
 */
public static ByteBuffer allocateAndConfigureBuffer(int bufferSize) {
    // order(...) mutates and returns the same buffer, so chaining is safe.
    return ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.nativeOrder());
}
// Allocated buffer must be direct, native-ordered, with capacity == limit == bufferSize and position 0.
@Test void testAllocateAndConfigureBuffer() { final int bufferSize = 16; ByteBuffer buffer = FileRegionWriteReadUtils.allocateAndConfigureBuffer(bufferSize); assertThat(buffer.capacity()).isEqualTo(16); assertThat(buffer.limit()).isEqualTo(16); assertThat(buffer.position()).isZero(); assertThat(buffer.isDirect()).isTrue(); assertThat(buffer.order()).isEqualTo(ByteOrder.nativeOrder()); }
/**
 * Returns {@code true} when {@code returnType} is (a subtype of) one of the supported
 * reactive types.
 */
@SuppressWarnings("unchecked")
@Override
public boolean canHandleReturnType(Class returnType) {
    for (Class supported : rxSupportedTypes) {
        if (supported.isAssignableFrom(returnType)) {
            return true;
        }
    }
    return false;
}
// RxJava2 reactive return types (Flowable, Single) must be handled by the aspect.
@Test public void testCheckTypes() { assertThat(rxJava2BulkheadAspectExt.canHandleReturnType(Flowable.class)).isTrue(); assertThat(rxJava2BulkheadAspectExt.canHandleReturnType(Single.class)).isTrue(); }
/**
 * Casts {@code node} to a {@link YamlSequence}; {@code null} passes through as {@code null}.
 *
 * @throws YamlException when the node is non-null but not a sequence
 */
public static YamlSequence asSequence(YamlNode node) {
    if (node == null || node instanceof YamlSequence) {
        return (YamlSequence) node;
    }
    throw new YamlException(String.format("Child %s is not a sequence, it's actual type is %s", node.nodeName(), node.getClass()));
}
// Passing a mapping node where a sequence is required must raise YamlException.
@Test(expected = YamlException.class) public void asSequenceThrowsIfNonSequencePassed() { YamlNode genericNode = new YamlMappingImpl(null, "mapping"); YamlUtil.asSequence(genericNode); }
// Computes the deadline for the next assignment request: backoff after global failures,
// immediate when many requests are ready and none are in flight, otherwise the standard
// batching delay. The earlier of the previous and newly-computed send time wins.
@Override
public OptionalLong apply(OptionalLong previousSendTimeNs) {
    long delayNs;
    if (previousGlobalFailures > 0) {
        // If there were global failures (like a response timeout), we want to wait for the
        // full backoff period.
        delayNs = backoff.backoff(previousGlobalFailures);
    } else if ((numReadyRequests > MAX_ASSIGNMENTS_PER_REQUEST) && !hasInflightRequests) {
        // If there were no previous failures, and we have lots of requests, send it as soon
        // as possible.
        delayNs = 0;
    } else {
        // Otherwise, use the standard delay period. This helps to promote batching, which
        // reduces load on the controller.
        delayNs = backoff.initialInterval();
    }
    long newSendTimeNs = nowNs + delayNs;
    if (previousSendTimeNs.isPresent() && previousSendTimeNs.getAsLong() < newSendTimeNs) {
        // If the previous send time was before the new one we calculated, go with the
        // previous one.
        return previousSendTimeNs;
    }
    // Otherwise, return our new send time.
    return OptionalLong.of(newSendTimeNs);
}
// With one previous global failure the full backoff is applied — presumably
// backoff.backoff(1), here twice the initial interval (TODO confirm the
// constructor argument order matches this reading).
@Test
public void applyBackoffInterval() {
    assertEquals(OptionalLong.of(BACKOFF.initialInterval() * 2),
        new AssignmentsManagerDeadlineFunction(BACKOFF, 0, 1, false, 12).
            apply(OptionalLong.empty()));
}
/**
 * Updates the issue's locations, ignoring differences that are only in
 * location hashes.
 *
 * @return true when the issue was actually modified
 */
public boolean setLocations(DefaultIssue issue, @Nullable Object locations) {
    // Hash-only differences are not considered a real change.
    if (locationsEqualsIgnoreHashes(locations, issue.getLocations())) {
        return false;
    }
    issue.setLocations(locations);
    issue.setChanged(true);
    issue.setLocationsChanged(true);
    return true;
}
// Changing only a secondary location checksum must not be treated as a
// location update: no change recorded, no notification sent.
@Test
void do_not_change_locations_if_secondary_hash_changed() {
    DbCommons.TextRange range = DbCommons.TextRange.newBuilder().setStartLine(1).build();
    DbIssues.Locations locations = DbIssues.Locations.newBuilder()
        .addFlow(DbIssues.Flow.newBuilder()
            .addLocation(DbIssues.Location.newBuilder().setTextRange(range))
            .build())
        .setChecksum("1")
        .build();
    issue.setLocations(locations);
    // Same locations, different checksum only.
    DbIssues.Locations.Builder builder = locations.toBuilder();
    builder.getFlowBuilder(0).getLocationBuilder(0).setChecksum("2");
    boolean updated = underTest.setLocations(issue, builder.build());
    assertThat(updated).isFalse();
    assertThat(issue.currentChange()).isNull();
    assertThat(issue.mustSendNotifications()).isFalse();
}
/**
 * Adds a source node that consumes the given topics, using defaults for the
 * offset reset policy, timestamp extractor and key/value deserializers
 * (all passed as null to the internal builder).
 *
 * @param name   unique node name within the topology
 * @param topics the topics to consume from
 * @return this topology, for method chaining
 */
public synchronized Topology addSource(final String name, final String... topics) {
    internalTopologyBuilder.addSource(null, name, null, null, null, topics);
    return this;
}
// Registering a pattern source that overlaps an already-registered topic
// must be rejected with a TopologyException.
@Test
public void testPatternMatchesAlreadyProvidedTopicSource() {
    topology.addSource("source-1", "foo");
    try {
        topology.addSource("source-2", Pattern.compile("f.*"));
        fail("Should have thrown TopologyException for overlapping pattern with already registered topic");
    } catch (final TopologyException expected) {
        // expected: pattern "f.*" overlaps topic "foo"
    }
}
/**
 * Builds an IPv4 prefix from a 32-bit packed address value and a prefix length.
 *
 * @param address      the IPv4 address as a packed int
 * @param prefixLength the network mask length in bits
 * @return the resulting prefix
 */
public static Ip4Prefix valueOf(int address, int prefixLength) {
    return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
}
// A negative prefix length must be rejected. Note this exercises the
// byte[] overload of valueOf, not the int overload.
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfByteArrayNegativePrefixLengthIPv4() {
    Ip4Prefix ipPrefix;
    byte[] value;
    value = new byte[] {1, 2, 3, 4};
    ipPrefix = Ip4Prefix.valueOf(value, -1);
}
/**
 * Parses a request-configuration key (e.g. "greetings.POST/timeoutMs") with
 * the generated ANTLR lexer/parser and builds the corresponding
 * RequestConfigElement.
 *
 * @throws RequestConfigKeyParsingException when the key does not match the grammar
 */
static RequestConfigElement parse(String property, String key, Object value) throws RequestConfigKeyParsingException {
    // Collect lexer/parser errors in our own listener instead of letting
    // ANTLR print them to stderr.
    RequestConfigKeyParsingErrorListener errorListener = new RequestConfigKeyParsingErrorListener();
    ANTLRInputStream input = new ANTLRInputStream(key);
    RequestConfigKeyLexer lexer = new RequestConfigKeyLexer(input);
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    RequestConfigKeyParser parser = new RequestConfigKeyParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    KeyContext keyTree = parser.key();
    if (!errorListener.hasErrors()) {
        // Extract inbound/outbound resource and operation parts; each may be
        // a wildcard, hence the Optional wrapping.
        InboundContext inbound = keyTree.inbound();
        OutboundContext outbound = keyTree.outbound();
        Optional<String> inboundName = handlingWildcard(inbound.restResource());
        Optional<String> outboundName = handlingWildcard(outbound.restResource());
        Optional<String> inboundOp = getOpIn(inbound.operationIn());
        Optional<ResourceMethod> outboundOp = getOpOut(outbound.operationOut());
        Optional<String> inboundOpName = inboundOp.flatMap(method -> getOpInName(method, inbound.operationIn()));
        Optional<String> outboundOpName = outboundOp.flatMap(method -> getOpOutName(method, outbound.operationOut()));
        return new RequestConfigElement(key, coerceValue(property, value), property, inboundName, outboundName,
            inboundOpName, outboundOpName, inboundOp, outboundOp);
    } else {
        // Pluralize "Error" when the listener collected more than one.
        throw new RequestConfigKeyParsingException(
            "Error" + ((errorListener.errorsSize() > 1) ? "s" : "") + " parsing key: " + key + "\n" + errorListener);
    }
}
// A key with too many path segments does not match the grammar and must
// raise a RequestConfigKeyParsingException.
@Test(expectedExceptions = {RequestConfigKeyParsingException.class})
public void testParsingInvalidKey() throws RequestConfigKeyParsingException {
    RequestConfigElement.parse("timeoutMs", "greetings.POST/greetings.DELETE/timeoutMs", 100L);
}
/**
 * Parses the given SQL text into statements by delegating to the primary
 * execution context.
 */
@Override
public List<ParsedStatement> parse(final String sql) {
    return primaryContext.parse(sql);
}
// TERMINATE and DROP statements referring to an existing query/stream must
// both be preparable without throwing.
@Test
public void shouldBeAbleToPrepareTerminateAndDrop() {
    // Given:
    setupKsqlEngineWithSharedRuntimeEnabled();
    givenSqlAlreadyExecuted("CREATE STREAM FOO AS SELECT * FROM TEST1;");
    final List<ParsedStatement> parsed = ksqlEngine.parse(
        "TERMINATE CSAS_FOO_0;"
            + "DROP STREAM FOO;");
    // When:
    parsed.forEach(ksqlEngine::prepare);
    // Then: did not throw.
}
/**
 * Queries the sentinel for all monitored masters and converts the raw
 * key/value maps into RedisServer instances.
 */
@Override
public Collection<RedisServer> masters() {
    List<Map<String, String>> masters = connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_MASTERS);
    return toRedisServersList(masters);
}
// The sentinel under test monitors exactly one master.
@Test
public void testMasters() {
    Collection<RedisServer> masters = connection.masters();
    assertThat(masters).hasSize(1);
}
/**
 * Looks up a public, non-static field of the given class by name.
 *
 * @param clazz     the class to inspect
 * @param fieldName the field name
 * @return the matching field, or null when it does not exist or is static
 */
@Nullable
public static Field findPropertyField(Class<?> clazz, String fieldName) {
    Field field;
    try {
        field = clazz.getField(fieldName);
    } catch (NoSuchFieldException e) {
        // Absence of the field is an expected outcome, not an error.
        return null;
    }
    // Class#getField only ever returns public members, so no explicit
    // visibility check is needed (the old !isPublic branch was dead code);
    // static fields are still rejected because they are not bean properties.
    if (Modifier.isStatic(field.getModifiers())) {
        return null;
    }
    return field;
}
// A public instance field must be found and returned.
@Test
public void when_findPropertyField_public_then_returnsIt() {
    assertNotNull(findPropertyField(JavaFields.class, "publicField"));
}
/**
 * Returns the first factory whose pooling type matches the configured
 * connection pooling type (case-insensitive), or null when none matches.
 *
 * @param hdpConfig the metastore configuration to read the pool type from
 */
public static DataSourceProvider tryGetDataSourceProviderOrNull(Configuration hdpConfig) {
    final String configuredPoolingType = MetastoreConf.getVar(hdpConfig, MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE);
    // JDK stream replaces Guava's Iterables.tryFind: identical first-match
    // semantics with one less third-party dependency in this method.
    return FACTORIES.stream()
        .filter(factory -> {
            String poolingType = factory.getPoolingType();
            return poolingType != null && poolingType.equalsIgnoreCase(configuredPoolingType);
        })
        .findFirst()
        .orElse(null);
}
// Numeric HikariCP pass-through properties set on the conf must appear on
// the created HikariDataSource.
@Test
public void testSetHikariCpNumberProperty() throws SQLException {
    MetastoreConf.setVar(conf, ConfVars.CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI);
    conf.set(HikariCPDataSourceProvider.HIKARI + ".idleTimeout", "59999");
    conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1");
    DataSourceProvider dsp = DataSourceProviderFactory.tryGetDataSourceProviderOrNull(conf);
    Assert.assertNotNull(dsp);
    DataSource ds = dsp.create(conf);
    Assert.assertTrue(ds instanceof HikariDataSource);
    Assert.assertEquals(59999L, ((HikariDataSource)ds).getIdleTimeout());
}
/**
 * Attempts to locate the "-sources.jar" companion of the given classes jar.
 * Resolution order:
 *   1. a sibling "*-sources.jar" file next to the jar on disk;
 *   2. when the jar has no embedded pom.properties, a lookup by jar file
 *      name (also handling spring-boot "java -jar" URLs ending with "!/");
 *   3. otherwise, the Maven coordinates read from the embedded pom.properties.
 *
 * @param classesJarFileUrl URL of the classes jar
 * @return the sources jar file, or null when it cannot be resolved
 * @throws IOException on read failure
 */
public static File getSourceJarFile(URL classesJarFileUrl) throws IOException {
    final String file = classesJarFileUrl.getFile();
    if (file.endsWith(".jar")) {
        final File sources = new File(file.replace(".jar", "-sources.jar"));
        if (sources.exists()) {
            return sources;
        }
    }
    final byte[] pomProperties = readMavenFileFromJarFile(classesJarFileUrl, "pom.properties");
    if (pomProperties == null) {
        // No embedded POM metadata: try to resolve by the jar's file name.
        final Map<String, String> sourceFilePaths = getSourceFilePathsByJarFileNames();
        String jarFileName = file;
        if (jarFileName.endsWith("!/")) {
            // remove "!/" at the end, for spring-boot launched with "java -jar"
            jarFileName = jarFileName.substring(0, jarFileName.length() - "!/".length());
        }
        jarFileName = jarFileName.substring(jarFileName.lastIndexOf('/') + 1);
        final String sourceFilePath = sourceFilePaths.get(jarFileName);
        if (sourceFilePath != null) {
            return getMavenArtifact(sourceFilePath);
        }
        return null;
    }
    // Build artifact coordinates from the embedded pom.properties.
    final Properties properties = new Properties();
    properties.load(new ByteArrayInputStream(pomProperties));
    final MavenArtifact mavenArtifact = new MavenArtifact();
    mavenArtifact.groupId = properties.getProperty("groupId");
    mavenArtifact.artifactId = properties.getProperty("artifactId");
    mavenArtifact.version = properties.getProperty("version");
    final String filePath = mavenArtifact.getPath("-sources.jar");
    return getMavenArtifact(filePath);
}
// Resolves the sources jar for commons-dbcp2 twice: once with the default
// repositories and once after pointing the repository list at a local repo
// plus Maven Central. Both lookups must succeed.
@Test
public void testGetSourceJarFile() throws ClassNotFoundException, IOException {
    final File storageDirectory = Parameters
        .getStorageDirectory(Parameters.getCurrentApplication());
    // Start from a clean cache so the lookup actually runs.
    rmdir(new File(storageDirectory, "poms"));
    rmdir(new File(storageDirectory, "sources"));
    final Class<?> clazz = Class.forName("org.apache.commons.dbcp2.BasicDataSource");
    final URL location = clazz.getProtectionDomain().getCodeSource().getLocation();
    assertNotNull("getSourceJarFile", MavenArtifact.getSourceJarFile(location));
    Utils.setProperty(Parameter.MAVEN_REPOSITORIES, LOCAL_REPO.getPath() + ',' + MAVEN_CENTRAL);
    assertNotNull("getSourceJarFile", MavenArtifact.getSourceJarFile(location));
}
/**
 * Resolves this value reference as a Float. A PARAMETER reference is looked
 * up in the given parameter map; a FLOAT reference must wrap a Number.
 *
 * @throws IllegalStateException when the reference is neither FLOAT nor PARAMETER,
 *         or a FLOAT reference does not hold a Number
 */
public Float asFloat(Map<String, ValueReference> parameters) {
    switch (valueType()) {
        case PARAMETER:
            // Indirect value: resolve through the supplied parameter map.
            return asType(parameters, Float.class);
        case FLOAT:
            final Object raw = value();
            if (raw instanceof Number) {
                return ((Number) raw).floatValue();
            }
            throw new IllegalStateException("Expected value reference of type FLOAT but got " + raw.getClass());
        default:
            throw new IllegalStateException("Expected value reference of type FLOAT but got " + valueType());
    }
}
// A float reference resolves to its value; a string reference must fail
// with a descriptive IllegalStateException.
@Test
public void asFloat() {
    assertThat(ValueReference.of(1.0f).asFloat(Collections.emptyMap())).isEqualTo(1.0f);
    assertThatThrownBy(() -> ValueReference.of("Test").asFloat(Collections.emptyMap()))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("Expected value reference of type FLOAT but got STRING");
}
/**
 * Sets the wait value on this builder.
 * NOTE(review): the name clashes with {@link Object#wait(long)}; callers
 * must pass an {@code Integer} (not an int/long literal, which would widen
 * to long and select Object.wait) to hit this overload.
 *
 * @param wait the wait value
 * @return this builder, for chaining
 */
public ProviderBuilder wait(Integer wait) {
    this.wait = wait;
    return getThis();
}
// Integer.valueOf is used deliberately: a bare int literal would resolve to
// Object.wait(long) instead of the builder's wait(Integer) overload.
@Test
void Wait() {
    ProviderBuilder builder = ProviderBuilder.newBuilder();
    builder.wait(Integer.valueOf(1000));
    Assertions.assertEquals(1000, builder.build().getWait());
}
/**
 * Wraps the given fully-qualified class name in a GeneratedClassResource.
 */
static GeneratedClassResource getGeneratedClassResource(String fullClassName) {
    return new GeneratedClassResource(fullClassName);
}
// The wrapper must carry the class name through unchanged.
@Test
void getGeneratedIntermediateResource() {
    String className = "className";
    GeneratedClassResource retrieved = CompilationManagerUtils.getGeneratedClassResource(className);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved.getFullClassName()).isEqualTo(className);
}
/**
 * Creates a dispatcher that hands tasks to workers one at a time: the
 * acceptor is created with batch size 1 and drained by single-item worker
 * executors.
 *
 * @param id                     dispatcher id, used downstream for naming/metrics
 * @param maxBufferSize          max pending tasks buffered by the acceptor
 * @param workerCount            number of worker threads
 * @param maxBatchingDelay       max delay before a task is handed to a worker
 * @param congestionRetryDelayMs back-off when the processor reports congestion
 * @param networkFailureRetryMs  back-off on transient network failures
 * @param taskProcessor          the task processing logic
 */
public static <ID, T> TaskDispatcher<ID, T> createNonBatchingTaskDispatcher(String id, int maxBufferSize, int workerCount,
        long maxBatchingDelay, long congestionRetryDelayMs, long networkFailureRetryMs, TaskProcessor<T> taskProcessor) {
    final AcceptorExecutor<ID, T> acceptorExecutor = new AcceptorExecutor<>(
        id, maxBufferSize, 1, maxBatchingDelay, congestionRetryDelayMs, networkFailureRetryMs
    );
    final TaskExecutors<ID, T> taskExecutor = TaskExecutors.singleItemExecutors(id, workerCount, taskProcessor, acceptorExecutor);
    return new TaskDispatcher<ID, T>() {
        @Override
        public void process(ID id, T task, long expiryTime) {
            acceptorExecutor.process(id, task, expiryTime);
        }

        @Override
        public void shutdown() {
            // Stop accepting first, then stop the workers.
            acceptorExecutor.shutdown();
            taskExecutor.shutdown();
        }
    };
}
// A single dispatched task must flow through the non-batching dispatcher and
// complete successfully within the polling timeout.
@Test
public void testSingleTaskDispatcher() throws Exception {
    dispatcher = TaskDispatchers.createNonBatchingTaskDispatcher(
        "TEST", MAX_BUFFER_SIZE, 1, MAX_BATCHING_DELAY_MS,
        SERVER_UNAVAILABLE_SLEEP_TIME_MS, RETRY_SLEEP_TIME_MS, processor
    );
    dispatcher.process(1, ProcessingResult.Success, System.currentTimeMillis() + 60 * 1000);
    ProcessingResult result = processor.completedTasks.poll(5, TimeUnit.SECONDS);
    assertThat(result, is(equalTo(ProcessingResult.Success)));
}
/**
 * Deserializes the stored JSON column into a String-to-String map.
 * Note: GSON maps both a SQL NULL and the literal string "null" to null.
 */
@Override
public Map<String, String> convertToEntityAttribute(String dbData) {
    return GSON.fromJson(dbData, TYPE);
}
// Both a null column value and the JSON literal "null" deserialize to null.
@Test
void convertToEntityAttribute_null() {
    assertNull(this.converter.convertToEntityAttribute(null));
    assertNull(this.converter.convertToEntityAttribute("null"));
}
/**
 * Sums the per-partition distance between this table view and the given one.
 * Assumes both views describe the same number of partitions.
 */
public int distanceOf(PartitionTableView partitionTableView) {
    int distance = 0;
    for (int i = 0; i < partitions.length; i++) {
        distance += distanceOf(partitions[i], partitionTableView.partitions[i]);
    }
    return distance;
}
// Two views built from the same partition array must have distance zero.
@Test
public void testDistanceIsZero_whenSame() throws Exception {
    // distanceOf([A, B, C], [A, B, C]) == 0
    PartitionTableView table1 = createRandomPartitionTable();
    InternalPartition[] partitions = extractPartitions(table1);
    PartitionTableView table2 = new PartitionTableView(partitions);
    assertEquals(0, table2.distanceOf(table1));
}
/**
 * Deserializes the XML content into a config-for-edit, lets the optional
 * callback inspect it, then preprocesses and validates it into the runtime
 * config. Both forms are returned together in a GoConfigHolder.
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    CruiseConfig configForEdit;
    CruiseConfig config;
    LOGGER.debug("[Config Save] Loading config holder");
    configForEdit = deserializeConfig(content);
    if (callback != null) callback.call(configForEdit);
    config = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(config, configForEdit);
}
// A task may declare at most one oncancel child; a second <ant> inside
// <oncancel> must fail schema validation.
@Test
void shouldNotAllowMoreThanOneOnCancelTaskWhenDefined() {
    String xml = ("""
            <cruise schemaVersion='%d'>
            <server>
            <artifacts>
            <artifactsDir>artifactsDir</artifactsDir>
            </artifacts>
            </server>
            <pipelines>
            <pipeline name='pipeline1' template='abc'>
            <materials>
            <svn url ='svnurl' username='foo' password='password'/>
            </materials>
            </pipeline>
            </pipelines>
            <templates>
            <pipeline name='abc'>
            <stage name='stage1'>
            <jobs>
            <job name='job1'>
            <tasks>
            <exec command="rake">
            <arg>all_test</arg>
            <oncancel>
            <ant target='kill' />
            <ant target='kill' />
            </oncancel>
            </exec>
            </tasks>
            </job>
            </jobs>
            </stage>
            </pipeline>
            </templates>
            </cruise>""").formatted(CONFIG_SCHEMA_VERSION);
    assertThatThrownBy(() -> xmlLoader.loadConfigHolder(xml))
        .hasMessage("Invalid content was found starting with element 'ant'. No child element is expected at this point.");
}
public static String getSourceIpForGrpcRequest(RequestMeta meta) { String sourceIp = getSourceIp(); // If can't get from request context, get from grpc request meta. if (StringUtils.isBlank(sourceIp)) { sourceIp = meta.getClientIp(); } return sourceIp; }
// Resolution precedence: context source ip first, then (presumably via
// getSourceIp's own fallback) the context remote ip, finally the gRPC meta
// client ip — TODO confirm getSourceIp's remote-ip fallback against its impl.
@Test
void getSourceIpForGrpcRequest() {
    when(meta.getClientIp()).thenReturn("3.3.3.3");
    assertEquals("2.2.2.2", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
    RequestContextHolder.getContext().getBasicContext().getAddressContext().setSourceIp(null);
    assertEquals("1.1.1.1", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
    RequestContextHolder.getContext().getBasicContext().getAddressContext().setRemoteIp(null);
    assertEquals("3.3.3.3", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
}
/**
 * Reads a variable-length encoded long starting at the given position.
 * Each byte contributes 7 payload bits; the high bit marks continuation.
 *
 * @throws RuntimeException when the first byte is the reserved null marker (0x80)
 */
public static long readVLong(ByteData arr, long position) {
    byte current = arr.get(position);
    position++;
    // 0x80 as the first byte encodes "null", which cannot be read as a long.
    if (current == (byte) 0x80) {
        throw new RuntimeException("Attempting to read null value as long");
    }
    long result = current & 0x7F;
    // While the continuation bit is set, fold the next 7-bit group in.
    while ((current & 0x80) != 0) {
        current = arr.get(position);
        position++;
        result = (result << 7) | (current & 0x7F);
    }
    return result;
}
// Note: this exercises the InputStream overload of readVLong (not the
// ByteData one); an empty stream must surface as EOFException.
@Test(expected = EOFException.class)
public void testReadVLongEmptyInputStream() throws IOException {
    InputStream is = new ByteArrayInputStream(BYTES_EMPTY);
    VarInt.readVLong(is);
}
/**
 * Returns the system folder icon ("NSFolder") at the requested size.
 * The previous implementation retried the exact same lookup when the first
 * call returned null; an identical call cannot yield a different result,
 * so the redundant second invocation was removed.
 */
@Override
public NSImage folderIcon(final Integer size) {
    return this.iconNamed("NSFolder", size);
}
// A 512px folder icon must be a valid, non-template image of the requested
// size with at least one representation.
@Test
public void testFolderIcon512() {
    final NSImage icon = new NSImageIconCache().folderIcon(512);
    assertNotNull(icon);
    assertTrue(icon.isValid());
    assertFalse(icon.isTemplate());
    assertEquals(512, icon.size().width.intValue());
    assertEquals(512, icon.size().height.intValue());
    assertTrue(icon.representations().count().intValue() >= 1);
}
/**
 * Loads the OAuth2 approvals of a user for the given client, filtering out
 * entries whose expiry time has already passed.
 */
@Override
public List<OAuth2ApproveDO> getApproveList(Long userId, Integer userType, String clientId) {
    List<OAuth2ApproveDO> approveDOs = oauth2ApproveMapper.selectListByUserIdAndUserTypeAndClientId(
        userId, userType, clientId);
    // Drop expired approvals; only currently valid grants are returned.
    approveDOs.removeIf(o -> DateUtils.isExpired(o.getExpiresTime()));
    return approveDOs;
}
// Only non-expired approvals for the user/client pair are returned.
@Test
public void testGetApproveList() {
    // Prepare parameters
    Long userId = 10L;
    Integer userType = UserTypeEnum.ADMIN.getValue();
    String clientId = randomString();
    // Mock data
    OAuth2ApproveDO approve = randomPojo(OAuth2ApproveDO.class).setUserId(userId)
        .setUserType(userType).setClientId(clientId).setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), 1L, ChronoUnit.DAYS));
    oauth2ApproveMapper.insert(approve); // not expired
    oauth2ApproveMapper.insert(ObjectUtil.clone(approve).setId(null)
        .setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), -1L, ChronoUnit.DAYS))); // expired
    // Invoke
    List<OAuth2ApproveDO> result = oauth2ApproveService.getApproveList(userId, userType, clientId);
    // Assert
    assertEquals(1, result.size());
    assertPojoEquals(approve, result.get(0));
}
/**
 * Returns a copy of the given properties with the configured keys removed.
 * The input is never mutated.
 *
 * @throws IllegalArgumentException when the input is null
 */
public Properties apply(final Properties properties) {
    if (properties == null) {
        throw new IllegalArgumentException("properties must not be null");
    }
    if (properties.isEmpty()) {
        return new Properties();
    }
    final Properties filtered = new Properties();
    // Copy every entry except the keys this filter is configured to strip.
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        if (!keysToRemove.contains(entry.getKey())) {
            filtered.put(entry.getKey(), entry.getValue());
        }
    }
    return filtered;
}
// A null input must be rejected with IllegalArgumentException.
@Test
public void doesNotAcceptNullInput() {
    // Given
    Properties nullProperties = null;
    Filter f = new Filter();
    // When/Then
    try {
        f.apply(nullProperties);
        fail("IllegalArgumentException expected because input is null");
    } catch (IllegalArgumentException e) {
        // ignore — this is the expected outcome
    }
}
/**
 * Loads the dubbo properties file. The path is resolved from the JVM
 * system property first, then the environment variable of the same key,
 * and finally the built-in default location.
 */
public static Properties getProperties(Set<ClassLoader> classLoaders) {
    String path = System.getProperty(CommonConstants.DUBBO_PROPERTIES_KEY);
    if (StringUtils.isEmpty(path)) {
        path = System.getenv(CommonConstants.DUBBO_PROPERTIES_KEY);
        if (StringUtils.isEmpty(path)) {
            path = CommonConstants.DEFAULT_DUBBO_PROPERTIES;
        }
    }
    return ConfigUtils.loadProperties(classLoaders, path, false, true);
}
// With the system property cleared, the default dubbo.properties on the
// classpath must be loaded.
@Test
void testGetProperties2() throws Exception {
    System.clearProperty(CommonConstants.DUBBO_PROPERTIES_KEY);
    Properties p = ConfigUtils.getProperties(Collections.emptySet());
    assertThat((String) p.get("dubbo"), equalTo("properties"));
}
/**
 * Publishes a provisioning message to the given address over Redis pub/sub.
 * The publish goes through a circuit breaker; the return value reports
 * whether at least one subscriber received the message, which is also
 * recorded as the "online" tag on the counter metric.
 */
public boolean sendProvisioningMessage(final ProvisioningAddress address, final byte[] body) {
    final PubSubProtos.PubSubMessage pubSubMessage = PubSubProtos.PubSubMessage.newBuilder()
        .setType(PubSubProtos.PubSubMessage.Type.DELIVER)
        .setContent(ByteString.copyFrom(body))
        .build();
    // publish() returns the receiver count; > 0 means somebody was listening.
    final boolean receiverPresent = circuitBreaker.executeSupplier(
        () -> publicationConnection.sync()
            .publish(address.serialize().getBytes(StandardCharsets.UTF_8), pubSubMessage.toByteArray()) > 0);
    Metrics.counter(SEND_PROVISIONING_MESSAGE_COUNTER_NAME, "online", String.valueOf(receiverPresent)).increment();
    return receiverPresent;
}
// A subscribed listener must receive the published DELIVER message with the
// original content intact.
@Test
void sendProvisioningMessage() {
    final ProvisioningAddress address = ProvisioningAddress.create("address");
    final byte[] content = TestRandomUtil.nextBytes(16);
    @SuppressWarnings("unchecked")
    final Consumer<PubSubProtos.PubSubMessage> subscribedConsumer = mock(Consumer.class);
    provisioningManager.addListener(address, subscribedConsumer);
    provisioningManager.sendProvisioningMessage(address, content);
    final ArgumentCaptor<PubSubProtos.PubSubMessage> messageCaptor =
        ArgumentCaptor.forClass(PubSubProtos.PubSubMessage.class);
    verify(subscribedConsumer, timeout(PUBSUB_TIMEOUT_MILLIS)).accept(messageCaptor.capture());
    assertEquals(PubSubProtos.PubSubMessage.Type.DELIVER, messageCaptor.getValue().getType());
    assertEquals(ByteString.copyFrom(content), messageCaptor.getValue().getContent());
}
/**
 * Converts the parsed JSON parameter map into protobuf DynamicMessages,
 * one per element of the JSON array parameter. The map size is validated
 * against the expected descriptor field count up front.
 */
public static List<DynamicMessage> buildJsonMessageList(final Map<String, Object> jsonParamMap) {
    ParamCheckUtils.checkParamsLength(jsonParamMap.size(), GrpcConstants.JSON_DESCRIPTOR_PROTO_FIELD_NUM);
    JsonArray jsonParams = (JsonArray) jsonParamMap.get(GrpcConstants.JSON_DESCRIPTOR_PROTO_FIELD_NAME);
    List<DynamicMessage> jsonMessageList = new ArrayList<>(jsonParams.size());
    jsonParams.forEach(jsonParam -> {
        // Each array element is serialized back to JSON and wrapped in a message.
        DynamicMessage jsonMessage = buildJsonMessage(GsonUtils.getInstance().toJson(jsonParam));
        jsonMessageList.add(jsonMessage);
    });
    return jsonMessageList;
}
// A two-element JSON array must produce two messages, each carrying one
// element's JSON text in the descriptor's JSON field.
@Test
public void testBuildJsonMessageList() {
    String jsonParam = "{\"data\":[{\"text\":\"hello\"}, {\"text\":\"world\"}]}\n";
    List<DynamicMessage> jsonMessageList = JsonMessage.buildJsonMessageList(GsonUtils.getInstance().toObjectMap(jsonParam));
    assertEquals(2, jsonMessageList.size());
    DynamicMessage jsonMessage = jsonMessageList.get(0);
    assertEquals(GrpcConstants.JSON_DESCRIPTOR_PROTO_NAME, jsonMessage.getDescriptorForType().getFullName());
    Descriptors.FieldDescriptor fieldDescriptor =
        jsonMessage.getDescriptorForType().findFieldByName(GrpcConstants.JSON_DESCRIPTOR_PROTO_FIELD_NAME);
    assertTrue(jsonMessage.hasField(fieldDescriptor));
    String field = (String) jsonMessage.getField(fieldDescriptor);
    assertEquals("{\"text\":\"hello\"}", field);
    jsonMessage = jsonMessageList.get(1);
    assertEquals(GrpcConstants.JSON_DESCRIPTOR_PROTO_NAME, jsonMessage.getDescriptorForType().getFullName());
    fieldDescriptor = jsonMessage.getDescriptorForType().findFieldByName(GrpcConstants.JSON_DESCRIPTOR_PROTO_FIELD_NAME);
    assertTrue(jsonMessage.hasField(fieldDescriptor));
    field = (String) jsonMessage.getField(fieldDescriptor);
    assertEquals("{\"text\":\"world\"}", field);
}
/**
 * Formats a byte count using SI (decimal, powers of 1000) units, e.g.
 * "5.0 kB" or "1.2 TB". Values below 1000 bytes are reported verbatim
 * with singular/plural handling.
 */
public static String humanReadableByteCountSI(long bytes) {
    if (bytes > -1000 && bytes < 1000) {
        // Small magnitudes: no unit prefix, just "byte(s)".
        return bytes == 1 ? bytes + " byte" : bytes + " bytes";
    }
    // Scale down by powers of 1000, advancing through the SI prefixes.
    // The 999_950 bound makes 999 950+ round up into the next unit.
    CharacterIterator prefix = new StringCharacterIterator("kMGTPE");
    long scaled = bytes;
    while (scaled <= -999_950 || scaled >= 999_950) {
        scaled /= 1000;
        prefix.next();
    }
    return String.format(Locale.ENGLISH, "%.1f %cB", scaled / 1000.0, prefix.current());
}
// 1.234e12 bytes formats as terabytes, rounded to one decimal place.
@Test
public void humanReadableByteCountSI_returns_tbs() {
    assertThat(FileUtils.humanReadableByteCountSI(1_234_000_000_000L)).isEqualTo("1.2 TB");
}
/**
 * CLI entry point: parses the arguments, then processes the given data file
 * using the configuration loaded from the YAML file.
 */
public static void main(String[] argv) {
    //Use JCommander to parse the CLI args into a useful class
    CommandLineArgs args = parseCommandLineArgs(argv);
    execute(
        args.dataFile,
        configFromYaml(args.yamlConfig)
    );
}
// Disabled end-to-end run over a large raw-data file; kept for manual use.
@Disabled
@Test
public void mainMethodProcessesOneFile() throws IOException {
    // This test tasks about 10 minutes to run (it processes, 1.2M RH messages)
    // Ideally it would be classified as a "large test" and be run less often (e.g. pre-PR but not in the main-development loop)
    String[] args = new String[]{
        "-c", "src/test/resources/processOneFile.yaml",
        "-f", "/users/jiparker/rawData/data-from-2021-05-30/STARS_D10_RH_20210530.txt.gz"
    };
    assertDoesNotThrow(
        () -> RunAirborneOnFile.main(args)
    );
    // Clean up the artifacts the run leaves behind.
    File targetAvroFile = new File("airborneEvents.avro");
    if (targetAvroFile.exists()) {
        targetAvroFile.delete();
    }
    File eventDir = new File("myEventsGoHere");
    Files.deleteIfExists(eventDir.toPath());
    // //Uses org.apache.commons.io.FileUtils
    // if (eventDir.exists()) {
    //     FileUtils.deleteDirectory(eventDir);
    // }
}
/**
 * Orders URIs lexicographically by delegating to the underlying URI's
 * compareTo.
 */
@Override
public int compareTo(AlluxioURI other) {
    return mUri.compareTo(other.mUri);
}
// The array below is listed in strictly ascending order (by scheme, then
// authority, path and query); adjacent pairs must compare accordingly, and
// every element must compare equal to itself.
@Test
public void compareToTests() {
    AlluxioURI[] uris =
        new AlluxioURI[] {new AlluxioURI("file://127.0.0.0:8081/a/b/c.txt"),
            new AlluxioURI("glusterfs://127.0.0.0:8081/a/b/c.txt"),
            new AlluxioURI("hdfs://127.0.0.0:8081/a/b/c.txt"),
            new AlluxioURI("hdfs://127.0.0.1:8081/a/b/c.txt"),
            new AlluxioURI("hdfs://127.0.0.1:8081/a/b/d.txt"),
            new AlluxioURI("hdfs://127.0.0.1:8081/a/c/c.txt"),
            new AlluxioURI("hdfs://127.0.0.1:8082/a/c/c.txt"),
            new AlluxioURI("hdfs://localhost:8080/a/b/c.txt"),
            new AlluxioURI("s3://localhost:8080/a/b/c.txt"),
            new AlluxioURI("scheme://localhost:8080/a.txt"),
            new AlluxioURI("scheme://localhost:8080/a.txt?a=a"),
            new AlluxioURI("scheme://localhost:8080/a.txt?b=b"),
            new AlluxioURI("scheme://localhost:8080/a.txt?c=c"),
            new AlluxioURI("scheme:scheme://localhost:8080/a.txt"),
            new AlluxioURI("scheme:scheme://localhost:8080/b.txt"),
            new AlluxioURI("scheme:schemeB://localhost:8080/a.txt"),
            new AlluxioURI("scheme:schemeB://localhost:8080/b.txt"),
            new AlluxioURI("schemeA:scheme://localhost:8080/a.txt"),
            new AlluxioURI("schemeA:scheme://localhost:8080/b.txt"),
            new AlluxioURI("schemeA:schemeB:schemeC://localhost:8080/a.txt"),
            new AlluxioURI("schemeA:schemeB:schemeC://localhost:8080/b.txt"),
            new AlluxioURI("schemeA:schemeB:schemeD://localhost:8080/a.txt"),
            new AlluxioURI("schemeA:schemeB:schemeD://localhost:8080/b.txt"),
            new AlluxioURI("schemeE:schemeB:schemeB://localhost:8080/a.txt"),
            new AlluxioURI("schemeE:schemeB:schemeB://localhost:8080/b.txt"),
        };
    for (int i = 0; i < uris.length - 1; i++) {
        assertTrue(uris[i].compareTo(uris[i + 1]) < 0);
        assertTrue(uris[i + 1].compareTo(uris[i]) > 0);
        assertEquals(0, uris[i].compareTo(uris[i]));
    }
}
/**
 * Lists the directory using the chunk size configured for this host
 * ("eue.listing.chunksize").
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("eue.listing.chunksize"));
}
// Listing the root must contain a freshly created folder (with matching
// attributes and file id), plus the trash folder; created folder is
// removed again at the end.
@Test
public void testListRoot() throws Exception {
    final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
    final Path root = new Path("/", EnumSet.of(directory));
    final Path folder = new EueDirectoryFeature(session, fileid).mkdir(
        new Path(root, new AlphanumericRandomStringService().random(), EnumSet.of(directory)), new TransferStatus());
    final AttributedList<Path> list = new EueListService(session, fileid).list(folder.getParent(), new DisabledListProgressListener());
    assertNotNull(list.find(new SimplePathPredicate(folder)));
    // Every listed entry's attributes must match a direct attribute lookup.
    for(Path bucket : list) {
        assertEquals(bucket.attributes(), new EueAttributesFinderFeature(session, fileid).find(bucket, new DisabledListProgressListener()));
    }
    assertNotNull(list.find(f -> f.attributes().getFileId().equals(EueResourceIdProvider.TRASH)));
    assertTrue(list.contains(new Path("Gelöschte Dateien", EnumSet.of(directory)).withAttributes(new PathAttributes().withFileId("TRASH"))));
    assertEquals(folder.attributes().getFileId(), list.find(new SimplePathPredicate(folder)).attributes().getFileId());
    assertSame(root, list.find(new SimplePathPredicate(folder)).getParent());
    new EueDeleteFeature(session, fileid).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Producer-side filter: copies headers and attachments from the Camel
 * exchange onto the outgoing web service message. A null exchange is a
 * no-op.
 */
@Override
public void filterProducer(Exchange exchange, WebServiceMessage response) {
    if (exchange != null) {
        processHeaderAndAttachments(exchange.getIn(AttachmentMessage.class), response);
    }
}
// Header attributes and QName-valued headers from the exchange must be
// copied into the SOAP header; no attachments are produced.
@Test
public void producerWithHeader() throws Exception {
    // foo is already in the header.in from the parent ExchangeTestSupport
    exchange.getIn().getHeaders().put("headerAttributeKey", "testAttributeValue");
    exchange.getIn().getHeaders().put("headerAttributeElement", new QName("http://shouldBeInHeader", "myElement"));
    filter.filterProducer(exchange, message);
    Assertions.assertThat(message.getAttachments()).isEmpty();
    Assertions.assertThat(message.getSoapHeader().examineAllHeaderElements()).isNotEmpty().hasSize(1);
    Assertions.assertThat(message.getSoapHeader().getAllAttributes()).isNotEmpty().hasSize(2);
}
public Map<TopicPartition, Long> endOffsets(Set<TopicPartition> partitions) { if (partitions == null || partitions.isEmpty()) { return Collections.emptyMap(); } Map<TopicPartition, OffsetSpec> offsetSpecMap = partitions.stream().collect(Collectors.toMap(Function.identity(), tp -> OffsetSpec.latest())); ListOffsetsResult resultFuture = admin.listOffsets(offsetSpecMap, new ListOffsetsOptions(IsolationLevel.READ_UNCOMMITTED)); // Get the individual result for each topic partition so we have better error messages Map<TopicPartition, Long> result = new HashMap<>(); for (TopicPartition partition : partitions) { try { ListOffsetsResultInfo info = resultFuture.partitionResult(partition).get(); result.put(partition, info.offset()); } catch (ExecutionException e) { Throwable cause = e.getCause(); String topic = partition.topic(); if (cause instanceof AuthorizationException) { String msg = String.format("Not authorized to get the end offsets for topic '%s' on brokers at %s", topic, bootstrapServers); throw new ConnectException(msg, cause); } else if (cause instanceof UnsupportedVersionException) { // Should theoretically never happen, because this method is the same as what the consumer uses and therefore // should exist in the broker since before the admin client was added String msg = String.format("API to get the get the end offsets for topic '%s' is unsupported on brokers at %s", topic, bootstrapServers); throw new UnsupportedVersionException(msg, cause); } else if (cause instanceof TimeoutException) { String msg = String.format("Timed out while waiting to get end offsets for topic '%s' on brokers at %s", topic, bootstrapServers); throw new TimeoutException(msg, cause); } else if (cause instanceof LeaderNotAvailableException) { String msg = String.format("Unable to get end offsets during leader election for topic '%s' on brokers at %s", topic, bootstrapServers); throw new LeaderNotAvailableException(msg, cause); } else if (cause instanceof 
org.apache.kafka.common.errors.RetriableException) { throw (org.apache.kafka.common.errors.RetriableException) cause; } else { String msg = String.format("Error while getting end offsets for topic '%s' on brokers at %s", topic, bootstrapServers); throw new ConnectException(msg, cause); } } catch (InterruptedException e) { Thread.interrupted(); String msg = String.format("Interrupted while attempting to read end offsets for topic '%s' on brokers at %s", partition.topic(), bootstrapServers); throw new RetriableException(msg, e); } } return result; }
// NOTE(review): despite the name ("...IsNull"), this actually exercises the
// empty-set case — Collections.emptySet() is passed, not null.
@Test
public void endOffsetsShouldReturnEmptyMapWhenPartitionsSetIsNull() {
    String topicName = "myTopic";
    Cluster cluster = createCluster(1, topicName, 1);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(new MockTime(), cluster)) {
        TopicAdmin admin = new TopicAdmin(env.adminClient());
        Map<TopicPartition, Long> offsets = admin.endOffsets(Collections.emptySet());
        assertTrue(offsets.isEmpty());
    }
}
/**
 * Tells whether the given endpoint URL matches the WebSocket endpoint
 * pattern; null is never accepted.
 * NOTE(review): String#matches recompiles the regex on every call; a
 * pre-compiled static Pattern would avoid that if this is on a hot path.
 */
public static boolean acceptEndpoint(String endpointUrl) {
    return endpointUrl != null && endpointUrl.matches(ENDPOINT_PATTERN_STRING);
}
// ws:// URLs with and without paths must be accepted.
@Test
public void testAcceptEndpoint() {
    assertTrue(WebSocketMessageConsumptionTask.acceptEndpoint("ws://localhost:4000/"));
    assertTrue(WebSocketMessageConsumptionTask.acceptEndpoint("ws://localhost:4000/websocket"));
    assertTrue(WebSocketMessageConsumptionTask
        .acceptEndpoint("ws://microcks-ws.example.com/api/ws/Service/1.0.0/channel/sub/path"));
}
/**
 * Sets the organization name of this model.
 */
public void setOrganization(String organization) {
    this.organization = organization;
}
// The setter must store the value verbatim for the getter to return.
@Test
public void testSetOrganization() {
    String organization = "apache";
    Model instance = new Model();
    instance.setOrganization(organization);
    assertEquals("apache", instance.getOrganization());
}
/**
 * Converts catalog column statistics into a map of ColumnStats, skipping
 * columns whose statistics entry is null (they carry no information).
 */
@VisibleForTesting
public static Map<String, ColumnStats> convertToColumnStatsMap(
        Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData) {
    Map<String, ColumnStats> result = new HashMap<>();
    columnStatisticsData.forEach((column, stats) -> {
        if (stats != null) {
            result.put(column, convertToColumnStats(stats));
        }
    });
    return result;
}
// A column with null statistics must be dropped from the converted map.
@Test
void testConvertToColumnStatsMapWithNullColumnStatisticsData() {
    Map<String, CatalogColumnStatisticsDataBase> columnStatisticsDataBaseMap = new HashMap<>();
    columnStatisticsDataBaseMap.put(
        "first", new CatalogColumnStatisticsDataString(10L, 5.2, 3L, 100L));
    columnStatisticsDataBaseMap.put("second", null);
    Map<String, ColumnStats> columnStatsMap =
        CatalogTableStatisticsConverter.convertToColumnStatsMap(
            columnStatisticsDataBaseMap);
    assertThat(columnStatsMap).isNotNull();
    assertThat(columnStatsMap).hasSize(columnStatisticsDataBaseMap.size() - 1);
    assertThat(columnStatsMap).containsKey("first");
    assertThat(columnStatsMap).doesNotContainKey("second");
}
public static int calculateFor(final ConnectionSession connectionSession) { int result = 0; result |= connectionSession.isAutoCommit() ? MySQLStatusFlag.SERVER_STATUS_AUTOCOMMIT.getValue() : 0; result |= connectionSession.getTransactionStatus().isInTransaction() ? MySQLStatusFlag.SERVER_STATUS_IN_TRANS.getValue() : 0; return result; }
// Autocommit on, no transaction: only the AUTOCOMMIT flag must be set.
@Test
void assertAutoCommitNotInTransaction() {
    when(connectionSession.isAutoCommit()).thenReturn(true);
    assertThat(ServerStatusFlagCalculator.calculateFor(connectionSession), is(MySQLStatusFlag.SERVER_STATUS_AUTOCOMMIT.getValue()));
}
/**
 * Tracks smelting activity from SPAM chat messages: bar retrievals and
 * cannonball mould completions update the current smelting session.
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    if (event.getType() != ChatMessageType.SPAM) {
        return;
    }
    if (event.getMessage().startsWith("You retrieve a bar of")) {
        // Lazily start a session on the first smelted bar.
        if (session == null) {
            session = new SmeltingSession();
        }
        session.increaseBarsSmelted();
    } else if (event.getMessage().endsWith(" to form 8 cannonballs.")) {
        // Remember the mould yield; it is counted when the balls are removed.
        cannonBallsMade = 8;
    } else if (event.getMessage().endsWith(" to form 4 cannonballs.")) {
        cannonBallsMade = 4;
    } else if (event.getMessage().startsWith("You remove the cannonballs from the mould")) {
        if (session == null) {
            session = new SmeltingSession();
        }
        session.increaseCannonBallsSmelted(cannonBallsMade);
    }
}
// A 4-ball ammo-mould message followed by the removal message must record
// four cannonballs in the session.
@Test
public void testCannonballsAmmoMould() {
    ChatMessage chatMessageAmmoMould = new ChatMessage(null, ChatMessageType.SPAM, "", SMELT_CANNONBALL_AMMO_MOULD, "", 0);
    smeltingPlugin.onChatMessage(chatMessageAmmoMould);
    ChatMessage chatMessageDone = new ChatMessage(null, ChatMessageType.SPAM, "", SMELT_CANNONBALL_DONE_MESSAGE, "", 0);
    smeltingPlugin.onChatMessage(chatMessageDone);
    SmeltingSession smeltingSession = smeltingPlugin.getSession();
    assertNotNull(smeltingSession);
    assertEquals(4, smeltingSession.getCannonBallsSmelted());
}
/**
 * Imports a namespace image from the configured checkpoint directories into
 * the target namesystem, then saves it to the real storage directories.
 *
 * @param target namesystem whose FSImage receives the imported state
 * @throws IOException if checkpoint dirs are unset or loading/saving fails
 */
void doImportCheckpoint(FSNamesystem target) throws IOException {
    Collection<URI> checkpointDirs = FSImage.getCheckpointDirs(conf, null);
    List<URI> checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, null);
    // Both checkpoint image and edits locations must be configured explicitly.
    if (checkpointDirs == null || checkpointDirs.isEmpty()) {
        throw new IOException("Cannot import image from a checkpoint. "
                + "\"dfs.namenode.checkpoint.dir\" is not set.");
    }
    if (checkpointEditsDirs == null || checkpointEditsDirs.isEmpty()) {
        throw new IOException("Cannot import image from a checkpoint. "
                + "\"dfs.namenode.checkpoint.edits.dir\" is not set.");
    }
    FSImage realImage = target.getFSImage();
    FSImage ckptImage = new FSImage(conf, checkpointDirs, checkpointEditsDirs);
    // load from the checkpoint dirs
    try {
        ckptImage.recoverTransitionRead(StartupOption.REGULAR, target, null);
    } finally {
        // Always release the checkpoint image's storage locks/streams.
        ckptImage.close();
    }
    // return back the real image
    realImage.getStorage().setStorageInfo(ckptImage.getStorage());
    // Continue transaction ids after the last one written by the checkpoint.
    realImage.getEditLog().setNextTxId(ckptImage.getEditLog().getLastWrittenTxId() + 1);
    realImage.initEditLog(StartupOption.IMPORT);
    realImage.getStorage().setBlockPoolID(ckptImage.getBlockPoolID());
    // and save it but keep the same checkpointTime
    saveNamespace(target);
    updateStorageVersion();
}
// An empty checkpoint-edits-dir configuration must make the import fail
// fast with a descriptive IOException.
@Test
public void testImportCheckpoint() throws Exception {
    Configuration conf = new Configuration();
    conf.set(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY, "");
    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
        cluster.waitActive();
        FSNamesystem fsn = cluster.getNamesystem();
        FSImage fsImage = new FSImage(conf);
        LambdaTestUtils.intercept(
                IOException.class,
                "Cannot import image from a checkpoint. "
                        + "\"dfs.namenode.checkpoint.edits.dir\" is not set.",
                () -> fsImage.doImportCheckpoint(fsn));
    }
}
// Resolves all values reachable from the record at {@code ordinal} by
// walking the configured field path starting at position 0.
Object[] findValues(int ordinal) {
    return getAllValues(ordinal, type, 0);
}
// A field path into a list of object references should resolve element
// values both with partial auto-expansion ("element") and with the fully
// spelled-out path ("element.id").
@Test
public void testListObjectReference() throws Exception {
    ListObjectReference listType = new ListObjectReference();
    SimpleValue val1 = new SimpleValue();
    val1.id = 1;
    SimpleValue val2 = new SimpleValue();
    val2.id = 2;
    listType.intValues = Arrays.asList(val1, val2);
    objectMapper.add(listType);
    StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine);
    FieldPath fieldPath;
    Object[] values;
    //with partial auto expand
    fieldPath = new FieldPath(readStateEngine, "ListObjectReference", "intValues.element");
    values = fieldPath.findValues(0);
    Assert.assertEquals(1, (int) values[0]);
    Assert.assertEquals(2, (int) values[1]);
    //without auto expand
    fieldPath = new FieldPath(readStateEngine, "ListObjectReference", "intValues.element.id");
    values = fieldPath.findValues(0);
    Assert.assertEquals(1, (int) values[0]);
    Assert.assertEquals(2, (int) values[1]);
}
/**
 * Encodes the given instruction to a JSON object node, delegating the
 * per-type encoding to {@link EncodeInstructionCodecHelper}.
 */
@Override
public ObjectNode encode(Instruction instruction, CodecContext context) {
    checkNotNull(instruction, "Instruction cannot be null");
    return new EncodeInstructionCodecHelper(instruction, context).encode();
}
// An IPv4 destination-modification instruction must round-trip into a JSON
// representation that matches the original instruction.
@Test
public void modIPDstInstructionTest() {
    final Ip4Address ip = Ip4Address.valueOf("1.2.3.4");
    final L3ModificationInstruction.ModIPInstruction instruction =
            (L3ModificationInstruction.ModIPInstruction) Instructions.modL3Dst(ip);
    final ObjectNode instructionJson = instructionCodec.encode(instruction, context);
    assertThat(instructionJson, matchesInstruction(instruction));
}
/**
 * A vertex's input is consumable only when every one of its consumed
 * partition groups is consumable; per-group results are memoized in the
 * shared cache so each group is evaluated at most once.
 */
@Override
public boolean isInputConsumable(
        SchedulingExecutionVertex executionVertex,
        Set<ExecutionVertexID> verticesToDeploy,
        Map<ConsumedPartitionGroup, Boolean> consumableStatusCache) {
    for (ConsumedPartitionGroup partitionGroup :
            executionVertex.getConsumedPartitionGroups()) {
        final boolean groupConsumable =
                consumableStatusCache.computeIfAbsent(
                        partitionGroup, this::isConsumableBasedOnFinishedProducers);
        if (!groupConsumable) {
            // Short-circuit: one non-consumable group blocks the vertex.
            return false;
        }
    }
    return true;
}
// Hybrid-full result partitions whose producers have not finished must make
// the input non-consumable for every consumer.
@Test
void testNotFinishedHybridInput() {
    final TestingSchedulingTopology topology = new TestingSchedulingTopology();
    final List<TestingSchedulingExecutionVertex> producers =
            topology.addExecutionVertices().withParallelism(2).finish();
    final List<TestingSchedulingExecutionVertex> consumer =
            topology.addExecutionVertices().withParallelism(2).finish();
    topology.connectAllToAll(producers, consumer)
            .withResultPartitionState(ResultPartitionState.CREATED)
            .withResultPartitionType(ResultPartitionType.HYBRID_FULL)
            .finish();
    PartialFinishedInputConsumableDecider inputConsumableDecider =
            createPartialFinishedInputConsumableDecider();
    assertThat(
                    inputConsumableDecider.isInputConsumable(
                            consumer.get(0), Collections.emptySet(), new HashMap<>()))
            .isFalse();
    assertThat(
                    inputConsumableDecider.isInputConsumable(
                            consumer.get(1), Collections.emptySet(), new HashMap<>()))
            .isFalse();
}
/**
 * Returns a page of mail send logs matching the given query conditions.
 * Thin delegate to the mapper, which applies all filtering and paging.
 */
@Override
public PageResult<MailLogDO> getMailLogPage(MailLogPageReqVO pageVO) {
    return mailLogMapper.selectPage(pageVO);
}
// Inserts one matching log plus seven near-misses (each differing in a
// single query field) and verifies the page query returns only the match.
@Test
public void testGetMailLogPage() {
    // mock data
    MailLogDO dbMailLog = randomPojo(MailLogDO.class, o -> { // will be matched by the query below
        o.setUserId(1L);
        o.setUserType(UserTypeEnum.ADMIN.getValue());
        o.setToMail("768@qq.com");
        o.setAccountId(10L);
        o.setTemplateId(100L);
        o.setSendStatus(MailSendStatusEnum.INIT.getStatus());
        o.setSendTime(buildTime(2023, 2, 10));
        o.setTemplateParams(randomTemplateParams());
    });
    mailLogMapper.insert(dbMailLog);
    // record whose userId does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setUserId(2L)));
    // record whose userType does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setUserType(UserTypeEnum.MEMBER.getValue())));
    // record whose toMail does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setToMail("788@.qq.com")));
    // record whose accountId does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setAccountId(11L)));
    // record whose templateId does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setTemplateId(101L)));
    // record whose sendStatus does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setSendStatus(MailSendStatusEnum.SUCCESS.getStatus())));
    // record whose sendTime does not match
    mailLogMapper.insert(cloneIgnoreId(dbMailLog, o -> o.setSendTime(buildTime(2023, 3, 10))));
    // prepare the query parameters
    MailLogPageReqVO reqVO = new MailLogPageReqVO();
    reqVO.setUserId(1L);
    reqVO.setUserType(UserTypeEnum.ADMIN.getValue());
    reqVO.setToMail("768");
    reqVO.setAccountId(10L);
    reqVO.setTemplateId(100L);
    reqVO.setSendStatus(MailSendStatusEnum.INIT.getStatus());
    reqVO.setSendTime((buildBetweenTime(2023, 2, 1, 2023, 2, 15)));
    // invoke
    PageResult<MailLogDO> pageResult = mailLogService.getMailLogPage(reqVO);
    // assert: only the fully matching record is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbMailLog, pageResult.getList().get(0));
}
/**
 * Resolves an RPC logger for the given name within the RPC log space,
 * tagged with the application name when one is provided.
 *
 * @param name    logger name; {@code null}/empty yields {@code null}
 * @param appname application name used as a space tag; may be {@code null}
 * @return the space-scoped logger, or {@code null} for an empty name
 */
public static org.slf4j.Logger getLogger(String name, String appname) {
    // Read the rpc logging configuration from "com/alipay/sofa/rpc/log" and
    // resolve the matching logger instance; "log" is the default suffix added.
    if (name == null || name.isEmpty()) {
        return null;
    }
    Map<String, String> properties = new HashMap<String, String>();
    // Always populate APPNAME so downstream templating never sees null.
    properties.put(APPNAME, appname == null ? "" : appname);
    SpaceId spaceId = new SpaceId(RPC_LOG_SPACE);
    if (appname != null) {
        // Distinct appnames get distinct logger spaces.
        spaceId.withTag(APPNAME, appname);
    }
    return LoggerSpaceManager.getLoggerBySpace(name, spaceId, properties);
}
// Verifies null-name handling, per-(name, appname) caching, and that
// different appnames resolve to different loggers.
@Test
public void getLogger() throws Exception {
    Assert.assertNull(RpcLoggerFactory.getLogger(null, "appname1"));
    Logger logger1 = RpcLoggerFactory.getLogger("xxx", "appname1");
    Assert.assertNotNull(logger1);
    Logger logger2 = RpcLoggerFactory.getLogger("xxx", "appname1");
    // Fixed copy-paste bug: previously asserted logger1 non-null twice,
    // leaving logger2 unchecked.
    Assert.assertNotNull(logger2);
    // Same name + appname must resolve to the same logger instance.
    Assert.assertEquals(logger1, logger2);
    Logger logger3 = RpcLoggerFactory.getLogger("xxx", "appname2");
    // Different appname must yield a different logger.
    Assert.assertNotEquals(logger1, logger3);
}
/**
 * Builds a single change event for the given property and notifies every
 * registered change handler with it.
 */
public void onGlobalPropertyChange(String key, @Nullable String value) {
    final GlobalPropertyChangeHandler.PropertyChange propertyChange =
            GlobalPropertyChangeHandler.PropertyChange.create(key, value);
    for (GlobalPropertyChangeHandler handler : changeHandlers) {
        handler.onChange(propertyChange);
    }
}
// Notifying with zero registered handlers must be a harmless no-op.
@Test
public void no_handlers() {
    SettingsChangeNotifier notifier = new SettingsChangeNotifier();
    assertThat(notifier.changeHandlers).isEmpty();
    // does not fail
    notifier.onGlobalPropertyChange("foo", "bar");
}
/**
 * Returns an aggregate operation that collects items into a list and sorts
 * it with the given comparator when the result is exported or finished.
 *
 * @param comparator ordering applied to the result; must be serializable
 * @param <T> item type
 */
public static <T> AggregateOperation1<T, ArrayList<T>, List<T>> sorting(
        @Nonnull ComparatorEx<? super T> comparator
) {
    checkSerializable(comparator, "comparator");
    return AggregateOperation
            .withCreate(ArrayList<T>::new)
            .<T>andAccumulate(ArrayList::add)
            .andCombine(ArrayList::addAll)
            .andExport(list -> {
                // sorting the accumulator doesn't harm - will make the next sort an easier job
                list.sort(comparator);
                // Export must not alias the live accumulator, hence the copy.
                return (List<T>) new ArrayList<>(list);
            })
            .andFinish(list -> {
                // Finish may hand out the accumulator itself: sort in place.
                list.sort(comparator);
                return list;
            });
}
// Exercises the sorting aggregate operation through the shared validation
// harness (no deduct support): accumulating 2 then 1 must export/finish [1, 2].
@Test
public void when_sorting() {
    validateOpWithoutDeduct(
            sorting(naturalOrder()), identity(), 2, 1, singletonList(2), asList(2, 1), asList(1, 2)
    );
}
/**
 * Global uncaught-exception hook: filters known OS bugs, builds a detailed
 * crash report (exception, device info, memory stats for OOMs, log-cat),
 * writes it to a private file, and then forwards the exception to the
 * previously installed OS handler so default crash handling still occurs.
 */
@Override
public void uncaughtException(@NonNull Thread thread, Throwable ex) {
    ex.printStackTrace();
    Logger.e(TAG, "Caught an unhandled exception!!!", ex);
    // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/15
    // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/433
    final String stackTrace = Logger.getStackTrace(ex);
    if (ex instanceof NullPointerException) {
        // Known framework NPEs inside the input-method wrappers: ignore.
        if (stackTrace.contains(
                        "android.inputmethodservice.IInputMethodSessionWrapper.executeMessage(IInputMethodSessionWrapper.java")
                || stackTrace.contains(
                        "android.inputmethodservice.IInputMethodWrapper.executeMessage(IInputMethodWrapper.java")) {
            Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
            return;
        }
    } else if (ex instanceof java.util.concurrent.TimeoutException
            && stackTrace.contains(".finalize")) {
        // Finalizer watchdog timeout — another known OS-level issue.
        Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
        return;
    }
    StringBuilder reportMessage = new StringBuilder();
    // NOTE(review): the opening literal was garbled across lines in extraction;
    // reconstructed here as a single literal — confirm against the original file.
    reportMessage
            .append("Hi. It seems that we have crashed.... Here are some details:")
            .append(NEW_LINE)
            .append("****** UTC Time: ")
            .append(DateFormat.format("kk:mm:ss dd.MM.yyyy", System.currentTimeMillis()))
            .append(NEW_LINE)
            .append("****** Application name: ")
            .append(getAppDetails())
            .append(NEW_LINE)
            .append("******************************")
            .append(NEW_LINE)
            .append(ex.getClass().getName())
            .append(NEW_LINE)
            .append("****** Exception message: ")
            .append(ex.getMessage())
            .append(NEW_LINE)
            .append(HEADER_BREAK_LINE)
            .append(NEW_LINE)
            // NOTE(review): "Trace trace" looks like a typo for "Stack trace";
            // left untouched here since it is runtime report text.
            .append("****** Trace trace:")
            .append(NEW_LINE)
            .append(stackTrace)
            .append(NEW_LINE)
            .append("******************************")
            .append(NEW_LINE)
            .append("****** Device information:")
            .append(NEW_LINE)
            .append(ChewbaccaUtils.getSysInfo(mApp))
            .append(NEW_LINE);
    // Include memory stats when the crash is (or wraps) an OOM.
    if (ex instanceof OutOfMemoryError
            || (ex.getCause() != null && ex.getCause() instanceof OutOfMemoryError)) {
        reportMessage
                .append("******************************")
                .append(NEW_LINE)
                .append("****** Memory: ")
                .append(Runtime.getRuntime().totalMemory())
                .append(NEW_LINE)
                .append("Free: ")
                .append(Runtime.getRuntime().freeMemory())
                .append(NEW_LINE)
                .append("Max: ")
                .append(Runtime.getRuntime().maxMemory())
                .append(NEW_LINE);
    }
    reportMessage
            .append("******************************")
            .append(NEW_LINE)
            .append("****** Log-Cat: ")
            .append(NEW_LINE)
            .append(Logger.getAllLogLines())
            .append(NEW_LINE);
    // Persist the report; failure to write must not block OS crash handling.
    try (OutputStreamWriter writer =
            new OutputStreamWriter(
                    mApp.openFileOutput(NEW_CRASH_FILENAME, Context.MODE_PRIVATE),
                    Charset.forName("UTF-8"))) {
        writer.write(reportMessage.toString());
        Logger.i(TAG, "Wrote crash report to %s.", NEW_CRASH_FILENAME);
        Logger.d(TAG, "Crash report:");
        for (String line : TextUtils.split(reportMessage.toString(), NEW_LINE)) {
            Logger.d(TAG, "err: %s", line);
        }
    } catch (Exception writeEx) {
        Logger.e(TAG, writeEx, "Failed to write crash report file!");
    }
    // and sending to the OS
    if (mOsDefaultHandler != null) {
        Logger.i(TAG, "Sending the exception to OS exception handler...");
        mOsDefaultHandler.uncaughtException(thread, ex);
    }
}
// The handler must forward the exact same thread and throwable to the
// previously installed (OS) uncaught-exception handler.
@Test
public void testCallsPreviousHandler() {
    final AtomicReference<Pair<Thread, Throwable>> receiver = new AtomicReference<>();
    final Thread.UncaughtExceptionHandler handler = (t, e) -> receiver.set(Pair.create(t, e));
    TestableChewbaccaUncaughtExceptionHandler underTest =
            new TestableChewbaccaUncaughtExceptionHandler(
                    ApplicationProvider.getApplicationContext(),
                    handler,
                    Mockito.mock(NotificationDriver.class));
    Thread thread = new Thread();
    IOException exception = new IOException("an error");
    underTest.uncaughtException(thread, exception);
    // Same instances, not just equal ones.
    Assert.assertSame(thread, receiver.get().first);
    Assert.assertSame(exception, receiver.get().second);
}
public static Result find(List<Path> files, Consumer<LogEvent> logger) { List<String> mainClasses = new ArrayList<>(); for (Path file : files) { // Makes sure classFile is valid. if (!Files.exists(file)) { logger.accept(LogEvent.debug("MainClassFinder: " + file + " does not exist; ignoring")); continue; } if (!Files.isRegularFile(file)) { logger.accept( LogEvent.debug("MainClassFinder: " + file + " is not a regular file; skipping")); continue; } if (!file.toString().endsWith(".class")) { logger.accept( LogEvent.debug("MainClassFinder: " + file + " is not a class file; skipping")); continue; } MainClassVisitor mainClassVisitor = new MainClassVisitor(); try (InputStream classFileInputStream = Files.newInputStream(file)) { ClassReader reader = new ClassReader(classFileInputStream); reader.accept(mainClassVisitor, 0); if (mainClassVisitor.visitedMainClass) { mainClasses.add(reader.getClassName().replace('/', '.')); } } catch (IllegalArgumentException ex) { throw new UnsupportedOperationException( "Check the full stace trace, and if the root cause is from ASM ClassReader about " + "unsupported class file version, see " + "https://github.com/GoogleContainerTools/jib/blob/master/docs/faq.md" + "#i-am-seeing-unsupported-class-file-major-version-when-building", ex); } catch (ArrayIndexOutOfBoundsException ignored) { // Not a valid class file (thrown by ClassReader if it reads an invalid format) logger.accept(LogEvent.warn("Invalid class file found: " + file)); } catch (IOException ignored) { // Could not read class file. logger.accept(LogEvent.warn("Could not read file: " + file)); } } if (mainClasses.size() == 1) { // Valid class found. return Result.success(mainClasses.get(0)); } if (mainClasses.isEmpty()) { // No main class found anywhere. return Result.mainClassNotFound(); } // More than one main class found. return Result.multipleMainClasses(mainClasses); }
// A class whose main method delegates to imported helpers must still be
// recognized as the (single) main class.
@Test
public void testFindMainClass_importedMethods() throws URISyntaxException, IOException {
    Path rootDirectory =
            Paths.get(Resources.getResource("core/class-finder-tests/imported-methods").toURI());
    MainClassFinder.Result mainClassFinderResult =
            MainClassFinder.find(new DirectoryWalker(rootDirectory).walk(), logEventConsumer);
    Assert.assertSame(Result.Type.MAIN_CLASS_FOUND, mainClassFinderResult.getType());
    MatcherAssert.assertThat(
            mainClassFinderResult.getFoundMainClass(), CoreMatchers.containsString("main.MainClass"));
}
// Handles the ETH_NEW_FILTER operation: builds an Ethereum log filter from
// message headers (falling back to endpoint configuration), installs it via
// web3j, and places the resulting filter id in the message body unless the
// node returned an error.
@InvokeOnHeader(Web3jConstants.ETH_NEW_FILTER)
void ethNewFilter(Message message) throws IOException {
    DefaultBlockParameter fromBlock = toDefaultBlockParameter(
            message.getHeader(Web3jConstants.FROM_BLOCK, configuration::getFromBlock, String.class));
    DefaultBlockParameter toBlock =
            toDefaultBlockParameter(message.getHeader(Web3jConstants.TO_BLOCK, configuration::getToBlock, String.class));
    List<String> addresses = message.getHeader(Web3jConstants.ADDRESSES, configuration::getAddresses, List.class);
    List<String> topics = message.getHeader(Web3jConstants.TOPICS, configuration::getTopics, List.class);
    org.web3j.protocol.core.methods.request.EthFilter ethFilter =
            Web3jEndpoint.buildEthFilter(fromBlock, toBlock, addresses, topics);
    Request<?, EthFilter> request = web3j.ethNewFilter(ethFilter);
    setRequestId(message, request);
    EthFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getFilterId());
    }
}
// A successful eth_newFilter call must surface the returned filter id as
// the exchange body.
@Test
public void ethNewFilterTest() throws Exception {
    EthFilter response = Mockito.mock(EthFilter.class);
    Mockito.when(mockWeb3j.ethNewFilter(any())).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.getFilterId()).thenReturn(BigInteger.ONE);
    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_NEW_FILTER);
    template.send(exchange);
    BigInteger body = exchange.getIn().getBody(BigInteger.class);
    assertEquals(BigInteger.ONE, body);
}
/**
 * Imports playlists and playlist items from the given container into Apple
 * Music, returning an ImportResult carrying per-category counts.
 *
 * @return an error result for null input; otherwise OK with counts
 */
@Override
public ImportResult importItem(
        UUID jobId,
        IdempotentImportExecutor idempotentImportExecutor,
        TokensAndUrlAuthData authData,
        MusicContainerResource data)
        throws Exception {
    if (data == null) {
        // Nothing to do
        return new ImportResult(new AppleContentException("Null MusicContainerResource received on AppleMusicImporter::importItem"));
    }
    int playlistsCount = 0;
    int playlistItemsCount = 0;
    AppleMusicInterface musicInterface = factory
            .getOrCreateMusicInterface(jobId, authData, appCredentials, exportingService, monitor);
    // NOTE(review): assumes getPlaylists()/getPlaylistItems() never return
    // null (e.g. the resource normalizes null to empty) — confirm, since a
    // caller constructs the resource with null playlists.
    if (!data.getPlaylists().isEmpty()) {
        playlistsCount = musicInterface.importPlaylists(jobId, idempotentImportExecutor, data.getPlaylists());
    }
    if (!data.getPlaylistItems().isEmpty()) {
        playlistItemsCount = musicInterface.importMusicPlaylistItems(jobId, idempotentImportExecutor, data.getPlaylistItems());
    }
    final Map<String, Integer> counts = new ImmutableMap.Builder<String, Integer>()
            .put(AppleMusicConstants.PLAYLISTS_COUNT_DATA_NAME, playlistsCount)
            .put(AppleMusicConstants.PLAYLIST_ITEMS_COUNT_DATA_NAME, playlistItemsCount)
            .build();
    return ImportResult.OK
            .copyWithCounts(counts);
}
// A per-track failure from the Apple Music batch endpoint must be recorded
// as an error on the executor while the import result still carries counts.
@Test
public void testImportPlaylistTracksFailure() throws Exception {
    MusicPlaylistItem item1 = createTestPlaylistItem(randomString(), 1);
    List<MusicPlaylistItem> musicPlaylistItems = List.of(item1);
    setUpImportPlaylistTracksBatchResponse(musicPlaylistItems.stream().collect(
            Collectors.toMap(item -> item.getTrack().getIsrcCode(), item -> SC_BAD_REQUEST)));
    MusicContainerResource playlistItemsResource =
            new MusicContainerResource(null, musicPlaylistItems, null, null);
    final ImportResult importResult =
            appleMusicImporter.importItem(uuid, executor, authData, playlistItemsResource);
    verify(appleMusicInterface)
            .importMusicPlaylistItemsBatch(uuid.toString(), musicPlaylistItems);
    assertThat(executor.getErrors()).hasSize(musicPlaylistItems.size());
    // Fixed: assertThat(boolean-expression) with no terminal assertion is a
    // no-op in AssertJ; these two now actually assert.
    assertThat(importResult.getCounts()).isPresent();
    ErrorDetail errorDetail = executor.getErrors().iterator().next();
    assertThat(errorDetail.id()).isEqualTo(item1.toString());
    assertThat(errorDetail.exception()).contains("Failed to import playlist track");
}
/**
 * Returns whether any etcd key with the given prefix exists, using a
 * count-only prefix query.
 *
 * @param key key prefix to probe
 * @return {@code true} when at least one matching key exists
 * @throws ShenyuException if the etcd call fails or is interrupted
 */
@Override
public Boolean exists(final String key) {
    try {
        KV kvClient = etcdClient.getKVClient();
        // Count-only prefix query: avoids transferring values over the wire.
        GetOption option = GetOption.newBuilder().isPrefix(true).withCountOnly(true).build();
        GetResponse response = kvClient.get(bytesOf(key), option).get();
        return response.getCount() > 0;
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe it
        // (previously the interruption status was silently swallowed).
        Thread.currentThread().interrupt();
        throw new ShenyuException(e);
    } catch (ExecutionException e) {
        throw new ShenyuException(e);
    }
}
// A count of 1 from etcd maps to true; a failing future must be wrapped in
// a ShenyuException.
@Test
void testExists() throws ExecutionException, InterruptedException {
    final String key = "key";
    final KV kvClient = mock(KV.class);
    when(etcdClient.getKVClient()).thenReturn(kvClient);
    final GetResponse getResponse = mock(GetResponse.class);
    when(getResponse.getCount()).thenReturn(1L);
    final CompletableFuture<GetResponse> completableFuture = mock(CompletableFuture.class);
    when(completableFuture.get()).thenReturn(getResponse);
    when(kvClient.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(completableFuture);
    final Boolean result = etcdDiscoveryServiceUnderTest.exists(key);
    assertTrue(result);
    // Failure path: the checked exception is translated to ShenyuException.
    doThrow(new InterruptedException("test")).when(completableFuture).get();
    assertThrows(ShenyuException.class, () -> etcdDiscoveryServiceUnderTest.exists(key));
}
/**
 * Closes the given resource without propagating any failure; a close
 * exception is logged only. A {@code null} argument is a no-op.
 *
 * @param closeable resource to close, may be {@code null}
 */
public static void closeQuietly(Closeable closeable) {
    if (closeable == null) {
        return;
    }
    try {
        closeable.close();
    } catch (Exception e) {
        log.error("Close closeable error", e);
    }
}
// closeQuietly must tolerate both a real stream and a null argument
// without throwing.
@Test
public void testCloseQuietly() throws FileNotFoundException {
    InputStream ins = IOKitTest.class.getResourceAsStream("/application.properties");
    IOKit.closeQuietly(ins);
    IOKit.closeQuietly(null);
}
// Restores this context to a pristine state: removes the shutdown hook,
// resets all registered life-cycle components, and clears both the property
// and object registries. Order matters: the hook is detached before
// components are torn down.
public void reset() {
    removeShutdownHook();
    getLifeCycleManager().reset();
    propertyMap.clear();
    objectMap.clear();
}
// reset() must clear stored properties/objects and propagate the reset to
// every registered life-cycle component.
@Test
public void resetTest() {
    context.setName("hello");
    context.putProperty("keyA", "valA");
    context.putObject("keyA", "valA");
    Assertions.assertEquals("valA", context.getProperty("keyA"));
    Assertions.assertEquals("valA", context.getObject("keyA"));
    MockLifeCycleComponent component = new MockLifeCycleComponent();
    context.register(component);
    Assertions.assertSame(component, lifeCycleManager.getLastComponent());
    context.reset();
    // Both registries are emptied and the component saw the reset.
    Assertions.assertNull(context.getProperty("keyA"));
    Assertions.assertNull(context.getObject("keyA"));
    Assertions.assertTrue(lifeCycleManager.isReset());
}
// REST endpoint: deletes the dashboard identified by the path variable.
// Requires TENANT_ADMIN authority; access is validated via
// checkDashboardId(..., Operation.DELETE) before the service-layer delete.
@ApiOperation(value = "Delete the Dashboard (deleteDashboard)",
        notes = "Delete the Dashboard." + TENANT_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('TENANT_ADMIN')")
@RequestMapping(value = "/dashboard/{dashboardId}", method = RequestMethod.DELETE)
@ResponseStatus(value = HttpStatus.OK)
public void deleteDashboard(
        @Parameter(description = DASHBOARD_ID_PARAM_DESCRIPTION)
        @PathVariable(DASHBOARD_ID) String strDashboardId) throws ThingsboardException {
    checkParameter(DASHBOARD_ID, strDashboardId);
    DashboardId dashboardId = new DashboardId(toUUID(strDashboardId));
    Dashboard dashboard = checkDashboardId(dashboardId, Operation.DELETE);
    tbDashboardService.delete(dashboard, getCurrentUser());
}
// End-to-end delete: create a dashboard, delete it over REST, verify the
// DELETED notification fired once, and confirm subsequent GETs 404.
@Test
public void testDeleteDashboard() throws Exception {
    Dashboard dashboard = new Dashboard();
    dashboard.setTitle("My dashboard");
    Dashboard savedDashboard = doPost("/api/dashboard", dashboard, Dashboard.class);

    Mockito.reset(tbClusterService, auditLogService);

    doDelete("/api/dashboard/" + savedDashboard.getId().getId().toString()).andExpect(status().isOk());

    testNotifyEntityAllOneTime(savedDashboard, savedDashboard.getId(), savedDashboard.getId(),
            savedDashboard.getTenantId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(),
            ActionType.DELETED, savedDashboard.getId().getId().toString());

    String dashboardIdStr = savedDashboard.getId().getId().toString();
    doGet("/api/dashboard/" + savedDashboard.getId().getId().toString())
            .andExpect(status().isNotFound())
            .andExpect(statusReason(containsString(msgErrorNoFound("Dashboard", dashboardIdStr))));
}
// ST_Geometries: returns NULL for an empty geometry, a single-element array
// for a non-multitype geometry, and one array element per member of a
// multitype/collection geometry (top level only; nested collections are
// kept intact as single elements).
@SqlNullable
@Description("Returns an array of geometries in the specified collection")
@ScalarFunction("ST_Geometries")
@SqlType("array(" + GEOMETRY_TYPE_NAME + ")")
public static Block stGeometries(@SqlType(GEOMETRY_TYPE_NAME) Slice input) {
    Geometry geometry = deserialize(input);
    if (geometry.isEmpty()) {
        return null;
    }

    GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
    if (!type.isMultitype()) {
        // Simple geometry: wrap it in a one-element array.
        BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, 1);
        GEOMETRY.writeSlice(blockBuilder, serialize(geometry));
        return blockBuilder.build();
    }

    GeometryCollection collection = (GeometryCollection) geometry;
    BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, collection.getNumGeometries());
    for (int i = 0; i < collection.getNumGeometries(); i++) {
        GEOMETRY.writeSlice(blockBuilder, serialize(collection.getGeometryN(i)));
    }
    return blockBuilder.build();
}
// Covers all ST_Geometries branches: empty input (NULL), simple geometries
// (singleton array), multitypes (one entry per member), and geometry
// collections including a nested collection kept as a single element.
@Test
public void testSTGeometries() {
    assertFunction("ST_Geometries(ST_GeometryFromText('POINT EMPTY'))", new ArrayType(GEOMETRY), null);
    assertSTGeometries("POINT (1 5)", "POINT (1 5)");
    assertSTGeometries("LINESTRING (77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)",
            "LINESTRING (77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)");
    assertSTGeometries("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", "POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))");
    assertSTGeometries("MULTIPOINT (1 2, 4 8, 16 32)", "POINT (1 2)", "POINT (4 8)", "POINT (16 32)");
    assertSTGeometries("MULTILINESTRING ((1 1, 2 2))", "LINESTRING (1 1, 2 2)");
    assertSTGeometries("MULTIPOLYGON (((0 0, 0 1, 1 1, 1 0, 0 0)), ((1 1, 3 1, 3 3, 1 3, 1 1)))",
            "POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", "POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))");
    assertSTGeometries("GEOMETRYCOLLECTION (POINT (2 3), LINESTRING (2 3, 3 4))",
            "POINT (2 3)", "LINESTRING (2 3, 3 4)");
    assertSTGeometries("GEOMETRYCOLLECTION(MULTIPOINT(0 0, 1 1), GEOMETRYCOLLECTION(MULTILINESTRING((2 2, 3 3))))",
            "MULTIPOINT ((0 0), (1 1))", "GEOMETRYCOLLECTION (MULTILINESTRING ((2 2, 3 3)))");
}
// Returns the source identifier captured at construction time.
@Override
public String getSource() {
    return source;
}
// The accessor must echo back the source value the fixture was built with.
@Test
public void testGetSource() {
    assertEquals("source", localCacheWideEventData.getSource());
}
public static Version fromString(String versionString) { checkArgument(!Strings.isNullOrEmpty(versionString)); Version version = builder().setVersionType(Type.NORMAL).setVersionString(versionString).build(); if (!EPOCH_PATTERN.matcher(versionString).matches()) { versionString = "0:" + versionString; } boolean isValid = version.segments().stream() .flatMap(segment -> segment.tokens().stream()) .anyMatch( token -> (token.isNumeric() && token.getNumeric() != 0) || (token.isText() && !token.getText().isEmpty())); if (!isValid) { throw new IllegalArgumentException( String.format( "Input version string %s is not valid, it should contain at least one non-empty" + " field.", versionString)); } return version; }
// An empty version string must be rejected up front.
@Test
public void create_whnNormalVersionAndValueIsEmpty_throwsExceptionIfStringIsNull() {
    assertThrows(IllegalArgumentException.class, () -> Version.fromString(""));
}
// Converts a Hive FieldSchema to an HCatFieldSchema by parsing the Hive
// type string into a TypeInfo and delegating, preserving name and comment.
public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
    String fieldName = fs.getName();
    TypeInfo baseTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
    return getHCatFieldSchema(fieldName, baseTypeInfo, fs.getComment());
}
// Each Hive type string (primitive, array, map, struct) must map to the
// corresponding HCat category while keeping name and comment intact.
@Test
public void testHCatFieldSchemaConversion() throws Exception {
    FieldSchema stringFieldSchema = new FieldSchema("name1", serdeConstants.STRING_TYPE_NAME, "comment1");
    HCatFieldSchema stringHCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(stringFieldSchema);
    assertEquals(stringHCatFieldSchema.getName(), "name1");
    assertEquals(stringHCatFieldSchema.getCategory(), Category.PRIMITIVE);
    assertEquals(stringHCatFieldSchema.getComment(), "comment1");

    FieldSchema listFieldSchema = new FieldSchema("name1", "array<tinyint>", "comment1");
    HCatFieldSchema listHCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(listFieldSchema);
    assertEquals(listHCatFieldSchema.getName(), "name1");
    assertEquals(listHCatFieldSchema.getCategory(), Category.ARRAY);
    assertEquals(listHCatFieldSchema.getComment(), "comment1");

    FieldSchema mapFieldSchema = new FieldSchema("name1", "map<string,int>", "comment1");
    HCatFieldSchema mapHCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(mapFieldSchema);
    assertEquals(mapHCatFieldSchema.getName(), "name1");
    assertEquals(mapHCatFieldSchema.getCategory(), Category.MAP);
    assertEquals(mapHCatFieldSchema.getComment(), "comment1");

    FieldSchema structFieldSchema = new FieldSchema("name1", "struct<s:string,i:tinyint>", "comment1");
    HCatFieldSchema structHCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(structFieldSchema);
    assertEquals(structHCatFieldSchema.getName(), "name1");
    assertEquals(structHCatFieldSchema.getCategory(), Category.STRUCT);
    assertEquals(structHCatFieldSchema.getComment(), "comment1");
}
/**
 * Resolves the master RPC addresses: explicit configuration wins; otherwise
 * the embedded-journal (Raft) addresses are reused with the configured RPC
 * port substituted in.
 */
public static List<InetSocketAddress> getMasterRpcAddresses(AlluxioConfiguration conf) {
    // First check whether rpc addresses are explicitly configured.
    if (conf.isSet(PropertyKey.MASTER_RPC_ADDRESSES)) {
        return parseInetSocketAddresses(conf.getList(PropertyKey.MASTER_RPC_ADDRESSES));
    }
    // Fall back on server-side journal configuration.
    int rpcPort = NetworkAddressUtils.getPort(NetworkAddressUtils.ServiceType.MASTER_RPC, conf);
    return overridePort(getEmbeddedJournalAddresses(conf, ServiceType.MASTER_RAFT), rpcPort);
}
// With no explicit configuration, the local host name with the default RPC
// port (19998) is returned.
@Test
public void getMasterRpcAddressesDefault() {
    AlluxioConfiguration conf = createConf(Collections.emptyMap());
    String host = NetworkAddressUtils.getLocalHostName(5 * Constants.SECOND_MS);
    assertEquals(Arrays.asList(InetSocketAddress.createUnresolved(host, 19998)),
            ConfigurationUtils.getMasterRpcAddresses(conf));
}
public boolean areWithin(Distance altitudeProximityReq, Distance lateralProximityReq) { //test the altitudeDelta first to reduce the number of calls to lateralDistance() return altitudeDelta().isLessThanOrEqualTo(altitudeProximityReq) && lateralDistance().isLessThanOrEqualTo(lateralProximityReq); }
// Passing null as the lateral requirement proves short-circuiting: when the
// altitude gap already exceeds the limit, lateralDistance() must not be
// evaluated (no NPE).
@Test
public void verifyLateralDistanceIsIgnoredIfAltitudeIsTooDifferent() {
    Distance requiredAltitudeProximity = Distance.ofFeet(500);
    //alt = 2400
    Point p1 = NopHit.from("[RH],STARS,A80_B,02/12/2018,18:36:46.667,JIA5545,CRJ9,E,5116,024,157,270,033.63143,-084.33913,1334,5116,22.4031,27.6688,1,O,A,A80,OZZ,OZZ,ATL,1827,ATL,ACT,IFR,,01719,,,,,27L,L,1,,0,{RH}");
    //alt = 3400
    Point p2 = NopHit.from("[RH],STARS,A80_B,02/12/2018,18:36:46.667,JIA5545,CRJ9,E,5116,034,157,270,033.63143,-084.33913,1334,5116,22.4031,27.6688,1,O,A,A80,OZZ,OZZ,ATL,1827,ATL,ACT,IFR,,01719,,,,,27L,L,1,,0,{RH}");
    assertEquals(p1.altitude().inFeet(), 2400.0, 0.01);
    assertEquals(p2.altitude().inFeet(), 3400.0, 0.01);
    PointPair pair = new PointPair(p1, p2);
    //IMPORTANT: no NPE is thrown...so we know we didn't compute the lateral distance
    assertFalse(pair.areWithin(requiredAltitudeProximity, null));
}
/**
 * One iteration of the agent work loop: checks for upgrades, registers the
 * agent if needed, and retrieves work — but only after the plugin location
 * monitor has run at least once (plugins may not be initialized before
 * that). Security failures get dedicated handling; all failures yield
 * {@code WorkAttempt.FAILED}.
 */
final WorkAttempt performWork() {
    try {
        LOG.debug("[Agent Loop] Trying to retrieve work.");
        agentUpgradeService.checkForUpgradeAndExtraProperties();
        sslInfrastructureService.registerIfNecessary(getAgentAutoRegistrationProperties());
        if (pluginJarLocationMonitor.hasRunAtLeastOnce()) {
            return tryDoWork();
        } else {
            LOG.debug("[Agent Loop] PluginLocationMonitor has not yet run. Not retrieving work since plugins may not be initialized.");
            return WorkAttempt.FAILED;
        }
    } catch (Exception e) {
        if (isCausedBySecurity(e)) {
            handleSecurityException(e);
        } else {
            LOG.error("[Agent Loop] Error occurred during loop: ", e);
        }
        return WorkAttempt.FAILED;
    }
}
// When the plugin monitor has never run, the loop must report FAILED
// without attempting work.
@Test
void shouldNotTryWorkIfPluginMonitorHasNotRun() {
    when(pluginJarLocationMonitor.hasRunAtLeastOnce()).thenReturn(false);
    assertThat(agentController.performWork()).isEqualTo(WorkAttempt.FAILED);
    verify(pluginJarLocationMonitor).hasRunAtLeastOnce();
}
/**
 * Returns a page of comments attached to the given alarm. Thin delegate to
 * the repository with page-link translation.
 */
@Override
public PageData<AlarmCommentInfo> findAlarmComments(TenantId tenantId, AlarmId id, PageLink pageLink) {
    log.trace("Try to find alarm comments by alarm id using [{}]", id);
    return DaoUtil.toPageData(
            alarmCommentRepository.findAllByAlarmId(id.getId(), DaoUtil.toPageable(pageLink)));
}
// Comments are scoped to their alarm: two comments on alarm1 and one on
// alarm2 must yield exactly two results when querying alarm1.
@Test
public void testFindAlarmCommentsByAlarmId() {
    log.info("Current system time in millis = {}", System.currentTimeMillis());
    UUID tenantId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    UUID alarmId1 = UUID.randomUUID();
    UUID alarmId2 = UUID.randomUUID();
    UUID commentId1 = UUID.randomUUID();
    UUID commentId2 = UUID.randomUUID();
    UUID commentId3 = UUID.randomUUID();
    saveAlarm(alarmId1, UUID.randomUUID(), UUID.randomUUID(), "TEST_ALARM");
    saveAlarm(alarmId2, UUID.randomUUID(), UUID.randomUUID(), "TEST_ALARM");
    saveAlarmComment(commentId1, alarmId1, userId, AlarmCommentType.OTHER);
    saveAlarmComment(commentId2, alarmId1, userId, AlarmCommentType.OTHER);
    saveAlarmComment(commentId3, alarmId2, userId, AlarmCommentType.OTHER);
    int count = alarmCommentDao.findAlarmComments(TenantId.fromUUID(tenantId),
            new AlarmId(alarmId1), new PageLink(10, 0)).getData().size();
    assertEquals(2, count);
}
// Returns the currently set value (may be null when no default was given
// and setValue was never called).
@Override
public T getValue() {
    return value;
}
// A gauge constructed without a default value must report null.
@Test
public void newSettableGaugeWithoutDefaultReturnsNull() {
    DefaultSettableGauge<String> gauge = new DefaultSettableGauge<>();
    assertThat(gauge.getValue()).isNull();
}
/**
 * Converts a XuGu column definition into a SeaTunnel {@link Column}.
 *
 * <p>The source data type name is upper-cased and dispatched through a switch;
 * unknown types raise a convert-to-SeaTunnel-type error.
 *
 * @param typeDefine the raw XuGu column metadata (name, type, length, precision, scale, …)
 * @return the equivalent SeaTunnel physical column
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Carry over the column's identity/metadata before resolving its data type.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String xuguDataType = typeDefine.getDataType().toUpperCase();
    switch (xuguDataType) {
        case XUGU_BOOLEAN:
        case XUGU_BOOL:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case XUGU_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case XUGU_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case XUGU_INT:
        case XUGU_INTEGER:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case XUGU_BIGINT:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case XUGU_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case XUGU_DOUBLE:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case XUGU_NUMBER:
        case XUGU_DECIMAL:
        case XUGU_NUMERIC:
            // Use the declared precision/scale when precision is positive;
            // otherwise fall back to the converter's defaults.
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                        new DecimalType(
                                typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case XUGU_CHAR:
        case XUGU_NCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            // Missing/non-positive length: assume a single character, widened
            // to a 4-byte-per-char byte length.
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case XUGU_VARCHAR:
        case XUGU_VARCHAR2:
            builder.dataType(BasicType.STRING_TYPE);
            // Missing/non-positive length: assume the maximum varchar length.
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(MAX_VARCHAR_LENGTH));
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case XUGU_CLOB:
            builder.dataType(BasicType.STRING_TYPE);
            // CLOB/BLOB capacity modeled as 2 GiB - 1 bytes.
            builder.columnLength(BYTES_2GB - 1);
            break;
        case XUGU_JSON:
        case XUGU_GUID:
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case XUGU_BINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(MAX_BINARY_LENGTH);
            break;
        case XUGU_BLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(BYTES_2GB - 1);
            break;
        case XUGU_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case XUGU_TIME:
        case XUGU_TIME_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case XUGU_DATETIME:
        case XUGU_DATETIME_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        case XUGU_TIMESTAMP:
        case XUGU_TIMESTAMP_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            // Timestamps without an explicit scale get the default fractional precision.
            if (typeDefine.getScale() == null) {
                builder.scale(TIMESTAMP_DEFAULT_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.XUGU, xuguDataType, typeDefine.getName());
    }
    return builder.build();
}
@Test
public void testConvertBigint() {
    // XuGu BIGINT must surface as SeaTunnel's 64-bit LONG_TYPE,
    // while name and original column type are carried through unchanged.
    BasicTypeDefine<Object> bigintDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("bigint")
                    .dataType("bigint")
                    .build();

    Column converted = XuguTypeConverter.INSTANCE.convert(bigintDefine);

    Assertions.assertEquals(bigintDefine.getName(), converted.getName());
    Assertions.assertEquals(bigintDefine.getColumnType(), converted.getSourceType());
    Assertions.assertEquals(BasicType.LONG_TYPE, converted.getDataType());
}
/**
 * Builds an AST {@link Statement} from the given parse tree, first resolving
 * the data sources referenced by the tree so the builder has them available.
 *
 * @param parseTree the ANTLR parse tree for a single statement
 * @return the built statement
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
    return build(Optional.of(getSources(parseTree)), parseTree);
}
@Test
public void shouldSupportExplicitEmitFinalForInsertInto() {
    // Given: an INSERT INTO statement with an explicit EMIT FINAL clause.
    final SingleStatementContext stmt =
        givenQuery("INSERT INTO TEST1 SELECT * FROM TEST2 EMIT FINAL;");

    // When: the statement is built and its contained query extracted.
    final Query result = ((QueryContainer) builder.buildStatement(stmt)).getQuery();

    // Then: it is a push query whose output refinement honors EMIT FINAL.
    assertThat("Should be push", result.isPullQuery(), is(false));
    assertThat(result.getRefinement().get().getOutputRefinement(), is(OutputRefinement.FINAL));
}
/**
 * Reads the conventional {@code value} attribute of the given annotation type
 * from the annotated element. Convenience overload that delegates to the
 * attribute-name variant with {@code "value"}.
 *
 * @param annotationEle  element (class, method, field, …) carrying the annotation
 * @param annotationType the annotation type to look up
 * @param <T>            the attribute's value type
 * @return the attribute value
 * @throws UtilException if the attribute cannot be read
 */
public static <T> T getAnnotationValue(AnnotatedElement annotationEle, Class<? extends Annotation> annotationType) throws UtilException {
    return getAnnotationValue(annotationEle, annotationType, "value");
}
@Test
public void getAnnotationValueTest2() {
    // Resolve the "names" attribute via a method reference to the annotation accessor.
    final String[] names = AnnotationUtil.getAnnotationValue(ClassWithAnnotation.class, AnnotationForTest::names);
    // Accept either the attribute's empty default (single empty string) or the
    // explicitly declared pair of values — NOTE(review): this tolerates two
    // fixture states; confirm which one ClassWithAnnotation actually declares.
    assertTrue(names.length == 1 && names[0].isEmpty()
            || ArrayUtil.equals(names, new String[]{"测试1", "测试2"}));
}
/**
 * Parses a resource configuration value (e.g. {@code "memory-mb=5120,vcores=2"}).
 * Delegates to the two-argument overload passing {@code Long.MAX_VALUE} —
 * presumably the value substituted for unbounded/missing resources; confirm
 * against the overload's contract.
 *
 * @param value the raw configuration string
 * @return the parsed configurable resource
 * @throws AllocationConfigurationException if the value is malformed
 */
public static ConfigurableResource parseResourceConfigValue(String value)
    throws AllocationConfigurationException {
  return parseResourceConfigValue(value, Long.MAX_VALUE);
}
@Test
public void testParseNewStyleResourceVcoresNegative() throws Exception {
  // Arms the expected-failure check for a negative "vcores" value —
  // NOTE(review): presumably registers an ExpectedException/assertion helper;
  // the subsequent parse is expected to fail.
  expectNegativeValueOfResource("vcores");
  parseResourceConfigValue("memory-mb=5120,vcores=-2");
}
private JobMetrics getJobMetrics() throws IOException { if (cachedMetricResults != null) { // Metric results have been cached after the job ran. return cachedMetricResults; } JobMetrics result = dataflowClient.getJobMetrics(dataflowPipelineJob.getJobId()); if (dataflowPipelineJob.getState().isTerminal()) { // Add current query result to the cache. cachedMetricResults = result; } return result; }
@Test
public void testSingleStringSetUpdates() throws IOException {
    // Given: a running (non-terminal) batch job with one step mapped s2 -> myStepName.
    AppliedPTransform<?, ?, ?> myStep = mock(AppliedPTransform.class);
    when(myStep.getFullName()).thenReturn("myStepName");
    BiMap<AppliedPTransform<?, ?, ?>, String> transformStepNames = HashBiMap.create();
    transformStepNames.put(myStep, "s2");

    JobMetrics jobMetrics = new JobMetrics();
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
    when(options.isStreaming()).thenReturn(false);
    when(job.getDataflowOptions()).thenReturn(options);
    when(job.getState()).thenReturn(State.RUNNING);
    when(job.getJobId()).thenReturn(JOB_ID);
    when(job.getTransformStepNames()).thenReturn(transformStepNames);

    // The parser relies on the fact that one tentative and one committed metric update exist in
    // the job metrics results.
    MetricUpdate mu1 =
        makeStringSetMetricUpdate(
            "counterName", "counterNamespace", "s2", ImmutableSet.of("ab", "cd"), false);
    MetricUpdate mu1Tentative =
        makeStringSetMetricUpdate(
            "counterName", "counterNamespace", "s2", ImmutableSet.of("ab", "cd"), true);
    jobMetrics.setMetrics(ImmutableList.of(mu1, mu1Tentative));
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

    // When: all metrics are queried through the DataflowMetrics facade.
    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.allMetrics();

    // Then: both the attempted (tentative) and committed string-set results
    // surface under the user-visible step name with the expected values.
    assertThat(
        result.getStringSets(),
        containsInAnyOrder(
            attemptedMetricsResult(
                "counterNamespace",
                "counterName",
                "myStepName",
                StringSetResult.create(ImmutableSet.of("ab", "cd")))));
    assertThat(
        result.getStringSets(),
        containsInAnyOrder(
            committedMetricsResult(
                "counterNamespace",
                "counterName",
                "myStepName",
                StringSetResult.create(ImmutableSet.of("ab", "cd")))));
}
/**
 * Applies a Beam {@code PTransform} to a single Flink {@code DataStream} and
 * returns the resulting output stream.
 *
 * <p>Wraps the single input/output in maps keyed {@code "input"}/{@code "output"}
 * so the general multi-stream internal entry point can be reused; the raw
 * {@code (DataStream)} cast bridges the erased generic result back to
 * {@code DataStream<OutputT>}.
 *
 * @param input     the Flink source stream feeding the transform
 * @param transform the Beam transform to apply
 * @return the Flink stream carrying the transform's output
 */
public <InputT, OutputT, CollectionT extends PCollection<? extends InputT>>
    DataStream<OutputT> applyBeamPTransform(
        DataStream<InputT> input, PTransform<CollectionT, PCollection<OutputT>> transform) {
  return (DataStream)
      getNonNull(
          applyBeamPTransformInternal(
              ImmutableMap.of("input", input),
              (pipeline, map) -> (CollectionT) getNonNull(map, "input"),
              (output) -> ImmutableMap.of("output", output),
              transform,
              input.getExecutionEnvironment()),
          "output");
}
@Test
public void testApplyPreservesInputTimestamps() throws Exception {
    // Given: an event-time Flink stream where each element's value is its own timestamp.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    DataStream<Long> input =
        env.fromCollection(ImmutableList.of(1L, 2L, 12L))
            .assignTimestampsAndWatermarks(
                WatermarkStrategy.<Long>forBoundedOutOfOrderness(java.time.Duration.ofMillis(100))
                    .withTimestampAssigner(
                        (SerializableTimestampAssigner<Long>)
                            (element, recordTimestamp) -> element));

    // When: a Beam windowed sum (10ms fixed windows) is applied via the adapter.
    DataStream<Long> result =
        new BeamFlinkDataStreamAdapter()
            .applyBeamPTransform(
                input,
                new PTransform<PCollection<Long>, PCollection<Long>>() {
                  @Override
                  public PCollection<Long> expand(PCollection<Long> input) {
                    return input
                        .apply(Window.into(FixedWindows.of(Duration.millis(10))))
                        .apply(Sum.longsGlobally().withoutDefaults());
                  }
                });

    // Then: timestamps survived the adapter — 1 and 2 land in window [0,10)
    // (sum 3) and 12 in window [10,20) (sum 12), proving windowing used the
    // original event timestamps.
    assertThat(ImmutableList.copyOf(result.executeAndCollect()), containsInAnyOrder(3L, 12L));
}