focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// FEEL mean(list): delegates to sum.invoke() and divides the total by list.size() using DECIMAL128 precision.
// Null list, an unsummable list, and any division failure (e.g. an empty list causing divide-by-zero,
// caught by the try/catch) all surface as InvalidParametersEvent errors rather than exceptions.
public FEELFnResult<BigDecimal> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } FEELFnResult<BigDecimal> s = sum.invoke( list ); Function<FEELEvent, FEELFnResult<BigDecimal>> ifLeft = event -> FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "unable to sum the elements which is required to calculate the mean")); Function<BigDecimal, FEELFnResult<BigDecimal>> ifRight = (sum) -> { try { return FEELFnResult.ofResult( sum.divide( BigDecimal.valueOf( list.size() ), MathContext.DECIMAL128 ) ); } catch (Exception e) { return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to perform division to calculate the mean", e) ); } }; return s.cata(ifLeft, ifRight); }
// Verifies mean() over lists of doubles yields the expected BigDecimal averages (integral and fractional).
@Test void invokeListWithDoubles() { FunctionTestUtil.assertResult(meanFunction.invoke(Arrays.asList(10.0d, 20.0d, 30.0d)), BigDecimal.valueOf(20)); FunctionTestUtil.assertResult(meanFunction.invoke(Arrays.asList(10.2d, 20.2d, 30.2d)), BigDecimal.valueOf(20.2)); }
/**
 * Casts the supplied object to a {@code Predicate}.
 *
 * @param instance candidate object; must implement {@code Predicate}
 * @return the instance cast to {@code Predicate}
 * @throws IllegalArgumentException when the object is not a Predicate
 */
static Predicate obtainPredicateFromInstance(final Object instance) {
    // Guard clause keeps the happy path unindented; isAssignableFrom (not instanceof)
    // preserves the original NullPointerException behavior for a null instance.
    if (!Predicate.class.isAssignableFrom(instance.getClass())) {
        throw new IllegalArgumentException(ERROR_PREDICATE_CLASS);
    }
    return (Predicate) instance;
}
// A plain String is not a Predicate, so the helper must throw IllegalArgumentException with ERROR_PREDICATE_CLASS.
@Test void obtainPredicateFromInstanceWhenNotPredicateInstance() { String expectedPredicate = "thisMightHurt"; Exception ex = assertThrows(IllegalArgumentException.class, () -> DynamicRouterControlService.obtainPredicateFromInstance(expectedPredicate)); assertEquals(ERROR_PREDICATE_CLASS, ex.getMessage()); }
/**
 * JDBC {@code CommonDataSource} contract: this data source has no dedicated
 * java.util.logging hierarchy, so the global JUL logger is reported as parent.
 */
@Override
public final Logger getParentLogger() {
    final Logger globalLogger = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
    return globalLogger;
}
// The parent logger must be the JUL global logger.
@Test void assertGetParentLogger() { assertThat(shardingSphereDataSource.getParentLogger().getName(), is(Logger.GLOBAL_LOGGER_NAME)); }
// Unwraps a protobuf ServiceException thrown by the RPC layer and rethrows the most specific
// exception available: RemoteException causes are re-instantiated as their real YarnException /
// IOException / RuntimeException subclass when that class is loadable (falling back to a plain
// YarnException for unknown class names, presumably new server-side types); direct IOException /
// RuntimeException causes are rethrown as-is; anything else is wrapped in an IOException.
// Declared to return Void so callers can write "return unwrapAndThrowException(se);" — it never returns normally.
public static Void unwrapAndThrowException(ServiceException se) throws IOException, YarnException { Throwable cause = se.getCause(); if (cause == null) { // SE generated by the RPC layer itself. throw new IOException(se); } else { if (cause instanceof RemoteException) { RemoteException re = (RemoteException) cause; Class<?> realClass = null; try { realClass = Class.forName(re.getClassName()); } catch (ClassNotFoundException cnf) { // Assume this to be a new exception type added to YARN. This isn't // absolutely correct since the RPC layer could add an exception as // well. throw instantiateYarnException(YarnException.class, re); } if (YarnException.class.isAssignableFrom(realClass)) { throw instantiateYarnException( realClass.asSubclass(YarnException.class), re); } else if (IOException.class.isAssignableFrom(realClass)) { throw instantiateIOException(realClass.asSubclass(IOException.class), re); } else if (RuntimeException.class.isAssignableFrom(realClass)) { throw instantiateRuntimeException( realClass.asSubclass(RuntimeException.class), re); } else { throw re; } // RemoteException contains useful information as against the // java.lang.reflect exceptions. } else if (cause instanceof IOException) { // RPC Client exception. throw (IOException) cause; } else if (cause instanceof RuntimeException) { // RPC RuntimeException throw (RuntimeException) cause; } else { // Should not be generated. throw new IOException(se); } } }
// A direct IOException cause (FileNotFoundException) must be rethrown unchanged with its message intact.
@Test void testRPCIOExceptionUnwrapping() { String message = "DirectIOExceptionMessage"; IOException ioException = new FileNotFoundException(message); ServiceException se = new ServiceException(ioException); Throwable t = null; try { RPCUtil.unwrapAndThrowException(se); } catch (Throwable thrown) { t = thrown; } assertTrue(FileNotFoundException.class.isInstance(t)); assertTrue(t.getMessage().contains(message)); }
/**
 * Converts the table to the requested type without transposing it.
 * Convenience overload delegating to {@code convert(dataTable, type, transposed)}.
 */
@Override
public <T> T convert(DataTable dataTable, Type type) {
    final boolean transposed = false;
    return convert(dataTable, type, transposed);
}
// Converting to Map<String, List<Double>> must use built-in primitive conversion, never the
// registered default entry/cell transformers (which throw if invoked).
@Test void convert_to_map_of_primitive_to_list_of_primitive__default_converter_present() { registry.setDefaultDataTableEntryTransformer(TABLE_ENTRY_BY_TYPE_CONVERTER_SHOULD_NOT_BE_USED); registry.setDefaultDataTableCellTransformer(TABLE_CELL_BY_TYPE_CONVERTER_SHOULD_NOT_BE_USED); DataTable table = parse("", "| KMSY | 29.993333 | -90.258056 |", "| KSFO | 37.618889 | -122.375 |", "| KSEA | 47.448889 | -122.309444 |", "| KJFK | 40.639722 | -73.778889 |"); Map<String, List<Double>> expected = new HashMap<String, List<Double>>() { { put("KMSY", asList(29.993333, -90.258056)); put("KSFO", asList(37.618889, -122.375)); put("KSEA", asList(47.448889, -122.309444)); put("KJFK", asList(40.639722, -73.778889)); } }; assertEquals(expected, converter.convert(table, MAP_OF_STRING_TO_LIST_OF_DOUBLE)); }
// Resolves the Nacos endpoint as "host:port" from client properties. When endpoint-parsing rules
// are enabled (property or system property, defaulting to ParamUtil's constant) the configured
// endpoint is first run through ParamUtil.parsingEndpointRule. Returns "" for null properties or
// a blank endpoint. The port falls back from the ALIBABA_ALIWARE_ENDPOINT_PORT env-style property
// to ENDPOINT_PORT and finally to DEFAULT_END_POINT_PORT.
public static String initEndpoint(final NacosClientProperties properties) { if (properties == null) { return ""; } // Whether to enable domain name resolution rules String isUseEndpointRuleParsing = properties.getProperty(PropertyKeyConst.IS_USE_ENDPOINT_PARSING_RULE, properties.getProperty(SystemPropertyKeyConst.IS_USE_ENDPOINT_PARSING_RULE, String.valueOf(ParamUtil.USE_ENDPOINT_PARSING_RULE_DEFAULT_VALUE))); boolean isUseEndpointParsingRule = Boolean.parseBoolean(isUseEndpointRuleParsing); String endpointUrl; if (isUseEndpointParsingRule) { // Get the set domain name information endpointUrl = ParamUtil.parsingEndpointRule(properties.getProperty(PropertyKeyConst.ENDPOINT)); if (StringUtils.isBlank(endpointUrl)) { return ""; } } else { endpointUrl = properties.getProperty(PropertyKeyConst.ENDPOINT); } if (StringUtils.isBlank(endpointUrl)) { return ""; } String endpointPort = TemplateUtils .stringEmptyAndThenExecute(properties.getProperty(PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_ENDPOINT_PORT), () -> properties.getProperty(PropertyKeyConst.ENDPOINT_PORT)); endpointPort = TemplateUtils.stringEmptyAndThenExecute(endpointPort, () -> DEFAULT_END_POINT_PORT); return endpointUrl + ":" + endpointPort; }
// Null properties must resolve to the empty endpoint string.
@Test void testInitEndpointForNullProperties() { assertEquals("", InitUtils.initEndpoint(null)); }
/** Returns the current count of failed renewDelegationToken retrievals. */
public int getRenewDelegationTokenFailedRetrieved() {
    final int failures = numRenewDelegationTokenFailedRetrieved.value();
    return failures;
}
// A failed renewDelegationToken call must bump the failure metric by exactly one.
@Test public void testRenewDelegationTokenRetrievedFailed() { long totalBadBefore = metrics.getRenewDelegationTokenFailedRetrieved(); badSubCluster.getRenewDelegationTokenFailed(); Assert.assertEquals(totalBadBefore + 1, metrics.getRenewDelegationTokenFailedRetrieved()); }
/**
 * Registers Spring Cloud client metadata: looks up any existing record by path,
 * then delegates to the service to create or update it.
 */
@Override
protected void registerMetadata(final MetaDataRegisterDTO metaDataDTO) {
    final MetaDataService service = getMetaDataService();
    final MetaDataDO existing = service.findByPath(metaDataDTO.getPath());
    service.saveOrUpdateMetaData(existing, metaDataDTO);
}
// Verifies the lookup-by-path and the save/update delegation both happen with the expected arguments.
@Test public void testRegisterMetadata() { MetaDataDO metaDataDO = MetaDataDO.builder().build(); when(metaDataService.findByPath(any())).thenReturn(metaDataDO); MetaDataRegisterDTO metaDataDTO = MetaDataRegisterDTO.builder().path("/contextPath/test").build(); shenyuClientRegisterSpringCloudService.registerMetadata(metaDataDTO); verify(metaDataService).findByPath("/contextPath/test"); verify(metaDataService).saveOrUpdateMetaData(metaDataDO, metaDataDTO); }
/**
 * Loads erasure-coding policies from the given XML policy file.
 *
 * @param policyFilePath path of the EC policy file
 * @return parsed policies, or an empty list when the file does not exist
 * @throws RuntimeException wrapping the parse/IO failure when loading fails
 */
public List<ErasureCodingPolicy> loadPolicy(String policyFilePath) {
    try {
        File policyFile = getPolicyFile(policyFilePath);
        if (!policyFile.exists()) {
            LOG.warn("Not found any EC policy file");
            return Collections.emptyList();
        }
        return loadECPolicies(policyFile);
    } catch (ParserConfigurationException | IOException | SAXException e) {
        // Bug fix: previously the cause was dropped, hiding the real parse/IO error.
        throw new RuntimeException("Failed to load EC policy file: " + policyFilePath, e);
    }
}
// Writes a policy file containing two schemas with identical codec/k/m and expects the loader
// to reject the duplicate with a RuntimeException naming the repeated schema id.
@Test public void testRepeatECSchema() throws Exception { PrintWriter out = new PrintWriter(new FileWriter(POLICY_FILE)); out.println("<?xml version=\"1.0\"?>"); out.println("<configuration>"); out.println("<layoutversion>1</layoutversion>"); out.println("<schemas>"); out.println(" <schema id=\"RSk12m4\">"); out.println(" <codec>RS-legacy</codec>"); out.println(" <k>12</k>"); out.println(" <m>4</m>"); out.println(" </schema>"); out.println(" <schema id=\"RS-legacyk12m4\">"); out.println(" <codec>RS-legacy</codec>"); out.println(" <k>12</k>"); out.println(" <m>4</m>"); out.println(" </schema>"); out.println("</schemas>"); out.println("<policies>"); out.println(" <policy>"); out.println(" <schema>RS-legacyk12m4</schema>"); out.println(" <cellsize>1024</cellsize>"); out.println(" </policy>"); out.println(" <policy>"); out.println(" <schema>RSk12m4</schema>"); out.println(" <cellsize>20480</cellsize>"); out.println(" </policy>"); out.println("</policies>"); out.println("</configuration>"); out.close(); ECPolicyLoader ecPolicyLoader = new ECPolicyLoader(); try { ecPolicyLoader.loadPolicy(POLICY_FILE); fail("RuntimeException should be thrown for repetitive elements"); } catch (RuntimeException e) { assertExceptionContains("Repetitive schemas in EC policy" + " configuration file: RS-legacyk12m4", e); } }
// Deprecated fluent setter toggling in-JVM (local) invocation; returns the builder for chaining.
@Deprecated public B injvm(Boolean injvm) { this.injvm = injvm; return getThis(); }
// The injvm flag set on the builder must round-trip through build()/isInjvm().
@Test void injvm() { ReferenceBuilder builder = new ReferenceBuilder(); builder.injvm(true); Assertions.assertTrue(builder.build().isInjvm()); builder.injvm(false); Assertions.assertFalse(builder.build().isInjvm()); }
/**
 * Formats epoch milliseconds as a log4j-style timestamp
 * ("yyyy-MM-dd HH:mm:ss,SSS XXX") in the JVM's default time zone.
 *
 * @param timestamp milliseconds since the epoch
 * @return the formatted timestamp string
 */
public static String toLogDateTimeFormat(long timestamp) {
    final ZonedDateTime zoned = Instant.ofEpochMilli(timestamp).atZone(ZoneId.systemDefault());
    return zoned.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss,SSS XXX"));
}
// Checks millisecond and second-resolution timestamps format correctly; the zone offset suffix is
// computed from the system default zone so the test is zone-independent.
@Test public void testToLogDateTimeFormat() { final LocalDateTime timestampWithMilliSeconds = LocalDateTime.of(2020, 11, 9, 12, 34, 5, 123000000); final LocalDateTime timestampWithSeconds = LocalDateTime.of(2020, 11, 9, 12, 34, 5); DateTimeFormatter offsetFormatter = DateTimeFormatter.ofPattern("XXX"); ZoneOffset offset = ZoneId.systemDefault().getRules().getOffset(timestampWithSeconds); String requiredOffsetFormat = offsetFormatter.format(offset); assertEquals(String.format("2020-11-09 12:34:05,123 %s", requiredOffsetFormat), Utils.toLogDateTimeFormat(timestampWithMilliSeconds.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli())); assertEquals(String.format("2020-11-09 12:34:05,000 %s", requiredOffsetFormat), Utils.toLogDateTimeFormat(timestampWithSeconds.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli())); }
// Returns the gauge registered under the given name, registering one via MetricBuilder.GAUGES when
// absent. Unchecked cast: the caller is responsible for requesting the correct gauge value type.
@SuppressWarnings({"rawtypes", "unchecked"}) public <T extends Gauge> T gauge(String name) { return (T) getOrAdd(name, MetricBuilder.GAUGES); }
// A second gauge("thing") lookup must return the very same instance registered first.
@Test public void accessingAnExistingSettableGaugeReusesIt() { final Gauge<String> gauge1 = registry.gauge("thing", () -> new DefaultSettableGauge<>("settable-gauge")); final Gauge<String> gauge2 = registry.gauge("thing"); assertThat(gauge1).isSameAs(gauge2); assertThat(gauge2.getValue()).isEqualTo("settable-gauge"); verify(listener).onGaugeAdded("thing", gauge1); }
// Idempotently ensures the Bigtable metadata table exists with all required column families
// (maxVersions=1 GC rule). When the table is absent it is created and true is returned; when it
// already exists any missing column families are added via a single modifyFamilies call and
// false is returned.
public boolean createMetadataTable() { GCRules.GCRule gcRules = GCRules.GCRULES.maxVersions(1); if (tableAdminClient.exists(tableId)) { Table table = tableAdminClient.getTable(tableId); List<ColumnFamily> currentCFs = table.getColumnFamilies(); ModifyColumnFamiliesRequest request = ModifyColumnFamiliesRequest.of(tableId); boolean needsNewColumnFamily = false; for (String targetCF : COLUMN_FAMILIES) { boolean exists = false; for (ColumnFamily currentCF : currentCFs) { if (targetCF.equals(currentCF.getId())) { exists = true; break; } } if (!exists) { needsNewColumnFamily = true; request.addFamily(targetCF, gcRules); } } if (needsNewColumnFamily) { tableAdminClient.modifyFamilies(request); } return false; } CreateTableRequest createTableRequest = CreateTableRequest.of(tableId); for (String cf : COLUMN_FAMILIES) { createTableRequest.addFamily(cf, gcRules); } tableAdminClient.createTable(createTableRequest); return true; }
// First call creates the table (true); second call finds it existing (false) and leaves the
// column families unchanged.
@Test public void testCreateTableAlreadyExists() { assertTrue(metadataTableAdminDao.createMetadataTable()); // Verify column families are correct. Table table = tableAdminClient.getTable(tableId); assertEquals(COLUMN_FAMILIES.size(), table.getColumnFamilies().size()); assertThat( table.getColumnFamilies().stream().map(ColumnFamily::getId).collect(Collectors.toList()), Matchers.containsInAnyOrder(COLUMN_FAMILIES.toArray())); assertFalse(metadataTableAdminDao.createMetadataTable()); // Verify the expected column families are still there. table = tableAdminClient.getTable(tableId); assertEquals(COLUMN_FAMILIES.size(), table.getColumnFamilies().size()); assertThat( table.getColumnFamilies().stream().map(ColumnFamily::getId).collect(Collectors.toList()), Matchers.containsInAnyOrder(COLUMN_FAMILIES.toArray())); }
// Factory for a transform that divides its input by the supplied operand.
public static SimpleTransform div(double operand) { return new SimpleTransform(Operation.div,operand); }
// The div(45) transform must behave like a plain "a / 45" over the tested inputs.
@Test public void testDiv() { TransformationMap t = new TransformationMap(Collections.singletonList(SimpleTransform.div(45)),new HashMap<>()); testSimple(t,(double a) -> a / 45); }
// Reloads a theme from disk: waits for the theme's Setting and annotation settings to be deleted,
// re-reads the mandatory theme.yaml manifest, carries the stored resource version onto the freshly
// parsed Theme before updating it, and finally re-creates the theme's Setting/AnnotationSetting
// resources (labeled with the theme name) from the on-disk resources.
@Override public Mono<Theme> reloadTheme(String name) { return client.fetch(Theme.class, name) .flatMap(oldTheme -> { String settingName = oldTheme.getSpec().getSettingName(); return waitForSettingDeleted(settingName) .then(waitForAnnotationSettingsDeleted(name)); }) .then(Mono.defer(() -> { Path themePath = themeRoot.get().resolve(name); Path themeManifestPath = ThemeUtils.resolveThemeManifest(themePath); if (themeManifestPath == null) { throw new IllegalArgumentException( "The manifest file [theme.yaml] is required."); } Unstructured unstructured = loadThemeManifest(themeManifestPath); Theme newTheme = Unstructured.OBJECT_MAPPER.convertValue(unstructured, Theme.class); return client.fetch(Theme.class, name) .map(oldTheme -> { newTheme.getMetadata().setVersion(oldTheme.getMetadata().getVersion()); return newTheme; }) .flatMap(client::update); })) .flatMap(theme -> { String settingName = theme.getSpec().getSettingName(); return Flux.fromIterable(ThemeUtils.loadThemeResources(getThemePath(theme))) .filter(unstructured -> (Setting.KIND.equals(unstructured.getKind()) && unstructured.getMetadata().getName().equals(settingName)) || AnnotationSetting.KIND.equals(unstructured.getKind()) ) .doOnNext(unstructured -> populateThemeNameLabel(unstructured, name)) .flatMap(client::create) .then(Mono.just(theme)); }); }
// End-to-end reload test: writes settings.yaml and theme.yaml into the theme workdir, stubs
// fetch/update/create, and verifies the re-created Setting carries the theme-name label and the
// updated Theme matches the on-disk manifest.
@Test void reloadThemeWhenSettingNameNotSetBefore() throws IOException { Theme theme = new Theme(); theme.setMetadata(new Metadata()); theme.getMetadata().setName("fake-theme"); theme.setSpec(new Theme.ThemeSpec()); theme.getSpec().setDisplayName("Hello"); when(client.fetch(Theme.class, "fake-theme")) .thenReturn(Mono.just(theme)); Setting setting = new Setting(); setting.setMetadata(new Metadata()); setting.setSpec(new Setting.SettingSpec()); setting.getSpec().setForms(List.of()); when(client.fetch(eq(Setting.class), eq(null))).thenReturn(Mono.empty()); Path themeWorkDir = themeRoot.get().resolve(theme.getMetadata().getName()); if (!Files.exists(themeWorkDir)) { Files.createDirectories(themeWorkDir); } Files.writeString(themeWorkDir.resolve("settings.yaml"), """ apiVersion: v1alpha1 kind: Setting metadata: name: fake-setting spec: forms: - group: sns label: 社交资料 formSchema: - $el: h1 children: Register """); Files.writeString(themeWorkDir.resolve("theme.yaml"), """ apiVersion: v1alpha1 kind: Theme metadata: name: fake-theme spec: displayName: Fake Theme settingName: fake-setting """); when(client.update(any(Theme.class))) .thenAnswer((Answer<Mono<Theme>>) invocation -> { Theme argument = invocation.getArgument(0); return Mono.just(argument); }); when(client.create(any(Unstructured.class))) .thenAnswer((Answer<Mono<Unstructured>>) invocation -> { Unstructured argument = invocation.getArgument(0); JSONAssert.assertEquals(""" { "spec": { "forms": [ { "group": "sns", "label": "社交资料", "formSchema": [ { "$el": "h1", "children": "Register" } ] } ] }, "apiVersion": "v1alpha1", "kind": "Setting", "metadata": { "name": "fake-setting", "labels": { "theme.halo.run/theme-name": "fake-theme" } } } """, JsonUtils.objectToJson(argument), true); return Mono.just(invocation.getArgument(0)); }); when(client.list(eq(AnnotationSetting.class), any(), eq(null))).thenReturn(Flux.empty()); themeService.reloadTheme("fake-theme") .as(StepVerifier::create) .consumeNextWith(themeUpdated -> { 
try { JSONAssert.assertEquals(""" { "spec": { "settingName": "fake-setting", "displayName": "Fake Theme", "version": "*", "requires": "*" }, "apiVersion": "theme.halo.run/v1alpha1", "kind": "Theme", "metadata": { "name": "fake-theme" } } """, JsonUtils.objectToJson(themeUpdated), true); } catch (JSONException e) { throw new RuntimeException(e); } }) .verifyComplete(); }
/**
 * Starts the telemetry daemon. Reads the enable flag, records an opt-out marker the first time
 * telemetry is found disabled (logging the opt-out once), clears the marker when re-enabled,
 * and schedules the periodic upload when telemetry is active.
 */
@Override
public void start() {
    // Bug fix: the error message previously referenced SONAR_TELEMETRY_URL although the
    // setting being read (and potentially missing) is SONAR_TELEMETRY_ENABLE.
    boolean isTelemetryActivated = config.getBoolean(SONAR_TELEMETRY_ENABLE.getKey())
        .orElseThrow(() -> new IllegalStateException(
            String.format("Setting '%s' must be provided.", SONAR_TELEMETRY_ENABLE.getKey())));
    boolean hasOptOut = internalProperties.read(I_PROP_OPT_OUT).isPresent();
    if (!isTelemetryActivated && !hasOptOut) {
        // First time we observe telemetry disabled: send the opt-out message and remember it.
        optOut();
        internalProperties.write(I_PROP_OPT_OUT, String.valueOf(system2.now()));
        LOG.info("Sharing of SonarQube statistics is disabled.");
    }
    if (isTelemetryActivated && hasOptOut) {
        // Telemetry re-enabled: clear the stale opt-out marker.
        internalProperties.write(I_PROP_OPT_OUT, null);
    }
    if (!isTelemetryActivated) {
        return;
    }
    LOG.info("Sharing of SonarQube statistics is enabled.");
    int frequencyInSeconds = frequency();
    scheduleWithFixedDelay(telemetryCommand(), frequencyInSeconds, frequencyInSeconds, TimeUnit.SECONDS);
}
// With a 1-second frequency, start() must trigger at least one upload within the 4-second timeout,
// writing the loaded telemetry data through the JSON writer.
@Test void send_data_via_client_at_startup_after_initial_delay() throws IOException { initTelemetrySettingsToDefaultValues(); when(lockManager.tryLock(any(), anyInt())).thenReturn(true); settings.setProperty("sonar.telemetry.frequencyInSeconds", "1"); when(dataLoader.load()).thenReturn(SOME_TELEMETRY_DATA); mockDataJsonWriterDoingSomething(); underTest.start(); verify(client, timeout(4_000).atLeastOnce()).upload(anyString()); verify(dataJsonWriter).writeTelemetryData(any(JsonWriter.class), same(SOME_TELEMETRY_DATA)); }
/** Clears the error state of every scenario-simulation datum in this model. */
public void resetErrors() {
    for (AbstractScesimData data : scesimData) {
        data.resetErrors();
    }
}
// Every mocked datum must receive exactly one resetErrors() call.
@Test public void resetErrors() { model.resetErrors(); model.scesimData.forEach(scesimData -> verify(scesimData, times(1)).resetErrors()); }
/**
 * Merges status-checker name sources into one insertion-ordered map.
 * Later sources (protocol configs, then provider config) override entries
 * contributed by the health-indicator properties.
 */
protected Map<String, String> resolveStatusCheckerNamesMap() {
    final Map<String, String> merged =
        new LinkedHashMap<>(resolveStatusCheckerNamesMapFromDubboHealthIndicatorProperties());
    merged.putAll(resolveStatusCheckerNamesMapFromProtocolConfigs());
    merged.putAll(resolveStatusCheckerNamesMapFromProviderConfig());
    return merged;
}
// The merged map must contain the five expected checker entries with their originating sources.
@Test public void testResolveStatusCheckerNamesMap() { Map<String, String> statusCheckerNamesMap = dubboHealthIndicator.resolveStatusCheckerNamesMap(); Assert.assertEquals(5, statusCheckerNamesMap.size()); Assert.assertEquals("dubbo-protocol@ProtocolConfig.getStatus()", statusCheckerNamesMap.get("registry")); Assert.assertEquals("dubbo-provider@ProviderConfig.getStatus()", statusCheckerNamesMap.get("server")); Assert.assertEquals("management.health.dubbo.status.defaults", statusCheckerNamesMap.get("memory")); Assert.assertEquals("management.health.dubbo.status.extras", statusCheckerNamesMap.get("load")); Assert.assertEquals("management.health.dubbo.status.extras", statusCheckerNamesMap.get("threadpool")); }
// Delegates slot assignment for the two-part key straight to the superclass implementation.
@Override public SlotAssignmentResult ensure(long key1, long key2) { return super.ensure0(key1, key2); }
// With assertions enabled, ensure() on a disposed accessor must trip an AssertionError.
@Test(expected = AssertionError.class) @RequireAssertEnabled public void testPut_whenDisposed() { hsa.dispose(); hsa.ensure(1, 1); }
/** This database type supports native JDBC and JNDI access only. */
@Override
public int[] getAccessTypeList() {
    final int[] supportedAccessTypes = {
        DatabaseMeta.TYPE_ACCESS_NATIVE,
        DatabaseMeta.TYPE_ACCESS_JNDI,
    };
    return supportedAccessTypes;
}
// Expects the constants' raw values in order: TYPE_ACCESS_NATIVE (0), TYPE_ACCESS_JNDI (4).
@Test public void testGetAccessTypeList() { int[] accessTypeList = dbMeta.getAccessTypeList(); assertEquals( 0, accessTypeList[0] ); assertEquals( 4, accessTypeList[1] ); }
/**
 * Fluent toggle: configures the reader to skip malformed CSV lines
 * instead of failing.
 *
 * @return this reader, for chaining
 */
public CsvReader ignoreInvalidLines() {
    this.ignoreInvalidLines = true;
    return this;
}
// The flag defaults to false and flips to true after the fluent call.
@Test void testIgnoreInvalidLinesConfigure() { CsvReader reader = getCsvReader(); assertThat(reader.ignoreInvalidLines).isFalse(); reader.ignoreInvalidLines(); assertThat(reader.ignoreInvalidLines).isTrue(); }
/**
 * Fires the registered request callback, if any: onSuccess with the response
 * when the send succeeded and no error was recorded, otherwise onException
 * with the recorded cause.
 */
public void executeRequestCallback() {
    if (requestCallback == null) {
        return;
    }
    final boolean succeeded = sendRequestOk && cause == null;
    if (succeeded) {
        requestCallback.onSuccess(responseMsg);
    } else {
        requestCallback.onException(cause);
    }
}
// With sendRequestOk=true and no cause, the success path must be invoked exactly once.
@Test public void testExecuteRequestCallback() throws Exception { final AtomicInteger cc = new AtomicInteger(0); RequestResponseFuture future = new RequestResponseFuture(UUID.randomUUID().toString(), 3 * 1000L, new RequestCallback() { @Override public void onSuccess(Message message) { cc.incrementAndGet(); } @Override public void onException(Throwable e) { } }); future.setSendRequestOk(true); future.executeRequestCallback(); assertThat(cc.get()).isEqualTo(1); }
/**
 * Reads an integer property from the node, returning null when the property
 * is missing or explicitly null; otherwise parses strictly via getInt.
 */
public static Integer getIntOrNull(String property, JsonNode node) {
    return node.hasNonNull(property) ? getInt(property, node) : null;
}
// Missing and null properties yield null; integral values parse; strings and floats must fail.
@Test public void getIntOrNull() throws JsonProcessingException { assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull(); assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}"))).isEqualTo(23); assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}"))).isNull(); assertThatThrownBy( () -> JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}"))) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse to an integer value: x: \"23\""); assertThatThrownBy( () -> JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23.0}"))) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse to an integer value: x: 23.0"); }
/**
 * Builds an IPv4 prefix from a raw 32-bit address and a prefix length.
 * Prefix-length validation happens in the IpPrefix constructor.
 */
public static IpPrefix valueOf(int address, int prefixLength) {
    final IpAddress ip = IpAddress.valueOf(address);
    return new IpPrefix(ip, prefixLength);
}
// A 33-bit prefix length on an IPv4 address must be rejected.
@Test(expected = IllegalArgumentException.class) public void testInvalidValueOfIntegerTooLongPrefixLengthIPv4() { IpPrefix ipPrefix; ipPrefix = IpPrefix.valueOf(0x01020304, 33); }
/**
 * Scrapes the fetched result elements into at most 5 SearchResults, with each
 * snippet truncated to 250 characters.
 *
 * @param query search query forwarded to fetchSearchResults
 * @return up to five results in page order
 * @throws IOException propagated from fetching the results
 */
public List<SearchResult> getSearchResults(String query) throws IOException {
    Elements results = fetchSearchResults(query);
    List<SearchResult> resultList = new ArrayList<>();
    for (Element result : results) {
        // NOTE(review): first() returns null on malformed markup and would NPE here —
        // confirm upstream guarantees the expected structure.
        Element title = result.getElementsByClass("links_main").first().getElementsByTag("a").first();
        Element snippet = result.getElementsByClass("result__snippet").first();
        // Math.min replaces the manual ternary clamp; behavior is identical.
        String text = snippet.text();
        String snippetText = text.substring(0, Math.min(text.length(), 250));
        resultList.add(new SearchResult(title.attr("href"), title.text(), snippetText));
    }
    return resultList.subList(0, Math.min(resultList.size(), 5));
}
// A 300-character snippet must be truncated to exactly 250 characters.
@Test void testGetSearchResultsWithLongSnippet() throws IOException { searchWebAction = new SearchWebAction() { @Override Elements fetchSearchResults(String query) { Elements mockResults = new Elements(); mockResults.add(createMockResult("http://example.com", "Title", "A".repeat(300))); return mockResults; } }; List<SearchWebAction.SearchResult> results = searchWebAction.getSearchResults("test query"); assertEquals(1, results.size()); assertEquals(250, results.get(0).snippet().length()); }
// Splits a statement sequence into overlapping fixed-size blocks with Rabin-Karp-style rolling
// hashes. First, runs of consecutive statements with equal values are collapsed to their first
// and last occurrence. If fewer than blockSize statements remain, no blocks are produced.
// Otherwise a window of blockSize statements slides over the sequence: each new statement's
// value hash is folded in with PRIME_BASE, and the departing statement's contribution is removed
// using the precomputed `power` (PRIME_BASE^(blockSize-1), maintained elsewhere in the class).
public List<Block> chunk(String resourceId, List<Statement> statements) { List<Statement> filtered = new ArrayList<>(); int i = 0; while (i < statements.size()) { Statement first = statements.get(i); int j = i + 1; while (j < statements.size() && statements.get(j).getValue().equals(first.getValue())) { j++; } filtered.add(statements.get(i)); if (i < j - 1) { filtered.add(statements.get(j - 1)); } i = j; } statements = filtered; if (statements.size() < blockSize) { return Collections.emptyList(); } Statement[] statementsArr = statements.toArray(new Statement[statements.size()]); List<Block> blocks = new ArrayList<>(statementsArr.length - blockSize + 1); long hash = 0; int first = 0; int last = 0; for (; last < blockSize - 1; last++) { hash = hash * PRIME_BASE + statementsArr[last].getValue().hashCode(); } Block.Builder blockBuilder = Block.builder().setResourceId(resourceId); for (; last < statementsArr.length; last++, first++) { Statement firstStatement = statementsArr[first]; Statement lastStatement = statementsArr[last]; // add last statement to hash hash = hash * PRIME_BASE + lastStatement.getValue().hashCode(); // create block Block block = blockBuilder.setBlockHash(new ByteArray(hash)) .setIndexInFile(first) .setLines(firstStatement.getStartLine(), lastStatement.getEndLine()) .build(); blocks.add(block); // remove first statement from hash hash -= power * firstStatement.getValue().hashCode(); } return blocks; }
// Verifies rolling hashes of each 3-statement window match the reference hash and known hex values.
@Test public void shouldCalculateHashes() { List<Statement> statements = createStatementsFromStrings("aaaaaa", "bbbbbb", "cccccc", "dddddd", "eeeeee"); BlockChunker blockChunker = createChunkerWithBlockSize(3); List<Block> blocks = blockChunker.chunk("resource", statements); assertThat(blocks.get(0).getBlockHash(), equalTo(hash("aaaaaa", "bbbbbb", "cccccc"))); assertThat(blocks.get(1).getBlockHash(), equalTo(hash("bbbbbb", "cccccc", "dddddd"))); assertThat(blocks.get(2).getBlockHash(), equalTo(hash("cccccc", "dddddd", "eeeeee"))); assertThat(blocks.get(0).getBlockHash().toString(), is("fffffeb6ae1af4c0")); assertThat(blocks.get(1).getBlockHash().toString(), is("fffffebd8512d120")); assertThat(blocks.get(2).getBlockHash().toString(), is("fffffec45c0aad80")); }
/**
 * Loads the keytab file and returns the distinct principal names it contains,
 * with backslashes normalized to forward slashes.
 *
 * @param keytabFileName path to the keytab file
 * @return the unique, normalized principal names
 * @throws IOException if the keytab cannot be read
 */
static final String[] getPrincipalNames(String keytabFileName) throws IOException {
    final Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
    final Set<String> names = new HashSet<>();
    for (PrincipalName principal : keytab.getPrincipals()) {
        names.add(principal.getName().replace("\\", "/"));
    }
    return names.toArray(new String[0]);
}
// A missing keytab must raise an exception; IllegalArgumentException is the expected flavor,
// a plain IOException is tolerated.
@Test public void testGetPrincipalNamesMissingKeytab() { try { KerberosUtil.getPrincipalNames(testKeytab); Assert.fail("Exception should have been thrown"); } catch (IllegalArgumentException e) { //expects exception } catch (IOException e) { } }
/**
 * Builds a serializer for the topic by resolving the latest registered schema
 * for the subject, then dispatching on the schema type (PROTOBUF/AVRO/JSON).
 *
 * @throws ValidationException when no schema is registered for the subject
 * @throws IllegalStateException when the schema id cannot be resolved or the type is unknown
 */
@Override
public Serializer serializer(String topic, Target type) {
    String subject = schemaSubject(topic, type);
    SchemaMetadata meta = getSchemaBySubject(subject)
        .orElseThrow(() -> new ValidationException(
            String.format("No schema for subject '%s' found", subject)));
    // Bug fix: the message previously read "Schema found for id ..." even though this
    // exception is thrown precisely when the schema is NOT found by id.
    ParsedSchema schema = getSchemaById(meta.getId())
        .orElseThrow(() -> new IllegalStateException(
            String.format("No schema found for id %s, subject '%s'", meta.getId(), subject)));
    SchemaType schemaType = SchemaType.fromString(meta.getSchemaType())
        .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + meta.getSchemaType()));
    return switch (schemaType) {
        case PROTOBUF -> input ->
            serializeProto(schemaRegistryClient, topic, type, (ProtobufSchema) schema, meta.getId(), input);
        case AVRO -> input -> serializeAvro((AvroSchema) schema, meta.getId(), input);
        case JSON -> input -> serializeJson((JsonSchema) schema, meta.getId(), input);
    };
}
// Registers an Avro record schema, serializes a JSON payload through the serde, and checks the
// bytes match the magic-byte + schema-id framing plus the Avro-encoded body.
@Test void serializeTreatsInputAsJsonAvroSchemaPayload() throws RestClientException, IOException { AvroSchema schema = new AvroSchema( "{" + " \"type\": \"record\"," + " \"name\": \"TestAvroRecord1\"," + " \"fields\": [" + " {" + " \"name\": \"field1\"," + " \"type\": \"string\"" + " }," + " {" + " \"name\": \"field2\"," + " \"type\": \"int\"" + " }" + " ]" + "}" ); String jsonValue = "{ \"field1\":\"testStr\", \"field2\": 123 }"; String topic = "test"; int schemaId = registryClient.register(topic + "-value", schema); byte[] serialized = serde.serializer(topic, Serde.Target.VALUE).serialize(jsonValue); byte[] expected = toBytesWithMagicByteAndSchemaId(schemaId, jsonValue, schema); assertThat(serialized).isEqualTo(expected); }
/**
 * Checks the user/password pair against the credential cache; the cache
 * loader performs the actual verification.
 */
public boolean authenticate(String user, String password) {
    final Credential credential = new Credential(user, password);
    return cache.getUnchecked(credential);
}
// Correct passwords (bcrypt and pbkdf2 users) authenticate; wrong passwords, cross-user
// passwords, and unknown users are all rejected.
@Test public void testAuthenticate() { PasswordStore store = createStore("userbcrypt:" + BCRYPT_PASSWORD, "userpbkdf2:" + PBKDF2_PASSWORD); assertTrue(store.authenticate("userbcrypt", "user123")); assertFalse(store.authenticate("userbcrypt", "user999")); assertFalse(store.authenticate("userbcrypt", "password")); assertTrue(store.authenticate("userpbkdf2", "password")); assertFalse(store.authenticate("userpbkdf2", "password999")); assertFalse(store.authenticate("userpbkdf2", "user123")); assertFalse(store.authenticate("baduser", "user123")); assertFalse(store.authenticate("baduser", "password")); }
/**
 * Routes the statement with the single rule. An empty caller context or a SELECT is
 * routed directly into the given context; otherwise routing happens in a scratch
 * context that is then merged back into the caller's context.
 */
@Override
public void route(final RouteContext routeContext, final SingleRule singleRule) {
    final boolean routeInPlace =
        routeContext.getRouteUnits().isEmpty() || sqlStatement instanceof SelectStatement;
    if (routeInPlace) {
        routeStatement(routeContext, singleRule);
        return;
    }
    final RouteContext scratchContext = new RouteContext();
    routeStatement(scratchContext, singleRule);
    combineRouteContext(routeContext, scratchContext);
}
// Routing a CREATE for an already-existing single table must raise TableExistsException.
@Test void assertRouteDuplicateSingleTable() { SingleStandardRouteEngine engine = new SingleStandardRouteEngine(Collections.singleton(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order")), mockStatement(false)); assertThrows(TableExistsException.class, () -> engine.route(new RouteContext(), mockSingleRule())); }
// Parses a decimal monetary string at the smallest-unit exponent into a Coin;
// malformed input (e.g. multiple decimal marks) propagates as NumberFormatException.
public Coin parse(String str) throws NumberFormatException { return Coin.valueOf(parseValue(str, Coin.SMALLEST_UNIT_EXPONENT)); }
// Two decimal marks in one number must be rejected.
@Test(expected = NumberFormatException.class) public void parseInvalidMultipleDecimalMarks() { NO_CODE.parse("1.0.0"); }
// Deprecated typed-counter factory: only Integer and Long counters are supported; any other
// Number type is rejected with IllegalArgumentException. The DefaultCounter is constructed
// inside each branch so no counter is created for unsupported types.
@Override @Deprecated @SuppressWarnings("unchecked") public <T extends Number> Counter<T> counter(String name, Class<T> type, Unit unit) { if (Integer.class.equals(type)) { return (Counter<T>) new DefaultCounter(unit).asIntCounter(); } if (Long.class.equals(type)) { return (Counter<T>) new DefaultCounter(unit).asLongCounter(); } throw new IllegalArgumentException( String.format("Counter for type %s is not supported", type.getName())); }
// An Integer counter must accumulate increments and report its unit.
@Test public void intCounter() { MetricsContext metricsContext = new DefaultMetricsContext(); MetricsContext.Counter<Integer> counter = metricsContext.counter("intCounter", Integer.class, MetricsContext.Unit.BYTES); counter.increment(5); assertThat(counter.value()).isEqualTo(5); assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.BYTES); }
/**
 * Dubbo consumer filter: attaches the caller application's name (read from the
 * invoker URL) to the RPC context so the provider side can resolve the origin,
 * then proceeds with the invocation.
 */
@Override
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
    final String applicationName = invoker.getUrl().getParameter(CommonConstants.APPLICATION_KEY);
    if (applicationName != null) {
        RpcContext.getContext().setAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY, applicationName);
    }
    return invoker.invoke(invocation);
}
// The application name from the URL must end up as an RPC-context attachment, and the
// invocation must be forwarded.
@Test public void testInvokeApplicationKey() { Invoker invoker = mock(Invoker.class); Invocation invocation = mock(Invocation.class); URL url = URL.valueOf("test://test:111/test?application=serviceA"); when(invoker.getUrl()).thenReturn(url); filter.invoke(invoker, invocation); verify(invoker).invoke(invocation); String application = RpcContext.getContext().getAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY); assertEquals("serviceA", application); }
/** Merges every entry of the given map into this context's local variable scope. */
@Override
public void setVariablesLocally(Map<String, Object> variables) {
    for (Map.Entry<String, Object> entry : variables.entrySet()) {
        this.variables.put(entry.getKey(), entry.getValue());
    }
}
// A variable set locally via the map must be readable back from the local scope.
@Test public void testSetVariablesLocally() { ProcessContextImpl context = new ProcessContextImpl(); Map<String, Object> map = new HashMap<>(); map.put("key", "value"); context.setVariablesLocally(map); Assertions.assertEquals("value", context.getVariableLocally("key")); }
// Decodes a Kinesis record payload according to the AWSMessageType fixed at setup time (no
// per-message detection). CloudWatch flavors gunzip the subscription envelope and emit one entry
// per log event with the event's own timestamp and the CloudWatch log group/stream; KINESIS_RAW
// emits a single UTF-8 entry stamped with the record's approximate arrival time (the best
// timestamp available for raw records); any other type logs an error and yields an empty list.
List<KinesisLogEntry> processMessages(final byte[] payloadBytes, Instant approximateArrivalTimestamp) throws IOException { // This method will be called from a codec, and therefore will not perform any detection. It will rely // exclusively on the AWSMessageType detected in the setup HealthCheck. // If a user needs to change the type of data stored in a stream, they will need to set the integration up again. if (awsMessageType == AWSMessageType.KINESIS_CLOUDWATCH_FLOW_LOGS || awsMessageType == AWSMessageType.KINESIS_CLOUDWATCH_RAW) { final CloudWatchLogSubscriptionData logSubscriptionData = decompressCloudWatchMessages(payloadBytes, objectMapper); return logSubscriptionData.logEvents().stream() .map(le -> { DateTime timestamp = new DateTime(le.timestamp(), DateTimeZone.UTC); return KinesisLogEntry.create(kinesisStream, // Use the log group and stream returned from CloudWatch. logSubscriptionData.logGroup(), logSubscriptionData.logStream(), timestamp, le.message()); }) .collect(Collectors.toList()); } else if (awsMessageType == AWSMessageType.KINESIS_RAW) { // The best timestamp available is the approximate arrival time of the message to the Kinesis stream. final DateTime timestamp = new DateTime(approximateArrivalTimestamp.toEpochMilli(), DateTimeZone.UTC); final KinesisLogEntry kinesisLogEntry = KinesisLogEntry.create(kinesisStream, "", "", timestamp, new String(payloadBytes, StandardCharsets.UTF_8)); return Collections.singletonList(kinesisLogEntry); } else { LOG.error("The AWSMessageType [{}] is not supported by the KinesisTransport", awsMessageType); return new ArrayList<>(); } }
// A CloudWatch flow-log payload must decode into two flow-log entries, each stamped
// with the CloudWatch event timestamp (not the Kinesis arrival time).
@Test
public void testCloudWatchFlowLogDecoding() throws IOException {
    final List<KinesisLogEntry> logEntries =
            flowLogDecoder.processMessages(AWSTestingUtils.cloudWatchFlowLogPayload(),
                                           Instant.ofEpochMilli(AWSTestingUtils.CLOUD_WATCH_TIMESTAMP.getMillis()));
    Assert.assertEquals(2, logEntries.size());
    // Verify that there are two flowlogs present in the parsed result.
    Assert.assertEquals(2, logEntries.stream().filter(logEntry -> {
        final AWSLogMessage logMessage = new AWSLogMessage(logEntry.message());
        return logMessage.isFlowLog();
    }).count());
    // Verify that both messages have the correct timestamp.
    Assert.assertEquals(2, logEntries.stream()
                                     .filter(logEntry -> logEntry.timestamp().equals(AWSTestingUtils.CLOUD_WATCH_TIMESTAMP))
                                     .count());
}
// Builds a factory that, given a KsqlConfig, instantiates the UDF instance, configures it
// (when Configurable) inside the UDF security sandbox, and wraps it for invocation —
// optionally decorated with a metrics producer when metrics are enabled.
private Function<KsqlConfig, Kudf> getUdfFactory(
    final Method method,
    final UdfDescription udfDescriptionAnnotation,
    final String functionName,
    final FunctionInvoker invoker,
    final String sensorName
) {
    return ksqlConfig -> {
        final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance(
            method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (actualUdf instanceof Configurable) {
            // Run user-supplied configure() with restricted permissions; popOutUdf in
            // finally guarantees the sandbox is exited even on failure.
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) actualUdf)
                    .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName));
            } finally {
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf);
        // When metrics are present, time each invocation via the named sensor.
        return metrics.<Kudf>map(m -> new UdfMetricProducer(
            m.getSensor(sensorName),
            theUdf,
            Time.SYSTEM
        )).orElse(theUdf);
    };
}
// Built-in UDFs must remain loadable even when loading of customer UDFs is disabled.
@Test
public void shouldLoadUdfsInKSQLIfLoadCustomerUdfsFalse() {
    // udf in ksql-engine will throw if not found
    FUNC_REG_WITHOUT_CUSTOM.getUdfFactory(FunctionName.of("substring"));
}
/**
 * Performs a single transition of the slot state machine: dispatches to the handler for
 * the current {@code dynamicState.state} and returns the resulting next state.
 *
 * @throws IllegalStateException if the current state has no handler
 */
static DynamicState stateMachineStep(DynamicState dynamicState, StaticState staticState) throws Exception {
    LOG.debug("STATE {}", dynamicState.state);
    switch (dynamicState.state) {
        case EMPTY:
            return handleEmpty(dynamicState, staticState);
        case RUNNING:
            return handleRunning(dynamicState, staticState);
        case WAITING_FOR_WORKER_START:
            return handleWaitingForWorkerStart(dynamicState, staticState);
        case KILL_BLOB_UPDATE:
            return handleKillBlobUpdate(dynamicState, staticState);
        case KILL_AND_RELAUNCH:
            return handleKillAndRelaunch(dynamicState, staticState);
        case KILL:
            return handleKill(dynamicState, staticState);
        case WAITING_FOR_BLOB_LOCALIZATION:
            return handleWaitingForBlobLocalization(dynamicState, staticState);
        case WAITING_FOR_BLOB_UPDATE:
            return handleWaitingForBlobUpdate(dynamicState, staticState);
        default:
            throw new IllegalStateException("Code not ready to handle a state of " + dynamicState.state);
    }
}
// Drives the slot state machine from EMPTY through blob localization, worker launch and
// into steady-state RUNNING, asserting the expected state and bookkeeping at each step.
// SimulatedTime controls the clock so heartbeat-timeout assertions are deterministic.
@Test
public void testLaunchContainerFromEmpty() throws Exception {
    try (SimulatedTime ignored = new SimulatedTime(1010)) {
        int port = 8080;
        String topoId = "NEW";
        List<ExecutorInfo> execList = mkExecutorInfoList(1, 2, 3, 4, 5);
        LocalAssignment newAssignment = mkLocalAssignment(topoId, execList, mkWorkerResources(100.0, 100.0, 100.0));
        AsyncLocalizer localizer = mock(AsyncLocalizer.class);
        BlobChangingCallback cb = mock(BlobChangingCallback.class);
        Container container = mock(Container.class);
        LocalState state = mock(LocalState.class);
        ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
        when(containerLauncher.launchContainer(port, newAssignment, state)).thenReturn(container);
        LSWorkerHeartbeat hb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs());
        when(container.readHeartbeat()).thenReturn(hb, hb);
        @SuppressWarnings("unchecked")
        CompletableFuture<Void> blobFuture = mock(CompletableFuture.class);
        when(localizer.requestDownloadTopologyBlobs(newAssignment, port, cb)).thenReturn(blobFuture);
        ISupervisor iSuper = mock(ISupervisor.class);
        SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
        StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
        DynamicState dynamicState = new DynamicState(null, null, null, slotMetrics)
                .withNewAssignment(newAssignment);
        // Step 1: EMPTY -> WAITING_FOR_BLOB_LOCALIZATION (download requested).
        DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
        verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb);
        assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
        assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
        assertEquals(newAssignment, nextState.pendingLocalization);
        assertEquals(0, Time.currentTimeMillis());
        // Step 2: blobs done -> container launched -> WAITING_FOR_WORKER_START.
        nextState = Slot.stateMachineStep(nextState, staticState);
        verify(blobFuture).get(1000, TimeUnit.MILLISECONDS);
        verify(containerLauncher).launchContainer(port, newAssignment, state);
        assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
        assertNull(nextState.pendingDownload, "pendingDownload is not null");
        assertNull(nextState.pendingLocalization);
        assertSame(newAssignment, nextState.currentAssignment);
        assertSame(container, nextState.container);
        assertEquals(0, Time.currentTimeMillis());
        // Step 3: heartbeat observed -> RUNNING.
        nextState = Slot.stateMachineStep(nextState, staticState);
        assertEquals(MachineState.RUNNING, nextState.state);
        assertNull(nextState.pendingDownload, "pendingDownload is not null");
        assertNull(nextState.pendingLocalization);
        assertSame(newAssignment, nextState.currentAssignment);
        assertSame(container, nextState.container);
        assertEquals(0, Time.currentTimeMillis());
        // Steps 4-5: stays RUNNING; simulated time advances across heartbeat checks.
        nextState = Slot.stateMachineStep(nextState, staticState);
        assertEquals(MachineState.RUNNING, nextState.state);
        assertNull(nextState.pendingDownload, "pendingDownload is not null");
        assertNull(nextState.pendingLocalization);
        assertSame(newAssignment, nextState.currentAssignment);
        assertSame(container, nextState.container);
        assertTrue(Time.currentTimeMillis() > 1000);
        nextState = Slot.stateMachineStep(nextState, staticState);
        assertEquals(MachineState.RUNNING, nextState.state);
        assertNull(nextState.pendingDownload, "pendingDownload is not null");
        assertNull(nextState.pendingLocalization);
        assertSame(newAssignment, nextState.currentAssignment);
        assertSame(container, nextState.container);
        assertTrue(Time.currentTimeMillis() > 2000);
    }
}
@Override
public boolean equals(Object o) {
    // Identity fast-path, then strict same-class check, then field-wise comparison.
    if (this == o) {
        return true;
    }
    if (o == null) {
        return false;
    }
    if (getClass() != o.getClass()) {
        return false;
    }
    final PluginInfo other = (PluginInfo) o;
    return Objects.equals(className, other.className)
            && Objects.equals(type, other.type)
            && Objects.equals(version, other.version);
}
// The NoVersionFilter must equal only the UNDEFINED_VERSION sentinel.
@Test
public void testNoVersionFilter() {
    PluginInfo.NoVersionFilter filter = new PluginInfo.NoVersionFilter();
    // We intentionally refrain from using assertEquals and assertNotEquals
    // here to ensure that the filter's equals() method is used
    assertFalse(filter.equals("1.0"));
    assertFalse(filter.equals(new Object()));
    assertFalse(filter.equals(null));
    assertTrue(filter.equals(PluginDesc.UNDEFINED_VERSION));
}
// OAuth2 authorize endpoint: validates the client/grant/redirect, checks or records the
// user's scope approval, and returns the redirect URL carrying either an authorization
// code (code mode) or an access token (implicit mode).
@PostMapping("/authorize")
@Operation(summary = "申请授权", description = "适合 code 授权码模式,或者 implicit 简化模式;在 sso.vue 单点登录界面被【提交】调用")
@Parameters({
        @Parameter(name = "response_type", required = true, description = "响应类型", example = "code"),
        @Parameter(name = "client_id", required = true, description = "客户端编号", example = "tudou"),
        @Parameter(name = "scope", description = "授权范围", example = "userinfo.read"), // uses a Map<String, Boolean> JSON format; Spring MVC does not yet support binding parameters this way
        @Parameter(name = "redirect_uri", required = true, description = "重定向 URI", example = "https://www.iocoder.cn"),
        @Parameter(name = "auto_approve", required = true, description = "用户是否接受", example = "true"),
        @Parameter(name = "state", example = "1")
})
public CommonResult<String> approveOrDeny(@RequestParam("response_type") String responseType,
                                          @RequestParam("client_id") String clientId,
                                          @RequestParam(value = "scope", required = false) String scope,
                                          @RequestParam("redirect_uri") String redirectUri,
                                          @RequestParam(value = "auto_approve") Boolean autoApprove,
                                          @RequestParam(value = "state", required = false) String state) {
    @SuppressWarnings("unchecked")
    Map<String, Boolean> scopes = JsonUtils.parseObject(scope, Map.class);
    scopes = ObjectUtil.defaultIfNull(scopes, Collections.emptyMap());
    // 0. The user is already authenticated at this point (enforced by Spring Security)
    // 1.1 Validate that responseType is either the code or the token value
    OAuth2GrantTypeEnum grantTypeEnum = getGrantTypeEnum(responseType);
    // 1.2 Validate the redirectUri domain is legal + validate scope is within the Client's authorized range
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientId, null,
            grantTypeEnum.getGrantType(), scopes.keySet(), redirectUri);
    // 2.1 Assuming approved is null, this is scenario one (auto-approve requested)
    if (Boolean.TRUE.equals(autoApprove)) {
        // If auto-approval cannot be granted, return an empty url; the front end then does not redirect
        if (!oauth2ApproveService.checkForPreApproval(getLoginUserId(), getUserType(), clientId, scopes.keySet())) {
            return success(null);
        }
    } else { // 2.2 Assuming approved is non-null, this is scenario two (explicit user decision)
        // If the computed approval does not pass, redirect to an error link
        if (!oauth2ApproveService.updateAfterApproval(getLoginUserId(), getUserType(), clientId, scopes)) {
            return success(OAuth2Utils.buildUnsuccessfulRedirect(redirectUri, responseType, state,
                    "access_denied", "User denied access"));
        }
    }
    // 3.1 For the code (authorization code) mode, issue an authorization code and redirect
    List<String> approveScopes = convertList(scopes.entrySet(), Map.Entry::getKey, Map.Entry::getValue);
    if (grantTypeEnum == OAuth2GrantTypeEnum.AUTHORIZATION_CODE) {
        return success(getAuthorizationCodeRedirect(getLoginUserId(), client, approveScopes, redirectUri, state));
    }
    // 3.2 For token, the implicit mode: issue an access token and redirect
    return success(getImplicitGrantRedirect(getLoginUserId(), client, approveScopes, redirectUri, state));
}
// An unknown response_type must be rejected with error code 400.
@Test
public void testApproveOrDeny_grantTypeError() {
    // Invoke with a random response_type and assert the expected service exception.
    assertServiceException(() -> oauth2OpenController.approveOrDeny(randomString(), null, null, null, null, null),
            new ErrorCode(400, "response_type 参数值只允许 code 和 token"));
}
/**
 * Renders {@code length} bytes of {@code data}, starting at {@code offset}, as
 * upper-case two-digit hex pairs separated by single spaces (e.g. "CA FE").
 * Returns the empty string when {@code length} is zero.
 */
public static String prettyHex(byte[] data, int offset, int length) {
    if (length == 0) {
        return "";
    }
    // 3 chars per byte (2 hex digits + separator) minus the missing trailing separator.
    final StringBuilder out = new StringBuilder(length * 3 - 1);
    for (int i = 0; i < length; i++) {
        if (i > 0) {
            out.append(' ');
        }
        out.append(String.format("%02X", data[offset + i]));
    }
    return out.toString();
}
// Bytes outside the [offset, offset + length) window must not appear in the output.
@Test
public void prettyHexStringWithOffset() {
    assertEquals("CA FE BA BE",
        ByteArrayUtils.prettyHex(
            new byte[] { 0, -54, -2, -70, -66, 0}, 1, 4)
    );
}
public Future<Void> executeFunction(final Supplier<Void> f) {
    // Adapt the Supplier to the executor's Callable-accepting submit overload.
    return executor.submit(() -> f.get());
}
// The future pool must actually run the supplied function and complete the returned future.
@Test
public void testSupplierSucceeds() throws Exception {
    ExecutorServiceFuturePool futurePool = new ExecutorServiceFuturePool(executorService);
    final AtomicBoolean atomicBoolean = new AtomicBoolean(false);
    Future<Void> future = futurePool.executeFunction(() -> {
        atomicBoolean.set(true);
        return null;
    });
    // Generous timeout: the work is trivial; this only guards against a hung pool.
    future.get(30, TimeUnit.SECONDS);
    assertTrue("atomicBoolean set to true?", atomicBoolean.get());
}
/**
 * @return the numeric identifier assigned to this instance.
 */
public int id() {
    return id;
}
@Test
public void testValues() {
    // The first four registrations are expected to carry sequential ids 0..3.
    for (int expectedId = 0; expectedId <= 3; expectedId++) {
        assertEquals(expectedId, REGISTRATIONS.get(expectedId).id());
    }
}
/**
 * @return a fresh cursor over this structure's 16-byte keys.
 */
@Override
public HashSlotCursor16byteKey cursor() {
    return new CursorLongKey2();
}
// Cursor contract: calling key2() before the first advance must trip an assertion.
@Test(expected = AssertionError.class)
@RequireAssertEnabled
public void testCursor_key2_withoutAdvance() {
    HashSlotCursor16byteKey cursor = hsa.cursor();
    cursor.key2();
}
/**
 * Computes the greatest common divisor of all given values by folding the two-argument
 * gcd over them. Since gcd(0, x) == x, zero is the fold identity — and also the result
 * for an empty argument list.
 */
public static long gcd(long... values) {
    long acc = 0;
    for (final long v : values) {
        acc = gcd(acc, v);
    }
    return acc;
}
// Varargs gcd: empty input yields the fold identity 0; a coprime member collapses the result to 1.
@Test
public void test_calculateGcdN() {
    assertEquals(0, gcd());
    assertEquals(4, gcd(4, 4, 4));
    assertEquals(4, gcd(4, 8, 12));
    assertEquals(1, gcd(4, 8, 13));
}
/**
 * Computes a smoothed first derivative of {@code input} using the filter's derivative
 * coefficients, dividing by {@code timeStep} to convert differences into rates.
 * Inputs shorter than the filter window fall back to a simple average derivative.
 */
@Override
public double[] smoothDerivative(double[] input) {
    if (input.length < weights.length) {
        return averageDerivativeForVeryShortTrack(input);
    }
    double[] smoothed = new double[input.length];
    int halfWindowFloored = weights.length / 2; // we want to exclude the center point
    for (int i = halfWindowFloored; i < input.length - halfWindowFloored; i++) {
        // NOTE(review): the loop bound is smoothCoeff.length while the coefficients read
        // are derivCoeff — presumably both arrays have the same length; confirm.
        for (int windowIndex = 0; windowIndex < smoothCoeff.length; windowIndex++) {
            smoothed[i] += derivCoeff[windowIndex] * input[i + windowIndex + offsetFromWindowCenter];
        }
        smoothed[i] = smoothed[i] / timeStep;
    }
    // Edge samples cannot be covered by a full window; fill them separately.
    fillSmoothDerivativeLeftSide(smoothed, halfWindowFloored);
    fillSmoothDerivativeRightSide(smoothed, halfWindowFloored);
    return smoothed;
}
// An empty input track must yield an empty derivative, not an exception.
@Test
public void Derivative_FromEmptyTrack_ReturnEmptyTrack() {
    SavitzkyGolayFilter test = new SavitzkyGolayFilter(1.0);
    double[] actual = test.smoothDerivative(new double[0]);
    assertThat(actual.length, equalTo(0));
}
@Override
public MaskRuleConfiguration findRuleConfiguration(final ShardingSphereDatabase database) {
    // Reuse the existing MaskRule's configuration when present; otherwise return an empty one.
    return database.getRuleMetaData().findSingleRule(MaskRule.class)
            .map(MaskRule::getConfiguration)
            .map(this::getConfiguration)
            .orElseGet(() -> new MaskRuleConfiguration(new LinkedList<>(), new LinkedHashMap<>()));
}
// When no MaskRule exists, a non-null empty default configuration must be returned.
@Test
void assertFindRuleConfigurationWhenRuleDoesNotExist() {
    ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
    when(database.getRuleMetaData().findSingleRule(MaskRule.class)).thenReturn(Optional.empty());
    assertTrue(new MaskTableChangedProcessor().findRuleConfiguration(database).getMaskAlgorithms().isEmpty());
}
/**
 * Ensures the buffer can hold {@code length} bytes starting at {@code index}, expanding
 * into a new direct ByteBuffer (and copying the existing content over) if necessary.
 *
 * @throws IndexOutOfBoundsException if index or length is negative, or the required
 *                                   capacity exceeds {@code MAX_BUFFER_LENGTH}
 */
protected final void ensureCapacity(final int index, final int length) {
    if (index < 0 || length < 0) {
        throw new IndexOutOfBoundsException("negative value: index=" + index + " length=" + length);
    }
    // long arithmetic so index + length cannot overflow int
    final long resultingPosition = index + (long)length;
    final int currentCapacity = capacity;
    if (resultingPosition > currentCapacity) {
        if (resultingPosition > MAX_BUFFER_LENGTH) {
            throw new IndexOutOfBoundsException(
                "index=" + index + " length=" + length + " maxCapacity=" + MAX_BUFFER_LENGTH);
        }
        final int newCapacity = calculateExpansion(currentCapacity, resultingPosition);
        final ByteBuffer newBuffer = ByteBuffer.allocateDirect(newCapacity);
        final long newAddress = address(newBuffer);
        // Copy old content before swapping in the new backing buffer and its address.
        getBytes(0, newBuffer, 0, currentCapacity);
        byteBuffer = newBuffer;
        addressOffset = newAddress;
        capacity = newCapacity;
    }
}
// ensureCapacity must not expand when the existing capacity already suffices.
@Test
void ensureCapacityIsANoOpIfExistingCapacityIsEnough() {
    final int index = 1;
    final int capacity = 5;
    final ExpandableDirectByteBuffer buffer = new ExpandableDirectByteBuffer(capacity);
    buffer.ensureCapacity(index, capacity - index);
    assertEquals(capacity, buffer.capacity());
}
/**
 * Determines the transfer action for a download: forced resume when requested, otherwise
 * the configured reload/default action. When that action is the {@code callback}
 * sentinel, the user is prompted only if a non-trivial local file already exists
 * (non-empty directory or non-zero-length file); otherwise overwrite is used.
 */
@Override
public TransferAction action(final Session<?> source, final Session<?> destination, final boolean resumeRequested, final boolean reloadRequested,
                             final TransferPrompt prompt, final ListProgressListener listener) throws BackgroundException {
    if(log.isDebugEnabled()) {
        log.debug(String.format("Find transfer action with prompt %s", prompt));
    }
    if(resumeRequested) {
        // Force resume by user or retry of failed transfer
        return TransferAction.resume;
    }
    final TransferAction action;
    if(reloadRequested) {
        action = TransferAction.forName(
            PreferencesFactory.get().getProperty("queue.download.reload.action"));
    }
    else {
        // Use default
        action = TransferAction.forName(
            PreferencesFactory.get().getProperty("queue.download.action"));
    }
    if(action.equals(TransferAction.callback)) {
        for(TransferItem download : roots) {
            final Local local = download.local;
            if(local.exists()) {
                if(local.isDirectory()) {
                    if(local.list().isEmpty()) {
                        // Do not prompt for existing empty directories
                        continue;
                    }
                }
                if(local.isFile()) {
                    if(local.attributes().getSize() == 0) {
                        // Dragging a file to the local volume creates the file already
                        continue;
                    }
                }
                // Prompt user to choose a filter
                return prompt.prompt(download);
            }
        }
        // No files exist yet therefore it is most straightforward to use the overwrite action
        return TransferAction.overwrite;
    }
    return action;
}
// With no local files existing yet, the transfer must default to overwrite without prompting.
@Test
public void testActionDirectoryExistsFalse() throws Exception {
    final Path root = new Path("t", EnumSet.of(Path.Type.directory));
    final Transfer t = new DownloadTransfer(new Host(new TestProtocol()), root, new NullLocal("p", "t") {
        @Override
        public boolean exists() {
            return false;
        }

        @Override
        public AttributedList<Local> list() {
            return new AttributedList<>(Collections.singletonList(new NullLocal("p", "a")));
        }
    });
    // NOTE(review): this flag is never set anywhere, so the final assertFalse is vacuous.
    final AtomicBoolean prompt = new AtomicBoolean();
    assertEquals(TransferAction.overwrite, t.action(null, new NullSession(new Host(new TestProtocol())), false, false, new DisabledTransferPrompt() {
        @Override
        public TransferAction prompt(final TransferItem file) {
            // Prompting in this scenario is a test failure.
            fail();
            return TransferAction.callback;
        }
    }, new DisabledListProgressListener()));
    assertFalse(prompt.get());
}
/**
 * Lexes a Teradata date-format string into its component tokens using the generated
 * ANTLR DateFormat lexer.
 */
public static List<? extends Token> tokenize(String format) {
    DateFormat lexer = new com.facebook.presto.teradata.functions.DateFormat(new ANTLRInputStream(format));
    return lexer.getAllTokens();
}
// The lexer must match greedily, preferring the longest token: "yyyy" is a single token,
// while "yyyyyy" splits into two.
@Test
public void testGreedinessLongFirst() {
    assertEquals(1, DateFormatParser.tokenize("yy").size());
    assertEquals(1, DateFormatParser.tokenize("yyyy").size());
    assertEquals(2, DateFormatParser.tokenize("yyyyyy").size());
}
/**
 * Static factory for a new, unconfigured {@code Inner} instance.
 */
public static <T> Inner<T> create() {
    return new Inner<T>();
}
// Filtering on nested schema fields: only elements satisfying both nested predicates survive.
@Test
@Category(NeedsRunner.class)
public void testFilterOnNestedField() {
    // Pass only elements where field1 == "pass" && field2 > 50.
    PCollection<AutoValue_FilterTest_Nested> filtered =
        pipeline
            .apply(
                Create.of(
                    new AutoValue_FilterTest_Nested(new AutoValue_FilterTest_Simple("pass", 52, 2)),
                    new AutoValue_FilterTest_Nested(new AutoValue_FilterTest_Simple("pass", 2, 2)),
                    new AutoValue_FilterTest_Nested(
                        new AutoValue_FilterTest_Simple("fail", 100, 100))))
            .apply(
                Filter.<AutoValue_FilterTest_Nested>create()
                    .whereFieldName("nested.field1", s -> "pass".equals(s))
                    .whereFieldName("nested.field2", (Integer i) -> i > 50));
    PAssert.that(filtered)
        .containsInAnyOrder(
            new AutoValue_FilterTest_Nested(new AutoValue_FilterTest_Simple("pass", 52, 2)));
    pipeline.run();
}
public Map<String, String> cacheLevelTagMap(final String threadId, final String taskId, final String storeName) {
    // LinkedHashMap keeps the tags in insertion order: thread, task, record cache.
    final Map<String, String> tags = new LinkedHashMap<>(3);
    tags.put(THREAD_ID_TAG, threadId);
    tags.put(TASK_ID_TAG, taskId);
    tags.put(RECORD_CACHE_ID_TAG, storeName);
    return tags;
}
// The cache-level tag map must carry exactly the thread, task and record-cache tags.
@Test
public void shouldGetCacheLevelTagMap() {
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, THREAD_ID1, VERSION, time);
    final String taskName = "taskName";
    final String storeName = "storeName";

    final Map<String, String> tagMap = streamsMetrics.cacheLevelTagMap(THREAD_ID1, taskName, storeName);

    assertThat(tagMap.size(), equalTo(3));
    assertThat(
        tagMap.get(StreamsMetricsImpl.THREAD_ID_TAG),
        equalTo(THREAD_ID1)
    );
    assertThat(tagMap.get(TASK_ID_TAG), equalTo(taskName));
    assertThat(tagMap.get(RECORD_CACHE_ID_TAG), equalTo(storeName));
}
@Override
public <T> Optional<T> getProperty(String key, Class<T> targetType) {
    // Translate the external key, then attempt to bind it to the requested type.
    var bindResult = binder.bind(targetPropertyName(key), Bindable.of(targetType));
    if (bindResult.isBound()) {
        return Optional.of(bindResult.get());
    }
    return Optional.empty();
}
// Scalar properties must resolve with conversion to the requested target type.
@Test
void resolvedSingleValueProperties() {
    env.setProperty("prop.0.strProp", "testStr");
    env.setProperty("prop.0.intProp", "123");

    var resolver = new PropertyResolverImpl(env);
    assertThat(resolver.getProperty("prop.0.strProp", String.class))
            .hasValue("testStr");
    assertThat(resolver.getProperty("prop.0.intProp", Integer.class))
            .hasValue(123);
}
/**
 * Periodic safety check: re-reads channel ownership (in case the ZK watcher failed),
 * configures system topics when this broker owns the channel, and re-plays the
 * leader/follower role when the cached role has drifted from actual ownership.
 * All failures are logged and swallowed so the monitor can run again.
 */
@VisibleForTesting
protected void monitor() {
    try {
        initWaiter.get();

        // Monitor role
        // Periodically check the role in case ZK watcher fails.
        var isChannelOwner = serviceUnitStateChannel.isChannelOwner();
        if (isChannelOwner) {
            // System topic config might fail due to the race condition
            // with topic policy init(Topic policies cache have not init).
            if (!configuredSystemTopics) {
                configuredSystemTopics = configureSystemTopics(pulsar);
            }
            if (role != Leader) {
                log.warn("Current role:{} does not match with the channel ownership:{}. "
                        + "Playing the leader role.", role, isChannelOwner);
                playLeader();
            }
        } else {
            if (role != Follower) {
                log.warn("Current role:{} does not match with the channel ownership:{}. "
                        + "Playing the follower role.", role, isChannelOwner);
                playFollower();
            }
        }
    } catch (Throwable e) {
        log.error("Failed to get the channel ownership.", e);
    }
}
// Repeatedly drives monitor() on both load managers until the 5 MiB compaction threshold
// becomes visible on the service-unit-state channel topic's policies.
@Test(timeOut = 30 * 1000)
public void compactionScheduleTest() {

    Awaitility.await()
            .pollInterval(200, TimeUnit.MILLISECONDS)
            .atMost(30, TimeUnit.SECONDS)
            .ignoreExceptions()
            .untilAsserted(() -> { // wait until true
                primaryLoadManager.monitor();
                secondaryLoadManager.monitor();
                var threshold = admin.topicPolicies()
                        .getCompactionThreshold(ServiceUnitStateChannelImpl.TOPIC, false);
                AssertJUnit.assertEquals(5 * 1024 * 1024, threshold == null ? 0 : threshold.longValue());
            });
}
/**
 * Flattens the server's settings list into a key→value map. Inherited settings are
 * skipped, except the "*.file.suffixes" / "*.file.patterns" keys needed for language
 * detection. Multi-value settings are joined into a CSV string; property sets are
 * expanded via {@code convertPropertySetToProps}. Secured keys with no value are
 * tolerated; any other valueless key is an error.
 */
static Map<String, String> toMap(List<Settings.Setting> settingsList) {
    Map<String, String> result = new LinkedHashMap<>();
    for (Settings.Setting s : settingsList) {
        // we need the "*.file.suffixes" and "*.file.patterns" properties for language detection
        // see DefaultLanguagesRepository.populateFileSuffixesAndPatterns()
        if (!s.getInherited() || s.getKey().endsWith(".file.suffixes") || s.getKey().endsWith(".file.patterns")) {
            switch (s.getValueOneOfCase()) {
                case VALUE:
                    result.put(s.getKey(), s.getValue());
                    break;
                case VALUES:
                    // CSV-escape each value so commas inside values survive the join.
                    result.put(s.getKey(), s.getValues().getValuesList().stream().map(StringEscapeUtils::escapeCsv).collect(Collectors.joining(",")));
                    break;
                case FIELDVALUES:
                    convertPropertySetToProps(result, s);
                    break;
                default:
                    if (!s.getKey().endsWith(".secured")) {
                        throw new IllegalStateException("Unknown property value for " + s.getKey());
                    }
            }
        }
    }
    return result;
}
// Multi-value settings must be flattened into a single CSV entry.
@Test
public void should_load_global_multivalue_settings() {
    assertThat(AbstractSettingsLoader.toMap(singletonList(Setting.newBuilder()
        .setKey("sonar.preview.supportedPlugins")
        .setValues(Values.newBuilder().addValues("java").addValues("php")).build())))
            .containsExactly(entry("sonar.preview.supportedPlugins", "java,php"));
}
/**
 * Collects one file-descriptor statistics entry per node returned by {@code nodes()}.
 */
@Override
public Set<NodeFileDescriptorStats> fileDescriptorStats() {
    final List<NodeResponse> result = nodes();
    return result.stream()
            .map(node -> NodeFileDescriptorStats.create(node.name(), node.ip(), node.host(), node.fileDescriptorMax()))
            .collect(Collectors.toSet());
}
// Only the node reporting complete information should yield a descriptor-stats entry.
@Test
void testFileDescriptorStats() {
    doReturn(List.of(NODE_WITH_CORRECT_INFO, NODE_WITH_MISSING_DISK_STATISTICS)).when(catApi).nodes();
    final Set<NodeFileDescriptorStats> nodeFileDescriptorStats = clusterAdapter.fileDescriptorStats();
    assertThat(nodeFileDescriptorStats)
            .hasSize(1)
            .noneSatisfy(
                    nodeDescr -> assertThat(nodeDescr.name()).isEqualTo("nodeWithMissingDiskStatistics")
            )
            .first()
            .satisfies(
                    nodeDescr -> {
                        assertThat(nodeDescr.name()).isEqualTo("nodeWithCorrectInfo");
                        assertThat(nodeDescr.ip()).isEqualTo("182.88.0.2");
                        assertThat(nodeDescr.fileDescriptorMax()).isPresent();
                        assertThat(nodeDescr.fileDescriptorMax().get()).isEqualTo(1048576L);
                    }
            );
}
/**
 * Splits {@code text} into trimmed segments at the boundaries reported by the
 * configured BreakIterator.
 */
@Override
public String[] split(String text) {
    boundary.setText(text);
    ArrayList<String> segments = new ArrayList<>();
    int begin = boundary.first();
    int end = boundary.next();
    while (end != BreakIterator.DONE) {
        segments.add(text.substring(begin, end).trim());
        begin = end;
        end = boundary.next();
    }
    return segments.toArray(new String[0]);
}
// Sentence splitting on text with abbreviations; the expected segments below document
// exactly where this splitter breaks (including after "Mr." and "B.").
@Test
public void testSplit() {
    System.out.println("split");
    String text = "THE BIG RIPOFF\n\nMr. John B. Smith bought cheapsite.com for 1.5 million dollars, i.e. he paid far too much for it. Did he mind? Adam Jones Jr. thinks he didn't. In any case, this isn't true... Well, with a probability of .9 it isn't. ";
    String[] expResult = {
        "THE BIG RIPOFF\n\nMr.",
        "John B.",
        "Smith bought cheapsite.com for 1.5 million dollars, i.e. he paid far too much for it.",
        "Did he mind?",
        "Adam Jones Jr. thinks he didn't.",
        "In any case, this isn't true...",
        "Well, with a probability of .9 it isn't."
    };
    BreakIteratorSentenceSplitter instance = new BreakIteratorSentenceSplitter();
    String[] result = instance.split(text);
    assertEquals(expResult.length, result.length);
    for (int i = 0; i < result.length; i++)
        assertEquals(expResult[i], result[i]);
}
/**
 * @return a read-only view of the underlying scenario data; mutators on the returned
 *         list throw {@link UnsupportedOperationException}.
 */
public List<T> getUnmodifiableData() {
    return Collections.unmodifiableList(scesimData);
}
// The list returned by getUnmodifiableData() must reject mutation.
@Test
public void getUnmodifiableData_isUnmodifiable() {
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> model.getUnmodifiableData().add(new Scenario()));
}
/**
 * Validates that {@code value} conforms to {@code schema}, delegating to the
 * name-aware overload with a null field name.
 */
public static void validateValue(Schema schema, Object value) {
    validateValue(null, schema, value);
}
// Every schema type must accept a value of its corresponding Java type, including
// byte buffers for BYTES, complex array/map schemas and nested structs.
@Test
public void testValidateValueMatchingType() {
    ConnectSchema.validateValue(Schema.INT8_SCHEMA, (byte) 1);
    ConnectSchema.validateValue(Schema.INT16_SCHEMA, (short) 1);
    ConnectSchema.validateValue(Schema.INT32_SCHEMA, 1);
    ConnectSchema.validateValue(Schema.INT64_SCHEMA, (long) 1);
    ConnectSchema.validateValue(Schema.FLOAT32_SCHEMA, 1.f);
    ConnectSchema.validateValue(Schema.FLOAT64_SCHEMA, 1.);
    ConnectSchema.validateValue(Schema.BOOLEAN_SCHEMA, true);
    ConnectSchema.validateValue(Schema.STRING_SCHEMA, "a string");
    ConnectSchema.validateValue(Schema.BYTES_SCHEMA, "a byte array".getBytes());
    ConnectSchema.validateValue(Schema.BYTES_SCHEMA, ByteBuffer.wrap("a byte array".getBytes()));
    ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3));
    ConnectSchema.validateValue(
        SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.STRING_SCHEMA).build(),
        Collections.singletonMap(1, "value")
    );
    // Struct tests the basic struct layout + complex field types + nested structs
    Struct structValue = new Struct(STRUCT_SCHEMA)
            .put("first", 1)
            .put("second", "foo")
            .put("array", Arrays.asList(1, 2, 3))
            .put("map", Collections.singletonMap(1, "value"))
            .put("nested", new Struct(FLAT_STRUCT_SCHEMA).put("field", 12));
    ConnectSchema.validateValue(STRUCT_SCHEMA, structValue);
}
@Override public void shutdown() { LOG.info("Shutting down FederationInterceptor for {}", this.attemptId); // Do not stop uamPool service and kill UAMs here because of possible second // app attempt try { this.uamPool.shutDownConnections(); } catch (YarnException e) { LOG.error("Error shutting down all UAM clients without killing them", e); } if (this.threadpool != null) { try { this.threadpool.shutdown(); } catch (Throwable ex) { } this.threadpool = null; } // Stop the home heartbeat thread this.homeHeartbeatHandler.shutdown(); this.homeRMRelayer.shutdown(); // Shutdown needs to clean up app removeAppFromRegistry(); super.shutdown(); }
// Two AMs register concurrently: the first blocks inside the mock RM while the second
// proceeds; after the first is released, both must receive non-null responses.
@Test(timeout = 5000)
public void testConcurrentRegister()
    throws InterruptedException, ExecutionException {
    ExecutorService threadpool = Executors.newCachedThreadPool();
    ExecutorCompletionService<RegisterApplicationMasterResponse> compSvc =
        new ExecutorCompletionService<>(threadpool);

    Object syncObj = MockResourceManagerFacade.getRegisterSyncObj();

    // Two register threads
    synchronized (syncObj) {
        // Make sure first thread will block within RM, before the second thread
        // starts
        LOG.info("Starting first register thread");
        compSvc.submit(new ConcurrentRegisterAMCallable());

        try {
            LOG.info("Test main starts waiting for the first thread to block");
            syncObj.wait();
            LOG.info("Test main wait finished");
        } catch (Exception e) {
            LOG.info("Test main wait interrupted", e);
        }
    }

    // The second thread will get already registered exception from RM.
    LOG.info("Starting second register thread");
    compSvc.submit(new ConcurrentRegisterAMCallable());

    // Notify the first register thread to return
    LOG.info("Let first blocked register thread move on");
    synchronized (syncObj) {
        syncObj.notifyAll();
    }

    // Both thread should return without exception
    RegisterApplicationMasterResponse response = compSvc.take().get();
    Assert.assertNotNull(response);

    response = compSvc.take().get();
    Assert.assertNotNull(response);

    threadpool.shutdown();
}
/**
 * Checks annotation presence by delegating to the name-based overload using the
 * annotation class's fully-qualified name.
 */
@Override
@PublicAPI(usage = ACCESS)
public boolean isAnnotatedWith(Class<? extends Annotation> type) {
    return isAnnotatedWith(type.getName());
}
// isAnnotatedWith(Class) must match only annotations actually present on the member.
@Test
public void isAnnotatedWith_type() {
    assertThat(importField(SomeClass.class, "someField").isAnnotatedWith(SomeAnnotation.class))
        .as("field is annotated with @%s", SomeAnnotation.class.getSimpleName()).isTrue();
    assertThat(importField(SomeClass.class, "someField").isAnnotatedWith(Retention.class))
        .as("field is annotated with @Retention").isFalse();
}
/**
 * Computes the resolution {@code Action} for reading data written with {@code writer}
 * using {@code reader}, after applying the reader's aliases to the writer schema.
 */
public static Action resolve(Schema writer, Schema reader, GenericData data) {
    return resolve(Schema.applyAliases(writer, reader), reader, data, new HashMap<>());
}
// Enum resolution: writer symbols are remapped onto reader indices, with the reader
// default substituted for writer symbols unknown to the reader; then the mapping is
// verified end-to-end through the fast reader.
@Test
void resolveEnum() throws IOException {
    final Schema writeSchema = Schema.createEnum("myEnum", "", "n1", Arrays.asList("e1", "e3", "e4"));
    final Schema readSchema = Schema.createEnum("myEnum", "", "n1", Arrays.asList("e1", "e2", "e3"), "e2");

    Resolver.Action action = Resolver.resolve(writeSchema, readSchema);
    Assertions.assertNotNull(action);
    Assertions.assertEquals(action.type, Resolver.Action.Type.ENUM);
    MatcherAssert.assertThat("Wrong class for action", action, Matchers.instanceOf(Resolver.EnumAdjust.class));

    Resolver.EnumAdjust adjust = (Resolver.EnumAdjust) action;
    // Writer index -> reader index adjustments, and the resolved reader-side symbols.
    Assertions.assertArrayEquals(new int[] { 0, 2, 1 }, adjust.adjustments);
    Assertions.assertEquals("e1", adjust.values[0].toString());
    Assertions.assertEquals("e3", adjust.values[1].toString());
    Assertions.assertEquals("e2", adjust.values[2].toString());

    FastReaderBuilder reader = FastReaderBuilder.get();
    Schema writeRecord = Schema.createRecord("rec1", "", "", false, Arrays.asList(new Schema.Field("f1", writeSchema, "")));
    Schema readRecord = Schema.createRecord("rec1", "", "", false, Arrays.asList(new Schema.Field("f1", readSchema, "")));
    DatumReader<Object> datumReader = reader.createDatumReader(writeRecord, readRecord);

    JsonDecoder e2 = DecoderFactory.get().jsonDecoder(readRecord, "{ \"f1\" : \"e2\" }");
    Object read = datumReader.read(null, e2);
    Assertions.assertNotNull(read);
    MatcherAssert.assertThat("", read, Matchers.instanceOf(IndexedRecord.class));
    IndexedRecord result = (IndexedRecord) read;
    Assertions.assertEquals("e3", result.get(0).toString());
}
/**
 * Computes the events implied by the difference between the cluster states described by
 * {@code params}: per-node diffs, a whole-cluster diff, and derived bucket-space
 * state diffs.
 */
public static List<Event> computeEventDiff(final Params params) {
    final List<Event> events = new ArrayList<>();
    // NOTE(review): createBaselineParams(params) is invoked twice — presumably it is
    // cheap and side-effect free; confirm, or hoist into a local.
    emitPerNodeDiffEvents(createBaselineParams(params), events);
    emitWholeClusterDiffEvent(createBaselineParams(params), events);
    emitDerivedBucketSpaceStatesDiffEvents(params, events);
    return events;
}
// A single distributor going down between two cluster states must produce exactly one
// CURRENT node event describing the 'U' -> 'D' transition.
@Test
void single_distributor_node_state_transition_emits_altered_node_state_event() {
    final EventFixture fixture = EventFixture.createForNodes(3)
            .clusterStateBefore("distributor:3 storage:3")
            .clusterStateAfter("distributor:3 .1.s:d storage:3");

    final List<Event> events = fixture.computeEventDiff();
    assertThat(events.size(), equalTo(1));
    assertThat(events, hasItem(allOf(
            eventForNode(distributorNode(1)),
            eventTypeIs(NodeEvent.Type.CURRENT),
            nodeEventWithDescription("Altered node state in cluster state from 'U' to 'D'"))));
}
/**
 * REST endpoint: returns the cluster info payload, negotiated as UTF-8 JSON or XML.
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
    return getClusterInfo();
}
// XML content negotiation: the scheduler endpoint must answer with UTF-8 XML.
@Test
public void testClusterSchedulerFifoXML() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("scheduler").accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    String xml = response.getEntity(String.class);
    verifySchedulerFifoXML(xml);
}
/**
 * Human-readable representation of this MPLS header: label, bottom-of-stack flag,
 * TTL and the encapsulated protocol byte.
 */
@Override
public String toString() {
    return toStringHelper(getClass())
        .add("label", Integer.toString(label))
        .add("bos", Byte.toString(bos))
        .add("ttl", Byte.toString(ttl))
        .add("protocol", Byte.toString(protocol))
        .toString();
}
// toString must surface every MPLS header field parsed from the packet bytes.
@Test
public void testToStringMpls() throws Exception {
    MPLS mpls = deserializer.deserialize(bytes, 0, bytes.length);
    String str = mpls.toString();
    assertTrue(StringUtils.contains(str, "label=" + label));
    assertTrue(StringUtils.contains(str, "bos=" + bos));
    assertTrue(StringUtils.contains(str, "ttl=" + ttl));
    assertTrue(StringUtils.contains(str, "protocol=" + protocol));
}
/**
 * Static factory: returns a {@code WriteBuilder} targeting {@code file}.
 */
public static WriteBuilder write(OutputFile file) {
    return new WriteBuilder(file);
}
// Writes one tiny and one oversized string record into separate Parquet row
// groups (forced via the size/record-count knobs), so that only the first row
// group carries column statistics; file-level metrics must then report no
// null counts or bounds at all, since one row group's stats are missing.
@Test
public void testMetricsMissingColumnStatisticsInRowGroups() throws IOException {
  Schema schema = new Schema(optional(1, "stringCol", Types.StringType.get()));
  File file = createTempFile(temp);

  List<GenericData.Record> records = Lists.newArrayListWithCapacity(1);
  org.apache.avro.Schema avroSchema = AvroSchemaUtil.convert(schema.asStruct());
  GenericData.Record smallRecord = new GenericData.Record(avroSchema);
  smallRecord.put("stringCol", "test");
  records.add(smallRecord);
  // 2048 chars exceeds the max statistics size, so parquet-mr drops the stats
  // for this record's row group entirely.
  GenericData.Record largeRecord = new GenericData.Record(avroSchema);
  largeRecord.put("stringCol", Strings.repeat("a", 2048));
  records.add(largeRecord);

  write(
      file,
      schema,
      ImmutableMap.<String, String>builder()
          .put(PARQUET_ROW_GROUP_SIZE_BYTES, "1")
          .put(PARQUET_ROW_GROUP_CHECK_MIN_RECORD_COUNT, "1")
          .put(PARQUET_ROW_GROUP_CHECK_MAX_RECORD_COUNT, "1")
          .buildOrThrow(),
      ParquetAvroWriter::buildWriter,
      records.toArray(new GenericData.Record[] {}));

  InputFile inputFile = Files.localInput(file);
  try (ParquetFileReader reader = ParquetFileReader.open(ParquetIO.file(inputFile))) {
    assertThat(reader.getRowGroups()).hasSize(2);
    List<BlockMetaData> blocks = reader.getFooter().getBlocks();
    assertThat(blocks).hasSize(2);
    Statistics<?> smallStatistics = getOnlyElement(blocks.get(0).getColumns()).getStatistics();
    assertThat(smallStatistics.hasNonNullValue()).isTrue();
    assertThat(smallStatistics.getMinBytes()).isEqualTo("test".getBytes(UTF_8));
    assertThat(smallStatistics.getMaxBytes()).isEqualTo("test".getBytes(UTF_8));

    // parquet-mr doesn't write stats larger than the max size rather than truncating
    Statistics<?> largeStatistics = getOnlyElement(blocks.get(1).getColumns()).getStatistics();
    assertThat(largeStatistics.hasNonNullValue()).isFalse();
    assertThat(largeStatistics.getMinBytes()).isNull();
    assertThat(largeStatistics.getMaxBytes()).isNull();
  }

  // Null count, lower and upper bounds should be empty because
  // one of the statistics in row groups is missing
  Metrics metrics = ParquetUtil.fileMetrics(inputFile, MetricsConfig.getDefault());
  assertThat(metrics.nullValueCounts()).isEmpty();
  assertThat(metrics.lowerBounds()).isEmpty();
  assertThat(metrics.upperBounds()).isEmpty();
}
/**
 * Asserts that the two SEL types are identical.
 *
 * @param lhs type on the left-hand side of the operation
 * @param rhs type on the right-hand side of the operation
 * @throws IllegalArgumentException when the types differ
 */
public static void checkTypeMatch(SelTypes lhs, SelTypes rhs) {
    if (lhs == rhs) {
        return;  // types agree -- nothing to do
    }
    throw new IllegalArgumentException(
        "Type mismatch, lhs type: " + lhs + ", rhs object type: " + rhs);
}
// NULL vs VOID must be rejected; the expected attribute asserts the throw.
@Test(expected = IllegalArgumentException.class)
public void testTypeMisMatch() {
    SelTypeUtil.checkTypeMatch(SelTypes.NULL, SelTypes.VOID);
}
/**
 * Collects the type arguments of {@code typeRef} and, recursively, the type
 * arguments of those arguments, preserving first-seen order and dropping
 * duplicates.
 *
 * @param typeRef the generic type to inspect
 * @return all transitively reachable type arguments, deduplicated in order
 */
public static List<TypeRef<?>> getAllTypeArguments(TypeRef typeRef) {
    List<TypeRef<?>> directArguments = getTypeArguments(typeRef);
    // LinkedHashSet keeps insertion order while removing repeats.
    LinkedHashSet<TypeRef<?>> ordered = new LinkedHashSet<>(directArguments);
    directArguments.forEach(argument -> ordered.addAll(getAllTypeArguments(argument)));
    return new ArrayList<>(ordered);
}
// Nested generic Tuple2<String, Map<String, BeanA>> should flatten to three
// distinct argument types, with BeanA (the deepest) appearing last.
@Test
public void getAllTypeArguments() {
    TypeRef<Tuple2<String, Map<String, BeanA>>> typeRef =
        new TypeRef<Tuple2<String, Map<String, BeanA>>>() {};
    List<TypeRef<?>> allTypeArguments = TypeUtils.getAllTypeArguments(typeRef);
    assertEquals(allTypeArguments.size(), 3);
    assertEquals(allTypeArguments.get(2).getRawType(), BeanA.class);
}
/**
 * Parses plain text: auto-detects the character encoding, records the
 * detected media type and charset in the metadata, then streams the decoded
 * characters to the handler as a single XHTML paragraph.
 */
@Override
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    // Automatically detect the character encoding
    try (AutoDetectReader reader = new AutoDetectReader(CloseShieldInputStream.wrap(stream),
            metadata, getEncodingDetector(context))) {
        //try to get detected content type; could be a subclass of text/plain
        //such as vcal, etc.
        String incomingMime = metadata.get(Metadata.CONTENT_TYPE);
        MediaType mediaType = MediaType.TEXT_PLAIN;
        if (incomingMime != null) {
            MediaType tmpMediaType = MediaType.parse(incomingMime);
            if (tmpMediaType != null) {
                mediaType = tmpMediaType;
            }
        }
        Charset charset = reader.getCharset();
        // Re-publish the content type with the detected charset attached.
        MediaType type = new MediaType(mediaType, charset);
        metadata.set(Metadata.CONTENT_TYPE, type.toString());
        // deprecated, see TIKA-431
        metadata.set(Metadata.CONTENT_ENCODING, charset.name());

        XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
        xhtml.startDocument();
        xhtml.startElement("p");
        // Copy the decoded characters through in 4096-char chunks.
        char[] buffer = new char[4096];
        int n = reader.read(buffer);
        while (n != -1) {
            xhtml.characters(buffer, 0, n);
            n = reader.read(buffer);
        }
        xhtml.endElement("p");
        xhtml.endDocument();
    }
}
// Charset-detection heuristics on short Latin inputs: CRLF text is reported
// as windows-1252, LF-only text as ISO-8859-1, and the presence of the euro
// sign forces ISO-8859-15.
@Test
public void testLatinDetectionHeuristics() throws Exception {
    String windows = "test\r\n";
    String unix = "test\n";
    String euro = "test \u20ac\n";

    Metadata metadata;

    metadata = new Metadata();
    parser.parse(new ByteArrayInputStream(windows.getBytes("ISO-8859-15")),
        new DefaultHandler(), metadata, new ParseContext());
    assertEquals("text/plain; charset=windows-1252", metadata.get(Metadata.CONTENT_TYPE));
    assertEquals("UniversalEncodingDetector", metadata.get(TikaCoreProperties.ENCODING_DETECTOR));
    assertEquals("windows-1252", metadata.get(TikaCoreProperties.DETECTED_ENCODING));

    metadata = new Metadata();
    parser.parse(new ByteArrayInputStream(unix.getBytes("ISO-8859-15")),
        new DefaultHandler(), metadata, new ParseContext());
    assertEquals("text/plain; charset=ISO-8859-1", metadata.get(Metadata.CONTENT_TYPE));

    metadata = new Metadata();
    parser.parse(new ByteArrayInputStream(euro.getBytes("ISO-8859-15")),
        new DefaultHandler(), metadata, new ParseContext());
    assertEquals("text/plain; charset=ISO-8859-15", metadata.get(Metadata.CONTENT_TYPE));
}
/**
 * Builds a Statement from the ANTLR parse tree, first resolving the data
 * sources the statement references so the builder can bind them.
 *
 * @param parseTree parsed statement context
 * @return the constructed AST statement
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
    return build(Optional.of(getSources(parseTree)), parseTree);
}
// A SELECT with a lambda argument should produce a FunctionCall whose second
// argument is a LambdaFunctionCall over variable X wrapping the X + 5
// arithmetic expression.
@Test
public void shouldBuildLambdaFunction() {
    // Given:
    final SingleStatementContext stmt =
        givenQuery("SELECT TRANSFORM_ARRAY(Col4, X => X + 5) FROM TEST1;");

    // When:
    final Query result = (Query) builder.buildStatement(stmt);

    // Then:
    assertThat(result.getSelect(), is(new Select(ImmutableList.of(
        new SingleColumn(
            new FunctionCall(
                FunctionName.of("TRANSFORM_ARRAY"),
                ImmutableList.of(
                    column("COL4"),
                    new LambdaFunctionCall(
                        ImmutableList.of("X"),
                        new ArithmeticBinaryExpression(
                            Operator.ADD,
                            new LambdaVariable("X"),
                            new IntegerLiteral(5))
                    )
                )
            ),
            Optional.empty())
    ))));
}
/**
 * Lists every table name in the given database, delegating to the Hive
 * metastore operations layer.
 */
@Override
public List<String> listTableNames(String dbName) {
    return hmsOps.getAllTableNames(dbName);
}
// Table names for db1 should come straight from the (mocked) metastore ops.
@Test
public void testListTableNames() {
    List<String> databaseNames = hudiMetadata.listTableNames("db1");
    Assert.assertEquals(Lists.newArrayList("table1", "table2"), databaseNames);
}
/**
 * Projects {@code record}, written with {@code source}, onto {@code target}.
 *
 * <p>An optional writer schema can only be projected onto a required reader
 * schema when the reader supplies a default value; that default is substituted
 * whenever the record is null.
 *
 * @throws SchemaProjectorException when the schemas are incompatible, or when
 *     an optional-to-required projection lacks a default value
 */
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException {
    checkMaybeCompatible(source, target);
    final boolean optionalToRequired = source.isOptional() && !target.isOptional();
    if (optionalToRequired && target.defaultValue() == null) {
        // No default to fall back on: reject regardless of the record value.
        throw new SchemaProjectorException(
            "Writer schema is optional, however, target schema does not provide a default value.");
    }
    if (record != null) {
        return projectRequiredSchema(source, record, target);
    }
    // Null record: substitute the reader default when required, else stay null.
    return optionalToRequired ? target.defaultValue() : null;
}
// Identity projections between matching primitive schemas succeed (required ->
// required and required -> optional); projecting an optional writer schema
// onto a required reader schema with no default must throw DataException.
@Test
public void testPrimitiveTypeProjection() {
    Object projected;
    projected = SchemaProjector.project(Schema.BOOLEAN_SCHEMA, false, Schema.BOOLEAN_SCHEMA);
    assertEquals(false, projected);

    byte[] bytes = {(byte) 1, (byte) 2};
    projected = SchemaProjector.project(Schema.BYTES_SCHEMA, bytes, Schema.BYTES_SCHEMA);
    assertEquals(bytes, projected);
    projected = SchemaProjector.project(Schema.STRING_SCHEMA, "abc", Schema.STRING_SCHEMA);
    assertEquals("abc", projected);

    projected = SchemaProjector.project(Schema.BOOLEAN_SCHEMA, false, Schema.OPTIONAL_BOOLEAN_SCHEMA);
    assertEquals(false, projected);
    projected = SchemaProjector.project(Schema.BYTES_SCHEMA, bytes, Schema.OPTIONAL_BYTES_SCHEMA);
    assertEquals(bytes, projected);
    projected = SchemaProjector.project(Schema.STRING_SCHEMA, "abc", Schema.OPTIONAL_STRING_SCHEMA);
    assertEquals("abc", projected);

    assertThrows(DataException.class,
        () -> SchemaProjector.project(Schema.OPTIONAL_BOOLEAN_SCHEMA, false, Schema.BOOLEAN_SCHEMA),
        "Cannot project optional schema to schema with no default value.");
    assertThrows(DataException.class,
        () -> SchemaProjector.project(Schema.OPTIONAL_BYTES_SCHEMA, bytes, Schema.BYTES_SCHEMA),
        "Cannot project optional schema to schema with no default value.");
    assertThrows(DataException.class,
        () -> SchemaProjector.project(Schema.OPTIONAL_STRING_SCHEMA, "abc", Schema.STRING_SCHEMA),
        "Cannot project optional schema to schema with no default value.");
}
/**
 * Handles an uploader's notification that a resource was uploaded into the
 * shared cache: records it in the store and reports whether the upload was
 * accepted. The upload is accepted exactly when the filename the store keeps
 * for the key equals the uploaded one (presumably another uploader wins the
 * race otherwise -- NOTE(review): confirm addResource semantics).
 */
@Override
public SCMUploaderNotifyResponse notify(SCMUploaderNotifyRequest request)
    throws YarnException, IOException {
    SCMUploaderNotifyResponse response =
        recordFactory.newRecordInstance(SCMUploaderNotifyResponse.class);

    // TODO (YARN-2774): proper security/authorization needs to be implemented

    String filename =
        store.addResource(request.getResourceKey(), request.getFileName());
    boolean accepted = filename.equals(request.getFileName());

    // Metrics track accepted vs rejected uploads separately.
    if (accepted) {
        this.metrics.incAcceptedUploads();
    } else {
        this.metrics.incRejectedUploads();
    }

    response.setAccepted(accepted);

    return response;
}
// Notifying about a key with no existing cache entry must accept the upload,
// bump the accepted-uploads metric, and leave the key with zero references.
@Test
void testNotify_noEntry() throws Exception {
    long accepted =
        SharedCacheUploaderMetrics.getInstance().getAcceptedUploads();

    SCMUploaderNotifyRequest request =
        recordFactory.newRecordInstance(SCMUploaderNotifyRequest.class);
    request.setResourceKey("key1");
    request.setFilename("foo.jar");
    assertTrue(proxy.notify(request).getAccepted());
    Collection<SharedCacheResourceReference> set =
        store.getResourceReferences("key1");
    assertNotNull(set);
    assertEquals(0, set.size());

    assertEquals(
        1,
        SharedCacheUploaderMetrics.getInstance().getAcceptedUploads() - accepted,
        "NM upload metrics aren't updated.");
}
/**
 * Looks up the external (Titus) job id on the step's runtime summary and, if
 * output data exists for that job, merges its output params into the step's
 * runtime params.
 *
 * @param runtimeSummary the step runtime summary to update in place
 */
public void validateAndMergeOutputParams(StepRuntimeSummary runtimeSummary) {
    extractExternalJobId(runtimeSummary)
        // Only query the DAO when an external job id is actually present.
        .flatMap(jobId -> outputDataDao.getOutputDataForExternalJob(jobId, ExternalJobType.TITUS))
        .ifPresent(
            outputData ->
                ParamsMergeHelper.mergeOutputDataParams(
                    runtimeSummary.getParams(), outputData.getParams()));
}
// Merges output params produced by an external Titus job into the runtime
// params and verifies every supported param type (string, long, long array,
// string map, generic map) is overwritten with the updated value.
@Test
public void testValidOutputParamTypes() throws IOException {
    Map<String, Parameter> runtimeParams =
        loadObject("fixtures/outputdata/sample-output-data-params-defaults.json", paramMap);
    Map<String, Parameter> outputParams =
        loadObject("fixtures/outputdata/sample-output-data-params-updated.json", paramMap);
    outputData =
        new OutputData(
            ExternalJobType.TITUS,
            titusTaskId,
            "wfid",
            System.currentTimeMillis(),
            System.currentTimeMillis(),
            outputParams,
            new HashMap<>());
    setupOutputDataDao();
    runtimeSummary = runtimeSummaryBuilder().artifacts(artifacts).params(runtimeParams).build();
    outputDataManager.validateAndMergeOutputParams(runtimeSummary);

    long[] expectedLongArray = {4L, 5L, 6L};
    assertEquals("goodbye", runtimeSummary.getParams().get("str_param").asString());
    assertArrayEquals(
        expectedLongArray, runtimeSummary.getParams().get("long_array_param").asLongArray());
    assertEquals(51L, runtimeSummary.getParams().get("long_param").asLong().longValue());
    assertEquals("e", runtimeSummary.getParams().get("string_map_param").asStringMap().get("a"));
    assertEquals("f", runtimeSummary.getParams().get("string_map_param").asStringMap().get("b"));
    assertArrayEquals(
        new String[] {"p1", "p2", "p3"},
        (String[]) runtimeSummary.getParams().get("map_param").asMap().get("table_name"));
}
/**
 * Rewrites every '.' in the topic name to '_' so that names differing only in
 * collision characters map onto the same unified form.
 *
 * @param topic raw topic name
 * @return the topic name with all dots replaced by underscores
 */
public static String unifyCollisionChars(String topic) {
    StringBuilder unified = new StringBuilder(topic.length());
    for (char ch : topic.toCharArray()) {
        unified.append(ch == '.' ? '_' : ch);
    }
    return unified.toString();
}
// '.' maps to '_'; existing underscores are untouched, so distinct input
// topics can intentionally collide after unification.
@Test
public void testUnifyCollisionChars() {
    assertEquals("topic", Topic.unifyCollisionChars("topic"));
    assertEquals("_topic", Topic.unifyCollisionChars(".topic"));
    assertEquals("_topic", Topic.unifyCollisionChars("_topic"));
    assertEquals("__topic", Topic.unifyCollisionChars("_.topic"));
}
/**
 * Returns the object id (hex name) of HEAD for the repository containing
 * {@code path}, or null when the repository has no HEAD commit (e.g. a
 * freshly-created, empty repository).
 *
 * @throws IllegalStateException on I/O failure while opening the repository
 */
@Override
@CheckForNull
public String revisionId(Path path) {
    RepositoryBuilder builder = getVerifiedRepositoryBuilder(path);
    try {
        return Optional.ofNullable(getHead(builder.build()))
            .map(Ref::getObjectId)
            .map(ObjectId::getName)
            .orElse(null);
    } catch (IOException e) {
        throw new IllegalStateException("I/O error while getting revision ID for path: " + path, e);
    }
}
// A freshly-created repository has no HEAD commit, so revisionId must return
// null rather than throw.
@Test
public void revisionId_should_return_null_in_empty_repo() throws IOException {
    worktree = temp.newFolder().toPath();
    Repository repo = FileRepositoryBuilder.create(worktree.resolve(".git").toFile());
    repo.create();

    git = new Git(repo);

    Path projectDir = worktree.resolve("project");
    Files.createDirectory(projectDir);

    GitScmProvider provider = newGitScmProvider();

    assertThat(provider.revisionId(projectDir)).isNull();
}
/**
 * True when the platform-specific desktop integration can report system
 * sleep/wake events; delegates to the active internal implementation.
 */
public static boolean systemSupportsSleepDetection() {
    return internal.supportsSystemSleepDetection();
}
// Sleep detection is expected to be unavailable on CI runners and available
// on developer machines. NOTE(review): environment-dependent assertion -- may
// be flaky on unusual local setups.
@Test
void testSupportSystemSleepDetection() {
    boolean supportsSystemSleepDetection = DesktopUtils.systemSupportsSleepDetection();
    if (System.getenv("CI") != null) {
        assertThat(supportsSystemSleepDetection).isFalse();
    } else {
        assertThat(supportsSystemSleepDetection).isTrue();
    }
}
/**
 * Lists schemas visible in the given scope: the whole account (ROOT) or one
 * database (DATABASE). SQL failures are mapped to Iceberg exceptions and
 * interruption is rethrown unchecked; every returned identifier is verified
 * to be of type SCHEMA.
 *
 * @throws IllegalArgumentException for unsupported scope types
 */
@Override
public List<SnowflakeIdentifier> listSchemas(SnowflakeIdentifier scope) {
    StringBuilder baseQuery = new StringBuilder("SHOW SCHEMAS");
    String[] queryParams = null;
    switch (scope.type()) {
        case ROOT:
            // account-level listing
            baseQuery.append(" IN ACCOUNT");
            break;
        case DATABASE:
            // database-level listing
            baseQuery.append(" IN DATABASE IDENTIFIER(?)");
            queryParams = new String[] {scope.toIdentifierString()};
            break;
        default:
            throw new IllegalArgumentException(
                String.format("Unsupported scope type for listSchemas: %s", scope));
    }

    // Effectively-final copies so the lambda below can capture them.
    final String finalQuery = baseQuery.toString();
    final String[] finalQueryParams = queryParams;
    List<SnowflakeIdentifier> schemas;
    try {
        schemas =
            connectionPool.run(
                conn ->
                    queryHarness.query(
                        conn, finalQuery, SCHEMA_RESULT_SET_HANDLER, finalQueryParams));
    } catch (SQLException e) {
        throw snowflakeExceptionToIcebergException(
            scope, e, String.format("Failed to list schemas for scope '%s'", scope));
    } catch (InterruptedException e) {
        throw new UncheckedInterruptedException(
            e, "Interrupted while listing schemas for scope '%s'", scope);
    }
    // The result-set handler should only ever yield SCHEMA identifiers.
    schemas.forEach(
        schema ->
            Preconditions.checkState(
                schema.type() == SnowflakeIdentifier.Type.SCHEMA,
                "Expected SCHEMA, got identifier '%s' for scope '%s'",
                schema,
                scope));
    return schemas;
}
@SuppressWarnings("unchecked") @Test public void testListSchemasInterruptedException() throws SQLException, InterruptedException { Exception injectedException = new InterruptedException("Fake interrupted exception"); when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException); assertThatExceptionOfType(UncheckedInterruptedException.class) .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1"))) .withMessageContaining("Interrupted while listing schemas for scope 'DATABASE: 'DB_1''") .withCause(injectedException); }
/**
 * Registers a single FILE resource and returns the local path it resolves to.
 *
 * @param resourceUri the file resource to register
 * @return the registered resource's local path
 * @throws IOException when staging the resource fails
 */
public String registerFileResource(ResourceUri resourceUri) throws IOException {
    // Stage exactly one FILE resource; the boolean flags and no-op consumer
    // mirror the defaults used for non-executable file registration.
    Map<ResourceUri, URL> stagingResources =
            prepareStagingResources(
                    Collections.singletonList(resourceUri),
                    ResourceType.FILE,
                    false,
                    url -> {},
                    false);
    registerResources(stagingResources, false);
    // Exactly one entry was staged -- read its key directly instead of copying
    // the whole key set into a new ArrayList just to fetch element 0.
    ResourceUri stagedUri = stagingResources.keySet().iterator().next();
    return resourceInfos.get(stagedUri).getPath();
}
// Registers the same file through several equivalent URIs (scheme-prefixed,
// scheme-less absolute, relative) and verifies they all resolve to the one
// already-registered local path without creating duplicate entries.
@Test
public void testRegisterFileResource() throws Exception {
    ResourceUri normalizedResource =
        new ResourceUri(
            ResourceType.FILE,
            resourceManager.getURLFromPath(new Path(file.getPath())).getPath());

    // register file resource, uri is formatted with "file" scheme prefix
    String localFilePath =
        resourceManager.registerFileResource(
            new ResourceUri(ResourceType.FILE, "file://" + file.getPath()));

    assertEquals(file.getPath(), localFilePath);
    Map<ResourceUri, URL> actualResource =
        Collections.singletonMap(
            normalizedResource, resourceManager.getURLFromPath(new Path(localFilePath)));
    assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource);

    // register the same file resource repeatedly, but without scheme
    assertThat(
            resourceManager.registerFileResource(
                new ResourceUri(ResourceType.FILE, file.getPath())))
        .isEqualTo(localFilePath);
    assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource);

    // register the same file resource repeatedly, use relative path as uri
    assertThat(
            resourceManager.registerFileResource(
                new ResourceUri(
                    ResourceType.FILE,
                    new File(".")
                        .getCanonicalFile()
                        .toPath()
                        .relativize(file.toPath())
                        .toString())))
        .isEqualTo(localFilePath);
    assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource);
}
/**
 * Netty entry point for pull requests: unwraps the channel and delegates to
 * the full variant with both boolean options enabled (their exact meaning is
 * defined by that overload, outside this view).
 */
@Override
public RemotingCommand processRequest(final ChannelHandlerContext ctx, RemotingCommand request)
    throws RemotingCommandException {
    return this.processRequest(ctx.channel(), request, true, true);
}
// When the message store reports MESSAGE_WAS_REMOVING, the processor should
// answer with PULL_RETRY_IMMEDIATELY so the client retries right away.
@Test
public void testProcessRequest_MsgWasRemoving() throws RemotingCommandException {
    GetMessageResult getMessageResult = createGetMessageResult();
    getMessageResult.setStatus(GetMessageStatus.MESSAGE_WAS_REMOVING);
    when(messageStore.getMessageAsync(anyString(), anyString(), anyInt(), anyLong(), anyInt(),
        any(ExpressionMessageFilter.class)))
        .thenReturn(CompletableFuture.completedFuture(getMessageResult));

    final RemotingCommand request = createPullMsgCommand(RequestCode.PULL_MESSAGE);
    pullMessageProcessor.processRequest(handlerContext, request);
    RemotingCommand response = embeddedChannel.readOutbound();
    assertThat(response).isNotNull();
    assertThat(response.getCode()).isEqualTo(ResponseCode.PULL_RETRY_IMMEDIATELY);
}
/**
 * Convenience overload: parses the decimal string with the second flag set to
 * false (its meaning is defined by the two-argument overload, outside this view).
 */
public static DecimalParseResult parse(String stringValue) {
    return parse(stringValue, false);
}
// Exhaustive parse cases: each assertion checks the unscaled value, precision
// and scale produced for a literal, covering signs, leading zeros, bare and
// trailing decimal points, long (17-digit) values, and 38-digit values that
// require the wide (encoded) unscaled representation.
@Test
public void testParse() {
    // zeros in every spelling
    assertParseResult("0", 0L, 1, 0);
    assertParseResult("0.", 0L, 1, 0);
    assertParseResult(".0", 0L, 1, 1);
    assertParseResult("+0", 0L, 1, 0);
    assertParseResult("-0", 0L, 1, 0);
    assertParseResult("000", 0L, 1, 0);
    assertParseResult("+000", 0L, 1, 0);
    assertParseResult("-000", 0L, 1, 0);
    assertParseResult("0000000000000000000000000000", 0L, 1, 0);
    assertParseResult("+0000000000000000000000000000", 0L, 1, 0);
    assertParseResult("-0000000000000000000000000000", 0L, 1, 0);
    // short decimals with signs and leading zeros
    assertParseResult("1.1", 11L, 2, 1);
    assertParseResult("1.", 1L, 1, 0);
    assertParseResult("+1.1", 11L, 2, 1);
    assertParseResult("+1.", 1L, 1, 0);
    assertParseResult("-1.1", -11L, 2, 1);
    assertParseResult("-1.", -1L, 1, 0);
    assertParseResult("0001.1", 11L, 2, 1);
    assertParseResult("+0001.1", 11L, 2, 1);
    assertParseResult("-0001.1", -11L, 2, 1);
    assertParseResult("0.1", 1L, 1, 1);
    assertParseResult(".1", 1L, 1, 1);
    assertParseResult("+0.1", 1L, 1, 1);
    assertParseResult("+.1", 1L, 1, 1);
    assertParseResult("-0.1", -1L, 1, 1);
    assertParseResult("-.1", -1L, 1, 1);
    assertParseResult(".1", 1L, 1, 1);
    assertParseResult("+.1", 1L, 1, 1);
    assertParseResult("-.1", -1L, 1, 1);
    assertParseResult("000.1", 1L, 1, 1);
    assertParseResult("+000.1", 1L, 1, 1);
    assertParseResult("-000.1", -1L, 1, 1);
    // 17-digit values that still fit in a long unscaled value
    assertParseResult("12345678901234567", 12345678901234567L, 17, 0);
    assertParseResult("+12345678901234567", 12345678901234567L, 17, 0);
    assertParseResult("-12345678901234567", -12345678901234567L, 17, 0);
    assertParseResult("00012345678901234567", 12345678901234567L, 17, 0);
    assertParseResult("+00012345678901234567", 12345678901234567L, 17, 0);
    assertParseResult("-00012345678901234567", -12345678901234567L, 17, 0);
    assertParseResult("0.12345678901234567", 12345678901234567L, 17, 17);
    assertParseResult("+0.12345678901234567", 12345678901234567L, 17, 17);
    assertParseResult("-0.12345678901234567", -12345678901234567L, 17, 17);
    assertParseResult("000.12345678901234567", 12345678901234567L, 17, 17);
    assertParseResult("+000.12345678901234567", 12345678901234567L, 17, 17);
    assertParseResult("-000.12345678901234567", -12345678901234567L, 17, 17);
    // 38-digit values requiring the encoded (wide) unscaled representation
    assertParseResult("12345678901234567890.123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("+12345678901234567890.123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("-12345678901234567890.123456789012345678",
        encodeUnscaledValue("-12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("00012345678901234567890.123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("+00012345678901234567890.123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("-00012345678901234567890.123456789012345678",
        encodeUnscaledValue("-12345678901234567890123456789012345678"), 38, 18);
    assertParseResult("0.12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("+0.12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("-0.12345678901234567890123456789012345678",
        encodeUnscaledValue("-12345678901234567890123456789012345678"), 38, 38);
    assertParseResult(".12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("+.12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("-.12345678901234567890123456789012345678",
        encodeUnscaledValue("-12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("0000.12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("+0000.12345678901234567890123456789012345678",
        encodeUnscaledValue("12345678901234567890123456789012345678"), 38, 38);
    assertParseResult("-0000.12345678901234567890123456789012345678",
        encodeUnscaledValue("-12345678901234567890123456789012345678"), 38, 38);
}
// Returns the configured sampler parameter as a boxed numeric value
// (the concrete type depends on how the config was deserialized).
public Number getParam() {
    return param;
}
// Loads the jaeger-tracing-false JSON config and checks the sampler param
// deserialized to the expected numeric value 0.5.
@Test
public void testParamDouble() {
    JaegerConfig jaegerConfig = (JaegerConfig) Config.getInstance()
        .getJsonObjectConfig("jaeger-tracing-false", JaegerConfig.class);
    Assert.assertEquals(jaegerConfig.getParam(), 0.5);
}
// Base variant has no weigher, so a weighted size is meaningless here;
// subclasses that weigh entries override this. Calling it on the base class
// is a programming error.
protected long weightedSize() {
    throw new UnsupportedOperationException();
}
@Test(dataProvider = "caches") @CacheSpec(maximumSize = Maximum.FULL, weigher = CacheWeigher.TEN) public void weightedSize_maintenance(BoundedLocalCache<Int, Int> cache, CacheContext context, Eviction<Int, Int> eviction) { cache.drainStatus = REQUIRED; var weight = eviction.weightedSize(); assertThat(weight).isPresent(); assertThat(cache.drainStatus).isEqualTo(IDLE); }
/**
 * Server-side generic-call filter: when the invocation is a $invoke/$invokeAsync
 * carrying (methodName, paramTypes, args) and the target service is not itself
 * a GenericService, deserializes the generic arguments according to the
 * requested serialization mode and re-dispatches as a concrete invocation.
 * All other invocations pass through untouched.
 */
@Override
public Result invoke(Invoker<?> invoker, Invocation inv) throws RpcException {
    if ((inv.getMethodName().equals($INVOKE) || inv.getMethodName().equals($INVOKE_ASYNC))
            && inv.getArguments() != null
            && inv.getArguments().length == 3
            && !GenericService.class.isAssignableFrom(invoker.getInterface())) {
        // Generic triple: target method name, parameter type names, raw args.
        String name = ((String) inv.getArguments()[0]).trim();
        String[] types = (String[]) inv.getArguments()[1];
        Object[] args = (Object[]) inv.getArguments()[2];
        try {
            Method method = findMethodByMethodSignature(invoker.getInterface(), name, types, inv.getServiceModel());
            Class<?>[] params = method.getParameterTypes();
            if (args == null) {
                args = new Object[params.length];
            }
            if (types == null) {
                types = new String[params.length];
            }
            if (args.length != types.length) {
                throw new RpcException(
                        "GenericFilter#invoke args.length != types.length, please check your " + "params");
            }
            // Serialization mode: attachment first, then RpcContext fallback.
            String generic = inv.getAttachment(GENERIC_KEY);
            if (StringUtils.isBlank(generic)) {
                generic = getGenericValueFromRpcContext();
            }
            if (StringUtils.isEmpty(generic)
                    || ProtocolUtils.isDefaultGenericSerialization(generic)
                    || ProtocolUtils.isGenericReturnRawResult(generic)) {
                // Default mode: map/POJO structures realized back into typed objects.
                try {
                    args = PojoUtils.realize(args, params, method.getGenericParameterTypes());
                } catch (Exception e) {
                    logger.error(
                            LoggerCodeConstants.PROTOCOL_ERROR_DESERIALIZE,
                            "",
                            "",
                            "Deserialize generic invocation failed. ServiceKey: " + inv.getTargetServiceUniqueName(),
                            e);
                    throw new RpcException(e);
                }
            } else if (ProtocolUtils.isGsonGenericSerialization(generic)) {
                args = getGsonGenericArgs(args, method.getGenericParameterTypes());
            } else if (ProtocolUtils.isJavaGenericSerialization(generic)) {
                // Native Java serialization is disabled unless explicitly enabled,
                // to guard against deserialization attacks.
                Configuration configuration = ApplicationModel.ofNullable(applicationModel)
                        .modelEnvironment()
                        .getConfiguration();
                if (!configuration.getBoolean(CommonConstants.ENABLE_NATIVE_JAVA_GENERIC_SERIALIZE, false)) {
                    String notice = "Trigger the safety barrier! "
                            + "Native Java Serializer is not allowed by default."
                            + "This means currently maybe being attacking by others. "
                            + "If you are sure this is a mistake, "
                            + "please set `" + CommonConstants.ENABLE_NATIVE_JAVA_GENERIC_SERIALIZE
                            + "` enable in configuration! "
                            + "Before doing so, please make sure you have configure JEP290 to prevent serialization attack.";
                    logger.error(CONFIG_FILTER_VALIDATION_EXCEPTION, "", "", notice);
                    throw new RpcException(new IllegalStateException(notice));
                }
                // Each argument must arrive as a serialized byte[].
                for (int i = 0; i < args.length; i++) {
                    if (byte[].class == args[i].getClass()) {
                        try (UnsafeByteArrayInputStream is = new UnsafeByteArrayInputStream((byte[]) args[i])) {
                            args[i] = applicationModel
                                    .getExtensionLoader(Serialization.class)
                                    .getExtension(GENERIC_SERIALIZATION_NATIVE_JAVA)
                                    .deserialize(null, is)
                                    .readObject();
                        } catch (Exception e) {
                            throw new RpcException("Deserialize argument [" + (i + 1) + "] failed.", e);
                        }
                    } else {
                        throw new RpcException("Generic serialization [" + GENERIC_SERIALIZATION_NATIVE_JAVA
                                + "] only support message type " + byte[].class + " and your message type is "
                                + args[i].getClass());
                    }
                }
            } else if (ProtocolUtils.isBeanGenericSerialization(generic)) {
                // Bean mode: arguments must be JavaBeanDescriptor wrappers.
                for (int i = 0; i < args.length; i++) {
                    if (args[i] != null) {
                        if (args[i] instanceof JavaBeanDescriptor) {
                            args[i] = JavaBeanSerializeUtil.deserialize((JavaBeanDescriptor) args[i]);
                        } else {
                            throw new RpcException("Generic serialization [" + GENERIC_SERIALIZATION_BEAN
                                    + "] only support message type " + JavaBeanDescriptor.class.getName()
                                    + " and your message type is " + args[i].getClass().getName());
                        }
                    }
                }
            } else if (ProtocolUtils.isProtobufGenericSerialization(generic)) {
                // as proto3 only accept one protobuf parameter
                if (args.length == 1 && args[0] instanceof String) {
                    try (UnsafeByteArrayInputStream is =
                            new UnsafeByteArrayInputStream(((String) args[0]).getBytes())) {
                        args[0] = applicationModel
                                .getExtensionLoader(Serialization.class)
                                .getExtension(GENERIC_SERIALIZATION_PROTOBUF)
                                .deserialize(null, is)
                                .readObject(method.getParameterTypes()[0]);
                    } catch (Exception e) {
                        throw new RpcException("Deserialize argument failed.", e);
                    }
                } else {
                    throw new RpcException("Generic serialization [" + GENERIC_SERIALIZATION_PROTOBUF
                            + "] only support one " + String.class.getName()
                            + " argument and your message size is " + args.length + " and type is"
                            + args[0].getClass().getName());
                }
            }
            // Re-dispatch as a concrete invocation of the resolved method,
            // preserving attachments, attributes and the invoke mode.
            RpcInvocation rpcInvocation = new RpcInvocation(
                    inv.getTargetServiceUniqueName(),
                    invoker.getUrl().getServiceModel(),
                    method.getName(),
                    invoker.getInterface().getName(),
                    invoker.getUrl().getProtocolServiceKey(),
                    method.getParameterTypes(),
                    args,
                    inv.getObjectAttachments(),
                    inv.getInvoker(),
                    inv.getAttributes(),
                    inv instanceof RpcInvocation ? ((RpcInvocation) inv).getInvokeMode() : null);
            return invoker.invoke(rpcInvocation);
        } catch (NoSuchMethodException | ClassNotFoundException e) {
            throw new RpcException(e.getMessage(), e);
        }
    }
    // Not a generic call: pass straight through.
    return invoker.invoke(inv);
}
// When the invocation's method name is not $invoke/$invokeAsync ("sayHi"
// here), the filter must delegate directly and return the invoker's result
// untouched, even though the argument shape looks like a generic call.
@Test
void testInvokeWithMethodNamtNot$Invoke() {
    Method genericInvoke = GenericService.class.getMethods()[0];

    Map<String, Object> person = new HashMap<String, Object>();
    person.put("name", "dubbo");
    person.put("age", 10);

    RpcInvocation invocation = new RpcInvocation(
        "sayHi",
        GenericService.class.getName(),
        "",
        genericInvoke.getParameterTypes(),
        new Object[] {
            "getPerson", new String[] {Person.class.getCanonicalName()}, new Object[] {person}
        });

    URL url = URL.valueOf(
        "test://test:11/org.apache.dubbo.rpc.support.DemoService?" + "accesslog=true&group=dubbo&version=1.1");
    Invoker invoker = Mockito.mock(Invoker.class);
    when(invoker.invoke(any(Invocation.class))).thenReturn(new AppResponse(new Person("person", 10)));
    when(invoker.getUrl()).thenReturn(url);
    when(invoker.getInterface()).thenReturn(DemoService.class);

    Result result = genericFilter.invoke(invoker, invocation);
    Assertions.assertEquals(Person.class, result.getValue().getClass());
    Assertions.assertEquals(10, ((Person) (result.getValue())).getAge());
}
/**
 * Validates user-supplied offsets for this connector. The task never reads
 * these offsets, so format validation is all that is required; nothing is
 * actually altered here.
 *
 * @return true when every entry is a tombstone or passes format validation
 * @throws ConnectException on a null source partition or a malformed entry
 */
@Override
public boolean alterOffsets(Map<String, String> config, Map<Map<String, ?>, Map<String, ?>> offsets) {
    for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) {
        Map<String, ?> sourceOffset = offsetEntry.getValue();
        if (sourceOffset == null) {
            // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't
            // want to prevent users from being able to clean it up using the REST API
            continue;
        }

        Map<String, ?> sourcePartition = offsetEntry.getKey();
        if (sourcePartition == null) {
            throw new ConnectException("Source partitions may not be null");
        }

        MirrorUtils.validateSourcePartitionString(sourcePartition, SOURCE_CLUSTER_ALIAS_KEY);
        MirrorUtils.validateSourcePartitionString(sourcePartition, TARGET_CLUSTER_ALIAS_KEY);

        MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, true);
    }

    // We don't actually use these offsets in the task class, so no additional effort is required beyond just validating
    // the format of the user-supplied offsets
    return true;
}
// alterOffsets only validates format: a well-formed offsets map and an empty
// map (e.g. after a reset) must both be accepted without throwing.
@Test
public void testSuccessfulAlterOffsets() {
    MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
    Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap(
        sourcePartition("primary", "backup"),
        SOURCE_OFFSET
    );

    // Expect no exception to be thrown when a valid offsets map is passed. An empty offsets map is treated as valid
    // since it could indicate that the offsets were reset previously or that no offsets have been committed yet
    // (for a reset operation)
    assertTrue(connector.alterOffsets(null, offsets));
    assertTrue(connector.alterOffsets(null, Collections.emptyMap()));
}
/**
 * Returns the main (non-additional/side) inputs of the applied transform:
 * every input whose tag is not registered as an additional input.
 *
 * @param application the applied transform to inspect
 * @return the main input values; empty only when the transform has no inputs
 * @throws IllegalArgumentException when inputs exist but none is a main input
 */
public static Collection<PValue> nonAdditionalInputs(AppliedPTransform<?, ?, ?> application) {
    ImmutableList.Builder<PValue> mainInputs = ImmutableList.builder();
    PTransform<?, ?> transform = application.getTransform();
    for (Map.Entry<TupleTag<?>, PCollection<?>> input : application.getInputs().entrySet()) {
        if (!transform.getAdditionalInputs().containsKey(input.getKey())) {
            mainInputs.add(input.getValue());
        }
    }
    // Build the immutable list once; the original built it twice (for the
    // check and again for the return value).
    ImmutableList<PValue> builtMainInputs = mainInputs.build();
    checkArgument(
        !builtMainInputs.isEmpty() || application.getInputs().isEmpty(),
        "Expected at least one main input if any inputs exist");
    return builtMainInputs;
}
// A transform with no inputs at all is valid: the main-input collection is
// simply empty (the "at least one main input" check only applies when inputs
// exist).
@Test
public void nonAdditionalInputsWithNoInputSucceeds() {
    AppliedPTransform<PInput, POutput, TestTransform> transform =
        AppliedPTransform.of(
            "input-free",
            Collections.emptyMap(),
            Collections.emptyMap(),
            new TestTransform(),
            ResourceHints.create(),
            pipeline);

    assertThat(TransformInputs.nonAdditionalInputs(transform), Matchers.empty());
}
/**
 * Builds the joined stream for this join node. Non-foreign-key joins require
 * co-partitioned inputs, so their partition counts are checked against live
 * topic metadata first; the actual join is delegated to a joiner chosen from
 * the left/right node output types.
 */
@Override
public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) {
    if (!joinKey.isForeignKey()) {
        ensureMatchingPartitionCounts(buildContext.getServiceContext().getTopicClient());
    }

    final JoinerFactory joinerFactory = new JoinerFactory(
        buildContext,
        this,
        buildContext.buildNodeContext(getId().toString()));

    return joinerFactory.getJoiner(left.getNodeOutputType(), right.getNodeOutputType()).join();
}
// Building a stream-stream INNER join must delegate to the left stream's
// innerJoin with the synthesized key, the join window, both value formats and
// the context stacker.
@Test
public void shouldPerformStreamToStreamInnerJoin() {
    // Given:
    setupStream(left, leftSchemaKStream);
    setupStream(right, rightSchemaKStream);

    final JoinNode joinNode =
        new JoinNode(nodeId, INNER, joinKey, true, left, right, WITHIN_EXPRESSION, "KAFKA");

    // When:
    joinNode.buildStream(planBuildContext);

    // Then:
    verify(leftSchemaKStream).innerJoin(
        rightSchemaKStream,
        SYNTH_KEY,
        WITHIN_EXPRESSION.get(),
        VALUE_FORMAT.getFormatInfo(),
        OTHER_FORMAT.getFormatInfo(),
        CONTEXT_STACKER
    );
}
/**
 * Returns the smallest of the given long values.
 *
 * @param first the first candidate (guarantees at least one argument)
 * @param rest any further candidates
 * @return the minimum across {@code first} and {@code rest}
 */
public static long min(long first, long... rest) {
    long smallest = first;
    for (long candidate : rest) {
        smallest = Math.min(smallest, candidate);
    }
    return smallest;
}
// Covers the single-argument case plus the minimum appearing in each varargs
// position.
@Test
public void testMin() {
    assertEquals(1, Utils.min(1));
    assertEquals(1, Utils.min(1, 2, 3));
    assertEquals(1, Utils.min(2, 1, 3));
    assertEquals(1, Utils.min(2, 3, 1));
}
/**
 * Builds the topology summary map served to the UI.
 *
 * <p>Unpacks the topology page info for the requested time window, then layers on
 * request/user context and UI configuration entries.
 *
 * @param topologyPageInfo topology stats and serialized configuration from nimbus
 * @param window the metrics time window being displayed
 * @param config daemon configuration (UI-related keys are read from it)
 * @param remoteUser the user making the request
 * @return a map of template variables for the topology page
 */
public static Map<String, Object> getTopologySummary(TopologyPageInfo topologyPageInfo, String window,
                                                     Map<String, Object> config, String remoteUser) {
    // Use parameterized types throughout (the originals were raw HashMap/ArrayList).
    Map<String, Object> result = new HashMap<>();
    Map<String, Object> topologyConf =
        (Map<String, Object>) JSONValue.parse(topologyPageInfo.get_topology_conf());
    int messageTimeout = (int) topologyConf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS);
    Map<String, Object> unpackedTopologyPageInfo =
        unpackTopologyInfo(topologyPageInfo, window, config);
    result.putAll(unpackedTopologyPageInfo);
    result.put("user", remoteUser);
    result.put("window", window);
    result.put("windowHint", getWindowHint(window));
    result.put("msgTimeout", messageTimeout);
    result.put("configuration", topologyConf);
    result.put("visualizationTable", new ArrayList<>());
    result.put("schedulerDisplayResource", config.get(DaemonConfig.SCHEDULER_DISPLAY_RESOURCE));
    result.put("bugtracker-url", config.get(DaemonConfig.UI_PROJECT_BUGTRACKER_URL));
    result.put("central-log-url", config.get(DaemonConfig.UI_CENTRAL_LOGGING_URL));
    return result;
}
// End-to-end check of getTopologySummary's per-spout aggregate stats: seeds a spout's
// common/spout-specific stats plus its resource map, then verifies every general field
// (ids, counters, latency formatting, executor/task counts, resources) in the result.
@Test
void test_getTopologySpoutAggStatsMap_generalFields() {
    // Define inputs
    final String expectedSpoutId = "MySpoutId";
    final double expectedCompleteLatency = 432.0D;
    final long expectedEmitted = 43234L;
    final long expectedAcked = 5553L;
    final long expectedFailed = 220L;
    final int expectedExecutors = 2;
    final int expectedTasks = 3;
    final long expectedTransferred = 3423423L;
    final double expectedOnMemoryHeap = 1024D;
    final double expectedOffMemoryHeap = 2048D;
    final double expectedCpuCorePercent = 75D;

    // Build stats instance for our spout
    final ComponentAggregateStats aggregateStats = buildSpoutAggregateStatsBase();

    // Common stats
    final CommonAggregateStats commonStats = aggregateStats.get_common_stats();
    commonStats.set_acked(expectedAcked);
    commonStats.set_emitted(expectedEmitted);
    commonStats.set_failed(expectedFailed);
    commonStats.set_num_executors(expectedExecutors);
    commonStats.set_num_tasks(expectedTasks);
    commonStats.set_transferred(expectedTransferred);

    // Spout stats
    final SpoutAggregateStats spoutStats = aggregateStats.get_specific_stats().get_spout();
    spoutStats.set_complete_latency_ms(expectedCompleteLatency);

    // Build Resources Map
    final Map<String, Double> resourcesMap = new HashMap<>();
    resourcesMap.put(Constants.COMMON_ONHEAP_MEMORY_RESOURCE_NAME, expectedOnMemoryHeap);
    resourcesMap.put(Constants.COMMON_OFFHEAP_MEMORY_RESOURCE_NAME, expectedOffMemoryHeap);
    resourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, expectedCpuCorePercent);
    commonStats.set_resources_map(resourcesMap);

    // Add to TopologyPageInfo
    addSpoutStats(expectedSpoutId, aggregateStats);

    // Call method under test.
    final Map<String, Object> result = UIHelpers.getTopologySummary(
        topoPageInfo,
        WINDOW,
        new HashMap<>(),
        "spp"
    );

    // Validate
    assertNotNull(result, "Should never return null");

    // Validate our Spout result
    final Map<String, Object> spoutResult = getSpoutStatsFromTopologySummaryResult(result, expectedSpoutId);
    assertNotNull(spoutResult, "Should have an entry for spout");

    // Validate fields
    assertEquals(expectedSpoutId, spoutResult.get("spoutId"));
    assertEquals(expectedSpoutId, spoutResult.get("encodedSpoutId"));
    assertEquals(expectedTransferred, spoutResult.get("transferred"));
    // Complete latency is rendered with three decimal places.
    assertEquals(String.format("%.3f", expectedCompleteLatency), spoutResult.get("completeLatency"));
    assertEquals(expectedFailed, spoutResult.get("failed"));
    assertEquals(expectedAcked, spoutResult.get("acked"));
    assertEquals(expectedEmitted, spoutResult.get("emitted"));
    assertEquals(expectedExecutors, spoutResult.get("executors"));
    assertEquals(expectedTasks, spoutResult.get("tasks"));

    // Validate resources
    assertEquals(expectedOnMemoryHeap, (double) spoutResult.get("requestedMemOnHeap"), 0.01);
    assertEquals(expectedOffMemoryHeap, (double) spoutResult.get("requestedMemOffHeap"), 0.01);
    assertEquals(expectedCpuCorePercent, (double) spoutResult.get("requestedCpu"), 0.01);
    assertEquals("", spoutResult.get("requestedGenericResourcesComp"));

    // We expect there to be no error populated.
    assertEquals("", spoutResult.get("lastError"), "No error should be reported as empty string");
}
// Cached variant of getMailAccount: results are stored in the
// RedisKeyConstants.MAIL_ACCOUNT cache keyed by id. Null results are not cached
// (unless = "#result == null"), so a missing account is re-queried on each call.
@Override
@Cacheable(value = RedisKeyConstants.MAIL_ACCOUNT, key = "#id", unless = "#result == null")
public MailAccountDO getMailAccountFromCache(Long id) {
    return getMailAccount(id);
}
// Verifies getMailAccountFromCache returns the persisted account for an existing id.
@Test
public void testGetMailAccountFromCache() {
    // Mock data
    MailAccountDO dbMailAccount = randomPojo(MailAccountDO.class);
    mailAccountMapper.insert(dbMailAccount); // @Sql: insert an existing record first
    // Prepare parameters
    Long id = dbMailAccount.getId();
    // Invoke
    MailAccountDO mailAccount = mailAccountService.getMailAccountFromCache(id);
    // Assert
    assertPojoEquals(dbMailAccount, mailAccount);
}
// Reports the statement type this recognizer handles: SELECT ... FOR UPDATE.
@Override
public SQLType getSQLType() {
    return SQLType.SELECT_FOR_UPDATE;
}
// Verifies the recognizer classifies a SQL Server UPDLOCK-hinted SELECT as
// SELECT_FOR_UPDATE.
@Test
public void testGetSqlType() {
    String sql = "SELECT * FROM t WITH (UPDLOCK) WHERE id = ?";
    SQLStatement ast = getSQLStatement(sql);
    SqlServerSelectForUpdateRecognizer recognizer = new SqlServerSelectForUpdateRecognizer(sql, ast);
    Assertions.assertEquals(recognizer.getSQLType(), SQLType.SELECT_FOR_UPDATE);
}
/**
 * Handles a server reload request: sheds SDK client connections down toward the
 * requested count, optionally redirecting them to another server.
 *
 * <p>Responds "ignore" when the current SDK connection count is already at or below
 * the requested count; otherwise sheds connections and responds "ok". The effective
 * target is floored so that no more than LOADER_FACTOR of current clients are shed
 * in one pass.
 */
@Override
public ServerReloadResponse handle(ServerReloadRequest request, RequestMeta meta) throws NacosException {
    ServerReloadResponse response = new ServerReloadResponse();
    Loggers.REMOTE.info("server reload request receive,reload count={},redirectServer={},requestIp={}",
            request.getReloadCount(), request.getReloadServer(), meta.getClientIp());

    // Count only SDK-sourced connections.
    Map<String, String> filter = new HashMap<>(2);
    filter.put(RemoteConstants.LABEL_SOURCE, RemoteConstants.LABEL_SOURCE_SDK);
    int sdkCount = connectionManager.currentClientsCount(filter);

    int requestedCount = request.getReloadCount();
    if (sdkCount <= requestedCount) {
        // Already at or below target — nothing to shed.
        response.setMessage("ignore");
        return response;
    }

    int effectiveCount = (int) Math.max(requestedCount, sdkCount * (1 - RemoteUtils.LOADER_FACTOR));
    connectionManager.loadCount(effectiveCount, request.getReloadServer());
    response.setMessage("ok");
    return response;
}
// Verifies both branches of handle(): with 2 SDK clients, a reload count of 2 is
// ignored (already at target) and a reload count of 1 triggers shedding ("ok").
@Test
void testHandle() {
    Mockito.when(connectionManager.currentClientsCount(Mockito.any())).thenReturn(2);
    ServerReloadRequest reloadRequest = new ServerReloadRequest();
    reloadRequest.setReloadCount(2);
    reloadRequest.setReloadServer("test");
    RequestMeta meta = new RequestMeta();
    meta.setClientIp("1.1.1.1");
    try {
        ServerReloadResponse reloadResponse = handler.handle(reloadRequest, meta);
        assertEquals("ignore", reloadResponse.getMessage());
    } catch (NacosException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
    reloadRequest.setReloadCount(1);
    try {
        ServerReloadResponse reloadResponse = handler.handle(reloadRequest, meta);
        assertEquals("ok", reloadResponse.getMessage());
    } catch (NacosException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
// OAuth2 callback filter entry point. Resolves the identity provider from the
// callback URI; a null result means the helper handled the response itself
// (presumably writing the error — confirm against resolveProviderOrHandleResponse),
// so we only continue the flow when a provider was found.
@Override
public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
    IdentityProvider provider = resolveProviderOrHandleResponse(request, response, CALLBACK_PATH);
    if (provider != null) {
        handleProvider(request, response, provider);
    }
}
// Verifies that a callback URI with no provider key yields an error response and
// emits no authentication event.
@Test
public void fail_when_no_oauth2_provider_provided() throws Exception {
    when(request.getRequestURI()).thenReturn("/oauth2/callback");
    underTest.doFilter(request, response, chain);
    assertError("No provider key found in URI");
    verifyNoInteractions(authenticationEvent);
}
// Stable identifier for this SCM implementation (the ID constant), used to address
// it in REST paths.
@NonNull
@Override
public String getId() {
    return ID;
}
// Verifies the Bitbucket Cloud repositories endpoint without an explicit
// credentialId: both team repos are listed with name/description/privacy, and an
// empty repo reports a null default branch.
@Test
public void getRepositoriesWithoutCredentialId() throws IOException, UnirestException {
    createCredential(BitbucketCloudScm.ID);
    Map repoResp = new RequestBuilder(baseUrl)
        .crumb(crumb)
        .status(200)
        .jwtToken(getJwtToken(j.jenkins, authenticatedUser.getId(), authenticatedUser.getId()))
        .post("/organizations/jenkins/scm/"+BitbucketCloudScm.ID+"/organizations/" + BbCloudWireMock.TEAM_UUID + "/repositories/"+getApiUrlParam())
        .build(Map.class);
    List repos = (List) ((Map)repoResp.get("repositories")).get("items");
    assertEquals("pipeline-demo-test", ((Map)repos.get(0)).get("name"));
    assertEquals("pipeline-demo-test", ((Map)repos.get(0)).get("description"));
    assertTrue((Boolean) ((Map)repos.get(0)).get("private"));
    assertEquals("master",((Map)repos.get(0)).get("defaultBranch"));
    assertEquals(2, repos.size());
    assertEquals("emptyrepo", ((Map)repos.get(1)).get("name"));
    assertEquals("emptyrepo", ((Map)repos.get(1)).get("description"));
    assertTrue((Boolean) ((Map)repos.get(1)).get("private"));
    // An empty repository has no default branch.
    assertNull(((Map)repos.get(1)).get("defaultBranch"));
}
public static CoordinatorRecord newConsumerGroupSubscriptionMetadataRecord( String groupId, Map<String, TopicMetadata> newSubscriptionMetadata ) { ConsumerGroupPartitionMetadataValue value = new ConsumerGroupPartitionMetadataValue(); newSubscriptionMetadata.forEach((topicName, topicMetadata) -> { List<ConsumerGroupPartitionMetadataValue.PartitionMetadata> partitionMetadata = new ArrayList<>(); // If the partition rack information map is empty, store an empty list in the record. if (!topicMetadata.partitionRacks().isEmpty()) { topicMetadata.partitionRacks().forEach((partition, racks) -> partitionMetadata.add(new ConsumerGroupPartitionMetadataValue.PartitionMetadata() .setPartition(partition) .setRacks(new ArrayList<>(racks)) ) ); } value.topics().add(new ConsumerGroupPartitionMetadataValue.TopicMetadata() .setTopicId(topicMetadata.id()) .setTopicName(topicMetadata.name()) .setNumPartitions(topicMetadata.numPartitions()) .setPartitionMetadata(partitionMetadata) ); }); return new CoordinatorRecord( new ApiMessageAndVersion( new ConsumerGroupPartitionMetadataKey() .setGroupId(groupId), (short) 4 ), new ApiMessageAndVersion( value, (short) 0 ) ); }
// Verifies that when no rack information is available (empty partitionRacks maps),
// the generated subscription metadata record stores empty partitionMetadata lists
// for every topic.
@Test
public void testEmptyPartitionMetadataWhenRacksUnavailableGroupSubscriptionMetadataRecord() {
    Uuid fooTopicId = Uuid.randomUuid();
    Uuid barTopicId = Uuid.randomUuid();
    Map<String, TopicMetadata> subscriptionMetadata = new LinkedHashMap<>();
    subscriptionMetadata.put("foo", new TopicMetadata(
        fooTopicId,
        "foo",
        10,
        Collections.emptyMap()
    ));
    subscriptionMetadata.put("bar", new TopicMetadata(
        barTopicId,
        "bar",
        20,
        Collections.emptyMap()
    ));

    CoordinatorRecord expectedRecord = new CoordinatorRecord(
        new ApiMessageAndVersion(
            new ConsumerGroupPartitionMetadataKey()
                .setGroupId("group-id"),
            (short) 4
        ),
        new ApiMessageAndVersion(
            new ConsumerGroupPartitionMetadataValue()
                .setTopics(Arrays.asList(
                    new ConsumerGroupPartitionMetadataValue.TopicMetadata()
                        .setTopicId(fooTopicId)
                        .setTopicName("foo")
                        .setNumPartitions(10)
                        .setPartitionMetadata(Collections.emptyList()),
                    new ConsumerGroupPartitionMetadataValue.TopicMetadata()
                        .setTopicId(barTopicId)
                        .setTopicName("bar")
                        .setNumPartitions(20)
                        .setPartitionMetadata(Collections.emptyList()))),
            (short) 0));

    assertRecordEquals(expectedRecord, newConsumerGroupSubscriptionMetadataRecord(
        "group-id",
        subscriptionMetadata
    ));
}
// Maven plugin entry point: starts an embedded ActiveMQ broker from configUri,
// honoring the "skip" flag, then publishes the broker's transport connector URIs
// as Maven project properties.
@Override
public void execute() throws MojoExecutionException {
    if (skip) {
        getLog().info("Skipped execution of ActiveMQ Broker");
        return;
    }
    addActiveMQSystemProperties();
    getLog().info("Loading broker configUri: " + configUri);
    // XBean-style config files need their path rewritten into a URL-compliant
    // absolute form before the broker can load them.
    if (this.xBeanFileResolver.isXBeanFile(configUri)) {
        getLog().debug("configUri before transformation: " + configUri);
        configUri = this.xBeanFileResolver.toUrlCompliantAbsolutePath(configUri);
        getLog().debug("configUri after transformation: " + configUri);
    }
    this.useBrokerManager().start(fork, configUri);
    //
    // Register the transport connector URIs in the Maven project.
    //
    this.registerTransportConnectorUris();
    getLog().info("Started the ActiveMQ Broker");
}
// Verifies that execute() delegates to the broker manager with fork=false and the
// configured URI.
@Test
public void testExecute () throws Exception {
    this.startBrokerMojo.execute();
    Mockito.verify(this.mockBrokerManager).start(false, "x-config-uri-x");
}
// Convenience overload: appends the SELECT column list with the boolean flag of the
// three-arg variant set to true (presumably enabling identifier quoting — see the
// three-argument overload for the flag's exact meaning).
public static void appendSelectColumns(StringBuilder stringBuilder, List<? extends ColumnHandle> columns) {
    appendSelectColumns(stringBuilder, columns, true);
}
// Verifies appendSelectColumns joins column names with commas and quotes CQL
// reserved words ("table" becomes "\"table\"").
@Test
public void testAppendSelectColumns() {
    List<CassandraColumnHandle> columns = ImmutableList.of(
        new CassandraColumnHandle("", "foo", 0, CassandraType.VARCHAR, null, false, false, false, false),
        new CassandraColumnHandle("", "bar", 0, CassandraType.VARCHAR, null, false, false, false, false),
        new CassandraColumnHandle("", "table", 0, CassandraType.VARCHAR, null, false, false, false, false));
    StringBuilder sb = new StringBuilder();
    CassandraCqlUtils.appendSelectColumns(sb, columns);
    String str = sb.toString();
    assertEquals("foo,bar,\"table\"", str);
}
// Computes a checksum over native memory [address + offset, address + offset + length)
// via the JDK-internal direct-ByteBuffer update method handle.
// invokeExact is signature-sensitive: the third argument is the exclusive END offset,
// not the length.
public int compute(final long address, final int offset, final int length) {
    try {
        return (int)UPDATE_DIRECT_BYTE_BUFFER.invokeExact(address, offset, offset + length /* end */);
    } catch (final Throwable t) {
        // Method handles declare Throwable; rethrow unchecked so callers need no checked handling.
        LangUtil.rethrowUnchecked(t);
        // Presumably unreachable — rethrowUnchecked propagates t (confirm its contract).
        return -1;
    }
}
// Cross-checks compute() against the reference Checksum implementation (obtained
// reflectively) for every length 1..1024 at a fixed non-zero buffer offset, using
// a seeded Random so failures are reproducible.
@Test
void compute() throws ReflectiveOperationException {
    final Random random = new Random(54893045794L);
    final int offset = 7;
    final ByteBuffer buffer = ByteBuffer.allocateDirect(1024 + offset);
    final long address = address(buffer);
    for (int i = 1; i <= 1024; i++) {
        final int length = i;
        final byte[] data = new byte[length];
        random.nextBytes(data);
        buffer.clear().position(offset);
        buffer.put(data);
        buffer.flip().position(offset);
        final Checksum crc32c = (Checksum)constructor.newInstance();
        method.invoke(crc32c, buffer);
        final int checksum = (int)crc32c.getValue();
        assertEquals(checksum, INSTANCE.compute(address, offset, length), () -> "Failed on length: " + length);
    }
}
// Fetches a file's content from the GitHub contents API for the requested
// owner/repo/path (optionally pinned to a branch via ?ref=), after verifying the
// requesting user has push permission on the repository.
@Override
protected Object getContent(ScmGetRequest request) {
    GithubScm.validateUserHasPushPermission(request.getApiUrl(), request.getCredentials().getPassword().getPlainText(), request.getOwner(), request.getRepo());
    String url = String.format("%s/repos/%s/%s/contents/%s",
        request.getApiUrl(),
        request.getOwner(),
        request.getRepo(),
        request.getPath());
    if(request.getBranch() != null){ //if branch is present fetch this file from branch
        url += "?ref="+request.getBranch();
    }
    try {
        Map ghContent = HttpRequest.get(url)
            .withAuthorizationToken(request.getCredentials().getPassword().getPlainText())
            .to(Map.class);
        if(ghContent == null){
            throw new ServiceException.UnexpectedErrorException("Failed to load file: "+request.getPath());
        }
        String base64Data = (String)ghContent.get("content");
        // JENKINS-47887 - this content contains \n which breaks IE11
        base64Data = base64Data == null ? null : base64Data.replace("\n", "");
        return new GithubFile(new GitContent.Builder()
            .sha((String)ghContent.get("sha"))
            .name((String)ghContent.get("name"))
            .repo(request.getRepo())
            .owner(request.getOwner())
            .path(request.getPath())
            .base64Data(base64Data)
            .build());
    } catch (IOException e) {
        throw new ServiceException.UnexpectedErrorException(String.format("Failed to load file %s: %s", request.getPath(),e.getMessage()), e);
    }
}
// Verifies that a user without a configured GitHub Enterprise credential cannot read
// content through a multibranch project: the provider must throw PreconditionRequired.
@Test
public void unauthorizedAccessToContentForMbpGHEShouldFail() throws UnirestException, IOException {
    User alice = User.get("alice");
    alice.setFullName("Alice Cooper");
    alice.addProperty(new Mailer.UserProperty("alice@jenkins-ci.org"));

    String aliceCredentialId = createGithubEnterpriseCredential(alice);

    StaplerRequest staplerRequest = mockStapler(GithubEnterpriseScm.ID);

    MultiBranchProject mbp = mockMbp(aliceCredentialId, alice, GithubEnterpriseScm.DOMAIN_NAME);

    try {
        //Bob trying to access content but his credential is not setup so should fail
        new GithubScmContentProvider().getContent(staplerRequest, mbp);
    }catch (ServiceException.PreconditionRequired e){
        assertEquals("Can't access content from github: no credential found", e.getMessage());
        return;
    }
    fail("Should have failed with PreConditionException");
}
/**
 * Converts a per-directory block-location map into the proto entry list sent to the
 * block master.
 *
 * <p>The proto location carries only tier alias and medium type (not the directory
 * index), so blocks from different directories on the same tier/medium are merged
 * into a single entry.
 *
 * @param blockListOnLocation block IDs grouped by their on-worker storage location
 * @return one {@link LocationBlockIdListEntry} per distinct (tier, medium) location
 */
@VisibleForTesting
public List<LocationBlockIdListEntry> convertBlockListMapToProto(
    Map<BlockStoreLocation, List<Long>> blockListOnLocation) {
  final List<LocationBlockIdListEntry> entryList = new ArrayList<>();

  Map<BlockStoreLocationProto, List<Long>> tierToBlocks = new HashMap<>();
  for (Map.Entry<BlockStoreLocation, List<Long>> entry : blockListOnLocation.entrySet()) {
    BlockStoreLocation loc = entry.getKey();
    BlockStoreLocationProto locationProto = BlockStoreLocationProto.newBuilder()
        .setTierAlias(loc.tierAlias())
        .setMediumType(loc.mediumType())
        .build();
    // Merge directories mapping to the same proto location (computeIfAbsent replaces
    // the original containsKey/get/put dance).
    tierToBlocks.computeIfAbsent(locationProto, k -> new ArrayList<>())
        .addAll(entry.getValue());
  }

  for (Map.Entry<BlockStoreLocationProto, List<Long>> entry : tierToBlocks.entrySet()) {
    BlockIdList blockIdList = BlockIdList.newBuilder().addAllBlockId(entry.getValue()).build();
    LocationBlockIdListEntry listEntry = LocationBlockIdListEntry.newBuilder()
        .setKey(entry.getKey()).setValue(blockIdList).build();
    entryList.add(listEntry);
  }
  return entryList;
}
// Verifies that convertBlockListMapToProto merges directories on the same tier into
// one proto entry per (tier, medium) location, preserving all block IDs. Entry order
// is unspecified, so both orderings are accepted.
@Test
public void convertBlockListMapToProtoMergeDirsInSameTier() {
    BlockMasterClient client = new BlockMasterClient(
        MasterClientContext.newBuilder(ClientContext.create()).build());

    Map<BlockStoreLocation, List<Long>> blockMap = new HashMap<>();
    BlockStoreLocation memDir0 = new BlockStoreLocation("MEM", 0);
    blockMap.put(memDir0, Arrays.asList(1L, 2L, 3L));
    BlockStoreLocation memDir1 = new BlockStoreLocation("MEM", 1);
    blockMap.put(memDir1, Arrays.asList(4L, 5L, 6L, 7L));
    BlockStoreLocation ssdDir0 = new BlockStoreLocation("SSD", 0);
    blockMap.put(ssdDir0, Arrays.asList(11L, 12L, 13L, 14L));
    BlockStoreLocation ssdDir1 = new BlockStoreLocation("SSD", 1);
    blockMap.put(ssdDir1, Arrays.asList(15L, 16L, 17L, 18L, 19L));

    // Directories on the same tier will be merged together
    List<LocationBlockIdListEntry> protoList = client.convertBlockListMapToProto(blockMap);
    assertEquals(2, protoList.size());

    BlockStoreLocationProto memLocationProto = BlockStoreLocationProto.newBuilder()
        .setTierAlias("MEM").setMediumType("").build();
    BlockStoreLocationProto ssdLocationProto = BlockStoreLocationProto.newBuilder()
        .setTierAlias("SSD").setMediumType("").build();
    Set<BlockStoreLocationProto> blockLocations = protoList.stream()
        .map(LocationBlockIdListEntry::getKey).collect(Collectors.toSet());
    assertEquals(ImmutableSet.of(memLocationProto, ssdLocationProto), blockLocations);

    LocationBlockIdListEntry firstEntry = protoList.get(0);
    if (firstEntry.getKey().getTierAlias().equals("MEM")) {
        LocationBlockIdListEntry memTierEntry = protoList.get(0);
        List<Long> memProtoBlockList = memTierEntry.getValue().getBlockIdList();
        assertEquals(ImmutableSet.of(1L, 2L, 3L, 4L, 5L, 6L, 7L),
            new HashSet<>(memProtoBlockList));
        LocationBlockIdListEntry ssdTierEntry = protoList.get(1);
        List<Long> ssdProtoBlockList = ssdTierEntry.getValue().getBlockIdList();
        assertEquals(ImmutableSet.of(11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L),
            new HashSet<>(ssdProtoBlockList));
    } else {
        LocationBlockIdListEntry memTierEntry = protoList.get(1);
        List<Long> memProtoBlockList = memTierEntry.getValue().getBlockIdList();
        assertEquals(ImmutableSet.of(1L, 2L, 3L, 4L, 5L, 6L, 7L),
            new HashSet<>(memProtoBlockList));
        LocationBlockIdListEntry ssdTierEntry = protoList.get(0);
        List<Long> ssdProtoBlockList = ssdTierEntry.getValue().getBlockIdList();
        assertEquals(ImmutableSet.of(11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L),
            new HashSet<>(ssdProtoBlockList));
    }
}
/**
 * Returns the single value held by this set.
 *
 * @throws IllegalStateException if this set does not hold exactly one value
 */
@Override
public Object getSingleValue() {
    if (!isSingleValue()) {
        throw new IllegalStateException("EquatableValueSet does not have just a single value");
    }
    return entries.iterator().next().getValue();
}
// Verifies getSingleValue returns the lone value of a singleton set and throws
// IllegalStateException for a non-singleton (the "all" set).
@Test
public void testGetSingleValue() {
    assertEquals(EquatableValueSet.of(TestingIdType.ID, 0L).getSingleValue(), 0L);
    assertThrows(IllegalStateException.class, () -> EquatableValueSet.all(TestingIdType.ID).getSingleValue());
}