focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Builds a unique run configuration name of the form
 * "pentaho_auto_&lt;slaveServerName&gt;_config", appending "_N" (highest existing
 * numeric suffix + 1) when the base name is already taken.
 */
@VisibleForTesting String createRunConfigurationName( List<String> runConfigurations, String slaveServerName ) {
  String defaultName = String.format( "pentaho_auto_%s_config", slaveServerName );
  // "(_\d+)?" instead of the original "(_\d)*": an optional multi-digit suffix,
  // so names like "..._12" are recognized as variants of the base name.
  long count = runConfigurations.stream().filter( s -> s.matches( defaultName + "(_\\d+)?" ) ).count();
  if ( count == 0 ) {
    return defaultName;
  }
  // Highest existing numeric suffix; 0 when only the bare base name exists.
  Optional<Integer> index = runConfigurations.stream()
    .filter( s -> s.matches( defaultName + "_\\d+" ) )
    .map( s -> s.substring( defaultName.length() + 1 ) )
    .filter( s -> s.matches( "\\d+" ) )
    .map( Integer::valueOf )
    .max( Comparator.naturalOrder() );
  return String.format( "%s_%d", defaultName, index.orElse( 0 ) + 1 );
}
/** Verifies default naming and numeric-suffix incrementing of generated run configuration names. */
@Test
public void testCreateRunConfigurationName() throws Exception {
    // No existing configurations: the plain default name is returned.
    assertEquals( "pentaho_auto_carte_config",
        runConfigurationImportExtensionPoint.createRunConfigurationName( Collections.emptyList(), "carte" ) );
    // "_2" is the highest numeric suffix; the manually renamed entry is ignored.
    assertEquals( "pentaho_auto_carte_config_3",
        runConfigurationImportExtensionPoint.createRunConfigurationName(
            Arrays.asList( "pentaho_auto_carte_config_2", "pentaho_auto_carte_config_manuallyUpdated" ),
            "carte" ) );
}
/** Looks up the state cell for the given namespace and tag, using an empty (null) state context. */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test public void testWatermarkClearBeforeRead() throws Exception { StateTag<WatermarkHoldState> addr = StateTags.watermarkStateInternal("watermark", TimestampCombiner.EARLIEST); WatermarkHoldState bag = underTest.state(NAMESPACE, addr); bag.clear(); assertThat(bag.read(), Matchers.nullValue()); bag.add(new Instant(300)); assertThat(bag.read(), Matchers.equalTo(new Instant(300))); // Shouldn't need to read from windmill because the value is already available. Mockito.verifyNoMoreInteractions(mockReader); }
/** Extracts the configured property-or-field value from the given input object. */
@Override
public Object apply(Object input) {
    // NOTE(review): property-then-field fallback order is implemented inside
    // PropertyOrFieldSupport and not visible here — confirm against its docs.
    return PropertyOrFieldSupport.EXTRACTION.getValueOf(propertyOrFieldName, input);
}
@Test void should_fallback_to_field_if_exception_has_been_thrown_on_property_access() { // GIVEN Employee employee = new Employee(1L, new Name("Name"), 0) { @Override public Name getName() { throw new RuntimeException(); } }; ByNameSingleExtractor underTest = new ByNameSingleExtractor("name"); // WHEN Object result = underTest.apply(employee); // THEN then(result).isEqualTo(new Name("Name")); }
/**
 * Dispatches the DDL statement to its registered factory; unknown statement
 * types produce a KsqlException listing the supported statement classes.
 */
@Override
public DdlCommand create(
    final String sqlExpression,
    final DdlStatement ddlStatement,
    final SessionConfig config
) {
  final CallInfo callInfo = new CallInfo(sqlExpression, config);
  return FACTORIES
      .getOrDefault(ddlStatement.getClass(), (statement, cf, ci) -> {
        throw new KsqlException(
            "Unable to find ddl command factory for statement:"
                + statement.getClass()
                + " valid statements:"
                + FACTORIES.keySet()
        );
      })
      .handle(this, callInfo, ddlStatement);
}
@Test public void shouldCreateCommandForCreateTableWithOverriddenProperties() { // Given: final CreateTable statement = new CreateTable(SOME_NAME, TableElements.of( tableElement("COL1", new Type(SqlTypes.BIGINT)), tableElement( "COL2", new Type(SqlTypes.STRING))), false, true, withProperties, false); // When: commandFactories.create(sqlExpression, statement, SessionConfig.of(ksqlConfig, OVERRIDES)); // Then: verify(createSourceFactory).createTableCommand( statement, ksqlConfig.cloneWithPropertyOverwrite(OVERRIDES) ); }
@SuppressWarnings("checkstyle:HiddenField") public AwsCredentialsProvider credentialsProvider( String accessKeyId, String secretAccessKey, String sessionToken) { if (!Strings.isNullOrEmpty(accessKeyId) && !Strings.isNullOrEmpty(secretAccessKey)) { if (Strings.isNullOrEmpty(sessionToken)) { return StaticCredentialsProvider.create( AwsBasicCredentials.create(accessKeyId, secretAccessKey)); } else { return StaticCredentialsProvider.create( AwsSessionCredentials.create(accessKeyId, secretAccessKey, sessionToken)); } } if (!Strings.isNullOrEmpty(this.clientCredentialsProvider)) { return credentialsProvider(this.clientCredentialsProvider); } // Create a new credential provider for each client return DefaultCredentialsProvider.builder().build(); }
@Test public void testBasicCredentialsConfiguration() { AwsClientProperties awsClientProperties = new AwsClientProperties(); // set access key id and secret access key AwsCredentialsProvider credentialsProvider = awsClientProperties.credentialsProvider("key", "secret", null); assertThat(credentialsProvider.resolveCredentials()) .as("Should use basic credentials if access key ID and secret access key are set") .isInstanceOf(AwsBasicCredentials.class); assertThat(credentialsProvider.resolveCredentials().accessKeyId()) .as("The access key id should be the same as the one set by tag ACCESS_KEY_ID") .isEqualTo("key"); assertThat(credentialsProvider.resolveCredentials().secretAccessKey()) .as("The secret access key should be the same as the one set by tag SECRET_ACCESS_KEY") .isEqualTo("secret"); }
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } // Calculate difference in days long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } }
@Test public void testSetCalendarEndNull() throws KettleException { SalesforceConnection connection = new SalesforceConnection( logInterface, url, username, password ); GregorianCalendar startDate = new GregorianCalendar( 2000, 2, 10 ); try { connection.setCalendar( recordsFilter, startDate, null ); fail(); } catch ( KettleException expected ) { // OK } }
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { if(log.isWarnEnabled()) { log.warn(String.format("Disable checksum verification for %s", file)); // Do not set checksum when metadata key X-Static-Large-Object is present. Disable checksum verification in download filter. status.setChecksum(Checksum.NONE); } final Response response; if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { response = session.getClient().getObject(regionService.lookup(file), containerService.getContainer(file).getName(), containerService.getKey(file), range.getStart()); } else { response = session.getClient().getObject(regionService.lookup(file), containerService.getContainer(file).getName(), containerService.getKey(file), range.getStart(), range.getLength()); } } else { response = session.getClient().getObject(regionService.lookup(file), containerService.getContainer(file).getName(), containerService.getKey(file)); } return new HttpMethodReleaseInputStream(response.getResponse(), status); } catch(GenericException e) { throw new SwiftExceptionMappingService().map("Download {0} failed", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e, file); } }
/** Downloads a known 182-byte gzip log from the CDN log container and verifies the byte count. */
@Test
public void testDownloadGzip() throws Exception {
    final TransferStatus status = new TransferStatus();
    status.setLength(182L);
    final Path container = new Path(".ACCESS_LOGS", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("DFW");
    final SwiftRegionService regionService = new SwiftRegionService(session);
    final Path file = new Path(container,
        "/cdn.cyberduck.ch/2015/03/01/10/3b1d6998c430d58dace0c16e58aaf925.log.gz",
        EnumSet.of(Path.Type.file));
    final InputStream in = new SwiftReadFeature(session, regionService)
        .read(file, status, new DisabledConnectionCallback());
    assertNotNull(in);
    final BytecountStreamListener listener = new BytecountStreamListener();
    new StreamCopier(status, status).withListener(listener).transfer(in, NullOutputStream.NULL_OUTPUT_STREAM);
    // All 182 bytes received, declared length untouched.
    assertEquals(182L, listener.getRecv());
    assertEquals(182L, status.getLength());
    in.close();
}
/**
 * OAuth2 password grant: authenticates the user with username/password and
 * issues an admin-scoped access token for the given client and scopes.
 */
@Override
public OAuth2AccessTokenDO grantPassword(String username, String password, String clientId, List<String> scopes) {
    // Authenticate using account + password.
    AdminUserDO user = adminAuthService.authenticate(username, password);
    Assert.notNull(user, "用户不能为空!"); // defensive programming (runtime message kept verbatim)
    // Create the access token for the authenticated admin user.
    return oauth2TokenService.createAccessToken(user.getId(), UserTypeEnum.ADMIN.getValue(), clientId, scopes);
}
@Test public void testGrantPassword() { // 准备参数 String username = randomString(); String password = randomString(); String clientId = randomString(); List<String> scopes = Lists.newArrayList("read", "write"); // mock 方法(认证) AdminUserDO user = randomPojo(AdminUserDO.class); when(adminAuthService.authenticate(eq(username), eq(password))).thenReturn(user); // mock 方法(访问令牌) OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class); when(oauth2TokenService.createAccessToken(eq(user.getId()), eq(UserTypeEnum.ADMIN.getValue()), eq(clientId), eq(scopes))).thenReturn(accessTokenDO); // 调用,并断言 assertPojoEquals(accessTokenDO, oauth2GrantService.grantPassword( username, password, clientId, scopes)); }
@Override public AppResponse process(Flow flow, AppSessionRequest request) { Map<String, Object> result = new HashMap<>(digidClient.getAccountRequestGbaStatus(appSession.getRegistrationId())); if (result.get(lowerUnderscore(STATUS)).equals("OK")) { return new OkResponse(); } else if (result.get(lowerUnderscore(STATUS)).equals("PENDING")) { setValid(false); // gba check in progress, do not transition to next step yet return new StatusResponse("PENDING"); } else { if (result.get(lowerUnderscore(STATUS)).equals("NOK") && result.get(ERROR) != null) { return new PollBrpResponse((String) result.get(ERROR), result); } return new NokResponse(); } }
/** A PENDING GBA status yields a StatusResponse("PENDING") and marks the step invalid. */
@Test
void processPendingTest() {
    when(digidClientMock.getAccountRequestGbaStatus(1337L))
        .thenReturn(Map.of(lowerUnderscore(STATUS), "PENDING"));

    AppResponse response = pollBrp.process(flowMock, null);

    assertTrue(response instanceof StatusResponse);
    assertEquals("PENDING", ((StatusResponse) response).getStatus());
    // Step must not advance while the check is still running.
    assertFalse(pollBrp.isValid());
}
/**
 * Parses "host[:port]" into an InetSocketAddress, using defaultPort when no
 * port is present.
 * NOTE(review): the boolean passed to the 3-arg overload (resolution flag,
 * presumably) is not visible in this chunk — confirm against that overload.
 */
public static InetSocketAddress parseAddress(String address, int defaultPort) {
    return parseAddress(address, defaultPort, false);
}
/** A bare host name parses to an unresolved address carrying the given port and host string. */
@Test
void shouldParseAddressForHostName() {
    InetSocketAddress parsed = AddressUtils.parseAddress("example.com", 80);

    assertThat(parsed.isUnresolved()).isTrue();
    assertThat(parsed.getPort()).isEqualTo(80);
    assertThat(parsed.getHostString()).isEqualTo("example.com");
}
/** Emits a {@code <listeners>} XML section with one {@code <listener>} node per config; nothing when empty. */
private static void listener(XmlGenerator gen, List<ListenerConfig> listeners) {
    if (listeners.isEmpty()) {
        return;
    }
    gen.open("listeners");
    for (ListenerConfig listenerConfig : listeners) {
        String listenerValue =
            classNameOrImplClass(listenerConfig.getClassName(), listenerConfig.getImplementation());
        gen.node("listener", listenerValue);
    }
    gen.close();
}
/** A listener config added to the client config must survive the XML generate/parse round trip. */
@Test
public void listener() {
    ListenerConfig original = new ListenerConfig(randomString());
    clientConfig.addListenerConfig(original);

    ClientConfig roundTripped = newConfigViaGenerator();

    assertCollection(clientConfig.getListenerConfigs(), roundTripped.getListenerConfigs());
}
public static String formatHostnameForHttp(InetSocketAddress addr) { String hostString = NetUtil.getHostname(addr); if (NetUtil.isValidIpV6Address(hostString)) { if (!addr.isUnresolved()) { hostString = NetUtil.toAddressString(addr.getAddress()); } else if (hostString.charAt(0) == '[' && hostString.charAt(hostString.length() - 1) == ']') { // If IPv6 address already contains brackets, let's return as is. return hostString; } return '[' + hostString + ']'; } return hostString; }
/** IPv4 addresses are emitted without brackets. */
@Test
public void testIpv4() throws Exception {
    InetSocketAddress address = new InetSocketAddress(InetAddress.getByName("10.0.0.1"), 8080);
    assertEquals("10.0.0.1", HttpUtil.formatHostnameForHttp(address));
}
/**
 * Initializes the repository with the previous analysis' file hashes, keyed
 * by component uuid. May only be called once; the map is stored as an
 * unmodifiable view of the caller's map.
 */
public void set(Map<String, FileHashesDto> previousFileHashesByUuid) {
    checkState(this.previousFileHashesByUuid == null, "Repository already initialized");
    checkNotNull(previousFileHashesByUuid);
    this.previousFileHashesByUuid = Collections.unmodifiableMap(previousFileHashesByUuid);
}
/** Initializing the repository a second time must fail with IllegalStateException. */
@Test
public void fail_if_set_twice() {
    Map<String, FileHashesDto> hashes = emptyMap();
    previousFileHashesRepository.set(hashes);

    assertThatThrownBy(() -> previousFileHashesRepository.set(hashes))
        .isInstanceOf(IllegalStateException.class);
}
public static Long jsToInteger( Object value, Class<?> clazz ) { if ( Number.class.isAssignableFrom( clazz ) ) { return ( (Number) value ).longValue(); } else { String classType = clazz.getName(); if ( classType.equalsIgnoreCase( "java.lang.String" ) ) { return ( new Long( (String) value ) ); } else if ( classType.equalsIgnoreCase( JS_UNDEFINED ) ) { return null; } else if ( classType.equalsIgnoreCase( JS_NATIVE_NUM ) ) { Number nb = Context.toNumber( value ); return nb.longValue(); } else if ( classType.equalsIgnoreCase( JS_NATIVE_JAVA_OBJ ) ) { // Is it a Value? // try { Value v = (Value) Context.jsToJava( value, Value.class ); return v.getInteger(); } catch ( Exception e2 ) { String string = Context.toString( value ); return Long.parseLong( Const.trim( string ) ); } } else { return Long.parseLong( value.toString() ); } } }
/** A boxed int with an unrelated declared class falls through to toString() parsing. */
@Test
public void jsToInteger_Other_Int() throws Exception {
    assertEquals( LONG_ONE, JavaScriptUtils.jsToInteger( 1, getClass() ) );
}
/**
 * Returns whether the project is managed by delegating to the active
 * managed-project service; false when no such service is present.
 */
@Override
public boolean isProjectManaged(DbSession dbSession, String projectUuid) {
    return findManagedProjectService()
        .map(service -> service.isProjectManaged(dbSession, projectUuid))
        .orElse(false);
}
/** With one never-managed and one always-managed delegate, the managed one must win. */
@Test
public void isProjectManaged_whenManagedInstanceServices_shouldDelegatesToRightService() {
    DelegatingManagedServices services = new DelegatingManagedServices(
        Set.of(new NeverManagedInstanceService(), new AlwaysManagedInstanceService()));

    assertThat(services.isProjectManaged(dbSession, "whatever")).isTrue();
}
/**
 * Returns true when the view is a NavigationView from either the legacy
 * support library or the Material (AndroidX) package. The check is done
 * reflectively by class name, so neither dependency is required at compile time.
 */
public static boolean instanceOfNavigationView(Object view) {
    return ReflectUtil.isInstance(view, "android.support.design.widget.NavigationView", "com.google.android.material.navigation.NavigationView");
}
/** A plain CheckBox is not a NavigationView. */
@Test
public void instanceOfNavigationView() {
    CheckBox checkBox = new CheckBox(mApplication);
    checkBox.setText("child1");
    Assert.assertFalse(SAViewUtils.instanceOfNavigationView(checkBox));
}
/**
 * Serializes the sorted path set into a comma-separated string.
 *
 * @throws ParameterException when the set is null
 */
@Override
public String convertTo(SortedSet<Path> value) {
    if (value == null) {
        throw new ParameterException("String list of Paths must not be null.");
    }
    return value.stream()
        .map(Path::toString)
        .collect(Collectors.joining(","));
}
/** convertTo must reject a null path set with a ParameterException. */
@Test(expected = ParameterException.class)
public void testConvertToNull() {
    converter.convertTo(null);
}
/** Starts the appender, then configures the stack-trace layout (order matters: super.start() first). */
public void start() {
    super.start();
    setupStackTraceLayout();
}
@Test public void unknownHostShouldNotCauseStopToFail() { // See LOGBACK-960 sa.setSyslogHost("unknown.host"); sa.setFacility("MAIL"); sa.start(); sa.stop(); }
/** Returns an iterator that walks the queue in ascending (head-to-tail) order. */
@Override
public Iterator<T> iterator() {
    return new LinkedQueueIterator(Direction.ASCENDING);
}
@Test public void testDoubleRemoveFails() { LinkedDeque<Integer> q = new LinkedDeque<>(Arrays.asList(1, 2, 3)); Iterator<Integer> i = q.iterator(); i.next(); i.remove(); try { i.remove(); } catch (IllegalStateException e) { // Expected } }
/** Returns the named cache, resolving it under the cache-manager name prefix. */
@Override
public <K, V> ICache<K, V> getCache(String name) {
    checkNotNull(name, "Retrieving a cache instance with a null name is not allowed!");
    String fullName = HazelcastCacheManager.CACHE_MANAGER_PREFIX + name;
    return getCacheByFullName(fullName);
}
@Test public void getCache_when_serviceNotFoundExceptionIsThrown_then_illegalStateExceptionIsThrown() { // when HazelcastException with ServiceNotFoundException cause was thrown by hzInstance.getDistributedObject // (i.e. cache support is not available server-side) HazelcastInstance hzInstance = mock(HazelcastInstance.class); HazelcastException hzException = new HazelcastException("mock exception", new ServiceNotFoundException("mock exception")); when(hzInstance.getDistributedObject(anyString(), anyString())).thenThrow(hzException); ClientICacheManager clientCacheManager = new ClientICacheManager(hzInstance); // then an IllegalStateException will be thrown by getCache assertThrows(IllegalStateException.class, () -> clientCacheManager.getCache("any-cache")); }
/**
 * Returns the cached key-property map for the given MBean name, parsing it
 * from the key-property-list string on first access. Property insertion
 * order is preserved.
 */
public LinkedHashMap<String, String> getKeyPropertyList(ObjectName mbeanName) {
    // computeIfAbsent replaces the original check-then-put sequence: one
    // lookup, and the parse runs only when the entry is missing.
    return keyPropertiesPerBean.computeIfAbsent(mbeanName, name -> {
        LinkedHashMap<String, String> keyProperties = new LinkedHashMap<>();
        String properties = name.getKeyPropertyListString();
        Matcher match = PROPERTY_PATTERN.matcher(properties);
        while (match.lookingAt()) {
            keyProperties.put(match.group(1), match.group(2));
            // Consume the matched "key=value" and the separating comma, if any.
            properties = properties.substring(match.end());
            if (properties.startsWith(",")) {
                properties = properties.substring(1);
            }
            match.reset(properties);
        }
        return keyProperties;
    });
}
/** Two lookups of the same ObjectName must yield equal property maps (cache hit path). */
@Test
public void testIdempotentGet() throws Throwable {
    JmxMBeanPropertyCache cache = new JmxMBeanPropertyCache();
    ObjectName name = new ObjectName("com.organisation:name=value");

    LinkedHashMap<String, String> first = cache.getKeyPropertyList(name);
    LinkedHashMap<String, String> second = cache.getKeyPropertyList(name);

    assertEquals(first, second);
}
static void parseAndValidateExtensionSchemas(String resolverPath, File inputDir) throws IOException, InvalidExtensionSchemaException { // Parse each extension schema and validate it Iterator<File> iterator = FileUtils.iterateFiles(inputDir, new String[]{PDL}, true); DataSchemaResolver resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(resolverPath); while (iterator.hasNext()) { File inputFile = iterator.next(); PdlSchemaParser parser = new PdlSchemaParser(resolver); parser.parse(new FileInputStream(inputFile)); if (parser.hasError()) { throw new InvalidExtensionSchemaException(parser.errorMessage()); } List<DataSchema> topLevelDataSchemas = parser.topLevelDataSchemas(); if (topLevelDataSchemas == null || topLevelDataSchemas.isEmpty() || topLevelDataSchemas.size() > 1) { throw new InvalidExtensionSchemaException("Could not parse extension schema : " + inputFile.getAbsolutePath()); } // Validate that the schema is a named schema DataSchema topLevelDataSchema = topLevelDataSchemas.get(0); if (!(topLevelDataSchema instanceof NamedDataSchema)) { throw new InvalidExtensionSchemaException("Invalid extension schema : " + inputFile.getAbsolutePath() + ", the schema is not a named schema."); } // Validate that the schema has the proper suffix in its name if (!((NamedDataSchema) topLevelDataSchema).getName().endsWith(EXTENSIONS_SUFFIX)) { throw new InvalidExtensionSchemaException( "Invalid extension schema name: '" + ((NamedDataSchema) topLevelDataSchema).getName() + "'. 
The name of the extension schema must be <baseSchemaName> + 'Extensions'"); } // Validate that the schema includes exactly one base schema List<NamedDataSchema> includes = ((RecordDataSchema) topLevelDataSchema).getInclude(); if (includes.size() != 1) { throw new InvalidExtensionSchemaException("The extension schema: '" + ((NamedDataSchema) topLevelDataSchema).getName() + "' should include and only include the base schema"); } // Validate that the schema's name is suffixed with the name of the base schema NamedDataSchema includeSchema = includes.get(0); if (!((NamedDataSchema) topLevelDataSchema).getName().startsWith(includeSchema.getName())) { throw new InvalidExtensionSchemaException( "Invalid extension schema name: '" + ((NamedDataSchema) topLevelDataSchema).getName() + "'. The name of the extension schema must be baseSchemaName: '" + includeSchema.getName() + "' + 'Extensions"); } List<RecordDataSchema.Field> extensionSchemaFields = ((RecordDataSchema) topLevelDataSchema).getFields() .stream() .filter(f -> !((RecordDataSchema) topLevelDataSchema).isFieldFromIncludes(f)) .collect(Collectors.toList()); // Validate all the extension fields checkExtensionSchemaFields(extensionSchemaFields); } }
/** Data-driven check: valid inputs validate cleanly; invalid ones raise the expected message. */
@Test(dataProvider = "extensionSchemaInputFiles")
public void testExtensionSchemaValidation(String inputDir, boolean isValid, String errorMessage) {
    String resolverPath = testPegasusDir;
    String inputPath = testExtensionDir + File.separator + inputDir;
    SoftAssert softly = new SoftAssert();
    try {
        ExtensionSchemaValidationCmdLineApp.parseAndValidateExtensionSchemas(resolverPath, new File(inputPath));
        // No exception: the case must be a valid one with no expected message.
        softly.assertTrue(isValid);
        softly.assertEquals(null, errorMessage);
    } catch (Exception e) {
        // Exception: the case must be invalid and carry the expected message.
        softly.assertTrue(!isValid);
        softly.assertEquals(e.getMessage(), errorMessage);
    }
    softly.assertAll();
}
/** Returns a random key from the database, blocking on the async variant's result. */
@Override
public String randomKey() {
    return commandExecutor.get(randomKeyAsync());
}
/** randomKey returns one of the existing keys, the sole remaining key, then null after flush. */
@Test
public void testRandomKey() {
    RBucket<String> first = redisson.getBucket("test1");
    first.set("someValue1");
    RBucket<String> second = redisson.getBucket("test2");
    second.set("someValue2");

    // Two keys present: either may be returned.
    assertThat(redisson.getKeys().randomKey()).isIn("test1", "test2");

    // Only "test2" remains after deleting "test1".
    redisson.getKeys().delete("test1");
    Assertions.assertEquals("test2", redisson.getKeys().randomKey());

    // Empty database yields null.
    redisson.getKeys().flushdb();
    Assertions.assertNull(redisson.getKeys().randomKey());
}
/**
 * Hash combining version, hits and the record timestamps with the standard
 * 31-multiplier scheme; must stay consistent with equals over the same fields.
 * NOTE(review): this arithmetic only compiles unchanged if all these fields
 * are ints — confirm against the field declarations, which are not visible here.
 */
@Override
public int hashCode() {
    int result = version;
    result = 31 * result + hits;
    result = 31 * result + lastAccessTime;
    result = 31 * result + lastUpdateTime;
    result = 31 * result + creationTime;
    result = 31 * result + lastStoredTime;
    return result;
}
/** Equal records hash equally; records differing in any single field should hash differently. */
@Test
public void testHashCode() {
    // Reflexive and equal-attributes cases.
    assertEquals(record.hashCode(), record.hashCode());
    assertEquals(record.hashCode(), recordSameAttributes.hashCode());

    // Only meaningful when the runtime produces distinct hashes.
    assumeDifferentHashCodes();
    assertNotEquals(record.hashCode(), recordOtherVersion.hashCode());
    assertNotEquals(record.hashCode(), recordOtherCreationTime.hashCode());
    assertNotEquals(record.hashCode(), recordOtherHits.hashCode());
    assertNotEquals(record.hashCode(), recordOtherLastAccessTime.hashCode());
    assertNotEquals(record.hashCode(), recordOtherLastUpdateTime.hashCode());
}
/** Registers the config-push TPS control points (push, success, fail) after bean construction. */
@PostConstruct
void registerTpsPoint() {
    tpsControlManager.registerTpsPoint(POINT_CONFIG_PUSH);
    tpsControlManager.registerTpsPoint(POINT_CONFIG_PUSH_SUCCESS);
    tpsControlManager.registerTpsPoint(POINT_CONFIG_PUSH_FAIL);
}
/** All three config-push TPS points must be registered exactly once. */
@Test
void testRegisterTpsPoint() {
    rpcConfigChangeNotifier.registerTpsPoint();

    Mockito.verify(tpsControlManager, Mockito.times(1)).registerTpsPoint(eq(POINT_CONFIG_PUSH));
    Mockito.verify(tpsControlManager, Mockito.times(1)).registerTpsPoint(eq(POINT_CONFIG_PUSH_SUCCESS));
    Mockito.verify(tpsControlManager, Mockito.times(1)).registerTpsPoint(eq(POINT_CONFIG_PUSH_FAIL));
}
/** Converts dash-separated text to camelCase, without the preserve-special-segments option. */
public static String dashToCamelCase(final String text) {
    return dashToCamelCase(text, false);
}
@Test public void testDashToCamelCaseSkipQuotedOrKeyed() { String line = "camel.component.rabbitmq.args[queue.x-queue-type]"; // no preserve assertEquals("camel.component.rabbitmq.args[queue.xQueueType]", dashToCamelCase(line)); // preserved assertEquals(line, dashToCamelCase(line, true)); }
/** Delegates to each variable's addToIfExists, letting it decide whether to add itself to the context. */
public void addToIfExists(EnvironmentVariableContext variableContext) {
    for (EnvironmentVariable variable : this) {
        variable.addToIfExists(variableContext);
    }
}
/** Only variables whose names already exist as context properties are (re)set. */
@Test
void addToIfExists_shouldAddEnvironmentVariableToEnvironmentVariableContext() {
    final EnvironmentVariableContext context = mock(EnvironmentVariableContext.class);
    final EnvironmentVariables variables = new EnvironmentVariables(
        new EnvironmentVariable("foo", "bar"),
        new EnvironmentVariable("baz", "car", true)
    );
    // "foo" is absent from the context, "baz" is present.
    when(context.hasProperty("foo")).thenReturn(false);
    when(context.hasProperty("baz")).thenReturn(true);

    variables.addToIfExists(context);

    verify(context, times(0)).setProperty("foo", "bar", false);
    verify(context, times(1)).setProperty("baz", "car", true);
}
/** Records one event occurrence by incrementing the backing counter. */
@Override
public void markEvent() {
    this.counter.inc();
}
/** markEvent (and the multi-event overload) must be reflected in the meter's count. */
@Test
void testMarkEvent() {
    Counter backing = new SimpleCounter();
    Meter meter = new MeterView(backing);

    assertThat(meter.getCount()).isEqualTo(0);
    meter.markEvent();
    assertThat(meter.getCount()).isEqualTo(1);
    meter.markEvent(2);
    assertThat(meter.getCount()).isEqualTo(3);
}
/**
 * Aggregates per-partition double column statistics into one ColumnStatisticsObj.
 *
 * Two regimes, read from this body:
 * 1) stats present for every requested partition (or fewer than 2 partitions):
 *    directly merge min/max/nulls/NDV across partitions; NDV comes from merged
 *    bitvector estimators when every partition has a mergeable one, otherwise
 *    from a density-function or ndvTuner estimate clamped to
 *    [max(partition NDVs), sum(partition NDVs)];
 * 2) stats missing for some partitions: extrapolate from the available ones,
 *    first coalescing runs of adjacent partitions whose estimators merge.
 * A merged KLL histogram is attached when available.
 *
 * NOTE(review): behavior of the project-specific inspectors/mergers/extrapolate
 * helpers is not visible here; the description above covers only this method body.
 * (Code below is token-identical to the original; only formatting restored.)
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType;
  String colName = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
  NumDistinctValueEstimator ndvEstimator = null;
  boolean areAllNDVEstimatorsMergeable = true;
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    if (statsObj == null) {
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
      LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName, doAllPartitionContainStats);
    }
    DoubleColumnStatsDataInspector columnStatsData = doubleInspectorFromStats(cso);
    // check if we can merge NDV estimators
    if (columnStatsData.getNdvEstimator() == null) {
      areAllNDVEstimatorsMergeable = false;
      break;
    } else {
      NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
      if (ndvEstimator == null) {
        ndvEstimator = estimator;
      } else {
        if (!ndvEstimator.canMerge(estimator)) {
          areAllNDVEstimatorsMergeable = false;
          break;
        }
      }
    }
  }
  if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
    ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
  }
  LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
    DoubleColumnStatsDataInspector aggregateData = null;
    long lowerBound = 0;
    long higherBound = 0;
    double densityAvgSum = 0.0;
    DoubleColumnStatsMerger merger = new DoubleColumnStatsMerger();
    for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
      ColumnStatisticsObj cso = csp.getColStatsObj();
      DoubleColumnStatsDataInspector newData = doubleInspectorFromStats(cso);
      lowerBound = Math.max(lowerBound, newData.getNumDVs());
      higherBound += newData.getNumDVs();
      densityAvgSum += (newData.getHighValue() - newData.getLowValue()) / newData.getNumDVs();
      if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setLowValue(merger.mergeLowValue(
            merger.getLowValue(aggregateData), merger.getLowValue(newData)));
        aggregateData.setHighValue(merger.mergeHighValue(
            merger.getHighValue(aggregateData), merger.getHighValue(newData)));
        aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
        aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      long estimation;
      if (useDensityFunctionForNDVEstimation) {
        // We have estimation, lowerbound and higherbound. We use estimation
        // if it is between lowerbound and higherbound.
        double densityAvg = densityAvgSum / partNames.size();
        estimation = (long) ((aggregateData.getHighValue() - aggregateData.getLowValue()) / densityAvg);
        if (estimation < lowerBound) {
          estimation = lowerBound;
        } else if (estimation > higherBound) {
          estimation = higherBound;
        }
      } else {
        estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
      }
      aggregateData.setNumDVs(estimation);
    }
    columnStatisticsData.setDoubleStats(aggregateData);
  } else {
    // TODO: bail out if missing stats are over a certain threshold
    // we need extrapolation
    LOG.debug("start extrapolation for {}", colName);
    Map<String, Integer> indexMap = new HashMap<>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
    // while we scan the css, we also get the densityAvg, lowerbound and
    // higherbound when useDensityFunctionForNDVEstimation is true.
    double densityAvgSum = 0.0;
    if (!areAllNDVEstimatorsMergeable) {
      // if not every partition uses bitvector for ndv, we just fall back to
      // the traditional extrapolation methods.
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DoubleColumnStatsData newData = cso.getStatsData().getDoubleStats();
        if (useDensityFunctionForNDVEstimation && newData.isSetLowValue() && newData.isSetHighValue()) {
          densityAvgSum += (newData.getHighValue() - newData.getLowValue()) / newData.getNumDVs();
        }
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      DoubleColumnStatsData aggregateData = null;
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DoubleColumnStatsDataInspector newData = doubleInspectorFromStats(cso);
        // newData.isSetBitVectors() should be true for sure because we
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setDoubleStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            if (useDensityFunctionForNDVEstimation) {
              densityAvgSum += (aggregateData.getHighValue() - aggregateData.getLowValue()) / aggregateData.getNumDVs();
            }
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
            ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          aggregateData.setLowValue(Math.min(aggregateData.getLowValue(), newData.getLowValue()));
          aggregateData.setHighValue(Math.max(aggregateData.getHighValue(), newData.getHighValue()));
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (length > 0) {
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setDoubleStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += (aggregateData.getHighValue() - aggregateData.getLowValue()) / aggregateData.getNumDVs();
        }
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
        adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
  }
  LOG.debug(
      "Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
      colName, columnStatisticsData.getDoubleStats().getNumDVs(), partNames.size(),
      colStatsWithSourceInfo.size());
  KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
  if (mergedKllHistogramEstimator != null) {
    columnStatisticsData.getDoubleStats().setHistogram(mergedKllHistogramEstimator.serialize());
  }
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
/**
 * Verifies aggregation of double column stats when stats are missing for some
 * partitions (part2 here): the aggregator must extrapolate rather than merge HLLs.
 */
@Test
public void testAggregateMultiStatsWhenOnlySomeAvailable() throws MetaException {
  List<String> partitions = Arrays.asList("part1", "part2", "part3", "part4");
  // Stats exist only for partitions 1, 3 and 4; partition 2 has none.
  ColumnStatisticsData data1 = new ColStatsBuilder<>(double.class).numNulls(1).numDVs(3)
      .low(1d).high(3d).hll(1, 2, 3).build();
  ColumnStatisticsData data3 = new ColStatsBuilder<>(double.class).numNulls(3).numDVs(1)
      .low(7d).high(7d).hll(7).build();
  ColumnStatisticsData data4 = new ColStatsBuilder<>(double.class).numNulls(2).numDVs(3)
      .low(3d).high(5d).hll(3, 4, 5).build();
  List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
      createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
      createStatsWithInfo(data3, TABLE, COL, partitions.get(2)),
      createStatsWithInfo(data4, TABLE, COL, partitions.get(3)));
  DoubleColumnStatsAggregator aggregator = new DoubleColumnStatsAggregator();
  ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false);
  // hll in case of missing stats is left as null, only numDVs is updated
  // NOTE(review): high(9.4) comes from the aggregator's extrapolation of the
  // missing partition, not from any input value — confirm against the aggregator.
  ColumnStatisticsData expectedStats = new ColStatsBuilder<>(double.class).numNulls(8).numDVs(4)
      .low(1d).high(9.4).build();
  assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
}
/**
 * Builds the list of {@link UpdateRequirement}s that must hold for the given
 * metadata updates to be committed safely against {@code base}.
 *
 * <p>Always requires the table UUID to be unchanged, then derives further
 * requirements from each individual update.
 *
 * @param base current table metadata; must not be null
 * @param metadataUpdates updates to derive requirements from; must not be null
 * @return the assembled update requirements
 */
public static List<UpdateRequirement> forUpdateTable(
    TableMetadata base, List<MetadataUpdate> metadataUpdates) {
  Preconditions.checkArgument(null != base, "Invalid table metadata: null");
  Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");

  Builder requirements = new Builder(base, false);
  // The table identity must never change across a commit.
  requirements.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
  for (MetadataUpdate metadataUpdate : metadataUpdates) {
    requirements.update(metadataUpdate);
  }
  return requirements.build();
}
/**
 * Verifies that an AddPartitionSpec requirement fails validation when the
 * last assigned partition id changed between base and updated metadata.
 */
@Test
public void addPartitionSpecFailure() {
  // Base metadata says last partition id is 3, but the updated metadata says 4.
  when(metadata.lastAssignedPartitionId()).thenReturn(3);
  when(updated.lastAssignedPartitionId()).thenReturn(4);
  List<UpdateRequirement> requirements =
      UpdateRequirements.forUpdateTable(
          metadata,
          ImmutableList.of(new MetadataUpdate.AddPartitionSpec(PartitionSpec.unpartitioned())));
  assertThatThrownBy(() -> requirements.forEach(req -> req.validate(updated)))
      .isInstanceOf(CommitFailedException.class)
      .hasMessage("Requirement failed: last assigned partition id changed: expected id 3 != 4");
}
/**
 * Recursively checks whether the given Hive type contains a TIMESTAMP anywhere
 * in its structure (directly, or nested inside list/map/struct/union types).
 *
 * @param type the Hive type to inspect
 * @return true if the type or any nested element type is TIMESTAMP
 */
public static boolean typeContainsTimestamp(TypeInfo type) {
  Category category = type.getCategory();
  switch (category) {
    case PRIMITIVE:
      // Leaf case: only an exact TIMESTAMP primitive matches.
      return type.getTypeName().equals(TIMESTAMP_TYPE_NAME);
    case LIST:
      return typeContainsTimestamp(((ListTypeInfo) type).getListElementTypeInfo());
    case MAP: {
      MapTypeInfo mapType = (MapTypeInfo) type;
      // A timestamp may hide in either the key or the value type.
      return typeContainsTimestamp(mapType.getMapKeyTypeInfo())
          || typeContainsTimestamp(mapType.getMapValueTypeInfo());
    }
    case STRUCT: {
      StructTypeInfo structType = (StructTypeInfo) type;
      return structType.getAllStructFieldTypeInfos().stream()
          .anyMatch(HoodieColumnProjectionUtils::typeContainsTimestamp);
    }
    case UNION: {
      UnionTypeInfo unionType = (UnionTypeInfo) type;
      return unionType.getAllUnionObjectTypeInfos().stream()
          .anyMatch(HoodieColumnProjectionUtils::typeContainsTimestamp);
    }
    default:
      return false;
  }
}
/**
 * Exercises typeContainsTimestamp() across primitives, lists, maps, structs,
 * unions, and a deeply nested combination, pairing each positive case with a
 * matching negative case.
 */
@Test
void testTypeContainsTimestamp() {
  // Primitive: timestamp vs string.
  String col1 = "timestamp";
  TypeInfo typeInfo1 = TypeInfoUtils.getTypeInfosFromTypeString(col1).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo1));
  String col2 = "string";
  TypeInfo typeInfo2 = TypeInfoUtils.getTypeInfosFromTypeString(col2).get(0);
  assertFalse(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo2));
  // List element type.
  String col3 = "array<timestamp>";
  TypeInfo typeInfo3 = TypeInfoUtils.getTypeInfosFromTypeString(col3).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo3));
  String col4 = "array<string>";
  TypeInfo typeInfo4 = TypeInfoUtils.getTypeInfosFromTypeString(col4).get(0);
  assertFalse(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo4));
  // Map value type.
  String col5 = "map<string,timestamp>";
  TypeInfo typeInfo5 = TypeInfoUtils.getTypeInfosFromTypeString(col5).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo5));
  String col6 = "map<string,string>";
  TypeInfo typeInfo6 = TypeInfoUtils.getTypeInfosFromTypeString(col6).get(0);
  assertFalse(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo6));
  // Struct field type.
  String col7 = "struct<name1:string,name2:timestamp>";
  TypeInfo typeInfo7 = TypeInfoUtils.getTypeInfosFromTypeString(col7).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo7));
  String col8 = "struct<name1:string,name2:string>";
  TypeInfo typeInfo8 = TypeInfoUtils.getTypeInfosFromTypeString(col8).get(0);
  assertFalse(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo8));
  // Union member type.
  String col9 = "uniontype<string,timestamp>";
  TypeInfo typeInfo9 = TypeInfoUtils.getTypeInfosFromTypeString(col9).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo9));
  String col10 = "uniontype<string,int>";
  TypeInfo typeInfo10 = TypeInfoUtils.getTypeInfosFromTypeString(col10).get(0);
  assertFalse(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo10));
  // Deep nesting: timestamp buried in union -> map -> array.
  String col11 = "uniontype<string,int,map<string,array<timestamp>>>";
  TypeInfo typeInfo11 = TypeInfoUtils.getTypeInfosFromTypeString(col11).get(0);
  assertTrue(HoodieColumnProjectionUtils.typeContainsTimestamp(typeInfo11));
}
/**
 * Validates that the given filter predicate is compatible with the schema,
 * throwing if either argument is null or the predicate references columns in
 * a way the schema cannot satisfy.
 *
 * @param predicate the filter predicate to validate; must not be null
 * @param schema the Parquet message schema to validate against; must not be null
 */
public static void validate(FilterPredicate predicate, MessageType schema) {
  Objects.requireNonNull(predicate, "predicate cannot be null");
  Objects.requireNonNull(schema, "schema cannot be null");
  // The visitor performs the actual per-column compatibility checks.
  SchemaCompatibilityValidator validator = new SchemaCompatibilityValidator(schema);
  predicate.accept(validator);
}
/** A schema-compatible predicate must pass validation without throwing. */
@Test
public void testValidType() {
  validate(complexValid, schema);
}
/**
 * Acquires (blocking) the write lock associated with the given key.
 * Locks for different keys are independent; callers must release via the
 * corresponding release method for the same key.
 *
 * @param key name identifying the lock to acquire
 */
public void acquireWriteLock(String key) {
  getLock(key).writeLock().lock();
}
/**
 * Write locks for different keys must be independent: a second thread taking
 * the "bar" lock must not block while this thread holds the "foo" lock.
 * NOTE(review): relies on a fixed 1s sleep for cross-thread visibility —
 * inherently timing-sensitive.
 */
@Test
public void shouldNotEnforceMutualExclusionOfWriteLockForDifferentNames() throws InterruptedException {
  readWriteLock.acquireWriteLock("foo");
  new Thread(() -> {
    readWriteLock.acquireWriteLock("bar");
    numberOfLocks++;
  }).start();
  Thread.sleep(1000);
  assertThat(numberOfLocks, is(1));
}
static byte[] adaptArray(byte[] ftdiData) { int length = ftdiData.length; if(length > 64) { int n = 1; int p = 64; // Precalculate length without FTDI headers while(p < length) { n++; p = n*64; } int realLength = length - n*2; byte[] data = new byte[realLength]; copyData(ftdiData, data); return data; } else if (length == 2) // special case optimization that returns the same instance. { return EMPTY_BYTE_ARRAY; } else { return Arrays.copyOfRange(ftdiData, 2, length); } }
/**
 * A chunk of exactly 64 bytes is a single packet: adaptArray must drop only
 * the leading 2-byte status header and keep the remaining 62 payload bytes.
 */
@Test
public void fullWithHeaders() {
  byte[] withHeaders = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64};
  byte[] wanted = Arrays.copyOfRange(withHeaders, 2, 64);
  Assert.assertArrayEquals(wanted, FTDISerialDevice.adaptArray(withHeaders));
}
@Override public String serviceToUrl(String protocol, String serviceId, String tag, String requestKey) { if(StringUtils.isBlank(serviceId)) { logger.debug("The serviceId cannot be blank"); return null; } URL url = loadBalance.select(discovery(protocol, serviceId, tag), serviceId, tag, requestKey); if (url != null) { logger.debug("Final url after load balance = {}.", url); // construct a url in string return protocol + "://" + url.getHost() + ":" + url.getPort(); } else { logger.debug("The service: {} cannot be found from service discovery.", serviceId); return null; } }
/**
 * Each environment tag (0000/0001/0002) must resolve the same service id to a
 * distinct local port, proving tags are honored during discovery/selection.
 */
@Test
public void testServiceToUrlWithEnvironment() {
  String s = cluster.serviceToUrl("https", "com.networknt.portal.command-1.0.0", "0000", null);
  System.out.println(s);
  Assert.assertTrue("https://localhost:8440".equals(s));
  s = cluster.serviceToUrl("https", "com.networknt.portal.command-1.0.0", "0001", null);
  System.out.println(s);
  Assert.assertTrue("https://localhost:8441".equals(s));
  s = cluster.serviceToUrl("https", "com.networknt.portal.command-1.0.0", "0002", null);
  System.out.println(s);
  Assert.assertTrue("https://localhost:8442".equals(s));
}
/**
 * Stops this lifecycle runnable idempotently.
 *
 * <p>Three states are distinguished via the boxed Boolean in {@code running}:
 * null (never started — just mark stopped), false (already stopped — no-op),
 * true (running — invoke {@code doStop()} and mark stopped). Exceptions from
 * {@code doStop()} are logged and swallowed so stop always completes.
 */
@Override
public final void stop() {
  Boolean running = this.running.get();
  if (null == running) {
    // Never started: record the stopped state without calling doStop().
    this.running.set(false);
    return;
  }
  if (!running) {
    // Already stopped: stop() is idempotent.
    return;
  }
  LocalDateTime startTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(startTimeMillis), ZoneId.systemDefault());
  log.info("stop lifecycle executor {}, startTime={}, cost {} ms", this,
      startTime.format(DateTimeFormatterFactory.getStandardFormatter()), System.currentTimeMillis() - startTimeMillis);
  try {
    doStop();
    // CHECKSTYLE:OFF
  } catch (final SQLException | RuntimeException ex) {
    // CHECKSTYLE:ON
    // Best-effort shutdown: log and continue so the running flag is cleared.
    log.warn("doStop failed", ex);
  }
  this.running.set(false);
}
/** stop() before start() must never invoke doStop(), even when called twice. */
@Test
void assertNoStopBeforeStarting() {
  FixturePipelineLifecycleRunnable executor = new FixturePipelineLifecycleRunnable();
  executor.stop();
  executor.stop();
  assertThat(executor.doStopCount.get(), is(0));
}
/**
 * Returns a parser for the given FTP SYST response, defaulting the timestamp
 * parsing to the JVM's default time zone.
 *
 * @param system the server's SYST identification string
 * @return a composite parser suited to the detected server type
 */
public CompositeFileEntryParser getParser(final String system) {
  return this.getParser(system, TimeZone.getDefault());
}
/**
 * A z/OS (MVS) SYST banner must yield a parser that can still handle
 * Unix-style listing lines after preparsing.
 */
@Test
public void testGetMVS() {
  final CompositeFileEntryParser parser = new FTPParserSelector().getParser("MVS is the operating system of this server. FTP Server is running on z/OS.");
  final String line = "drwxr-xr-x 6 START2 SYS1 8192 Oct 28 2008 ADCD";
  parser.preParse(Arrays.asList("total 66", line));
  assertNotNull(parser.parseFTPEntry(line));
}
/**
 * Applies an add/drop-column schema change to an OLAP table in place,
 * under the table's intensive write lock.
 *
 * <p>For every index the schema is swapped, the schema version bumped, and a
 * FINISHED {@link SchemaChangeJobV2} is recorded so history/replay stays
 * consistent. When not replaying, the change is also written to the edit log.
 * Related materialized views referencing dropped/modified columns are
 * inactivated.
 *
 * @param db database owning the table
 * @param olapTable table to modify; must be in NORMAL state
 * @param indexSchemaMap new full column list per index id
 * @param indexes new table-level index definitions
 * @param jobId id for the recorded schema-change job
 * @param txnId watershed transaction id for the job
 * @param indexToNewSchemaId new schema id per index; may be null on replay of
 *        journals written by an older FE version
 * @param isReplay true when replaying an edit-log entry (skips re-logging)
 * @throws DdlException if the table is mid-ROLLUP or a sort key column is missing
 */
public void modifyTableAddOrDrop(Database db, OlapTable olapTable,
                                 Map<Long, List<Column>> indexSchemaMap,
                                 List<Index> indexes, long jobId, long txnId,
                                 Map<Long, Long> indexToNewSchemaId, boolean isReplay)
        throws DdlException, NotImplementedException {
    Locker locker = new Locker();
    locker.lockTablesWithIntensiveDbLock(db, Lists.newArrayList(olapTable.getId()), LockType.WRITE);
    try {
        LOG.debug("indexSchemaMap:{}, indexes:{}", indexSchemaMap, indexes);
        if (olapTable.getState() == OlapTableState.ROLLUP) {
            throw new DdlException("Table[" + olapTable.getName() + "] is doing ROLLUP job");
        }
        // for now table's state can only be NORMAL
        Preconditions.checkState(olapTable.getState() == OlapTableState.NORMAL, olapTable.getState().name());
        olapTable.setState(OlapTableState.UPDATING_META);
        SchemaChangeJobV2 schemaChangeJob = new SchemaChangeJobV2(jobId, db.getId(), olapTable.getId(),
                olapTable.getName(), 1000);
        // update base index schema
        Set<String> modifiedColumns = Sets.newHashSet();
        // Only collect modified columns when MVs exist; used later to inactivate them.
        boolean hasMv = !olapTable.getRelatedMaterializedViews().isEmpty();
        for (Map.Entry<Long, List<Column>> entry : indexSchemaMap.entrySet()) {
            Long idx = entry.getKey();
            List<Column> indexSchema = entry.getValue();
            // modify the copied indexMeta and put the update result in the indexIdToMeta
            MaterializedIndexMeta currentIndexMeta = olapTable.getIndexMetaByIndexId(idx).shallowCopy();
            List<Column> originSchema = currentIndexMeta.getSchema();
            if (hasMv) {
                modifiedColumns.addAll(AlterHelper.collectDroppedOrModifiedColumns(originSchema, indexSchema));
            }
            // Re-resolve sort key positions in the new schema by column unique id.
            List<Integer> sortKeyUniqueIds = currentIndexMeta.getSortKeyUniqueIds();
            List<Integer> newSortKeyIdxes = new ArrayList<>();
            if (sortKeyUniqueIds != null) {
                for (Integer uniqueId : sortKeyUniqueIds) {
                    Optional<Column> col = indexSchema.stream().filter(c -> c.getUniqueId() == uniqueId).findFirst();
                    if (col.isEmpty()) {
                        throw new DdlException("Sork key col with unique id: " + uniqueId + " not exists");
                    }
                    int sortKeyIdx = indexSchema.indexOf(col.get());
                    newSortKeyIdxes.add(sortKeyIdx);
                }
            }
            currentIndexMeta.setSchema(indexSchema);
            if (!newSortKeyIdxes.isEmpty()) {
                currentIndexMeta.setSortKeyIdxes(newSortKeyIdxes);
            }
            int currentSchemaVersion = currentIndexMeta.getSchemaVersion();
            int newSchemaVersion = currentSchemaVersion + 1;
            currentIndexMeta.setSchemaVersion(newSchemaVersion);
            // update the indexIdToMeta
            olapTable.getIndexIdToMeta().put(idx, currentIndexMeta);
            // if FE upgrade from old version and replay journal, the indexToNewSchemaId maybe null
            if (indexToNewSchemaId != null) {
                currentIndexMeta.setSchemaId(indexToNewSchemaId.get(idx));
            }
            schemaChangeJob.addIndexSchema(idx, idx, olapTable.getIndexNameById(idx), newSchemaVersion,
                    currentIndexMeta.getSchemaHash(), currentIndexMeta.getShortKeyColumnCount(), indexSchema);
        }
        olapTable.setIndexes(indexes);
        olapTable.rebuildFullSchema();
        // If modified columns are already done, inactive related mv
        inactiveRelatedMaterializedViews(db, olapTable, modifiedColumns);
        if (!isReplay) {
            // Persist the change so followers / restart can replay it.
            TableAddOrDropColumnsInfo info = new TableAddOrDropColumnsInfo(db.getId(), olapTable.getId(),
                    indexSchemaMap, indexes, jobId, txnId, indexToNewSchemaId);
            LOG.debug("logModifyTableAddOrDrop info:{}", info);
            GlobalStateMgr.getCurrentState().getEditLog().logModifyTableAddOrDrop(info);
        }
        // The change is applied synchronously, so the job is recorded as FINISHED.
        schemaChangeJob.setWatershedTxnId(txnId);
        schemaChangeJob.setJobState(AlterJobV2.JobState.FINISHED);
        schemaChangeJob.setFinishedTimeMs(System.currentTimeMillis());
        this.addAlterJobV2(schemaChangeJob);
        olapTable.lastSchemaUpdateTime.set(System.nanoTime());
        LOG.info("finished modify table's add or drop column(field). table: {}, is replay: {}",
                olapTable.getName(), isReplay);
    } finally {
        // Always restore NORMAL state and release the lock, even on failure.
        olapTable.setState(OlapTableState.NORMAL);
        locker.unLockTablesWithIntensiveDbLock(db, Lists.newArrayList(olapTable.getId()), LockType.WRITE);
    }
}
/**
 * modifyTableAddOrDrop must record one alter job per invocation (both normal
 * and replay paths) and must reject tables that are mid-ROLLUP.
 */
@Test
public void testModifyTableAddOrDropColumns() {
  GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
  Database db = globalStateMgr.getDb("test");
  OlapTable tbl = (OlapTable) db.getTable("sc_dup2");
  Map<Long, AlterJobV2> alterJobs = globalStateMgr.getSchemaChangeHandler().getAlterJobsV2();
  // origin columns
  Map<Long, List<Column>> indexSchemaMap = new HashMap<>();
  Map<Long, Long> indexToNewSchemaId = new HashMap<>();
  for (Map.Entry<Long, List<Column>> entry : tbl.getIndexIdToSchema().entrySet()) {
    indexSchemaMap.put(entry.getKey(), new LinkedList<>(entry.getValue()));
    indexToNewSchemaId.put(entry.getKey(), globalStateMgr.getNextId());
  }
  List<Index> newIndexes = tbl.getCopiedIndexes();
  // Normal path: one new alter job expected.
  Assertions.assertDoesNotThrow(
      () -> ((SchemaChangeHandler) GlobalStateMgr.getCurrentState().getAlterJobMgr().getSchemaChangeHandler())
          .modifyTableAddOrDrop(db, tbl, indexSchemaMap, newIndexes, 100, 100, indexToNewSchemaId, false));
  jobSize++;
  Assertions.assertEquals(jobSize, alterJobs.size());
  // Replay path: also records a job.
  Assertions.assertDoesNotThrow(
      () -> ((SchemaChangeHandler) GlobalStateMgr.getCurrentState().getAlterJobMgr().getSchemaChangeHandler())
          .modifyTableAddOrDrop(db, tbl, indexSchemaMap, newIndexes, 101, 101, indexToNewSchemaId, true));
  jobSize++;
  Assertions.assertEquals(jobSize, alterJobs.size());
  // A table in ROLLUP state must be rejected; restore its state afterwards.
  OlapTableState beforeState = tbl.getState();
  tbl.setState(OlapTableState.ROLLUP);
  Assertions.assertThrows(DdlException.class,
      () -> ((SchemaChangeHandler) GlobalStateMgr.getCurrentState().getAlterJobMgr().getSchemaChangeHandler())
          .modifyTableAddOrDrop(db, tbl, indexSchemaMap, newIndexes, 102, 102, indexToNewSchemaId, false));
  tbl.setState(beforeState);
}
/**
 * Returns {@code true} if the given collection is {@code null} or has no elements.
 *
 * <p>Uses a bounded wildcard instead of the raw {@code Collection} type; the
 * erasure is identical, so existing callers are unaffected.
 *
 * @param collection the collection to check, may be {@code null}
 * @return {@code true} when {@code collection} is {@code null} or empty
 */
public static boolean isEmpty(Collection<?> collection) {
    return collection == null || collection.isEmpty();
}
/** Empty list is empty; a one-element list is not. */
@Test
public void testIsEmpty() {
  assertTrue(isEmpty(Collections.emptyList()));
  assertFalse(isEmpty(singletonList(23)));
}
/**
 * Deletes the note blob identified by note id and path from GCS.
 *
 * @param noteId note identifier; must be non-empty
 * @param notePath path of the note within the bucket
 * @param subject authentication info of the caller (unused for GCS access)
 * @throws IOException if the blob does not exist or the storage call fails
 */
@Override
public void remove(String noteId, String notePath, AuthenticationInfo subject) throws IOException {
  Preconditions.checkArgument(StringUtils.isNotEmpty(noteId));
  BlobId blobId = makeBlobId(noteId, notePath);
  boolean deleted;
  try {
    deleted = storage.delete(blobId);
  } catch (StorageException se) {
    // Wrap storage-layer failures so callers only see IOException.
    throw new IOException("Could not remove " + blobId.toString() + ": " + se.getMessage(), se);
  }
  if (!deleted) {
    // delete() returning false means the blob was not there to begin with.
    throw new IOException("Tried to remove nonexistent blob " + blobId.toString());
  }
}
/** Removing a note that does not exist must surface as an IOException. */
@Test
void testRemove_nonexistent() throws Exception {
  zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL);
  this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage);
  assertThrows(IOException.class, () -> {
    notebookRepo.remove("id", "/name", AUTH_INFO);
  });
}
/**
 * Fetches a pluggable artifact via its artifact plugin and publishes progress
 * to the build console.
 *
 * <p>Downloads the artifact metadata file first, registers a console-log
 * processor for the duration of the fetch (always removed in the finally
 * block), invokes the plugin's fetch, and merges any environment variables the
 * plugin returned into the build's context. Any failure is logged to the
 * console and rethrown as a RuntimeException to fail the job.
 */
@Override
public void build(DefaultGoPublisher publisher, EnvironmentVariableContext environmentVariableContext,
                  TaskExtension taskExtension, ArtifactExtension artifactExtension,
                  PluginRequestProcessorRegistry pluginRequestProcessorRegistry, Charset consoleLogCharset) {
  downloadMetadataFile(publisher);
  try {
    // Route plugin console-log requests to this job's publisher while fetching.
    pluginRequestProcessorRegistry.registerProcessorFor(CONSOLE_LOG.requestName(),
        ArtifactRequestProcessor.forFetchArtifact(publisher, environmentVariableContext));
    final String message = format("[%s] Fetching pluggable artifact using plugin `%s`.",
        GoConstants.PRODUCT_NAME, artifactStore.getPluginId());
    LOGGER.info(message);
    publisher.taggedConsumeLine(TaggedStreamConsumer.OUT, message);
    List<FetchArtifactEnvironmentVariable> newEnvironmentVariables = artifactExtension.fetchArtifact(
        artifactStore.getPluginId(), artifactStore, configuration,
        getMetadataFromFile(artifactId), agentWorkingDirectory());
    // Plugins may export environment variables for downstream tasks.
    updateEnvironmentVariableContextWith(publisher, environmentVariableContext, newEnvironmentVariables);
  } catch (Exception e) {
    publisher.taggedConsumeLine(TaggedStreamConsumer.ERR, e.getMessage());
    LOGGER.error(e.getMessage(), e);
    throw new RuntimeException(e);
  } finally {
    // Always unhook the console-log processor, even on failure.
    pluginRequestProcessorRegistry.removeProcessorFor(CONSOLE_LOG.requestName());
  }
}
/**
 * build() must delegate the actual fetch to the artifact extension with the
 * store, configuration and the agent working directory.
 */
@Test
public void shouldCallArtifactExtension() {
  final FetchPluggableArtifactBuilder builder = new FetchPluggableArtifactBuilder(new RunIfConfigs(), new NullBuilder(), "", jobIdentifier, artifactStore, fetchPluggableArtifactTask.getConfiguration(), fetchPluggableArtifactTask.getArtifactId(), sourceOnServer, metadataDest.toFile(), checksumFileHandler);
  builder.build(publisher, new EnvironmentVariableContext(), null, artifactExtension, registry, UTF_8);
  verify(artifactExtension).fetchArtifact(eq(PLUGIN_ID), eq(artifactStore), eq(fetchPluggableArtifactTask.getConfiguration()), any(), eq(metadataDest.getParent().toString()));
}
/**
 * Returns a matcher that splits the raw query string into field:value tokens
 * using the precompiled splitter pattern. Exposed for testing only.
 *
 * @param queryString raw search query input
 * @return matcher positioned before the first token
 */
@VisibleForTesting
Matcher querySplitterMatcher(String queryString) {
  return QUERY_SPLITTER_PATTERN.matcher(queryString);
}
/**
 * The splitter must treat quoted values (including spaces) as single tokens
 * and yield exactly three field:value groups for this query.
 */
@Test
void querySplitterMatcher() {
  final SearchQueryParser parser = new SearchQueryParser("defaultfield", ImmutableMap.of(
      "id", SearchQueryField.create("real_id"),
      "date", SearchQueryField.create("created_at", SearchQueryField.Type.DATE))
  );
  final String queryString = "from:\"2017-10-02 12:07:01.345\" hello:world foo:=~\"bar baz\"";
  final Matcher matcher = parser.querySplitterMatcher(queryString);
  assertThat(matcher.find()).isTrue();
  assertThat(matcher.group()).isEqualTo("from:\"2017-10-02 12:07:01.345\"");
  assertThat(matcher.find()).isTrue();
  assertThat(matcher.group()).isEqualTo("hello:world");
  assertThat(matcher.find()).isTrue();
  assertThat(matcher.group()).isEqualTo("foo:=~\"bar baz\"");
  assertThat(matcher.find()).isFalse();
}
/**
 * Allocates a new application id from one of the active sub-clusters,
 * retrying across sub-clusters (bounded by the active sub-cluster count and
 * the configured retry limit) and recording router metrics / audit events on
 * success and failure.
 *
 * @param request the new-application request; must not be null
 * @return the response from the first sub-cluster that succeeds
 * @throws YarnException if the request is null or all retries fail
 */
@Override
public GetNewApplicationResponse getNewApplication(GetNewApplicationRequest request)
    throws YarnException, IOException {
  if (request == null) {
    routerMetrics.incrAppsFailedCreated();
    String errMsg = "Missing getNewApplication request.";
    RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_APP, UNKNOWN,
        TARGET_CLIENT_RM_SERVICE, errMsg);
    RouterServerUtil.logAndThrowException(errMsg, null);
  }
  long startTime = clock.getTime();
  Map<SubClusterId, SubClusterInfo> subClustersActive = federationFacade.getSubClusters(true);
  // Try calling the getNewApplication method
  List<SubClusterId> blacklist = new ArrayList<>();
  // Never retry more times than there are active sub-clusters to try.
  int activeSubClustersCount = federationFacade.getActiveSubClustersCount();
  int actualRetryNums = Math.min(activeSubClustersCount, numSubmitRetries);
  try {
    GetNewApplicationResponse response =
        ((FederationActionRetry<GetNewApplicationResponse>) (retryCount) ->
            invokeGetNewApplication(subClustersActive, blacklist, request, retryCount)).
            runWithRetries(actualRetryNums, submitIntervalTime);
    if (response != null) {
      long stopTime = clock.getTime();
      routerMetrics.succeededAppsCreated(stopTime - startTime);
      return response;
    }
  } catch (Exception e) {
    routerMetrics.incrAppsFailedCreated();
    RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_APP, UNKNOWN,
        TARGET_CLIENT_RM_SERVICE, e.getMessage());
    RouterServerUtil.logAndThrowException(e.getMessage(), e);
  }
  // Retries exhausted without a response: fail the call.
  routerMetrics.incrAppsFailedCreated();
  String errMsg = "Failed to create a new application.";
  RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_APP, UNKNOWN,
      TARGET_CLIENT_RM_SERVICE, errMsg);
  throw new YarnException(errMsg);
}
/**
 * A well-formed request must yield an application id stamped with the
 * cluster's timestamp.
 */
@Test
public void testGetNewApplication() throws YarnException, IOException {
  LOG.info("Test FederationClientInterceptor: Get New Application.");
  GetNewApplicationRequest request = GetNewApplicationRequest.newInstance();
  GetNewApplicationResponse response = interceptor.getNewApplication(request);
  Assert.assertNotNull(response);
  Assert.assertNotNull(response.getApplicationId());
  Assert.assertEquals(response.getApplicationId().getClusterTimestamp(),
      ResourceManager.getClusterTimeStamp());
}
/**
 * Sets whether the registry URL should be simplified, returning this builder
 * for chaining.
 *
 * @param simplified true to register with a simplified URL; may be null (unset)
 * @return this builder
 */
public RegistryBuilder simplified(Boolean simplified) {
  this.simplified = simplified;
  return getThis();
}
/** The simplified flag set on the builder must survive into the built registry. */
@Test
void simplified() {
  RegistryBuilder builder = new RegistryBuilder();
  builder.simplified(true);
  Assertions.assertTrue(builder.build().getSimplified());
}
/**
 * De-registers this service instance, but only when registration is enabled;
 * otherwise de-registration is silently skipped.
 */
@Override
protected void deregister() {
  if (this.registration.isRegisterEnabled()) {
    super.deregister();
  }
}
/**
 * Neither the disabled management-register path nor the enabled deregister
 * path may throw.
 */
@Test
public void testDeregister() {
  // Disabled: registerManagement must be a safe no-op.
  doReturn(false).when(registration).isRegisterEnabled();
  assertThatCode(() -> {
    polarisAutoServiceRegistration.registerManagement();
  }).doesNotThrowAnyException();
  // Enabled: deregister must complete without throwing.
  doReturn(true).when(registration).isRegisterEnabled();
  assertThatCode(() -> {
    polarisAutoServiceRegistration.deregister();
  }).doesNotThrowAnyException();
}
/**
 * Writes the pagination envelope (total hits, page index, page size) into the
 * given JSON writer, in that exact property order, and returns this for chaining.
 *
 * @param json open JSON object writer to append properties to
 * @param totalHits total number of matching results
 * @return this SearchOptions instance
 */
public SearchOptions writeJson(JsonWriter json, long totalHits) {
  json.prop("total", totalHits);
  json.prop(WebService.Param.PAGE, getPage());
  json.prop(WebService.Param.PAGE_SIZE, getLimit());
  return this;
}
/** Serialized pagination must carry total, page ("p") and page size ("ps"). */
@Test
public void writeJson() {
  SearchOptions options = new SearchOptions().setPage(3, 10);
  StringWriter json = new StringWriter();
  JsonWriter jsonWriter = JsonWriter.of(json).beginObject();
  options.writeJson(jsonWriter, 42L);
  jsonWriter.endObject().close();
  JsonAssert.assertJson(json.toString()).isSimilarTo("{\"total\": 42, \"p\": 3, \"ps\": 10}");
}
/**
 * Asserts that the message under test has every required field set.
 *
 * <p>MessageLite lacks reflection, so when {@code isInitialized()} is false we
 * can only report that some required field is missing, not which one.
 */
public void hasAllRequiredFields() {
  if (!actual.isInitialized()) {
    // MessageLite doesn't support reflection so this is the best we can do.
    failWithoutActual(
        simpleFact("expected to have all required fields set"),
        fact("but was", actualCustomStringRepresentationForProtoPackageMembersToCall()),
        simpleFact("(Lite runtime could not determine which fields were missing.)"));
  }
}
/** A message with no required fields trivially satisfies hasAllRequiredFields(). */
@Test
public void testHasAllRequiredFields_success() {
  expectThat(config.nonEmptyMessage()).hasAllRequiredFields();
}
/**
 * SQL function ST_Point(x, y): builds a JTS point from the given coordinates
 * and returns it serialized into the engine's geometry slice format.
 *
 * @param x point x coordinate
 * @param y point y coordinate
 * @return serialized geometry slice representing POINT (x y)
 */
@Description("Returns a Geometry type Point object with the given coordinate values")
@ScalarFunction("ST_Point")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stPoint(@SqlType(DOUBLE) double x, @SqlType(DOUBLE) double y) {
  return serialize(createJtsPoint(x, y));
}
/** ST_Point must round-trip integer and decimal coordinates through WKT. */
@Test
public void testSTPoint() {
  assertFunction("ST_AsText(ST_Point(1, 4))", VARCHAR, "POINT (1 4)");
  assertFunction("ST_AsText(ST_Point(122.3, 10.55))", VARCHAR, "POINT (122.3 10.55)");
}
/**
 * Parses the given XML string into a DOM Document with namespace-awareness
 * disabled (delegates to the two-argument overload).
 *
 * @param xml XML text to parse
 * @return the parsed DOM document
 */
public static Document toXmlDoc(String xml) {
  return toXmlDoc(xml, false);
}
/** A minimal document must parse and expose its root element name. */
@Test
void testParsing() {
  String xml = "<foo></foo>";
  Document doc = XmlUtils.toXmlDoc(xml);
  String rootName = doc.getDocumentElement().getNodeName();
  assertEquals("foo", rootName);
}
/**
 * Adds a top-level column to the schema.
 *
 * <p>Dotted names are rejected here because they would be ambiguous between a
 * literal name and a nested path; use {@code addColumn(parent, name, type)}
 * for nested fields.
 *
 * @param name top-level column name; must not contain '.'
 * @param type column type
 * @param doc column documentation, may be null
 * @return this update for chaining
 */
@Override
public UpdateSchema addColumn(String name, Type type, String doc) {
  Preconditions.checkArgument(
      !name.contains("."),
      "Cannot add column with ambiguous name: %s, use addColumn(parent, name, type)",
      name);
  return addColumn(null, name, type, doc);
}
/**
 * Adding a column whose name already exists — either nested under a parent or
 * at the top level — must fail with a descriptive IllegalArgumentException.
 */
@Test
public void testAddAlreadyExists() {
  // Nested duplicate: preferences.feature1 already exists.
  assertThatThrownBy(
      () -> {
        UpdateSchema update = new SchemaUpdate(SCHEMA, SCHEMA_LAST_COLUMN_ID);
        update.addColumn("preferences", "feature1", Types.BooleanType.get());
      })
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Cannot add column, name already exists: preferences.feature1");
  // Top-level duplicate: preferences already exists.
  assertThatThrownBy(
      () -> {
        UpdateSchema update = new SchemaUpdate(SCHEMA, SCHEMA_LAST_COLUMN_ID);
        update.addColumn("preferences", Types.BooleanType.get());
      })
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Cannot add column, name already exists: preferences");
}
/**
 * Returns whether this authentication integration is usable: the enabled flag
 * must be set true AND both OAuth client id and secret must be configured.
 *
 * <p>The enabled property is expected to have a declared default;
 * DEFAULT_VALUE_MISSING signals a programming error if it does not.
 */
public boolean isEnabled() {
  return config.getBoolean(ENABLED).orElseThrow(DEFAULT_VALUE_MISSING) && clientId() != null && clientSecret() != null;
}
/**
 * With client id and secret present, the enabled property alone must toggle
 * isEnabled().
 */
@Test
public void is_enabled() {
  settings.setProperty("sonar.auth.bitbucket.clientId.secured", "id");
  settings.setProperty("sonar.auth.bitbucket.clientSecret.secured", "secret");
  settings.setProperty("sonar.auth.bitbucket.enabled", true);
  assertThat(underTest.isEnabled()).isTrue();
  settings.setProperty("sonar.auth.bitbucket.enabled", false);
  assertThat(underTest.isEnabled()).isFalse();
}
/**
 * Factory for a YarnApplicationFileUploader bound to the given file system,
 * application home directory, provided lib directories and replication factor.
 *
 * @param fileSystem file system used to upload local resources
 * @param homeDirectory base directory for this application's files
 * @param providedLibDirs pre-uploaded library directories to register
 * @param applicationId application the uploads belong to
 * @param fileReplication HDFS replication factor for uploaded files
 * @return a new uploader instance
 * @throws IOException if the uploader cannot initialize against the file system
 */
static YarnApplicationFileUploader from(
    final FileSystem fileSystem,
    final Path homeDirectory,
    final List<Path> providedLibDirs,
    final ApplicationId applicationId,
    final int fileReplication)
    throws IOException {
  return new YarnApplicationFileUploader(
      fileSystem, homeDirectory, providedLibDirs, applicationId, fileReplication);
}
/**
 * Configuring a provided lib dir that points at the reserved "usrlib"
 * directory must be rejected at uploader construction time.
 */
@Test
void testRegisterProvidedLocalResourcesWithNotAllowedUsrLib(@TempDir File flinkHomeDir)
    throws IOException {
  final File flinkLibDir = new File(flinkHomeDir, "lib");
  final File flinkUsrLibDir = new File(flinkHomeDir, "usrlib");
  final Map<String, String> libJars = getLibJars();
  final Map<String, String> usrLibJars = getUsrLibJars();
  generateFilesInDirectory(flinkLibDir, libJars);
  generateFilesInDirectory(flinkUsrLibDir, usrLibJars);
  final List<Path> providedLibDirs = new ArrayList<>();
  providedLibDirs.add(new Path(flinkLibDir.toURI()));
  // Including usrlib here is the invalid configuration under test.
  providedLibDirs.add(new Path(flinkUsrLibDir.toURI()));
  assertThatThrownBy(
      () ->
          YarnApplicationFileUploader.from(
              FileSystem.get(new YarnConfiguration()),
              new Path(flinkHomeDir.getPath()),
              providedLibDirs,
              ApplicationId.newInstance(0, 0),
              DFSConfigKeys.DFS_REPLICATION_DEFAULT))
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage(
          "Provided lib directories, configured via %s, should not include %s.",
          YarnConfigOptions.PROVIDED_LIB_DIRS.key(),
          ConfigConstants.DEFAULT_FLINK_USR_LIB_DIR);
}
/**
 * Encrypts the given secret with a freshly generated random salt and the given
 * PBE iteration count.
 *
 * <p>The result is a self-describing token of the form
 * {@code base64(salt) + ":" + iterations + ":" + base64(ciphertext)} so that
 * decryption can recover the salt and iteration count.
 *
 * @param secretStr plaintext secret (UTF-8 encoded before encryption)
 * @param iterations PBE iteration count to use
 * @return the encoded salt/iterations/ciphertext token
 * @throws Exception if the underlying cipher transformation fails
 */
protected String encrypt(String secretStr, int iterations) throws Exception {
  // Fresh random salt per encryption; length is configurable.
  byte[] salt = new byte[saltLengthBytes];
  new SecureRandom().nextBytes(salt);
  byte[] cipherText =
      transform(Cipher.ENCRYPT_MODE, secretStr.getBytes(StandardCharsets.UTF_8), salt, iterations);
  Base64.Encoder encoder = Base64.getEncoder();
  // Base64 output is pure ASCII, so encodeToString matches the UTF-8 decoding
  // of the raw encoded bytes exactly.
  return encoder.encodeToString(salt) + ":" + iterations + ":" + encoder.encodeToString(cipherText);
}
/** Encrypting with an empty master password must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testEncryptionFailWithEmptyPassword() throws Exception {
  assumeDefaultAlgorithmsSupported();
  AbstractPbeReplacer replacer = createAndInitReplacer("", new Properties());
  replacer.encrypt("test", 1);
}
/**
 * Fails if the subject is the very same instance (reference equality) as the
 * given object. Two equal-but-distinct objects pass this assertion.
 *
 * @param unexpected the instance the subject must not be identical to
 */
public final void isNotSameInstanceAs(@Nullable Object unexpected) {
  if (actual != unexpected) {
    return;
  }
  /*
   * We use actualCustomStringRepresentation() because it might be overridden to be better than
   * actual.toString()/unexpected.toString().
   */
  failWithoutActual(
      fact("expected not to be specific instance", actualCustomStringRepresentation()));
}
/** A null subject is never the same instance as a non-null object. */
@Test
public void isNotSameInstanceAsWithNulls() {
  Object o = null;
  assertThat(o).isNotSameInstanceAs("a");
}
public static boolean regionMatches(final CharSequence cs, final boolean ignoreCase, final int thisStart, final CharSequence substring, final int start, final int length) { if (cs instanceof String && substring instanceof String) { return ((String) cs).regionMatches(ignoreCase, thisStart, (String) substring, start, length); } int index1 = thisStart; int index2 = start; int tmpLen = length; while (tmpLen-- > 0) { final char c1 = cs.charAt(index1++); final char c2 = substring.charAt(index2++); if (c1 == c2) { continue; } if (!ignoreCase) { return false; } // The same check as in String.regionMatches(): if (Character.toUpperCase(c1) != Character.toUpperCase(c2) && Character.toLowerCase(c1) != Character .toLowerCase(c2)) { return false; } } return true; }
/** Case-sensitive comparison must reject regions differing only by case or content. */
@Test
void testRegionMatchesNotEqualsCaseSensitive() {
  assertFalse(StringUtils.regionMatches("abc", false, 0, "xAbc", 1, 3));
  assertFalse(StringUtils.regionMatches("abc", false, 0, "xCbc", 1, 3));
}
/**
 * Returns a description of this topology's sub-topologies, processors and
 * stores. Synchronized to give a consistent snapshot while the builder may be
 * mutated concurrently.
 *
 * @return the topology description
 */
public synchronized TopologyDescription describe() {
  return internalTopologyBuilder.describe();
}
/**
 * A windowed cogroup/aggregate DSL pipeline must describe to the expected
 * single-sub-topology structure and build with a persistent local store.
 */
@Test
public void timeWindowedCogroupedZeroArgCountShouldPreserveTopologyStructure() {
  final StreamsBuilder builder = new StreamsBuilder();
  builder.stream("input-topic")
      .groupByKey()
      .cogroup((key, value, aggregate) -> value)
      .windowedBy(TimeWindows.ofSizeWithNoGrace(ofMillis(1)))
      .aggregate(() -> "");
  final Topology topology = builder.build();
  final TopologyDescription describe = topology.describe();
  // The expected rendering pins generated node names and store wiring.
  assertEquals(
      "Topologies:\n" +
          "   Sub-topology: 0\n" +
          "    Source: KSTREAM-SOURCE-0000000000 (topics: [input-topic])\n" +
          "      --> COGROUPKSTREAM-AGGREGATE-0000000002\n" +
          "    Processor: COGROUPKSTREAM-AGGREGATE-0000000002 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000001])\n" +
          "      --> COGROUPKSTREAM-MERGE-0000000003\n" +
          "      <-- KSTREAM-SOURCE-0000000000\n" +
          "    Processor: COGROUPKSTREAM-MERGE-0000000003 (stores: [])\n" +
          "      --> none\n" +
          "      <-- COGROUPKSTREAM-AGGREGATE-0000000002\n\n",
      describe.toString()
  );
  topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
  assertThat(topology.internalTopologyBuilder.setApplicationId("test").buildTopology().hasPersistentLocalStore(), is(true));
}
/**
 * Creates a broker-scoped temporary destination on the given session: a
 * temporary topic when {@code topic} is true, otherwise a temporary queue.
 *
 * @param session JMS session used to create the destination
 * @param topic true for a temporary topic, false for a temporary queue
 * @return the newly created temporary destination
 * @throws JMSException if the session fails to create the destination
 */
@Override
public Destination createTemporaryDestination(final Session session, final boolean topic) throws JMSException {
  return topic ? session.createTemporaryTopic() : session.createTemporaryQueue();
}
/** topic=false must yield a named temporary queue. */
@Test
public void testTemporaryQueueCreation() throws Exception {
  TemporaryQueue destination = (TemporaryQueue) strategy.createTemporaryDestination(getSession(), false);
  assertNotNull(destination);
  assertNotNull(destination.getQueueName());
}
/**
 * Tears down the Cassandra resources managed by this instance.
 *
 * <p>Drops the keyspace (unless it was supplied statically) and closes the
 * client. Errors are accumulated rather than thrown immediately so both steps
 * always run; timeout and "does not exist" drop failures are tolerated.
 *
 * @throws CassandraResourceManagerException if any non-tolerated step failed
 */
@Override
public synchronized void cleanupAll() {
  LOG.info("Attempting to cleanup Cassandra manager.");
  boolean producedError = false;
  // First, delete the database if it was not given as a static argument
  if (!usingStaticDatabase) {
    try {
      executeStatement(String.format("DROP KEYSPACE IF EXISTS %s", this.keyspaceName));
    } catch (Exception e) {
      LOG.error("Failed to drop Cassandra keyspace {}.", keyspaceName, e);
      // Only bubble exception if the cause is not timeout or does not exist
      if (!ExceptionUtils.containsType(e, DriverTimeoutException.class)
          && !ExceptionUtils.containsMessage(e, "does not exist")) {
        producedError = true;
      }
    }
  }
  // Next, try to close the Cassandra client connection
  try {
    cassandraClient.close();
  } catch (Exception e) {
    LOG.error("Failed to delete Cassandra client.", e);
    producedError = true;
  }
  // Throw Exception at the end if there were any errors
  if (producedError) {
    throw new CassandraResourceManagerException(
        "Failed to delete resources. Check above for errors.");
  }
  super.cleanupAll();
  LOG.info("Cassandra manager successfully cleaned up.");
}
/**
 * For a non-static database, cleanupAll must issue the DROP KEYSPACE statement
 * and close the client.
 */
@Test
public void testCleanupShouldDropNonStaticDatabase() {
  testManager.cleanupAll();
  verify(cassandraClient).execute(any(SimpleStatement.class));
  verify(cassandraClient).close();
}
/**
 * SQL function to_big_endian_64(bigint): encodes the value as an 8-byte
 * big-endian two's-complement varbinary.
 *
 * <p>Slice.setLong writes little-endian, so the bytes are reversed first to
 * obtain big-endian order.
 *
 * @param value 64-bit value to encode
 * @return 8-byte big-endian encoding of the value
 */
@Description("encode value as a 64-bit 2's complement big endian varbinary")
@ScalarFunction("to_big_endian_64")
@SqlType(StandardTypes.VARBINARY)
public static Slice toBigEndian64(@SqlType(StandardTypes.BIGINT) long value) {
  Slice slice = Slices.allocate(Long.BYTES);
  slice.setLong(0, Long.reverseBytes(value));
  return slice;
}
/** Checks zero, one, and both extremes of the signed 64-bit range. */
@Test
public void testToBigEndian64() {
  assertFunction("to_big_endian_64(0)", VARBINARY, sqlVarbinaryHex("0000000000000000"));
  assertFunction("to_big_endian_64(1)", VARBINARY, sqlVarbinaryHex("0000000000000001"));
  assertFunction("to_big_endian_64(9223372036854775807)", VARBINARY, sqlVarbinaryHex("7FFFFFFFFFFFFFFF"));
  assertFunction("to_big_endian_64(-9223372036854775807)", VARBINARY, sqlVarbinaryHex("8000000000000001"));
}
/**
 * Builds the Storegate API client: configures an OAuth2 interceptor (client
 * credentials sent as a Basic header on token requests), resolves the OAuth
 * redirect URI, wires retry handling for expired tokens, and applies the
 * host's connect/read timeouts.
 *
 * @throws ConnectionCanceledException if the user cancels the login prompt
 */
@Override
protected StoregateApiClient connect(final ProxyFinder proxy, final HostKeyCallback key, final LoginCallback prompt, final CancelCallback cancel) throws ConnectionCanceledException {
    final HttpClientBuilder configuration = builder.build(proxy, this, prompt);
    final PreferencesReader preferences = new HostPreferences(host);
    // A dedicated HTTP client is used for token requests; it adds the OAuth
    // client id/secret as a Basic Authorization header on every request.
    authorizationService = new OAuth2RequestInterceptor(builder.build(proxy, this, prompt).addInterceptorLast(new HttpRequestInterceptor() {
        @Override
        public void process(final HttpRequest request, final HttpContext context) {
            request.addHeader(HttpHeaders.AUTHORIZATION, String.format("Basic %s",
                Base64.getEncoder().encodeToString(String.format("%s:%s", host.getProtocol().getOAuthClientId(), host.getProtocol().getOAuthClientSecret()).getBytes(StandardCharsets.UTF_8))));
        }
    }).build(), host, prompt)
        // Redirect URI resolution: keep the Cyberduck scheme or an absolute
        // URL as-is, otherwise treat the configured value as a path on the host.
        .withRedirectUri(CYBERDUCK_REDIRECT_URI.equals(host.getProtocol().getOAuthRedirectUrl()) ? host.getProtocol().getOAuthRedirectUrl() :
            Scheme.isURL(host.getProtocol().getOAuthRedirectUrl()) ? host.getProtocol().getOAuthRedirectUrl() :
                new HostUrlProvider().withUsername(false).withPath(true).get(
                    host.getProtocol().getScheme(), host.getPort(), null, host.getHostname(), host.getProtocol().getOAuthRedirectUrl()))
        .withParameter("login_hint", preferences.getProperty("storegate.login.hint"));
    // Force login even if browser session already exists
    authorizationService.withParameter("prompt", "login");
    // Retry once on OAuth error responses (e.g. expired access token).
    configuration.setServiceUnavailableRetryStrategy(new CustomServiceUnavailableRetryStrategy(host,
        new ExecutionCountServiceUnavailableRetryStrategy(new OAuth2ErrorResponseInterceptor(host, authorizationService))));
    configuration.addInterceptorLast(authorizationService);
    final CloseableHttpClient apache = configuration.build();
    final StoregateApiClient client = new StoregateApiClient(apache);
    client.setBasePath(new HostUrlProvider().withUsername(false).withPath(true).get(
        host.getProtocol().getScheme(), host.getPort(), null, host.getHostname(), host.getProtocol().getContext()));
    // JAX-RS client backed by the same Apache HTTP client instance.
    client.setHttpClient(ClientBuilder.newClient(new ClientConfig()
        .register(new InputStreamProvider())
        .register(MultiPartFeature.class)
        .register(new JSON())
        .register(JacksonFeature.class)
        .connectorProvider(new HttpComponentsProvider(apache))));
    final int timeout = ConnectionTimeoutFactory.get(host).getTimeout() * 1000;
    client.setConnectTimeout(timeout);
    client.setReadTimeout(timeout);
    client.setUserAgent(new PreferencesUseragentProvider().get());
    return client;
}
// Smoke test: the session reports connected after setup and disconnected after close().
@Test
public void testConnect() throws Exception {
    assertTrue(session.isConnected());
    session.close();
    assertFalse(session.isConnected());
}
public static void addFileSliceCommonMetrics(List<FileSlice> fileSlices, Map<String, Double> metrics, long defaultBaseFileSize) { int numLogFiles = 0; long totalLogFileSize = 0; long totalIORead = 0; long totalIOWrite = 0; long totalIO = 0; for (FileSlice slice : fileSlices) { numLogFiles += slice.getLogFiles().count(); // Total size of all the log files totalLogFileSize += slice.getLogFiles().map(HoodieLogFile::getFileSize).filter(size -> size >= 0) .reduce(Long::sum).orElse(0L); long baseFileSize = slice.getBaseFile().isPresent() ? slice.getBaseFile().get().getFileSize() : 0L; totalIORead += baseFileSize; // Total write will be similar to the size of the base file totalIOWrite += baseFileSize > 0 ? baseFileSize : defaultBaseFileSize; } // Total read will be the base file + all the log files totalIORead = FSUtils.getSizeInMB(totalIORead + totalLogFileSize); totalIOWrite = FSUtils.getSizeInMB(totalIOWrite); // Total IO will be the IO for read + write totalIO = totalIORead + totalIOWrite; metrics.put(TOTAL_IO_READ_MB, (double) totalIORead); metrics.put(TOTAL_IO_WRITE_MB, (double) totalIOWrite); metrics.put(TOTAL_IO_MB, (double) totalIO); metrics.put(TOTAL_LOG_FILE_SIZE, (double) totalLogFileSize); metrics.put(TOTAL_LOG_FILES, (double) numLogFiles); }
// Empty slice list: every aggregate metric must be present and equal to zero.
@Test
public void testFileSliceMetricUtilsWithoutFile() {
    Map<String, Double> metrics = new HashMap<>();
    List<FileSlice> fileSlices = new ArrayList<>();
    final long defaultBaseFileSize = 10 * 1024 * 1024;
    final double epsilon = 1e-5;
    FileSliceMetricUtils.addFileSliceCommonMetrics(fileSlices, metrics, defaultBaseFileSize);
    assertEquals(0.0, metrics.get(FileSliceMetricUtils.TOTAL_IO_READ_MB), epsilon);
    assertEquals(0.0, metrics.get(FileSliceMetricUtils.TOTAL_IO_WRITE_MB), epsilon);
    assertEquals(0.0, metrics.get(FileSliceMetricUtils.TOTAL_IO_MB), epsilon);
    assertEquals(0.0, metrics.get(FileSliceMetricUtils.TOTAL_LOG_FILE_SIZE), epsilon);
    assertEquals(0.0, metrics.get(FileSliceMetricUtils.TOTAL_LOG_FILES), epsilon);
}
/**
 * Registers an internal topic with its properties. Both arguments are
 * mandatory; a {@link NullPointerException} is thrown (topic name checked
 * first) when either is {@code null}.
 */
public final void addInternalTopic(final String topicName, final InternalTopicProperties internalTopicProperties) {
    // requireNonNull returns its argument, so validation and insertion combine.
    internalTopicNamesWithProperties.put(
        Objects.requireNonNull(topicName, "topicName can't be null"),
        Objects.requireNonNull(internalTopicProperties, "internalTopicProperties can't be null"));
}
// A null topic name must be rejected with NullPointerException.
@Test
public void shouldNotAddNullInternalTopic() {
    assertThrows(NullPointerException.class, () -> builder.addInternalTopic(null, InternalTopicProperties.empty()));
}
/** Returns the connector/transform version, taken from the Kafka build metadata. */
@Override
public String version() {
    return AppInfoParser.getVersion();
}
// The transform's version() must mirror AppInfoParser's build version.
@Test
public void testHoistFieldVersionRetrievedFromAppInfoParser() {
    assertEquals(AppInfoParser.getVersion(), xform.version());
}
/**
 * URL-decodes {@code str} using the given charset.
 * Delegates with the third argument {@code true}, which decodes '+' as a
 * space (see {@code decodePlusTest}).
 */
public static String decode(String str, Charset charset) {
    return decode(str, charset, true);
}
// '+' must decode to a single space under the default (plus-to-space) mode.
@Test
public void decodePlusTest() {
    final String decode = URLDecoder.decode("+", CharsetUtil.CHARSET_UTF_8);
    assertEquals(" ", decode);
}
/**
 * Resolves an environment-backed property. The key is tried as given first;
 * if that fails, an upper-cased variant is tried (environment variables are
 * conventionally upper case). Returns {@code null} when neither form resolves.
 */
@Override
String getProperty(String key) {
    String checkedKey = checkPropertyName(key);
    if (checkedKey == null) {
        // Locale.ROOT gives locale-independent case mapping; the default
        // locale's toUpperCase() would mangle keys containing 'i' under
        // e.g. the Turkish locale (dotless-i), breaking the fallback lookup.
        final String upperCaseKey = key.toUpperCase(java.util.Locale.ROOT);
        if (!upperCaseKey.equals(key)) {
            checkedKey = checkPropertyName(upperCaseKey);
        }
    }
    if (checkedKey == null) {
        return null;
    }
    return env.get(checkedKey);
}
// A lower-case key must fall back to its upper-cased environment variable.
@Test
void testGetEnvForLowerCaseKey() {
    assertEquals("value1", systemEnvPropertySource.getProperty("testcase1"));
}
/**
 * Parses a long value from the byte list starting at offset 0 and returns it
 * as a double. Convenience overload delegating to the offset-based variant.
 */
public static double parseBytesLongToDouble(List data) {
    return parseBytesLongToDouble(data, 0);
}
// Exercises offset/length/endianness combinations of parseBytesLongToDouble.
// NOTE(review): the byte[] argument presumably matches an overload distinct
// from the List-based variant shown above — confirm against the full class.
@Test
public void parseBytesLongToDouble() {
    byte[] longValByte = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A};
    Double valueExpected = 10.0d;
    Double valueActual = TbUtils.parseBytesLongToDouble(longValByte);
    Assertions.assertEquals(valueExpected, valueActual);
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 7, 1, true);
    Assertions.assertEquals(valueExpected, valueActual);
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 7, 1, false);
    Assertions.assertEquals(valueExpected, valueActual);
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 6, 2, true);
    Assertions.assertEquals(valueExpected, valueActual);
    valueExpected = 2560.0d;
    // Little-endian interpretation of the last two bytes: 0x0A00 == 2560.
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 6, 2, false);
    Assertions.assertEquals(valueExpected, valueActual);
    valueExpected = 10.0d;
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 0, 8, true);
    Assertions.assertEquals(valueExpected, valueActual);
    valueExpected = 7.2057594037927936E17d;
    valueActual = TbUtils.parseBytesLongToDouble(longValByte, 0, 8, false);
    Assertions.assertEquals(valueExpected, valueActual);
}
/** Serializes the last data-commit times recorded for reads of the given table. */
@Override
public String getSerializedCommitOutputForRead(SchemaTableName table) {
    return serializeCommitOutput(lastDataCommitTimesForRead, table);
}
// Known tables serialize their commit times comma-separated; unknown tables yield "".
@Test
public void testGetSerializedCommitOutput() {
    HiveCommitHandle commitHandle = new HiveCommitHandle(testData, ImmutableMap.of());
    assertEquals(commitHandle.getSerializedCommitOutputForRead(new SchemaTableName("s1", "t1")), "1,2");
    assertEquals(commitHandle.getSerializedCommitOutputForRead(new SchemaTableName("s2", "t2")), "3,4");
    assertEquals(commitHandle.getSerializedCommitOutputForRead(new SchemaTableName("s3", "t3")), "");
}
/**
 * Percent-escapes a URI for use as a metric name component: '%' -> %25,
 * '/' -> %2F, '.' -> %2E. Results are memoized in {@code CACHED_ESCAPED_PATH}.
 */
public static String escape(AlluxioURI uri) {
    return CACHED_ESCAPED_PATH.computeIfAbsent(uri, u -> {
        String escaped = u.toString();
        // '%' must be escaped first so pre-existing escapes are kept verbatim.
        escaped = escaped.replace("%", "%25");
        escaped = escaped.replace("/", "%2F");
        return escaped.replace(".", "%2E");
    });
}
// Pins escaping of '/', '.', pre-escaped '%' sequences, and scheme-qualified URIs.
@Test
public void testEscape() {
    AlluxioURI localUri1 = new AlluxioURI("/foo/alluxio/underFSStorage");
    String localUriEscaped1 = MetricsSystem.escape(localUri1);
    assertEquals("%2Ffoo%2Falluxio%2FunderFSStorage", localUriEscaped1);
    AlluxioURI localUri2 = new AlluxioURI("/.alluxio.wololo/alluxio/underFSStorage");
    String localUriEscaped2 = MetricsSystem.escape(localUri2);
    assertEquals("%2F%2Ealluxio%2Ewololo%2Falluxio%2FunderFSStorage", localUriEscaped2);
    AlluxioURI localUri3 = new AlluxioURI("/%25alluxio%20user%2Ffoo%2Ebar/alluxio/underFSStorage");
    String localUriEscaped3 = MetricsSystem.escape(localUri3);
    assertEquals("%2F%2525alluxio%2520user%252Ffoo%252Ebar%2Falluxio%2FunderFSStorage", localUriEscaped3);
    AlluxioURI localUri4 = new AlluxioURI("s3a://test/Tasks+Export+%282017–11–05+06%3A10+PM%2Ecsv");
    String localUriEscaped4 = MetricsSystem.escape(localUri4);
    assertEquals("s3a:%2F%2Ftest%2FTasks+Export+%25282017–11–05+06%253A10+PM%252Ecsv", localUriEscaped4);
}
/** Folds another accumulator into this one by keeping the larger partial maximum. */
@Override
public void merge(Accumulator<Long, Long> other) {
    final long otherMax = other.getLocalValue();
    if (otherMax > this.max) {
        this.max = otherMax;
    }
}
// merge() must be commutative: both orders yield the larger of the two maxima.
@Test
void testMerge() {
    LongMaximum max1 = new LongMaximum();
    max1.add(1234567890987654321L);
    LongMaximum max2 = new LongMaximum();
    max2.add(5678909876543210123L);
    max2.merge(max1);
    assertThat(max2.getLocalValue().longValue()).isEqualTo(5678909876543210123L);
    max1.merge(max2);
    assertThat(max1.getLocalValue().longValue()).isEqualTo(5678909876543210123L);
}
/** Always false: Short has no NaN representation (only Float/Double do). */
@Override
protected boolean isNan(Short number) {
    // NaN never applies here because only types like Float and Double have NaN
    return false;
}
// isNan must be false for every Short value, including null and the extremes.
@Test
void testIsNan() {
    ShortSummaryAggregator ag = new ShortSummaryAggregator();
    // always false for Short
    assertThat(ag.isNan((short) -1)).isFalse();
    assertThat(ag.isNan((short) 0)).isFalse();
    assertThat(ag.isNan((short) 23)).isFalse();
    assertThat(ag.isNan(Short.MAX_VALUE)).isFalse();
    assertThat(ag.isNan(Short.MIN_VALUE)).isFalse();
    assertThat(ag.isNan(null)).isFalse();
}
/**
 * Creates an IPv4 prefix from a packed 32-bit address and a prefix length.
 * Validation of the prefix length is delegated to the IpPrefix constructor.
 */
public static IpPrefix valueOf(int address, int prefixLength) {
    return new IpPrefix(IpAddress.valueOf(address), prefixLength);
}
// An IPv6 prefix length greater than 128 must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfStringTooLongPrefixLengthIPv6() {
    IpPrefix ipPrefix;
    ipPrefix = IpPrefix.valueOf("1111:2222:3333:4444:5555:6666:7777:8888/129");
}
/**
 * Returns the service proxy, lazily initializing it on first access.
 * Ensures the owning module is started (or prepared, when the lifecycle is
 * managed externally) before initialization.
 *
 * @param check whether availability of the remote service should be checked
 * @throws IllegalStateException if this reference has already been destroyed
 */
@Override
@Transient
public T get(boolean check) {
    if (destroyed) {
        throw new IllegalStateException("The invoker of ReferenceConfig(" + url + ") has already destroyed!");
    }
    if (ref == null) {
        if (getScopeModel().isLifeCycleManagedExternally()) {
            // prepare model for reference
            getScopeModel().getDeployer().prepare();
        } else {
            // ensure start module, compatible with old api usage
            getScopeModel().getDeployer().start();
        }
        init(check);
    }
    return ref;
}
// A ReferenceConfig built from an @Reference annotation must carry over every
// method-level attribute (timeout, retries, loadbalance, callbacks, cache, ...).
@Test
void testConstructWithReferenceAnnotation() throws NoSuchFieldException {
    Reference reference = getClass().getDeclaredField("innerTest").getAnnotation(Reference.class);
    ReferenceConfig referenceConfig = new ReferenceConfig(reference);
    Assertions.assertEquals(1, referenceConfig.getMethods().size());
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getName(), "sayHello");
    Assertions.assertEquals(1300, (int) (referenceConfig.getMethods().get(0)).getTimeout());
    Assertions.assertEquals(4, (int) (referenceConfig.getMethods().get(0)).getRetries());
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getLoadbalance(), "random");
    Assertions.assertEquals(3, (int) (referenceConfig.getMethods().get(0)).getActives());
    Assertions.assertEquals(5, (int) (referenceConfig.getMethods().get(0)).getExecutes());
    Assertions.assertTrue((referenceConfig.getMethods().get(0)).isAsync());
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getOninvokeMethod(), "i");
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getOnreturnMethod(), "r");
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getOnthrowMethod(), "t");
    Assertions.assertEquals((referenceConfig.getMethods().get(0)).getCache(), "c");
}
public static String getSizeFromBytes(long bytes) { double ret = bytes; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%dB", bytes); } ret /= 1024; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%.2fKB", ret); } ret /= 1024; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%.2fMB", ret); } ret /= 1024; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%.2fGB", ret); } ret /= 1024; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%.2fTB", ret); } ret /= 1024; if (ret <= 1024 * 5) { return String.format(Locale.ENGLISH, "%.2fPB", ret); } ret /= 1024; //Long.MAX_VALUE bytes approximately equals to 8EB. return String.format(Locale.ENGLISH, "%.2fEB", ret); }
// Table-driven check of unit boundaries: values up to 5x the next scale stay
// in the current unit (4096B, 4096.00KB, ...), then roll over (8.00KB, ...).
@Test
public void getSizeFromBytes() {
    class TestCase {
        String mExpected;
        long mInput;
        public TestCase(String expected, long input) {
            mExpected = expected;
            mInput = input;
        }
    }
    List<TestCase> testCases = new ArrayList<>();
    testCases.add(new TestCase("4B", 1L << 2));
    testCases.add(new TestCase("8B", 1L << 3));
    testCases.add(new TestCase("4096B", 1L << 12));
    testCases.add(new TestCase("8.00KB", 1L << 13));
    testCases.add(new TestCase("4096.00KB", 1L << 22));
    testCases.add(new TestCase("8.00MB", 1L << 23));
    testCases.add(new TestCase("4096.00MB", 1L << 32));
    testCases.add(new TestCase("8.00GB", 1L << 33));
    testCases.add(new TestCase("4096.00GB", 1L << 42));
    testCases.add(new TestCase("8.00TB", 1L << 43));
    testCases.add(new TestCase("4096.00TB", 1L << 52));
    testCases.add(new TestCase("8.00PB", 1L << 53));
    testCases.add(new TestCase("4096.00PB", 1L << 62));
    for (TestCase testCase : testCases) {
        assertEquals(testCase.mExpected, FormatUtils.getSizeFromBytes(testCase.mInput));
    }
}
/**
 * Returns true when any of the given sticky-key hashes is currently tracked.
 * When out-of-order delivery is allowed the check is disabled and this always
 * returns false.
 */
public boolean containsStickyKeyHashes(Set<Integer> stickyKeyHashes) {
    if (allowOutOfOrderDelivery) {
        return false;
    }
    return stickyKeyHashes.stream().anyMatch(hashesRefCount::containsKey);
}
// With ordered delivery, tracked hashes are reported (even when mixed with
// untracked ones); with out-of-order delivery the check is always false.
@Test(dataProvider = "allowOutOfOrderDelivery", timeOut = 10000)
public void testContainsStickyKeyHashes(boolean allowOutOfOrderDelivery) throws Exception {
    MessageRedeliveryController controller = new MessageRedeliveryController(allowOutOfOrderDelivery);
    controller.add(1, 1, 100);
    controller.add(1, 2, 101);
    controller.add(1, 3, 102);
    controller.add(2, 2, 103);
    controller.add(2, 1, 104);
    if (allowOutOfOrderDelivery) {
        assertFalse(controller.containsStickyKeyHashes(Set.of(100)));
        assertFalse(controller.containsStickyKeyHashes(Set.of(101, 102, 103)));
        assertFalse(controller.containsStickyKeyHashes(Set.of(104, 105)));
    } else {
        assertTrue(controller.containsStickyKeyHashes(Set.of(100)));
        assertTrue(controller.containsStickyKeyHashes(Set.of(101, 102, 103)));
        assertTrue(controller.containsStickyKeyHashes(Set.of(104, 105)));
    }
    assertFalse(controller.containsStickyKeyHashes(Set.of()));
    assertFalse(controller.containsStickyKeyHashes(Set.of(99)));
    assertFalse(controller.containsStickyKeyHashes(Set.of(105, 106)));
}
/**
 * Establishes trust in an IdP via the federation master: fetches the trusted
 * federation statement for the issuer, then fetches the issuer's own entity
 * configuration validated against the statement's JWKS.
 */
@Override
public EntityStatementJWS establishIdpTrust(URI issuer) {
    var trustedFederationStatement = fetchTrustedFederationStatement(issuer);
    // the federation statement from the master will establish trust in the JWKS and the issuer URL
    // of the idp,
    // we still need to fetch the entity configuration directly afterward to get the full
    // entity statement
    return fetchTrustedEntityConfiguration(issuer, trustedFederationStatement.body().jwks());
}
// An expired federation statement from the master must abort trust
// establishment with a descriptive FederationException.
@Test
void establishTrust_expiredFederationStatement() {
    var client = new FederationMasterClientImpl(FEDERATION_MASTER, federationApiClient, clock);
    var issuer = URI.create("https://idp-tk.example.com");
    var federationFetchUrl = FEDERATION_MASTER.resolve("/fetch");
    var fedmasterKeypair = ECKeyGenerator.example();
    var fedmasterEntityConfigurationJws = federationFetchFedmasterConfiguration(federationFetchUrl, fedmasterKeypair);
    var trustedSectoralIdpKeypair = ECKeyGenerator.generate();
    var trustedFederationStatement = expiredFederationStatement(issuer, trustedSectoralIdpKeypair, fedmasterKeypair);
    when(federationApiClient.fetchEntityConfiguration(FEDERATION_MASTER))
        .thenReturn(fedmasterEntityConfigurationJws);
    when(federationApiClient.fetchFederationStatement(
            federationFetchUrl, FEDERATION_MASTER.toString(), issuer.toString()))
        .thenReturn(trustedFederationStatement);
    // when
    var e = assertThrows(FederationException.class, () -> client.establishIdpTrust(issuer));
    // then
    assertEquals(
        "federation statement of 'https://idp-tk.example.com' expired or not yet valid",
        e.getMessage());
}
/**
 * {@code computeIfAbsent} with a Java 8 workaround for the ConcurrentHashMap
 * locking bug (JDK-8161372, see issue #11986): on Java 8 the mapping function
 * is invoked outside the map's bin lock to avoid deadlocks/contention. As a
 * consequence {@code func} may be invoked more than once under contention;
 * {@code putIfAbsent} ensures the first stored value wins and is returned to
 * all callers. On Java 9+ the fixed built-in computeIfAbsent is used directly.
 */
public static <K, V> V computeIfAbsent(ConcurrentMap<K, V> map, K key, Function<? super K, ? extends V> func) {
    Objects.requireNonNull(func);
    if (JRE.JAVA_8.isCurrentVersion()) {
        V v = map.get(key);
        if (null == v) {
            // issue#11986 lock bug
            // v = map.computeIfAbsent(key, func);

            // this workaround may cause `func.apply` to be called multiple times.
            v = func.apply(key);
            if (null == v) {
                return null;
            }
            final V res = map.putIfAbsent(key, v);
            if (null != res) {
                // A previous value is present: another thread won the race,
                // so return the existing value instead of ours.
                return res;
            }
            // putIfAbsent succeeded: our value is now the mapping.
        }
        return v;
    } else {
        return map.computeIfAbsent(key, func);
    }
}
// On Java 9+ the JDK's computeIfAbsent detects recursive updates and throws
// IllegalStateException; the util must delegate straight to the JDK there.
@Test
@EnabledForJreRange(min = org.junit.jupiter.api.condition.JRE.JAVA_9)
public void issue11986ForJava17Test() {
    // https://github.com/apache/dubbo/issues/11986
    final ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
    // JDK9+ has resolved JDK-8161372; a recursive update now throws IllegalStateException
    assertThrows(IllegalStateException.class, () -> {
        ConcurrentHashMapUtils.computeIfAbsent(map, "AaAa", key -> map.computeIfAbsent("BBBB", key2 -> 42));
    });
}
/**
 * Stores the rendered key/value pair (with optional TTL metadata) into the
 * namespace KV store; namespace, key, and value templates are rendered
 * against the run context first.
 */
@Override
public VoidOutput run(RunContext runContext) throws Exception {
    String renderedNamespace = runContext.render(this.namespace);
    String renderedKey = runContext.render(this.key);
    // renderTyped keeps the value's native type (map, number, ...) instead of a string.
    Object renderedValue = runContext.renderTyped(this.value);
    KVStore kvStore = runContext.namespaceKv(renderedNamespace);
    kvStore.put(renderedKey, new KVValueAndMetadata(new KVMetadata(ttl), renderedValue), this.overwrite);
    return null;
}
// Without an explicit namespace the task must write into the flow's own
// namespace KV store, with no expiration date when no TTL is set.
@Test
void shouldSetKVGivenNoNamespace() throws Exception {
    // Given
    Set set = Set.builder()
        .id(Set.class.getSimpleName())
        .type(Set.class.getName())
        .key("{{ inputs.key }}")
        .value("{{ inputs.value }}")
        .build();
    var value = Map.of("date", Instant.now().truncatedTo(ChronoUnit.MILLIS), "int", 1, "string", "string");
    final RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, set, Map.of(
        "key", TEST_KEY,
        "value", value
    ));
    // When
    set.run(runContext);
    // Then
    final KVStore kv = runContext.namespaceKv(runContext.flowInfo().namespace());
    assertThat(kv.getValue(TEST_KEY), is(Optional.of(new KVValue(value))));
    assertThat(kv.list().getFirst().expirationDate(), nullValue());
}
/**
 * Synchronous wrapper around {@code fastPutAsync}: stores the entry with the
 * given TTL and blocks for the result (true if the key was newly inserted).
 */
@Override
public boolean fastPut(K key, V value, Duration ttl) {
    return get(fastPutAsync(key, value, ttl));
}
// Entries written with a 5s TTL must be gone after the TTL elapses, including
// after a key is re-put with a fresh TTL.
// NOTE(review): the two Thread.sleep calls make this test take ~15s — consider
// a shorter TTL or an injectable clock if the API allows it.
@Test
public void testFastPutTTL() throws InterruptedException {
    RMapCacheNative<SimpleKey, SimpleValue> map = redisson.getMapCacheNative("getAll");
    map.fastPut(new SimpleKey("1"), new SimpleValue("3"), Duration.ofSeconds(5));
    Thread.sleep(5000);
    assertThat(map.get(new SimpleKey("1"))).isNull();
    map.fastPut(new SimpleKey("1"), new SimpleValue("4"), Duration.ofSeconds(5));
    Thread.sleep(10000);
    assertThat(map.get(new SimpleKey("1"))).isNull();
}
/** Returns the system property for {@code key}, or {@code null} when absent. */
public static String get(String key) {
    return get(key, null);
}
// An empty key must be rejected with IllegalArgumentException.
@Test
public void testGetWithKeyEmpty() {
    assertThrows(IllegalArgumentException.class, () -> SystemPropertyUtil.get("", null));
}
/**
 * Returns the cache key for this service info: the grouped service name,
 * optionally suffixed with the cluster list.
 */
@JsonIgnore
public String getKey() {
    String serviceName = getGroupedServiceName();
    return getKey(serviceName, clusters);
}
// The key round-trips through the constructor, with and without a cluster suffix.
@Test
void testServiceInfoConstructor() {
    String key1 = "group@@name";
    String key2 = "group@@name@@c2";
    ServiceInfo s1 = new ServiceInfo(key1);
    ServiceInfo s2 = new ServiceInfo(key2);
    assertEquals(key1, s1.getKey());
    assertEquals(key2, s2.getKey());
}
/**
 * Lists resource names of the given app, aggregated over the last minute of
 * metrics, ordered by blocked QPS descending (ties broken by passed QPS
 * descending). Returns an empty list for a blank/unknown app.
 */
@Override
public List<String> listResourcesOfApp(String app) {
    List<String> results = new ArrayList<>();
    if (StringUtil.isBlank(app)) {
        return results;
    }
    // resource -> timestamp -> metric
    Map<String, LinkedHashMap<Long, MetricEntity>> resourceMap = allMetrics.get(app);
    if (resourceMap == null) {
        return results;
    }
    // Only metrics from the last 60 seconds are considered.
    final long minTimeMs = System.currentTimeMillis() - 1000 * 60;
    Map<String, MetricEntity> resourceCount = new ConcurrentHashMap<>(32);

    // The read lock guards the per-resource metric maps against concurrent writers.
    readWriteLock.readLock().lock();
    try {
        for (Entry<String, LinkedHashMap<Long, MetricEntity>> resourceMetrics : resourceMap.entrySet()) {
            for (Entry<Long, MetricEntity> metrics : resourceMetrics.getValue().entrySet()) {
                if (metrics.getKey() < minTimeMs) {
                    continue;
                }
                MetricEntity newEntity = metrics.getValue();
                if (resourceCount.containsKey(resourceMetrics.getKey())) {
                    // Merge into the running aggregate for this resource.
                    MetricEntity oldEntity = resourceCount.get(resourceMetrics.getKey());
                    oldEntity.addPassQps(newEntity.getPassQps());
                    oldEntity.addRtAndSuccessQps(newEntity.getRt(), newEntity.getSuccessQps());
                    oldEntity.addBlockQps(newEntity.getBlockQps());
                    oldEntity.addExceptionQps(newEntity.getExceptionQps());
                    oldEntity.addCount(1);
                } else {
                    // copyOf avoids mutating the stored entity during aggregation.
                    resourceCount.put(resourceMetrics.getKey(), MetricEntity.copyOf(newEntity));
                }
            }
        }
        // Order by last minute b_qps DESC.
        return resourceCount.entrySet()
            .stream()
            .sorted((o1, o2) -> {
                MetricEntity e1 = o1.getValue();
                MetricEntity e2 = o2.getValue();
                int t = e2.getBlockQps().compareTo(e1.getBlockQps());
                if (t != 0) {
                    return t;
                }
                return e2.getPassQps().compareTo(e1.getPassQps());
            })
            .map(Entry::getKey)
            .collect(Collectors.toList());
    } finally {
        readWriteLock.readLock().unlock();
    }
}
// Stress test: interleaves concurrent saves and listResourcesOfApp reads and
// fails on ConcurrentModificationException (i.e. insufficient locking).
@Test
public void testConcurrentPutAndGet() {
    List<CompletableFuture> futures = new ArrayList<>(10000);
    final CyclicBarrier cyclicBarrier = new CyclicBarrier(8);
    for (int j = 0; j < 10000; j++) {
        final int finalJ = j;
        futures.add(CompletableFuture.runAsync(() -> {
            try {
                cyclicBarrier.await();
                if (finalJ % 2 == 0) {
                    batchSave();
                } else {
                    inMemoryMetricsRepository.listResourcesOfApp(DEFAULT_APP);
                }
            } catch (InterruptedException | BrokenBarrierException e) {
                e.printStackTrace();
            }
        }, executorService)
        );
    }
    CompletableFuture all = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
    try {
        all.get(10, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    } catch (ExecutionException e) {
        e.getCause().printStackTrace();
        if (e.getCause() instanceof ConcurrentModificationException) {
            fail("concurrent error occurred");
        } else {
            fail("unexpected exception");
        }
    } catch (TimeoutException e) {
        fail("allOf future timeout");
    }
}
/**
 * Infers registry credentials from Maven settings.xml. Looks up the matching
 * {@code <server>} entry, decrypts its credentials, and wraps them in an
 * {@link AuthProperty}. Returns empty when no server entry matches.
 *
 * @throws InferredAuthException if the settings decrypter reports an ERROR
 *     or FATAL problem for this server entry
 */
@Override
public Optional<AuthProperty> inferAuth(String registry) throws InferredAuthException {
    Server server = getServerFromMavenSettings(registry);
    if (server == null) {
        return Optional.empty();
    }
    SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest(server);
    SettingsDecryptionResult result = decrypter.decrypt(request);
    // Un-encrypted passwords are passed through, so a problem indicates a real issue.
    // If there are any ERROR or FATAL problems reported, then decryption failed.
    for (SettingsProblem problem : result.getProblems()) {
        if (problem.getSeverity() == SettingsProblem.Severity.ERROR
            || problem.getSeverity() == SettingsProblem.Severity.FATAL) {
            throw new InferredAuthException(
                "Unable to decrypt server(" + registry + ") info from settings.xml: " + problem);
        }
    }
    Server resultServer = result.getServer();
    String username = resultServer.getUsername();
    String password = resultServer.getPassword();
    return Optional.of(
        new AuthProperty() {
            @Override
            public String getUsername() {
                return username;
            }

            @Override
            public String getPassword() {
                return password;
            }

            @Override
            public String getAuthDescriptor() {
                return CREDENTIAL_SOURCE;
            }

            @Override
            public String getUsernameDescriptor() {
                return CREDENTIAL_SOURCE;
            }

            @Override
            public String getPasswordDescriptor() {
                return CREDENTIAL_SOURCE;
            }
        });
}
// A server entry keyed with an explicit port must still resolve credentials.
@Test
public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {
    // Attempt to resolve WITHOUT the port. Should work as well.
    Optional<AuthProperty> auth =
        mavenSettingsServerCredentialsNoMasterPassword.inferAuth("docker.example.com:5432");
    Assert.assertTrue(auth.isPresent());
    Assert.assertEquals("registryUser", auth.get().getUsername());
    Assert.assertEquals("registryPassword", auth.get().getPassword());
}
/**
 * Sends the given events to the Splunk HEC endpoint in a single POST request
 * (events are concatenated as JSON), authorized with the configured HEC token.
 *
 * @return true when Splunk responds with HTTP 200
 * @throws SplunkResourceManagerException on encoding, transport, or non-200 errors
 */
public synchronized boolean sendHttpEvents(Collection<SplunkEvent> events) {
    LOG.info("Attempting to send {} events to {}.", events.size(), getHecEndpoint());

    // Construct base HEC request
    HttpPost httppost = new HttpPost(getHecEndpoint());
    httppost.addHeader("Authorization", "Splunk " + hecToken);

    // Loop over events and send one-by-one
    StringBuilder eventsData = new StringBuilder();
    events.forEach(
        event -> {
            String eventStr = splunkEventToJson(event);
            eventsData.append(eventStr);
            // NOTE(review): this logs full event payloads at INFO level —
            // confirm events never contain sensitive data, or lower to DEBUG.
            LOG.info("Sending HTTP event: {}", eventStr);
        });

    // try-with-resources ensures the client and response are always released.
    try (CloseableHttpClient httpClient = clientFactory.getHttpClient()) {
        // Set request data
        try {
            httppost.setEntity(new StringEntity(eventsData.toString()));
        } catch (UnsupportedEncodingException e) {
            throw new SplunkResourceManagerException(
                "Error setting HTTP message data to " + eventsData, e);
        }
        // Send request
        try (CloseableHttpResponse response = httpClient.execute(httppost)) {
            // Check error code
            int code = response.getStatusLine().getStatusCode();
            if (code != 200) {
                throw new SplunkResourceManagerException(
                    "Received http error code " + code + " sending event.");
            }
        } catch (Exception e) {
            throw new SplunkResourceManagerException("Error sending event.", e);
        }
    } catch (IOException e) {
        throw new SplunkResourceManagerException("Error with HTTP client.", e);
    }

    LOG.info("Successfully sent {} events.", events.size());
    return true;
}
// A mocked 200 response must make sendHttpEvents return true and issue
// exactly one POST for the whole batch.
@Test
public void testSendHttpEventsShouldReturnTrueIfSplunkDoesNotThrowAnyError() throws IOException {
    SplunkEvent event = SplunkEvent.newBuilder().withEvent(EVENT).create();
    try (CloseableHttpResponse mockResponse = clientFactory.getHttpClient().execute(any(HttpPost.class))) {
        when(mockResponse.getStatusLine().getStatusCode()).thenReturn(200);
    }
    assertThat(testManager.sendHttpEvents(ImmutableList.of(event, event))).isTrue();
    verify(clientFactory.getHttpClient()).execute(any(HttpPost.class));
}
/**
 * Chain-of-responsibility step: an empty chain accepts every issue; otherwise
 * the head filter decides, receiving the tail of the chain so it can delegate.
 */
@Override
public boolean accept(FilterableIssue issue) {
    return filters.isEmpty()
        || filters.get(0).accept(issue, new DefaultIssueFilterChain(filters.subList(1, filters.size())));
}
// Once a filter accepts, the chain must stop: the failing filter after the
// accepting one is never reached.
@Test
public void should_accept_and_not_go_further_if_filter_accepts() {
    assertThat(new DefaultIssueFilterChain(List.of(
        new PassingFilter(),
        new AcceptingFilter(),
        new FailingFilter())
    ).accept(issue)).isTrue();
}
/** Identifies the backing data source dialect of this mapper: Derby. */
@Override
public String getDataSource() {
    return DataSourceConstant.DERBY;
}
// The Derby mapper must report the Derby data source constant.
@Test
void testGetDataSource() {
    String dataSource = configInfoTagsRelationMapperByDerby.getDataSource();
    assertEquals(DataSourceConstant.DERBY, dataSource);
}
/**
 * Resolves a method for the given call expression, retrying with coerced
 * collection types for arguments that were empty collection literals (an
 * empty literal is ambiguous between e.g. List and Map). First tries the
 * arguments as-is; on failure it flips the collection class of the
 * empty-collection arguments in every combination until a method matches,
 * mutating {@code arguments} in place to reflect the winning coercion.
 *
 * @return pair of the resolved method (empty if none matched) and the scope
 * @throws NullPointerException if any required parameter is null
 * @throws IllegalArgumentException if there are more empty-collection indexes
 *     than arguments
 */
public static Pair<Optional<Method>, Optional<TypedExpression>> resolveMethodWithEmptyCollectionArguments(
        final MethodCallExpr methodExpression,
        final MvelCompilerContext mvelCompilerContext,
        final Optional<TypedExpression> scope,
        List<TypedExpression> arguments,
        List<Integer> emptyCollectionArgumentsIndexes) {
    Objects.requireNonNull(methodExpression, "MethodExpression parameter cannot be null as the method searches methods based on this expression!");
    Objects.requireNonNull(mvelCompilerContext, "MvelCompilerContext parameter cannot be null!");
    Objects.requireNonNull(arguments, "Arguments parameter cannot be null! Use an empty list instance if needed instead.");
    Objects.requireNonNull(emptyCollectionArgumentsIndexes, "EmptyListArgumentIndexes parameter cannot be null! Use an empty list instance if needed instead.");
    if (emptyCollectionArgumentsIndexes.size() > arguments.size()) {
        throw new IllegalArgumentException("There cannot be more empty collection arguments than all arguments! emptyCollectionArgumentsIndexes parameter has more items than arguments parameter. "
                + "(" + emptyCollectionArgumentsIndexes.size() + " > " + arguments.size() + ")");
    } else {
        // Work on a copy so the caller's list is only modified once a match is found.
        final List<TypedExpression> coercedArgumentsTypesList = new ArrayList<>(arguments);
        Pair<Optional<Method>, Optional<TypedExpression>> resolveMethodResult =
                MethodResolutionUtils.resolveMethod(methodExpression, mvelCompilerContext, scope, coercedArgumentsTypesList);
        if (resolveMethodResult.a.isPresent()) {
            return resolveMethodResult;
        } else {
            // Rather work only with the argumentsType and when a method is resolved, flip the arguments list based on it.
            // This needs to go through all possible combinations.
            final int indexesListSize = emptyCollectionArgumentsIndexes.size();
            for (int numberOfProcessedIndexes = 0; numberOfProcessedIndexes < indexesListSize; numberOfProcessedIndexes++) {
                for (int indexOfEmptyListIndex = numberOfProcessedIndexes; indexOfEmptyListIndex < indexesListSize; indexOfEmptyListIndex++) {
                    // Flip one empty-collection argument's type and retry resolution.
                    switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(indexOfEmptyListIndex));
                    resolveMethodResult =
                            MethodResolutionUtils.resolveMethod(methodExpression, mvelCompilerContext, scope, coercedArgumentsTypesList);
                    if (resolveMethodResult.a.isPresent()) {
                        // Propagate the successful coercion back into the caller's arguments.
                        modifyArgumentsBasedOnCoercedCollectionArguments(arguments, coercedArgumentsTypesList);
                        return resolveMethodResult;
                    }
                    // Flip back before trying the next combination.
                    switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(indexOfEmptyListIndex));
                }
                // Keep this index permanently flipped for the next round of combinations.
                switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(numberOfProcessedIndexes));
            }
            // No method found, return empty.
            return new Pair<>(Optional.empty(), scope);
        }
    }
}
// With no empty-collection indexes, resolution must succeed on the first try
// and leave the arguments list untouched.
@Test
public void resolveMethodWithEmptyCollectionArgumentsNoCollectionArguments() {
    final MethodCallExpr methodExpression = new MethodCallExpr("setIntegerBoxed", new IntegerLiteralExpr("12"));
    final List<TypedExpression> arguments = List.of(new IntegerLiteralExpressionT(new IntegerLiteralExpr("12")));
    final List<TypedExpression> expectedArguments = new ArrayList<>(arguments);
    final TypedExpression scope = new ObjectCreationExpressionT(arguments, Person.class);
    final Pair<Optional<Method>, Optional<TypedExpression>> resolvedMethodResult =
        MethodResolutionUtils.resolveMethodWithEmptyCollectionArguments(
            methodExpression,
            new MvelCompilerContext(null),
            Optional.of(scope),
            arguments,
            Collections.emptyList());
    Assertions.assertThat(resolvedMethodResult.a).isPresent();
    Assertions.assertThat(arguments).containsExactlyElementsOf(expectedArguments);
}
/**
 * Appends the generated {@code getModelsForKieBase(String)} method to the
 * source buffer. For each known KieBase a switch case is emitted returning
 * its model instances (or all models when none are mapped for that KieBase);
 * unknown names fall through to an IllegalArgumentException.
 */
void addGetModelForKieBaseMethod(StringBuilder sb) {
    sb.append(
        "    public java.util.List<Model> getModelsForKieBase(String kieBaseName) {\n");
    if (!modelMethod.getKieBaseNames().isEmpty()) {
        sb.append(
            "        switch (kieBaseName) {\n");
        for (String kBase : modelMethod.getKieBaseNames()) {
            sb.append("            case \"" + kBase + "\": ");
            List<String> models = modelsByKBase.get(kBase);
            String collected = null;
            if (models != null) {
                // Emit "new X(), new Y()" constructor calls for each mapped model.
                collected = models.stream()
                    .map(element -> "new " + element + "()")
                    .collect(Collectors.joining(","));
            }
            // No (or empty) mapping: the generated case returns all models.
            sb.append(collected != null && !collected.isEmpty()
                ? "return java.util.Arrays.asList( " + collected + " );\n"
                : "return getModels();\n");
        }
        sb.append("        }\n");
    }
    sb.append(
        "        throw new IllegalArgumentException(\"Unknown KieBase: \" + kieBaseName);\n"
        + "    }\n"
        + "\n"
    );
}
// The generated case must list whatever model names are mapped for the
// KieBase, even if they do not match the KieBase's own model name.
@Test
public void addGetModelForKieBaseMethodUnmatchingModelsByKBaseValuesTest() {
    KieBaseModel kieBaseModel = getKieBaseModel("ModelTest");
    Map<String, KieBaseModel> kBaseModels = new HashMap<>();
    kBaseModels.put("default-kie", kieBaseModel);
    List<String> modelByKBaseValues = Collections.singletonList("NotModelTest");
    Map<String, List<String>> modelsByKBase = new HashMap<>();
    modelsByKBase.put("default-kie", modelByKBaseValues);
    ModelSourceClass modelSourceClass = new ModelSourceClass(RELEASE_ID, kBaseModels, modelsByKBase);
    StringBuilder sb = new StringBuilder();
    modelSourceClass.addGetModelForKieBaseMethod(sb);
    String retrieved = sb.toString();
    String expected = "switch (kieBaseName) {";
    assertThat(retrieved.contains(expected)).isTrue();
    expected = "case \"default-kie\": return java.util.Arrays.asList( new NotModelTest() );";
    assertThat(retrieved.contains(expected)).isTrue();
}
/** Returns the key of this entry. */
@Override
public K getKey() {
    return key;
}
// Exercises the Samza-specific readIterator() extension of MapState: after four elements
// have been processed, iterating the map must yield the distinct entries in sorted key
// order (duplicate puts of the same key/value collapse to one entry).
@Test
public void testMapStateIterator() {
    final String stateId = "foo";
    final String countStateId = "count";
    DoFn<KV<String, KV<String, Integer>>, KV<String, Integer>> fn =
        new DoFn<KV<String, KV<String, Integer>>, KV<String, Integer>>() {

          @StateId(stateId)
          private final StateSpec<MapState<String, Integer>> mapState =
              StateSpecs.map(StringUtf8Coder.of(), VarIntCoder.of());

          // Counts processed elements so we know when all four inputs have arrived.
          @StateId(countStateId)
          private final StateSpec<CombiningState<Integer, int[], Integer>> countState =
              StateSpecs.combiningFromInputInternal(VarIntCoder.of(), Sum.ofIntegers());

          @ProcessElement
          public void processElement(
              ProcessContext c,
              @StateId(stateId) MapState<String, Integer> mapState,
              @StateId(countStateId) CombiningState<Integer, int[], Integer> count) {
            // Cast to the Samza implementation to reach readIterator(), which is not
            // part of the generic Beam MapState API.
            SamzaMapState<String, Integer> state = (SamzaMapState<String, Integer>) mapState;
            KV<String, Integer> value = c.element().getValue();
            state.put(value.getKey(), value.getValue());
            count.add(1);
            if (count.read() >= 4) {
              final List<KV<String, Integer>> content = new ArrayList<>();
              final Iterator<Map.Entry<String, Integer>> iterator = state.readIterator().read();
              while (iterator.hasNext()) {
                Map.Entry<String, Integer> entry = iterator.next();
                content.add(KV.of(entry.getKey(), entry.getValue()));
                c.output(KV.of(entry.getKey(), entry.getValue()));
              }
              // The duplicate ("b", 42) input must appear only once.
              assertEquals(
                  content, ImmutableList.of(KV.of("a", 97), KV.of("b", 42), KV.of("c", 12)));
            }
          }
        };

    PCollection<KV<String, Integer>> output =
        pipeline
            .apply(
                Create.of(
                    KV.of("hello", KV.of("a", 97)),
                    KV.of("hello", KV.of("b", 42)),
                    KV.of("hello", KV.of("b", 42)), // duplicate on purpose
                    KV.of("hello", KV.of("c", 12))))
            .apply(ParDo.of(fn));
    PAssert.that(output).containsInAnyOrder(KV.of("a", 97), KV.of("b", 42), KV.of("c", 12));
    pipeline.run();
}
/**
 * Discretizes the value of the referenced field.
 *
 * @param processingDTO source of field/derived values
 * @return {@code mapMissingTo} when no input value is available, otherwise the bin value
 *         matching the input, or {@code defaultValue} when no bin matches
 */
@Override
public Object evaluate(final ProcessingDTO processingDTO) {
    final Object rawValue = getFromPossibleSources(name, processingDTO).orElse(null);
    if (rawValue == null) {
        // Missing input is mapped to the configured replacement (may itself be null).
        return mapMissingTo;
    }
    final Number numericInput = (Number) rawValue;
    return getFromDiscretizeBins(numericInput).orElse(defaultValue);
}
// With no input value available, evaluate must return null when no mapMissingTo is
// configured, and the configured mapMissingTo value otherwise.
@Test
void evaluateNoInput() {
    KiePMMLDiscretize kiePMMLDiscretize = getKiePMMLDiscretize(null, null);
    ProcessingDTO processingDTO = getProcessingDTO(Collections.emptyList());
    Object retrieved = kiePMMLDiscretize.evaluate(processingDTO);
    assertThat(retrieved).isNull();
    // Same missing input, but now a replacement value is configured.
    kiePMMLDiscretize = getKiePMMLDiscretize(MAP_MISSING_TO, null);
    retrieved = kiePMMLDiscretize.evaluate(processingDTO);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).isEqualTo(MAP_MISSING_TO);
}
/**
 * Creates a parameter value from a submitted form.
 *
 * @param req the request the JSON payload came from
 * @param jo  submitted JSON describing the chosen value
 * @return the bound and validated value
 * @throws IllegalArgumentException if the submitted value is not one of the allowed choices
 */
@Override
public ParameterValue createValue(StaplerRequest req, JSONObject jo) {
    final StringParameterValue boundValue = req.bindJSON(StringParameterValue.class, jo);
    boundValue.setDescription(getDescription());
    // Reject values that are not among the configured choices.
    checkValue(boundValue, boundValue.getValue());
    return boundValue;
}
// Submitting a value outside the configured choices must be rejected.
@Test
@Issue("JENKINS-62889")
public void createValue_Invalid() {
    ChoiceParameterDefinition parameterDefinition =
            new ChoiceParameterDefinition("name", new String[]{"single"}, "description");
    assertThrows(IllegalArgumentException.class, () -> parameterDefinition.createValue("invalid"));
}
/**
 * Two QoS descriptions are equal when all identifying and rate-limiting
 * attributes (id, type, max rate, CIR, CBS, queues) match.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof DefaultQosDescription)) {
        return false;
    }
    final DefaultQosDescription that = (DefaultQosDescription) obj;
    return Objects.equals(this.qosId, that.qosId)
            && Objects.equals(this.type, that.type)
            && Objects.equals(this.maxRate, that.maxRate)
            && Objects.equals(this.cir, that.cir)
            && Objects.equals(this.cbs, that.cbs)
            && Objects.equals(this.queues, that.queues);
}
// Checks equals/hashCode contracts: equal descriptions group together, each differing
// description forms its own equality group.
@Test
public void testEquals() {
    new EqualsTester()
            .addEqualityGroup(defaultQosDescription1, sameAsDefaultQosDescription1)
            .addEqualityGroup(defaultQosDescription2)
            .addEqualityGroup(defaultQosDescription3)
            .addEqualityGroup(defaultQosDescription4)
            .addEqualityGroup(defaultQosDescription5)
            .addEqualityGroup(defaultQosDescription6)
            .testEquals();
}
/**
 * Returns the constant keyed by {@code <fully-qualified-class-name>#<secondNameComponent>}.
 *
 * @throws NullPointerException if either component is {@code null}
 */
public T valueOf(Class<?> firstNameComponent, String secondNameComponent) {
    // Validate both components before composing the lookup key.
    final String className = checkNotNull(firstNameComponent, "firstNameComponent").getName();
    final String suffix = checkNotNull(secondNameComponent, "secondNameComponent");
    return valueOf(className + '#' + suffix);
}
// The pool must intern constants: two lookups with equal (but not identical) name strings
// return the same instance. The redundant String constructor is deliberate, to guarantee
// the two keys are distinct objects.
@Test
@SuppressWarnings("RedundantStringConstructorCall")
public void testUniqueness() {
    TestConstant a = pool.valueOf(new String("Leroy"));
    TestConstant b = pool.valueOf(new String("Leroy"));
    assertThat(a, is(sameInstance(b)));
}
/**
 * Downloads the file content from the Box API, optionally resuming at an offset.
 *
 * @param file     remote file; its Box file id is resolved via {@code fileid}
 * @param status   transfer state; when {@code isAppend()} is set a Range header is added
 * @param callback unused here (no interactive prompt needed for a plain download)
 * @return response stream that releases the HTTP connection on close
 * @throws BackgroundException mapped from any underlying I/O failure
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        final BoxApiClient client = new BoxApiClient(session.getClient());
        final HttpGet request = new HttpGet(String.format("%s/files/%s/content", client.getBasePath(), fileid.getFileId(file)));
        if(status.isAppend()) {
            // Resume: request only the remaining byte range.
            final HttpRange range = HttpRange.withStatus(status);
            final String header;
            if(-1 == range.getEnd()) {
                // Unknown total length: open-ended range from the current offset.
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            request.addHeader(new BasicHeader(HttpHeaders.RANGE, header));
        }
        final CloseableHttpResponse response = session.getClient().execute(request);
        // Wrapper closes the response (and frees the connection) when the stream is closed.
        return new HttpMethodReleaseInputStream(response, status);
    }
    catch(IOException e) {
        throw new HttpExceptionMappingService().map("Download {0} failed", e, file);
    }
}
// Uploads 1432 random bytes, then reads with append=true, offset=100 and an unknown
// total length (-1): the returned stream must contain exactly the bytes after the offset.
@Test
public void testReadRangeUnknownLength() throws Exception {
    final BoxFileidProvider fileid = new BoxFileidProvider(session);
    final Path test = new Path(new DefaultHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final byte[] content = RandomUtils.nextBytes(1432);
    final OutputStream out = new BoxWriteFeature(session, fileid).write(test,
            new TransferStatus().withLength(content.length), new DisabledConnectionCallback());
    assertNotNull(out);
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    // Resume from byte 100 with length unknown -> open-ended Range header.
    final TransferStatus status = new TransferStatus();
    status.setLength(-1L);
    status.setAppend(true);
    status.setOffset(100L);
    final InputStream in = new BoxReadFeature(session, fileid).read(test, status, new DisabledConnectionCallback());
    assertNotNull(in);
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length - 100);
    new StreamCopier(status, status).transfer(in, buffer);
    // Expected content is everything after the first 100 bytes.
    final byte[] reference = new byte[content.length - 100];
    System.arraycopy(content, 100, reference, 0, content.length - 100);
    assertArrayEquals(reference, buffer.toByteArray());
    in.close();
    new BoxDeleteFeature(session, fileid).delete(Collections.<Path>singletonList(test),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/** Returns the managed certificate authority. */
public ProxyCA getProxyCA() {
    return proxyCA;
}
// Lifecycle check: init() must not touch the CA or the state store; start() must
// initialize the CA exactly once and persist its certificate and private key.
@Test
public void testBasics() throws Exception {
    ProxyCA proxyCA = spy(new ProxyCA());
    RMContext rmContext = mock(RMContext.class);
    RMStateStore rmStateStore = mock(RMStateStore.class);
    when(rmContext.getStateStore()).thenReturn(rmStateStore);
    ProxyCAManager proxyCAManager = new ProxyCAManager(proxyCA, rmContext);
    proxyCAManager.init(new YarnConfiguration());
    Assert.assertEquals(proxyCA, proxyCAManager.getProxyCA());
    // After init(): nothing stored, CA not initialized yet.
    verify(rmContext, times(0)).getStateStore();
    verify(rmStateStore, times(0)).storeProxyCACert(any(), any());
    verify(proxyCA, times(0)).init();
    Assert.assertNull(proxyCA.getCaCert());
    Assert.assertNull(proxyCA.getCaKeyPair());
    proxyCAManager.start();
    // After start(): CA initialized and its cert/key persisted exactly once.
    verify(rmContext, times(1)).getStateStore();
    verify(rmStateStore, times(1)).storeProxyCACert(proxyCA.getCaCert(),
            proxyCA.getCaKeyPair().getPrivate());
    verify(proxyCA, times(1)).init();
    Assert.assertNotNull(proxyCA.getCaCert());
    Assert.assertNotNull(proxyCA.getCaKeyPair());
}
/** Returns the number of values evaluated so far. */
public int getCount() {
    return count;
}
// Every evaluated string must be counted, including leap-day and invalid dates.
@Test
public void testGetCount() {
    List<String> dates = Arrays.asList(
        "02/29/2000",
        "03/29/2000" );
    dates.forEach(evaluator::evaluateString);
    assertEquals(dates.size(), evaluator.getCount());
}
/**
 * Runs the field-value alert check: computes the configured statistic (mean/min/max/
 * sum/stddev) of {@code field} over the last {@code time} minutes and compares it
 * against {@code threshold} according to {@code thresholdType}.
 *
 * @return a triggered {@code CheckResult} (with optional message backlog) when the
 *         comparison matches, a {@code NegativeCheckResult} otherwise; {@code null}
 *         only on the unreachable invalid-range path
 */
@Override
public CheckResult runCheck() {
    try {
        final String filter = buildQueryFilter(stream.getId(), query);
        // TODO we don't support cardinality yet
        final FieldStatsResult fieldStatsResult = searches.fieldStats(field, "*", filter,
                RelativeRange.create(time * 60), false, true, false);
        if (fieldStatsResult.count() == 0) {
            // No matching messages at all -> nothing to evaluate.
            LOG.debug("Alert check <{}> did not match any messages. Returning not triggered.", type);
            return new NegativeCheckResult();
        }
        // Pick the statistic corresponding to the configured check type.
        final double result;
        switch (type) {
            case MEAN:
                result = fieldStatsResult.mean();
                break;
            case MIN:
                result = fieldStatsResult.min();
                break;
            case MAX:
                result = fieldStatsResult.max();
                break;
            case SUM:
                result = fieldStatsResult.sum();
                break;
            case STDDEV:
                result = fieldStatsResult.stdDeviation();
                break;
            default:
                LOG.error("No such field value check type: [{}]. Returning not triggered.", type);
                return new NegativeCheckResult();
        }
        LOG.debug("Alert check <{}> result: [{}]", id, result);
        if (Double.isInfinite(result)) {
            // This happens when there are no ES results/docs.
            LOG.debug("Infinite value. Returning not triggered.");
            return new NegativeCheckResult();
        }
        // Compare against the threshold in the configured direction.
        final boolean triggered;
        switch (thresholdType) {
            case HIGHER:
                triggered = result > threshold.doubleValue();
                break;
            case LOWER:
                triggered = result < threshold.doubleValue();
                break;
            default:
                triggered = false;
        }
        if (triggered) {
            final String resultDescription = "Field " + field + " had a " + type + " of "
                    + decimalFormat.format(result) + " in the last " + time
                    + " minutes with trigger condition " + thresholdType + " than "
                    + decimalFormat.format(threshold) + ". "
                    + "(Current grace time: " + grace + " minutes)";
            // Attach up to getBacklog() matching messages as context for the alert.
            final List<MessageSummary> summaries;
            if (getBacklog() > 0) {
                final List<ResultMessage> searchResult = fieldStatsResult.searchHits();
                summaries = Lists.newArrayListWithCapacity(searchResult.size());
                for (ResultMessage resultMessage : searchResult) {
                    final Message msg = resultMessage.getMessage();
                    summaries.add(new MessageSummary(resultMessage.getIndex(), msg));
                }
            } else {
                summaries = Collections.emptyList();
            }
            return new CheckResult(true, this, resultDescription, Tools.nowUTC(), summaries);
        } else {
            return new NegativeCheckResult();
        }
    } catch (InvalidRangeParametersException e) {
        // cannot happen lol
        LOG.error("Invalid timerange.", e);
        return null;
    } catch (FieldTypeException e) {
        // Non-numeric or missing field: treat as not triggered rather than failing.
        LOG.debug("Field [{}] seems not to have a numerical type or doesn't even exist at all. Returning not triggered.", field, e);
        return new NegativeCheckResult();
    }
}
// For every check type, a statistic below a HIGHER threshold must leave the alert
// untriggered.
@Test
public void testRunCheckHigherNegative() throws Exception {
    for (FieldValueAlertCondition.CheckType checkType : FieldValueAlertCondition.CheckType.values()) {
        final double threshold = 50.0;
        final double lowerThanThreshold = threshold - 10;
        FieldValueAlertCondition fieldValueAlertCondition = getFieldValueAlertCondition(
                getParametersMap(0, 0, FieldValueAlertCondition.ThresholdType.HIGHER, checkType, threshold, "response_time"),
                alertConditionTitle);
        // Stub the search layer to report a value below the threshold.
        fieldStatsShouldReturn(getFieldStatsResult(checkType, lowerThanThreshold));
        AlertCondition.CheckResult result = fieldValueAlertCondition.runCheck();
        assertNotTriggered(result);
    }
}
/**
 * Reads all error observations from the log buffer, starting at the beginning.
 * Convenience overload of {@code read(buffer, consumer, 0)}.
 *
 * @param buffer   buffer containing the error log
 * @param consumer callback invoked once per distinct error
 * @return the number of distinct errors read
 */
public static int read(final AtomicBuffer buffer, final ErrorConsumer consumer) {
    return read(buffer, consumer, 0);
}
// Two different exception types recorded at different times must be read back as two
// distinct observations, in recording order, each with count 1.
@Test
void shouldReadTwoDistinctObservations() {
    final ErrorConsumer consumer = mock(ErrorConsumer.class);
    final long timestampOne = 7;
    final long timestampTwo = 10;
    final RuntimeException errorOne = new RuntimeException("Test Error One");
    final IllegalStateException errorTwo = new IllegalStateException("Test Error Two");
    // First record gets timestampOne, second gets timestampTwo.
    when(clock.time()).thenReturn(timestampOne).thenReturn(timestampTwo);
    log.record(errorOne);
    log.record(errorTwo);

    assertThat(ErrorLogReader.read(buffer, consumer), is(2));

    // First and last observed timestamps coincide for a single-occurrence error.
    final InOrder inOrder = inOrder(consumer);
    inOrder.verify(consumer).accept(eq(1), eq(timestampOne), eq(timestampOne), any(String.class));
    inOrder.verify(consumer).accept(eq(1), eq(timestampTwo), eq(timestampTwo), any(String.class));
}
@Override public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { try { if (defaultTrustManager != null) { defaultTrustManager.checkServerTrusted(chain, authType); } } catch (CertificateException ce) { // If the certificate chain couldn't be verified using the default trust manager, // try verifying the same with the user-provided root CA if (userTrustManager != null) { userTrustManager.checkServerTrusted(chain, authType); } } }
// A certificate the trust manager recognizes must pass verification without throwing.
@Test
public void testCustomX509TrustManagerWithRecognizedCertificate() throws CertificateException {
    customTrustManager.checkServerTrusted(
        new X509Certificate[] {recognizedSelfSignedCertificate}, "RSA");
}
/**
 * De-registers the service instance from the Polaris server and stops heartbeating.
 * A missing service id or an instance that was never registered is a no-op (warn only);
 * failures are logged but not rethrown. The heartbeat executor is always shut down once
 * a de-registration is attempted.
 */
@Override
public void deregister(PolarisRegistration registration) {
    LOGGER.info("De-registering from Polaris Server now...");
    // Nothing to do when there is no service id or we never registered in the first place.
    if (StringUtils.isEmpty(registration.getServiceId()) || !PolarisSDKContextManager.isRegistered) {
        LOGGER.warn("No dom to de-register for polaris client...");
        return;
    }
    InstanceDeregisterRequest deRegisterRequest = new InstanceDeregisterRequest();
    deRegisterRequest.setToken(polarisDiscoveryProperties.getToken());
    deRegisterRequest.setNamespace(polarisDiscoveryProperties.getNamespace());
    deRegisterRequest.setService(registration.getServiceId());
    deRegisterRequest.setHost(registration.getHost());
    deRegisterRequest.setPort(registration.getPort());
    try {
        ProviderAPI providerClient = polarisSDKContextManager.getProviderAPI();
        providerClient.deRegister(deRegisterRequest);
        // Only clear the flag after the server acknowledged the de-registration.
        PolarisSDKContextManager.isRegistered = false;
        LOGGER.info("De-registration finished.");
    }
    catch (Exception e) {
        LOGGER.error("ERR_POLARIS_DEREGISTER, de-register failed...{},", registration, e);
    }
    finally {
        // Stop heartbeats regardless of whether de-registration succeeded.
        if (null != heartbeatExecutor) {
            heartbeatExecutor.shutdown();
        }
    }
}
// Deregistering a registration without a service id must be a silent no-op, not an error.
@Test
public void testDeRegister() {
    this.contextRunner.run(context -> {
        PolarisServiceRegistry registry = context.getBean(PolarisServiceRegistry.class);
        PolarisRegistration registration = Mockito.mock(PolarisRegistration.class);
        doReturn(null).when(registration).getServiceId();
        assertThatCode(() -> {
            registry.deregister(registration);
        }).doesNotThrowAnyException();
    });
}
/**
 * Compares two media types after normalization: lower-cased (English locale) with all
 * spaces removed. A {@code null} on either side is never equal, including null vs null.
 */
protected static boolean equals(@Nullable MediaType first, @Nullable MediaType second) {
    if (first == null || second == null) {
        return false;
    }
    String normalizedFirst = first.toString().toLowerCase(ENGLISH).replace(" ", "");
    String normalizedSecond = second.toString().toLowerCase(ENGLISH).replace(" ", "");
    return normalizedFirst.equals(normalizedSecond);
}
// Normalized media-type comparison: nulls never match, case is ignored, embedded spaces
// are stripped, and different types are unequal.
@Test
public void check_mediaTypes_equality() {
    assertThat(underTest.equals(null, null)).isFalse();
    assertThat(underTest.equals(MediaType.parse("application/json"), null)).isFalse();
    assertThat(underTest.equals(null, MediaType.parse("application/json"))).isFalse();
    assertThat(underTest.equals(MediaType.parse("application/ json"), MediaType.parse("text/html; charset=UTF-8"))).isFalse();
    // Case-insensitive match.
    assertThat(underTest.equals(MediaType.parse("application/Json"), MediaType.parse("application/JSON"))).isTrue();
}