focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Converts a raw String value into the data type described by this value metadata,
 * applying null-marker matching, null substitution and trimming before delegating
 * the actual type conversion to {@code convertData}.
 *
 * @param pol         the raw String value read from the input (may be null or empty)
 * @param convertMeta metadata describing the source value; must not be null
 * @param nullIf      String that represents null; when null, a type-specific Const default is used
 * @param ifNull      replacement used when the value matches the null marker or is empty
 * @param trim_type   one of the ValueMetaInterface.TRIM_TYPE_* constants
 * @return the converted value, or null when the input represents null
 * @throws KettleValueException when convertMeta is null or the underlying conversion fails
 */
@Override
public Object convertDataFromString( String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull,
  int trim_type ) throws KettleValueException {
  if ( convertMeta == null ) {
    throw new KettleValueException( "API coding error: convertMeta input parameter should not be equals to null" );
  }
  // null handling and conversion of value to null
  //
  String null_value = nullIf;
  int inValueType = convertMeta.getType();
  int outValueType = getType();
  if ( null_value == null ) {
    // No explicit null marker supplied: fall back to the type-specific default.
    switch ( inValueType ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
        null_value = Const.NULL_BOOLEAN;
        break;
      case ValueMetaInterface.TYPE_STRING:
        null_value = Const.NULL_STRING;
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        null_value = Const.NULL_BIGNUMBER;
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        null_value = Const.NULL_NUMBER;
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        null_value = Const.NULL_INTEGER;
        break;
      case ValueMetaInterface.TYPE_DATE:
        null_value = Const.NULL_DATE;
        break;
      case ValueMetaInterface.TYPE_BINARY:
        null_value = Const.NULL_BINARY;
        break;
      default:
        null_value = Const.NULL_NONE;
        break;
    }
  }
  // See if we need to convert a null value into a String
  // For example, we might want to convert null into "Empty".
  //
  if ( !Utils.isEmpty( ifNull ) ) {
    // Note that you can't pull the pad method up here as a nullComp variable
    // because you could get an NPE since you haven't checked isEmpty(pol)
    // yet!
    if ( Utils.isEmpty( pol )
        || pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
      pol = ifNull;
    }
  }
  // See if the polled value is empty
  // In that case, we have a null value on our hands...
  boolean isStringValue = outValueType == Value.VALUE_TYPE_STRING;
  Object emptyValue = isStringValue ? Const.NULL_STRING : null;
  // Both behaviors below are switchable via system properties ("N" by default).
  Boolean isEmptyAndNullDiffer = convertStringToBoolean(
      Const.NVL( System.getProperty( Const.KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL, "N" ), "N" ) );
  Boolean normalizeNullStringToEmpty = !convertStringToBoolean(
      Const.NVL( System.getProperty( Const.KETTLE_DO_NOT_NORMALIZE_NULL_STRING_TO_EMPTY, "N" ), "N" ) );
  if ( normalizeNullStringToEmpty ) {
    if ( pol == null && isStringValue && isEmptyAndNullDiffer ) {
      pol = Const.NULL_STRING;
    }
  }
  if ( pol == null ) {
    return null;
  } else if ( Utils.isEmpty( pol ) && !isStringValue ) {
    // An empty String converted to any non-String type is treated as null.
    return null;
  } else {
    // if the null_value is specified, we try to match with that.
    //
    if ( !Utils.isEmpty( null_value ) ) {
      if ( null_value.length() <= pol.length() ) {
        // If the polled value is equal to the spaces right-padded null_value,
        // we have a match
        //
        if ( pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
          return emptyValue;
        }
      }
    } else {
      Boolean normalizeSpacesOnlyString = !convertStringToBoolean(
          Const.NVL( System.getProperty( Const.KETTLE_DO_NOT_NORMALIZE_SPACES_ONLY_STRING_TO_EMPTY, "N" ), "N" ) );
      // Verify if there are only spaces in the polled value...
      // We consider that empty as well...
      //
      if ( Const.onlySpaces( pol ) && normalizeSpacesOnlyString ) {
        return emptyValue;
      }
    }
  }
  // Trimming: strip leading and/or trailing spaces (space character only, not all whitespace).
  StringBuilder strpol;
  switch ( trim_type ) {
    case ValueMetaInterface.TRIM_TYPE_LEFT:
      strpol = new StringBuilder( pol );
      while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
        strpol.deleteCharAt( 0 );
      }
      pol = strpol.toString();
      break;
    case ValueMetaInterface.TRIM_TYPE_RIGHT:
      strpol = new StringBuilder( pol );
      while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
        strpol.deleteCharAt( strpol.length() - 1 );
      }
      pol = strpol.toString();
      break;
    case ValueMetaInterface.TRIM_TYPE_BOTH:
      strpol = new StringBuilder( pol );
      while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
        strpol.deleteCharAt( 0 );
      }
      while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
        strpol.deleteCharAt( strpol.length() - 1 );
      }
      pol = strpol.toString();
      break;
    default:
      break;
  }
  // On with the regular program...
  // Simply call the ValueMeta routines to do the conversion
  // We need to do some effort here: copy all
  //
  return convertData( convertMeta, pol );
}
/**
 * Converting an empty String to a Date must return null rather than fail.
 */
@Test
public void testConvertDataFromStringToDate() throws KettleValueException {
  ValueMetaBase inValueMetaString = new ValueMetaString();
  ValueMetaBase outValueMetaDate = new ValueMetaDate();
  String inputValueEmptyString = StringUtils.EMPTY;
  String nullIf = null;
  String ifNull = null;
  int trim_type = 0; // no trimming
  Object result = outValueMetaDate.convertDataFromString( inputValueEmptyString, inValueMetaString, nullIf, ifNull, trim_type );
  // Fix: the original called assertEquals( message, result, null ), which swaps the
  // JUnit4 "expected" and "actual" arguments; assertNull states the intent directly
  // and produces a correct failure message.
  assertNull( "Conversion from empty string to date must return null", result );
}
/**
 * Returns the task manager frequency in seconds.
 * Prefers the new period-based property; falls back to the deprecated
 * seconds-based property (with its default) when the period is absent.
 */
public int getTaskManagerFrequencyInSeconds() {
  Optional<Integer> fromPeriod =
      Optional.ofNullable(getProperty(ControllerPeriodicTasksConf.TASK_MANAGER_FREQUENCY_PERIOD))
          .map(period -> (int) convertPeriodToSeconds(period));
  return fromPeriod.orElseGet(() -> getProperty(
      ControllerPeriodicTasksConf.DEPRECATED_TASK_MANAGER_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_TASK_MANAGER_FREQUENCY_IN_SECONDS));
}
@Test
public void shouldBeAbleToDisableUsingNewConfig() {
  // An absent period property falls back to the deprecated default (-1 = disabled).
  ControllerConf conf = new ControllerConf(new HashMap<>());
  Assert.assertEquals(conf.getTaskManagerFrequencyInSeconds(), -1);
  // "0s" converts to exactly zero seconds.
  Map<String, Object> controllerConfig = new HashMap<>();
  controllerConfig.put(TASK_MANAGER_FREQUENCY_PERIOD, "0s");
  Assert.assertEquals(new ControllerConf(controllerConfig).getTaskManagerFrequencyInSeconds(), 0);
  // A negative period ("-1s") disables the task manager as well.
  controllerConfig = new HashMap<>();
  controllerConfig.put(TASK_MANAGER_FREQUENCY_PERIOD, "-1s");
  Assert.assertEquals(new ControllerConf(controllerConfig).getTaskManagerFrequencyInSeconds(), -1);
}
/**
 * Creates the admin executor matching the statement type, if any is applicable.
 *
 * @param sqlStatementContext context carrying the parsed SQL statement
 * @return the executor for SHOW FUNCTION STATUS / SHOW PROCEDURE STATUS / SHOW TABLES,
 *         or empty when the statement is none of these
 */
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
    SQLStatement statement = sqlStatementContext.getSqlStatement();
    DatabaseAdminExecutor executor = null;
    if (statement instanceof ShowFunctionStatusStatement) {
        executor = new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) statement);
    } else if (statement instanceof ShowProcedureStatusStatement) {
        executor = new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) statement);
    } else if (statement instanceof ShowTablesStatement) {
        executor = new ShowTablesExecutor((ShowTablesStatement) statement, sqlStatementContext.getDatabaseType());
    }
    return Optional.ofNullable(executor);
}
/**
 * A SELECT of an expression with no FROM clause and no current database should be
 * routed to the unicast resource executor.
 */
@Test
void assertCreateWithOtherSelectStatementForNullDatabaseName() {
    // Register a single database "db_0" in the mocked proxy context.
    ResourceMetaData resourceMetaData = new ResourceMetaData(Collections.singletonMap("ds_0", new MockedDataSource()));
    ShardingSphereDatabase database = new ShardingSphereDatabase("db_0", databaseType, resourceMetaData, mock(RuleMetaData.class), Collections.emptyMap());
    Map<String, ShardingSphereDatabase> result = Collections.singletonMap("db_0", database);
    initProxyContext(result);
    when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("db_0"));
    when(ProxyContext.getInstance().getContextManager().getDatabase("db_0")).thenReturn(database);
    // Mock "select CURRENT_DATE()" — an expression projection with no FROM segment.
    MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class);
    when(selectStatement.getFrom()).thenReturn(Optional.empty());
    ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class);
    when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_DATE()")));
    when(selectStatement.getProjections()).thenReturn(projectionsSegment);
    when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement);
    // Null database name: the creator must still pick an executor.
    Optional<DatabaseAdminExecutor> actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_DATE()", null, Collections.emptyList());
    assertTrue(actual.isPresent());
    assertThat(actual.get(), instanceOf(UnicastResourceShowExecutor.class));
}
/**
 * Resolves the concrete protobuf message class that a {@link ProtoSerializable}
 * implementation serializes to, by walking the class hierarchy to bind the
 * SERIALIZED_CLASS type parameter of ProtoSerializable.
 *
 * @param protoSerializable the instance whose serialized message class is wanted
 * @return the resolved protobuf message class
 * @throws IllegalArgumentException when the type parameter cannot be resolved
 *         (e.g. the implementing class leaves it generic)
 */
static <SERIALIZED_CLASS extends Message, PROTO_SERIALIZABLE extends ProtoSerializable<SERIALIZED_CLASS>> Class<SERIALIZED_CLASS> getSerializedClass(PROTO_SERIALIZABLE protoSerializable) {
    // The cast is safe when resolution succeeds: the resolved class is by construction
    // the binding of ProtoSerializable's first type parameter.
    @SuppressWarnings("unchecked")
    Class<SERIALIZED_CLASS> serializedClass = (Class<SERIALIZED_CLASS>) resolveTypeParameter(ProtoSerializable.class, protoSerializable.getClass(), ProtoSerializable.class.getTypeParameters()[0]);
    if (serializedClass != null) {
        return serializedClass;
    }
    String tpName = ProtoSerializable.class.getTypeParameters()[0].getName();
    throw new IllegalArgumentException("unable to resolve type parameter '" + tpName + "' in ProtoSerializable<" + tpName + "> for class " + protoSerializable.getClass().getName());
}
/**
 * Exercises serialized-class resolution across concrete classes, inherited bindings,
 * interface implementations, and an unresolvable generic case.
 */
@Test
void testGetSerializedClass() throws Exception {
    // Variable-info hierarchy: all serialize to VariableInfoProto.
    CategoricalInfo ci = CategoricalInfoTest.generateProtoTestInfo();
    assertEquals(VariableInfoProto.class, ProtoUtil.getSerializedClass(ci));
    CategoricalIDInfo cidi = ci.makeIDInfo(12345);
    assertEquals(VariableInfoProto.class, ProtoUtil.getSerializedClass(cidi));
    VariableInfo ridi = new RealIDInfo("bob", 100, 1000.0, 0.0, 25.0, 125.0, 12345);
    assertEquals(VariableInfoProto.class, ProtoUtil.getSerializedClass(ridi));
    RealInfo ri = new RealInfo("bob", 100, 1000.0, 0.0, 25.0, 125.0);
    assertEquals(VariableInfoProto.class, ProtoUtil.getSerializedClass(ri));
    // Feature maps serialize to FeatureDomainProto.
    MutableFeatureMap mfm = new MutableFeatureMap();
    mfm.add("goldrat", 1.618033988749);
    mfm.add("e", Math.E);
    mfm.add("pi", Math.PI);
    HashedFeatureMap hfm = HashedFeatureMap.generateHashedFeatureMap(mfm, new MessageDigestHasher("SHA-512", "abcdefghi"));
    assertEquals(FeatureDomainProto.class, ProtoUtil.getSerializedClass(hfm));
    // Hashers serialize to HasherProto.
    ModHashCodeHasher mdch = new ModHashCodeHasher(200, "abcdefghi");
    assertEquals(HasherProto.class, ProtoUtil.getSerializedClass(mdch));
    MessageDigestHasher mdh = new MessageDigestHasher("SHA-256", "abcdefghi");
    assertEquals(HasherProto.class, ProtoUtil.getSerializedClass(mdh));
    HashCodeHasher hch = new HashCodeHasher("abcdefghi");
    assertEquals(HasherProto.class, ProtoUtil.getSerializedClass(hch));
    // Test fixtures exercising indirect/inherited type-parameter bindings.
    assertEquals(CategoricalIDInfoProto.class, ProtoUtil.getSerializedClass(new PSC()));
    assertEquals(RealIDInfoProto.class, ProtoUtil.getSerializedClass(new PSD2()));
    assertEquals(RealIDInfoProto.class, ProtoUtil.getSerializedClass(new PSC2()));
    // A class that leaves the parameter generic cannot be resolved.
    assertThrows(IllegalArgumentException.class, () -> ProtoUtil.getSerializedClass(new PSB2<RealInfoProto>()));
}
/**
 * Identifies this element as a snake; always {@code true} for this class.
 */
@Override
public boolean isSnake() {
    return true;
}
@Test void should_be_a_snake() { // GIVEN Snake snake = new Snake(0, 0, null); // WHEN boolean isSnake = snake.isSnake(); // THEN assertThat(isSnake).isTrue(); }
/**
 * Creates a ShardingSphereDatabase, resolving the protocol type from the
 * configuration and properties before delegating to the full factory method.
 *
 * @throws SQLException when database creation fails
 */
public static ShardingSphereDatabase create(final String databaseName, final DatabaseConfiguration databaseConfig,
                                            final ConfigurationProperties props, final ComputeNodeInstanceContext computeNodeInstanceContext) throws SQLException {
    DatabaseType protocolType = DatabaseTypeEngine.getProtocolType(databaseConfig, props);
    return ShardingSphereDatabase.create(
            databaseName, protocolType, DatabaseTypeEngine.getStorageTypes(databaseConfig), databaseConfig, props, computeNodeInstanceContext);
}
@Test
void assertCreateDatabaseMap() throws SQLException {
    // A configuration with no data sources and no rules...
    DatabaseConfiguration databaseConfig = new DataSourceProvidedDatabaseConfiguration(Collections.emptyMap(), Collections.emptyList());
    Map<String, ShardingSphereDatabase> databases = ExternalMetaDataFactory.create(
            Collections.singletonMap("foo_db", databaseConfig), new ConfigurationProperties(new Properties()), mock(ComputeNodeInstanceContext.class));
    // ...must still be registered under its name, with no storage units.
    assertTrue(databases.containsKey("foo_db"));
    assertTrue(databases.get("foo_db").getResourceMetaData().getStorageUnits().isEmpty());
}
/**
 * Returns the default result type for the given calculation function index,
 * or TYPE_NONE when the index is outside the lookup table.
 *
 * @param type a CALC_* function index
 * @return the matching ValueMetaInterface.TYPE_* constant
 */
public static int getCalcFunctionDefaultResultType( int type ) {
  boolean inRange = type >= 0 && type < calcDefaultResultType.length;
  return inRange ? calcDefaultResultType[ type ] : ValueMetaInterface.TYPE_NONE;
}
@Test
public void testGetCalcFunctionDefaultResultType() {
  // Out-of-range indexes (negative or beyond the table) all map to TYPE_NONE.
  int[] outOfRange = { Integer.MIN_VALUE, Integer.MAX_VALUE, -1 };
  for ( int type : outOfRange ) {
    assertEquals( ValueMetaInterface.TYPE_NONE, CalculatorMetaFunction.getCalcFunctionDefaultResultType( type ) );
  }
  // Known functions map to their expected default result types.
  assertEquals( ValueMetaInterface.TYPE_STRING,
      CalculatorMetaFunction.getCalcFunctionDefaultResultType( CalculatorMetaFunction.CALC_CONSTANT ) );
  assertEquals( ValueMetaInterface.TYPE_NUMBER,
      CalculatorMetaFunction.getCalcFunctionDefaultResultType( CalculatorMetaFunction.CALC_ADD ) );
}
/**
 * NFS REMOVE RPC entry point: extracts the security handler and client address
 * from the RPC info and delegates to the overload that takes them explicitly.
 */
@Override
public REMOVE3Response remove(XDR xdr, RpcInfo info) {
    return remove(xdr, getSecurityHandler(info), info.remoteAddress());
}
/**
 * REMOVE must be rejected for an unprivileged caller and succeed for a
 * privileged one on the same request.
 */
@Test(timeout = 60000)
public void testRemove() throws Exception {
    // Build a REMOVE request for "bar" under the test directory's file handle.
    HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
    long dirId = status.getFileId();
    int namenodeId = Nfs3Utils.getNamenodeId(config);
    XDR xdr_req = new XDR();
    FileHandle handle = new FileHandle(dirId, namenodeId);
    REMOVE3Request req = new REMOVE3Request(handle, "bar");
    req.serialize(xdr_req);
    // Attempt by an unpriviledged user should fail.
    REMOVE3Response response1 = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
    // Attempt by a priviledged user should pass.
    REMOVE3Response response2 = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
/**
 * Issues one backup auth credential per day in the requested redemption window.
 *
 * @param account         the account requesting credentials
 * @param redemptionStart start of the window; must be day-aligned and not in the past
 * @param redemptionEnd   end of the window (inclusive); must be day-aligned and within
 *                        MAX_REDEMPTION_DURATION of the start of today
 * @return a future completing with one Credential per day in the window
 */
public CompletableFuture<List<Credential>> getBackupAuthCredentials(
    final Account account,
    final Instant redemptionStart,
    final Instant redemptionEnd) {
  // If the account has an expired payment, clear it before continuing
  if (hasExpiredVoucher(account)) {
    return accountsManager.updateAsync(account, a -> {
      // Re-check in case we raced with an update
      if (hasExpiredVoucher(a)) {
        a.setBackupVoucher(null);
      }
    }).thenCompose(updated -> getBackupAuthCredentials(updated, redemptionStart, redemptionEnd));
  }
  // If this account isn't allowed some level of backup access via configuration, don't continue
  final BackupLevel configuredBackupLevel = configuredBackupLevel(account).orElseThrow(() ->
      Status.PERMISSION_DENIED.withDescription("Backups not allowed on account").asRuntimeException());
  final Instant startOfDay = clock.instant().truncatedTo(ChronoUnit.DAYS);
  // Reject windows that are inverted, start in the past, extend too far into the
  // future, or whose endpoints are not aligned to day boundaries.
  if (redemptionStart.isAfter(redemptionEnd) ||
      redemptionStart.isBefore(startOfDay) ||
      redemptionEnd.isAfter(startOfDay.plus(MAX_REDEMPTION_DURATION)) ||
      !redemptionStart.equals(redemptionStart.truncatedTo(ChronoUnit.DAYS)) ||
      !redemptionEnd.equals(redemptionEnd.truncatedTo(ChronoUnit.DAYS))) {
    throw Status.INVALID_ARGUMENT.withDescription("invalid redemption window").asRuntimeException();
  }
  // fetch the blinded backup-id the account should have previously committed to
  final byte[] committedBytes = account.getBackupCredentialRequest();
  if (committedBytes == null) {
    throw Status.NOT_FOUND.withDescription("No blinded backup-id has been added to the account").asRuntimeException();
  }
  try {
    // create a credential for every day in the requested period
    final BackupAuthCredentialRequest credentialReq = new BackupAuthCredentialRequest(committedBytes);
    return CompletableFuture.completedFuture(Stream
        .iterate(redemptionStart, curr -> curr.plus(Duration.ofDays(1)))
        .takeWhile(redemptionTime -> !redemptionTime.isAfter(redemptionEnd))
        .map(redemptionTime -> {
          // Check if the account has a voucher that's good for a certain receiptLevel at
          // redemption time, otherwise use the default receipt level
          final BackupLevel backupLevel = storedBackupLevel(account, redemptionTime).orElse(configuredBackupLevel);
          return new Credential(
              credentialReq.issueCredential(redemptionTime, backupLevel, serverSecretParams),
              redemptionTime);
        })
        .toList());
  } catch (InvalidInputException e) {
    throw Status.INTERNAL
        .withDescription("Could not deserialize stored request credential")
        .withCause(e)
        .asRuntimeException();
  }
}
/**
 * An expired backup voucher must be cleared before issuing credentials, and the
 * clearing updater must re-check expiry so it never wipes a freshly renewed voucher.
 */
@Test
void expiredBackupPayment() {
    final Instant day1 = Instant.EPOCH.plus(Duration.ofDays(1));
    final Instant day2 = Instant.EPOCH.plus(Duration.ofDays(2));
    final Instant day3 = Instant.EPOCH.plus(Duration.ofDays(3));
    final BackupAuthManager authManager = create(BackupLevel.MESSAGES, false);
    // Account whose voucher expired at day1 (clock is pinned just after day2).
    final Account account = mock(Account.class);
    when(account.getUuid()).thenReturn(aci);
    when(account.getBackupVoucher()).thenReturn(new Account.BackupVoucher(3, day1));
    // The post-update account: voucher cleared, credential request present.
    final Account updated = mock(Account.class);
    when(updated.getUuid()).thenReturn(aci);
    when(updated.getBackupCredentialRequest()).thenReturn(backupAuthTestUtil.getRequest(backupKey, aci).serialize());
    when(updated.getBackupVoucher()).thenReturn(null);
    when(accountsManager.updateAsync(any(), any())).thenReturn(CompletableFuture.completedFuture(updated));
    clock.pin(day2.plus(Duration.ofSeconds(1)));
    // A 7-day window yields 8 day-aligned credentials (endpoints inclusive).
    assertThat(authManager.getBackupAuthCredentials(account, day2, day2.plus(Duration.ofDays(7))).join())
        .hasSize(8);
    @SuppressWarnings("unchecked")
    final ArgumentCaptor<Consumer<Account>> accountUpdater = ArgumentCaptor.forClass(Consumer.class);
    verify(accountsManager, times(1)).updateAsync(any(), accountUpdater.capture());
    // If the account is not expired when we go to update it, we shouldn't wipe it out
    final Account alreadyUpdated = mock(Account.class);
    when(alreadyUpdated.getBackupVoucher()).thenReturn(new Account.BackupVoucher(3, day3));
    accountUpdater.getValue().accept(alreadyUpdated);
    verify(alreadyUpdated, never()).setBackupVoucher(any());
    // If the account is still expired when we go to update it, we can wipe it out
    final Account expired = mock(Account.class);
    when(expired.getBackupVoucher()).thenReturn(new Account.BackupVoucher(3, day1));
    accountUpdater.getValue().accept(expired);
    verify(expired, times(1)).setBackupVoucher(null);
}
/**
 * Builds a metaverse node for an external resource, naming it after the resource
 * and typing it via this analyzer's resource input node type.
 *
 * @param resource the external resource to represent
 * @return the created node
 * @throws MetaverseException when node creation fails
 */
@Override
public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException {
  // Describe the node within this analyzer's namespace/context, then build it.
  MetaverseComponentDescriptor resourceDescriptor = new MetaverseComponentDescriptor( resource.getName(),
      getResourceInputNodeType(), descriptor.getNamespace(), descriptor.getContext() );
  return createNodeFromDescriptor( resourceDescriptor );
}
@Test
public void testCreateResourceNode() throws Exception {
  // A resource named after a REST URL...
  IExternalResourceInfo resource = mock( IExternalResourceInfo.class );
  when( resource.getName() ).thenReturn( "http://my.rest.url" );
  // ...must yield a web-service node carrying that URL as its name.
  IMetaverseNode node = analyzer.createResourceNode( resource );
  assertNotNull( node );
  assertEquals( DictionaryConst.NODE_TYPE_WEBSERVICE, node.getType() );
  assertEquals( "http://my.rest.url", node.getName() );
}
/**
 * Returns whether the given decimal represents an integral value.
 * Zero is integral regardless of scale; otherwise a non-positive scale
 * (possibly after stripping trailing zeros) means there are no fractional digits.
 *
 * @param value the decimal to inspect
 * @return true when the value has no fractional part
 */
public static boolean isIntegerValue(BigDecimal value) {
    if (value.signum() == 0) {
        return true;
    }
    if (value.scale() <= 0) {
        return true;
    }
    // e.g. 2.0 has scale 1 but strips to scale 0.
    return value.stripTrailingZeros().scale() <= 0;
}
@Test
public void testIsIntegerValue() {
    // Zero and exact longs are integral.
    assertTrue(Numeric.isIntegerValue(BigDecimal.ZERO));
    assertTrue(Numeric.isIntegerValue(BigDecimal.ZERO));
    assertTrue(Numeric.isIntegerValue(BigDecimal.valueOf(Long.MAX_VALUE)));
    assertTrue(Numeric.isIntegerValue(BigDecimal.valueOf(Long.MIN_VALUE)));
    // Trailing-zero decimals are still integral, even beyond the long range.
    String hugeInteger = "9999999999999999999999999999999999999999999999999999999999999999.0";
    assertTrue(Numeric.isIntegerValue(new BigDecimal(hugeInteger)));
    assertTrue(Numeric.isIntegerValue(new BigDecimal("-" + hugeInteger)));
    // Any non-zero fractional part disqualifies, in either sign.
    assertFalse(Numeric.isIntegerValue(BigDecimal.valueOf(0.1)));
    assertFalse(Numeric.isIntegerValue(BigDecimal.valueOf(-0.1)));
    assertFalse(Numeric.isIntegerValue(BigDecimal.valueOf(1.1)));
    assertFalse(Numeric.isIntegerValue(BigDecimal.valueOf(-1.1)));
}
/**
 * Loads a tenant by its id via the mapper.
 *
 * @param id tenant id
 * @return the tenant record, or whatever the mapper returns (presumably null)
 *         when no row matches — NOTE(review): confirm mapper semantics
 */
@Override
public TenantDO getTenant(Long id) {
    return tenantMapper.selectById(id);
}
@Test public void testGetTenant() { // mock 数据 TenantDO dbTenant = randomPojo(TenantDO.class); tenantMapper.insert(dbTenant);// @Sql: 先插入出一条存在的数据 // 准备参数 Long id = dbTenant.getId(); // 调用 TenantDO result = tenantService.getTenant(id); // 校验存在 assertPojoEquals(result, dbTenant); }
/**
 * Prepares fetch requests for all fetchable partitions and sends them.
 *
 * @return the number of fetch requests prepared (one per target node)
 */
public synchronized int sendFetches() {
    final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
    sendFetchesInternal(
            fetchRequests,
            // Completion callbacks re-acquire the Fetcher monitor before mutating
            // state — NOTE(review): presumably because they may run on a different
            // thread than the caller holding `synchronized` here; confirm.
            (fetchTarget, data, clientResponse) -> {
                synchronized (Fetcher.this) {
                    handleFetchSuccess(fetchTarget, data, clientResponse);
                }
            },
            (fetchTarget, data, error) -> {
                synchronized (Fetcher.this) {
                    handleFetchFailure(fetchTarget, data, error);
                }
            });
    return fetchRequests.size();
}
/**
 * A READ_COMMITTED consumer must skip an abort marker that has no preceding data
 * and still return the subsequent committed records.
 */
@Test
public void testReadCommittedAbortMarkerWithNoData() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new StringDeserializer(), new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long producerId = 1L;
    // Log layout: abort marker at 5 with no data before it, then records 6-8
    // committed at 9.
    abortTransaction(buffer, producerId, 5L);
    appendTransactionalRecords(buffer, producerId, 6L,
        new SimpleRecord("6".getBytes(), null),
        new SimpleRecord("7".getBytes(), null),
        new SimpleRecord("8".getBytes(), null));
    commitTransaction(buffer, producerId, 9L);
    buffer.flip();
    // send the fetch
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, sendFetches());
    // prepare the response. the aborted transactions begin at offsets which are no longer in the log
    List<FetchResponseData.AbortedTransaction> abortedTransactions = Collections.singletonList(
        new FetchResponseData.AbortedTransaction().setProducerId(producerId).setFirstOffset(0L));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetchRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    // Only the committed records 6-8 are visible to the consumer.
    assertEquals(3, fetchedRecords.size());
    assertEquals(Arrays.asList(6L, 7L, 8L), collectRecordOffsets(fetchedRecords));
}
/**
 * Whether this credential pair is usable: both the access key and the secret key
 * must be present and non-empty.
 */
public boolean valid() {
    boolean hasAccessKey = accessKey != null && !accessKey.isEmpty();
    boolean hasSecretKey = secretKey != null && !secretKey.isEmpty();
    return hasAccessKey && hasSecretKey;
}
@Test void testValid() { //given String ak = "ak"; String sk = "sk"; String tenantId = "100"; Credentials credentials = new Credentials(ak, sk, tenantId); //when boolean actual = credentials.valid(); //then assertTrue(actual); }
/**
 * Whether the plugin declared support for the dashboard analytics type.
 */
public boolean supportsDashboardAnalytics() {
    return hasSupportFor(DASHBOARD_TYPE);
}
@Test
public void shouldSupportDashboardAnalyticsIfPluginListsDashboardMetricsAsCapability() {
    // The dashboard type is matched case-insensitively.
    Capabilities lowerCase = new Capabilities(List.of(new SupportedAnalytics("dashboard", "id", "title")));
    assertTrue(lowerCase.supportsDashboardAnalytics());
    Capabilities mixedCase = new Capabilities(List.of(new SupportedAnalytics("DashBoard", "id", "title")));
    assertTrue(mixedCase.supportsDashboardAnalytics());
    // No supported analytics at all means no dashboard support.
    assertFalse(new Capabilities(Collections.emptyList()).supportsDashboardAnalytics());
}
/**
 * Aggregates per-partition String column statistics into a single statistics object.
 * When every partition carries a mergeable NDV bit vector, the distinct-value count
 * is computed by merging the vectors; when only some partitions have stats, the
 * missing ones are extrapolated.
 *
 * @param colStatsWithSourceInfo per-partition stats for one column
 * @param partNames              names of all partitions requested
 * @param areAllPartsFound       whether stats were found for every requested partition
 * @return the aggregated statistics object
 * @throws MetaException when the input statistics list is invalid
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType = null;
  String colName = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
  NumDistinctValueEstimator ndvEstimator = null;
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    if (statsObj == null) {
      // First entry: initialize the result object from this column's metadata.
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
      LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName,
          doAllPartitionContainStats);
    }
    StringColumnStatsDataInspector stringColumnStatsData = stringInspectorFromStats(cso);
    if (stringColumnStatsData.getNdvEstimator() == null) {
      // Missing bit vector: bit-vector merging is impossible for this column.
      ndvEstimator = null;
      break;
    } else {
      // check if all of the bit vectors can merge
      NumDistinctValueEstimator estimator = stringColumnStatsData.getNdvEstimator();
      if (ndvEstimator == null) {
        ndvEstimator = estimator;
      } else {
        if (ndvEstimator.canMerge(estimator)) {
          continue;
        } else {
          // Incompatible estimator types: give up on bit-vector merging.
          ndvEstimator = null;
          break;
        }
      }
    }
  }
  if (ndvEstimator != null) {
    // Start from an empty estimator of the same kind to accumulate merges into.
    ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
  }
  LOG.debug("all of the bit vectors can merge for " + colName + " is " + (ndvEstimator != null));
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
    // Simple case: aggregate all available stats directly.
    StringColumnStatsDataInspector aggregateData = null;
    for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
      ColumnStatisticsObj cso = csp.getColStatsObj();
      StringColumnStatsDataInspector newData = stringInspectorFromStats(cso);
      if (ndvEstimator != null) {
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setMaxColLen(Math.max(aggregateData.getMaxColLen(), newData.getMaxColLen()));
        aggregateData.setAvgColLen(Math.max(aggregateData.getAvgColLen(), newData.getAvgColLen()));
        aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        aggregateData.setNumDVs(Math.max(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (ndvEstimator != null) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      // aggregateData already has the ndv of the max of all
    }
    columnStatisticsData.setStringStats(aggregateData);
  } else {
    // TODO: bail out if missing stats are over a certain threshold
    // we need extrapolation
    LOG.debug("start extrapolation for " + colName);
    Map<String, Integer> indexMap = new HashMap<>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
    if (ndvEstimator == null) {
      // if not every partition uses bitvector for ndv, we just fall back to
      // the traditional extrapolation methods.
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      StringColumnStatsDataInspector aggregateData = null;
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        StringColumnStatsDataInspector newData = stringInspectorFromStats(cso);
        // newData.isSetBitVectors() should be true for sure because we
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setStringStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
            ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          aggregateData.setAvgColLen(Math.max(aggregateData.getAvgColLen(), newData.getAvgColLen()));
          aggregateData.setMaxColLen(Math.max(aggregateData.getMaxColLen(), newData.getMaxColLen()));
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (length > 0) {
        // Flush the final pseudo-partition.
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setStringStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
        adjustedIndexMap, adjustedStatsMap, -1);
  }
  LOG.debug(
      "Ndv estimatation for {} is {} # of partitions requested: {} # of partitions found: {}",
      colName, columnStatisticsData.getStringStats().getNumDVs(), partNames.size(),
      colStatsWithSourceInfo.size());
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
/**
 * When only some partitions have stats and their NDV sketches are of different,
 * unmergeable kinds (FM sketch vs HLL), aggregation must fall back to extrapolation.
 */
@Test
public void testAggregateMultiStatsOnlySomeAvailableButUnmergeableBitVector() throws MetaException {
    List<String> partitions = Arrays.asList("part1", "part2", "part3");
    // part1 carries an FM sketch, part3 an HLL; part2 has no stats at all.
    ColumnStatisticsData data1 = new ColStatsBuilder<>(String.class).numNulls(1).numDVs(3).avgColLen(20.0 / 3).maxColLen(13)
        .fmSketch(S_1, S_2, S_3).build();
    ColumnStatisticsData data3 = new ColStatsBuilder<>(String.class).numNulls(3).numDVs(2).avgColLen(17.5).maxColLen(18)
        .hll(S_6, S_7).build();
    List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
        createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
        createStatsWithInfo(data3, TABLE, COL, partitions.get(2)));
    StringColumnStatsAggregator aggregator = new StringColumnStatsAggregator();
    ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false);
    // hll in case of missing stats is left as null, only numDVs is updated
    ColumnStatisticsData expectedStats = new ColStatsBuilder<>(String.class).numNulls(6).numDVs(3)
        .avgColLen(22.916666666666668).maxColLen(22).build();
    Assert.assertEquals(expectedStats, computedStatsObj.getStatsData());
    // both useDensityFunctionForNDVEstimation and ndvTuner are ignored by StringColumnStatsAggregator
    aggregator.useDensityFunctionForNDVEstimation = true;
    computedStatsObj = aggregator.aggregate(statsList, partitions, false);
    Assert.assertEquals(expectedStats, computedStatsObj.getStatsData());
}
/**
 * Validates an encrypt rule configuration: every configured encryptor must be
 * well-formed, and every table's columns must reference registered encryptors.
 */
@Override
public void check(final String databaseName, final EncryptRuleConfiguration ruleConfig, final Map<String, DataSource> dataSourceMap, final Collection<ShardingSphereRule> builtRules) {
    checkEncryptors(ruleConfig.getEncryptors());
    checkTables(databaseName, ruleConfig.getTables(), ruleConfig.getEncryptors());
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void assertCheckWhenConfigInvalidCipherColumn() {
    // A cipher column referencing an unregistered encryptor must be rejected.
    EncryptRuleConfiguration config = createInvalidCipherColumnConfiguration();
    RuleConfigurationChecker checker =
            OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
    assertThrows(UnregisteredAlgorithmException.class,
            () -> checker.check("test", config, Collections.emptyMap(), Collections.emptyList()));
}
/**
 * With no argument, prints the current setting; with one argument ("ON"/"OFF",
 * case-insensitive), updates the setting and confirms, rejecting anything else.
 */
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
    CliCmdUtil.ensureArgCountBounds(args, 0, 1, HELP);
    if (args.isEmpty()) {
        // Query mode: report the current configuration.
        final String setting = requestPipeliningSupplier.get() ? "ON" : "OFF";
        terminal.printf("Current %s configuration: %s%n", NAME, setting);
        return;
    }
    final String newSetting = args.get(0);
    final String normalized = newSetting.toUpperCase();
    if ("ON".equals(normalized)) {
        requestPipeliningConsumer.accept(true);
    } else if ("OFF".equals(normalized)) {
        requestPipeliningConsumer.accept(false);
    } else {
        terminal.printf("Invalid %s setting: %s. ", NAME, newSetting);
        terminal.println("Valid options are 'ON' and 'OFF'.");
        return;
    }
    terminal.println(NAME + " configuration is now " + normalized);
}
@Test public void shouldUpdateSettingToOff() { // When: requestPipeliningCommand.execute(ImmutableList.of("OFF"), terminal); // Then: verify(settingConsumer).accept(false); }
/**
 * Maps a failure to its retry-reason abbreviation. Every ranked category is
 * consulted in order and the last non-null abbreviation wins, so categories
 * later in {@code rankedReasonCategories} override earlier matches.
 *
 * @return the winning abbreviation, or null when no category matches
 */
static String getAbbreviation(Exception ex, Integer statusCode, String storageErrorMessage) {
    String abbreviation = null;
    for (RetryReasonCategory category : rankedReasonCategories) {
        String candidate = category.captureAndGetAbbreviation(ex, statusCode, storageErrorMessage);
        if (candidate != null) {
            abbreviation = candidate;
        }
    }
    return abbreviation;
}
// Unit test: a bare 403 status (no exception, no storage message) abbreviates to the status code itself.
@Test public void test4xxStatusRetryReason() { Assertions.assertThat(RetryReason.getAbbreviation(null, HTTP_FORBIDDEN, null)) .describedAs("Abbreviation for 4xx should be equal to 4xx") .isEqualTo(HTTP_FORBIDDEN + ""); }
// Resolves the host name for an "host[:port]" address, caching successful lookups in
// HOST_NAME_CACHE. Any failure (deliberately caught as Throwable, best-effort) falls
// back to returning the port-stripped input address unchanged.
public static String getHostName(String address) { try { int i = address.indexOf(':'); if (i > -1) { address = address.substring(0, i); } String hostname = HOST_NAME_CACHE.get(address); if (hostname != null && hostname.length() > 0) { return hostname; } InetAddress inetAddress = InetAddress.getByName(address); if (inetAddress != null) { hostname = inetAddress.getHostName(); HOST_NAME_CACHE.put(address, hostname); return hostname; } } catch (Throwable e) { // ignore } return address; }
// Unit test: resolving the loopback address never yields null (falls back to the input on failure).
@Test void testGetHostName() { assertNotNull(NetUtils.getHostName("127.0.0.1")); }
/**
 * Rounds {@code value} up to the next multiple of {@code alignment}.
 * {@code alignment} must be a positive power of two; since two's-complement
 * {@code -alignment == ~(alignment - 1)}, adding the mask then clearing the
 * low bits performs the round-up in two operations.
 */
public static int align(final int value, final int alignment) {
    final int mask = alignment - 1;
    return (value + mask) & ~mask;
}
// Unit test for the long overload of align(): boundary cases including the wrap-around
// at Long.MAX_VALUE (which intentionally overflows to Long.MIN_VALUE).
@Test void shouldAlignValueToNextMultipleOfAlignmentLong() { final long alignment = CACHE_LINE_LENGTH; assertThat(align(0L, alignment), is(0L)); assertThat(align(1L, alignment), is(alignment)); assertThat(align(alignment, alignment), is(alignment)); assertThat(align(alignment + 1, alignment), is(alignment * 2)); final long remainder = Long.MAX_VALUE % alignment; final long maxMultiple = Long.MAX_VALUE - remainder; assertThat(align(maxMultiple, alignment), is(maxMultiple)); assertThat(align(Long.MAX_VALUE, alignment), is(Long.MIN_VALUE)); }
// Tallies one block's (storage types, specified policy) combination. On first sight of a
// combination the actual policy is resolved from the storage types and cached on the key;
// subsequent identical combinations only bump the count. Also increments totalBlocks.
void add(StorageType[] storageTypes, BlockStoragePolicy policy) { StorageTypeAllocation storageCombo = new StorageTypeAllocation(storageTypes, policy); Long count = storageComboCounts.get(storageCombo); if (count == null) { storageComboCounts.put(storageCombo, 1l); storageCombo.setActualStoragePolicy( getStoragePolicy(storageCombo.getStorageTypes())); } else { storageComboCounts.put(storageCombo, count.longValue()+1); } totalBlocks++; }
// Unit test: blocks with identical storage-type layouts but different specified policies
// must be tallied as distinct combinations; verifies the full summary map of 9 entries.
@Test public void testDifferentSpecifiedPolicies() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy hot = bsps.getPolicy("HOT"); BlockStoragePolicy warm = bsps.getPolicy("WARM"); BlockStoragePolicy cold = bsps.getPolicy("COLD"); //DISK:3 sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},warm); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},cold); //DISK:1,ARCHIVE:2 sts.add(new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.DISK,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},cold); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},cold); //ARCHIVE:3 sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(9,actualOutput.size()); Map<String, Long> expectedOutput = new HashMap<>(); expectedOutput.put("HOT|DISK:3(HOT)", 2l); expectedOutput.put("COLD|DISK:1,ARCHIVE:2(WARM)", 2l); expectedOutput.put("HOT|ARCHIVE:3(COLD)", 2l); expectedOutput.put("WARM|DISK:3(HOT)", 1l); expectedOutput.put("COLD|DISK:3(HOT)", 1l); expectedOutput.put("WARM|ARCHIVE:3(COLD)", 1l); expectedOutput.put("WARM|DISK:1,ARCHIVE:2(WARM)", 1l); 
expectedOutput.put("COLD|ARCHIVE:3(COLD)", 1l); expectedOutput.put("HOT|DISK:1,ARCHIVE:2(WARM)", 1l); Assert.assertEquals(expectedOutput,actualOutput); }
/**
 * Returns the configured file name, falling back to the transformation path
 * when no explicit file name has been set (null or empty).
 */
@Override public String getFileName() {
    if ( Strings.isNullOrEmpty( this.fileName ) ) {
        return this.getTransformationPath();
    }
    return this.fileName;
}
// Unit test: getFileName() falls back to the transformation path until a file name is set.
@Test public void testGetFileName() { meta = new StuffStreamMeta(); String testPathName = "transformationPathName"; String testFileName = "testFileName"; // verify that when the fileName is not set, we get the transformation path meta.setTransformationPath( testPathName ); assertThat( meta.getFileName(), equalTo( testPathName ) ); // verify that when the fileName is set, we get it meta.setFileName( testFileName ); assertThat( meta.getFileName(), equalTo( testFileName ) ); }
// Builds the cluster resource spec from an effective SparkConf assembled in precedence order:
// values from the cluster spec first, then confOverrides on top, and finally the Kubernetes
// namespace is forced to the cluster's own metadata namespace.
public SparkClusterResourceSpec getResourceSpec( SparkCluster cluster, Map<String, String> confOverrides) { SparkConf effectiveSparkConf = new SparkConf(); Map<String, String> confFromSpec = cluster.getSpec().getSparkConf(); if (MapUtils.isNotEmpty(confFromSpec)) { for (Map.Entry<String, String> entry : confFromSpec.entrySet()) { effectiveSparkConf.set(entry.getKey(), entry.getValue()); } } if (MapUtils.isNotEmpty(confOverrides)) { for (Map.Entry<String, String> entry : confOverrides.entrySet()) { effectiveSparkConf.set(entry.getKey(), entry.getValue()); } } effectiveSparkConf.set("spark.kubernetes.namespace", cluster.getMetadata().getNamespace()); return new SparkClusterResourceSpec(cluster, effectiveSparkConf); }
// Unit test: a spec built with no overrides exposes non-null master service and stateful sets.
@Test void testGetResourceSpec() { SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker(); SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Collections.emptyMap()); // SparkClusterResourceSpecTest will cover the detail information of easy resources assertNotNull(spec.getMasterService()); assertNotNull(spec.getMasterStatefulSet()); assertNotNull(spec.getWorkerStatefulSet()); }
/**
 * Resolves a classpath resource (e.g. "/pathTest.json") to an absolute filesystem path.
 *
 * @param path classpath-relative resource path
 * @return absolute path string of the resource
 * @throws DataXException when the resource is missing or its URL is not a valid URI
 */
public static String getAbsolutePathFromClassPath(String path) {
    URL resource = PathUtil.class.getResource(path);
    // FIX: the old code relied on `assert resource != null` (a no-op unless the JVM
    // runs with -ea) and then caught the resulting NullPointerException. Use an
    // explicit null check so a missing resource is reported deterministically.
    if (resource == null) {
        throw DataXException.asDataXException("path error,please check whether the path is correct");
    }
    try {
        URI uri = resource.toURI();
        return Paths.get(uri).toString();
    } catch (URISyntaxException e) {
        throw DataXException.asDataXException("path error,please check whether the path is correct");
    }
}
// Unit test: a resource that exists on the test classpath resolves to a non-null absolute path.
@Test public void testParseClassPathFile() { String path = "/pathTest.json"; String absolutePathFromClassPath = PathUtil.getAbsolutePathFromClassPath(path); Assert.assertNotNull(absolutePathFromClassPath); }
// REST endpoint: bulk-imports grok patterns from a plain-text upload. All patterns are
// validated up front (all-or-nothing — nothing is saved if any pattern is invalid).
// The explicit import-strategy query param wins over the deprecated "replace" flag,
// which maps to DROP_ALL_EXISTING when true, ABORT_ON_CONFLICT otherwise.
@POST @Consumes(MediaType.TEXT_PLAIN) @Timed @ApiOperation("Add a list of new patterns") @AuditEvent(type = AuditEventTypes.GROK_PATTERN_IMPORT_CREATE) public Response bulkUpdatePatternsFromTextFile(@ApiParam(name = "patterns", required = true) @NotNull InputStream patternsFile, // deprecated. used to drop all existing patterns before import @Deprecated @QueryParam("replace") @DefaultValue("false") boolean deprecatedDropAllExisting, @ApiParam(name = "import-strategy", value = "Strategy to apply when importing.") @QueryParam("import-strategy") ImportStrategy importStrategy) throws ValidationException, IOException { checkPermission(RestPermissions.INPUTS_CREATE); final List<GrokPattern> grokPatterns = readGrokPatterns(patternsFile); if (!grokPatterns.isEmpty()) { try { if (!grokPatternService.validateAll(grokPatterns)) { throw new ValidationException("Invalid pattern contained. Did not save any patterns."); } } catch (GrokException | IllegalArgumentException e) { throw new ValidationException("Invalid pattern. Did not save any patterns\n" + e.getMessage()); } ImportStrategy resolvedStrategy = importStrategy != null ? importStrategy : deprecatedDropAllExisting ? ImportStrategy.DROP_ALL_EXISTING : ImportStrategy.ABORT_ON_CONFLICT; grokPatternService.saveAll(grokPatterns, resolvedStrategy); } return Response.accepted().build(); }
// Unit test: LF-separated pattern upload is accepted (202, no entity) and eventually
// publishes a GrokPatternsUpdatedEvent naming the imported pattern.
@Test public void bulkUpdatePatternsFromTextFileWithLF() throws Exception { final String patterns = Arrays.stream(GROK_LINES).collect(Collectors.joining("\n")); final ByteArrayInputStream inputStream = new ByteArrayInputStream(patterns.getBytes(StandardCharsets.UTF_8)); final GrokPattern expectedPattern = GrokPattern.create("TEST_PATTERN_0", "Foo"); final Response response = grokResource.bulkUpdatePatternsFromTextFile(inputStream, true, null); assertThat(response.getStatusInfo()).isEqualTo(Response.Status.ACCEPTED); assertThat(response.hasEntity()).isFalse(); await() .atMost(Durations.FIVE_SECONDS) .until(() -> !subscriber.events.isEmpty()); assertThat(subscriber.events) .containsOnly(GrokPatternsUpdatedEvent.create(Collections.singleton(expectedPattern.name()))); }
// Streams formatted Kafka records to the output until the connection closes, the stream
// errors, or the message limit is reached. Every `interval`-th polled message is emitted;
// an empty poll prints a blank line as a keep-alive. Format headers are emitted lazily by
// FormatsTracker once the formatter has decided on formats. Any exception is logged and
// reported to the client; the consumer is always closed in the finally block.
@Override public void write(final OutputStream out) { try { final PrintStream print = new PrintStream(out, true, "UTF8"); final RecordFormatter formatter = new RecordFormatter(schemaRegistryClient, topicName); final FormatsTracker formatsTracker = new FormatsTracker(print); while (!connectionClosed && !print.checkError() && !limitReached.test(messagesWritten)) { final ConsumerRecords<Bytes, Bytes> records = topicConsumer.poll(disconnectCheckInterval); if (records.isEmpty()) { print.println(); continue; } final List<String> values = formatter.format(records.records(topicName)); if (values.isEmpty()) { continue; } final List<String> toOutput = new ArrayList<>(); for (final String value : values) { if (messagesPolled++ % interval == 0) { messagesWritten++; toOutput.add(value); } if (limitReached.test(messagesWritten)) { break; } } formatsTracker.update(formatter); toOutput.forEach(print::println); } } catch (final Exception exception) { log.error("Exception encountered while writing to output stream", exception); outputException(out, exception); } finally { close(); } }
// Unit test: interval=1 with a limit of 2 writes the format headers plus exactly two records.
@Test public void shouldIntervalOneAndLimitTwo() { // Given: final TopicStreamWriter writer = new TopicStreamWriter( schemaRegistry, kafkaConsumer, "topic", 1, Duration.ZERO, OptionalInt.of(2), new CompletableFuture<>() ); // When: writer.write(out); // Then: final List<String> expected = ImmutableList.of( "Key format: ", "KAFKA_STRING", System.lineSeparator(), "Value format: ", "KAFKA_STRING", System.lineSeparator(), "rowtime: N/A, key: key-0, value: value-0, partition: 0", System.lineSeparator(), "rowtime: N/A, key: key-1, value: value-1, partition: 0", System.lineSeparator() ); out.assertWrites(expected); }
// Factory for a stream source that reads text lines from a TCP socket in the given charset;
// the source name embeds host:port for diagnostics.
@Nonnull public static StreamSource<String> socket( @Nonnull String host, int port, @Nonnull Charset charset ) { return streamFromProcessor( "socketSource(" + host + ':' + port + ')', streamSocketP(host, port, charset) ); }
// Integration-style test: a local server feeds two connections three lines each; the socket
// source must deliver all six items to the sink.
@Test public void socket() throws Exception { // Given try (ServerSocket socket = new ServerSocket(8176)) { spawn(() -> uncheckRun(() -> { Socket accept1 = socket.accept(); Socket accept2 = socket.accept(); PrintWriter writer1 = new PrintWriter(accept1.getOutputStream()); writer1.write("hello1 \n"); writer1.flush(); PrintWriter writer2 = new PrintWriter(accept2.getOutputStream()); writer2.write("hello2 \n"); writer2.flush(); writer1.write("world1 \n"); writer1.write("jet1 \n"); writer1.flush(); writer2.write("world2 \n"); writer2.write("jet2 \n"); writer2.flush(); accept1.close(); accept2.close(); })); // When StreamSource<String> source = Sources.socket("localhost", 8176, UTF_8); // Then p.readFrom(source).withoutTimestamps().writeTo(sink); execute(); assertEquals(6, sinkList.size()); } }
// Populates `env` from a configuration property: first parses the k=v list in the property
// value (without expanding variables yet), then overlays per-variable sub-properties
// ("propName.VAR=value", which may themselves contain '='), and finally expands the merged
// map into the target environment.
public static void setEnvFromInputProperty(Map<String, String> env, String propName, String defaultPropValue, Configuration conf, String classPathSeparator) { String envString = conf.get(propName, defaultPropValue); // Get k,v pairs from string into a tmp env. Note that we don't want // to expand the env var values, because we will do that below - // don't want to do it twice. Map<String, String> tmpEnv = new HashMap<String, String>(); Apps.setEnvFromInputStringNoExpand(tmpEnv, envString, classPathSeparator); // Get map of props with prefix propName. // (e.g., map.reduce.env.ENV_VAR_NAME=value) Map<String, String> inputMap = conf.getPropsWithPrefix(propName + "."); // Entries from map should override entries from input string. tmpEnv.putAll(inputMap); // Add them to the environment setEnvFromInputStringMap(env, tmpEnv, classPathSeparator); }
// Unit test: per-variable sub-properties override the comma list, may contain commas and '=',
// and the default property value is ignored when the main property is set.
@Test void testSetEnvFromInputPropertyCommas() { Configuration conf = new Configuration(false); Map<String, String> env = new HashMap<>(); String propName = "mapreduce.reduce.env"; conf.set(propName, "env1=env1_val,env2=env2_val,env3=env3_val"); conf.set(propName + ".env2", "new2_val1,new2_val2,new2_val3"); conf.set(propName + ".env4", "new4_valwith=equals"); // Setup some default values - we shouldn't see these values String defaultPropName = "mapreduce.child.env"; conf.set(defaultPropName, "env1=def1_val,env2=def2_val,env3=def3_val"); String defaultPropValue = conf.get(defaultPropName); Apps.setEnvFromInputProperty(env, propName, defaultPropValue, conf, File.pathSeparator); // Check values from string assertEquals("env1_val", env.get("env1")); assertEquals("env3_val", env.get("env3")); // Check individual value assertEquals("new4_valwith=equals", env.get("env4")); // Check individual value that eclipses one in string assertEquals("new2_val1,new2_val2,new2_val3", env.get("env2")); }
public static String[] splitString( String string, String separator ) { /* * 0123456 Example a;b;c;d --> new String[] { a, b, c, d } */ // System.out.println("splitString ["+path+"] using ["+separator+"]"); List<String> list = new ArrayList<>(); if ( string == null || string.length() == 0 ) { return new String[] {}; } int sepLen = separator.length(); int from = 0; int end = string.length() - sepLen + 1; for ( int i = from; i < end; i += sepLen ) { if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) { // OK, we found a separator, the string to add to the list // is [from, i[ list.add( nullToEmpty( string.substring( from, i ) ) ); from = i + sepLen; } } // Wait, if the string didn't end with a separator, we still have information at the end of the string... // In our example that would be "d"... if ( from + sepLen <= string.length() ) { list.add( nullToEmpty( string.substring( from, string.length() ) ) ); } return list.toArray( new String[list.size()] ); }
// Unit test for the 3-arg splitString overload (delimiter + enclosure): enclosure markers
// protect embedded delimiter text from splitting.
@Test public void testSplitStringWithMultipleCharacterDelimiterAndEnclosure() { // Check for multiple-character strings String[] result = Const.splitString( "html this is a web page html</newpage>html and so is this html", "</newpage>", "html" ); assertNotNull( result ); assertEquals( 2, result.length ); assertEquals( "html this is a web page html", result[0] ); assertEquals( "html and so is this html", result[1] ); }
// Convenience overload: delegates to the 5-arg replace starting from index 0 with
// case-sensitive matching.
public static String replace(CharSequence str, CharSequence searchStr, CharSequence replacement) { return replace(str, 0, searchStr, replacement, false); }
// Regression test (hutool I4M16G): a search string longer than the input must leave it untouched.
@Test public void replaceTest2() { // https://gitee.com/dromara/hutool/issues/I4M16G String replace = "#{A}"; String result = CharSequenceUtil.replace(replace, "#{AAAAAAA}", "1"); assertEquals(replace, result); }
// Serializes any object to JSON via the shared Jackson mapper; @SneakyThrows rethrows
// JsonProcessingException unchecked rather than wrapping it.
@Override @SneakyThrows public String toJSONString(Object object) { return MAPPER.writeValueAsString(object); }
// Unit test: serialization of primitives, strings, arrays and a sample entity to JSON.
@Test public void testToJSONString() { // boolean to json Assertions.assertEquals("true", JACKSON_HANDLER.toJSONString(true)); // double to json Assertions.assertEquals("0.01", JACKSON_HANDLER.toJSONString(0.01)); // integer to json Assertions.assertEquals("1", JACKSON_HANDLER.toJSONString(1)); // string to json Assertions.assertEquals("\"hello world\"", JACKSON_HANDLER.toJSONString("hello world")); // array to json Assertions.assertEquals("[0,1,2,3,4]", JACKSON_HANDLER.toJSONString(new int[]{0, 1, 2, 3, 4})); // object to json Assertions.assertEquals(EXPECTED_ENTITY_JSON, JACKSON_HANDLER.toJSONString(EXPECTED_ENTITY)); }
// Instance convenience wrapper: Vincenty (ellipsoidal) distance from this point to `other`,
// delegating to the static LatLongUtils implementation.
public double vincentyDistance(LatLong other) { return LatLongUtils.vincentyDistance(this, other); }
// Unit test: along the equator, 90 degrees of longitude is a quarter of the equatorial circumference.
@Test public void vincentyDistance_originToNearOfSriLanka_returnQuarterOfEarthEquatorCircumference() { // This is the origin of the WGS-84 reference system LatLong zeroZero = new LatLong(0d, 0d); // These coordinates are 1/4 Earth circumference from zero on the equator LatLong nearSriLanka = new LatLong(0d, 90d); double vincenty = LatLongUtils.vincentyDistance(zeroZero, nearSriLanka); assertEquals(EARTH_EQUATOR_CIRCUMFERENCE / 4, vincenty, 1E-4); }
// Queries the cluster root endpoint (filtered to version.number/version.distribution) and maps
// the response to a SearchVersion. A missing distribution defaults to ELASTICSEARCH (older ES
// responses carry no "distribution" field); a missing version number yields Optional.empty().
@Override public Optional<SearchVersion> version() { final Request request = new Request("GET", "/?filter_path=version.number,version.distribution"); final Optional<JsonNode> resp = Optional.of(jsonApi.perform(request, "Unable to retrieve cluster information")); final Optional<String> version = resp.map(r -> r.path("version")).map(r -> r.path("number")).map(JsonNode::textValue); final SearchVersion.Distribution distribution = resp.map(r -> r.path("version")).map(r -> r.path("distribution")).map(JsonNode::textValue) .map(StringUtils::toUpperCase) .map(SearchVersion.Distribution::valueOf) .orElse(SearchVersion.Distribution.ELASTICSEARCH); return version .map(this::parseVersion) .map(v -> SearchVersion.create(distribution, v)); }
// Unit test: a response declaring distribution "opensearch" yields an OPENSEARCH SearchVersion.
@Test void testOpensearchVersionFetching() throws IOException { mockResponse("{\"version\" : " + " {" + " \"distribution\" : \"opensearch\"," + " \"number\" : \"1.3.1\"" + " }" + "}"); assertThat(toTest.version()) .isNotEmpty() .contains(SearchVersion.create(SearchVersion.Distribution.OPENSEARCH, Version.parse("1.3.1"))); }
// Top-level codec generation pass: emits package info, type stubs and the message header,
// then for each IR message partitions the body tokens into fields, groups and var-data
// (in that fixed order — the collect* calls advance a shared index) and generates a
// decoder and an encoder, each with its own field-precedence model for state checking.
public void generate() throws IOException { packageNameByTypes.clear(); generatePackageInfo(); generateTypeStubs(); generateMessageHeaderStub(); for (final List<Token> tokens : ir.messages()) { final Token msgToken = tokens.get(0); final List<Token> messageBody = getMessageBody(tokens); final boolean hasVarData = -1 != findSignal(messageBody, Signal.BEGIN_VAR_DATA); int i = 0; final List<Token> fields = new ArrayList<>(); i = collectFields(messageBody, i, fields); final List<Token> groups = new ArrayList<>(); i = collectGroups(messageBody, i, groups); final List<Token> varData = new ArrayList<>(); collectVarData(messageBody, i, varData); final String decoderClassName = formatClassName(decoderName(msgToken.name())); final String decoderStateClassName = decoderClassName + "#CodecStates"; final FieldPrecedenceModel decoderPrecedenceModel = precedenceChecks.createDecoderModel( decoderStateClassName, tokens); generateDecoder(decoderClassName, msgToken, fields, groups, varData, hasVarData, decoderPrecedenceModel); final String encoderClassName = formatClassName(encoderName(msgToken.name())); final String encoderStateClassName = encoderClassName + "#CodecStates"; final FieldPrecedenceModel encoderPrecedenceModel = precedenceChecks.createEncoderModel( encoderStateClassName, tokens); generateEncoder(encoderClassName, msgToken, fields, groups, varData, hasVarData, encoderPrecedenceModel); } }
// Unit test: generated encoder/decoder pair round-trips a serial number through a shared buffer.
@Test void shouldGenerateReadOnlyMessage() throws Exception { final UnsafeBuffer buffer = new UnsafeBuffer(new byte[4096]); generator().generate(); final Object encoder = wrap(buffer, compileCarEncoder().getConstructor().newInstance()); final Object decoder = getCarDecoder(buffer, encoder); final long expectedSerialNumber = 5L; putSerialNumber(encoder, expectedSerialNumber); final long serialNumber = getSerialNumber(decoder); assertEquals(expectedSerialNumber, serialNumber); }
// Returns the process-wide shared wheel timer instance held by SHARED_TIMER.
public static Timer getSharedTimer() { return SHARED_TIMER.getRef(); }
// Unit test: repeated calls return the very same shared timer instance.
@Test public void testGetShared() { Timer sharedTimer1 = WheelTimerFactory.getSharedTimer(); Timer sharedTimer2 = WheelTimerFactory.getSharedTimer(); assertSame(sharedTimer1, sharedTimer2); }
// Resolves the externally advertised HTTP URI: unset -> default URI; a wildcard host
// (e.g. 0.0.0.0) is not routable for clients, so it is replaced by the default URI
// (keeping the configured path) with a warning; otherwise the configured URI is
// normalized (scheme/default port/path).
public URI getHttpPublishUri() { if (httpPublishUri == null) { final URI defaultHttpUri = getDefaultHttpUri(); LOG.debug("No \"http_publish_uri\" set. Using default <{}>.", defaultHttpUri); return defaultHttpUri; } else { final InetAddress inetAddress = toInetAddress(httpPublishUri.getHost()); if (Tools.isWildcardInetAddress(inetAddress)) { final URI defaultHttpUri = getDefaultHttpUri(httpPublishUri.getPath()); LOG.warn("\"{}\" is not a valid setting for \"http_publish_uri\". Using default <{}>.", httpPublishUri, defaultHttpUri); return defaultHttpUri; } else { return Tools.normalizeURI(httpPublishUri, httpPublishUri.getScheme(), GRAYLOG_DEFAULT_PORT, httpPublishUri.getPath()); } } }
// Unit test: a wildcard publish URI (0.0.0.0) must be rejected and replaced, so the getter
// never returns the configured wildcard value verbatim.
@Test public void testHttpPublishUriWildcard() throws RepositoryException, ValidationException { final Map<String, String> properties = ImmutableMap.of( "http_bind_address", "0.0.0.0:9000", "http_publish_uri", "http://0.0.0.0:9000/"); jadConfig.setRepository(new InMemoryRepository(properties)).addConfigurationBean(configuration).process(); assertThat(configuration.getHttpPublishUri()).isNotEqualTo(URI.create("http://0.0.0.0:9000/")); }
// Parses a tilde-separated list of compact layout-location strings; a null or empty input
// yields an empty (mutable) list rather than null.
public static List<LayoutLocation> fromCompactListString(String compactList) { List<LayoutLocation> locs = new ArrayList<>(); if (!Strings.isNullOrEmpty(compactList)) { String[] items = compactList.split(TILDE); for (String s : items) { locs.add(fromCompactString(s)); } } return locs; }
// Unit test: null input yields an empty list, not null and not an exception.
@Test public void fromCompactListNull() { List<LayoutLocation> locs = fromCompactListString(null); assertEquals("non-empty list", 0, locs.size()); }
// Executes the DBSIZE command against the given cluster node and returns its key count.
@Override public Long dbSize(RedisClusterNode node) { return execute(node, RedisCommands.DBSIZE); }
// Unit test: after flushAll, the first master node reports a DB size of zero.
@Test public void testDbSize() { connection.flushAll(); RedisClusterNode master = getFirstMaster(); Long size = connection.dbSize(master); assertThat(size).isZero(); }
// Static factory for a TableFactoryHelper bound to the given table factory and its context.
public static TableFactoryHelper createTableFactoryHelper( DynamicTableFactory factory, DynamicTableFactory.Context context) { return new TableFactoryHelper(factory, context); }
// Unit test: enrichment options may override only designated options (buffer size),
// while non-overridable ones (target) keep the original table value.
@Test void testFactoryHelperWithEnrichmentOptions() { final Map<String, String> options = new HashMap<>(); options.put(TestDynamicTableFactory.TARGET.key(), "abc"); options.put(TestDynamicTableFactory.BUFFER_SIZE.key(), "1000"); final Map<String, String> enrichment = new HashMap<>(); enrichment.put(TestDynamicTableFactory.TARGET.key(), "xyz"); enrichment.put(TestDynamicTableFactory.BUFFER_SIZE.key(), "2000"); final FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper( new TestDynamicTableFactory(), FactoryMocks.createTableContext(SCHEMA, options, enrichment)); helper.validate(); assertThat(helper.getOptions().get(TestDynamicTableFactory.TARGET)).isEqualTo("abc"); assertThat(helper.getOptions().get(TestDynamicTableFactory.BUFFER_SIZE)).isEqualTo(2000); }
// SQL scalar function: returns the number of top-level elements of a JSON array using a
// streaming parser (skipChildren avoids materializing nested values). Returns SQL NULL
// when the input is not an array, is truncated, or fails to parse.
@SqlNullable @ScalarFunction @SqlType(StandardTypes.BIGINT) public static Long jsonArrayLength(@SqlType(StandardTypes.JSON) Slice json) { try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) { if (parser.nextToken() != START_ARRAY) { return null; } long length = 0; while (true) { JsonToken token = parser.nextToken(); if (token == null) { return null; } if (token == END_ARRAY) { return length; } parser.skipChildren(); length++; } } catch (IOException e) { return null; } }
// Unit test: array lengths for varchar and JSON inputs, nested values counted as one, null in/out.
@Test public void testJsonArrayLength() { assertFunction("JSON_ARRAY_LENGTH('[]')", BIGINT, 0L); assertFunction("JSON_ARRAY_LENGTH('[1]')", BIGINT, 1L); assertFunction("JSON_ARRAY_LENGTH('[1, \"foo\", null]')", BIGINT, 3L); assertFunction("JSON_ARRAY_LENGTH('[2, 4, {\"a\": [8, 9]}, [], [5], 4]')", BIGINT, 6L); assertFunction("JSON_ARRAY_LENGTH(JSON '[]')", BIGINT, 0L); assertFunction("JSON_ARRAY_LENGTH(JSON '[1]')", BIGINT, 1L); assertFunction("JSON_ARRAY_LENGTH(JSON '[1, \"foo\", null]')", BIGINT, 3L); assertFunction("JSON_ARRAY_LENGTH(JSON '[2, 4, {\"a\": [8, 9]}, [], [5], 4]')", BIGINT, 6L); assertFunction("JSON_ARRAY_LENGTH(null)", BIGINT, null); }
// Looks up the binary protocol value codec for a column type; fails fast with a descriptive
// precondition message for unsupported types instead of returning null.
public static PostgreSQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) { Preconditions.checkArgument(BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType), "Cannot find PostgreSQL type '%s' in column type when process binary protocol value", binaryColumnType); return BINARY_PROTOCOL_VALUES.get(binaryColumnType); }
// Unit test: INT4 maps to the Int4 binary protocol value implementation.
@Test void assertGetInt4BinaryProtocolValue() { PostgreSQLBinaryProtocolValue binaryProtocolValue = PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(PostgreSQLColumnType.INT4); assertThat(binaryProtocolValue, instanceOf(PostgreSQLInt4BinaryProtocolValue.class)); }
// Always returns 0 — by the java.sql.DatabaseMetaData convention a result of 0 means
// "no fixed limit / limit unknown" for the maximum number of columns per table.
@Override public int getMaxColumnsInTable() { return 0; }
// Unit test: the metadata reports 0 (no fixed limit) for max columns in a table.
@Test void assertGetMaxColumnsInTable() { assertThat(metaData.getMaxColumnsInTable(), is(0)); }
// Final override that simply delegates to the parent's exact-equality assertion;
// exists to pin the equality semantics for this subject type.
@Override public final void isEqualTo(@Nullable Object other) { super.isEqualTo(other); }
// Unit test: exact float equality semantics — self-equality, infinities, NaN==NaN,
// null==null, and int-to-float widening; near-misses must fail.
@SuppressWarnings("TruthSelfEquals") @Test public void isEqualTo() { assertThat(GOLDEN).isEqualTo(GOLDEN); assertThatIsEqualToFails(GOLDEN, JUST_OVER_GOLDEN); assertThat(Float.POSITIVE_INFINITY).isEqualTo(Float.POSITIVE_INFINITY); assertThat(Float.NaN).isEqualTo(Float.NaN); assertThat((Float) null).isEqualTo(null); assertThat(1.0f).isEqualTo(1); }
// Builds a sanitized, timestamp-suffixed database name from the base string by replacing
// illegal characters and truncating to the maximum allowed length.
static String generateDatabaseName(String baseString) { return generateResourceId( baseString, ILLEGAL_DATABASE_NAME_CHARS, REPLACE_DATABASE_NAME_CHAR, MAX_DATABASE_NAME_LENGTH, TIME_FORMAT); }
// Unit test: NUL characters are replaced and the result matches the lowercased,
// hyphenated, timestamp-suffixed pattern.
@Test public void testGenerateDatabaseNameShouldReplaceNullCharacter() { String testBaseString = "Test\0DB\0Name"; String actual = generateDatabaseName(testBaseString); assertThat(actual).matches("test-db-name-\\d{8}-\\d{6}-\\d{6}"); }
// Resets the named counter (if it exists) and purges the collector's per-request delta
// state derived from it: the global aggregate plus the by-id request and JRobin entries
// for every request the counter held before clearing. Unknown names are a no-op.
void clearCounter(String counterName) { final Counter counter = getCounterByName(counterName); if (counter != null) { final List<CounterRequest> requests = counter.getRequests(); // on réinitialise le counter counter.clear(); // et on purge les données correspondantes du collector utilisées pour les deltas globalRequestsByCounter.remove(counter); for (final CounterRequest request : requests) { requestsById.remove(request.getId()); requestJRobinsById.remove(request.getId()); } } }
// Unit test: clearing a counter empties its requests; clearing an unknown name must not throw.
@Test public void testClearCounter() { final Counter counter = createCounter(); final Collector collector = new Collector("test collector", Collections.singletonList(counter)); counter.addRequest("test clear", 0, 0, 0, false, 1000); collector.clearCounter(counter.getName()); if (counter.getRequestsCount() != 0) { fail("counter vide"); } collector.clearCounter("nothing"); }
// REST endpoint (permission-guarded): returns the resource menu tree; an empty tree is
// reported as a query failure rather than an empty success payload.
@GetMapping("/menu") @RequiresPermissions("system:manager:configureDataPermission") public ShenyuAdminResult getMenuTree() { List<MenuInfo> menuInfoList = resourceService.getMenuTree(); if (CollectionUtils.isNotEmpty(menuInfoList)) { return ShenyuAdminResult.success(ShenyuResultMessage.QUERY_SUCCESS, menuInfoList); } return ShenyuAdminResult.error(ShenyuResultMessage.QUERY_FAILED); }
// Unit test: a non-empty menu tree from the service is returned as a JSON success result.
@Test public void testQueryMenuTreeResultSuccess() throws Exception { final List<PermissionMenuVO.MenuInfo> mockResult = newArrayList(new PermissionMenuVO.MenuInfo()); given(resourceService.getMenuTree()).willReturn(mockResult); this.mockMvc.perform(MockMvcRequestBuilders.get("/resource/menu")) .andExpect(content().json(GsonUtils.getInstance().toJson(ShenyuAdminResult.success(ShenyuResultMessage.QUERY_SUCCESS, mockResult)))) .andReturn(); }
// Map-based convenience overload: resolves each listener name through the provided map and
// fails fast with IllegalArgumentException for a listener without a security protocol mapping.
public static List<Endpoint> listenerListToEndPoints( String input, Map<ListenerName, SecurityProtocol> nameToSecurityProto ) { return listenerListToEndPoints(input, n -> { SecurityProtocol result = nameToSecurityProto.get(n); if (result == null) { throw new IllegalArgumentException("No security protocol defined for listener " + n.value()); } return result; }); }
// Unit test: a blank listener list yields an empty endpoint list.
@Test public void testListenerListToEndPointsWithBlankString() { assertEquals(Arrays.asList(), SocketServerConfigs.listenerListToEndPoints(" ", SocketServerConfigs.DEFAULT_NAME_TO_SECURITY_PROTO)); }
// Records a failed metadata refresh: stamps the attempt time (drives retry backoff),
// increments the consecutive-failure attempt counter, and resets the count of
// equivalent responses. Synchronized with the other metadata state transitions.
public synchronized void failedUpdate(long now) { this.lastRefreshMs = now; this.attempts++; this.equivalentResponseCount = 0; }
// Unit test: a failed update keeps timeToNextUpdate within the jittered retry backoff window
// and leaves lastSuccessfulUpdate at the prior successful timestamp.
@Test public void testFailedUpdate() { long time = 100; metadata.updateWithCurrentRequestVersion(emptyMetadataResponse(), false, time); assertEquals(100, metadata.timeToNextUpdate(1000)); metadata.failedUpdate(1100); long lowerBoundBackoffMs = (long) (refreshBackoffMs * (1 - CommonClientConfigs.RETRY_BACKOFF_JITTER)); long upperBoundBackoffMs = (long) (refreshBackoffMs * (1 + CommonClientConfigs.RETRY_BACKOFF_JITTER)); assertEquals(100, metadata.timeToNextUpdate(1100), upperBoundBackoffMs - lowerBoundBackoffMs); assertEquals(100, metadata.lastSuccessfulUpdate()); metadata.updateWithCurrentRequestVersion(emptyMetadataResponse(), false, time); assertEquals(100, metadata.timeToNextUpdate(1000), upperBoundBackoffMs - lowerBoundBackoffMs); }
// Builds a Spanner-safe database id: sanitize + timestamp via generateResourceId, strip
// trailing underscores (avoids needing backticks), then require a non-empty result and
// replace a non-letter first character with a padding letter to satisfy naming rules.
public static String generateDatabaseId(String baseString) { checkArgument(baseString.length() != 0, "baseString cannot be empty!"); String databaseId = generateResourceId( baseString, ILLEGAL_DATABASE_CHARS, REPLACE_DATABASE_CHAR, MAX_DATABASE_ID_LENGTH, DATABASE_TIME_FORMAT); // replace hyphen with underscore, so there's no need for backticks String trimmed = CharMatcher.is('_').trimTrailingFrom(databaseId); checkArgument( trimmed.length() > 0, "Database id is empty after removing illegal characters and trailing underscores"); // if first char is not a letter, replace with a padding letter, so it doesn't // violate spanner's database naming rules char padding = generatePadding(); if (!Character.isLetter(trimmed.charAt(0))) { trimmed = padding + trimmed.substring(1); } return trimmed; }
// Unit test: trailing underscores in the base string are trimmed before the timestamp suffix.
@Test public void testGenerateDatabaseIdShouldTrimTrailingUnderscore() { String testBaseString = "test_database___"; String actual = generateDatabaseId(testBaseString); assertThat(actual).matches("test_da_\\d{8}_\\d{6}_\\d{6}"); }
// SWAR lower-casing of 8 packed ASCII bytes: applyUpperCasePattern presumably marks each
// uppercase byte (TODO confirm which bit it sets); shifting that mask right by 2 lines it
// up with bit 5 (0x20), and OR-ing sets that bit, which maps 'A'-'Z' to 'a'-'z' while
// leaving non-uppercase bytes unchanged.
public static long toLowerCase(final long word) { final long mask = applyUpperCasePattern(word) >>> 2; return word | mask; }
// Unit test: for every 8-byte window of a shuffled extended-ASCII table, the SWAR result
// matches byte-wise Character.toLowerCase.
@Test void toLowerCaseLong() { // given final byte[] asciiTable = getExtendedAsciiTable(); shuffleArray(asciiTable, random); // when for (int idx = 0; idx < asciiTable.length; idx += Long.BYTES) { final long value = getLong(asciiTable, idx); final long actual = SWARUtil.toLowerCase(value); long expected = 0L; for (int i = 0; i < Long.BYTES; i++) { final byte b = (byte) Character.toLowerCase(asciiTable[idx + i]); expected |= (long) ((b & 0xff)) << (56 - (Byte.SIZE * i)); } // then assertEquals(expected, actual); } }
// Finds the largest index entry with offset <= targetOffset, searching a duplicate of the
// mmap buffer under the optional lock; slot -1 (target below all entries) maps to the
// segment base offset at file position 0.
public OffsetPosition lookup(long targetOffset) { return maybeLock(lock, () -> { ByteBuffer idx = mmap().duplicate(); int slot = largestLowerBoundSlotFor(idx, targetOffset, IndexSearchType.KEY); if (slot == -1) return new OffsetPosition(baseOffset(), 0); else return parseEntry(idx, slot); }); }
// Unit test: after forceUnmap the backing mmap is gone, so lookup and close both NPE.
@Test public void forceUnmapTest() throws IOException { OffsetIndex idx = new OffsetIndex(nonExistentTempFile(), 0L, 10 * 8); idx.forceUnmap(); // mmap should be null after unmap causing lookup to throw a NPE assertThrows(NullPointerException.class, () -> idx.lookup(1)); assertThrows(NullPointerException.class, idx::close); }
// Type-dispatches incoming data: only StartingData and StoppingData are handled;
// every other DataType subtype is deliberately ignored.
@Override public void accept(final DataType data) { if (data instanceof StartingData) { handleEvent((StartingData) data); } else if (data instanceof StoppingData) { handleEvent((StoppingData) data); } }
// Unit test: MessageData is not a lifecycle event, so accepting it leaves started/stopped unset.
@Test void statusIgnoresMessageData() { //given final var messageData = new MessageData("message"); final var statusMember = new StatusMember(1); //when statusMember.accept(messageData); //then assertNull(statusMember.getStarted()); assertNull(statusMember.getStopped()); }
// Flattens one level of nested AND predicates copy-on-write: inner AndPredicates found in
// the children are replaced in-slot by their first sub-predicate, the remaining
// sub-predicates are collected and appended afterwards. The original predicate array is
// only copied on the first modification; if nothing changed the input instance is returned.
@Override public Predicate visit(AndPredicate andPredicate, IndexRegistry indexes) { Predicate[] originalPredicates = andPredicate.predicates; List<Predicate> toBeAdded = null; boolean modified = false; Predicate[] target = originalPredicates; for (int i = 0; i < target.length; i++) { Predicate predicate = target[i]; if (predicate instanceof AndPredicate andPredicateInstance) { Predicate[] subPredicates = andPredicateInstance.predicates; if (!modified) { modified = true; target = createCopy(target); } toBeAdded = replaceFirstAndStoreOthers(target, subPredicates, i, toBeAdded); } } Predicate[] newInners = createNewInners(target, toBeAdded); if (newInners == originalPredicates) { return andPredicate; } return new AndPredicate(newInners); }
// Unit test: the OR counterpart of the flattening visitor collapses a nested OR into
// a single three-child OR.
@Test public void visitOrPredicate_whenHasInnerOrPredicate_thenFlattenIt() { // (a1 = 1 or (a2 = 2 or a3 = 3)) --> (a1 = 1 or a2 = 2 or a3 = 3) Predicate a1 = equal("a1", 1); Predicate a2 = equal("a2", 2); Predicate a3 = equal("a3", 3); OrPredicate innerOr = (OrPredicate) or(a2, a3); OrPredicate outerOr = (OrPredicate) or(a1, innerOr); OrPredicate result = (OrPredicate) visitor.visit(outerOr, indexes); Predicate[] inners = result.predicates; assertEquals(3, inners.length); }
/**
 * Concatenates the given JSON documents into one.
 *
 * If every argument parses to a JSON object, the objects are merged left to
 * right via concatObjects. Otherwise each value is coerced through toArrayNode
 * and the arrays are appended in order via concatArrays.
 *
 * @param jsonStrings the JSON documents to concatenate
 * @return the resulting JSON as a string, or null when the varargs array is
 *         null, any element is null, or any element fails to parse
 */
@Udf
public String concat(@UdfParameter final String... jsonStrings) {
    if (jsonStrings == null) {
        return null;
    }
    final List<JsonNode> nodes = new ArrayList<>(jsonStrings.length);
    // Tracks whether all inputs are objects; decides object-merge vs. array-append.
    boolean allObjects = true;
    for (final String jsonString : jsonStrings) {
        if (jsonString == null) {
            return null;
        }
        final JsonNode node = UdfJsonMapper.parseJson(jsonString);
        if (node.isMissingNode()) {
            // An unparseable input poisons the whole result.
            return null;
        }
        if (allObjects && !node.isObject()) {
            allObjects = false;
        }
        nodes.add(node);
    }
    JsonNode result = nodes.get(0);
    if (allObjects) {
        // Object path: fold every following object into the accumulator.
        for (int i = 1; i < nodes.size(); i++) {
            result = concatObjects((ObjectNode) result, (ObjectNode) nodes.get(i));
        }
    } else {
        // Array path: every operand is converted with toArrayNode first.
        for (int i = 1; i < nodes.size(); i++) {
            result = concatArrays(toArrayNode(result), toArrayNode(nodes.get(i)));
        }
    }
    return UdfJsonMapper.writeValueAsJson(result);
}
@Test
public void shouldOverrideWithAttrsFromTheSecondObject() {
    // When: both objects carry the key "a"; the right-hand value must win.
    final String result = udf.concat("{\"a\": {\"5\": 6}}", "{\"a\": {\"3\": 4}}");

    // Then: the second object's value for "a" replaces the first one entirely.
    assertEquals("{\"a\":{\"3\":4}}", result);
}
/**
 * Builds a DevOps project creator for the GitHub project described by the
 * characteristics map, when a configured GitHub ALM setting matches its URL.
 * Returns empty when the URL or repository characteristic is missing, or when
 * no ALM setting matches.
 */
@Override
public Optional<DevOpsProjectCreator> getDevOpsProjectCreator(DbSession dbSession, Map<String, String> characteristics) {
    String apiUrl = characteristics.get(DEVOPS_PLATFORM_URL);
    String repositoryIdentifier = characteristics.get(DEVOPS_PLATFORM_PROJECT_IDENTIFIER);
    if (apiUrl == null || repositoryIdentifier == null) {
        return Optional.empty();
    }
    DevOpsProjectDescriptor projectDescriptor = new DevOpsProjectDescriptor(ALM.GITHUB, apiUrl, repositoryIdentifier, null);
    // Pick the first GitHub ALM setting whose URL matches and that yields a creator.
    return dbClient.almSettingDao().selectByAlm(dbSession, ALM.GITHUB).stream()
        .filter(setting -> projectDescriptor.url().equals(setting.getUrl()))
        .map(setting -> findInstallationIdAndCreateDevOpsProjectCreator(projectDescriptor, setting))
        .flatMap(Optional::stream)
        .findFirst();
}
@Test
public void getDevOpsProjectCreator_whenOneMatchingAndOneNotMatchingAlmSetting_shouldInstantiateDevOpsProjectCreator() {
    // Two GitHub ALM settings; only the second one matches the project's URL.
    AlmSettingDto matchingAlmSettingDto = mockAlmSettingDto(true);
    AlmSettingDto notMatchingAlmSettingDto = mockAlmSettingDto(false);
    when(dbClient.almSettingDao().selectByAlm(dbSession, ALM.GITHUB)).thenReturn(List.of(notMatchingAlmSettingDto, matchingAlmSettingDto));
    mockSuccessfulGithubInteraction();
    when(devOpsProjectService.createDevOpsProject(matchingAlmSettingDto, GITHUB_PROJECT_DESCRIPTOR, appInstallationToken)).thenReturn(DEV_OPS_PROJECT);

    // The factory must skip the non-matching setting and build from the matching one.
    DevOpsProjectCreator devOpsProjectCreator = githubProjectCreatorFactory.getDevOpsProjectCreator(dbSession, VALID_GITHUB_PROJECT_COORDINATES).orElseThrow();

    GithubProjectCreator expectedGithubProjectCreator = getExpectedGithubProjectCreator(false);
    assertThat(devOpsProjectCreator).usingRecursiveComparison().isEqualTo(expectedGithubProjectCreator);
}
@Override public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) { // 0. 只有【普通】订单,才计算该优惠 if (ObjectUtil.notEqual(result.getType(), TradeOrderTypeEnum.NORMAL.getType())) { return; } // 获得 SKU 对应的满减送活动 List<RewardActivityMatchRespDTO> rewardActivities = rewardActivityApi.getMatchRewardActivityList( convertSet(result.getItems(), TradePriceCalculateRespBO.OrderItem::getSpuId)); if (CollUtil.isEmpty(rewardActivities)) { return; } // 处理每个满减送活动 rewardActivities.forEach(rewardActivity -> calculate(param, result, rewardActivity)); }
/**
 * Reward activity whose threshold (351) is above the order total (350):
 * no discount may be applied, but the promotion is still reported with
 * match=false so the caller can display "how much more to spend".
 */
@Test
public void testCalculate_notMatch() {
    // Prepare the request parameter.
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true),
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(true),
                    new TradePriceCalculateReqBO.Item().setSkuId(30L).setCount(4).setSelected(true)
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100).setSpuId(1L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(true)
                            .setPrice(50).setSpuId(2L)
            ));
    // Make sure the prices are initialized.
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // Mock the matched reward activity: requires 351 but the order totals 350.
    when(rewardActivityApi.getMatchRewardActivityList(eq(asSet(1L, 2L)))).thenReturn(singletonList(
            randomPojo(RewardActivityMatchRespDTO.class, o -> o.setId(1000L).setName("活动 1000 号")
                    .setSpuIds(asList(1L, 2L)).setConditionType(PromotionConditionTypeEnum.PRICE.getType())
                    .setRules(singletonList(new RewardActivityMatchRespDTO.Rule().setLimit(351).setDiscountPrice(70))))
    ));
    // Invoke.
    tradeRewardActivityPriceCalculator.calculate(param, result);
    // Assert the order-level part: totals unchanged, no discount applied.
    TradePriceCalculateRespBO.Price price = result.getPrice();
    assertEquals(price.getTotalPrice(), 350);
    assertEquals(price.getDiscountPrice(), 0);
    assertEquals(price.getPointPrice(), 0);
    assertEquals(price.getDeliveryPrice(), 0);
    assertEquals(price.getCouponPrice(), 0);
    assertEquals(price.getPayPrice(), 350);
    assertNull(result.getCouponId());
    // Assert SKU 1.
    assertEquals(result.getItems().size(), 2);
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getDiscountPrice(), 0);
    assertEquals(orderItem01.getDeliveryPrice(), 0);
    assertEquals(orderItem01.getCouponPrice(), 0);
    assertEquals(orderItem01.getPointPrice(), 0);
    assertEquals(orderItem01.getPayPrice(), 200);
    // Assert SKU 2.
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getDiscountPrice(), 0);
    assertEquals(orderItem02.getDeliveryPrice(), 0);
    assertEquals(orderItem02.getCouponPrice(), 0);
    assertEquals(orderItem02.getPointPrice(), 0);
    assertEquals(orderItem02.getPayPrice(), 150);
    // Assert the promotion part: reported but not matched.
    assertEquals(result.getPromotions().size(), 1);
    TradePriceCalculateRespBO.Promotion promotion01 = result.getPromotions().get(0);
    assertEquals(promotion01.getId(), 1000L);
    assertEquals(promotion01.getName(), "活动 1000 号");
    assertEquals(promotion01.getType(), PromotionTypeEnum.REWARD_ACTIVITY.getType());
    assertEquals(promotion01.getTotalPrice(), 350);
    assertEquals(promotion01.getDiscountPrice(), 0);
    assertFalse(promotion01.getMatch());
    assertEquals(promotion01.getDescription(), "TODO"); // TODO: revisit the description later
    assertEquals(promotion01.getItems().size(), 2);
    TradePriceCalculateRespBO.PromotionItem promotionItem011 = promotion01.getItems().get(0);
    assertEquals(promotionItem011.getSkuId(), 10L);
    assertEquals(promotionItem011.getTotalPrice(), 200);
    assertEquals(promotionItem011.getDiscountPrice(), 0);
    TradePriceCalculateRespBO.PromotionItem promotionItem012 = promotion01.getItems().get(1);
    assertEquals(promotionItem012.getSkuId(), 20L);
    assertEquals(promotionItem012.getTotalPrice(), 150);
    assertEquals(promotionItem012.getDiscountPrice(), 0);
}
/**
 * Brings the journal environment up: resets the closing flag, makes sure the
 * helper node is registered locally, applies the electable configuration and
 * finally opens the underlying environment.
 *
 * @throws JournalException     when environment initialization fails
 * @throws InterruptedException when setup is interrupted
 */
protected void setup() throws JournalException, InterruptedException {
    this.closing = false;
    ensureHelperInLocal();
    initConfigs(isElectable);
    setupEnvironment();
}
/**
 * Forces a BDB RollbackException during environment setup and verifies that,
 * with retries disabled, setup() surfaces it wrapped as a JournalException
 * instead of hanging in recovery.
 */
@Test
public void testRollbackExceptionOnSetupCluster(@Mocked RepImpl rep) throws Exception {
    long startMs = System.currentTimeMillis();
    // Stub just enough of RepImpl for RollbackException construction to work.
    new Expectations() {
        {
            rep.getName();
            minTimes = 0;
            result = "starrocks";
            rep.isValid();
            minTimes = 0;
            result = false;
            rep.getConfigManager();
            minTimes = 0;
            result = new DbConfigManager(new EnvironmentConfig());
        }
    };
    // Mock DatabaseUtil.checkForNullParam to generate a RollbackException.
    new MockUp<DatabaseUtil>() {
        @Mock
        public void checkForNullParam(final Object param, final String name) {
            throw new RollbackException(rep, VLSN.FIRST_VLSN, new MatchpointSearchResults(rep));
        }
    };
    leaderNodeHostPort = findUnbindHostPort();
    leaderPath = createTmpDir();
    // Set retry times = 1 to ensure no recovery attempt is made.
    BDBEnvironment.RETRY_TIME = 1;
    // Starting the leader will hit the injected rollback exception.
    BDBEnvironment maserEnvironment = new BDBEnvironment(
            leaderPath, "leader", leaderNodeHostPort, leaderNodeHostPort, true);
    Assert.assertTrue(true);
    try {
        maserEnvironment.setup();
    } catch (JournalException e) {
        // Expected path: the rollback surfaced as a JournalException.
        LOG.warn("got Rollback Exception, as expect, ", e);
    }
    System.out.println("testRollbackExceptionOnSetupCluster cost " + (System.currentTimeMillis() - startMs) / 1000 + " s");
}
/**
 * Returns the shuffle descriptors tracked for the given data set, as a
 * defensive copy. An unknown data set yields an empty list, never null.
 */
@Override
public List<ShuffleDescriptor> getClusterPartitionShuffleDescriptors(
        IntermediateDataSetID dataSetID) {
    final DataSetMetaInfo metaInfo = this.dataSetMetaInfo.get(dataSetID);
    if (metaInfo == null) {
        return Collections.emptyList();
    }
    return new ArrayList<>(metaInfo.getShuffleDescriptors().values());
}
@Test
void testGetClusterPartitionShuffleDescriptors() {
    final ResourceManagerPartitionTrackerImpl tracker =
            new ResourceManagerPartitionTrackerImpl(new TestClusterPartitionReleaser());
    assertThat(tracker.listDataSets()).isEmpty();
    // Report 100 partitions of one data set from a single task executor.
    List<ResultPartitionID> resultPartitionIDS = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        resultPartitionIDS.add(
                new ResultPartitionID(
                        new IntermediateResultPartitionID(DATA_SET_ID, i),
                        ExecutionAttemptID.randomId()));
    }
    for (ResultPartitionID resultPartitionID : resultPartitionIDS) {
        report(tracker, TASK_EXECUTOR_ID_1, DATA_SET_ID, 100, resultPartitionID);
    }
    // The tracker must return all descriptors, preserving the reported ids.
    final List<ShuffleDescriptor> shuffleDescriptors =
            tracker.getClusterPartitionShuffleDescriptors(DATA_SET_ID);
    assertThat(shuffleDescriptors).hasSize(100);
    assertThat(
                    shuffleDescriptors.stream()
                            .map(ShuffleDescriptor::getResultPartitionID)
                            .collect(Collectors.toList()))
            .containsExactlyElementsOf(resultPartitionIDS);
    // Empty reports from both executors must clear all internal state.
    reportEmpty(tracker, TASK_EXECUTOR_ID_1);
    reportEmpty(tracker, TASK_EXECUTOR_ID_2);
    assertThat(tracker.areAllMapsEmpty()).isTrue();
}
/**
 * Returns the default textual form of this object; delegates to
 * {@link #toString(boolean)} with {@code false}.
 */
@Override
public String toString() {
    return toString(false);
}
@Test
void testToString() {
    // No explicit operation: rendered as "GET?" (operation not set).
    assertEquals("GET? http://localhost:8080/", new HttpRequest()
            .setScheme("http")
            .setHost("localhost")
            .setPort(8080)
            .toString(true));
    // Explicit POST, default port omitted from the URL.
    assertEquals("POST http://localhost/", new HttpRequest()
            .setScheme("http")
            .setHttpOperation(HttpRequest.HttpOp.POST)
            .setHost("localhost")
            .toString(true));
    // URL options are appended as a query string.
    assertEquals("GET http://localhost/?foo=bar", new HttpRequest()
            .setScheme("http")
            .setHttpOperation(HttpRequest.HttpOp.GET)
            .addUrlOption("foo", "bar")
            .setHost("localhost")
            .toString(true));
}
/**
 * Computes the field name to use when nulling a Salesforce field.
 *
 * Without external-id handling the input name is returned unchanged. With
 * isUseExtId, the name must match the external-id syntax; the part after the
 * separator is used, custom relationship fields ("..._r") are rewritten to the
 * custom-object form ("..._c"), and standard lookups get an "Id" suffix.
 *
 * @param log        channel for debug logging
 * @param field      the incoming field name
 * @param isUseExtId whether external-id resolution should be applied
 * @return the resolved field name to null
 */
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
    String fieldToNullName = field;
    if ( isUseExtId ) {
        // Verify the field follows the external-id syntax; otherwise return as-is.
        if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
            if ( log.isDebug() ) {
                log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax",
                        field, fieldToNullName ) );
            }
            return fieldToNullName;
        }
        // Everything after the external-id separator is the lookup field.
        String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
        // Custom objects / relationships: cut off "_r" and append "_c".
        if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
            fieldToNullName =
                    lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() )
                            + CUSTOM_OBJECT_SUFFIX;
            if ( log.isDebug() ) {
                log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
            }
            return fieldToNullName;
        }
        // Standard lookup: reference fields are nulled through their "...Id" column.
        fieldToNullName = lookupField + "Id";
    }
    if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
    }
    return fieldToNullName;
}
@Test
public void testFieldWithExtIdYesButNameInIncorrectSyntax_StandartObject() {
    when( logMock.isDebug() ).thenReturn( true );

    // "Account" lacks the external-id separator, so the name comes back unchanged.
    inputFieldName = "Account";
    expectedFieldName = inputFieldName;

    String actual = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true );

    assertEquals( expectedFieldName, actual );
}
/**
 * Completes the caller's promise once the origin connect attempt finishes.
 *
 * On success, updates passport/metrics and hands the channel to
 * createConnection. On failure, DecoderExceptions are unwrapped so the real
 * decoding cause (e.g. a TLS handshake error) is reported instead of the
 * wrapper, then the promise is failed with an OriginConnectException.
 */
protected void handleConnectCompletion(
        ChannelFuture cf, Promise<PooledConnection> callerPromise, CurrentPassport passport) {
    connCreationsInProgress.decrementAndGet();
    updateServerStatsOnConnectCompletion(cf);
    if (cf.isSuccess()) {
        passport.add(PassportState.ORIGIN_CH_CONNECTED);
        createConnSucceededCounter.increment();
        connsInUse.incrementAndGet();
        createConnection(cf, callerPromise, passport);
    } else {
        createConnFailedCounter.increment();
        // unwrap DecoderExceptions to get a better indication of why decoding failed
        // as decoding failures are not indicative of actual connection causes
        if (cf.cause() instanceof DecoderException de && de.getCause() != null) {
            callerPromise.setFailure(new OriginConnectException(
                    de.getCause().getMessage(), de.getCause(), OutboundErrorType.CONNECT_ERROR));
        } else {
            callerPromise.setFailure(new OriginConnectException(
                    cf.cause().getMessage(), cf.cause(), OutboundErrorType.CONNECT_ERROR));
        }
    }
}
@Test
void handleConnectCompletionWithDecoderExceptionIsUnwrapped() {
    EmbeddedChannel channel = new EmbeddedChannel();
    Promise<PooledConnection> promise = CLIENT_EVENT_LOOP.newPromise();
    // Fail the connect with a DecoderException wrapping a TLS handshake error.
    pool.handleConnectCompletion(
            channel.newFailedFuture(new DecoderException(new SSLHandshakeException("Invalid tls cert"))),
            promise,
            CurrentPassport.create());
    assertFalse(promise.isSuccess());
    assertNotNull(promise.cause());
    // The pool must report an OriginConnectException whose cause is the
    // unwrapped SSLHandshakeException, not the DecoderException wrapper.
    assertInstanceOf(OriginConnectException.class, promise.cause());
    assertInstanceOf(
            SSLHandshakeException.class,
            promise.cause().getCause(),
            "expect decoder exception is unwrapped");
}
/**
 * Fetches Elasticsearch cluster statistics via the low-level REST client
 * (GET /_cluster/stats) and maps the JSON payload to a ClusterStatsResponse.
 */
public ClusterStatsResponse clusterStats() {
    return execute(() -> {
        Request request = new Request("GET", "/_cluster/stats");
        Response response = restHighLevelClient.getLowLevelClient().performRequest(request);
        String body = EntityUtils.toString(response.getEntity());
        JsonObject json = gson.fromJson(body, JsonObject.class);
        return ClusterStatsResponse.toClusterStatsResponse(json);
    });
}
@Test
public void should_call_cluster_stat_api() throws Exception {
    // Stub the low-level REST client to answer GET /_cluster/stats with a canned body.
    HttpEntity entity = mock(HttpEntity.class);
    when(entity.getContent()).thenReturn(new ByteArrayInputStream(EXAMPLE_CLUSTER_STATS_JSON.getBytes()));
    Response response = mock(Response.class);
    when(response.getEntity()).thenReturn(entity);
    when(restClient.performRequest(argThat(new RawRequestMatcher(
            "GET",
            "/_cluster/stats"))))
            .thenReturn(response);
    // The client must parse the payload into a non-null response object.
    assertThat(underTest.clusterStats()).isNotNull();
}
public static String getResourceFileAsText(String name) { try { String lineEnd = System.getProperty("line.separator"); // $NON-NLS-1$ InputStream is = JMeterUtils.class.getClassLoader().getResourceAsStream(name); if (is != null) { try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader fileReader = new BufferedReader(in)) { return fileReader.lines() .collect(Collectors.joining(lineEnd, "", lineEnd)); } } else { return ""; // $NON-NLS-1$ } } catch (IOException e) { return ""; // $NON-NLS-1$ } }
@Test
public void testGetResourceFileAsText() throws Exception {
    // The resource is joined with the platform separator, trailing one included.
    String lineSep = System.getProperty("line.separator");
    String expected = "line one" + lineSep + "line two" + lineSep;
    assertEquals(expected, JMeterUtils.getResourceFileAsText("resourcefile.txt"));
}
/**
 * Returns this analyzer's display name (the ANALYZER_NAME constant).
 */
@Override
public String getName() {
    return ANALYZER_NAME;
}
/**
 * Runs the analyzer over a sample libman.json and checks that product/version
 * (and, for jquery-ui, vendor) evidence is collected for all four libraries.
 */
@Test
public void testLibmanAnalysis() throws Exception {
    try (Engine engine = new Engine(getSettings())) {
        File file = BaseTest.getResourceAsFile(this, "libman/libman.json");
        Dependency dependency = new Dependency(file);
        analyzer.analyze(dependency, engine);
        // Count only the libraries we expect; unknown names fall through.
        int count = 0;
        for (Dependency result : engine.getDependencies()) {
            switch (result.getName()) {
                case "bootstrap":
                    count++;
                    assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("bootstrap"));
                    assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("4.6.0"));
                    break;
                case "jquery":
                    count++;
                    assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("jquery"));
                    assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("3.6.3"));
                    break;
                case "font-awesome":
                    count++;
                    assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("font-awesome"));
                    assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("6.2.1"));
                    break;
                case "jquery-ui":
                    count++;
                    assertTrue(result.getEvidence(EvidenceType.VENDOR).toString().contains("regru"));
                    assertTrue(result.getEvidence(EvidenceType.PRODUCT).toString().contains("jquery-ui"));
                    assertTrue(result.getEvidence(EvidenceType.VERSION).toString().contains("1.6.3"));
                    break;
                default:
                    break;
            }
        }
        assertEquals("4 dependencies should be found", 4, count);
    }
}
/**
 * Hashes an arbitrary object: null maps to 0, Strings and byte arrays are
 * hashed over their bytes, anything else is hashed via its toString() form.
 * NOTE(review): String.getBytes() uses the platform default charset here, so
 * hashes of non-ASCII strings may differ across platforms — confirm intended.
 */
public static long hash64(Object o) {
    if (o == null) {
        return 0L;
    }
    if (o instanceof String) {
        final byte[] bytes = ((String) o).getBytes();
        return hash64(bytes, bytes.length);
    }
    if (o instanceof byte[]) {
        final byte[] bytes = (byte[]) o;
        return hash64(bytes, bytes.length);
    }
    // Fallback: hash the textual representation.
    return hash64(o.toString());
}
@Test
public void testHash64ByteArrayOverload() {
    String input = "hashthis";
    byte[] rawBytes = input.getBytes();
    long expected = MurmurHash.hash64(input);

    // The byte[] overload must agree with the String overload.
    assertEquals("MurmurHash.hash64(byte[]) did not match MurmurHash.hash64(String)",
            expected, MurmurHash.hash64(rawBytes));

    // The Object overload must detect byte[] and not fall back to toString().
    Object asObject = rawBytes;
    assertEquals("MurmurHash.hash64(Object) given a byte[] did not match MurmurHash.hash64(String)",
            expected, MurmurHash.hash64(asObject));
}
/**
 * Point lookup of a key in the materialized state store, restricted to one
 * partition and optionally bound to a consistency Position.
 *
 * Returns an iterator-with-position result: empty when the key is absent, a
 * single Row otherwise. Failed partition results are rethrown; unexpected
 * exceptions are wrapped in a MaterializationException.
 */
@Override
public KsMaterializedQueryResult<Row> get(
    final GenericKey key,
    final int partition,
    final Optional<Position> position
) {
    try {
        final KeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query = KeyQuery.withKey(key);
        StateQueryRequest<ValueAndTimestamp<GenericRow>> request =
            inStore(stateStore.getStateStoreName())
                .withQuery(query)
                .withPartitions(ImmutableSet.of(partition));
        if (position.isPresent()) {
            // Bound the query so it only answers once the store has caught up.
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final StateQueryResult<ValueAndTimestamp<GenericRow>> result =
            stateStore.getKafkaStreams().query(request);
        final QueryResult<ValueAndTimestamp<GenericRow>> queryResult =
            result.getPartitionResults().get(partition);
        // Some of these failures are retriable, and in the future, we may want to retry
        // locally before throwing.
        if (queryResult.isFailure()) {
            throw failedQueryException(queryResult);
        } else if (queryResult.getResult() == null) {
            // Key not present: empty iterator but still carry the read position.
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                Collections.emptyIterator(), queryResult.getPosition());
        } else {
            final ValueAndTimestamp<GenericRow> row = queryResult.getResult();
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                ImmutableList.of(Row.of(stateStore.schema(), key, row.value(), row.timestamp()))
                    .iterator(),
                queryResult.getPosition());
        }
    } catch (final NotUpToBoundException | MaterializationException e) {
        // Already meaningful to callers; do not re-wrap.
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
@Test
public void shouldReturnValuesLowerBound() {
    // Given:
    when(kafkaStreams.query(any())).thenReturn(getIteratorResult());

    // When:
    // NOTE(review): arguments here are (PARTITION, A_KEY, null) — presumably a
    // (partition, key, bound) overload distinct from get(key, partition,
    // Optional) — confirm against the table's API.
    final KsMaterializedQueryResult<Row> result = table.get(PARTITION, A_KEY, null);

    // Then: both rows come back in order, and the read Position is exposed.
    Iterator<Row> rowIterator = result.getRowIterator();
    assertThat(rowIterator.hasNext(), is(true));
    assertThat(rowIterator.next(), is(Row.of(SCHEMA, A_KEY, ROW1, TIME1)));
    assertThat(rowIterator.next(), is(Row.of(SCHEMA, A_KEY2, ROW2, TIME2)));
    assertThat(rowIterator.hasNext(), is(false));
    assertThat(result.getPosition(), not(Optional.empty()));
    assertThat(result.getPosition().get(), is(POSITION));
}
/**
 * POSTs the given payload as a multipart upload ("cid" + "file") to the
 * phone-home endpoint, optionally through an HTTP proxy.
 *
 * Returns the HTTP status code, SC_BAD_REQUEST when any required argument is
 * missing/empty, or DEFAULT_STATUS_CODE when the request throws an IOException.
 *
 * NOTE(review): a caller-supplied httpClient is also closed in the finally
 * block — callers must not reuse the client they pass in; confirm intended.
 */
@SuppressWarnings({"checkstyle:CyclomaticComplexity", "checkstyle:FinalParameters"})
protected static int send(
    final String customerId,
    final byte[] bytes,
    final HttpPost httpPost,
    final HttpHost proxy,
    CloseableHttpClient httpClient,
    final ResponseHandler responseHandler
) {
    int statusCode = DEFAULT_STATUS_CODE;
    if (bytes != null && bytes.length > 0 && httpPost != null && customerId != null) {
        // add the body to the request
        final MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        builder.setMode(HttpMultipartMode.LEGACY);
        builder.addTextBody("cid", customerId);
        builder.addBinaryBody("file", bytes, ContentType.DEFAULT_BINARY, "filename");
        httpPost.setEntity(builder.build());
        httpPost.addHeader("api-version", "phone-home-v1");
        // set the HTTP config: one timeout value for connect/request/response
        RequestConfig config = RequestConfig.custom()
            .setConnectTimeout(Timeout.ofMilliseconds(REQUEST_TIMEOUT_MS))
            .setConnectionRequestTimeout(Timeout.ofMilliseconds(REQUEST_TIMEOUT_MS))
            .setResponseTimeout(Timeout.ofMilliseconds(REQUEST_TIMEOUT_MS))
            .build();
        CloseableHttpResponse response = null;
        try {
            if (proxy != null) {
                log.debug("setting proxy to {}", proxy);
                config = RequestConfig.copy(config).setProxy(proxy).build();
                httpPost.setConfig(config);
                final DefaultProxyRoutePlanner routePlanner = new DefaultProxyRoutePlanner(proxy);
                // Only build a client when the caller did not supply one.
                if (httpClient == null) {
                    httpClient = HttpClientBuilder
                        .create()
                        .setRoutePlanner(routePlanner)
                        .setDefaultRequestConfig(config)
                        .build();
                }
            } else {
                if (httpClient == null) {
                    httpClient = HttpClientBuilder.create().setDefaultRequestConfig(config).build();
                }
            }
            response = httpClient.execute(httpPost);
            if (responseHandler != null) {
                responseHandler.handle(response);
            }
            // send request
            log.debug("POST request returned {}", new StatusLine(response).toString());
            statusCode = response.getCode();
        } catch (IOException e) {
            // Best-effort metrics upload: failures are logged, not propagated.
            log.error("Could not submit metrics to Confluent: {}", e.getMessage());
        } finally {
            if (httpClient != null) {
                try {
                    httpClient.close();
                } catch (IOException e) {
                    log.warn("could not close http client", e);
                }
            }
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    log.warn("could not close http response", e);
                }
            }
        }
    } else {
        statusCode = HttpStatus.SC_BAD_REQUEST;
    }
    return statusCode;
}
/**
 * Live-endpoint smoke test: a randomly chosen valid customer id must yield
 * either 200 (reached the service) or 502 (offline / gateway unavailable).
 */
@Test
public void testSubmitValidCustomer() {
    // Given
    HttpPost p = new HttpPost(SECURE_LIVE_TEST_ENDPOINT);
    byte[] anyData = "anyData".getBytes(StandardCharsets.UTF_8);
    int randomIndex = ThreadLocalRandom.current().nextInt(CustomerIdExamples.VALID_CUSTOMER_IDS.size());
    String validCustomerId = CustomerIdExamples.VALID_CUSTOMER_IDS.get(randomIndex);
    // When/Then
    int status = WebClient.send(validCustomerId, anyData, p, null);
    // if we are not connected to the internet this test should still pass
    assertTrue("customerId=" + validCustomerId,
        status == HttpStatus.SC_OK || status == HttpStatus.SC_BAD_GATEWAY);
}
/**
 * Injects schema information into CREATE (CreateSource) and CREATE ... AS
 * SELECT (CreateAsSelect) statements; all other statements pass through
 * unchanged. When injection yields nothing, the original statement is kept.
 *
 * KsqlStatementExceptions propagate as-is; other KsqlExceptions are re-wrapped
 * as KsqlStatementException carrying the masked statement text.
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
        && !(statement.getStatement() instanceof CreateAsSelect)) {
        return statement;
    }
    try {
        if (statement.getStatement() instanceof CreateSource) {
            final ConfiguredStatement<CreateSource> createStatement =
                (ConfiguredStatement<CreateSource>) statement;
            return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
        } else {
            final ConfiguredStatement<CreateAsSelect> createStatement =
                (ConfiguredStatement<CreateAsSelect>) statement;
            return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
                createStatement);
        }
    } catch (final KsqlStatementException e) {
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
            ErrorMessageUtil.buildErrorMessage(e),
            statement.getMaskedStatementText(),
            e.getCause());
    }
}
@Test
public void shouldThrowIfCsasValueFormatDoesnotSupportInference() {
    // Given: VALUE_SCHEMA_ID is set but the value format (kafka/delimited)
    // does not support schema inference.
    givenFormatsAndProps(null, "kafka",
        ImmutableMap.of("VALUE_SCHEMA_ID", new IntegerLiteral(42)));
    givenDDLSchemaAndFormats(LOGICAL_SCHEMA, "kafka", "delimited",
        SerdeFeature.UNWRAP_SINGLES, SerdeFeature.UNWRAP_SINGLES);

    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> injector.inject(csasStatement)
    );

    // Then: the injector must reject the statement with a clear message.
    assertThat(e.getMessage(), containsString("VALUE_FORMAT should support schema inference when "
        + "VALUE_SCHEMA_ID is provided. Current format is DELIMITED."));
}
/**
 * Determines this host's outward-facing IP address by "connecting" a UDP
 * socket to a well-known external address (no packets are sent) and reading
 * the local address the OS picks for that route.
 *
 * Honors -Djava.net.preferIPv6Addresses: tries the preferred family first and
 * falls back to the other on an unroutable network. If the OS reports a
 * wildcard address, falls back to a host-level address lookup.
 *
 * Fix: the DatagramSocket was previously never closed (resource leak); it is
 * now managed with try-with-resources.
 *
 * @return the textual host address
 * @throws SocketException      when the socket cannot be created
 * @throws UnknownHostException when the fallback address lookup fails
 */
public static String getHostAddress() throws SocketException, UnknownHostException {
    boolean isIPv6Preferred = Boolean.parseBoolean(System.getProperty("java.net.preferIPv6Addresses"));
    try (DatagramSocket ds = new DatagramSocket()) {
        try {
            ds.connect(isIPv6Preferred
                ? Inet6Address.getByName(DUMMY_OUT_IPV6)
                : Inet4Address.getByName(DUMMY_OUT_IPV4), HTTP_PORT);
        } catch (java.io.UncheckedIOException e) {
            log_fallback: {
                LOGGER.warn(e.getMessage());
            }
            if (isIPv6Preferred) {
                LOGGER.warn("No IPv6 route available on host, falling back to IPv4");
                ds.connect(Inet4Address.getByName(DUMMY_OUT_IPV4), HTTP_PORT);
            } else {
                LOGGER.warn("No IPv4 route available on host, falling back to IPv6");
                ds.connect(Inet6Address.getByName(DUMMY_OUT_IPV6), HTTP_PORT);
            }
        }
        InetAddress localAddress = ds.getLocalAddress();
        if (localAddress.isAnyLocalAddress()) {
            // Wildcard (0.0.0.0 / ::): the connect trick failed; resolve explicitly.
            localAddress = isIPv6Preferred ? getLocalIPv6Address() : InetAddress.getLocalHost();
        }
        return localAddress.getHostAddress();
    }
}
@Test(description = "Test getHostAddress with preferIPv6Addresses=true in dual stack environment")
public void testGetHostAddressDualStackEnvIPv6Preferred() {
    System.setProperty("java.net.preferIPv6Addresses", "true");
    // The mocked socket reports a concrete (non-wildcard) IPv6 local address.
    InetAddress mockInetAddress = mock(InetAddress.class);
    when(mockInetAddress.isAnyLocalAddress()).thenReturn(false);
    when(mockInetAddress.getHostAddress()).thenReturn(LOCAL_ADDRESS_IPV6);
    try (MockedConstruction<DatagramSocket> mockedConstructionDatagramSocket =
            mockConstruction(DatagramSocket.class, initDatagramSocket(mockInetAddress, NetworkEnv.DUAL_STACK))) {
        String hostAddress = NetUtils.getHostAddress();
        DatagramSocket mockDatagramSocket = mockedConstructionDatagramSocket.constructed().get(0);
        // IPv6 is preferred and routable: exactly one socket, one connect, IPv6 result.
        assertEquals(LOCAL_ADDRESS_IPV6, hostAddress);
        assertEquals(1, mockedConstructionDatagramSocket.constructed().size());
        verify(mockDatagramSocket, times(1)).connect(any(), anyInt());
    } catch (SocketException | UnknownHostException e) {
        Assert.fail("Should not throw: " + e.getMessage());
    }
}
@Override public void updateDiyPage(DiyPageUpdateReqVO updateReqVO) { // 校验存在 validateDiyPageExists(updateReqVO.getId()); // 校验名称唯一 validateNameUnique(updateReqVO.getId(), updateReqVO.getTemplateId(), updateReqVO.getName()); // 更新 DiyPageDO updateObj = DiyPageConvert.INSTANCE.convert(updateReqVO); diyPageMapper.updateById(updateObj); }
@Test public void testUpdateDiyPage_success() { // mock 数据 DiyPageDO dbDiyPage = randomPojo(DiyPageDO.class); diyPageMapper.insert(dbDiyPage);// @Sql: 先插入出一条存在的数据 // 准备参数 DiyPageUpdateReqVO reqVO = randomPojo(DiyPageUpdateReqVO.class, o -> { o.setId(dbDiyPage.getId()); // 设置更新的 ID }); // 调用 diyPageService.updateDiyPage(reqVO); // 校验是否更新正确 DiyPageDO diyPage = diyPageMapper.selectById(reqVO.getId()); // 获取最新的 assertPojoEquals(reqVO, diyPage); }
/**
 * Starts executing the given tasklets, partitioned into cooperative and
 * blocking groups (the cooperative check runs under the job class loader).
 * Cancellation is propagated through the supplied cancellationFuture.
 *
 * @return a future completed when execution finishes; failures during
 *         submission complete it exceptionally.
 */
CompletableFuture<Void> beginExecute(
        @Nonnull List<? extends Tasklet> tasklets,
        @Nonnull CompletableFuture<Void> cancellationFuture,
        @Nonnull ClassLoader jobClassLoader
) {
    final ExecutionTracker executionTracker = new ExecutionTracker(tasklets.size(), cancellationFuture);
    try {
        // Split by cooperativeness; isCooperative() may touch user code, so it
        // runs with the job class loader installed.
        final Map<Boolean, List<Tasklet>> byCooperation =
                tasklets.stream().collect(partitioningBy(
                        tasklet -> doWithClassLoader(jobClassLoader, tasklet::isCooperative)
                ));
        submitCooperativeTasklets(executionTracker, jobClassLoader, byCooperation.get(true));
        submitBlockingTasklets(executionTracker, jobClassLoader, byCooperation.get(false));
    } catch (Throwable t) {
        // Any submission failure is reported through the returned future.
        executionTracker.future.internalCompleteExceptionally(t);
    }
    return executionTracker.future;
}
@Test
public void when_cancellationFutureCompleted_then_fails() throws Throwable {
    // Given: a tasklet that never finishes on its own.
    final MockTasklet t = new MockTasklet().callsBeforeDone(Integer.MAX_VALUE);
    CompletableFuture<Void> f = tes.beginExecute(singletonList(t), cancellationFuture, classLoader);

    // When: the cancellation future completes (normally, not exceptionally).
    cancellationFuture.complete(null);

    // Then: the execution future must fail with IllegalStateException.
    assertThrows(IllegalStateException.class, () -> {
        try {
            f.join();
        } catch (CompletionException e) {
            // Unwrap the CompletionException to assert on the real cause.
            throw peel(e);
        }
    });
}
public Person getPerson(int key) { // Try to find person in the identity map Person person = this.identityMap.getPerson(key); if (person != null) { LOGGER.info("Person found in the Map"); return person; } else { // Try to find person in the database person = this.db.find(key); if (person != null) { this.identityMap.addPerson(person); LOGGER.info("Person found in DB."); return person; } LOGGER.info("Person with this ID does not exist."); return null; } }
@Test
void personNotFoundInDB(){
    PersonFinder personFinder = new PersonFinder();
    // Init an empty database for the finder: every lookup must fail.
    PersonDbSimulatorImplementation db = new PersonDbSimulatorImplementation();
    personFinder.setDb(db);
    Assertions.assertThrows(IdNotFoundException.class,()->personFinder.getPerson(1));
    // Dummy persons
    Person person1 = new Person(1, "John", 27304159);
    Person person2 = new Person(2, "Thomas", 42273631);
    Person person3 = new Person(3, "Arthur", 27489171);
    Person person4 = new Person(4, "Finn", 20499078);
    Person person5 = new Person(5, "Michael", 40599078);
    db.insert(person1);
    db.insert(person2);
    db.insert(person3);
    db.insert(person4);
    db.insert(person5);
    personFinder.setDb(db);
    // Assure that the database has been updated.
    Assertions.assertEquals(person4,personFinder.getPerson(4),"Find returns incorrect record");
    // Assure key is in DB now.
    Assertions.assertDoesNotThrow(()->personFinder.getPerson(1));
    // Assure key not in DB.
    Assertions.assertThrows(IdNotFoundException.class,()->personFinder.getPerson(6));
}
/**
 * Routes a request with no pre-bound path parameters; delegates to the
 * three-argument overload with an empty map.
 */
public RouteResult<T> route(HttpMethod method, String path) {
    return route(method, path, Collections.emptyMap());
}
@Test
void testEmptyParams() {
    // A static route carries no path parameters.
    RouteResult<String> result = router.route(GET, "/articles");

    assertThat(result.target()).isEqualTo("index");
    assertThat(result.pathParams()).isEmpty();
}
/**
 * Encodes an ASCII string at the end of a buffer, truncating when needed.
 *
 * The layout is a 4-byte little-endian length followed by the characters. If
 * the value does not fit into (remainingCapacity - 4) characters, the first
 * (maxLength - 3) characters are written followed by "...", filling the
 * remaining capacity exactly.
 *
 * @return the number of bytes written (full string encoding, or
 *         remainingCapacity when truncated)
 */
static int encodeTrailingString(
    final UnsafeBuffer encodingBuffer, final int offset, final int remainingCapacity, final String value) {
    // Room for characters once the 4-byte length prefix is accounted for.
    final int maxLength = remainingCapacity - SIZE_OF_INT;
    if (value.length() <= maxLength) {
        return encodingBuffer.putStringAscii(offset, value, LITTLE_ENDIAN);
    } else {
        // Truncated: write the full maxLength as the declared length, then
        // maxLength-3 real characters and a "..." marker.
        encodingBuffer.putInt(offset, maxLength, LITTLE_ENDIAN);
        encodingBuffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT, value, 0, maxLength - 3);
        encodingBuffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT + maxLength - 3, "...");
        return remainingCapacity;
    }
}
@Test
void encodeTrailingStringAsAsciiWhenPayloadExceedsMaxMessageSizeWithoutHeader() {
    final int offset = 23;
    final int remainingCapacity = 59;
    // A 100-char value cannot fit into 59 - 4 = 55 characters.
    final char[] chars = new char[100];
    fill(chars, 'x');
    final String value = new String(chars);

    final int encodedLength = encodeTrailingString(buffer, offset, remainingCapacity, value);

    // The whole remaining capacity is consumed and the text ends with "...".
    assertEquals(remainingCapacity, encodedLength);
    assertEquals(value.substring(0, remainingCapacity - SIZE_OF_INT - 3) + "...",
        buffer.getStringAscii(offset));
}
/**
 * Opens a ZipBuilder that writes to destZipFile; callers then add folders and
 * finish with done(). The second constructor argument is 0 here — presumably
 * the compression level; confirm against ZipBuilder's constructor.
 *
 * @param destZipFile    the archive file to create
 * @param excludeRootDir whether folder root directories are left out of entry paths
 */
public ZipBuilder zipContentsOfMultipleFolders(File destZipFile, boolean excludeRootDir) throws IOException {
    return new ZipBuilder(this, 0, new FileOutputStream(destZipFile), excludeRootDir);
}
@Test
void shouldZipMultipleFolderContentsWhenNotExcludingRootDirectory() throws IOException {
    // Two source folders, two files each.
    File folderOne = createDirectoryInTempDir("folder1");
    FileUtils.writeStringToFile(new File(folderOne, "folder1-file1.txt"), "folder1-file1", UTF_8);
    FileUtils.writeStringToFile(new File(folderOne, "folder1-file2.txt"), "folder1-file2", UTF_8);
    File folderTwo = createDirectoryInTempDir("folder2");
    FileUtils.writeStringToFile(new File(folderTwo, "folder2-file1.txt"), "folder2-file1", UTF_8);
    FileUtils.writeStringToFile(new File(folderTwo, "folder2-file2.txt"), "folder2-file2", UTF_8);
    File targetZipFile = tempDir.resolve("final2.zip").toFile();
    ZipBuilder zipBuilder = zipUtil.zipContentsOfMultipleFolders(targetZipFile, false);
    zipBuilder.add("folder-one", folderOne);
    zipBuilder.add("folder-two", folderTwo);
    zipBuilder.done();
    // With excludeRootDir=false the source folder name stays in the entry path.
    assertContent(targetZipFile, "folder-one/folder1/folder1-file1.txt", "folder1-file1");
    assertContent(targetZipFile, "folder-one/folder1/folder1-file2.txt", "folder1-file2");
    assertContent(targetZipFile, "folder-two/folder2/folder2-file1.txt", "folder2-file1");
    assertContent(targetZipFile, "folder-two/folder2/folder2-file2.txt", "folder2-file2");
}
/**
 * Joins the stream's elements into a string; delegates to the Iterator
 * overload. Note: this consumes the stream.
 */
public String join(final Stream<?> parts) {
    return join(parts.iterator());
}
// A single-element list joins to just that element's string form, with no separator.
@Test
public void shouldHandleSingleItem() {
    assertThat(joiner.join(ImmutableList.of(1)), is("1"));
}
/**
 * Kicks off scheduling by collecting every pipelined region that qualifies as a
 * source region and handing the whole set to {@code maybeScheduleRegions}.
 */
@Override
public void startScheduling() {
    final Set<SchedulingPipelinedRegion> regionsToSchedule =
            IterableUtils.toStream(schedulingTopology.getAllPipelinedRegions())
                    .filter(region -> isSourceRegion(region))
                    .collect(Collectors.toSet());

    maybeScheduleRegions(regionsToSchedule);
}
// With a BLOCKING_PERSISTENT edge between v1 and v2, only the source region
// (v1) is scheduled at startup; v2 must wait for the partition.
@Test
void testSchedulingTopologyWithPersistentBlockingEdges() {
    final TestingSchedulingTopology topology = new TestingSchedulingTopology();

    final List<TestingSchedulingExecutionVertex> v1 =
            topology.addExecutionVertices().withParallelism(1).finish();
    final List<TestingSchedulingExecutionVertex> v2 =
            topology.addExecutionVertices().withParallelism(1).finish();

    topology.connectPointwise(v1, v2)
            .withResultPartitionState(ResultPartitionState.CREATED)
            .withResultPartitionType(ResultPartitionType.BLOCKING_PERSISTENT)
            .finish();

    startScheduling(topology);

    final List<List<TestingSchedulingExecutionVertex>> expectedScheduledVertices =
            new ArrayList<>();
    expectedScheduledVertices.add(Arrays.asList(v1.get(0)));
    assertLatestScheduledVerticesAreEqualTo(
            expectedScheduledVertices, testingSchedulerOperation);
}
/**
 * Computes the instant at the end of the numPeriods-th period after {@code now},
 * where the period unit is this calendar's configured periodicity type.
 *
 * <p>This calendar instance itself is used as scratch state: it is set to
 * {@code now}, rounded down according to {@code datePattern}, then advanced by
 * {@code numPeriods} units (which may be negative).
 *
 * @param now        reference instant.
 * @param numPeriods number of periods to advance (negative to go back).
 * @return the resulting instant.
 * @throws IllegalStateException if the periodicity type is not recognized.
 */
public Date getEndOfNextNthPeriod(Date now, int numPeriods) {
    this.setTime(now);
    roundDownTime(this, this.datePattern);

    switch (this.periodicityType) {
    case TOP_OF_MILLISECOND:
        this.add(Calendar.MILLISECOND, numPeriods);
        break;
    case TOP_OF_SECOND:
        this.add(Calendar.SECOND, numPeriods);
        break;
    case TOP_OF_MINUTE:
        this.add(Calendar.MINUTE, numPeriods);
        break;
    case TOP_OF_HOUR:
        this.add(Calendar.HOUR_OF_DAY, numPeriods);
        break;
    case TOP_OF_DAY:
        this.add(Calendar.DATE, numPeriods);
        break;
    case TOP_OF_WEEK:
        // Snap to the first day of the week before stepping whole weeks.
        this.set(Calendar.DAY_OF_WEEK, this.getFirstDayOfWeek());
        this.add(Calendar.WEEK_OF_YEAR, numPeriods);
        break;
    case TOP_OF_MONTH:
        this.add(Calendar.MONTH, numPeriods);
        break;
    default:
        throw new IllegalStateException("Unknown periodicity type.");
    }

    return this.getTime();
}
// Pattern "yyyy-SSS" omits month/day/hour/minute/second, so those fields are
// rounded down to their minimums while year and millisecond survive; with
// numPeriods = -1 the millisecond steps back from 876 to 875.
@Test
public void roundsDateWithMissingMonthDayUnits() throws ParseException {
    final Date REF_DATE = parseDate("yyyy-MM-dd HH:mm:ss.SSS", "2000-12-25 09:30:49.876");
    Calendar cal = getEndOfNextNthPeriod("yyyy-SSS", REF_DATE, -1);
    assertEquals(2000, cal.get(Calendar.YEAR));
    assertEquals(Calendar.JANUARY, cal.get(Calendar.MONTH));
    assertEquals(1, cal.get(Calendar.DAY_OF_MONTH));
    assertEquals(0, cal.get(Calendar.HOUR_OF_DAY));
    assertEquals(0, cal.get(Calendar.MINUTE));
    assertEquals(0, cal.get(Calendar.SECOND));
    assertEquals(875, cal.get(Calendar.MILLISECOND));
}
/**
 * @return the data source type handled by this mapper (MySQL).
 */
@Override
public String getDataSource() {
    return DataSourceConstant.MYSQL;
}
// The MySQL tenant-capacity mapper must report the MySQL data source constant.
@Test
void testGetDataSource() {
    String dataSource = tenantCapacityMapperByMySql.getDataSource();
    assertEquals(DataSourceConstant.MYSQL, dataSource);
}
/**
 * Returns true only when the table has an instance-assignment config map and every
 * entry in it is both pool-based and replica-group based.
 *
 * <p>A missing map or any null/non-conforming entry makes the whole check false.
 * Note: an empty (but non-null) map yields true, matching the accumulator form.
 *
 * @param tableConfig table config to inspect.
 * @return whether all instance assignment configs use pools and replica groups.
 */
static boolean isTableUsingInstancePoolAndReplicaGroup(@Nonnull TableConfig tableConfig) {
    Map<String, InstanceAssignmentConfig> instanceAssignmentConfigMap =
        tableConfig.getInstanceAssignmentConfigMap();
    if (instanceAssignmentConfigMap == null) {
        return false;
    }
    for (InstanceAssignmentConfig instanceAssignmentConfig : instanceAssignmentConfigMap.values()) {
        if (instanceAssignmentConfig == null) {
            return false;
        }
        if (!instanceAssignmentConfig.getTagPoolConfig().isPoolBased()
            || !instanceAssignmentConfig.getReplicaGroupPartitionConfig().isReplicaGroupBased()) {
            return false;
        }
    }
    return true;
}
// An OFFLINE table whose tag-pool config is NOT pool-based (second ctor arg =
// false) must not be reported as using instance pool + replica group, even
// though the replica-group partition config is replica-group based.
@Test
public void testNoPoolsOfflineTable() {
    InstanceAssignmentConfig config =
        new InstanceAssignmentConfig(new InstanceTagPoolConfig("DefaultTenant", false, 0, null), null,
            new InstanceReplicaGroupPartitionConfig(true, 0, 0, 0, 0, 0, false, null), null, false);
    TableConfig tableConfig =
        new TableConfig("table", TableType.OFFLINE.name(), new SegmentsValidationAndRetentionConfig(),
            new TenantConfig("DefaultTenant", "DefaultTenant", null), new IndexingConfig(),
            new TableCustomConfig(null), null, null, null, null, Map.of("OFFLINE", config),
            null, null, null, null, null, null, false, null, null, null);
    Assert.assertFalse(TableConfigUtils.isTableUsingInstancePoolAndReplicaGroup(tableConfig));
}
/**
 * Registers a NodeManager with this ResourceManager.
 *
 * <p>Validation steps, each of which short-circuits with a SHUTDOWN response:
 * minimum NM version check, hostname resolvability (when enabled), node
 * include/exclude list membership, and minimum memory/vcore capability.
 * On success the node is inserted as a new RMNode (or handled as a reconnect),
 * token master keys are returned, node labels/attributes are updated, and the
 * response carries a NORMAL action plus RM identity/version.
 */
@SuppressWarnings("unchecked")
@Override
public RegisterNodeManagerResponse registerNodeManager(
    RegisterNodeManagerRequest request) throws YarnException, IOException {
  NodeId nodeId = request.getNodeId();
  String host = nodeId.getHost();
  int cmPort = nodeId.getPort();
  int httpPort = request.getHttpPort();
  Resource capability = request.getResource();
  String nodeManagerVersion = request.getNMVersion();
  Resource physicalResource = request.getPhysicalResource();
  NodeStatus nodeStatus = request.getNodeStatus();

  RegisterNodeManagerResponse response = recordFactory
      .newRecordInstance(RegisterNodeManagerResponse.class);

  // Reject NodeManagers older than the configured minimum version.
  if (!minimumNodeManagerVersion.equals("NONE")) {
    if (minimumNodeManagerVersion.equals("EqualToRM")) {
      minimumNodeManagerVersion = YarnVersionInfo.getVersion();
    }
    if ((nodeManagerVersion == null) ||
        (VersionUtil.compareVersions(nodeManagerVersion,minimumNodeManagerVersion)) < 0) {
      String message = "Disallowed NodeManager Version " + nodeManagerVersion
          + ", is less than the minimum version " + minimumNodeManagerVersion
          + " sending SHUTDOWN signal to " + "NodeManager.";
      LOG.info(message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }
  }

  if (checkIpHostnameInRegistration) {
    InetSocketAddress nmAddress =
        NetUtils.createSocketAddrForHost(host, cmPort);
    InetAddress inetAddress = Server.getRemoteIp();
    if (inetAddress != null && nmAddress.isUnresolved()) {
      // Reject registration of unresolved nm to prevent resourcemanager
      // getting stuck at allocations.
      final String message = "hostname cannot be resolved (ip="
          + inetAddress.getHostAddress() + ", hostname=" + host + ")";
      LOG.warn("Unresolved nodemanager registration: " + message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }
  }

  // Check if this node is a 'valid' node
  if (!this.nodesListManager.isValidNode(host) &&
      !isNodeInDecommissioning(nodeId)) {
    String message = "Disallowed NodeManager from " + host
        + ", Sending SHUTDOWN signal to the NodeManager.";
    LOG.info(message);
    response.setDiagnosticsMessage(message);
    response.setNodeAction(NodeAction.SHUTDOWN);
    return response;
  }

  // check if node's capacity is load from dynamic-resources.xml
  String nid = nodeId.toString();
  Resource dynamicLoadCapability = loadNodeResourceFromDRConfiguration(nid);
  if (dynamicLoadCapability != null) {
    LOG.debug("Resource for node: {} is adjusted from: {} to: {} due to"
        + " settings in dynamic-resources.xml.", nid, capability,
        dynamicLoadCapability);
    capability = dynamicLoadCapability;
    // sync back with new resource.
    response.setResource(capability);
  }

  // Check if this node has minimum allocations
  if (capability.getMemorySize() < minAllocMb
      || capability.getVirtualCores() < minAllocVcores) {
    String message = "NodeManager from " + host
        + " doesn't satisfy minimum allocations, Sending SHUTDOWN"
        + " signal to the NodeManager. Node capabilities are " + capability
        + "; minimums are " + minAllocMb + "mb and " + minAllocVcores
        + " vcores";
    LOG.info(message);
    response.setDiagnosticsMessage(message);
    response.setNodeAction(NodeAction.SHUTDOWN);
    return response;
  }

  response.setContainerTokenMasterKey(containerTokenSecretManager
      .getCurrentKey());
  response.setNMTokenMasterKey(nmTokenSecretManager
      .getCurrentKey());

  RMNode rmNode = new RMNodeImpl(nodeId, rmContext, host, cmPort, httpPort,
      resolve(host), capability, nodeManagerVersion, physicalResource);

  // putIfAbsent distinguishes a brand-new node (oldNode == null) from a
  // reconnecting one.
  RMNode oldNode = this.rmContext.getRMNodes().putIfAbsent(nodeId, rmNode);
  if (oldNode == null) {
    RMNodeStartedEvent startEvent = new RMNodeStartedEvent(nodeId,
        request.getNMContainerStatuses(),
        request.getRunningApplications(), nodeStatus);
    if (request.getLogAggregationReportsForApps() != null
        && !request.getLogAggregationReportsForApps().isEmpty()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Found the number of previous cached log aggregation "
            + "status from nodemanager:" + nodeId + " is :"
            + request.getLogAggregationReportsForApps().size());
      }
      startEvent.setLogAggregationReportsForApps(request
          .getLogAggregationReportsForApps());
    }
    this.rmContext.getDispatcher().getEventHandler().handle(
        startEvent);
  } else {
    LOG.info("Reconnect from the node at: " + host);
    this.nmLivelinessMonitor.unregister(nodeId);

    if (CollectionUtils.isEmpty(request.getRunningApplications())
        && rmNode.getState() != NodeState.DECOMMISSIONING
        && rmNode.getHttpPort() != oldNode.getHttpPort()) {
      // Reconnected node differs, so replace old node and start new node
      switch (rmNode.getState()) {
      case RUNNING:
        ClusterMetrics.getMetrics().decrNumActiveNodes();
        break;
      case UNHEALTHY:
        ClusterMetrics.getMetrics().decrNumUnhealthyNMs();
        break;
      default:
        LOG.debug("Unexpected Rmnode state");
      }
      this.rmContext.getDispatcher().getEventHandler()
          .handle(new NodeRemovedSchedulerEvent(rmNode));
      this.rmContext.getRMNodes().put(nodeId, rmNode);
      this.rmContext.getDispatcher().getEventHandler()
          .handle(new RMNodeStartedEvent(nodeId, null, null, nodeStatus));
    } else {
      // Reset heartbeat ID since node just restarted.
      oldNode.resetLastNodeHeartBeatResponse();

      this.rmContext.getDispatcher().getEventHandler()
          .handle(new RMNodeReconnectEvent(nodeId, rmNode,
              request.getRunningApplications(),
              request.getNMContainerStatuses()));
    }
  }
  // On every node manager register we will be clearing NMToken keys if
  // present for any running application.
  this.nmTokenSecretManager.removeNodeKey(nodeId);
  this.nmLivelinessMonitor.register(nodeId);

  // Handle received container status, this should be processed after new
  // RMNode inserted
  if (!rmContext.isWorkPreservingRecoveryEnabled()) {
    if (!request.getNMContainerStatuses().isEmpty()) {
      LOG.info("received container statuses on node manager register :"
          + request.getNMContainerStatuses());
      for (NMContainerStatus status : request.getNMContainerStatuses()) {
        handleNMContainerStatus(status, nodeId);
      }
    }
  }

  // Update node's labels to RM's NodeLabelManager.
  Set<String> nodeLabels = NodeLabelsUtils.convertToStringSet(
      request.getNodeLabels());
  if (isDistributedNodeLabelsConf && nodeLabels != null) {
    try {
      updateNodeLabelsFromNMReport(nodeLabels, nodeId);
      response.setAreNodeLabelsAcceptedByRM(true);
    } catch (IOException ex) {
      // Ensure the exception is captured in the response
      response.setDiagnosticsMessage(ex.getMessage());
      response.setAreNodeLabelsAcceptedByRM(false);
    }
  } else if (isDelegatedCentralizedNodeLabelsConf) {
    this.rmContext.getRMDelegatedNodeLabelsUpdater().updateNodeLabels(nodeId);
  }

  // Update node's attributes to RM's NodeAttributesManager.
  if (request.getNodeAttributes() != null) {
    try {
      // update node attributes if necessary then update heartbeat response
      updateNodeAttributesIfNecessary(nodeId, request.getNodeAttributes());
      response.setAreNodeAttributesAcceptedByRM(true);
    } catch (IOException ex) {
      //ensure the error message is captured and sent across in response
      String errorMsg =
          response.getDiagnosticsMessage() == null ? ex.getMessage() :
              response.getDiagnosticsMessage() + "\n" + ex.getMessage();
      response.setDiagnosticsMessage(errorMsg);
      response.setAreNodeAttributesAcceptedByRM(false);
    }
  }

  StringBuilder message = new StringBuilder();
  message.append("NodeManager from node ").append(host).append("(cmPort: ")
      .append(cmPort).append(" httpPort: ");
  message.append(httpPort).append(") ")
      .append("registered with capability: ").append(capability);
  message.append(", assigned nodeId ").append(nodeId);
  if (response.getAreNodeLabelsAcceptedByRM()) {
    message.append(", node labels { ").append(
        StringUtils.join(",", nodeLabels) + " } ");
  }
  if (response.getAreNodeAttributesAcceptedByRM()) {
    message.append(", node attributes { ")
        .append(request.getNodeAttributes() + " } ");
  }

  LOG.info(message.toString());
  response.setNodeAction(NodeAction.NORMAL);
  response.setRMIdentifier(ResourceManager.getClusterTimeStamp());
  response.setRMVersion(YarnVersionInfo.getVersion());
  return response;
}
// With RM_NM_REGISTRATION_IP_HOSTNAME_CHECK_KEY enabled: registering a node
// whose hostname cannot be resolved must yield SHUTDOWN with a diagnostic,
// while a resolvable "localhost" node registers with NORMAL.
@Test
public void testNMIpHostNameResolution() throws Exception {
  Configuration conf = new Configuration();
  conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "localhost:"
      + ServerSocketUtil.getPort(10000, 10));
  conf.setBoolean(YarnConfiguration.RM_NM_REGISTRATION_IP_HOSTNAME_CHECK_KEY,
      true);
  MockRM mockRM = new MockRM(conf) {
    @Override
    protected ResourceTrackerService createResourceTrackerService() {
      return new ResourceTrackerService(getRMContext(), nodesListManager,
          this.nmLivelinessMonitor,
          rmContext.getContainerTokenSecretManager(),
          rmContext.getNMTokenSecretManager()) {
      };
    }
  };
  mockRM.start();
  ResourceTracker rmTracker =
      ServerRMProxy.createRMProxy(mockRM.getConfig(), ResourceTracker.class);
  // Random host suffix makes the hostname unresolvable -> expect SHUTDOWN.
  RegisterNodeManagerResponse response = rmTracker.registerNodeManager(
      RegisterNodeManagerRequest.newInstance(
          NodeId.newInstance("host1" + System.currentTimeMillis(), 1234),
          1236, Resource.newInstance(10000, 10), "2", new ArrayList<>(),
          new ArrayList<>()));
  Assert
      .assertEquals("Shutdown signal should be received", NodeAction.SHUTDOWN,
          response.getNodeAction());
  Assert.assertTrue("Diagnostic Message", response.getDiagnosticsMessage()
      .contains("hostname cannot be resolved "));
  // Test success
  rmTracker = ServerRMProxy.createRMProxy(mockRM.getConfig(),
      ResourceTracker.class);
  response = rmTracker.registerNodeManager(RegisterNodeManagerRequest
      .newInstance(NodeId.newInstance("localhost", 1234), 1236,
          Resource.newInstance(10000, 10), "2", new ArrayList<>(),
          new ArrayList<>()));
  Assert.assertEquals("Successfull registration", NodeAction.NORMAL,
      response.getNodeAction());
  mockRM.stop();
}
/**
 * Converts a {@code file:} URL to a {@link File}, or returns null (after logging)
 * for any other protocol.
 *
 * <p>Fix: the previous code used the deprecated single-argument
 * {@link URLDecoder#decode(String)}, which decodes with the platform default
 * charset. URLs produced by {@code File.toURI().toURL()} are percent-encoded as
 * UTF-8, so non-ASCII file names could be mangled on platforms whose default
 * charset is not UTF-8. Decode explicitly as UTF-8 instead.
 *
 * @param url the URL to convert.
 * @return the decoded file, or null when the URL is not a file URL.
 */
File convertToFile(URL url) {
    String protocol = url.getProtocol();
    if ("file".equals(protocol)) {
        try {
            return new File(URLDecoder.decode(url.getFile(), "UTF-8"));
        } catch (java.io.UnsupportedEncodingException e) {
            // UTF-8 is guaranteed to be supported by every JVM.
            throw new AssertionError(e);
        }
    } else {
        addInfo("URL [" + url + "] is not of type file");
        return null;
    }
}
@Test // See http://jira.qos.ch/browse/LBCORE-119 public void fileToURLAndBack() throws MalformedURLException { File file = new File("a b.xml"); URL url = file.toURI().toURL(); ConfigurationWatchList cwl = new ConfigurationWatchList(); File back = cwl.convertToFile(url); assertEquals(file.getName(), back.getName()); }
/**
 * Looks up the data table type registered for {@code type}, using the identity
 * function as the (no-op) type transformer.
 *
 * @param type the target type to look up.
 * @return the matching DataTableType, as produced by the two-argument overload.
 */
DataTableType lookupTableTypeByType(Type type) {
    return lookupTableTypeByType(type, Function.identity());
}
// A null cell in a List<List<Double>> table must transform to a null element,
// not throw or become a default value.
@Test
void null_double_transformed_to_null() {
    DataTableTypeRegistry registry = new DataTableTypeRegistry(Locale.ENGLISH);
    DataTableType dataTableType = registry.lookupTableTypeByType(LIST_OF_LIST_OF_DOUBLE);
    assertEquals(
        singletonList(singletonList(null)),
        dataTableType.transform(singletonList(singletonList(null))));
}
/**
 * Recursively flattens a nested map into a single-level map whose keys are the
 * nested key paths joined with {@code separator}.
 *
 * <p>Example: {@code {"a": {"b": 1}}} with separator "." becomes {@code {"a.b": 1}}.
 * When {@code parentKey} is empty, top-level keys are used as-is (no leading separator).
 *
 * @param originalMap possibly nested map to flatten.
 * @param parentKey   prefix for all keys; pass "" for the top-level call.
 * @param separator   string inserted between nested key segments.
 * @return a new flat HashMap; the input is not modified.
 */
public static Map<String, Object> flatten(Map<String, Object> originalMap, String parentKey, String separator) {
    final Map<String, Object> flattened = new HashMap<>();
    originalMap.forEach((name, value) -> {
        final String fullKey = parentKey.isEmpty() ? name : parentKey + separator + name;
        if (value instanceof Map) {
            // Recurse into nested maps, carrying the accumulated key path.
            @SuppressWarnings("unchecked")
            final Map<String, Object> nested = (Map<String, Object>) value;
            flattened.putAll(flatten(nested, fullKey, separator));
        } else {
            flattened.put(fullKey, value);
        }
    });
    return flattened;
}
// Two levels of nesting flatten into underscore-joined key paths.
@Test
public void flattenSupportsMultipleLevels() throws Exception {
    final Map<String, Object> map = ImmutableMap.of(
        "map", ImmutableMap.of(
            "foo", "bar",
            "baz", ImmutableMap.of(
                "foo", "bar",
                "baz", "qux")));
    final Map<String, Object> expected = ImmutableMap.of(
        "map_foo", "bar",
        "map_baz_foo", "bar",
        "map_baz_baz", "qux");
    assertThat(MapUtils.flatten(map, "", "_")).isEqualTo(expected);
}
/**
 * Queues a command instructing the given worker to cancel a task.
 *
 * @param jobId    id of the job the task belongs to.
 * @param taskId   id of the task to cancel.
 * @param workerId id of the worker that should receive the command.
 */
public synchronized void submitCancelTaskCommand(long jobId, long taskId, long workerId) {
    JobCommand.Builder jobCommand = JobCommand.newBuilder()
        .setCancelTaskCommand(CancelTaskCommand.newBuilder()
            .setJobId(jobId)
            .setTaskId(taskId));
    submit(workerId, jobCommand);
}
// Submitting a cancel-task command makes exactly one pending command available
// for the target worker, carrying the job and task ids.
@Test
public void submitCancelTaskCommand() {
    long jobId = 0L;
    int taskId = 1;
    long workerId = 2L;
    mManager.submitCancelTaskCommand(jobId, taskId, workerId);
    List<JobCommand> commands = mManager.pollAllPendingCommands(workerId);
    Assert.assertEquals(1, commands.size());
    JobCommand command = commands.get(0);
    Assert.assertEquals(jobId, command.getCancelTaskCommand().getJobId());
    Assert.assertEquals(taskId, command.getCancelTaskCommand().getTaskId());
}
/**
 * String form used for JSON serialization (via {@code @JsonValue}), formatted
 * with the JVM default locale's language via the locale-aware overload.
 */
@JsonValue
public String toString() {
    return toString(Locale.getDefault().getLanguage());
}
// Round-trips a TbDate through Jackson inside a map and checks the serialized
// value matches the date's own toString().
@Test
public void tbDateSerializedMapperTest() {
    String stringDateUTC = "2023-09-06T01:04:05.345Z";
    TbDate expectedDate = new TbDate(stringDateUTC);
    String serializedTbDate =
        JacksonUtil.toJsonNode(JacksonUtil.toString(Map.of("date", expectedDate))).get("date").asText();
    Assertions.assertNotNull(serializedTbDate);
    Assertions.assertEquals(expectedDate.toString(), serializedTbDate);
}
/**
 * Builds a full {@code gs://} path from the given parts, joined with '/'.
 * The first part is conventionally the bucket name.
 *
 * @param pathParts one or more non-null, non-empty path segments.
 * @return "gs://" followed by the parts joined with '/'.
 * @throws IllegalArgumentException if no parts are given or any part is null/empty.
 */
public static String getFullGcsPath(String... pathParts) {
    if (pathParts.length == 0) {
        throw new IllegalArgumentException("Must provide at least one path part");
    }
    for (String part : pathParts) {
        if (part == null || part.isEmpty()) {
            throw new IllegalArgumentException("No path part can be null or empty");
        }
    }
    return String.format("gs://%s", String.join("/", pathParts));
}
// A single part (just the bucket) yields "gs://bucket" with no trailing slash.
@Test
public void testGetFullGcsPathOnlyBucket() {
    assertThat(ArtifactUtils.getFullGcsPath("bucket")).isEqualTo("gs://bucket");
}
/**
 * Parses a (possibly negative) int from {@code length} ASCII characters of
 * {@code cs} starting at {@code index}.
 *
 * @param cs     character sequence containing the digits.
 * @param index  start of the number (may begin with '-').
 * @param length number of characters that make up the number.
 * @return the parsed value.
 * @throws AsciiNumberFormatException on empty input, a lone '-', or overflow.
 */
public static int parseIntAscii(final CharSequence cs, final int index, final int length)
{
    if (length <= 0)
    {
        throw new AsciiNumberFormatException("empty string: index=" + index + " length=" + length);
    }

    final boolean negative = MINUS_SIGN == cs.charAt(index);
    int i = index;
    if (negative)
    {
        i++;
        // A lone '-' with no digits is not a number.
        if (1 == length)
        {
            throwParseIntError(cs, index, length);
        }
    }

    final int end = index + length;
    if (end - i < INT_MAX_DIGITS)
    {
        // Fewer digits than Integer.MAX_VALUE has: cannot overflow an int.
        final int tally = parsePositiveIntAscii(cs, index, length, i, end);
        return negative ? -tally : tally;
    }
    else
    {
        // Digit count may overflow: accumulate in a long and range-check.
        // Integer.MIN_VALUE's magnitude is allowed only when negative.
        final long tally = parsePositiveIntAsciiOverflowCheck(cs, index, length, i, end);
        if (tally > INTEGER_ABSOLUTE_MIN_VALUE || INTEGER_ABSOLUTE_MIN_VALUE == tally && !negative)
        {
            throwParseIntOverflowError(cs, index, length);
        }
        return (int)(negative ? -tally : tally);
    }
}
// A lone '-' with no digits must be rejected with AsciiNumberFormatException.
@Test
void shouldThrowExceptionWhenParsingIntegerContainingLoneMinusSign() {
    assertThrows(AsciiNumberFormatException.class, () -> parseIntAscii("-", 0, 1));
}
/**
 * Converts a DocString to {@code targetType} using the registered docstring types.
 *
 * <p>Short-circuits when the target is DocString itself. Otherwise looks up
 * converters by (content type, target type); exactly one must match.
 *
 * @param docString  docstring to convert.
 * @param targetType desired result type.
 * @return the converted value.
 * @throws CucumberDocStringException when zero or more than one converter matches.
 */
@SuppressWarnings("unchecked")
public <T> T convert(DocString docString, Type targetType) {
    if (DocString.class.equals(targetType)) {
        return (T) docString;
    }

    List<DocStringType> docStringTypes = docStringTypeRegistry.lookup(docString.getContentType(), targetType);

    // No converter registered: tailor the error to whether a content type was given.
    if (docStringTypes.isEmpty()) {
        if (docString.getContentType() == null) {
            throw new CucumberDocStringException(format(
                "It appears you did not register docstring type for %s",
                targetType.getTypeName()));
        }
        throw new CucumberDocStringException(format(
            "It appears you did not register docstring type for '%s' or %s",
            docString.getContentType(),
            targetType.getTypeName()));
    }
    // Ambiguous: more than one converter could apply; ask the user to disambiguate.
    if (docStringTypes.size() > 1) {
        List<String> suggestedContentTypes = suggestedContentTypes(docStringTypes);
        if (docString.getContentType() == null) {
            throw new CucumberDocStringException(format(
                "Multiple converters found for type %s, add one of the following content types to your docstring %s",
                targetType.getTypeName(),
                suggestedContentTypes));
        }
        throw new CucumberDocStringException(format(
            "Multiple converters found for type %s, and the content type '%s' did not match any of the registered types %s. Change the content type of the docstring or register a docstring type for '%s'",
            targetType.getTypeName(),
            docString.getContentType(),
            suggestedContentTypes,
            docString.getContentType()));
    }

    return (T) docStringTypes.get(0).transform(docString.getContent());
}
// With no converter registered for (application/json, JsonNode), conversion
// must fail with the message naming both the content type and the target type.
@Test
void throws_when_no_converter_available() {
    DocString docString = DocString.create("{\"hello\":\"world\"}", "application/json");
    CucumberDocStringException exception = assertThrows(
        CucumberDocStringException.class,
        () -> converter.convert(docString, JsonNode.class));
    assertThat(exception.getMessage(), is("" +
        "It appears you did not register docstring type for 'application/json' or com.fasterxml.jackson.databind.JsonNode"));
}
/**
 * Maps a boolean column to the dialect-specific SQL type.
 *
 * @param dialect target database dialect.
 * @return "BOOLEAN" for PostgreSQL/H2, "NUMBER(1)" for Oracle, "BIT" for MSSQL.
 * @throws UnsupportedOperationException for any other dialect.
 */
@Override
public String generateSqlType(Dialect dialect) {
    // Dereference the id first so a null id fails fast, like the switch it replaces.
    final String dialectId = dialect.getId();
    if (dialectId.equals(PostgreSql.ID) || dialectId.equals(H2.ID)) {
        return "BOOLEAN";
    }
    if (dialectId.equals(Oracle.ID)) {
        return "NUMBER(1)";
    }
    if (dialectId.equals(MsSql.ID)) {
        return "BIT";
    }
    throw new UnsupportedOperationException(String.format("Unknown dialect '%s'", dialect.getId()));
}
// Covers the SQL type for every supported dialect.
@Test
public void generate_sql_type() {
    BooleanColumnDef def = new BooleanColumnDef.Builder()
        .setColumnName("enabled")
        .setIsNullable(true)
        .build();
    assertThat(def.generateSqlType(new H2())).isEqualTo("BOOLEAN");
    assertThat(def.generateSqlType(new PostgreSql())).isEqualTo("BOOLEAN");
    assertThat(def.generateSqlType(new MsSql())).isEqualTo("BIT");
    assertThat(def.generateSqlType(new Oracle())).isEqualTo("NUMBER(1)");
}
/**
 * Records the next offset to pull from for the given remote queue,
 * overwriting any previously stored offset.
 */
void updatePullOffset(MessageQueue remoteQueue, long nextPullOffset) {
    pullOffsetTable.put(remoteQueue, nextPullOffset);
}
// A stored pull offset must be returned verbatim by nextPullOffset.
@Test
public void testUpdatePullOffset() throws Exception {
    MessageQueue messageQueue = new MessageQueue();
    localMessageCache.updatePullOffset(messageQueue, 124L);
    assertThat(localMessageCache.nextPullOffset(messageQueue)).isEqualTo(124L);
}
/**
 * Invokes the remote service, applying the configured mock policy.
 *
 * <p>Behaviour is driven by the method-level "mock" URL parameter:
 * empty/false — plain delegation; value starting with "force" — skip the
 * remote call and return the mock result directly; otherwise (fail-mock) —
 * call the remote service and fall back to the mock result when a
 * non-business RpcException is thrown or carried in the result.
 *
 * @throws RpcException business exceptions are rethrown, never mocked.
 */
@Override
public Result invoke(Invocation invocation) throws RpcException {
    Result result;

    String value = getUrl().getMethodParameter(
                    RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
            .trim();
    if (ConfigUtils.isEmpty(value)) {
        // no mock
        result = this.invoker.invoke(invocation);
    } else if (value.startsWith(FORCE_KEY)) {
        if (logger.isWarnEnabled()) {
            logger.warn(
                    CLUSTER_FAILED_MOCK_REQUEST,
                    "force mock",
                    "",
                    "force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : "
                            + getUrl());
        }
        // force:direct mock
        result = doMockInvoke(invocation, null);
    } else {
        // fail-mock
        try {
            result = this.invoker.invoke(invocation);

            // fix:#4585 — a non-business RpcException carried in the result also triggers the mock.
            if (result.getException() != null && result.getException() instanceof RpcException) {
                RpcException rpcException = (RpcException) result.getException();
                if (rpcException.isBiz()) {
                    throw rpcException;
                } else {
                    result = doMockInvoke(invocation, rpcException);
                }
            }

        } catch (RpcException e) {
            if (e.isBiz()) {
                throw e;
            }

            if (logger.isWarnEnabled()) {
                logger.warn(
                        CLUSTER_FAILED_MOCK_REQUEST,
                        "failed to mock invoke",
                        "",
                        "fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : "
                                + getUrl(),
                        e);
            }
            result = doMockInvoke(invocation, e);
        }
    }
    return result;
}
// With mock=fail configured and the invoker forced to return an error, the
// cluster invoker must fall back to the mock implementation's result.
@Test
void testMockInvokerFromOverride_Invoke_checkCompatible_ImplMock2() {
    URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
            .addParameter(
                    REFER_KEY,
                    URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail"))
            .addParameter("invoke_return_error", "true");
    Invoker<IHelloService> cluster = getClusterInvoker(url);
    // Configured with mock
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("getSomething");
    Result ret = cluster.invoke(invocation);
    Assertions.assertEquals("somethingmock", ret.getValue());
}
/**
 * Returns the index record for reducer {@code reduce} from the spill index of
 * map {@code mapId}, reading and caching the index file on a cache miss.
 *
 * <p>On a cache hit the entry may still be under construction by another
 * thread; this method blocks on the entry's monitor until construction
 * completes (presumably signalled by the loading thread — confirm against
 * readIndexFileToCache).
 *
 * <p>Fix: the previous code swallowed {@link InterruptedException} without
 * restoring the thread's interrupt status; it now re-interrupts before
 * wrapping in an IOException so callers can still observe the interrupt.
 *
 * @param mapId              map attempt whose index is requested.
 * @param reduce             reducer partition number.
 * @param fileName           path of the index file (used only on a miss).
 * @param expectedIndexOwner expected owner of the index file.
 * @return the index record for the requested reducer.
 * @throws IOException on read failure, interruption, or an out-of-range reducer.
 */
public IndexRecord getIndexInformation(String mapId, int reduce, Path fileName,
    String expectedIndexOwner) throws IOException {
  IndexInformation info = cache.get(mapId);

  if (info == null) {
    // Cache miss: read the index file and populate the cache.
    info = readIndexFileToCache(fileName, mapId, expectedIndexOwner);
  } else {
    synchronized (info) {
      while (isUnderConstruction(info)) {
        try {
          info.wait();
        } catch (InterruptedException e) {
          // Restore interrupt status so upstream code can see the interrupt.
          Thread.currentThread().interrupt();
          throw new IOException("Interrupted waiting for construction", e);
        }
      }
    }
    LOG.debug("IndexCache HIT: MapId " + mapId + " found");
  }

  if (info.mapSpillRecord.size() == 0 ||
      info.mapSpillRecord.size() <= reduce) {
    throw new IOException("Invalid request " +
        " Map Id = " + mapId + " Reducer = " + reduce +
        " Index Info Length = " + info.mapSpillRecord.size());
  }
  return info.mapSpillRecord.getIndex(reduce);
}
// Exercises the cache's eviction policy (named "LRC" — least recently
// consulted — by the test) with the cache capped via SHUFFLE_INDEX_CACHE=1:
// after overflowing the cache, the oldest entry must be evicted (its re-read
// fails with FileNotFoundException because the backing file was deleted)
// while all newer entries remain served from cache.
@Test
public void testLRCPolicy() throws Exception {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  System.out.println("seed: " + seed);
  fs.delete(p, true);
  conf.setInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 1);
  final int partsPerMap = 1000;
  final int bytesPerFile = partsPerMap * 24;
  IndexCache cache = new IndexCache(conf);

  // fill cache
  int totalsize = bytesPerFile;
  for (; totalsize < 1024 * 1024; totalsize += bytesPerFile) {
    Path f = new Path(p, Integer.toString(totalsize, 36));
    writeFile(fs, f, totalsize, partsPerMap);
    IndexRecord rec = cache.getIndexInformation(
        Integer.toString(totalsize, 36), r.nextInt(partsPerMap), f,
        UserGroupInformation.getCurrentUser().getShortUserName());
    checkRecord(rec, totalsize);
  }

  // delete files, ensure cache retains all elem
  for (FileStatus stat : fs.listStatus(p)) {
    fs.delete(stat.getPath(),true);
  }
  for (int i = bytesPerFile; i < 1024 * 1024; i += bytesPerFile) {
    Path f = new Path(p, Integer.toString(i, 36));
    IndexRecord rec = cache.getIndexInformation(Integer.toString(i, 36),
        r.nextInt(partsPerMap), f,
        UserGroupInformation.getCurrentUser().getShortUserName());
    checkRecord(rec, i);
  }

  // push oldest (bytesPerFile) out of cache
  Path f = new Path(p, Integer.toString(totalsize, 36));
  writeFile(fs, f, totalsize, partsPerMap);
  cache.getIndexInformation(Integer.toString(totalsize, 36),
      r.nextInt(partsPerMap), f,
      UserGroupInformation.getCurrentUser().getShortUserName());
  fs.delete(f, false);

  // oldest fails to read, or error
  boolean fnf = false;
  try {
    cache.getIndexInformation(Integer.toString(bytesPerFile, 36),
        r.nextInt(partsPerMap), new Path(p, Integer.toString(bytesPerFile)),
        UserGroupInformation.getCurrentUser().getShortUserName());
  } catch (IOException e) {
    if (e.getCause() == null ||
        !(e.getCause() instanceof FileNotFoundException)) {
      throw e;
    } else {
      fnf = true;
    }
  }
  if (!fnf)
    fail("Failed to push out last entry");

  // should find all the other entries
  for (int i = bytesPerFile << 1; i < 1024 * 1024; i += bytesPerFile) {
    IndexRecord rec = cache.getIndexInformation(Integer.toString(i, 36),
        r.nextInt(partsPerMap), new Path(p, Integer.toString(i, 36)),
        UserGroupInformation.getCurrentUser().getShortUserName());
    checkRecord(rec, i);
  }
  IndexRecord rec = cache.getIndexInformation(Integer.toString(totalsize, 36),
      r.nextInt(partsPerMap), f,
      UserGroupInformation.getCurrentUser().getShortUserName());
  checkRecord(rec, totalsize);
}
@Override public PermissionTicket createTicket(ResourceSet resourceSet, Set<String> scopes) { // check to ensure that the scopes requested are a subset of those in the resource set if (!scopeService.scopesMatch(resourceSet.getScopes(), scopes)) { throw new InsufficientScopeException("Scopes of resource set are not enough for requested permission."); } Permission perm = new Permission(); perm.setResourceSet(resourceSet); perm.setScopes(scopes); PermissionTicket ticket = new PermissionTicket(); ticket.setPermission(perm); ticket.setTicket(UUID.randomUUID().toString()); ticket.setExpiration(new Date(System.currentTimeMillis() + permissionExpirationSeconds * 1000L)); return repository.save(ticket); }
// Two tickets created for different resource sets must both get non-null,
// distinct ticket values (UUID-based uniqueness).
@Test
public void testCreate_differentTicketsDifferentClient() {
    PermissionTicket perm1 = permissionService.createTicket(rs1, scopes1);
    PermissionTicket perm2 = permissionService.createTicket(rs2, scopes2);

    assertNotNull(perm1.getTicket());
    assertNotNull(perm2.getTicket());

    // make sure these are different from each other
    assertThat(perm1.getTicket(), not(equalTo(perm2.getTicket())));
}
/**
 * Resolves a display type for the view, specializing Switch-like widgets that
 * may come from the framework, the support library, or AndroidX.
 *
 * <p>Candidate classes are probed reflectively in the same order as before;
 * the first one present on the classpath that matches the view wins.
 *
 * @param view view to classify.
 * @return the specialized type ("Switch"/"SwitchCompat" variants), or the
 *         view's canonical class name when none match.
 */
public static String getViewTypeByReflect(View view) {
    String viewType = SnapCache.getInstance().getCanonicalName(view.getClass());

    // {class name to probe, specialized type label}
    String[][] switchCandidates = {
            {"android.widget.Switch", "Switch"},
            {"android.support.v7.widget.SwitchCompat", "SwitchCompat"},
            {"androidx.appcompat.widget.SwitchCompat", "SwitchCompat"},
    };
    for (String[] candidate : switchCandidates) {
        Class<?> compatClass = ReflectUtil.getClassByName(candidate[0]);
        if (compatClass != null && compatClass.isInstance(view)) {
            return getViewType(viewType, candidate[1]);
        }
    }
    return viewType;
}
// A plain TextView matches none of the Switch candidates, so the canonical
// class name is returned unchanged.
@Test
public void getViewTypeByReflect() {
    TextView textView1 = new TextView(mApplication);
    textView1.setText("child1");
    Assert.assertEquals("android.widget.TextView", SAViewUtils.getViewTypeByReflect(textView1));
}
/**
 * Wraps the target in a plugin proxy so this interceptor observes its calls
 * (standard interceptor wiring — presumably the MyBatis Plugin API; confirm).
 */
@Override
public Object plugin(final Object target) {
    return Plugin.wrap(target, this);
}
// Wrapping an arbitrary object must not throw.
@Test
public void pluginTest() {
    final OpenGaussSqlUpdateInterceptor openGaussSqlUpdateInterceptor = new OpenGaussSqlUpdateInterceptor();
    Assertions.assertDoesNotThrow(() -> openGaussSqlUpdateInterceptor.plugin(new Object()));
}
/**
 * Folds a newly added entry's value hash into the Merkle leaf that covers the
 * key's hash, then propagates the change up through the branch nodes.
 *
 * @param key   entry key; its hashCode selects the leaf.
 * @param value entry value; its hashCode is folded into the leaf hash.
 */
@Override
public void updateAdd(Object key, Object value) {
    final int hashOfKey = key.hashCode();
    final int hashOfValue = value.hashCode();

    final int leaf = MerkleTreeUtil.getLeafOrderForHash(hashOfKey, leafLevel);
    final int updatedLeafHash = MerkleTreeUtil.addHash(getNodeHash(leaf), hashOfValue);
    setNodeHash(leaf, updatedLeafHash);

    // Recompute ancestor hashes up to the root.
    updateBranch(leaf);
}
// Three additions that land in the same leaf (node order 5) must accumulate
// via addHash in order, matching an independently folded expected hash.
@Test
public void testUpdateAdd() {
    MerkleTree merkleTree = new ArrayMerkleTree(3);
    merkleTree.updateAdd(1, 1);
    merkleTree.updateAdd(2, 2);
    merkleTree.updateAdd(3, 3);

    int expectedHash = 0;
    expectedHash = MerkleTreeUtil.addHash(expectedHash, 1);
    expectedHash = MerkleTreeUtil.addHash(expectedHash, 2);
    expectedHash = MerkleTreeUtil.addHash(expectedHash, 3);

    int nodeHash = merkleTree.getNodeHash(5);
    assertEquals(expectedHash, nodeHash);
}
/**
 * A serialized composite key can be ambiguous only when BOTH the key and the
 * namespace serializers produce variable-length data; if either is
 * fixed-length the boundary between the two parts is unambiguous.
 *
 * @return true when both serializers are variable-sized.
 */
public static boolean isAmbiguousKeyPossible(
        TypeSerializer keySerializer, TypeSerializer namespaceSerializer) {
    return (isSerializerTypeVariableSized(keySerializer)
            && isSerializerTypeVariableSized(namespaceSerializer));
}
// Fixed-length int serializers can never be ambiguous; variable-length string
// serializers can.
@Test
void testIsAmbiguousKeyPossible() {
    assertThat(
            CompositeKeySerializationUtils.isAmbiguousKeyPossible(
                    IntSerializer.INSTANCE, IntSerializer.INSTANCE))
            .isFalse();
    assertThat(
            CompositeKeySerializationUtils.isAmbiguousKeyPossible(
                    StringSerializer.INSTANCE, StringSerializer.INSTANCE))
            .isTrue();
}
/**
 * Converts raw octet-stream bytes to a Java object described by the target
 * media type.
 *
 * <p>Dispatch on the destination's "type" parameter: absent or "ByteArray"
 * returns the bytes unchanged; a String-typed target decodes as UTF-8; anything
 * else is unmarshalled with the supplied marshaller.
 *
 * @param source      raw bytes; null passes through as null.
 * @param destination target media type; must match application/x-java-object.
 * @param marshaller  marshaller used for non-String, non-ByteArray targets.
 * @return the converted object.
 * @deprecated for removal since 13.0.
 */
@Deprecated(forRemoval=true, since = "13.0")
public static Object convertOctetStreamToJava(byte[] source, MediaType destination, Marshaller marshaller) {
    if (source == null) return null;
    if (!destination.match(MediaType.APPLICATION_OBJECT)) {
        throw CONTAINER.invalidMediaType(APPLICATION_OBJECT_TYPE, destination.toString());
    }
    String classType = destination.getClassType();
    // No declared class type: hand the raw bytes back.
    if (classType == null) return source;
    if (classType.equals("ByteArray")) {
        return source;
    }
    if (destination.hasStringType()) {
        return new String(source, UTF_8);
    }
    try {
        return marshaller.objectFromByteBuffer(source);
    } catch (IOException | IllegalStateException | ClassNotFoundException e) {
        // Surface unmarshalling failures uniformly as a conversion error.
        throw CONTAINER.conversionNotSupported(source, MediaType.APPLICATION_OCTET_STREAM_TYPE, destination.toString());
    }
}
// String-typed target decodes via UTF-8; a ByteArray target passes bytes
// through untouched; an unsupported Double target must raise EncodingException.
@Test
public void testOctetStreamToJavaConversion() {
    String value = "It's not an easy thing to meet your maker.";
    byte[] textStream = value.getBytes(UTF_8);
    byte[] randomBytes = new byte[]{23, 23, 34, 1, -1, -123};
    Marshaller marshaller = new ProtoStreamMarshaller();

    MediaType stringType = APPLICATION_OBJECT.withParameter("type", "java.lang.String");
    Object result = StandardConversions.convertOctetStreamToJava(textStream, stringType, marshaller);
    assertEquals(value, result);

    MediaType byteArrayType = APPLICATION_OBJECT.withParameter("type", "ByteArray");
    Object result2 = StandardConversions.convertOctetStreamToJava(textStream, byteArrayType, marshaller);
    assertArrayEquals(textStream, (byte[]) result2);

    Object result3 = StandardConversions.convertOctetStreamToJava(randomBytes, byteArrayType, marshaller);
    assertArrayEquals(randomBytes, (byte[]) result3);

    thrown.expect(EncodingException.class);
    MediaType doubleType = APPLICATION_OBJECT.withParameter("type", "java.lang.Double");
    StandardConversions.convertOctetStreamToJava(randomBytes, doubleType, marshaller);
    System.out.println(thrown);
}