idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
40,400
/**
 * Recursively accumulates the declared fields of {@code clazz} and all of its
 * superclasses into {@code fields} (superclass fields first) and returns the
 * accumulated list as an array.
 *
 * @param clazz  class whose field hierarchy is inspected
 * @param fields accumulator list, mutated in place
 * @return the contents of {@code fields} as an array
 */
private static Field[] getAllFieldsRec(Class clazz, List<Field> fields) {
    Class parent = clazz.getSuperclass();
    if (parent != null) {
        // Ancestors first so inherited fields precede the class's own fields
        getAllFieldsRec(parent, fields);
    }
    fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
    return fields.toArray(new Field[fields.size()]);
}
Recursively collects the declared fields of a class and all of its superclasses.
40,401
/**
 * Strips the ":port" suffix (everything from the first colon onward) from every
 * address in the given list.
 *
 * @param stringList addresses, possibly in "host:port" form
 * @return a new list with the host part of each address, in input order
 */
public static List<String> removeAddressPort(List<String> stringList) {
    List<String> hostsOnly = new ArrayList<>(stringList.size());
    for (String address : stringList) {
        int colon = address.indexOf(":");
        hostsOnly.add(colon > -1 ? address.substring(0, colon) : address);
    }
    return hostsOnly;
}
Remove address port .
40,402
/**
 * Joins the given hosts into a single comma-separated string, trimming each
 * element. (Despite the name, this method joins rather than splits.)
 *
 * @param hosts host names to join
 * @return the comma-separated concatenation, empty string for an empty list
 */
public static String splitListByComma(List<String> hosts) {
    StringBuilder joined = new StringBuilder();
    for (String host : hosts) {
        if (joined.length() > 0) {
            joined.append(",");
        }
        joined.append(host.trim());
    }
    return joined.toString();
}
Joins the trimmed list elements into a single comma-separated string.
40,403
/**
 * Instantiates the {@code IExtractor} implementation described by the given
 * configuration. The implementation class is taken from the config when set,
 * otherwise resolved by name via {@code Class.forName}.
 *
 * @param config configuration naming the extractor implementation and entity class
 * @param <T>    extractor implementation type
 * @param <S>    configuration type
 * @return a new extractor instance
 * @throws DeepExtractorInitializationException wrapping any reflective failure
 */
public static <T, S extends BaseConfig> IExtractor<T, S> getExtractorInstance(S config) {
    try {
        // Prefer the explicitly configured implementation class; fall back to
        // resolving the configured class name reflectively.
        Class<T> rdd = (Class<T>) config.getExtractorImplClass();
        if (rdd == null) {
            rdd = (Class<T>) Class.forName(config.getExtractorImplClassName());
        }
        Constructor<T> c;
        if (config.getEntityClass().isAssignableFrom(Cells.class)) {
            // Cells-based extractors use the no-arg constructor
            c = rdd.getConstructor();
            return (IExtractor<T, S>) c.newInstance();
        } else {
            // Entity-based extractors receive the entity class as constructor argument
            c = rdd.getConstructor(Class.class);
            return (IExtractor<T, S>) c.newInstance(config.getEntityClass());
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException
            | InvocationTargetException | NoSuchMethodException | SecurityException e) {
        // Any reflective failure is wrapped in the domain-specific unchecked exception
        String message = "A exception happens and we wrap with DeepExtractorInitializationException" + e.getMessage();
        LOG.error(message);
        throw new DeepExtractorInitializationException(message, e);
    }
}
Gets extractor instance .
40,404
/**
 * Coerces a {@code Number} value to the boxed numeric type requested by
 * {@code clazz} (Double, Long, Float, Integer, Short, Byte, BigInteger,
 * BigDecimal, AtomicLong or AtomicInteger).
 *
 * @param object the value to convert; must be a {@link Number}
 * @param clazz  the requested numeric target type
 * @return the converted value, boxed as the requested type
 * @throws ClassCastException if {@code clazz} is not a supported Number type
 *                            or {@code object} is not a Number
 */
public static Object castNumberType(Object object, Class clazz) {
    if (Number.class.isAssignableFrom(clazz)) {
        Number number = (Number) object;
        if (Double.class.isAssignableFrom(clazz)) {
            return number.doubleValue();
        } else if (Long.class.isAssignableFrom(clazz)) {
            return number.longValue();
        } else if (Float.class.isAssignableFrom(clazz)) {
            return number.floatValue();
        } else if (Integer.class.isAssignableFrom(clazz)) {
            return number.intValue();
        } else if (Short.class.isAssignableFrom(clazz)) {
            return number.shortValue();
        } else if (Byte.class.isAssignableFrom(clazz)) {
            return number.byteValue();
        } else if (BigInteger.class.isAssignableFrom(clazz)) {
            return BigInteger.valueOf(number.longValue());
        } else if (BigDecimal.class.isAssignableFrom(clazz)) {
            // Note: converts through longValue(), so any fractional part is dropped
            return BigDecimal.valueOf(number.longValue());
        } else if (AtomicLong.class.isAssignableFrom(clazz)) {
            return new AtomicLong(number.longValue());
        } else if (AtomicInteger.class.isAssignableFrom(clazz)) {
            return new AtomicInteger(number.intValue());
        }
    }
    throw new ClassCastException("it is not a Number Type" + object.getClass() + "|" + clazz);
}
Cast number type .
40,405
/**
 * Returns a clone of {@code t}: every non-static, non-final field declared by
 * its class and all of its superclasses is copied by reflection. Collection and
 * Map fields are copied into new containers (shallow element copy); all other
 * fields are copied by reference.
 *
 * @param t   instance to clone; its class must have an accessible no-arg constructor
 * @param <T> runtime type of the cloned instance
 * @return a new instance carrying the copied state
 * @throws IllegalAccessException if a field or constructor is not accessible
 * @throws InstantiationException if the class (or a collection/map field's
 *                                runtime class) has no usable no-arg constructor
 */
public static <T> T cloneObjectWithParents(T t) throws IllegalAccessException, InstantiationException {
    T clone = (T) t.getClass().newInstance();
    List<Field> allFields = new ArrayList<>();
    // Collect inherited declared fields first, walking up the hierarchy
    Class parentClass = t.getClass().getSuperclass();
    while (parentClass != null) {
        Collections.addAll(allFields, parentClass.getDeclaredFields());
        parentClass = parentClass.getSuperclass();
    }
    Collections.addAll(allFields, t.getClass().getDeclaredFields());
    for (Field field : allFields) {
        int modifiers = field.getModifiers();
        // Skip constants and static state — they are shared, not per-instance
        if ((Modifier.FINAL & modifiers) != 0 || (Modifier.STATIC & modifiers) != 0) {
            continue;
        }
        field.setAccessible(true);
        Object value = field.get(t);
        if (Collection.class.isAssignableFrom(field.getType())) {
            // Reuse the clone's pre-initialized collection when present; otherwise
            // instantiate the same runtime collection type as the source value.
            // NOTE(review): if the source collection field is null this dereferences
            // it and throws NPE — confirm callers guarantee non-null collections.
            Collection collection = (Collection) field.get(clone);
            if (collection == null) {
                collection = (Collection) field.get(t).getClass().newInstance();
            }
            collection.addAll((Collection) field.get(t));
            value = collection;
        } else if (Map.class.isAssignableFrom(field.getType())) {
            // Maps: always build a fresh container of the source's runtime type
            Map clonMap = (Map) field.get(t).getClass().newInstance();
            clonMap.putAll((Map) field.get(t));
            value = clonMap;
        }
        field.set(clone, value);
    }
    return clone;
}
Returns an instance clone . this method gets every class property by reflection including its parents properties
40,406
/**
 * Returns a fixed-size thread pool backed by a bounded queue whose rejection
 * handler blocks the submitter until queue space is available (instead of
 * throwing {@code RejectedExecutionException} immediately).
 *
 * @param nThreads  number of worker threads (core == max)
 * @param queueSize capacity of the bounded work queue
 * @return the blocking fixed thread pool
 */
public static ExecutorService newBlockingFixedThreadPoolExecutor(int nThreads, int queueSize) {
    BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(queueSize);
    RejectedExecutionHandler blockingRejectedExecutionHandler = new RejectedExecutionHandler() {
        public void rejectedExecution(Runnable task, ThreadPoolExecutor executor) {
            try {
                // Block the submitting thread until the queue has room
                executor.getQueue().put(task);
            } catch (InterruptedException e) {
                // FIX: the original swallowed the interrupt AND silently dropped the
                // task. Restore the interrupt flag and surface the failure so the
                // caller knows the task was never enqueued.
                Thread.currentThread().interrupt();
                throw new java.util.concurrent.RejectedExecutionException(
                        "Interrupted while waiting to enqueue task", e);
            }
        }
    };
    return new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, blockingQueue,
            blockingRejectedExecutionHandler);
}
Returns an instance of ThreadPoolExecutor using an bounded queue and blocking when the worker queue is full .
40,407
/**
 * Returns the "catalog.table" namespace, computing and caching it lazily on
 * first access.
 *
 * @return the namespace string
 */
public String getNameSpace() {
    if (nameSpace == null) {
        nameSpace = catalog + "." + table;
    }
    return nameSpace;
}
Get name space .
40,408
/**
 * Initializes this configuration from the generic {@code ExtractorConfig},
 * copying the implementation class name, entity class, RDD/partition ids and
 * every recognized connection value that is present in the config map.
 *
 * @param extractorConfig the source configuration
 * @return this configuration, typed as the concrete subclass
 */
public S initialize(ExtractorConfig extractorConfig) {
    setExtractorImplClassName(extractorConfig.getExtractorImplClassName());
    setEntityClass(extractorConfig.getEntityClass());
    setRddId(extractorConfig.getRddId());
    setPartitionId(extractorConfig.getPartitionId());
    Map<String, Serializable> values = extractorConfig.getValues();
    if (values.get(USERNAME) != null) {
        username(extractorConfig.getString(USERNAME));
    }
    if (values.get(PASSWORD) != null) {
        password(extractorConfig.getString(PASSWORD));
    }
    if (values.get(HOST) != null) {
        host((extractorConfig.getStringArray(HOST)));
    }
    if (values.get(ES_REST_PORTS) != null) {
        port((extractorConfig.getInteger(ES_REST_PORTS)));
    }
    // When both ES_REST_PORTS and PORT are present, PORT wins (applied last)
    if (values.get(PORT) != null) {
        port((extractorConfig.getInteger(PORT)));
    }
    if (values.get(COLLECTION) != null) {
        table(extractorConfig.getString(COLLECTION));
    }
    if (values.get(INPUT_COLUMNS) != null) {
        inputColumns(extractorConfig.getStringArray(INPUT_COLUMNS));
    }
    if (values.get(DATABASE) != null) {
        catalog(extractorConfig.getString(DATABASE));
    }
    if (values.get(FILTER_QUERY) != null) {
        filters(extractorConfig.getFilterArray(FILTER_QUERY));
    }
    return (S) this;
}
Initializes this configuration object from the values held by the given ExtractorConfig.
40,409
/**
 * Checks whether the given collection is sharded by looking it up in the
 * config.collections metadata of the connected Mongo deployment.
 *
 * @param collection the collection to check
 * @return true when a config.collections entry exists for the collection
 */
private boolean isShardedCollection(DBCollection collection) {
    DB configDb = collection.getDB().getMongo().getDB("config");
    DBObject match = new BasicDBObject(MONGO_DEFAULT_ID, collection.getFullName());
    return configDb.getCollection("collections").findOne(match) != null;
}
Is sharded collection .
40,410
/**
 * Reads the shard topology from config.shards, mapping each shard id to the
 * host list found after the "replicaSet/" prefix of its "host" entry.
 *
 * @param collection any collection of the target deployment (used to reach config)
 * @return shard id → replica host array; entries without a "/" prefix are skipped
 */
private Map<String, String[]> getShards(DBCollection collection) {
    DB config = collection.getDB().getSisterDB("config");
    DBCollection configShards = config.getCollection("shards");
    DBCursor cursorShards = configShards.find();
    Map<String, String[]> map = new HashMap<>();
    try {
        while (cursorShards.hasNext()) {
            DBObject currentShard = cursorShards.next();
            // host is typically "replicaSetName/host1:port,host2:port"
            String currentHost = (String) currentShard.get("host");
            int slashIndex = currentHost.indexOf("/");
            if (slashIndex > 0) {
                map.put((String) currentShard.get(MONGO_DEFAULT_ID),
                        currentHost.substring(slashIndex + 1).split(","));
            }
        }
    } finally {
        // FIX: the original never closed the cursor, leaking server-side resources
        cursorShards.close();
    }
    return map;
}
Gets shards .
40,411
/**
 * Queries config.chunks for the chunk metadata of the given collection.
 *
 * @param collection the sharded collection
 * @return a cursor over the collection's chunk documents (caller must close it)
 */
private DBCursor getChunks(DBCollection collection) {
    DBCollection configChunks = collection.getDB().getSisterDB("config").getCollection("chunks");
    return configChunks.find(new BasicDBObject("ns", collection.getFullName()));
}
Gets chunks .
40,412
/**
 * Calculates the Spark partitions for the collection from Mongo split points:
 * one partition per consecutive pair of split keys plus a trailing partition
 * covering everything from the last key onward.
 *
 * @param collection the collection to partition
 * @return the partitions, ordered by key range
 */
private DeepPartition[] calculateSplits(DBCollection collection) {
    BasicDBList splitData = getSplitData(collection);
    List<ServerAddress> serverAddressList = collection.getDB().getMongo().getServerAddressList();
    if (splitData == null) {
        // splitVector gave nothing (e.g. sharded environment): probe each shard directly
        Pair<BasicDBList, List<ServerAddress>> pair = getSplitDataCollectionShardEnviroment(
                getShards(collection), collection.getDB().getName(), collection.getName());
        splitData = pair.left;
        serverAddressList = pair.right;
    }
    List<String> stringHosts = new ArrayList<>();
    for (ServerAddress serverAddress : serverAddressList) {
        stringHosts.add(serverAddress.toString());
    }
    MongoPartition[] partitions = new MongoPartition[splitData.size() + 1];
    Object lastKey = null;
    int i = 0;
    for (Object aSplitData : splitData) {
        BasicDBObject currentKey = (BasicDBObject) aSplitData;
        Object currentO = currentKey.get(MONGO_DEFAULT_ID);
        partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
                new DeepTokenRange(lastKey, currentO, stringHosts), MONGO_DEFAULT_ID);
        lastKey = currentO;
        i++;
    }
    // FIX: the trailing partition hard-coded rddId 0 while every other partition
    // used mongoDeepJobConfig.getRddId(); keep them consistent. (An unused
    // QueryBuilder construction was also removed as dead code.)
    partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
            new DeepTokenRange(lastKey, null, stringHosts), MONGO_DEFAULT_ID);
    return partitions;
}
Calculate splits .
40,413
/**
 * Runs Mongo's splitVector command against the collection to obtain split
 * points for the configured maximum chunk size.
 *
 * @param collection the collection to probe
 * @return the "splitKeys" list of the command result, or null when absent
 */
private BasicDBList getSplitData(DBCollection collection) {
    final DBObject cmd = BasicDBObjectBuilder.start("splitVector", collection.getFullName())
            .add("keyPattern", new BasicDBObject(MONGO_DEFAULT_ID, 1))
            .add("force", false)
            .add("maxChunkSize", splitSize)
            .get();
    DB adminDb = collection.getDB().getSisterDB("admin");
    CommandResult splitVectorResult = adminDb.command(cmd);
    return (BasicDBList) splitVectorResult.get(SPLIT_KEYS);
}
Gets split data .
40,414
/**
 * Tries each shard in turn until one returns splitVector data for the
 * collection, returning that data together with the shard's addresses.
 *
 * @param shards         shard id → replica host array
 * @param dbName         database name
 * @param collectionName collection name
 * @return the first non-null split data with its server addresses, or null when
 *         no shard produced any
 * @throws DeepGenericException wrapping an UnknownHostException from address parsing
 */
private Pair<BasicDBList, List<ServerAddress>> getSplitDataCollectionShardEnviroment(
        Map<String, String[]> shards, String dbName, String collectionName) {
    try {
        for (String key : shards.keySet()) {
            List<ServerAddress> addressList = getServerAddressList(Arrays.asList(shards.get(key)));
            MongoClient mongoClient = new MongoClient(addressList);
            // FIX: the original only closed the client created in the LAST loop
            // iteration; every earlier client leaked. Close each one before moving on.
            try {
                BasicDBList dbList = getSplitData(mongoClient.getDB(dbName).getCollection(collectionName));
                if (dbList != null) {
                    return Pair.create(dbList, addressList);
                }
            } finally {
                mongoClient.close();
            }
        }
    } catch (UnknownHostException e) {
        throw new DeepGenericException(e);
    }
    return null;
}
Gets the split data for a collection in a sharded environment.
40,415
/**
 * Builds one partition per chunk of a sharded collection, mapping each chunk's
 * shard replica hosts and [min, max) shard-key bounds to a MongoPartition.
 * Partitions are shuffled so consecutive tasks hit different shards.
 *
 * @param collection the sharded collection
 * @return the shuffled partitions
 */
private DeepPartition[] calculateShardChunks(DBCollection collection) {
    DBCursor chuncks = getChunks(collection);
    Map<String, String[]> shards = getShards(collection);
    MongoPartition[] deepPartitions = new MongoPartition[chuncks.count()];
    int i = 0;
    boolean keyAssigned = false;
    String key = null;
    try {
        while (chuncks.hasNext()) {
            DBObject dbObject = chuncks.next();
            if (!keyAssigned) {
                // Derive the shard-key field name from the first chunk's "min"
                // document (ends up as the last key when the shard key is compound).
                Set<String> keySet = ((DBObject) dbObject.get("min")).keySet();
                for (String s : keySet) {
                    key = s;
                    keyAssigned = true;
                }
            }
            deepPartitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
                    new DeepTokenRange(shards.get(dbObject.get("shard")),
                            ((DBObject) dbObject.get("min")).get(key),
                            ((DBObject) dbObject.get("max")).get(key)), key);
            i++;
        }
    } finally {
        // FIX: release the server-side cursor (the original never closed it)
        chuncks.close();
    }
    List<MongoPartition> mongoPartitions = Arrays.asList(deepPartitions);
    Collections.shuffle(mongoPartitions);
    return mongoPartitions.toArray(new MongoPartition[mongoPartitions.size()]);
}
Calculates shard chunks .
40,416
/**
 * Parses "host[:port]" strings into ServerAddress instances.
 *
 * @param addressStringList the address strings
 * @return the parsed addresses, in input order
 * @throws UnknownHostException if an address cannot be resolved
 */
private List<ServerAddress> getServerAddressList(List<String> addressStringList) throws UnknownHostException {
    List<ServerAddress> parsed = new ArrayList<>(addressStringList.size());
    for (String address : addressStringList) {
        parsed.add(new ServerAddress(address));
    }
    return parsed;
}
Gets server address list .
40,417
/**
 * Builds a DeepRDD for the given extractor configuration, bound to this
 * context's underlying SparkContext.
 *
 * @param config extractor configuration describing the data source
 * @param <T>    element type of the resulting RDD
 * @return a new RDD backed by the configured extractor
 */
public <T> RDD<T> createRDD(ExtractorConfig<T> config) {
    DeepRDD<T, ExtractorConfig<T>> deepRDD = new DeepRDD<>(this.sc(), config);
    return deepRDD;
}
Creates a RDD .
40,418
/**
 * Builds a JavaRDD wrapper around the DeepRDD produced for the given
 * extractor configuration.
 *
 * @param config extractor configuration describing the data source
 * @param <T>    element type of the resulting RDD
 * @return a new JavaRDD backed by the configured extractor
 */
public <T> JavaRDD<T> createJavaRDD(ExtractorConfig<T> config) {
    DeepRDD<T, ExtractorConfig<T>> underlying = (DeepRDD<T, ExtractorConfig<T>>) createRDD(config);
    return new DeepJavaRDD<>(underlying);
}
Creates a JavaRDD .
40,419
/**
 * Maps a Cells RDD to a SparkSQL Row RDD, converting each Cells element with
 * {@code CellsUtils.getRowFromCells}.
 *
 * @param cellsRDD the source RDD of Cells
 * @return the RDD of SparkSQL rows
 * @throws UnsupportedDataTypeException if a cell type cannot be mapped
 */
public static JavaRDD<Row> createJavaRowRDD(JavaRDD<Cells> cellsRDD) throws UnsupportedDataTypeException {
    return cellsRDD.map(new Function<Cells, Row>() {
        public Row call(Cells cells) throws Exception {
            return CellsUtils.getRowFromCells(cells);
        }
    });
}
Creates a JavaRDD of SparkSQL rows
40,420
/**
 * Creates a SparkSQL DataFrame from the extractor configuration: reads the
 * source as a Cells RDD, converts it to Row objects and applies the schema
 * inferred from the first Cells element.
 *
 * @param config extractor configuration of the underlying data source
 * @return a DataFrame with the inferred schema
 * @throws UnsupportedDataTypeException  if a cell type cannot be mapped to SparkSQL
 * @throws UnsupportedOperationException when the schema cannot be inferred
 *                                       (re-thrown with an "empty data RDD" message)
 */
public DataFrame createJavaSchemaRDD(ExtractorConfig<Cells> config) throws UnsupportedDataTypeException,
        UnsupportedOperationException {
    JavaRDD<Cells> cellsRDD = createJavaRDD(config);
    JavaRDD<Row> rowsRDD = DeepSparkContext.createJavaRowRDD(cellsRDD);
    try {
        // Schema is inferred from the first element only; assumes homogeneous rows
        Cells firstCells = cellsRDD.first();
        StructType schema = CellsUtils.getStructTypeFromCells(firstCells);
        return sqlContext.applySchema(rowsRDD, schema);
    } catch (UnsupportedOperationException e) {
        throw new UnsupportedOperationException("Cannot infer schema from empty data RDD", e);
    }
}
Creates a JavaSchemaRDD from a DeepJobConfig and a JavaSQLContext .
40,421
/**
 * Builds a Cells RDD from a HDFS or S3 extractor configuration, dispatching on
 * the configured extractor implementation class name.
 *
 * @param config file-system extractor configuration
 * @return the Cells RDD for the configured path
 * @throws IllegalArgumentException when the configuration is neither HDFS nor S3
 */
public RDD textFile(ExtractorConfig<Cells> config) throws IllegalArgumentException {
    String extractorClassName = config.getExtractorImplClassName();
    if (ExtractorConstants.HDFS.equals(extractorClassName)) {
        return createHDFSRDD(config);
    }
    if (ExtractorConstants.S3.equals(extractorClassName)) {
        return createS3RDD(config);
    }
    throw new IllegalArgumentException("Valid configurations are HDFS paths, S3 paths or local file paths.");
}
Returns a Cells RDD from a HDFS or S3 ExtractorConfig .
40,422
/**
 * Builds a Cells RDD from a HDFS extractor configuration. When the configured
 * extractor is HDFS, the file path is prefixed with "hdfs://host:port".
 *
 * @param config HDFS extractor configuration (host, port, file path)
 * @return the Cells RDD for the resolved path
 */
public RDD<Cells> createHDFSRDD(ExtractorConfig<Cells> config) {
    Serializable host = config.getValues().get(ExtractorConstants.HOST);
    Serializable port = config.getValues().get(ExtractorConstants.PORT);
    Serializable path = config.getValues().get(ExtractorConstants.FS_FILE_PATH);
    final TextFileDataTable metaData = UtilFS.createTextFileMetaDataFromConfig(config, this);
    String filePath;
    if (config.getExtractorImplClassName().equals(ExtractorConstants.HDFS)) {
        filePath = ExtractorConstants.HDFS_PREFIX + host.toString() + ":" + port + path.toString();
    } else {
        filePath = path.toString();
    }
    return createRDDFromFilePath(filePath, metaData);
}
Returns a Cells RDD from HDFS .
40,423
/**
 * Builds a Cells RDD from an S3 extractor configuration, wiring the native S3
 * filesystem implementation and AWS credentials into the Hadoop configuration.
 *
 * @param config S3 extractor configuration (bucket, path, credentials)
 * @return the Cells RDD for the resolved "s3n://bucket/path"
 */
public RDD<Cells> createS3RDD(ExtractorConfig<Cells> config) {
    Serializable bucket = config.getValues().get(ExtractorConstants.S3_BUCKET);
    Serializable path = config.getValues().get(ExtractorConstants.FS_FILE_PATH);
    final TextFileDataTable metaData = UtilFS.createTextFileMetaDataFromConfig(config, this);
    String filePath;
    if (config.getExtractorImplClassName().equals(ExtractorConstants.S3)) {
        filePath = ExtractorConstants.S3_PREFIX + bucket.toString() + path.toString();
    } else {
        filePath = path.toString();
    }
    // Credentials and the native S3 filesystem go into the shared Hadoop config
    Configuration hadoopConf = this.sc().hadoopConfiguration();
    hadoopConf.set("fs.s3n.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");
    hadoopConf.set("fs.s3n.awsAccessKeyId", config.getString(ExtractorConstants.S3_ACCESS_KEY_ID));
    hadoopConf.set("fs.s3n.awsSecretAccessKey", config.getString(ExtractorConstants.S3_SECRET_ACCESS_KEY));
    return createRDDFromFilePath(filePath, metaData);
}
Returns a Cells RDD from S3 fileSystem .
40,424
/**
 * Factory for an entity-based Aerospike job configuration.
 *
 * @param entityClass the entity class the configuration is bound to
 * @param <T>         entity type
 * @return a fresh AerospikeDeepJobConfig for the entity class
 */
public static <T extends IDeepType> AerospikeDeepJobConfig<T> createAerospike(Class<T> entityClass) {
    AerospikeDeepJobConfig<T> config = new AerospikeDeepJobConfig<>(entityClass);
    return config;
}
Creates a new entity - based Aerospike job configuration object .
40,425
/**
 * Resolves the datastore column name for an annotated field: the DeepField
 * annotation's fieldName when non-empty, otherwise the Java field name.
 *
 * @param field a field carrying the DeepField annotation
 * @return the datastore name of the field
 */
public static String deepFieldName(Field field) {
    DeepField annotation = field.getAnnotation(DeepField.class);
    String overriddenName = annotation.fieldName();
    return StringUtils.isNotEmpty(overriddenName) ? overriddenName : field.getName();
}
Returns the field name as known by the datastore . If the provided field object DeepField annotation specifies the fieldName property the value of this property will be returned otherwise the java field name will be returned .
40,426
/**
 * Merges the per-machine token lists into a single, sorted, de-duplicated list
 * of token ranges covering the whole ring.
 *
 * @param tokens  machine → tokens owned by that machine
 * @param session session used by the merge function to resolve replicas
 * @param p       partitioner providing the ring's minimum token
 * @return the merged token ranges, naturally ordered
 */
static List<DeepTokenRange> mergeTokenRanges(Map<String, Iterable<Comparable>> tokens, final Session session,
        final IPartitioner p) {
    // All tokens of the cluster, flattened and sorted
    final Iterable<Comparable> allRanges = Ordering.natural().sortedCopy(concat(tokens.values()));
    final Comparable maxValue = Ordering.natural().max(allRanges);
    // The partitioner's minimum token, typed after the maximum value's class
    final Comparable minValue = (Comparable) p.minValue(maxValue.getClass()).getToken().token;
    Function<Comparable, Set<DeepTokenRange>> map = new MergeTokenRangesFunction(maxValue, minValue, session, p,
            allRanges);
    // Expand every token into its range set, then de-duplicate overlaps
    Iterable<DeepTokenRange> concatenated = concat(transform(allRanges, map));
    Set<DeepTokenRange> dedup = Sets.newHashSet(concatenated);
    return Ordering.natural().sortedCopy(dedup);
}
Merges the list of tokens for each cluster machine to a single list of token ranges .
40,427
/**
 * Recursively bisects a token range until the bisect factor is exhausted,
 * appending the resulting sub-ranges to the accumulator. A factor of 2^n
 * yields 2^n sub-ranges.
 *
 * @param range        the range to split
 * @param partitioner  partitioner used to compute token midpoints
 * @param bisectFactor target number of sub-ranges (assumed a power of 2 — confirm)
 * @param accumulator  output list collecting sub-ranges in ring order
 */
private static void bisectTokeRange(DeepTokenRange range, final IPartitioner partitioner, final int bisectFactor,
        final List<DeepTokenRange> accumulator) {
    final AbstractType tkValidator = partitioner.getTokenValidator();
    // Round-trip the range bounds through the validator to native Token objects
    Token leftToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getStartToken()));
    Token rightToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getEndToken()));
    Token midToken = partitioner.midpoint(leftToken, rightToken);
    // Back to a Comparable so the midpoint can live in a DeepTokenRange
    Comparable midpoint = (Comparable) tkValidator.compose(tkValidator.fromString(midToken.toString()));
    DeepTokenRange left = new DeepTokenRange(range.getStartToken(), midpoint, range.getReplicas());
    DeepTokenRange right = new DeepTokenRange(midpoint, range.getEndToken(), range.getReplicas());
    if (bisectFactor / 2 <= 1) {
        // Base case: emit the two halves
        accumulator.add(left);
        accumulator.add(right);
    } else {
        // Recurse with half the factor on each side
        bisectTokeRange(left, partitioner, bisectFactor / 2, accumulator);
        bisectTokeRange(right, partitioner, bisectFactor / 2, accumulator);
    }
}
Recursive function that splits a given token range to a given number of token ranges .
40,428
/**
 * Instantiates the Cassandra partitioner named in the configuration.
 *
 * @param config configuration carrying the partitioner class name
 * @return a new partitioner instance
 * @throws DeepGenericException wrapping any reflective instantiation failure
 */
public static IPartitioner getPartitioner(ICassandraDeepJobConfig config) {
    try {
        Class<?> partitionerClass = Class.forName(config.getPartitionerClassName());
        return (IPartitioner) partitionerClass.newInstance();
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
        throw new DeepGenericException(e);
    }
}
Creates a new instance of the cassandra partitioner configured in the configuration object .
40,429
/**
 * Initializes the reader: loads the JDBC driver, opens the connection, runs the
 * configured select query (bounded to the partition's key range when more than
 * one partition is configured) and positions on the first row.
 *
 * @param p the partition to read; must be a JdbcPartition
 * @throws Exception on driver loading, connection or query failure
 */
public void init(Partition p) throws Exception {
    Class.forName(jdbcDeepJobConfig.getDriverClass());
    conn = DriverManager.getConnection(jdbcDeepJobConfig.getConnectionUrl(), jdbcDeepJobConfig.getUsername(),
            jdbcDeepJobConfig.getPassword());
    // NOTE(review): this Statement is only referenced locally, so close() can never
    // release it; consider promoting it to a field closed alongside the ResultSet.
    Statement statement = conn.createStatement();
    SelectQuery query = jdbcDeepJobConfig.getQuery();
    JdbcPartition jdbcPartition = (JdbcPartition) p;
    if (jdbcDeepJobConfig.getNumPartitions() > 1) {
        // Bound the query to this partition's [lower, upper] key range (both inclusive)
        Column partitionKey = jdbcDeepJobConfig.getPartitionKey();
        query.getWhereClause().addCondition(BinaryCondition.lessThan(partitionKey, jdbcPartition.upper(), true))
                .addCondition(BinaryCondition.greaterThan(partitionKey, jdbcPartition.lower(), true));
    }
    resultSet = statement.executeQuery(query.toString());
    // Advance to the first row so hasNext reflects data availability
    this.hasNext = resultSet.next();
}
Initializes the reader.
40,430
/**
 * Closes the ResultSet and then the JDBC connection; the connection is closed
 * even when closing the ResultSet fails.
 *
 * @throws Exception if either resource fails to close
 */
public void close() throws Exception {
    try {
        if (resultSet != null) {
            resultSet.close();
        }
    } finally {
        // Always attempt to close the connection, even if resultSet.close() threw
        if (conn != null) {
            conn.close();
        }
    }
}
closes the resultset and the jdbc connection .
40,431
/**
 * Opens a session against one of the split's replicas, trying closer locations
 * first and falling through to the next replica on failure.
 *
 * @return an open session for the first reachable replica
 * @throws DeepIOException wrapping the last failure when no replica is reachable
 */
private Session createConnection() {
    List<String> locations = Lists.newArrayList(split.getReplicas());
    // Prefer the closest replica (comparator defines locality ordering)
    Collections.sort(locations, new DeepPartitionLocationComparator());
    Exception lastException = null;
    LOG.debug("createConnection: " + locations);
    for (String location : locations) {
        try {
            return trySessionForLocation(location, config, false).left;
        } catch (Exception e) {
            // FIX: pass the exception to the logger so the cause and stack trace
            // are not lost (the original logged only the message pattern).
            LOG.error("Could not get connection for: {}, replicas: {}", location, locations, e);
            lastException = e;
        }
    }
    throw new DeepIOException(lastException);
}
Creates a new connection . Reuses a cached connection if possible .
40,432
/**
 * Checks whether the current row's partition token equals this split's end
 * token, i.e. whether the reader has reached the end of its range.
 *
 * @return true when the current row's token matches the split's end token
 */
private boolean reachEndRange() {
    ByteBuffer rowKey;
    if (keyValidator instanceof CompositeType) {
        // Composite partition key: rebuild the row key from every bound column.
        // Buffers are duplicated so position/limit of the originals stay untouched.
        ByteBuffer[] keys = new ByteBuffer[partitionBoundColumns.size()];
        for (int i = 0; i < partitionBoundColumns.size(); i++) {
            keys[i] = partitionBoundColumns.get(i).value.duplicate();
        }
        rowKey = CompositeType.build(keys);
    } else {
        // Single-column key: use the bound value directly
        rowKey = partitionBoundColumns.get(0).value;
    }
    // Compare tokens through their string form; token types vary per partitioner
    String endToken = String.valueOf(split.getEndToken());
    String currentToken = partitioner.getToken(rowKey).toString();
    return endToken.equals(currentToken);
}
check whether current row is at the end of range
40,433
/**
 * Returns the next (keys, columns) pair from the underlying row iterator.
 *
 * @return the next element
 * @throws DeepIllegalAccessException when the reader is exhausted
 */
public Pair<Map<String, ByteBuffer>, Map<String, ByteBuffer>> next() {
    if (!this.hasNext()) {
        throw new DeepIllegalAccessException("DeepRecordReader exhausted");
    }
    return rowIterator.next();
}
Returns the next element in the underlying rowIterator .
40,434
/**
 * Returns the value stored under {@code key} as an Integer. When the value is
 * not stored as an Integer, it is read as a string and the first comma-separated
 * token is parsed.
 *
 * @param key configuration key
 * @return the integer value
 */
public Integer getInteger(String key) {
    try {
        return getValue(Integer.class, key);
    } catch (ClassCastException e) {
        // Value stored as a (possibly comma-separated) string: parse the first token
        String raw = getString(key);
        return Integer.parseInt(raw.split(",")[0]);
    }
}
Gets integer .
40,435
/**
 * Returns the value stored under {@code key} as a String array. A scalar string
 * value is wrapped in a one-element array.
 *
 * @param key configuration key
 * @return the string array value
 */
public String[] getStringArray(String key) {
    try {
        return getValue(String[].class, key);
    } catch (ClassCastException e) {
        // Single value stored: promote it to a one-element array
        String single = getString(key);
        return new String[] { single };
    }
}
Get string array .
40,436
/**
 * Returns the value stored under {@code key} as a Pair.
 *
 * @param key        configuration key
 * @param keyClass   expected left type (documentation only; the lookup is untyped)
 * @param valueClass expected right type (documentation only; the lookup is untyped)
 * @return the stored pair
 */
public <K, V> Pair<K, V> getPair(String key, Class<K> keyClass, Class<V> valueClass) {
    Pair pair = getValue(Pair.class, key);
    return pair;
}
Gets pair .
40,437
/**
 * Validates the JDBC configuration: driver class, schema, table, connection
 * coordinates (either an explicit URL or host+port, from which the URL is then
 * derived) and the partition key when multiple partitions are requested.
 *
 * @throws IllegalArgumentException describing the first missing/invalid property
 */
private void validate() {
    if (driverClass == null) {
        throw new IllegalArgumentException("Driver class must be specified");
    }
    if (catalog == null || catalog.isEmpty()) {
        throw new IllegalArgumentException("Schema name must be specified");
    }
    if (table == null || table.isEmpty()) {
        throw new IllegalArgumentException("Table name must be specified");
    }
    if (connectionUrl == null || connectionUrl.isEmpty()) {
        // No explicit URL: host + port must be present so the URL can be derived
        boolean hostAndPortGiven = host != null && !host.isEmpty() && port > 0;
        if (!hostAndPortGiven) {
            throw new IllegalArgumentException(
                    "You must specify at least one of connectionUrl or host and port properties");
        }
        connectionUrl(getJdbcUrl());
    }
    if (partitionKey == null && numPartitions > 1) {
        throw new IllegalArgumentException("You must define a valid partition key for using more than one partition.");
    }
}
Validates configuration object .
40,438
/**
 * Factory for an entity-based MongoDB job configuration.
 *
 * @param entityClass the entity class the configuration is bound to
 * @param <T>         entity type
 * @return a fresh MongoDeepJobConfig for the entity class
 */
public static <T extends IDeepType> MongoDeepJobConfig<T> createMongoDB(Class<T> entityClass) {
    MongoDeepJobConfig<T> config = new MongoDeepJobConfig<>(entityClass);
    return config;
}
Creates a new entity - based MongoDB job configuration object .
40,439
/**
 * Returns the replica hosts on which the given partition's split resides.
 *
 * @param tokenRange the partition; must be a DeepPartition
 * @return the replica host names for the partition's split
 */
public List<String> getPreferredLocations(Partition tokenRange) {
    DeepPartition partition = (DeepPartition) tokenRange;
    return partition.splitWrapper().getReplicas();
}
Returns a list of hosts on which the given split resides .
40,440
/**
 * Creates a record reader bound to the given partition's split.
 *
 * @param dp     the partition whose split will be read
 * @param config the Cassandra job configuration
 * @return a new DeepRecordReader over the partition's split
 */
private DeepRecordReader initRecordReader(final DeepPartition dp, CassandraDeepJobConfig<T> config) {
    return new DeepRecordReader(config, dp.splitWrapper());
}
Instantiates a new deep record reader object associated to the provided partition .
40,441
/**
 * Creates an immutable pair from the two given values.
 *
 * @param left  the left element
 * @param right the right element
 * @param <L>   left element type
 * @param <R>   right element type
 * @return a new pair holding both values
 */
public static <L, R> Pair<L, R> create(L left, R right) {
    Pair<L, R> pair = new Pair<>(left, right);
    return pair;
}
Creates a new immutable pair of objects .
40,442
/**
 * Returns the token range splits of the Cassandra ring that will be mapped to
 * Spark partitions: every physical range is further split per node.
 *
 * @return all splits, grouped by their originating physical range
 */
public List<DeepTokenRange> getSplits() {
    List<DeepTokenRange> splits = new ArrayList<>();
    for (DeepTokenRange physicalRange : getRanges()) {
        splits.addAll(getSplits(physicalRange));
    }
    return splits;
}
Returns the token range splits of the Cassandra ring that will be mapped to Spark partitions .
40,443
/**
 * Returns the physical token ranges of the Cassandra ring (no splitting),
 * preferring describe_local_ring and falling back to describe_ring on servers
 * that do not implement it.
 *
 * @return the ring's token ranges with their endpoints
 * @throws DeepGenericException on any Thrift failure
 */
public List<DeepTokenRange> getRanges() {
    try {
        List<TokenRange> tokenRanges;
        ThriftClient client = ThriftClient.build(host, rpcPort);
        try {
            tokenRanges = client.describe_local_ring(keyspace);
        } catch (TApplicationException e) {
            if (e.getType() == TApplicationException.UNKNOWN_METHOD) {
                // Older server: fall back to the cluster-wide ring description
                tokenRanges = client.describe_ring(keyspace);
            } else {
                throw new DeepGenericException("Unknown server error", e);
            }
        } finally {
            // FIX: the original leaked the connection when describe_* threw an
            // unexpected error; always release it.
            client.close();
        }
        List<DeepTokenRange> deepTokenRanges = new ArrayList<>(tokenRanges.size());
        for (TokenRange tokenRange : tokenRanges) {
            Comparable start = tokenAsComparable(tokenRange.getStart_token());
            Comparable end = tokenAsComparable(tokenRange.getEnd_token());
            deepTokenRanges.add(new DeepTokenRange(start, end, tokenRange.getEndpoints()));
        }
        return deepTokenRanges;
    } catch (TException e) {
        throw new DeepGenericException("No available replicas for get ring token ranges", e);
    }
}
Returns the token ranges of the Cassandra ring that will be mapped to Spark partitions . The returned ranges are the Cassandra s physical ones without any splitting .
40,444
/**
 * Computes the splits of a single token range by asking each replica endpoint
 * in turn for describe_splits_ex, returning the first successful answer.
 *
 * @param deepTokenRange the physical range to split
 * @return the range's splits
 * @throws DeepGenericException when every replica endpoint fails
 */
public List<DeepTokenRange> getSplits(DeepTokenRange deepTokenRange) {
    String start = tokenAsString((Comparable) deepTokenRange.getStartToken());
    String end = tokenAsString((Comparable) deepTokenRange.getEndToken());
    List<String> endpoints = deepTokenRange.getReplicas();
    for (String endpoint : endpoints) {
        try {
            ThriftClient client = ThriftClient.build(endpoint, rpcPort, keyspace);
            try {
                List<CfSplit> splits = client.describe_splits_ex(columnFamily, start, end, splitSize);
                return deepTokenRanges(splits, endpoints);
            } finally {
                // FIX: close the client even when describe_splits_ex fails
                client.close();
            }
        } catch (TException e) {
            // FIX: SLF4J substitutes {} placeholders — the original "%s" pattern was
            // logged verbatim; the exception is now attached for diagnosis.
            LOG.warn("Endpoint {} failed while splitting range {}", endpoint, deepTokenRange, e);
        }
    }
    throw new DeepGenericException("No available replicas for splitting range " + deepTokenRange);
}
Returns the computed token range splits of the specified token range .
40,445
/**
 * Converts Thrift CfSplits to DeepTokenRanges, assigning the given endpoints to
 * all of them. A split whose start equals its end covers the whole ring; a
 * split whose start is greater than its end wraps around the ring and is
 * divided into two ranges at the partitioner's minimum token — so the result
 * may contain one more range than the input.
 *
 * @param splits    the Thrift splits
 * @param endpoints the replica endpoints shared by every produced range
 * @return the equivalent Deep token ranges
 */
public List<DeepTokenRange> deepTokenRanges(List<CfSplit> splits, List<String> endpoints) {
    List<DeepTokenRange> ranges = new ArrayList<>();
    for (CfSplit split : splits) {
        Comparable start = tokenAsComparable(split.getStart_token());
        Comparable end = tokenAsComparable(split.getEnd_token());
        if (start.equals(end)) {
            // Degenerate split: represents the entire ring
            ranges.add(new DeepTokenRange(minToken, minToken, endpoints));
        } else if (start.compareTo(end) > 0) {
            // Wrap-around split: cut it at the minimum token
            ranges.add(new DeepTokenRange(start, minToken, endpoints));
            ranges.add(new DeepTokenRange(minToken, end, endpoints));
        } else {
            ranges.add(new DeepTokenRange(start, end, endpoints));
        }
    }
    return ranges;
}
Returns the Deep splits represented by the specified Thrift splits using the specified endpoints for all of them . Note that the returned list can contain one more ranges than the specified because the range containing the partitioner s minimum token are divided into two ranges .
40,446
/**
 * Adds the given Aerospike node ports to this configuration.
 *
 * @param ports ports to append to the configured port list
 * @return this configuration, for chaining
 */
public AerospikeDeepJobConfig<T> port(Integer[] ports) {
    for (Integer nodePort : ports) {
        this.portList.add(nodePort);
    }
    return this;
}
Set Aerospike nodes ports .
40,447
/**
 * Validates the Aerospike connection parameters: hosts, namespace (catalog),
 * set (table), ports — falling back to the single configured port when the
 * port list is empty — and the host/port cardinality match.
 *
 * @throws IllegalArgumentException describing the first missing/invalid property
 */
private void validate() {
    if (host.isEmpty()) {
        throw new IllegalArgumentException("host cannot be null");
    }
    if (catalog == null) {
        throw new IllegalArgumentException("namespace cannot be null");
    }
    if (table == null) {
        throw new IllegalArgumentException("set cannot be null");
    }
    if (portList.isEmpty()) {
        // No explicit list: replicate the scalar port, if one was configured
        if (port <= 0) {
            throw new IllegalArgumentException("port cannot be null");
        }
        port(port);
    }
    if (host.size() != portList.size()) {
        throw new IllegalArgumentException("Host and ports cardinality must be the same");
    }
}
Validates connection parameters .
40,448
/**
 * Configures the Aerospike operation from the received Deep filters: an
 * equality filter maps to a "scan" operation, a range filter to "numrange".
 * At most one filter is supported.
 *
 * @param filters the Deep filters (zero or one elements honored)
 * @return this configuration, for chaining
 * @throws UnsupportedOperationException on multiple filters or unsupported filter types
 */
public AerospikeDeepJobConfig<T> filterQuery(Filter[] filters) {
    if (filters.length > 1) {
        throw new UnsupportedOperationException("Aerospike currently accepts only one filter operations");
    }
    if (filters.length > 0) {
        Filter deepFilter = filters[0];
        if (!isValidAerospikeFilter(deepFilter)) {
            throw new UnsupportedOperationException(
                    "Aerospike currently supports only equality and range filter operations");
        }
        if (!deepFilter.getFilterType().equals(FilterType.EQ)) {
            // Range filter → numrange operation
            operation("numrange");
            setAerospikeNumrange(deepFilter);
        } else {
            // Equality filter → scan operation
            operation("scan");
            setAerospikeEqualsFilter(deepFilter);
        }
    }
    return this;
}
Configure Aerospike filters with the received Deep Filter objects .
40,449
/**
 * Builds an entity of type {@code classEntity} from a row represented as a
 * column-name → value map, invoking the setter of each annotated Deep field.
 * When the direct setter call fails, the value is re-cast with
 * {@code Utils.castNumberType} and the call retried.
 *
 * @param classEntity entity class to instantiate (requires a no-arg constructor)
 * @param row         column name → value map
 * @param config      job configuration (not read by this method — confirm it is needed)
 * @param <T>         entity type
 * @param <S>         configuration type
 * @return the populated entity
 * @throws IllegalAccessException    if the class or a setter is not accessible
 * @throws InstantiationException    if the entity cannot be instantiated
 * @throws InvocationTargetException if a setter throws during the retry
 */
public static <T, S extends DeepJobConfig> T getObjectFromRow(Class<T> classEntity, Map<String, Object> row,
        DeepJobConfig<T, S> config) throws IllegalAccessException, InstantiationException,
        InvocationTargetException {
    T t = classEntity.newInstance();
    Field[] fields = AnnotationUtils.filterDeepFields(classEntity);
    for (Field field : fields) {
        Object currentRow = null;
        Method method = null;
        Class<?> classField = field.getType();
        try {
            method = Utils.findSetter(field.getName(), classEntity, field.getType());
            currentRow = row.get(AnnotationUtils.deepFieldName(field));
            if (currentRow != null) {
                method.invoke(t, currentRow);
            }
        } catch (IllegalAccessException | InvocationTargetException | IllegalArgumentException e) {
            // Fallback: retry with the value coerced to the field's numeric type.
            // NOTE(review): if findSetter itself threw, method is still null here and
            // the retry raises NullPointerException — confirm findSetter cannot throw
            // one of the caught types.
            // NOTE(review): the log prints the entity (t) after "value:"; currentRow
            // looks like the intended value — confirm.
            LOG.error("impossible to create a java object from column:" + field.getName() + " and type:"
                    + field.getType() + " and value:" + t + "; recordReceived:" + currentRow);
            method.invoke(t, Utils.castNumberType(currentRow, classField));
        }
    }
    return t;
}
Returns a Stratio Entity from a Jdbc row represented as a map .
40,450
/**
 * Builds a JDBC row (column name -> value map) from a Stratio Deep entity.
 * Only Deep-annotated fields with non-null values are included.
 *
 * @param entity entity to serialize
 * @return map keyed by the Deep field name of each populated field
 */
public static <T> Map<String, Object> getRowFromObject(T entity) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Map<String, Object> columns = new HashMap<>();
    Class<?> entityClass = entity.getClass();
    for (Field deepField : AnnotationUtils.filterDeepFields(entityClass)) {
        Method getter = Utils.findGetter(deepField.getName(), entityClass);
        Object value = getter.invoke(entity);
        if (value == null) {
            continue;
        }
        columns.put(AnnotationUtils.deepFieldName(deepField), value);
    }
    return columns;
}
Returns a JDBC row data structure from a Stratio Deep Entity .
40,451
/**
 * Builds a Cells object from a JDBC row data structure, naming the Cells after the
 * configuration's catalog and table ("catalog.table").
 *
 * @param row    column name -> value map for one row
 * @param config job configuration supplying catalog and table names
 * @return Cells holding one Cell per map entry
 */
public static <T extends DeepJobConfig> Cells getCellsFromObject(Map<String, Object> row, DeepJobConfig<Cells, T> config) {
    String qualifiedTable = config.getCatalog() + "." + config.getTable();
    Cells cells = new Cells(qualifiedTable);
    for (Map.Entry<String, Object> column : row.entrySet()) {
        cells.add(Cell.create(column.getKey(), column.getValue()));
    }
    return cells;
}
Returns a Cells object from a JDBC row data structure .
40,452
/**
 * Builds a JDBC row data structure (column name -> value map) from a Cells object.
 *
 * @param cells source Cells
 * @return map with one entry per contained Cell
 */
public static Map<String, Object> getObjectFromCells(Cells cells) {
    Map<String, Object> row = new HashMap<>();
    for (Cell currentCell : cells.getCells()) {
        row.put(currentCell.getName(), currentCell.getValue());
    }
    return row;
}
Returns a JDBC row data structure from a Cells object .
40,453
/**
 * Builds a new Thrift client connected to the given host and port over a framed
 * binary-protocol transport; when keyspace is non-null it is selected on the connection.
 *
 * @param host     Cassandra host to connect to
 * @param port     Thrift port
 * @param keyspace keyspace to set on the new connection, or null to skip
 * @return a connected ThriftClient
 * @throws TException if the transport cannot be opened or the keyspace cannot be set
 */
public static ThriftClient build(String host, int port, String keyspace) throws TException {
    TTransport transport = new TFramedTransport(new TSocket(host, port));
    TProtocol protocol = new TBinaryProtocol(transport);
    ThriftClient client = new ThriftClient(protocol);
    transport.open();
    if (keyspace != null) {
        client.set_keyspace(keyspace);
    }
    return client;
}
Returns a new client for the specified host setting the specified keyspace .
40,454
/**
 * Builds a new Thrift client for the given host and port without selecting a keyspace.
 *
 * @see #build(String, int, String)
 */
public static ThriftClient build(String host, int port) throws TException {
    return build(host, port, null);
}
Returns a new client for the specified host .
40,455
/**
 * Creates a SparkSQL Row whose columns are the cell values of the given Cells object,
 * in the Cells' own iteration order.
 */
public static Row getRowFromCells(Cells cells) {
    Object[] rowValues = cells.getCellValues().toArray();
    return RowFactory.create(rowValues);
}
Creates a SparkSQL Row object from a Stratio Cells object
40,456
/**
 * Converts a collection of Cells objects into a collection of SparkSQL Rows,
 * one Row per Cells, preserving iteration order.
 */
public static Collection<Row> getRowsFromsCells(Collection<Cells> cellsCol) {
    Collection<Row> rows = new ArrayList<>(cellsCol.size());
    for (Cells cellsGroup : cellsCol) {
        rows.add(getRowFromCells(cellsGroup));
    }
    return rows;
}
Returns a Collection of SparkSQL Row objects from a collection of Stratio Cells objects
40,457
/**
 * Returns the list of Cell objects associated with the given table, creating and
 * registering an empty list on first access. Falls back to this object's default
 * nameSpace when the argument is null or empty.
 *
 * @param nameSpace table name, or null/empty for the default table
 * @return the (mutable) per-table cell list, never null
 */
private List<Cell> getCellsByTable(String nameSpace) {
    String tName = StringUtils.isEmpty(nameSpace) ? this.nameSpace : nameSpace;
    // computeIfAbsent replaces the original get/null-check/put sequence with one idiomatic call.
    return cells.computeIfAbsent(tName, key -> new ArrayList<>());
}
Given the table name returns the List of Cell object associated to that table .
40,458
/**
 * Adds a Cell to this Cells instance under the default table.
 *
 * @param c cell to add, must not be null
 * @return the result of the underlying list add
 * @throws DeepGenericException (wrapping IllegalArgumentException) when c is null
 */
public boolean add(Cell c) {
    if (c == null) {
        IllegalArgumentException cause = new IllegalArgumentException("cell parameter cannot be null");
        throw new DeepGenericException(cause);
    }
    List<Cell> defaultTableCells = getCellsByTable(nameSpace);
    return defaultTableCells.add(c);
}
Adds a new Cell object to this Cells instance . Associates the provided Cell to the default table .
40,459
/**
 * Returns an immutable flat view of every Cell known to this object. The per-table
 * grouping is lost: all tables' cell lists are concatenated into a single list.
 */
public Collection<Cell> getCells() {
    List<Cell> flattened = new ArrayList<>();
    for (List<Cell> tableCells : cells.values()) {
        flattened.addAll(tableCells);
    }
    return Collections.unmodifiableList(flattened);
}
Returns an immutable collection of all the Cell objects contained in this Cells . Beware that internally each list of cells is associated to the table owning those cells this method flattens the lists of cells known to this object to just one list thus losing the table information .
40,460
/**
 * Adds the provided row to the current batch, lazily creating the batch task (and its
 * CQL statement) from the first row seen. When the batch-size threshold is reached the
 * batch is flushed asynchronously.
 *
 * @param keys   key cells of the row
 * @param values non-key cells of the row
 */
public void write(Cells keys, Cells values) {
    if (!hasCurrentTask) {
        // Build the CQL statement from the first row's layout and start a fresh batch task.
        String localCql = queryBuilder.prepareQuery(keys, values);
        currentTask = new WriteTask(localCql);
        hasCurrentTask = true;
    }
    // Bind values first, then keys — presumably matching the placeholder order produced
    // by prepareQuery; TODO confirm against the query builder.
    List<Object> allValues = new ArrayList<>(values.getCellValues());
    allValues.addAll(keys.getCellValues());
    currentTask.add(allValues);
    if (isBatchSizeReached()) {
        executeTaskAsync();
    }
}
Adds the provided row to a batch . If the batch size reaches the threshold configured in IDeepJobConfig . getBatchSize the batch will be sent to the data store .
40,461
/**
 * Submits the current batch task for asynchronous execution. The returned future is
 * tracked in pendingTasks until completion so waitForCompletion() can join on it, and
 * the bookkeeping entry is removed by a same-thread listener when the task finishes.
 */
private void executeTaskAsync() {
    final String taskId = currentTask.getId();
    ListenableFuture<?> future = taskExecutorService.submit(currentTask);
    pendingTasks.put(taskId, future);
    future.addListener(new Runnable() {
        public void run() {
            // Runs on the thread that completes the future (sameThreadExecutor).
            pendingTasks.remove(taskId);
        }
    }, MoreExecutors.sameThreadExecutor());
    // Force the next write() to start a new batch task.
    hasCurrentTask = false;
}
Submits the task for future execution . Task is added to pending tasks and removed when the execution is done .
40,462
/**
 * Blocks until every pending asynchronous write task has completed. Failures are
 * logged but do not abort the wait for the remaining tasks.
 */
private void waitForCompletion() {
    for (ListenableFuture<?> future : pendingTasks.values()) {
        try {
            future.get();
        } catch (InterruptedException e) {
            // BUG FIX: the original swallowed the interrupt; restore the flag so callers
            // up the stack can observe that this thread was interrupted.
            Thread.currentThread().interrupt();
            LOG.error("[" + this + "] Error waiting for writes to complete: " + e.getMessage());
        } catch (ExecutionException e) {
            LOG.error("[" + this + "] Error waiting for writes to complete: " + e.getMessage());
        }
    }
}
Waits until all pending tasks completed .
40,463
/**
 * Generates a CellValidator for a generic object instance. The live instance (rather
 * than just the class) is needed so the Cassandra marshaller can distinguish types that
 * share a Java class but differ on the wire (e.g. UUID vs TimeUUID).
 *
 * @param obj instance to derive the validator from; null yields null
 */
public static <T> CellValidator cellValidator(T obj) {
    if (obj == null) {
        return null;
    }
    Kind kind = Kind.objectToKind(obj);
    AbstractType<?> tAbstractType = CassandraUtils.marshallerInstance(obj);
    String validatorClassName = tAbstractType.getClass().getCanonicalName();
    // No per-element validator types are derived here; collection info stays null.
    Collection<String> validatorTypes = null;
    DataType.Name cqlTypeName = MAP_JAVA_TYPE_TO_DATA_TYPE_NAME.get(validatorClassName);
    return new CellValidator(validatorClassName, kind, validatorTypes, cqlTypeName);
}
Generates a CellValidator for a generic instance of an object . We need the actual instance in order to differentiate between an UUID and a TimeUUID .
40,464
/**
 * Returns the lower-cased CQL native type name mapped to the given Java collection
 * element type.
 * NOTE(review): throws NullPointerException when the type is absent from
 * MAP_JAVA_TYPE_TO_CQL_TYPE — confirm callers only pass mapped types.
 */
private static String getCollectionInnerType(Class<?> type) {
    CQL3Type.Native nativeType = MAP_JAVA_TYPE_TO_CQL_TYPE.get(type);
    return nativeType.name().toLowerCase();
}
Returns the lower-cased CQL native type name for the given Java collection element type .
40,465
/**
 * Factory method: creates an entity-based (read) Cassandra configuration object bound
 * to the given IDeepType entity class.
 */
public static <T extends IDeepType> CassandraDeepJobConfig<T> create(Class<T> entityClass) {
    return new EntityDeepJobConfig<>(entityClass);
}
Creates an entity - based configuration object .
40,466
/**
 * Factory method: creates an entity-based write Cassandra configuration object bound
 * to the given IDeepType entity class (the boolean flag marks it as a write config).
 */
public static <T extends IDeepType> CassandraDeepJobConfig<T> createWriteConfig(Class<T> entityClass) {
    return new EntityDeepJobConfig<>(entityClass, true);
}
Creates an entity - based write configuration object .
40,467
/**
 * Converts a BSONObject into an instance of the given entity class (annotated with
 * Deep's annotations), recursing into nested sub-documents and lists of sub-documents.
 *
 * @param classEntity entity class to instantiate (needs a no-arg constructor)
 * @param bsonObject  source BSON document
 * @return the populated entity
 */
public static <T> T getObjectFromBson(Class<T> classEntity, BSONObject bsonObject) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    T t = classEntity.newInstance();
    Field[] fields = AnnotationUtils.filterDeepFields(classEntity);
    Object insert = null;
    for (Field field : fields) {
        Object currentBson = null;
        Method method = null;
        try {
            method = Utils.findSetter(field.getName(), classEntity, field.getType());
            Class<?> classField = field.getType();
            currentBson = bsonObject.get(AnnotationUtils.deepFieldName(field));
            if (currentBson != null) {
                if (Iterable.class.isAssignableFrom(classField)) {
                    // List of sub-documents: delegate element conversion to subDocumentListCase.
                    Type type = field.getGenericType();
                    insert = subDocumentListCase(type, (List) bsonObject.get(AnnotationUtils.deepFieldName(field)));
                } else if (IDeepType.class.isAssignableFrom(classField)) {
                    // Nested entity: recurse into the sub-document.
                    insert = getObjectFromBson(classField, (BSONObject) bsonObject.get(AnnotationUtils.deepFieldName(field)));
                } else {
                    insert = currentBson;
                }
                method.invoke(t, insert);
            }
        } catch (IllegalAccessException | InstantiationException | InvocationTargetException | IllegalArgumentException e) {
            LOG.error("impossible to create a java object from Bson field:" + field.getName() + " and type:" + field.getType() + " and value:" + t + "; bsonReceived:" + currentBson + ", bsonClassReceived:" + currentBson.getClass());
            // NOTE(review): castNumberType is retried with t.getClass() (the entity class)
            // rather than the field type — confirm this is intended.
            method.invoke(t, Utils.castNumberType(insert, t.getClass()));
        }
    }
    return t;
}
Converts a BsonObject into an entity class annotated with Deep's annotations .
40,468
/**
 * Converts an entity annotated with Deep's annotations into a DBObject, recursing into
 * nested collections and nested IDeepType entities. Null-valued fields are skipped.
 */
public static <T> DBObject getBsonFromObject(T t) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Field[] fields = AnnotationUtils.filterDeepFields(t.getClass());
    DBObject bson = new BasicDBObject();
    for (Field field : fields) {
        Method method = Utils.findGetter(field.getName(), t.getClass());
        Object object = method.invoke(t);
        if (object != null) {
            if (Collection.class.isAssignableFrom(field.getType())) {
                // NOTE(review): every element is serialized via getBsonFromObject, which
                // assumes the elements are themselves entities — confirm for primitive lists.
                Collection c = (Collection) object;
                Iterator iterator = c.iterator();
                List innerBsonList = new ArrayList<>();
                while (iterator.hasNext()) {
                    innerBsonList.add(getBsonFromObject(iterator.next()));
                }
                bson.put(AnnotationUtils.deepFieldName(field), innerBsonList);
            } else if (IDeepType.class.isAssignableFrom(field.getType())) {
                // Nested entity: recurse.
                bson.put(AnnotationUtils.deepFieldName(field), getBsonFromObject((IDeepType) object));
            } else {
                bson.put(AnnotationUtils.deepFieldName(field), object);
            }
        }
    }
    return bson;
}
Converts an entity class annotated with Deep's annotations into a BsonObject .
40,469
/**
 * Converts a BSONObject into a Cells object. Nested documents become nested Cells and
 * lists are converted element by element; conversion failures are logged and skipped.
 *
 * @param bsonObject source BSON document
 * @param tableName  table name for the resulting Cells, or null for inner documents
 */
public static Cells getCellFromBson(BSONObject bsonObject, String tableName) {
    Cells cells = tableName != null ? new Cells(tableName) : new Cells();
    Map<String, Object> map = bsonObject.toMap();
    Set<Map.Entry<String, Object>> entryBson = map.entrySet();
    for (Map.Entry<String, Object> entry : entryBson) {
        try {
            // NOTE(review): entry.getValue().getClass() NPEs on null-valued BSON fields —
            // confirm upstream guarantees non-null values.
            if (List.class.isAssignableFrom(entry.getValue().getClass())) {
                List innerCell = new ArrayList<>();
                for (Object innerBson : (List) entry.getValue()) {
                    if (innerBson instanceof DBObject) {
                        // Sub-document inside the list: recurse without a table name.
                        innerCell.add(getCellFromBson((DBObject) innerBson, null));
                    } else {
                        innerCell.add(innerBson);
                    }
                }
                cells.add(Cell.create(entry.getKey(), innerCell));
            } else if (BSONObject.class.isAssignableFrom(entry.getValue().getClass())) {
                Cells innerCells = getCellFromBson((BSONObject) entry.getValue(), null);
                cells.add(Cell.create(entry.getKey(), innerCells));
            } else {
                cells.add(Cell.create(entry.getKey(), entry.getValue()));
            }
        } catch (IllegalArgumentException e) {
            LOG.error("impossible to create a java cell from Bson field:" + entry.getKey() + ", type:" + entry.getValue().getClass() + ", value:" + entry.getValue());
        }
    }
    return cells;
}
Converts a BsonObject into a Cells object .
40,470
/**
 * Converts a Cells object into a DBObject. Nested Cells become nested documents and
 * collection values are converted element by element. Null-valued cells are skipped.
 */
public static DBObject getDBObjectFromCell(Cells cells) {
    DBObject bson = new BasicDBObject();
    for (Cell cell : cells) {
        // NOTE(review): the null check uses getValue() while the branches use
        // getCellValue() — confirm both accessors return the same value.
        if (cell.getValue() != null) {
            if (Collection.class.isAssignableFrom(cell.getCellValue().getClass())) {
                Collection c = (Collection) cell.getCellValue();
                Iterator iterator = c.iterator();
                List<Object> innerBsonList = new ArrayList<>();
                while (iterator.hasNext()) {
                    Object currentO = iterator.next();
                    if (currentO instanceof Cells) {
                        // Nested Cells inside a collection: recurse.
                        innerBsonList.add(getDBObjectFromCell((Cells) currentO));
                    } else {
                        innerBsonList.add(currentO);
                    }
                }
                bson.put(cell.getCellName(), innerBsonList);
            } else if (Cells.class.isAssignableFrom(cell.getCellValue().getClass())) {
                bson.put(cell.getCellName(), getDBObjectFromCell((Cells) cell.getCellValue()));
            } else {
                bson.put(cell.getCellName(), cell.getCellValue());
            }
        }
    }
    return bson;
}
Converts a Cells object into a DBObject .
40,471
/**
 * Transforms a database row (column name -> value map) into a Stratio Deep entity,
 * wrapping any reflection failure in a DeepTransformException.
 */
protected T transformElement(Map<String, Object> entity) {
    try {
        Class<T> targetClass = (Class<T>) jdbcDeepJobConfig.getEntityClass();
        return UtilJdbc.getObjectFromRow(targetClass, entity, jdbcDeepJobConfig);
    } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) {
        throw new DeepTransformException(e);
    }
}
Transforms a database row represented as a Map into a Stratio Deep Entity .
40,472
/**
 * Transforms a Stratio Deep entity into a database row represented as a map,
 * wrapping any reflection failure in a DeepTransformException.
 */
protected Map<String, Object> transformElement(T entity) {
    Map<String, Object> row;
    try {
        row = UtilJdbc.getRowFromObject(entity);
    } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) {
        throw new DeepTransformException(e);
    }
    return row;
}
Transforms a Stratio Deep Entity into a database row represented as a Map .
40,473
/**
 * Converts an AerospikeRecord into an instance of the given entity class (Deep
 * annotations), applying the configured client-side equality filter and the optional
 * input-column projection. Only String/Integer/Long bin values are supported; an
 * Integer bin is widened to Long when the target field is a Long.
 * Records failing the equality filter yield an "empty" entity (no setters invoked).
 */
public static <T> T getObjectFromAerospikeRecord(Class<T> classEntity, AerospikeRecord aerospikeRecord, AerospikeDeepJobConfig aerospikeConfig) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Tuple2<String, Object> equalsFilter = aerospikeConfig.getEqualsFilter();
    String equalsFilterBin = equalsFilter != null ? equalsFilter._1() : null;
    Object equalsFilterValue = equalsFilter != null ? equalsFilter._2() : null;
    Map<String, Object> bins = aerospikeRecord.bins;
    T t = classEntity.newInstance();
    if (equalsFilter == null || checkEqualityFilter(bins, equalsFilterBin, equalsFilterValue)) {
        Field[] fields = AnnotationUtils.filterDeepFields(classEntity);
        Object insert = null;
        List<String> inputColumns = null;
        if (aerospikeConfig.getInputColumns() != null) {
            inputColumns = Arrays.asList(aerospikeConfig.getInputColumns());
        }
        for (Field field : fields) {
            // Skip fields excluded by the input-column projection.
            if (inputColumns != null && !inputColumns.contains(AnnotationUtils.deepFieldName(field))) {
                continue;
            }
            Object currentBin = null;
            Method method = null;
            Class<?> classField = field.getType();
            try {
                method = Utils.findSetter(field.getName(), classEntity, field.getType());
                currentBin = bins.get(AnnotationUtils.deepFieldName(field));
                if (currentBin != null) {
                    if (currentBin instanceof Integer && classField.equals(Long.class)) {
                        // Widen Integer bins to Long for Long-typed fields.
                        currentBin = new Long((Integer) currentBin);
                    }
                    if (currentBin instanceof String || currentBin instanceof Integer || currentBin instanceof Long) {
                        insert = currentBin;
                    } else {
                        throw new DeepGenericException("Data type [" + classField.toString() + "] not supported in Aerospike entity extractor (only Strings and Integers)");
                    }
                    method.invoke(t, insert);
                }
            } catch (IllegalAccessException | InvocationTargetException | IllegalArgumentException e) {
                LOG.error("impossible to create a java object from Bin:" + field.getName() + " and type:" + field.getType() + " and value:" + t + "; recordReceived:" + currentBin);
                // Retry the setter with the value coerced to the field's numeric type.
                method.invoke(t, Utils.castNumberType(insert, classField));
            }
        }
    }
    return t;
}
Converts an AerospikeRecord into an entity class annotated with Deep's annotations .
40,474
/**
 * Converts an entity (Deep annotations) into an Aerospike (key, record) pair.
 * Exactly one key field is required; all non-null annotated fields become bins and
 * the record is created with generation and expiration set to 0.
 *
 * @throws InvocationTargetException when zero or more than one key field is declared
 */
public static <T> Pair<Object, AerospikeRecord> getAerospikeRecordFromObject(T t) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Field[] fields = AnnotationUtils.filterDeepFields(t.getClass());
    Pair<Field[], Field[]> keysAndFields = AnnotationUtils.filterKeyFields(t.getClass());
    Field[] keys = keysAndFields.left;
    Object key;
    Map<String, Object> bins = new HashMap<>();
    if (keys.length == 0) {
        throw new InvocationTargetException(new Exception("One key field must be defined."));
    } else if (keys.length > 1) {
        throw new InvocationTargetException(new Exception("Aerospike only supports one key field"));
    } else {
        // Single key field: its getter value becomes the Aerospike record key.
        Field keyField = keys[0];
        Method method = Utils.findGetter(keyField.getName(), t.getClass());
        key = method.invoke(t);
    }
    for (Field field : fields) {
        Method method = Utils.findGetter(field.getName(), t.getClass());
        Object object = method.invoke(t);
        if (object != null) {
            bins.put(AnnotationUtils.deepFieldName(field), object);
        }
    }
    Record record = new Record(bins, 0, 0);
    AerospikeRecord aerospikeRecord = new AerospikeRecord(record);
    Pair<Object, AerospikeRecord> result = Pair.create(key, aerospikeRecord);
    return result;
}
Converts an entity class annotated with Deep's annotations into an AerospikeRecord .
40,475
/**
 * Converts an AerospikeRecord into a Cells object named "namespace.set", applying the
 * configured client-side equality filter and the optional input-column projection.
 * The first cell produced is flagged as key and cluster key.
 *
 * @throws InvocationTargetException when a requested input column is missing from the record
 */
public static Cells getCellFromAerospikeRecord(AerospikeKey key, AerospikeRecord aerospikeRecord, AerospikeDeepJobConfig aerospikeConfig) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    String namespace = aerospikeConfig.getNamespace() + "." + aerospikeConfig.getSet();
    String setName = aerospikeConfig.getSet();
    String[] inputColumns = aerospikeConfig.getInputColumns();
    Tuple2<String, Object> equalsFilter = aerospikeConfig.getEqualsFilter();
    String equalsFilterBin = equalsFilter != null ? equalsFilter._1() : null;
    Object equalsFilterValue = equalsFilter != null ? equalsFilter._2() : null;
    Cells cells = namespace != null ? new Cells(namespace) : new Cells();
    Map<String, Object> map = aerospikeRecord.bins;
    if (inputColumns != null) {
        // Projection requested: only the listed bins are extracted, in the given order.
        if (equalsFilter == null || checkEqualityFilter(map, equalsFilterBin, equalsFilterValue)) {
            for (int i = 0; i < inputColumns.length; i++) {
                String binName = inputColumns[i];
                if (map.containsKey(binName)) {
                    Cell cell = Cell.create(binName, map.get(binName));
                    if (i == 0) {
                        // First projected column doubles as key/cluster key.
                        cell.setIsClusterKey(true);
                        cell.setIsKey(true);
                    }
                    cells.add(namespace, cell);
                } else {
                    throw new InvocationTargetException(new Exception("There is no [" + binName + "] on aerospike [" + namespace + "." + setName + "] set"));
                }
            }
        }
    } else {
        // No projection: extract every bin; the first bin in iteration order becomes the key.
        if (equalsFilter == null || checkEqualityFilter(map, equalsFilterBin, equalsFilterValue)) {
            int index = 0;
            for (Map.Entry<String, Object> bin : map.entrySet()) {
                Cell cell = Cell.create(bin.getKey(), bin.getValue());
                if (index == 0) {
                    cell.setIsClusterKey(true);
                    cell.setIsKey(true);
                }
                cells.add(namespace, cell);
                index++;
            }
        }
    }
    return cells;
}
Converts an AerospikeRecord into a Cells object .
40,476
/**
 * Converts a Cells object into an Aerospike (key, record) pair. Exactly one cell must
 * be flagged as key; its value becomes the record key. All cells (the key cell
 * included) are stored as bins; generation and expiration are set to 0.
 * NOTE(review): a key cell whose value is null is treated as "no key found", and a
 * second key cell is only rejected after a first non-null key — confirm intended.
 *
 * @throws InvocationTargetException when no key or more than one key is present
 */
public static Pair<Object, AerospikeRecord> getAerospikeRecordFromCell(Cells cells) throws IllegalAccessException, InstantiationException, InvocationTargetException {
    Map<String, Object> bins = new HashMap<>();
    Object key = null;
    for (Cell cell : cells.getCells()) {
        if (key == null) {
            if (cell.isKey()) {
                key = cell.getValue();
            }
        } else {
            if (cell.isKey()) {
                throw new InvocationTargetException(new Exception("Aerospike records must have only one key"));
            }
        }
        bins.put(cell.getCellName(), cell.getValue());
    }
    if (key == null) {
        throw new InvocationTargetException(new Exception("Aerospike records must have one primary key"));
    }
    Record record = new Record(bins, 0, 0);
    return Pair.create(key, new AerospikeRecord(record));
}
Converts a Cells object into an Aerospike key/record pair .
40,477
/**
 * Converts a JSON document (LinkedMapWritable) into an instance of the given entity
 * class (Deep annotations), recursing into nested documents and arrays. Scalar values
 * are unwrapped from their Writable containers; on a setter type mismatch the value is
 * retried coerced to a numeric type.
 */
public static <T> T getObjectFromJson(Class<T> classEntity, LinkedMapWritable jsonObject) throws IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException {
    T t = classEntity.newInstance();
    Field[] fields = AnnotationUtils.filterDeepFields(classEntity);
    Object insert;
    for (Field field : fields) {
        Method method = Utils.findSetter(field.getName(), classEntity, field.getType());
        Class<?> classField = field.getType();
        String key = AnnotationUtils.deepFieldName(field);
        // LinkedMapWritable is keyed by hadoop Text, so wrap the field name.
        Text text = new org.apache.hadoop.io.Text(key);
        Writable currentJson = jsonObject.get(text);
        if (currentJson != null) {
            if (Iterable.class.isAssignableFrom(classField)) {
                // Array of sub-documents: delegate element conversion to subDocumentListCase.
                Type type = field.getGenericType();
                insert = subDocumentListCase(type, (ArrayWritable) currentJson);
                method.invoke(t, (insert));
            } else if (IDeepType.class.isAssignableFrom(classField)) {
                // Nested entity: recurse into the nested document.
                insert = getObjectFromJson(classField, (LinkedMapWritable) currentJson);
                method.invoke(t, (insert));
            } else {
                insert = currentJson;
                try {
                    method.invoke(t, getObjectFromWritable((Writable) insert));
                } catch (Exception e) {
                    LOG.error("impossible to convert field " + t + " :" + field + " error: " + e.getMessage());
                    // NOTE(review): castNumberType receives t.getClass() rather than the
                    // field type — confirm this is intended.
                    method.invoke(t, Utils.castNumberType(getObjectFromWritable((Writable) insert), t.getClass()));
                }
            }
        }
    }
    return t;
}
Converts a JSON object ( LinkedMapWritable ) into an entity class annotated with Deep's annotations .
40,478
/**
 * Converts a JSON document (LinkedMapWritable) into a Cells object. Nested documents
 * become nested Cells; arrays are converted element by element.
 *
 * @param jsonObject source document
 * @param tableName  table name for the resulting Cells, or null for inner documents
 */
public static Cells getCellFromJson(LinkedMapWritable jsonObject, String tableName) throws IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException {
    Cells cells = tableName != null ? new Cells(tableName) : new Cells();
    Set<Map.Entry<Writable, Writable>> entryJson = jsonObject.entrySet();
    for (Map.Entry<Writable, Writable> entry : entryJson) {
        if (LinkedMapWritable.class.isAssignableFrom(entry.getValue().getClass())) {
            // Nested document -> nested Cells.
            Cells innerCells = getCellFromJson((LinkedMapWritable) entry.getValue(), null);
            cells.add(Cell.create(entry.getKey().toString(), innerCells));
        } else if (ArrayWritable.class.isAssignableFrom(entry.getValue().getClass())) {
            Writable[] writetable = ((ArrayWritable) entry.getValue()).get();
            List innerCell = new ArrayList<>();
            for (int i = 0; i < writetable.length; i++) {
                if (writetable[i] instanceof LinkedMapWritable) {
                    innerCell.add(getCellFromJson((LinkedMapWritable) writetable[i], null));
                } else {
                    // BUG FIX: convert the current array element; the original converted the
                    // whole ArrayWritable (entry.getValue()) once per element.
                    innerCell.add(getObjectFromWritable(writetable[i]));
                }
            }
            cells.add(Cell.create(entry.getKey().toString(), innerCell));
        } else {
            cells.add(Cell.create(entry.getKey().toString(), getObjectFromWritable(entry.getValue())));
        }
    }
    return cells;
}
Converts a JSON object ( LinkedMapWritable ) into a Cells object .
40,479
/**
 * Factory method: creates a metadata-only Cassandra Cell (a Cell without a value),
 * flagged as partition key and/or cluster key.
 */
public static Cell create(String cellName, DataType cellType, Boolean isPartitionKey, Boolean isClusterKey) {
    return new CassandraCell(cellName, cellType, isPartitionKey, isClusterKey);
}
Factory method creates a new metadata Cell i . e . a Cell without value .
40,480
/**
 * Transforms a database row (column name -> value map) into a Cells object using this
 * extractor's job configuration for catalog/table naming.
 */
protected Cells transformElement(Map<String, Object> entity) {
    return UtilJdbc.getCellsFromObject(entity, jdbcDeepJobConfig);
}
Transforms a database row represented as a Map into a Cells object .
40,481
/**
 * Transforms a Cells object into a database row represented as a column name -> value map.
 */
protected Map<String, Object> transformElement(Cells cells) {
    return UtilJdbc.getObjectFromCells(cells);
}
Transforms a Cells object into a database row represented as a Map .
40,482
/**
 * Recursively extracts a docker image TAR file and all nested layer.tar files it
 * contains into the given extraction directory.
 *
 * @param imageTarFile       TAR file to extract (only processed if it ends with TAR_SUFFIX)
 * @param imageExtractionDir directory to extract into
 * @param deleteTarFiles     when true, the TAR file is deleted after extraction
 */
public void extractDockerImageLayers(File imageTarFile, File imageExtractionDir, Boolean deleteTarFiles) {
    FilesScanner filesScanner = new FilesScanner();
    boolean success = false;
    if (imageTarFile.getName().endsWith(TAR_SUFFIX)) {
        success = unTar(imageTarFile.getName().toLowerCase(), imageExtractionDir.getAbsolutePath(), imageTarFile.getPath());
        // BUG FIX: only warn about a failed delete when deletion was actually requested;
        // the original logged the warning whenever deleteTarFiles was false.
        if (deleteTarFiles && !imageTarFile.delete()) {
            logger.warn("Was not able to delete {} (docker image TAR file)", imageTarFile.getName());
        }
    }
    if (success) {
        // Find nested layer TARs produced by the extraction and recurse into each.
        String[] fileNames = filesScanner.getDirectoryContent(imageExtractionDir.getAbsolutePath(), new String[]{LAYER_TAR}, new String[]{}, true, false);
        for (String filename : fileNames) {
            File layerToExtract = new File(imageExtractionDir + File.separator + filename);
            extractDockerImageLayers(layerToExtract, layerToExtract.getParentFile(), deleteTarFiles);
        }
    } else {
        logger.warn("Was not able to extract {} (docker image TAR file)", imageTarFile.getName());
    }
}
extract image layers
40,483
/**
 * Extracts a ZIP archive into innerDir, skipping entries that match any of the
 * configured filesExcludes glob patterns.
 *
 * @return true when every selected entry was extracted without error
 */
private boolean unZip(String innerDir, String archiveFile) {
    boolean success = true;
    ZipFile zipFile;
    try {
        zipFile = new ZipFile(archiveFile);
        List fileHeaderList = zipFile.getFileHeaders();
        // Pre-compile one glob PathMatcher per exclude pattern.
        List<PathMatcher> matchers = Arrays.stream(filesExcludes).map(fileExclude -> FileSystems.getDefault().getPathMatcher(GLOB_PREFIX + fileExclude)).collect(Collectors.toList());
        for (int i = 0; i < fileHeaderList.size(); i++) {
            FileHeader fileHeader = (FileHeader) fileHeaderList.get(i);
            String fileName = fileHeader.getFileName();
            if (filesExcludes.length > 0) {
                // Extract only entries matching none of the exclude patterns.
                Predicate<PathMatcher> matchesExcludes = pathMatcher -> pathMatcher.matches(Paths.get(innerDir, fileName));
                if (matchers.stream().noneMatch(matchesExcludes)) {
                    zipFile.extractFile(fileHeader, innerDir);
                }
            } else {
                zipFile.extractFile(fileHeader, innerDir);
            }
        }
    } catch (Exception e) {
        success = false;
        logger.warn("Error extracting file {}: {}", archiveFile, e.getMessage());
        logger.debug("Error extracting file {}: {}", archiveFile, e.getStackTrace());
    } finally {
        // NOTE(review): nulling the local reference has no effect on resource release —
        // confirm whether the zip library version in use requires an explicit close instead.
        zipFile = null;
    }
    return success;
}
Open and extract data from zip pattern files
40,484
/**
 * Extracts a TAR-family archive (tar, tar.gz/tgz, tar.bz2, or xz-wrapped tar) into
 * innerDir, choosing the un-archiver by file-name suffix. XZ archives are first
 * decompressed to an intermediate file which is then untarred.
 *
 * @return true when the archive was extracted without error
 */
private boolean unTar(String fileName, String innerDir, String archiveFile) {
    boolean success = true;
    TarUnArchiver unArchiver = new TarUnArchiver();
    try {
        File destDir = new File(innerDir);
        if (!destDir.exists()) {
            destDir.mkdirs();
        }
        if (fileName.endsWith(TAR_GZ_SUFFIX) || fileName.endsWith(TGZ_SUFFIX)) {
            unArchiver = new TarGZipUnArchiver();
        } else if (fileName.endsWith(TAR_BZ2_SUFFIX)) {
            unArchiver = new TarBZip2UnArchiver();
        } else if (fileName.endsWith(XZ_SUFFIX)) {
            // Decompress the xz wrapper first, then untar the intermediate file.
            // NOTE(review): the path is joined with Constants.BACK_SLASH — presumably this
            // should be File.separator for non-Windows platforms; confirm.
            String destFileUrl = destDir.getCanonicalPath() + Constants.BACK_SLASH + XZ_UN_ARCHIVER_FILE_NAME;
            success = unXz(new File(archiveFile), destFileUrl);
            archiveFile = destFileUrl;
        }
        if (success) {
            unArchiver.enableLogging(new ConsoleLogger(ConsoleLogger.LEVEL_DISABLED, UN_ARCHIVER_LOGGER));
            unArchiver.setSourceFile(new File(archiveFile));
            unArchiver.setDestDirectory(destDir);
            unArchiver.extract();
        }
    } catch (Exception e) {
        success = false;
        logger.warn("Error extracting file {}: {}", fileName, e.getMessage());
    }
    return success;
}
Open and extract data from Tar pattern files
40,485
/**
 * Decompresses an XZ file to the given destination path.
 *
 * @param srcFileToArchive XZ file to decompress
 * @param destFilePath     path of the decompressed output file
 * @return true when decompression succeeded
 */
public boolean unXz(File srcFileToArchive, String destFilePath) {
    boolean success = true;
    try {
        // Renamed local (was shadowing the class name XZUnArchiver).
        XZUnArchiver xzUnArchiver = new XZUnArchiver();
        xzUnArchiver.enableLogging(new ConsoleLogger(ConsoleLogger.LEVEL_DISABLED, UN_ARCHIVER_LOGGER));
        xzUnArchiver.setSourceFile(srcFileToArchive);
        xzUnArchiver.setDestFile(new File(destFilePath));
        xzUnArchiver.extract();
    } catch (Exception e) {
        success = false;
        logger.warn("Failed to extract Xz file : {} - {}", srcFileToArchive.getPath(), e.getMessage());
    }
    return success;
}
extract xz files
40,486
/**
 * Strips any leading directory components from the given path, returning just the
 * file name. Forward slashes take precedence over back slashes when both appear.
 */
private String getFileName(String name) {
    int separatorIndex = -1;
    if (name.contains(Constants.FORWARD_SLASH)) {
        separatorIndex = name.lastIndexOf(Constants.FORWARD_SLASH);
    } else if (name.contains(Constants.BACK_SLASH)) {
        separatorIndex = name.lastIndexOf(Constants.BACK_SLASH);
    }
    return separatorIndex < 0 ? name : name.substring(separatorIndex + 1);
}
parse name without directories
40,487
/**
 * Executes an external command and returns its exit code paired with its stdout stream.
 * On failure the exit code defaults to 1 and an always-empty stream is returned, so
 * callers never receive null.
 */
protected Pair<Integer, InputStream> executeCommand(String command) {
    int resultVal = 1;
    InputStream inputStream = null;
    try {
        logger.debug("Executing command: {}", command);
        Process process = Runtime.getRuntime().exec(command);
        // BUG FIX: capture stdout before waiting. Waiting first risks a deadlock when the
        // child fills the OS pipe buffer and blocks on write while we block in waitFor().
        inputStream = process.getInputStream();
        resultVal = process.waitFor();
    } catch (InterruptedException e) {
        logger.info("Execution of {} failed: code - {} ; message - {}", command, resultVal, e.getMessage());
        Thread.currentThread().interrupt();
    } catch (IOException e) {
        logger.info("Execution of {} failed: code - {} ; message - {}", command, resultVal, e.getMessage());
    }
    if (inputStream == null) {
        // Fall back to a stream that is immediately at end-of-file.
        inputStream = new InputStream() {
            public int read() throws IOException {
                return -1;
            }
        };
    }
    return new Pair<>(resultVal, inputStream);
}
Executes a shell command and returns its exit code paired with the command's output stream .
40,488
/**
 * Called when the content view has changed. The change cleared all child-views, so the
 * ICON/TITLE/TEXT/WHEN child-views are re-registered here for the new layout, and the
 * notification transition flag is set per layout family.
 */
public void onContentViewChanged(NotificationView view, View contentView, int layoutId) {
    if (DBG) Log.v(TAG, "onContentViewChanged");
    ChildViewManager mgr = view.getChildViewManager();
    if (layoutId == R.layout.notification_simple || layoutId == R.layout.notification_large_icon || layoutId == R.layout.notification_full) {
        // Switcher-based layouts: bind the switcher child views, transitions off.
        view.setNotificationTransitionEnabled(false);
        mgr.setView(ICON, contentView.findViewById(R.id.switcher_icon));
        mgr.setView(TITLE, contentView.findViewById(R.id.switcher_title));
        mgr.setView(TEXT, contentView.findViewById(R.id.switcher_text));
        mgr.setView(WHEN, contentView.findViewById(R.id.switcher_when));
    } else if (layoutId == R.layout.notification_simple_2) {
        // Plain layout: bind the direct child views, transitions on.
        view.setNotificationTransitionEnabled(true);
        mgr.setView(ICON, contentView.findViewById(R.id.icon));
        mgr.setView(TITLE, contentView.findViewById(R.id.title));
        mgr.setView(TEXT, contentView.findViewById(R.id.text));
        mgr.setView(WHEN, contentView.findViewById(R.id.when));
    }
}
Called when content view is changed . All child - views were cleared due the change of content view . You need to re - setup the associated child - views .
40,489
/**
 * Called when a notification is being displayed; pushes icon/title/text/when into the
 * child views. For switcher layouts, the icon/title update is flagged as unchanged
 * (presumably suppressing the switch transition — confirm against ChildViewManager)
 * when the title matches the previous notification and the layout did not change.
 */
public void onShowNotification(NotificationView view, View contentView, NotificationEntry entry, int layoutId) {
    if (DBG) Log.v(TAG, "onShowNotification - " + entry.ID);
    final Drawable icon = entry.iconDrawable;
    final CharSequence title = entry.title;
    final CharSequence text = entry.text;
    // "when" is only shown if the entry requests it.
    final CharSequence when = entry.showWhen ? entry.whenFormatted : null;
    ChildViewManager mgr = view.getChildViewManager();
    if (layoutId == R.layout.notification_simple || layoutId == R.layout.notification_large_icon || layoutId == R.layout.notification_full) {
        boolean titleChanged = true;
        boolean contentChanged = view.isContentLayoutChanged();
        NotificationEntry lastEntry = view.getLastNotification();
        if (!contentChanged && title != null && lastEntry != null && title.equals(lastEntry.title)) {
            titleChanged = false;
        }
        mgr.setImageDrawable(ICON, icon, titleChanged);
        mgr.setText(TITLE, title, titleChanged);
        mgr.setText(TEXT, text);
        mgr.setText(WHEN, when);
    } else if (layoutId == R.layout.notification_simple_2) {
        mgr.setImageDrawable(ICON, icon);
        mgr.setText(TITLE, title);
        mgr.setText(TEXT, text);
        mgr.setText(WHEN, when);
    }
}
Called when a notification is being displayed . This is the place to update the user interface of child - views for the new notification .
40,490
/**
 * Called when an already-visible notification is being updated; refreshes all child
 * views with the 'false' flag (presumably disabling the view-switch transition —
 * confirm against ChildViewManager's setText/setImageDrawable semantics).
 */
public void onUpdateNotification(NotificationView view, View contentView, NotificationEntry entry, int layoutId) {
    if (DBG) Log.v(TAG, "onUpdateNotification - " + entry.ID);
    final Drawable icon = entry.iconDrawable;
    final CharSequence title = entry.title;
    final CharSequence text = entry.text;
    final CharSequence when = entry.showWhen ? entry.whenFormatted : null;
    ChildViewManager mgr = view.getChildViewManager();
    mgr.setImageDrawable(ICON, icon, false);
    mgr.setText(TITLE, title, false);
    mgr.setText(TEXT, text, false);
    mgr.setText(WHEN, when, false);
}
Called when a notification is being updated .
40,491
/**
 * Called when the notification's content view has been clicked.
 * Default implementation only emits a verbose log in debug builds.
 */
public void onClickContentView(NotificationView view, View contentView, NotificationEntry entry) {
    if (DBG) Log.v(TAG, "onClickContentView - " + entry.ID);
}
Called when the view has been clicked .
40,492
/**
 * Builds the "push down" enter animation: the view slides down from 100px
 * above its final position while fading in from fully transparent.
 *
 * @return a shared-interpolator {@link AnimationSet} that keeps its end state
 */
public static Animation pushDownIn() {
    final TranslateAnimation slideDown = new TranslateAnimation(0, 0, -100, 0);
    final AlphaAnimation fadeIn = new AlphaAnimation(0.0f, 1.0f);

    final AnimationSet set = new AnimationSet(true);
    set.setFillAfter(true);
    set.addAnimation(slideDown);
    set.addAnimation(fadeIn);
    return set;
}
Create push down animation for entering .
40,493
/**
 * Validates the agent configuration and collects human-readable error messages.
 *
 * @param projectPerFolder          whether a separate project is created per folder
 * @param configProjectToken        project token from the configuration (may be blank)
 * @param configProjectName         project name from the configuration (may be blank)
 * @param configApiToken            organization API token (required)
 * @param configFilePath            path of the configuration file, used in messages
 * @param archiveDepth              archive extraction depth to validate
 * @param includes                  scanning include patterns (at least one required)
 * @param projectPerFolderIncludes  folders to include when projectPerFolder is set
 * @param pythonIncludes            python requirements-file include patterns; entry 0 is
 *                                  split on whitespace into individual file names
 * @param scanComment               optional scan comment, length-checked
 * @return list of error messages; empty when the configuration is valid
 */
public List<String> getConfigurationErrors(boolean projectPerFolder, String configProjectToken,
        String configProjectName, String configApiToken, String configFilePath, int archiveDepth,
        String[] includes, String[] projectPerFolderIncludes, String[] pythonIncludes, String scanComment) {
    List<String> errors = new ArrayList<>();

    if (StringUtils.isBlank(configApiToken)) {
        errors.add("Could not retrieve " + ORG_TOKEN_PROPERTY_KEY + " property from " + configFilePath);
    }

    boolean noProjectToken = StringUtils.isBlank(configProjectToken);
    boolean noProjectName = StringUtils.isBlank(configProjectName);
    if (noProjectToken && noProjectName && !projectPerFolder) {
        errors.add("Could not retrieve properties " + PROJECT_NAME_PROPERTY_KEY + " and "
                + PROJECT_TOKEN_PROPERTY_KEY + " from " + configFilePath);
    } else if (!noProjectToken && !noProjectName) {
        errors.add("Please choose just one of either " + PROJECT_NAME_PROPERTY_KEY + " or "
                + PROJECT_TOKEN_PROPERTY_KEY + " (and not both)");
    }

    if (archiveDepth < Constants.ZERO || archiveDepth > Constants.MAX_EXTRACTION_DEPTH) {
        errors.add("Error: archiveExtractionDepth value should be greater than 0 and less than "
                + Constants.MAX_EXTRACTION_DEPTH);
    }
    if (includes.length < Constants.ONE || StringUtils.isBlank(includes[Constants.ZERO])) {
        // FIX: message typo "at list one" -> "at least one"
        errors.add("Error: includes parameter must have at least one scanning pattern");
    }
    if (projectPerFolder && projectPerFolderIncludes == null) {
        errors.add("projectPerFolderIncludes parameter is empty, specify folders to include or mark as comment to scan all folders");
    }

    // FIX: the original dereferenced pythonIncludes[0] unconditionally, which throws
    // NullPointerException / ArrayIndexOutOfBoundsException when the property is absent.
    if (pythonIncludes != null && pythonIncludes.length > Constants.ZERO
            && pythonIncludes[Constants.ZERO] != null) {
        String[] requirements = pythonIncludes[Constants.ZERO].split(Constants.WHITESPACE);
        for (String requirement : requirements) {
            if (!requirement.endsWith(Constants.TXT_EXTENSION)) {
                // FIX: added the missing separators around the property key
                // (message previously rendered as "...in propertypython...from...")
                errors.add("Invalid file name: " + requirement + Constants.WHITESPACE + "in property "
                        + PYTHON_REQUIREMENTS_FILE_INCLUDES + " from " + configFilePath);
            }
        }
    }

    if (!StringUtils.isBlank(scanComment) && scanComment.length() > Constants.COMMENT_MAX_LENGTH) {
        // FIX: report the configured limit instead of a hard-coded "1000" that can
        // drift from Constants.COMMENT_MAX_LENGTH
        errors.add("Error: scanComment parameters is longer than " + Constants.COMMENT_MAX_LENGTH + " characters");
    }
    return errors;
}
Check configuration errors
40,494
/**
 * Convenience entry point used by external tools: runs a scan via the main
 * {@code createProjects} overload and flattens the resulting projects into a
 * single list of their dependencies.
 */
public List<DependencyInfo> createProjects(List<String> scannerBaseDirs,
        Map<String, Set<String>> appPathsToDependencyDirs, boolean scmConnector,
        String[] includes, String[] excludes, boolean globCaseSensitive, int archiveExtractionDepth,
        String[] archiveIncludes, String[] archiveExcludes, boolean archiveFastUnpack,
        boolean followSymlinks, Collection<String> excludedCopyrights, boolean partialSha1Match,
        String[] pythonRequirementsFileIncludes) {
    // Delegate to the full overload, then collect every project's dependencies.
    return createProjects(scannerBaseDirs, appPathsToDependencyDirs, scmConnector, includes,
                excludes, globCaseSensitive, archiveExtractionDepth, archiveIncludes,
                archiveExcludes, archiveFastUnpack, followSymlinks, excludedCopyrights,
                partialSha1Match, false, false, pythonRequirementsFileIncludes)
            .keySet()
            .stream()
            .flatMap(projectInfo -> projectInfo.getDependencies().stream())
            .collect(Collectors.toList());
}
This method is usually called externally, by various other tools.
40,495
/**
 * Clones the configured repository into a fresh temporary directory named
 * {@code <type>_<urlName>_<branch>} and returns the checked-out branch folder.
 * Any stale clone directory from a previous run is removed first.
 */
public File cloneRepository() {
    final String tempRoot = new FilesUtils().createTmpFolder(false, TempFolders.UNIQUE_SCM_TEMP_FOLDER);
    final String cloneDirName = getType().toString().toLowerCase()
            + Constants.UNDERSCORE + getUrlName()
            + Constants.UNDERSCORE + getBranch();
    cloneDirectory = new File(tempRoot, cloneDirName);

    // Start from a clean slate in case a previous clone was left behind.
    FilesUtils.deleteDirectory(cloneDirectory);

    logger.info("Cloning repository {} ...this may take a few minutes", getUrl());
    return cloneRepository(cloneDirectory);
}
Clones the configured repository.
40,496
/**
 * Parses the Nuget configuration held in {@code this.xml} — either a
 * packages.config file or a .csproj file, depending on
 * {@code this.nugetConfigFileType} — into dependency records.
 *
 * @param getDependenciesFromReferenceTag when {@code true}, packages declared in the
 *                                        classic packages section are skipped; for csproj
 *                                        files the Reference-tag dependencies are always collected
 * @param nugetDependencyFile             label propagated into dependencies collected
 *                                        from the packages section
 * @return dependencies found (empty on parse failure); each has its systemPath set
 *         to the parsed file's path
 */
public Set<DependencyInfo> parsePackagesConfigFile(boolean getDependenciesFromReferenceTag, String nugetDependencyFile) {
    Persister persister = new Persister();
    Set<DependencyInfo> dependencies = new HashSet<>();
    try {
        if (this.nugetConfigFileType == NugetConfigFileType.CONFIG_FILE_TYPE) {
            NugetPackages packages = persister.read(NugetPackages.class, xml);
            if (!getDependenciesFromReferenceTag) {
                dependencies.addAll(collectDependenciesFromNugetConfig(packages, nugetDependencyFile));
            }
        } else {
            NugetCsprojPackages csprojPackages = persister.read(NugetCsprojPackages.class, xml);
            NugetPackages packages = getNugetPackagesFromCsproj(csprojPackages);
            if (!getDependenciesFromReferenceTag) {
                dependencies.addAll(collectDependenciesFromNugetConfig(packages, nugetDependencyFile));
            }
            dependencies.addAll(getDependenciesFromReferencesTag(csprojPackages));
        }
        // Stamp every dependency with the source file it came from.
        dependencies.forEach(dependencyInfo -> dependencyInfo.setSystemPath(this.xml.getPath()));
    } catch (Exception e) {
        // FIX: the original passed e.getMessage() as an extra argument with no matching
        // placeholder, so SLF4J silently dropped it and no failure detail was logged.
        // Passing the Throwable itself logs the message and the stack trace.
        logger.warn("Unable to parse suspected Nuget package configuration file {}", xml, e);
    }
    return dependencies;
}
Parses a packages.config or .csproj file.
40,497
/**
 * Locates Ruby's gem cache directory by running {@code gem environment gemdir}
 * and appending the cache sub-folder to the reported installation path.
 *
 * @return the cache path, or {@code null} if the command produced no output
 * @throws FileNotFoundException if the computed cache path is not a directory
 */
private String findPathToGems() throws FileNotFoundException {
    String[] commandParams = cli.getCommandParams(GEM, ENVIRONMENT);
    List<String> lines = cli.runCmd(rootDirectory, commandParams);
    String path = null;
    if (!lines.isEmpty()) {
        // First output line is the gem installation directory.
        path = lines.get(0) + fileSeparator + CACHE;
        if (!new File(path).isDirectory()) {
            // FIX: include the offending path in the exception; the original threw a
            // message-less FileNotFoundException, losing all diagnostic context.
            throw new FileNotFoundException("Gem cache directory does not exist: " + path);
        }
    }
    return path;
}
Ruby's gem cache is inside the installation folder. The path can be found by running the command `gem environment gemdir`.
40,498
/**
 * Resolves a gem's version by locating the highest-versioned file for that gem
 * in the cache and extracting the version from its file name.
 *
 * @return the version string, or {@code null} when no matching file exists
 */
private String findGemVersion(String gemName, String pathToGems) {
    final File newestGemFile = findMaxVersionFile(gemName, pathToGems);
    if (newestGemFile == null) {
        return null;
    }
    return getVersionFromFileName(newestGemFile.getName(), gemName);
}
In such cases, look for the relevant gem file in the cache with the highest version.
40,499
/**
 * Inflates the row view that will host the user interface for an incoming
 * notification on the board. The row is not attached to any parent here.
 */
public View makeRowView(NotificationBoard board, NotificationEntry entry, LayoutInflater inflater) {
    final View rowView = inflater.inflate(R.layout.notification_board_row, null, false);
    return rowView;
}
Called to instantiate the view placed in the row view, which is the user interface for the incoming notification.