idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
51,900
def perform_query ( self , query , ** params ) : try : return self . engine . execute ( query , params ) except : print ( "Error with query: {}" . format ( query ) ) raise
Perform a query where query is a string .
51,901
def fetch_entities ( self ) : query = text ( ) response = self . perform_query ( query ) entities = { } domains = set ( ) for [ entity ] in response : domain = entity . split ( "." ) [ 0 ] domains . add ( domain ) entities . setdefault ( domain , [ ] ) . append ( entity ) self . _domains = list ( domains ) self . _enti...
Fetch entities for which we have data .
51,902
def fetch_all_data ( self , limit = 50000 ) : query = text ( ) try : print ( "Querying the database, this could take a while" ) response = self . perform_query ( query , limit = limit ) master_df = pd . DataFrame ( response . fetchall ( ) ) print ( "master_df created successfully." ) self . _master_df = master_df . cop...
Fetch data for all entities .
51,903
def parse_all_data ( self ) : self . _master_df . columns = [ "domain" , "entity" , "state" , "last_changed" ] self . _master_df [ "numerical" ] = self . _master_df [ "state" ] . apply ( lambda x : functions . isfloat ( x ) ) self . _master_df . set_index ( [ "domain" , "entity" , "numerical" , "last_changed" ] , inpla...
Parses the master df .
51,904
def correlations ( self ) : corr_df = self . _sensors_num_df . corr ( ) corr_names = [ ] corrs = [ ] for i in range ( len ( corr_df . index ) ) : for j in range ( len ( corr_df . index ) ) : c_name = corr_df . index [ i ] r_name = corr_df . columns [ j ] corr_names . append ( "%s-%s" % ( c_name , r_name ) ) corrs . app...
Calculate the correlation coefficients .
51,905
def plot ( self , entities : List [ str ] ) : ax = self . _sensors_num_df [ entities ] . plot ( figsize = [ 12 , 6 ] ) ax . legend ( loc = "center left" , bbox_to_anchor = ( 1 , 0.5 ) ) ax . set_xlabel ( "Date" ) ax . set_ylabel ( "Reading" ) return
Basic plot of a numerical sensor data .
51,906
def plot ( self , entity ) : df = self . _binary_df [ [ entity ] ] resampled = df . resample ( "s" ) . ffill ( ) resampled . columns = [ "value" ] fig , ax = plt . subplots ( 1 , 1 , figsize = ( 16 , 2 ) ) ax . fill_between ( resampled . index , y1 = 0 , y2 = 1 , facecolor = "royalblue" , label = "off" ) ax . fill_betw...
Basic plot of a single binary sensor data .
51,907
def is_sf_database ( db , model = None ) : from django . db import connections if db is None : return getattr ( model , '_salesforce_object' , False ) engine = connections [ db ] . settings_dict [ 'ENGINE' ] return engine == 'salesforce.backend' or connections [ db ] . vendor == 'salesforce'
The alias is a Salesforce database .
51,908
def allow_migrate ( self , db , app_label , model_name = None , ** hints ) : if model_name : model = apps . get_model ( app_label , model_name ) else : model = hints . get ( 'model' ) if hasattr ( model , '_salesforce_object' ) : if not ( is_sf_database ( db ) or db == self . sf_alias ) : return False else : if is_sf_d...
Don't attempt to sync SF models to non-SF databases and vice versa.
51,909
def update ( self , ** kwargs ) : assert not self . called self . kw . update ( kwargs ) return self
Customize the lazy field
51,910
def create ( self ) : assert not self . called return self . klass ( * self . args , ** self . kw )
Create a normal field from the lazy field
51,911
def get_queryset ( self ) : if router . is_sf_database ( self . db ) : q = models_sql_query . SalesforceQuery ( self . model , where = compiler . SalesforceWhereNode ) return query . SalesforceQuerySet ( self . model , query = q , using = self . db ) return super ( SalesforceManager , self ) . get_queryset ( )
Returns a QuerySet which access remote SF objects .
51,912
def get_attname_column ( self ) : attname = self . get_attname ( ) if self . db_column is not None : column = self . db_column else : if not self . name . islower ( ) : column = self . name else : column = self . name . title ( ) . replace ( '_' , '' ) if self . sf_custom : column = self . sf_namespace + column + '__c'...
Get the database column name automatically in most cases .
51,913
def extract_values ( query ) : if isinstance ( query , subqueries . UpdateQuery ) : row = query . values return extract_values_inner ( row , query ) if isinstance ( query , subqueries . InsertQuery ) : ret = [ ] for row in query . objs : ret . append ( extract_values_inner ( row , query ) ) return ret raise NotSupporte...
Extract values from insert or update query . Supports bulk_create
51,914
def execute ( self , q , args = ( ) ) : self . rowcount = None response = None if self . query is None : self . execute_select ( q , args ) else : response = self . execute_django ( q , args ) if isinstance ( response , list ) : return if response and response . text : data = response . json ( parse_float = decimal . D...
Send a query to the Salesforce API .
51,915
def execute_django ( self , soql , args = ( ) ) : response = None sqltype = soql . split ( None , 1 ) [ 0 ] . upper ( ) if isinstance ( self . query , subqueries . InsertQuery ) : response = self . execute_insert ( self . query ) elif isinstance ( self . query , subqueries . UpdateQuery ) : response = self . execute_up...
Fixed execute for queries coming from Django query compilers
51,916
def get_pks_from_query ( self , query ) : where = query . where sql = None if where . connector == 'AND' and not where . negated and len ( where . children ) == 1 : child = where . children [ 0 ] if ( child . lookup_name in ( 'exact' , 'in' ) and child . lhs . target . column == 'Id' and not child . bilateral_transform...
Prepare primary keys for update and delete queries
51,917
def versions_request ( self ) : ret = self . handle_api_exceptions ( 'GET' , '' , api_ver = '' ) return [ str_dict ( x ) for x in ret . json ( ) ]
List Available REST API Versions
51,918
def fix_international ( text ) : "Fix excaped international characters back to utf-8" class SmartInternational ( str ) : def __new__ ( cls , text ) : return str . __new__ ( cls , text ) def endswith ( self , string ) : return super ( SmartInternational , self ) . endswith ( str ( string ) ) if PY3 : return text out = [...
Fix escaped international characters back to UTF-8.
51,919
def get_meta ( self , table_name , constraints = None , column_to_field_name = None , is_view = False , is_partition = None ) : meta = [ " class Meta(models.Model.Meta):" , " db_table = '%s'" % table_name ] if self . connection . vendor == 'salesforce' : for line in self . connection . introspection . get_add...
Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name .
51,920
def relative_path ( path ) : return os . path . join ( os . path . dirname ( __file__ ) , path )
Return the given path relative to this file .
51,921
def get_tagged_version ( ) : with open ( relative_path ( 'salesforce/__init__.py' ) , 'r' ) as fd : version = re . search ( r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]' , fd . read ( ) , re . MULTILINE ) . group ( 1 ) return version
Determine the current version of this package .
51,922
def dynamic_start ( self , access_token , instance_url = None , ** kw ) : self . dynamic = { 'access_token' : str ( access_token ) , 'instance_url' : str ( instance_url ) } self . dynamic . update ( kw )
Set the access token dynamically according to the current user .
51,923
def mark_quoted_strings ( sql ) : pm_pattern = re . compile ( r"'[^\\']*(?:\\[\\'][^\\']*)*'" ) bs_pattern = re . compile ( r"\\([\\'])" ) out_pattern = re . compile ( r"^(?:[-!()*+,.:<=>\w\s|%s])*$" ) missing_apostrophe = "invalid character in SOQL or a missing apostrophe" start = 0 out = [ ] params = [ ] for match in...
Mark all quoted strings in the SOQL by
51,924
def subst_quoted_strings ( sql , params ) : parts = sql . split ( '@' ) params_dont_match = "number of parameters doesn' match the transformed query" assert len ( parts ) == len ( params ) + 1 , params_dont_match out = [ ] for i , param in enumerate ( params ) : out . append ( parts [ i ] ) out . append ( "'%s'" % para...
Reverse operation to mark_quoted_strings - substitutes
51,925
def find_closing_parenthesis ( sql , startpos ) : pattern = re . compile ( r'[()]' ) level = 0 opening = [ ] for match in pattern . finditer ( sql , startpos ) : par = match . group ( ) if par == '(' : if level == 0 : opening = match . start ( ) level += 1 if par == ')' : assert level > 0 , "missing '(' before ')'" lev...
Find the pair of opening and closing parentheses .
51,926
def split_subquery ( sql ) : sql , params = mark_quoted_strings ( sql ) sql = simplify_expression ( sql ) _ = params start = 0 out = [ ] subqueries = [ ] pattern = re . compile ( r'\(SELECT\b' , re . I ) match = pattern . search ( sql , start ) while match : out . append ( sql [ start : match . start ( ) + 1 ] + '&' ) ...
Split on subqueries and replace them by & .
51,927
def simplify_expression ( txt ) : minimal = re . sub ( r'\s' , ' ' , re . sub ( r'\s(?=\W)' , '' , re . sub ( r'(?<=\W)\s' , '' , txt . strip ( ) ) ) ) return re . sub ( r'\)(?=\w)' , ') ' , re . sub ( r'(,|\b(?:{}))\(' . format ( '|' . join ( RESERVED_WORDS ) ) , '\\1 (' , minimal ) )
Remove all unnecessary whitespace and some very usual space
51,928
def _make_flat ( self , row_dict , path , subroots ) : out = { } for k , v in row_dict . items ( ) : klc = k . lower ( ) if ( not ( isinstance ( v , dict ) and 'attributes' in v ) or ( 'done' in v and 'records' in v and 'totalSize' in v ) ) : if klc not in subroots : out [ klc ] = v else : strpath = '.' . join ( path +...
Replace the nested dict objects by a flat dict with keys object . object . name .
51,929
def parse_rest_response ( self , records , rowcount , row_type = list ) : if self . is_plain_count : assert list ( records ) == [ ] yield rowcount else : while True : for row_deep in records : assert self . is_aggregation == ( row_deep [ 'attributes' ] [ 'type' ] == 'AggregateResult' ) row_flat = self . _make_flat ( ro...
Parse the REST API response to DB API cursor flat response
51,930
def make_dynamic_fields ( pattern_module , dynamic_field_patterns , attrs ) : import re attr_meta = attrs [ 'Meta' ] db_table = getattr ( attr_meta , 'db_table' , None ) if not db_table : raise RuntimeError ( 'The "db_table" must be set in Meta if "dynamic_field_patterns" is used.' ) is_custom_model = getattr ( attr_me...
Add some Salesforce fields from a pattern_module models . py
51,931
def prepare_exception ( obj , messages = None , response = None , verbs = None ) : verbs = set ( verbs or [ ] ) known_options = [ 'method+url' ] if messages is None : messages = [ ] if isinstance ( messages , ( text_type , str ) ) : messages = [ messages ] assert isinstance ( messages , list ) assert not verbs . differ...
Prepare exception params or only an exception message
51,932
def warn_sf ( messages , response , verbs = None , klass = SalesforceWarning ) : warnings . warn ( klass ( messages , response , verbs ) , stacklevel = 2 )
Issue a warning SalesforceWarning with message combined from message and data from SFDC response
51,933
def get_from_clause ( self ) : self . query_topology ( ) root_table = self . soql_trans [ self . root_alias ] return [ root_table ] , [ ]
Return the FROM clause converted the SOQL dialect .
51,934
def quote_name_unless_alias ( self , name ) : r = self . connection . ops . quote_name ( name ) self . quote_cache [ name ] = r return r
A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. Mostly used during the ORDER BY clause.
51,935
def get_soap_client ( db_alias , client_class = None ) : if not beatbox : raise InterfaceError ( "To use SOAP API, you'll need to install the Beatbox package." ) if client_class is None : client_class = beatbox . PythonClient soap_client = client_class ( ) connection = connections [ db_alias ] cursor = connection . cur...
Create the SOAP client for the current user logged in the db_alias
51,936
def signalize_extensions ( ) : warnings . warn ( "DB-API extension cursor.rownumber used" , SalesforceWarning ) warnings . warn ( "DB-API extension connection.<exception> used" , SalesforceWarning ) warnings . warn ( "DB-API extension cursor.connection used" , SalesforceWarning ) warnings . warn ( "DB-API extension cur...
DB API 2 . 0 extension are reported by warnings at run - time .
51,937
def arg_to_soql ( arg ) : conversion = sql_conversions . get ( type ( arg ) ) if conversion : return conversion ( arg ) for type_ in subclass_conversions : if isinstance ( arg , type_ ) : return sql_conversions [ type_ ] ( arg ) return sql_conversions [ str ] ( arg )
Perform necessary SOQL quoting on the arg .
51,938
def arg_to_json ( arg ) : conversion = json_conversions . get ( type ( arg ) ) if conversion : return conversion ( arg ) for type_ in subclass_conversions : if isinstance ( arg , type_ ) : return json_conversions [ type_ ] ( arg ) return json_conversions [ str ] ( arg )
Perform necessary JSON conversion on the arg .
51,939
def merge_dict ( dict_1 , * other , ** kw ) : tmp = dict_1 . copy ( ) for x in other : tmp . update ( x ) tmp . update ( kw ) return tmp
Merge two or more dict including kw into result dict .
51,940
def make_session ( self ) : with connect_lock : if self . _sf_session is None : sf_session = requests . Session ( ) sf_session . auth = SalesforcePasswordAuth ( db_alias = self . alias , settings_dict = self . settings_dict ) sf_instance_url = sf_session . auth . instance_url sf_requests_adapter = HTTPAdapter ( max_ret...
Authenticate and get the name of assigned SFDC data server
51,941
def rest_api_url ( self , * url_parts , ** kwargs ) : url_parts = list ( url_parts ) if url_parts and re . match ( r'^(?:https|mock)://' , url_parts [ 0 ] ) : return '/' . join ( url_parts ) relative = kwargs . pop ( 'relative' , False ) api_ver = kwargs . pop ( 'api_ver' , None ) api_ver = api_ver if api_ver is not No...
Join the URL of REST_API
51,942
def raise_errors ( self , response ) : verb = self . debug_verbs method = response . request . method data = None is_json = 'json' in response . headers . get ( 'Content-Type' , '' ) and response . text if is_json : data = json . loads ( response . text ) if not ( isinstance ( data , list ) and data and 'errorCode' in ...
The innermost part - report errors by exceptions
51,943
def composite_request ( self , data ) : post_data = { 'compositeRequest' : data , 'allOrNone' : True } resp = self . handle_api_exceptions ( 'POST' , 'composite' , json = post_data ) comp_resp = resp . json ( ) [ 'compositeResponse' ] is_ok = all ( x [ 'httpStatusCode' ] < 400 for x in comp_resp ) if is_ok : return res...
Call a composite request with subrequests error handling
51,944
def align_after ( self , offset ) : f = self . reader if offset <= 0 : f . seek ( 0 ) self . _block_count = 0 self . _read_header ( ) return sm = self . sync_marker sml = len ( sm ) pos = offset while pos < self . file_length - sml : f . seek ( pos ) data = f . read ( self . FORWARD_WINDOW_SIZE ) sync_offset = data . f...
Search for a sync point after offset and align just after that .
51,945
def get_progress ( self ) : pos = self . reader . reader . tell ( ) return min ( ( pos - self . region_start ) / float ( self . region_end - self . region_start ) , 1.0 )
Give a rough estimate of the progress done .
51,946
def is_exe ( fpath ) : return os . path . isfile ( fpath ) and os . access ( fpath , os . X_OK )
Path references an executable file .
51,947
def is_readable ( fpath ) : return os . path . isfile ( fpath ) and os . access ( fpath , os . R_OK )
Path references a readable file .
51,948
def is_local ( self , hadoop_conf = None , hadoop_home = None ) : conf = self . hadoop_params ( hadoop_conf , hadoop_home ) keys = ( 'mapreduce.framework.name' , 'mapreduce.jobtracker.address' , 'mapred.job.tracker' ) for k in keys : if conf . get ( k , 'local' ) . lower ( ) != 'local' : return False return True
\ Is Hadoop configured to run in local mode?
51,949
def abspath ( hdfs_path , user = None , local = False ) : if local : return 'file:%s' % os . path . abspath ( hdfs_path ) if isfull ( hdfs_path ) : return hdfs_path hostname , port , path = split ( hdfs_path , user = user ) if hostname : fs = hdfs_fs . hdfs ( hostname , port ) apath = join ( "hdfs://%s:%s" % ( fs . hos...
Return an absolute path for hdfs_path .
51,950
def dirname ( hdfs_path ) : scheme , netloc , path = parse ( hdfs_path ) return unparse ( scheme , netloc , os . path . dirname ( path ) )
Return the directory component of hdfs_path .
51,951
def expanduser ( path ) : if hdfs_fs . default_is_local ( ) : return os . path . expanduser ( path ) m = re . match ( r'^~([^/]*)' , path ) if m is None : return path user = m . groups ( ) [ 0 ] or common . DEFAULT_USER return '/user/%s%s' % ( user , path [ m . end ( 1 ) : ] )
Replace initial ~ or ~user with the user's home directory.
51,952
def normpath ( path ) : scheme , netloc , path_ = parse ( path ) return unparse ( scheme , netloc , os . path . normpath ( path_ ) )
Normalize path collapsing redundant separators and up - level refs .
51,953
def realpath ( path ) : scheme , netloc , path_ = parse ( path ) if scheme == 'file' or hdfs_fs . default_is_local ( ) : return unparse ( scheme , netloc , os . path . realpath ( path_ ) ) return path
Return path with symlinks resolved .
51,954
def default_is_local ( hadoop_conf = None , hadoop_home = None ) : params = pydoop . hadoop_params ( hadoop_conf , hadoop_home ) for k in 'fs.defaultFS' , 'fs.default.name' : if not params . get ( k , 'file:' ) . startswith ( 'file:' ) : return False return True
\ Is Hadoop configured to use the local file system?
51,955
def open_file ( self , path , mode = "r" , buff_size = 0 , replication = 0 , blocksize = 0 , encoding = None , errors = None ) : _complain_ifclosed ( self . closed ) if not path : raise ValueError ( "Empty path" ) m , is_text = common . parse_mode ( mode ) if not self . host : fret = local_file ( self , path , m ) if i...
Open an HDFS file .
51,956
def capacity ( self ) : _complain_ifclosed ( self . closed ) if not self . __status . host : raise RuntimeError ( 'Capacity is not defined for a local fs' ) return self . fs . get_capacity ( )
Return the raw capacity of the filesystem .
51,957
def copy ( self , from_path , to_hdfs , to_path ) : _complain_ifclosed ( self . closed ) if isinstance ( to_hdfs , self . __class__ ) : to_hdfs = to_hdfs . fs return self . fs . copy ( from_path , to_hdfs , to_path )
Copy file from one filesystem to another .
51,958
def delete ( self , path , recursive = True ) : _complain_ifclosed ( self . closed ) return self . fs . delete ( path , recursive )
Delete path .
51,959
def exists ( self , path ) : _complain_ifclosed ( self . closed ) return self . fs . exists ( path )
Check if a given path exists on the filesystem .
51,960
def get_path_info ( self , path ) : _complain_ifclosed ( self . closed ) return self . fs . get_path_info ( path )
Get information about path as a dict of properties .
51,961
def list_directory ( self , path ) : r _complain_ifclosed ( self . closed ) return self . fs . list_directory ( path )
r Get list of files and directories for path \ .
51,962
def rename ( self , from_path , to_path ) : _complain_ifclosed ( self . closed ) return self . fs . rename ( from_path , to_path )
Rename file .
51,963
def set_replication ( self , path , replication ) : r _complain_ifclosed ( self . closed ) return self . fs . set_replication ( path , replication )
r Set the replication of path to replication \ .
51,964
def set_working_directory ( self , path ) : r _complain_ifclosed ( self . closed ) return self . fs . set_working_directory ( path )
r Set the working directory to path \ . All relative paths will be resolved relative to it .
51,965
def working_directory ( self ) : _complain_ifclosed ( self . closed ) wd = self . fs . get_working_directory ( ) return wd
Get the current working directory .
51,966
def __compute_mode_from_string ( self , path , mode_string ) : Char_to_perm_byte = { 'r' : 4 , 'w' : 2 , 'x' : 1 } Fields = ( ( 'u' , 6 ) , ( 'g' , 3 ) , ( 'o' , 0 ) ) m = re . match ( r"\s*([ugoa]*)([-+=])([rwx]*)\s*" , mode_string ) if not m : raise ValueError ( "Invalid mode string %s" % mode_string ) who = m . grou...
Scan a unix - style mode string and apply it to path .
51,967
def utime ( self , path , mtime , atime ) : _complain_ifclosed ( self . closed ) return self . fs . utime ( path , int ( mtime ) , int ( atime ) )
Change file last access and modification times .
51,968
def rm_rf ( path , dry_run = False ) : log . info ( "removing %s" % path ) if dry_run : return try : if os . path . isdir ( path ) and not os . path . islink ( path ) : shutil . rmtree ( path ) else : os . remove ( path ) except OSError : pass
Remove a file or directory tree .
51,969
def __finalize_hdfs ( self , ext ) : java_home = jvm . get_java_home ( ) jvm_lib_path , _ = jvm . get_jvm_lib_path_and_name ( java_home ) ext . include_dirs = jvm . get_include_dirs ( ) + ext . include_dirs ext . libraries = jvm . get_libraries ( ) ext . library_dirs = [ os . path . join ( java_home , "Libraries" ) , j...
\ Adds a few bits that depend on the specific environment .
51,970
def run_tool_cmd ( tool , cmd , args = None , properties = None , hadoop_conf_dir = None , logger = None , keep_streams = True ) : if logger is None : logger = utils . NullLogger ( ) _args = [ tool ] if hadoop_conf_dir : _args . extend ( [ "--config" , hadoop_conf_dir ] ) _args . append ( cmd ) if properties : _args . ...
Run a Hadoop command .
51,971
def get_task_trackers ( properties = None , hadoop_conf_dir = None , offline = False ) : if offline : if not hadoop_conf_dir : hadoop_conf_dir = pydoop . hadoop_conf ( ) slaves = os . path . join ( hadoop_conf_dir , "slaves" ) try : with open ( slaves ) as f : task_trackers = [ ( l . strip ( ) , 0 ) for l in f ] except...
Get the list of task trackers in the Hadoop cluster .
51,972
def get_num_nodes ( properties = None , hadoop_conf_dir = None , offline = False ) : return len ( get_task_trackers ( properties , hadoop_conf_dir , offline ) )
Get the number of task trackers in the Hadoop cluster .
51,973
def dfs ( args = None , properties = None , hadoop_conf_dir = None ) : return run_class ( "org.apache.hadoop.fs.FsShell" , args , properties , hadoop_conf_dir = hadoop_conf_dir , keep_streams = True )
Run the Hadoop file system shell .
51,974
def run_pipes ( executable , input_path , output_path , more_args = None , properties = None , force_pydoop_submitter = False , hadoop_conf_dir = None , logger = None , keep_streams = False ) : if logger is None : logger = utils . NullLogger ( ) if not hdfs . path . exists ( executable ) : raise IOError ( "executable %...
Run a pipes command .
51,975
def collect_output ( mr_out_dir , out_file = None ) : if out_file is None : output = [ ] for fn in iter_mr_out_files ( mr_out_dir ) : with hdfs . open ( fn , "rt" ) as f : output . append ( f . read ( ) ) return "" . join ( output ) else : block_size = 16777216 with open ( out_file , 'a' ) as o : for fn in iter_mr_out_...
Return all mapreduce output in mr_out_dir .
51,976
def set_output ( self , output ) : self . output = output self . logger . info ( "assigning output to %s" , self . output )
Set the output path for the job . Optional if the runner has been instantiated with a prefix .
51,977
def set_exe ( self , pipes_code ) : if not self . output : raise RuntimeError ( "no output directory, can't create launcher" ) parent = hdfs . path . dirname ( hdfs . path . abspath ( self . output . rstrip ( "/" ) ) ) self . exe = hdfs . path . join ( parent , utils . make_random_str ( ) ) hdfs . dump ( pipes_code , s...
Dump launcher code to the distributed file system .
51,978
def dump ( data , hdfs_path , ** kwargs ) : kwargs [ "mode" ] = "w" if isinstance ( data , bintype ) else "wt" with open ( hdfs_path , ** kwargs ) as fo : i = 0 bufsize = common . BUFSIZE while i < len ( data ) : fo . write ( data [ i : i + bufsize ] ) i += bufsize fo . fs . close ( )
\ Write data to hdfs_path .
51,979
def load ( hdfs_path , ** kwargs ) : m , _ = common . parse_mode ( kwargs . get ( "mode" , "r" ) ) if m != "r" : raise ValueError ( "opening mode must be readonly" ) with open ( hdfs_path , ** kwargs ) as fi : data = fi . read ( ) fi . fs . close ( ) return data
\ Read the content of hdfs_path and return it .
51,980
def cp ( src_hdfs_path , dest_hdfs_path , ** kwargs ) : src , dest = { } , { } try : for d , p in ( ( src , src_hdfs_path ) , ( dest , dest_hdfs_path ) ) : d [ "host" ] , d [ "port" ] , d [ "path" ] = path . split ( p ) d [ "fs" ] = hdfs ( d [ "host" ] , d [ "port" ] ) try : src [ "info" ] = src [ "fs" ] . get_path_inf...
\ Copy the contents of src_hdfs_path to dest_hdfs_path .
51,981
def put ( src_path , dest_hdfs_path , ** kwargs ) : cp ( path . abspath ( src_path , local = True ) , dest_hdfs_path , ** kwargs )
\ Copy the contents of src_path to dest_hdfs_path .
51,982
def get ( src_hdfs_path , dest_path , ** kwargs ) : cp ( src_hdfs_path , path . abspath ( dest_path , local = True ) , ** kwargs )
\ Copy the contents of src_hdfs_path to dest_path .
51,983
def mkdir ( hdfs_path , user = None ) : host , port , path_ = path . split ( hdfs_path , user ) fs = hdfs ( host , port , user ) retval = fs . create_directory ( path_ ) fs . close ( ) return retval
Create a directory and its parents as needed .
51,984
def lsl ( hdfs_path , user = None , recursive = False ) : host , port , path_ = path . split ( hdfs_path , user ) fs = hdfs ( host , port , user ) if not recursive : dir_list = fs . list_directory ( path_ ) else : treewalk = fs . walk ( path_ ) top = next ( treewalk ) if top [ 'kind' ] == 'directory' : dir_list = list ...
Return a list of dictionaries of file properties .
51,985
def ls ( hdfs_path , user = None , recursive = False ) : dir_list = lsl ( hdfs_path , user , recursive ) return [ d [ "name" ] for d in dir_list ]
Return a list of hdfs paths .
51,986
def move ( src , dest , user = None ) : src_host , src_port , src_path = path . split ( src , user ) dest_host , dest_port , dest_path = path . split ( dest , user ) src_fs = hdfs ( src_host , src_port , user ) dest_fs = hdfs ( dest_host , dest_port , user ) try : retval = src_fs . move ( src_path , dest_fs , dest_path...
Move or rename src to dest .
51,987
def renames ( from_path , to_path , user = None ) : to_dir = path . dirname ( to_path ) if to_dir : mkdir ( to_dir , user = user ) rename ( from_path , to_path , user = user )
Rename from_path to to_path creating parents as needed .
51,988
def readline ( self ) : _complain_ifclosed ( self . closed ) line = self . f . readline ( ) if self . __encoding : return line . decode ( self . __encoding , self . __errors ) else : return line
Read and return a line of text .
51,989
def pread ( self , position , length ) : r _complain_ifclosed ( self . closed ) if position > self . size : raise IOError ( "position cannot be past EOF" ) if length < 0 : length = self . size - position data = self . f . raw . pread ( position , length ) if self . __encoding : return data . decode ( self . __encoding ...
r Read length bytes of data from the file starting from position \ .
51,990
def read ( self , length = - 1 ) : _complain_ifclosed ( self . closed ) if length < 0 : length = self . size chunks = [ ] while 1 : if length <= 0 : break c = self . f . read ( min ( self . buff_size , length ) ) if c == b"" : break chunks . append ( c ) length -= len ( c ) data = b"" . join ( chunks ) if self . __enco...
Read length bytes from the file . If length is negative or omitted read all data until EOF .
51,991
def seek ( self , position , whence = os . SEEK_SET ) : _complain_ifclosed ( self . closed ) return self . f . seek ( position , whence )
Seek to position in file .
51,992
def write ( self , data ) : _complain_ifclosed ( self . closed ) if self . __encoding : self . f . write ( data . encode ( self . __encoding , self . __errors ) ) return len ( data ) else : return self . f . write ( data )
Write data to the file .
51,993
def set_args ( self , args , unknown_args = None ) : if unknown_args is None : unknown_args = [ ] self . logger . setLevel ( getattr ( logging , args . log_level ) ) parent = hdfs . path . dirname ( hdfs . path . abspath ( args . output . rstrip ( "/" ) ) ) self . remote_wd = hdfs . path . join ( parent , utils . make_...
Configure job based on the arguments provided .
51,994
def __warn_user_if_wd_maybe_unreadable ( self , abs_remote_path ) : host , port , path = hdfs . path . split ( abs_remote_path ) if host == '' and port == 0 : host_port = "file:///" else : host_port = "hdfs://%s:%s/" % ( host , port ) path_pieces = path . strip ( '/' ) . split ( os . path . sep ) fs = hdfs . hdfs ( hos...
Check directories above the remote module and issue a warning if they are not traversable by all users .
51,995
def __setup_remote_paths ( self ) : self . logger . debug ( "remote_wd: %s" , self . remote_wd ) self . logger . debug ( "remote_exe: %s" , self . remote_exe ) self . logger . debug ( "remotes: %s" , self . files_to_upload ) if self . args . module : self . logger . debug ( 'Generated pipes_code:\n\n %s' , self . _gene...
Actually create the working directory and copy the module into it .
51,996
def docker_client ( ) : cert_path = os . environ . get ( 'DOCKER_CERT_PATH' , '' ) if cert_path == '' : cert_path = os . path . join ( os . environ . get ( 'HOME' , '' ) , '.docker' ) base_url = os . environ . get ( 'DOCKER_HOST' ) tls_config = None if os . environ . get ( 'DOCKER_TLS_VERIFY' , '' ) != '' : parts = bas...
Returns a docker - py client configured using environment variables according to the same logic as the official Docker client .
51,997
def get_java_home ( ) : error = RuntimeError ( "java home not found, try setting JAVA_HOME" ) try : return os . environ [ "JAVA_HOME" ] except KeyError : wd = tempfile . mkdtemp ( prefix = 'pydoop_' ) jclass = "Temp" jsrc = os . path . join ( wd , "%s.java" % jclass ) with open ( jsrc , "w" ) as f : f . write ( JPROG ....
\ Try getting JAVA_HOME from system properties .
51,998
def run_task ( factory , ** kwargs ) : context = TaskContext ( factory , ** kwargs ) pstats_dir = kwargs . get ( "pstats_dir" , os . getenv ( PSTATS_DIR ) ) if pstats_dir : import cProfile import tempfile import pydoop . hdfs as hdfs hdfs . mkdir ( pstats_dir ) fd , pstats_fn = tempfile . mkstemp ( suffix = ".pstats" )...
\ Run a MapReduce task .
51,999
def progress ( self ) : now = time ( ) if now - self . last_progress_t > 1 : self . last_progress_t = now if self . status : self . uplink . status ( self . status ) self . status = None self . __spill_counters ( ) self . uplink . progress ( self . progress_value ) self . uplink . flush ( )
\ Report progress to the Java side .