idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
51,400 | def parse ( self ) : try : if not os . path . getsize ( self . ns . pathname ) : self . job . LOG . warn ( "Ignoring 0-byte metafile '%s'" % ( self . ns . pathname , ) ) return self . metadata = metafile . checked_open ( self . ns . pathname ) except EnvironmentError as exc : self . job . LOG . error ( "Can't read meta... | Parse metafile and check pre - conditions . |
51,401 | def addinfo ( self ) : self . ns . watch_path = self . job . config . path self . ns . relpath = None for watch in self . job . config . path : if self . ns . pathname . startswith ( watch . rstrip ( '/' ) + '/' ) : self . ns . relpath = os . path . dirname ( self . ns . pathname ) [ len ( watch . rstrip ( '/' ) ) + 1 ... | Add known facts to templating namespace . |
51,402 | def load ( self ) : if not self . ns . info_hash and not self . parse ( ) : return self . addinfo ( ) try : start_it = self . job . config . load_mode . lower ( ) in ( "start" , "started" ) queue_it = self . job . config . queued if "start" in self . ns . flags : start_it = True elif "load" in self . ns . flags : start... | Load metafile into client . |
51,403 | def handle_path ( self , event ) : self . job . LOG . debug ( "Notification %r" % event ) if event . dir : return if any ( event . pathname . endswith ( i ) for i in self . METAFILE_EXT ) : MetafileHandler ( self . job , event . pathname ) . handle ( ) elif os . path . basename ( event . pathname ) == "watch.ini" : sel... | Handle a path - related event . |
51,404 | def setup ( self ) : if not pyinotify . WatchManager : raise error . UserError ( "You need to install 'pyinotify' to use %s (%s)!" % ( self . __class__ . __name__ , pyinotify . _import_error ) ) self . manager = pyinotify . WatchManager ( ) self . handler = TreeWatchHandler ( job = self ) self . notifier = pyinotify . ... | Set up inotify manager . |
51,405 | def get_filetypes ( filelist , path = None , size = os . path . getsize ) : path = path or ( lambda _ : _ ) histo = defaultdict ( int ) for entry in filelist : ext = os . path . splitext ( path ( entry ) ) [ 1 ] . lstrip ( '.' ) . lower ( ) if ext and ext [ 0 ] == 'r' and ext [ 1 : ] . isdigit ( ) : ext = "rar" elif ex... | Get a sorted list of file types and their weight in percent from an iterable of file names . |
51,406 | def name_trait ( name , add_info = False ) : kind , info = None , { } if name and not name . startswith ( "VTS_" ) : lower_name = name . lower ( ) trait_patterns = ( ( "tv" , TV_PATTERNS , "show" ) , ( "movie" , MOVIE_PATTERNS , "title" ) ) if any ( i in lower_name for i in _DEFINITELY_TV ) : kind = "tv" trait_patterns... | Determine content type from name . |
51,407 | def detect_traits ( name = None , alias = None , filetype = None ) : result = [ ] if filetype : filetype = filetype . lstrip ( '.' ) theme = config . traits_by_alias . get ( alias ) if alias and theme : result = [ theme , filetype or "other" ] elif filetype in KIND_AUDIO : result = [ "audio" , filetype ] elif filetype ... | Build traits list from passed attributes . |
51,408 | def console_progress ( ) : def progress ( totalhashed , totalsize ) : "Helper" msg = " " * 30 if totalhashed < totalsize : msg = "%5.1f%% complete" % ( totalhashed * 100.0 / totalsize ) sys . stdout . write ( msg + " \r" ) sys . stdout . flush ( ) try : return progress if sys . stdout . isatty ( ) else None except Attr... | Return a progress indicator for consoles if stdout is a tty . |
51,409 | def check_info ( info ) : if not isinstance ( info , dict ) : raise ValueError ( "bad metainfo - not a dictionary" ) pieces = info . get ( "pieces" ) if not isinstance ( pieces , basestring ) or len ( pieces ) % 20 != 0 : raise ValueError ( "bad metainfo - bad pieces key" ) piece_size = info . get ( "piece length" ) if... | Validate info dict . |
51,410 | def check_meta ( meta ) : if not isinstance ( meta , dict ) : raise ValueError ( "bad metadata - not a dictionary" ) if not isinstance ( meta . get ( "announce" ) , basestring ) : raise ValueError ( "bad announce URL - not a string" ) check_info ( meta . get ( "info" ) ) return meta | Validate meta dict . |
51,411 | def clean_meta ( meta , including_info = False , logger = None ) : modified = set ( ) for key in meta . keys ( ) : if [ key ] not in METAFILE_STD_KEYS : if logger : logger ( "Removing key %r..." % ( key , ) ) del meta [ key ] modified . add ( key ) if including_info : for key in meta [ "info" ] . keys ( ) : if [ "info"... | Clean meta dict . Optionally log changes using the given logger . |
51,412 | def sanitize ( meta , diagnostics = False ) : bad_encodings , bad_fields = set ( ) , set ( ) def sane_encoding ( field , text ) : "Transcoding helper." for encoding in ( 'utf-8' , meta . get ( 'encoding' , None ) , 'cp1252' ) : if encoding : try : u8_text = text . decode ( encoding ) . encode ( "utf-8" ) if encoding !=... | Try to fix common problems especially transcode non - standard string encodings . |
51,413 | def add_fast_resume ( meta , datapath ) : files = meta [ "info" ] . get ( "files" , None ) single = files is None if single : if os . path . isdir ( datapath ) : datapath = os . path . join ( datapath , meta [ "info" ] [ "name" ] ) files = [ Bunch ( path = [ os . path . abspath ( datapath ) ] , length = meta [ "info" ]... | Add fast resume data to a metafile dict . |
51,414 | def data_size ( metadata ) : info = metadata [ 'info' ] if 'length' in info : total_size = info [ 'length' ] else : total_size = sum ( [ f [ 'length' ] for f in info [ 'files' ] ] ) return total_size | Calculate the size of a torrent based on parsed metadata . |
51,415 | def checked_open ( filename , log = None , quiet = False ) : with open ( filename , "rb" ) as handle : raw_data = handle . read ( ) data = bencode . bdecode ( raw_data ) try : check_meta ( data ) if raw_data != bencode . bencode ( data ) : raise ValueError ( "Bad bencoded data - dict keys out of order?" ) except ValueE... | Open and validate the given metafile . Optionally provide diagnostics on the passed logger for invalid metafiles which then just cause a warning but no exception . quiet can supress that warning . |
51,416 | def format ( self , obj , context , maxlevels , level ) : if isinstance ( obj , basestring ) and "://" in fmt . to_unicode ( obj ) : obj = mask_keys ( obj ) return pprint . PrettyPrinter . format ( self , obj , context , maxlevels , level ) | Mask obj if it looks like an URL then pass it to the super class . |
51,417 | def _get_datapath ( self ) : if self . _datapath is None : raise OSError ( errno . ENOENT , "You didn't provide any datapath for %r" % self . filename ) return self . _datapath | Get a valid datapath else raise an exception . |
51,418 | def _set_datapath ( self , datapath ) : if datapath : self . _datapath = datapath . rstrip ( os . sep ) self . _fifo = int ( stat . S_ISFIFO ( os . stat ( self . datapath ) . st_mode ) ) else : self . _datapath = None self . _fifo = False | Set a datapath . |
51,419 | def walk ( self ) : if self . _fifo : if self . _fifo > 1 : raise RuntimeError ( "INTERNAL ERROR: FIFO read twice!" ) self . _fifo += 1 with open ( self . datapath , "r" ) as fifo : while True : relpath = fifo . readline ( ) . rstrip ( '\n' ) if not relpath : break self . LOG . debug ( "Read relative path %r from FIFO.... | Generate paths in self . datapath . |
51,420 | def _calc_size ( self ) : return sum ( os . path . getsize ( filename ) for filename in self . walk ( ) ) | Get total size of self . datapath . |
51,421 | def _make_info ( self , piece_size , progress , walker , piece_callback = None ) : file_list = [ ] pieces = [ ] hashing_secs = time . time ( ) totalsize = - 1 if self . _fifo else self . _calc_size ( ) totalhashed = 0 sha1sum = hashlib . sha1 ( ) done = 0 filename = None for filename in walker : filesize = os . path . ... | Create info dict . |
51,422 | def _make_meta ( self , tracker_url , root_name , private , progress ) : if self . _fifo : piece_size_exp = 20 else : total_size = self . _calc_size ( ) if total_size : piece_size_exp = int ( math . log ( total_size ) / math . log ( 2 ) ) - 9 else : piece_size_exp = 0 piece_size_exp = min ( max ( 15 , piece_size_exp ) ... | Create torrent dict . |
51,423 | def check ( self , metainfo , datapath , progress = None ) : if datapath : self . datapath = datapath def check_piece ( filename , piece ) : "Callback for new piece" if piece != metainfo [ "info" ] [ "pieces" ] [ check_piece . piece_index : check_piece . piece_index + 20 ] : self . LOG . warn ( "Piece #%d: Hashes diffe... | Check piece hashes of a metafile against the given datapath . |
51,424 | def _start ( self , items ) : startable = [ i for i in items if self . config . startable . match ( i ) ] if not startable : self . LOG . debug ( "Checked %d item(s), none startable according to [ %s ]" , len ( items ) , self . config . startable ) return now = time . time ( ) if now < self . last_start : self . last_s... | Start some items if conditions are met . |
51,425 | def run ( self ) : try : self . proxy = config_ini . engine . open ( ) items = list ( config_ini . engine . items ( self . VIEWNAME , cache = False ) ) if self . sort_key : items . sort ( key = self . sort_key ) self . _start ( items ) self . LOG . debug ( "%s - %s" % ( config_ini . engine . engine_id , self . proxy ) ... | Queue manager job callback . |
51,426 | def print_help_fields ( ) : def custom_manifold ( ) : "named rTorrent custom attribute, e.g. 'custom_completion_target'" return ( "custom_KEY" , custom_manifold ) def kind_manifold ( ) : "file types that contribute at least N% to the item's total size" return ( "kind_N" , kind_manifold ) print ( '' ) print ( "Fields ar... | Print help about fields and field formatters . |
51,427 | def add ( self , field , val ) : "Add a sample" if engine . FieldDefinition . FIELDS [ field ] . _matcher is matching . TimeFilter : val = self . _basetime - val try : self . total [ field ] += val self . min [ field ] = min ( self . min [ field ] , val ) if field in self . min else val self . max [ field ] = max ( sel... | Add a sample |
51,428 | def help_completion_fields ( self ) : for name , field in sorted ( engine . FieldDefinition . FIELDS . items ( ) ) : if issubclass ( field . _matcher , matching . BoolFilter ) : yield "%s=no" % ( name , ) yield "%s=yes" % ( name , ) continue elif issubclass ( field . _matcher , matching . PatternFilter ) : yield "%s=" ... | Return valid field names . |
51,429 | def format_item ( self , item , defaults = None , stencil = None ) : from pyrobase . osutil import shell_escape try : item_text = fmt . to_console ( formatting . format_item ( self . options . output_format , item , defaults ) ) except ( NameError , ValueError , TypeError ) , exc : self . fatal ( "Trouble with formatti... | Format an item . |
51,430 | def emit ( self , item , defaults = None , stencil = None , to_log = False , item_formatter = None ) : item_text = self . format_item ( item , defaults , stencil ) if item_formatter : item_text = item_formatter ( item_text ) if item is None and os . isatty ( sys . stdout . fileno ( ) ) : item_text = '' . join ( ( confi... | Print an item to stdout or the log on INFO level . |
51,431 | def validate_output_format ( self , default_format ) : output_format = self . options . output_format if output_format is None : output_format = default_format output_format = config . formats . get ( output_format , output_format ) if re . match ( r"^[,._0-9a-zA-Z]+$" , output_format ) : self . plain_output_format = T... | Prepare output format for later use . |
51,432 | def get_output_fields ( self ) : emit_fields = list ( i . lower ( ) for i in re . sub ( r"[^_A-Z]+" , ' ' , self . format_item ( None ) ) . split ( ) ) result = [ ] for name in emit_fields [ : ] : if name not in engine . FieldDefinition . FIELDS : self . LOG . warn ( "Omitted unknown name '%s' from statistics and outpu... | Get field names from output template . |
51,433 | def validate_sort_fields ( self ) : sort_fields = ',' . join ( self . options . sort_fields ) if sort_fields == '*' : sort_fields = self . get_output_fields ( ) return formatting . validate_sort_fields ( sort_fields or config . sort_fields ) | Take care of sorting . |
51,434 | def show_in_view ( self , sourceview , matches , targetname = None ) : append = self . options . append_view or self . options . alter_view == 'append' remove = self . options . alter_view == 'remove' action_name = ', appending to' if append else ', removing from' if remove else ' into' targetname = config . engine . s... | Show search result in ncurses view . |
51,435 | def heatmap ( self , df , imagefile ) : import seaborn as sns import matplotlib . ticker as tkr import matplotlib . pyplot as plt from matplotlib . colors import LinearSegmentedColormap sns . set ( ) with sns . axes_style ( 'whitegrid' ) : fig , ax = plt . subplots ( figsize = ( 5 , 11 ) ) cmax = max ( df [ self . args... | Create the heat map . |
51,436 | def mainloop ( self ) : proxy = config . engine . open ( ) views = [ x for x in sorted ( proxy . view . list ( ) ) if x . startswith ( self . PREFIX ) ] current_view = real_current_view = proxy . ui . current_view ( ) if current_view not in views : if views : current_view = views [ 0 ] else : raise error . UserError ( ... | Manage category views . |
51,437 | def _custom_fields ( ) : import os from pyrocore . torrent import engine , matching from pyrocore . util import fmt def has_room ( obj ) : "Check disk space." pathname = obj . path if pathname and not os . path . exists ( pathname ) : pathname = os . path . dirname ( pathname ) if pathname and os . path . exists ( path... | Yield custom field definitions . |
51,438 | def engine_data ( engine ) : views = ( "default" , "main" , "started" , "stopped" , "complete" , "incomplete" , "seeding" , "leeching" , "active" , "messages" ) methods = [ "throttle.global_up.rate" , "throttle.global_up.max_rate" , "throttle.global_down.rate" , "throttle.global_down.max_rate" , ] proxy = engine . open... | Get important performance data and metadata from rTorrent . |
51,439 | def _write_pidfile ( pidfile ) : pid = str ( os . getpid ( ) ) handle = open ( pidfile , 'w' ) try : handle . write ( "%s\n" % pid ) finally : handle . close ( ) | Write file with current process ID . |
51,440 | def guard ( pidfile , guardfile = None ) : if guardfile and not os . path . exists ( guardfile ) : raise EnvironmentError ( "Guard file '%s' not found, won't start!" % guardfile ) if os . path . exists ( pidfile ) : running , pid = check_process ( pidfile ) if running : raise EnvironmentError ( "Daemon process #%d stil... | Raise an EnvironmentError when the guardfile doesn t exist or the process with the ID found in pidfile is still active . |
51,441 | def daemonize ( pidfile = None , logfile = None , sync = True ) : log = logging . getLogger ( "daemonize" ) ppid = os . getpid ( ) try : pid = os . fork ( ) if pid > 0 : log . debug ( "Parent exiting (PID %d, CHILD %d)" % ( ppid , pid ) ) sys . exit ( 0 ) except OSError as exc : log . critical ( "fork #1 failed (PID %d... | Fork the process into the background . |
51,442 | def flatten ( nested , containers = ( list , tuple ) ) : flat = list ( nested ) i = 0 while i < len ( flat ) : while isinstance ( flat [ i ] , containers ) : if not flat [ i ] : flat . pop ( i ) i -= 1 break else : flat [ i : i + 1 ] = ( flat [ i ] ) i += 1 return flat | Flatten a nested list in - place and return it . |
51,443 | def gendocs ( ) : "create some doc pages automatically" helppage = path ( "docs/references-cli-usage.rst" ) content = [ ".. automatically generated using 'paver gendocs'." , "" , ".. contents::" , " :local:" , "" , ".. note::" , "" , " The help output presented here applies to version ``%s`` of the tools." % sh (... | create some doc pages automatically |
51,444 | def watchdog_pid ( ) : result = sh ( 'netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}' . format ( SPHINX_AUTOBUILD_PORT ) , capture = True , ignore_error = True ) pid = result . strip ( ) pid = pid . split ( ) [ - 1 ] if pid else None pid = pid . split ( '/' , 1 ) [ 0 ] if pid and pid != '-' else None return pid | Get watchdog PID via netstat . |
51,445 | def autodocs ( ) : "create Sphinx docs locally, and start a watchdog" build_dir = path ( 'docs/_build' ) index_html = build_dir / 'html/index.html' if build_dir . exists ( ) : build_dir . rmtree ( ) with pushd ( "docs" ) : print "\n*** Generating API doc ***\n" sh ( "sphinx-apidoc -o apidoc -f -T -M ../src/pyrocore" ) ... | create Sphinx docs locally and start a watchdog |
51,446 | def stopdocs ( ) : "stop Sphinx watchdog" for i in range ( 4 ) : pid = watchdog_pid ( ) if pid : if not i : sh ( 'ps {}' . format ( pid ) ) sh ( 'kill {}' . format ( pid ) ) time . sleep ( .5 ) else : break | stop Sphinx watchdog |
51,447 | def coverage ( ) : "generate coverage report and show in browser" coverage_index = path ( "build/coverage/index.html" ) coverage_index . remove ( ) sh ( "paver test" ) coverage_index . exists ( ) and webbrowser . open ( coverage_index ) | generate coverage report and show in browser |
51,448 | def lookup_announce_alias ( name ) : for alias , urls in announce . items ( ) : if alias . lower ( ) == name . lower ( ) : return alias , urls raise KeyError ( "Unknown alias %s" % ( name , ) ) | Get canonical alias name and announce URL list for the given alias . |
51,449 | def map_announce2alias ( url ) : import urlparse for alias , urls in announce . items ( ) : if any ( i == url for i in urls ) : return alias parts = urlparse . urlparse ( url ) server = urlparse . urlunparse ( ( parts . scheme , parts . netloc , "/" , None , None , None ) ) for alias , urls in announce . items ( ) : if... | Get tracker alias for announce URL and if none is defined the 2nd level domain . |
51,450 | def validate ( key , val ) : if val and val . startswith ( "~/" ) : return os . path . expanduser ( val ) if key == "output_header_frequency" : return int ( val , 10 ) if key . endswith ( "_ecma48" ) : return eval ( "'%s'" % val . replace ( "'" , r"\'" ) ) return val | Validate a configuration value . |
51,451 | def _update_config ( self , namespace ) : for key , val in namespace . items ( ) : setattr ( config , key , val ) | Inject the items from the given dict into the configuration . |
51,452 | def _interpolation_escape ( self , namespace ) : for key , val in namespace . items ( ) : if '%' in val : namespace [ key ] = self . INTERPOLATION_ESCAPE . sub ( lambda match : '%' + match . group ( 0 ) , val ) | Re - escape interpolation strings . |
51,453 | def _validate_namespace ( self , namespace ) : self . _update_config ( namespace ) for key , val in namespace [ "announce" ] . items ( ) : if isinstance ( val , basestring ) : namespace [ "announce" ] [ key ] = val . split ( ) self . _interpolation_escape ( namespace [ "formats" ] ) for factory in ( "engine" , ) : if i... | Validate the given namespace . This method is idempotent! |
51,454 | def _set_from_ini ( self , namespace , ini_file ) : global_vars = dict ( ( key , val ) for key , val in namespace . items ( ) if isinstance ( val , basestring ) ) for section in ini_file . sections ( ) : if section == "GLOBAL" : raw_vars = global_vars else : raw_vars = namespace . setdefault ( section . lower ( ) , { }... | Copy values from loaded INI file to namespace . |
51,455 | def _set_defaults ( self , namespace , optional_cfg_files ) : namespace [ "config_dir" ] = self . config_dir for idx , cfg_file in enumerate ( [ self . CONFIG_INI ] + optional_cfg_files ) : if any ( i in cfg_file for i in set ( '/' + os . sep ) ) : continue try : defaults = pymagic . resource_string ( "pyrocore" , "dat... | Set default values in the given dict . |
51,456 | def _load_ini ( self , namespace , config_file ) : self . LOG . debug ( "Loading %r..." % ( config_file , ) ) ini_file = ConfigParser . SafeConfigParser ( ) ini_file . optionxform = str if ini_file . read ( config_file ) : self . _set_from_ini ( namespace , ini_file ) else : self . LOG . warning ( "Configuration file %... | Load INI style configuration . |
51,457 | def _load_py ( self , namespace , config_file ) : if config_file and os . path . isfile ( config_file ) : self . LOG . debug ( "Loading %r..." % ( config_file , ) ) exec ( compile ( open ( config_file ) . read ( ) , config_file , 'exec' ) , vars ( config ) , namespace ) else : self . LOG . warning ( "Configuration file... | Load scripted configuration . |
51,458 | def load ( self , optional_cfg_files = None ) : optional_cfg_files = optional_cfg_files or [ ] if self . _loaded : raise RuntimeError ( "INTERNAL ERROR: Attempt to load configuration twice!" ) try : namespace = { } self . _set_defaults ( namespace , optional_cfg_files ) self . _load_ini ( namespace , os . path . join (... | Actually load the configuation from either the default location or the given directory . |
51,459 | def create ( self , remove_all_rc_files = False ) : if os . path . exists ( self . config_dir ) : self . LOG . debug ( "Configuration directory %r already exists!" % ( self . config_dir , ) ) else : os . mkdir ( self . config_dir ) if remove_all_rc_files : for subdir in ( '.' , 'rtorrent.d' ) : config_files = list ( gl... | Create default configuration files at either the default location or the given directory . |
51,460 | def make_magnet_meta ( self , magnet_uri ) : import cgi import hashlib if magnet_uri . startswith ( "magnet:" ) : magnet_uri = magnet_uri [ 7 : ] meta = { "magnet-uri" : "magnet:" + magnet_uri } magnet_params = cgi . parse_qs ( magnet_uri . lstrip ( '?' ) ) meta_name = magnet_params . get ( "xt" , [ hashlib . sha1 ( ma... | Create a magnet - uri torrent . |
51,461 | def get_class_logger ( obj ) : return logging . getLogger ( obj . __class__ . __module__ + '.' + obj . __class__ . __name__ ) | Get a logger specific for the given object s class . |
51,462 | def default ( self , o ) : if isinstance ( o , set ) : return list ( sorted ( o ) ) elif hasattr ( o , 'as_dict' ) : return o . as_dict ( ) else : return super ( JSONEncoder , self ) . default ( o ) | Support more object types . |
51,463 | def fmt_sz ( intval ) : try : return fmt . human_size ( intval ) except ( ValueError , TypeError ) : return "N/A" . rjust ( len ( fmt . human_size ( 0 ) ) ) | Format a byte sized value . |
51,464 | def fmt_iso ( timestamp ) : try : return fmt . iso_datetime ( timestamp ) except ( ValueError , TypeError ) : return "N/A" . rjust ( len ( fmt . iso_datetime ( 0 ) ) ) | Format a UNIX timestamp to an ISO datetime string . |
51,465 | def fmt_duration ( duration ) : try : return fmt . human_duration ( float ( duration ) , 0 , 2 , True ) except ( ValueError , TypeError ) : return "N/A" . rjust ( len ( fmt . human_duration ( 0 , 0 , 2 , True ) ) ) | Format a duration value in seconds to a readable form . |
51,466 | def fmt_subst ( regex , subst ) : return lambda text : re . sub ( regex , subst , text ) if text else text | Replace regex with string . |
51,467 | def preparse ( output_format ) : try : return templating . preparse ( output_format , lambda path : os . path . join ( config . config_dir , "templates" , path ) ) except ImportError as exc : if "tempita" in str ( exc ) : raise error . UserError ( "To be able to use Tempita templates, install the 'tempita' package (%s)... | Do any special processing of a template and return the result . |
51,468 | def validate_field_list ( fields , allow_fmt_specs = False , name_filter = None ) : formats = [ i [ 4 : ] for i in globals ( ) if i . startswith ( "fmt_" ) ] try : fields = [ i . strip ( ) for i in fields . replace ( ',' , ' ' ) . split ( ) ] except AttributeError : pass if name_filter : fields = [ name_filter ( name )... | Make sure the fields in the given list exist . |
51,469 | def validate_sort_fields ( sort_fields ) : descending = set ( ) def sort_order_filter ( name ) : "Helper to remove flag and memoize sort order" if name . startswith ( '-' ) : name = name [ 1 : ] descending . add ( name ) return name sort_fields = validate_field_list ( sort_fields , name_filter = sort_order_filter ) log... | Make sure the fields in the given list exist and return sorting key . |
51,470 | def formatter_help ( cls ) : result = [ ( "raw" , "Switch off the default field formatter." ) ] for name , method in globals ( ) . items ( ) : if name . startswith ( "fmt_" ) : result . append ( ( name [ 4 : ] , method . __doc__ . strip ( ) ) ) return result | Return a list of format specifiers and their documentation . |
51,471 | def timeparse ( sval , granularity = 'seconds' ) : match = COMPILED_SIGN . match ( sval ) sign = - 1 if match . groupdict ( ) [ 'sign' ] == '-' else 1 sval = match . groupdict ( ) [ 'unsigned' ] for timefmt in COMPILED_TIMEFORMATS : match = timefmt . match ( sval ) if match and match . group ( 0 ) . strip ( ) : mdict =... | Parse a time expression returning it as a number of seconds . If possible the return value will be an int ; if this is not possible the return will be a float . Returns None if a time expression cannot be parsed from the given string . |
51,472 | def get_client ( project_id = None , credentials = None , service_url = None , service_account = None , private_key = None , private_key_file = None , json_key = None , json_key_file = None , readonly = True , swallow_results = True , num_retries = 0 ) : if not credentials : assert ( service_account and ( private_key o... | Return a singleton instance of BigQueryClient . Either AssertionCredentials or a service account and private key combination need to be provided in order to authenticate requests to BigQuery . |
51,473 | def get_projects ( bq_service ) : projects_request = bq_service . projects ( ) . list ( ) . execute ( ) projects = [ ] for project in projects_request . get ( 'projects' , [ ] ) : project_data = { 'id' : project [ 'id' ] , 'name' : project [ 'friendlyName' ] } projects . append ( project_data ) return projects | Given the BigQuery service return data about all projects . |
51,474 | def _get_bq_service ( credentials = None , service_url = None ) : assert credentials , 'Must provide ServiceAccountCredentials' http = credentials . authorize ( Http ( ) ) service = build ( 'bigquery' , 'v2' , http = http , discoveryServiceUrl = service_url , cache_discovery = False ) return service | Construct an authorized BigQuery service object . |
51,475 | def _submit_query_job ( self , query_data ) : logger . debug ( 'Submitting query job: %s' % query_data ) job_collection = self . bigquery . jobs ( ) try : query_reply = job_collection . query ( projectId = self . project_id , body = query_data ) . execute ( num_retries = self . num_retries ) except HttpError as e : if ... | Submit a query job to BigQuery . |
51,476 | def _insert_job ( self , body_object ) : logger . debug ( 'Submitting job: %s' % body_object ) job_collection = self . bigquery . jobs ( ) return job_collection . insert ( projectId = self . project_id , body = body_object ) . execute ( num_retries = self . num_retries ) | Submit a job to BigQuery |
51,477 | def query ( self , query , max_results = None , timeout = 0 , dry_run = False , use_legacy_sql = None , external_udf_uris = None ) : logger . debug ( 'Executing query: %s' % query ) query_data = { 'query' : query , 'timeoutMs' : timeout * 1000 , 'dryRun' : dry_run , 'maxResults' : max_results } if use_legacy_sql is not... | Submit a query to BigQuery . |
51,478 | def get_query_schema ( self , job_id ) : query_reply = self . get_query_results ( job_id , offset = 0 , limit = 0 ) if not query_reply [ 'jobComplete' ] : logger . warning ( 'BigQuery job %s not complete' % job_id ) raise UnfinishedQueryException ( ) return query_reply [ 'schema' ] [ 'fields' ] | Retrieve the schema of a query by job id . |
51,479 | def get_table_schema ( self , dataset , table , project_id = None ) : project_id = self . _get_project_id ( project_id ) try : result = self . bigquery . tables ( ) . get ( projectId = project_id , tableId = table , datasetId = dataset ) . execute ( num_retries = self . num_retries ) except HttpError as e : if int ( e ... | Return the table schema . |
51,480 | def check_job ( self , job_id ) : query_reply = self . get_query_results ( job_id , offset = 0 , limit = 0 ) return ( query_reply . get ( 'jobComplete' , False ) , int ( query_reply . get ( 'totalRows' , 0 ) ) ) | Return the state and number of results of a query by job id . |
51,481 | def get_query_rows ( self , job_id , offset = None , limit = None , timeout = 0 ) : query_reply = self . get_query_results ( job_id , offset = offset , limit = limit , timeout = timeout ) if not query_reply [ 'jobComplete' ] : logger . warning ( 'BigQuery job %s not complete' % job_id ) raise UnfinishedQueryException (... | Retrieve a list of rows from a query table by job id . This method will append results from multiple pages together . If you want to manually page through results you can use get_query_results method directly . |
51,482 | def check_dataset ( self , dataset_id , project_id = None ) : dataset = self . get_dataset ( dataset_id , project_id ) return bool ( dataset ) | Check to see if a dataset exists . |
51,483 | def get_dataset ( self , dataset_id , project_id = None ) : project_id = self . _get_project_id ( project_id ) try : dataset = self . bigquery . datasets ( ) . get ( projectId = project_id , datasetId = dataset_id ) . execute ( num_retries = self . num_retries ) except HttpError : dataset = { } return dataset | Retrieve a dataset if it exists otherwise return an empty dict . |
51,484 | def get_table ( self , dataset , table , project_id = None ) : project_id = self . _get_project_id ( project_id ) try : table = self . bigquery . tables ( ) . get ( projectId = project_id , datasetId = dataset , tableId = table ) . execute ( num_retries = self . num_retries ) except HttpError : table = { } return table | Retrieve a table if it exists otherwise return an empty dict . |
51,485 | def create_table ( self , dataset , table , schema , expiration_time = None , time_partitioning = False , project_id = None ) : project_id = self . _get_project_id ( project_id ) body = { 'schema' : { 'fields' : schema } , 'tableReference' : { 'tableId' : table , 'projectId' : project_id , 'datasetId' : dataset } } if ... | Create a new table in the dataset . |
51,486 | def patch_table ( self , dataset , table , schema , project_id = None ) : project_id = self . _get_project_id ( project_id ) body = { 'schema' : { 'fields' : schema } , 'tableReference' : { 'tableId' : table , 'projectId' : project_id , 'datasetId' : dataset } } try : result = self . bigquery . tables ( ) . patch ( pro... | Patch an existing table in the dataset . |
51,487 | def create_view ( self , dataset , view , query , use_legacy_sql = None , project_id = None ) : project_id = self . _get_project_id ( project_id ) body = { 'tableReference' : { 'tableId' : view , 'projectId' : project_id , 'datasetId' : dataset } , 'view' : { 'query' : query } } if use_legacy_sql is not None : body [ '... | Create a new view in the dataset . |
51,488 | def delete_table ( self , dataset , table , project_id = None ) : project_id = self . _get_project_id ( project_id ) try : response = self . bigquery . tables ( ) . delete ( projectId = project_id , datasetId = dataset , tableId = table ) . execute ( num_retries = self . num_retries ) if self . swallow_results : return... | Delete a table from the dataset . |
51,489 | def get_tables ( self , dataset_id , app_id , start_time , end_time , project_id = None ) : if isinstance ( start_time , datetime ) : start_time = calendar . timegm ( start_time . utctimetuple ( ) ) if isinstance ( end_time , datetime ) : end_time = calendar . timegm ( end_time . utctimetuple ( ) ) every_table = self .... | Retrieve a list of tables that are related to the given app id and are inside the range of start and end times . |
51,490 | def wait_for_job ( self , job , interval = 5 , timeout = 60 ) : complete = False job_id = str ( job if isinstance ( job , ( six . binary_type , six . text_type , int ) ) else job [ 'jobReference' ] [ 'jobId' ] ) job_resource = None start_time = time ( ) elapsed_time = 0 while not ( complete or elapsed_time > timeout ) ... | Waits until the job indicated by job_resource is done or has failed |
51,491 | def push_rows ( self , dataset , table , rows , insert_id_key = None , skip_invalid_rows = None , ignore_unknown_values = None , template_suffix = None , project_id = None ) : project_id = self . _get_project_id ( project_id ) table_data = self . bigquery . tabledata ( ) rows_data = [ ] for row in rows : each_row = { }... | Upload rows to BigQuery table . |
51,492 | def get_all_tables ( self , dataset_id , project_id = None ) : tables_data = self . _get_all_tables_for_dataset ( dataset_id , project_id ) tables = [ ] for table in tables_data . get ( 'tables' , [ ] ) : table_name = table . get ( 'tableReference' , { } ) . get ( 'tableId' ) if table_name : tables . append ( table_nam... | Retrieve a list of tables for the dataset . |
51,493 | def _get_all_tables_for_dataset ( self , dataset_id , project_id = None ) : project_id = self . _get_project_id ( project_id ) result = self . bigquery . tables ( ) . list ( projectId = project_id , datasetId = dataset_id ) . execute ( num_retries = self . num_retries ) page_token = result . get ( 'nextPageToken' ) whi... | Retrieve a list of all tables for the dataset . |
51,494 | def _parse_table_list_response ( self , list_response ) : tables = defaultdict ( dict ) for table in list_response . get ( 'tables' , [ ] ) : table_ref = table . get ( 'tableReference' ) if not table_ref : continue table_id = table_ref . get ( 'tableId' , '' ) year_month , app_id = self . _parse_table_name ( table_id )... | Parse the response received from calling list on tables . |
51,495 | def _parse_table_name ( self , table_id ) : attributes = table_id . split ( '_' ) year_month = "-" . join ( attributes [ : 2 ] ) app_id = "-" . join ( attributes [ 2 : ] ) if year_month . count ( "-" ) == 1 and all ( [ num . isdigit ( ) for num in year_month . split ( '-' ) ] ) : return year_month , app_id attributes =... | Parse a table name in the form of appid_YYYY_MM or YYYY_MM_appid and return a tuple consisting of YYYY - MM and the app id . |
51,496 | def _filter_tables_by_time ( self , tables , start_time , end_time ) : return [ table_name for ( table_name , unix_seconds ) in tables . items ( ) if self . _in_range ( start_time , end_time , unix_seconds ) ] | Filter a table dictionary and return table names based on the range of start and end times in unix seconds . |
51,497 | def _in_range ( self , start_time , end_time , time ) : ONE_MONTH = 2764800 return start_time <= time <= end_time or time <= start_time <= time + ONE_MONTH or time <= end_time <= time + ONE_MONTH | Indicate if the given time falls inside of the given range . |
51,498 | def _transform_row ( self , row , schema ) : log = { } for index , col_dict in enumerate ( schema ) : col_name = col_dict [ 'name' ] row_value = row [ 'f' ] [ index ] [ 'v' ] if row_value is None : log [ col_name ] = None continue if col_dict [ 'type' ] == 'RECORD' : row_value = self . _recurse_on_row ( col_dict , row_... | Apply the given schema to the given BigQuery data row . |
51,499 | def _recurse_on_row ( self , col_dict , nested_value ) : row_value = None if col_dict [ 'mode' ] == 'REPEATED' and isinstance ( nested_value , list ) : row_value = [ self . _transform_row ( record [ 'v' ] , col_dict [ 'fields' ] ) for record in nested_value ] else : row_value = self . _transform_row ( nested_value , co... | Apply the schema specified by the given dict to the nested value by recursing on it . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.