idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
51,300 | def coffee_compile ( source ) : with open ( COFFEE_COMPILER , 'rb' ) as coffeescript_js : return evaljs ( ( coffeescript_js . read ( ) . decode ( 'utf-8' ) , 'CoffeeScript.compile(dukpy.coffeecode)' ) , coffeecode = source ) | Compiles the given source from CoffeeScript to JavaScript |
51,301 | def register_path ( self , path ) : self . _paths . insert ( 0 , os . path . abspath ( path ) ) | Registers a directory where to look for modules . |
51,302 | def lookup ( self , module_name ) : for search_path in self . _paths : module_path = os . path . join ( search_path , module_name ) new_module_name , module_file = self . _lookup ( module_path , module_name ) if module_file : return new_module_name , module_file return None , None | Searches for a file providing given module . |
51,303 | def load ( self , module_name ) : module_name , path = self . lookup ( module_name ) if path : with open ( path , 'rb' ) as f : return module_name , f . read ( ) . decode ( 'utf-8' ) return None , None | Returns source code and normalized module id of the given module . |
51,304 | def less_compile ( source , options = None ) : options = options or { } res = NodeLikeInterpreter ( ) . evaljs ( ( 'var result = null;' 'var less = require("less/less-node");' , 'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {' ' result = {"error": error, "output": output};' '});' 'result;' ) , lesscode = source , lessoptions = options ) if not res : raise RuntimeError ( 'Results or errors unavailable' ) if res . get ( 'error' ) : raise LessCompilerError ( res [ 'error' ] [ 'message' ] ) return res [ 'output' ] [ 'css' ] | Compiles the given source from LESS to CSS |
51,305 | def install_jspackage ( package_name , version , modulesdir ) : if not version : version = '' requirements = _resolve_dependencies ( package_name , version ) print ( 'Packages going to be installed: {0}' . format ( ', ' . join ( '{0}->{1}' . format ( * i ) for i in requirements ) ) ) downloads = { } for dependency_name , _ , version_info in requirements : try : downloads [ dependency_name ] = version_info [ 'dist' ] [ 'tarball' ] except KeyError : raise JSPackageInstallError ( 'Unable to detect a supported download url for package' , error_code = 3 ) for dependency_name , download_url in downloads . items ( ) : tarball = BytesIO ( ) print ( 'Fetching {0}' . format ( download_url ) , end = '' ) with closing ( urlopen ( download_url ) ) as data : chunk = data . read ( 1024 ) while chunk : print ( '.' , end = '' ) tarball . write ( chunk ) chunk = data . read ( 1024 ) print ( '' ) tarball . seek ( 0 ) with closing ( tarfile . open ( fileobj = tarball ) ) as tb : dest = os . path . join ( modulesdir , dependency_name ) tmpdir = tempfile . mkdtemp ( ) try : tb . extractall ( tmpdir ) shutil . rmtree ( os . path . abspath ( dest ) , ignore_errors = True ) shutil . move ( os . path . join ( tmpdir , 'package' ) , os . path . abspath ( dest ) ) finally : shutil . rmtree ( tmpdir ) print ( 'Installing {0} in {1} Done!' . format ( package_name , modulesdir ) ) | Installs a JavaScript package downloaded from npmjs . org . |
51,306 | def evaljs ( self , code , ** kwargs ) : jsvars = json . dumps ( kwargs ) jscode = self . _adapt_code ( code ) if not isinstance ( jscode , bytes ) : jscode = jscode . encode ( 'utf-8' ) if not isinstance ( jsvars , bytes ) : jsvars = jsvars . encode ( 'utf-8' ) res = _dukpy . eval_string ( self , jscode , jsvars ) if res is None : return None return json . loads ( res . decode ( 'utf-8' ) ) | Runs JavaScript code in the context of the interpreter . |
51,307 | def typescript_compile ( source ) : with open ( TS_COMPILER , 'r' ) as tsservices_js : return evaljs ( ( tsservices_js . read ( ) , 'ts.transpile(dukpy.tscode, {options});' . format ( options = TSC_OPTIONS ) ) , tscode = source ) | Compiles the given source from TypeScript to ES5 using TypescriptServices . js |
51,308 | def get_private_file ( self ) : return PrivateFile ( request = self . request , storage = self . get_storage ( ) , relative_name = self . get_path ( ) ) | Return all relevant data in a single object so this is easy to extend and server implementations can pick what they need . |
51,309 | def get ( self , request , * args , ** kwargs ) : private_file = self . get_private_file ( ) if not self . can_access_file ( private_file ) : return HttpResponseForbidden ( 'Private storage access denied' ) if not private_file . exists ( ) : return self . serve_file_not_found ( private_file ) else : return self . serve_file ( private_file ) | Handle incoming GET requests |
51,310 | def serve_file ( self , private_file ) : response = self . server_class ( ) . serve ( private_file ) if self . content_disposition : filename = self . get_content_disposition_filename ( private_file ) response [ 'Content-Disposition' ] = b'; ' . join ( [ self . content_disposition . encode ( ) , self . _encode_filename_header ( filename ) ] ) return response | Serve the file that was retrieved from the storage . The relative path can be found with private_file . relative_name . |
51,311 | def get_content_disposition_filename ( self , private_file ) : return self . content_disposition_filename or os . path . basename ( private_file . relative_name ) | Return the filename in the download header . |
51,312 | def _encode_filename_header ( self , filename ) : user_agent = self . request . META . get ( 'HTTP_USER_AGENT' , None ) if 'WebKit' in user_agent : return u'filename={}' . format ( filename ) . encode ( "utf-8" ) elif 'MSIE' in user_agent : url_encoded = quote ( filename . encode ( "utf-8" ) ) . replace ( 'attachment' , "a%74tachment" ) return "filename={}" . format ( url_encoded ) . encode ( "utf-8" ) else : rfc2231_filename = quote ( filename . encode ( "utf-8" ) ) return "filename*=UTF-8''{}" . format ( rfc2231_filename ) . encode ( "utf-8" ) | The filename encoded to use in a Content - Disposition header . |
51,313 | def add_no_cache_headers ( func ) : @ wraps ( func ) def _dec ( * args , ** kwargs ) : response = func ( * args , ** kwargs ) response [ 'Expires' ] = 'Thu, 01 Jan 1970 00:00:00 GMT' response [ 'Cache-Control' ] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate' return response return _dec | Makes sure the retrieved file is not cached on disk or cached by proxy servers in between . This would circumvent any checking whether the user may even access the file . |
51,314 | def readTrainingData ( file_locations , GROUP_LABEL ) : class Mock ( object ) : pass mock_module = Mock ( ) mock_module . PARENT_LABEL = GROUP_LABEL for location in file_locations : with open ( location ) as f : tree = etree . parse ( f ) xml = tree . getroot ( ) for each in data_prep_utils . TrainingData ( xml , mock_module ) : yield each | Used in downstream tests |
51,315 | def device ( value ) : browser = None for regex , name in BROWSERS : if regex . search ( value ) : browser = name break device = None for regex , name in DEVICES : if regex . search ( value ) : device = name break if browser and device : return _ ( '%(browser)s on %(device)s' ) % { 'browser' : browser , 'device' : device } if browser : return browser if device : return device return None | Transform a User Agent into human readable text . |
51,316 | def location ( value ) : try : location = geoip ( ) and geoip ( ) . city ( value ) except Exception : try : location = geoip ( ) and geoip ( ) . country ( value ) except Exception as e : warnings . warn ( str ( e ) ) location = None if location and location [ 'country_name' ] : if 'city' in location and location [ 'city' ] : return '{}, {}' . format ( location [ 'city' ] , location [ 'country_name' ] ) return location [ 'country_name' ] return None | Transform an IP address into an approximate location . |
51,317 | def before ( func ) : class BeforeDecorator ( LambdaDecorator ) : def before ( self , event , context ) : return func ( event , context ) return BeforeDecorator | Run a function before the handler is invoked ; it is passed the event & context and must return an event & context too . |
51,318 | def after ( func ) : class AfterDecorator ( LambdaDecorator ) : def after ( self , retval ) : return func ( retval ) return AfterDecorator | Run a function after the handler is invoked ; it is passed the response and must return a response too . |
51,319 | def on_exception ( func ) : class OnExceptionDecorator ( LambdaDecorator ) : def on_exception ( self , exception ) : return func ( exception ) return OnExceptionDecorator | Run a function when a handler throws an exception . Its return value is returned to AWS . |
51,320 | def async_handler ( handler ) : @ wraps ( handler ) def wrapper ( event , context ) : context . loop = asyncio . get_event_loop ( ) return context . loop . run_until_complete ( handler ( event , context ) ) return wrapper | This decorator allows for use of async handlers by automatically running them in an event loop . The loop is added to the context object for if the handler needs it . |
51,321 | def dump_json_body ( handler ) : @ wraps ( handler ) def wrapper ( event , context ) : response = handler ( event , context ) if 'body' in response : try : response [ 'body' ] = json . dumps ( response [ 'body' ] ) except Exception as exception : return { 'statusCode' : 500 , 'body' : str ( exception ) } return response return wrapper | Automatically serialize response bodies with json . dumps . |
51,322 | def json_http_resp ( handler ) : @ wraps ( handler ) def wrapper ( event , context ) : response = handler ( event , context ) try : body = json . dumps ( response ) except Exception as exception : return { 'statusCode' : 500 , 'body' : str ( exception ) } return { 'statusCode' : 200 , 'body' : body } return wrapper | Automatically serialize return value to the body of a successful HTTP response . |
51,323 | def load_json_body ( handler ) : @ wraps ( handler ) def wrapper ( event , context ) : if isinstance ( event . get ( 'body' ) , str ) : try : event [ 'body' ] = json . loads ( event [ 'body' ] ) except : return { 'statusCode' : 400 , 'body' : 'BAD REQUEST' } return handler ( event , context ) return wrapper | Automatically deserialize event bodies with json . loads . |
51,324 | def json_schema_validator ( request_schema = None , response_schema = None ) : def wrapper_wrapper ( handler ) : @ wraps ( handler ) def wrapper ( event , context ) : if request_schema is not None : if jsonschema is None : logger . error ( 'jsonschema is not installed, skipping request validation' ) else : try : jsonschema . validate ( event , request_schema ) except jsonschema . ValidationError as exception : return { 'statusCode' : 400 , 'body' : 'RequestValidationError: {}' . format ( exception . message ) } response = handler ( event , context ) if response_schema is not None : if jsonschema is None : logger . error ( 'jsonschema is not installed, skipping response validation' ) else : try : jsonschema . validate ( response , response_schema ) except jsonschema . ValidationError as exception : return { 'statusCode' : 500 , 'body' : 'ResponseValidationError: {}' . format ( exception . message ) } return response return wrapper return wrapper_wrapper | Validate your request & response payloads against a JSONSchema . |
51,325 | def no_retry_on_failure ( handler ) : seen_request_ids = set ( ) @ wraps ( handler ) def wrapper ( event , context ) : if context . aws_request_id in seen_request_ids : logger . critical ( 'Retry attempt on request id %s detected.' , context . aws_request_id ) return { 'statusCode' : 200 } seen_request_ids . add ( context . aws_request_id ) return handler ( event , context ) return wrapper | AWS Lambda retries scheduled lambdas that don't execute successfully . |
51,326 | def _wrap_thing ( self , thing , kind ) : thing [ 'created' ] = self . _epoch_utc_to_local ( thing [ 'created_utc' ] ) thing [ 'd_' ] = copy . deepcopy ( thing ) ThingType = namedtuple ( kind , thing . keys ( ) ) thing = ThingType ( ** thing ) return thing | Mimic praw . Submission and praw . Comment API |
51,327 | def _add_nec_args ( self , payload ) : if self . _limited ( payload ) : return if 'limit' not in payload : payload [ 'limit' ] = self . max_results_per_request if 'sort' not in payload : payload [ 'sort' ] = 'desc' if 'filter' in payload : if not isinstance ( payload [ 'filter' ] , list ) : if isinstance ( payload [ 'filter' ] , str ) : payload [ 'filter' ] = [ payload [ 'filter' ] ] else : payload [ 'filter' ] = list ( payload [ 'filter' ] ) if 'created_utc' not in payload [ 'filter' ] : payload [ 'filter' ] . append ( 'created_utc' ) | Adds limit and created_utc arguments to the payload as necessary . |
51,328 | def pretty_path ( path ) : path = fmt . to_utf8 ( path ) home_dir = os . path . expanduser ( "~" ) if path . startswith ( home_dir ) : path = "~" + path [ len ( home_dir ) : ] return '"%s"' % ( path , ) | Prettify path for logging . |
51,329 | def guarded ( self , call , * args ) : self . LOG . debug ( '%s(%s)' % ( call . __name__ , ', ' . join ( [ pretty_path ( i ) for i in args ] ) , ) ) if not self . options . dry_run : try : call ( * args ) except ( EnvironmentError , UnicodeError ) as exc : self . fatal ( '%s(%s) failed [%s]' % ( call . __name__ , ', ' . join ( [ pretty_path ( i ) for i in args ] ) , exc , ) ) | Catch exceptions thrown by filesystem calls and don t really execute them in dry - run mode . |
51,330 | def run ( ) : logging . basicConfig ( level = logging . DEBUG ) load_config . ConfigLoader ( ) . load ( ) config . debug = True print ( repr ( config . engine . item ( sys . argv [ 1 ] ) ) ) | Module level test . |
51,331 | def _make_it_so ( self , command , calls , * args , ** kwargs ) : observer = kwargs . pop ( 'observer' , False ) args = ( self . _fields [ "hash" ] , ) + args try : for call in calls : self . _engine . LOG . debug ( "%s%s torrent #%s (%s)" % ( command [ 0 ] . upper ( ) , command [ 1 : ] , self . _fields [ "hash" ] , call ) ) if call . startswith ( ':' ) or call [ : 2 ] . endswith ( '.' ) : namespace = self . _engine . _rpc else : namespace = self . _engine . _rpc . d result = getattr ( namespace , call . lstrip ( ':' ) ) ( * args ) if observer : observer ( result ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While %s torrent #%s: %s" % ( command , self . _fields [ "hash" ] , exc ) ) | Perform some error - checked XMLRPC calls . |
51,332 | def fetch ( self , name , engine_name = None ) : try : return self . _fields [ name ] except KeyError : if isinstance ( name , ( int , long ) ) : name = "custom_%d" % name if name == "done" : val = float ( self . fetch ( "completed_chunks" ) ) / self . fetch ( "size_chunks" ) elif name == "files" : val = self . _get_files ( ) elif name . startswith ( "kind_" ) and name [ 5 : ] . isdigit ( ) : val = self . _get_kind ( int ( name [ 5 : ] , 10 ) ) elif name . startswith ( "custom_" ) : key = name [ 7 : ] try : if len ( key ) == 1 and key in "12345" : val = getattr ( self . _engine . _rpc . d , "custom" + key ) ( self . _fields [ "hash" ] ) else : val = self . _engine . _rpc . d . custom ( self . _fields [ "hash" ] , key ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While accessing field %r: %s" % ( name , exc ) ) else : getter_name = engine_name if engine_name else RtorrentEngine . PYRO2RT_MAPPING . get ( name , name ) if getter_name [ 0 ] == '=' : getter_name = getter_name [ 1 : ] else : getter_name = "get_" + getter_name getter = getattr ( self . _engine . _rpc . d , getter_name ) try : val = getter ( self . _fields [ "hash" ] ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While accessing field %r: %s" % ( name , exc ) ) self . _fields [ name ] = val return val | Get a field on demand . |
51,333 | def datapath ( self ) : path = self . _fields [ 'path' ] if not path : path = self . fetch ( 'directory' ) if path and not self . _fields [ 'is_multi_file' ] : path = os . path . join ( path , self . _fields [ 'name' ] ) return os . path . expanduser ( fmt . to_unicode ( path ) ) | Get an item's data path . |
51,334 | def announce_urls ( self , default = [ ] ) : try : response = self . _engine . _rpc . t . multicall ( self . _fields [ "hash" ] , 0 , "t.url=" , "t.is_enabled=" ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While getting announce URLs for #%s: %s" % ( self . _fields [ "hash" ] , exc ) ) if response : return [ i [ 0 ] for i in response if i [ 1 ] ] else : return default | Get a list of all announce URLs . Returns default if no trackers are found at all . |
51,335 | def tag ( self , tags ) : tags = tags . lower ( ) previous = self . tagged tagset = previous . copy ( ) for tag in tags . replace ( ',' , ' ' ) . split ( ) : if tag . startswith ( '-' ) : tagset . discard ( tag [ 1 : ] ) elif tag . startswith ( '+' ) : tagset . add ( tag [ 1 : ] ) else : tagset . add ( tag ) tagset . discard ( '' ) if tagset != previous : tagset = ' ' . join ( sorted ( tagset ) ) self . _make_it_so ( "setting tags %r on" % ( tagset , ) , [ "custom.set" ] , "tags" , tagset ) self . _fields [ "custom_tags" ] = tagset | Add or remove tags . |
51,336 | def set_throttle ( self , name ) : if name . lower ( ) == "null" : name = "NULL" if name . lower ( ) == "none" : name = '' if name not in self . _engine . known_throttle_names : if self . _engine . _rpc . throttle . up . max ( xmlrpc . NOHASH , name ) == - 1 : if self . _engine . _rpc . throttle . down . max ( xmlrpc . NOHASH , name ) == - 1 : raise error . UserError ( "Unknown throttle name '{}'" . format ( name ) ) self . _engine . known_throttle_names . add ( name ) if ( name or "NONE" ) == self . throttle : self . _engine . LOG . debug ( "Keeping throttle %r on torrent #%s" % ( self . throttle , self . _fields [ "hash" ] ) ) return active = self . is_active if active : self . _engine . LOG . debug ( "Torrent #%s stopped for throttling" % ( self . _fields [ "hash" ] , ) ) self . stop ( ) self . _make_it_so ( "setting throttle %r on" % ( name , ) , [ "throttle_name.set" ] , name ) if active : self . _engine . LOG . debug ( "Torrent #%s restarted after throttling" % ( self . _fields [ "hash" ] , ) ) self . start ( ) | Assign to throttle group . |
51,337 | def purge ( self ) : def partial_file ( item ) : "Filter out partial files" return item . completed_chunks < item . size_chunks self . cull ( file_filter = partial_file , attrs = [ "get_completed_chunks" , "get_size_chunks" ] ) | Delete PARTIAL data files and remove torrent from client . |
51,338 | def load_config ( self , namespace = None , rcfile = None ) : if namespace is None : namespace = config if namespace . scgi_url : return if not rcfile : rcfile = getattr ( config , "rtorrent_rc" , None ) if not rcfile : raise error . UserError ( "No 'rtorrent_rc' path defined in configuration!" ) if not os . path . isfile ( rcfile ) : raise error . UserError ( "Config file %r doesn't exist!" % ( rcfile , ) ) self . LOG . debug ( "Loading rtorrent config from %r" % ( rcfile , ) ) rc_vals = Bunch ( scgi_local = '' , scgi_port = '' ) with open ( rcfile ) as handle : continued = False for line in handle . readlines ( ) : line = line . strip ( ) continued , was_continued = line . endswith ( '\\' ) , continued if not line or was_continued or line . startswith ( "#" ) : continue try : key , val = line . split ( "=" , 1 ) except ValueError : self . LOG . warning ( "Ignored invalid line %r in %r!" % ( line , rcfile ) ) continue key , val = key . strip ( ) , val . strip ( ) key = self . RTORRENT_RC_ALIASES . get ( key , key ) . replace ( '.' , '_' ) if key in self . RTORRENT_RC_KEYS : self . LOG . debug ( "rtorrent.rc: %s = %s" % ( key , val ) ) rc_vals [ key ] = val if rc_vals . scgi_local : rc_vals . scgi_local = os . path . expanduser ( rc_vals . scgi_local ) if rc_vals . scgi_local . startswith ( '/' ) : rc_vals . scgi_local = "scgi://" + rc_vals . scgi_local if rc_vals . scgi_port and not rc_vals . scgi_port . startswith ( "scgi://" ) : rc_vals . scgi_port = "scgi://" + rc_vals . scgi_port namespace . scgi_url = rc_vals . scgi_local or rc_vals . scgi_port | Load file given in rcfile . |
51,339 | def _resolve_viewname ( self , viewname ) : if viewname == "-" : try : viewname = self . open ( ) . ui . current_view ( ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "Can't get name of current view: %s" % ( exc ) ) return viewname | Check for special view names and return existing rTorrent one . |
51,340 | def open ( self ) : if self . _rpc is not None : return self . _rpc self . load_config ( ) if not config . scgi_url : raise error . UserError ( "You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html" ) self . _rpc = xmlrpc . RTorrentProxy ( config . scgi_url ) self . versions , self . version_info = self . _rpc . _set_mappings ( ) self . engine_id = self . _rpc . session . name ( ) time_usec = self . _rpc . system . time_usec ( ) if time_usec < 2 ** 32 : self . LOG . warn ( "Your xmlrpc-c is broken (64 bit integer support missing," " %r returned instead)" % ( type ( time_usec ) , ) ) self . engine_software = "rTorrent %s/%s" % self . versions if "+ssh:" in config . scgi_url : self . startup = int ( self . _rpc . startup_time ( ) or time . time ( ) ) else : self . _session_dir = self . _rpc . session . path ( ) if not self . _session_dir : raise error . UserError ( "You need a session directory, read" " https://pyrocore.readthedocs.io/en/latest/setup.html" ) if not os . path . exists ( self . _session_dir ) : raise error . UserError ( "Non-existing session directory %r" % self . _session_dir ) self . _download_dir = os . path . expanduser ( self . _rpc . directory . default ( ) ) if not os . path . exists ( self . _download_dir ) : raise error . UserError ( "Non-existing download directory %r" % self . _download_dir ) self . startup = os . path . getmtime ( os . path . join ( self . _session_dir , "rtorrent.lock" ) ) self . LOG . debug ( repr ( self ) ) return self . _rpc | Open connection . |
51,341 | def multicall ( self , viewname , fields ) : commands = tuple ( 'd.{}=' . format ( x ) for x in fields ) result_type = namedtuple ( 'DownloadItem' , [ x . replace ( '.' , '_' ) for x in fields ] ) items = self . open ( ) . d . multicall ( viewname , * commands ) return [ result_type ( * x ) for x in items ] | Query the given fields of items in the given view . |
51,342 | def item ( self , infohash , prefetch = None , cache = False ) : return next ( self . items ( infohash , prefetch , cache ) ) | Fetch a single item by its info hash . |
51,343 | def _load_rules ( self ) : for ruleset in self . active_rulesets : section_name = 'sweep_rules_' + ruleset . lower ( ) try : ruledefs = getattr ( self . config , section_name ) except AttributeError : raise error . UserError ( "There is no [{}] section in your configuration" . format ( section_name . upper ( ) ) ) for ruledef , filtercond in ruledefs . items ( ) : if ruledef . endswith ( '.filter' ) : rulename = ruledef . rsplit ( '.' , 1 ) [ 0 ] rule = SweepRule ( ruleset , rulename , int ( ruledefs . get ( rulename + '.prio' , '999' ) ) , ruledefs . get ( rulename + '.order' , self . default_order ) , parse_cond ( filtercond ) ) self . rules . append ( rule ) self . rules . sort ( key = lambda x : ( x . prio , x . name ) ) return self . rules | Load rule definitions from config . |
51,344 | def _parse_schedule ( self , schedule ) : result = { } for param in shlex . split ( str ( schedule ) ) : try : key , val = param . split ( '=' , 1 ) except ( TypeError , ValueError ) : self . fatal ( "Bad param '%s' in job schedule '%s'" % ( param , schedule ) ) else : result [ key ] = val return result | Parse a job schedule . |
51,345 | def _validate_config ( self ) : groups = dict ( job = defaultdict ( Bunch ) , httpd = defaultdict ( Bunch ) , ) for key , val in config . torque . items ( ) : if val . isdigit ( ) : config . torque [ key ] = val = int ( val ) elif val . lower ( ) in ( matching . TRUE | matching . FALSE ) : val = matching . truth ( str ( val ) , key ) stem = key . split ( '.' , 1 ) [ 0 ] if key == "httpd.active" : groups [ stem ] [ "active" ] = val elif stem in groups : try : stem , name , param = key . split ( '.' , 2 ) except ( TypeError , ValueError ) : self . fatal ( "Bad %s configuration key %r (expecting %s.NAME.PARAM)" % ( stem , key , stem ) ) else : groups [ stem ] [ name ] [ param ] = val for key , val in groups . iteritems ( ) : setattr ( self , key . replace ( "job" , "jobs" ) , Bunch ( val ) ) if self . httpd . active : if self . httpd . waitress . url_scheme not in ( "http" , "https" ) : self . fatal ( "HTTP URL scheme must be either 'http' or 'https'" ) if not isinstance ( self . httpd . waitress . port , int ) or not ( 1024 <= self . httpd . waitress . port < 65536 ) : self . fatal ( "HTTP port must be a 16 bit number >= 1024" ) for name , params in self . jobs . items ( ) : for key in ( "handler" , "schedule" ) : if key not in params : self . fatal ( "Job '%s' is missing the required 'job.%s.%s' parameter" % ( name , name , key ) ) bool_param = lambda k , default , p = params : matching . truth ( p . get ( k , default ) , "job.%s.%s" % ( name , k ) ) params . job_name = name params . dry_run = bool_param ( "dry_run" , False ) or self . options . dry_run params . active = bool_param ( "active" , True ) params . schedule = self . _parse_schedule ( params . schedule ) if params . active : try : params . handler = pymagic . import_name ( params . handler ) except ImportError as exc : self . fatal ( "Bad handler name '%s' for job '%s':\n %s" % ( params . handler , name , exc ) ) | Handle and check configuration . |
51,346 | def _add_jobs ( self ) : for name , params in self . jobs . items ( ) : if params . active : params . handler = params . handler ( params ) self . sched . add_cron_job ( params . handler . run , ** params . schedule ) | Add configured jobs . |
51,347 | def _init_wsgi_server ( self ) : self . wsgi_server = None if self . httpd . active : from waitress . server import WSGIServer from pyrocore . daemon import webapp wsgi_app = webapp . make_app ( self . httpd ) self . LOG . debug ( "Waitress config: %r" % self . httpd . waitress ) self . wsgi_server = WSGIServer ( wsgi_app , ** self . httpd . waitress ) self . LOG . info ( "Started web server at %s://%s:%d/" % ( self . httpd . waitress . url_scheme , self . wsgi_server . get_server_name ( self . wsgi_server . effective_host ) , int ( self . wsgi_server . effective_port ) , ) ) | Set up WSGI HTTP server . |
51,348 | def _run_forever ( self ) : while True : try : tick = time . time ( ) asyncore . loop ( timeout = self . POLL_TIMEOUT , use_poll = True ) tick += self . POLL_TIMEOUT - time . time ( ) if tick > 0 : time . sleep ( min ( tick , self . POLL_TIMEOUT ) ) except KeyboardInterrupt as exc : self . LOG . info ( "Termination request received (%s)" % exc ) break except SystemExit as exc : self . return_code = exc . code or 0 self . LOG . info ( "System exit (RC=%r)" % self . return_code ) break else : if self . options . guard_file and not os . path . exists ( self . options . guard_file ) : self . LOG . warn ( "Guard file '%s' disappeared, exiting!" % self . options . guard_file ) break | Run configured jobs until termination request . |
51,349 | def read_blob ( arg ) : result = None if arg == '@-' : result = sys . stdin . read ( ) elif any ( arg . startswith ( '@{}://' . format ( x ) ) for x in { 'http' , 'https' , 'ftp' , 'file' } ) : if not requests : raise error . UserError ( "You must 'pip install requests' to support @URL arguments." ) try : response = requests . get ( arg [ 1 : ] ) response . raise_for_status ( ) result = response . content except requests . RequestException as exc : raise error . UserError ( str ( exc ) ) else : with open ( os . path . expanduser ( arg [ 1 : ] ) , 'rb' ) as handle : result = handle . read ( ) return result | Read a BLOB from the given argument ( stdin , URL , or file ) . |
51,350 | def open ( self ) : if not self . proxy : if not config . scgi_url : config . engine . load_config ( ) if not config . scgi_url : self . LOG . error ( "You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html" ) self . proxy = xmlrpc . RTorrentProxy ( config . scgi_url ) self . proxy . _set_mappings ( ) return self . proxy | Open connection and return proxy . |
51,351 | def execute ( self , proxy , method , args ) : try : result = getattr ( proxy , method ) ( raw_xml = self . options . xml , * tuple ( args ) ) except xmlrpc . ERRORS as exc : self . LOG . error ( "While calling %s(%s): %s" % ( method , ", " . join ( repr ( i ) for i in args ) , exc ) ) self . return_code = error . EX_NOINPUT if "not find" in getattr ( exc , "faultString" , "" ) else error . EX_DATAERR else : if not self . options . quiet : if self . options . repr : result = pformat ( result ) elif hasattr ( result , "__iter__" ) : result = '\n' . join ( i if isinstance ( i , basestring ) else pformat ( i ) for i in result ) print ( fmt . to_console ( result ) ) | Execute given XMLRPC call . |
51,352 | def do_repl ( self ) : from prompt_toolkit import prompt from prompt_toolkit . history import FileHistory from prompt_toolkit . auto_suggest import AutoSuggestFromHistory from prompt_toolkit . contrib . completers import WordCompleter self . options . quiet = False proxy = self . open ( ) ps1 = proxy . session . name ( ) + u'> ' words = [ 'help' , 'stats' , 'exit' ] words += [ x + '=' for x in proxy . system . listMethods ( ) ] history_file = os . path . join ( config . config_dir , '.rtxmlrpc_history' ) while True : try : try : cmd = prompt ( ps1 , completer = WordCompleter ( words ) , auto_suggest = AutoSuggestFromHistory ( ) , history = FileHistory ( history_file ) ) except KeyboardInterrupt : cmd = '' if not cmd : print ( "Enter '?' or 'help' for usage information, 'Ctrl-D' to exit." ) if cmd in { '?' , 'help' } : self . repl_usage ( ) continue elif cmd in { '' , 'stats' } : print ( repr ( proxy ) . split ( None , 1 ) [ 1 ] ) continue elif cmd in { 'exit' } : raise EOFError ( ) try : method , raw_args = cmd . split ( '=' , 1 ) except ValueError : print ( "ERROR: '=' not found" ) continue raw_args = raw_args . split ( ',' ) args = self . cooked ( raw_args ) self . execute ( proxy , method , args ) except EOFError : print ( 'Bye from {!r}' . format ( proxy ) ) break | REPL for rTorrent XMLRPC commands . |
51,353 | def do_import ( self ) : tmp_import = None try : if self . args [ 0 ] . startswith ( '@' ) and self . args [ 0 ] != '@-' : import_file = os . path . expanduser ( self . args [ 0 ] [ 1 : ] ) if not os . path . isfile ( import_file ) : self . parser . error ( "File not found (or not a file): {}" . format ( import_file ) ) args = ( xmlrpc . NOHASH , os . path . abspath ( import_file ) ) else : script_text = '\n' . join ( self . args + [ '' ] ) if script_text == '@-\n' : script_text = sys . stdin . read ( ) with tempfile . NamedTemporaryFile ( suffix = '.rc' , prefix = 'rtxmlrpc-' , delete = False ) as handle : handle . write ( script_text ) tmp_import = handle . name args = ( xmlrpc . NOHASH , tmp_import ) self . execute ( self . open ( ) , 'import' , args ) finally : if tmp_import and os . path . exists ( tmp_import ) : os . remove ( tmp_import ) | Handle import files or streams passed with - i . |
51,354 | def do_command ( self ) : method = self . args [ 0 ] raw_args = self . args [ 1 : ] if '=' in method : if raw_args : self . parser . error ( "Please don't mix rTorrent and shell argument styles!" ) method , raw_args = method . split ( '=' , 1 ) raw_args = raw_args . split ( ',' ) self . execute ( self . open ( ) , method , self . cooked ( raw_args ) ) | Call a single command with arguments . |
51,355 | def download_resource ( self , download_url , target , guard ) : download_url = download_url . strip ( ) if not os . path . isabs ( target ) : target = os . path . join ( config . config_dir , target ) if os . path . exists ( os . path . join ( target , guard ) ) : self . LOG . info ( "Already have '%s' in '%s'..." % ( download_url , target ) ) return if not os . path . isdir ( target ) : os . makedirs ( target ) self . LOG . info ( "Downloading '%s' to '%s'..." % ( download_url , target ) ) with closing ( urllib2 . urlopen ( download_url ) ) as url_handle : if download_url . endswith ( ".zip" ) : with closing ( ZipFile ( StringIO ( url_handle . read ( ) ) ) ) as zip_handle : zip_handle . extractall ( target ) else : with open ( os . path . join ( target , guard ) , "wb" ) as file_handle : shutil . copyfileobj ( url_handle , file_handle ) | Helper to download and install external resources . |
51,356 | def fmt_duration ( secs ) : return ' ' . join ( fmt . human_duration ( secs , 0 , precision = 2 , short = True ) . strip ( ) . split ( ) ) | Format a duration in seconds . |
51,357 | def disk_free ( path ) : stats = os . statvfs ( path ) return stats . f_bavail * stats . f_frsize | Return free bytes on partition holding path . |
51,358 | def truth ( val , context ) : try : 0 + val except TypeError : lower_val = val . lower ( ) if lower_val in TRUE : return True elif lower_val in FALSE : return False else : raise FilterError ( "Bad boolean value %r in %r (expected one of '%s', or '%s')" % ( val , context , "' '" . join ( TRUE ) , "' '" . join ( FALSE ) ) ) else : return bool ( val ) | Convert truth value in val to a boolean . |
51,359 | def _time_ym_delta ( timestamp , delta , months ) : timestamp = list ( time . localtime ( timestamp ) ) timestamp [ int ( months ) ] += delta return time . mktime ( timestamp ) | Helper to add a year or month delta to a timestamp . |
51,360 | def unquote_pre_filter ( pre_filter , _regex = re . compile ( r'[\\]+' ) ) : if pre_filter . startswith ( '"' ) and pre_filter . endswith ( '"' ) : pre_filter = pre_filter [ 1 : - 1 ] pre_filter = _regex . sub ( lambda x : x . group ( 0 ) [ : len ( x . group ( 0 ) ) // 2 ] , pre_filter ) return pre_filter | Unquote a pre - filter condition . |
51,361 | def _create_filter ( self , condition ) : comparison = re . match ( r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self . ident_re , condition ) if comparison : name , comparison , values = comparison . groups ( ) if values and values [ 0 ] in "+-" : raise FilterError ( "Comparison operator cannot be followed by '%s' in '%s'" % ( values [ 0 ] , condition ) ) values = self . COMPARISON_OPS [ comparison ] % values else : try : name , values = condition . split ( '=' , 1 ) except ValueError : if self . default_field : name , values = self . default_field , condition else : raise FilterError ( "Field name missing in '%s' (expected '=')" % condition ) field = self . lookup ( name ) if not field : raise FilterError ( "Unknown field %r in %r" % ( name , condition ) ) if field . get ( "matcher" ) is None : raise FilterError ( "Field %r cannot be used as a filter" % ( name , ) ) filters = [ ] split_values = re . findall ( r'(!?/[^/]*/|[^,]+)(?:,|$)' , values ) if values else [ '' ] if not split_values : raise FilterError ( "Internal Error: Cannot split %r into match values" % ( values , ) ) for value in split_values : wrapper = None if value . startswith ( '!' ) : wrapper = NegateFilter value = value [ 1 : ] field_matcher = field [ "matcher" ] ( name , value ) filters . append ( wrapper ( field_matcher ) if wrapper else field_matcher ) return CompoundFilterAny ( filters ) if len ( filters ) > 1 else filters [ 0 ] | Create a filter object from a textual condition . |
51,362 | def parse ( self , conditions ) : conditions_text = conditions try : conditions = shlex . split ( fmt . to_utf8 ( conditions ) ) except AttributeError : conditions_text = self . _tree2str ( conditions ) if not conditions : raise FilterError ( "No conditions given at all!" ) negate = conditions [ : 1 ] == [ "NOT" ] if negate : conditions = conditions [ 1 : ] if not conditions : raise FilterError ( "NOT must be followed by some conditions!" ) if '[' in conditions : tree = [ [ ] ] for term in conditions : if term == '[' : tree . append ( [ ] ) elif term == ']' : subtree = tree . pop ( ) if not tree : raise FilterError ( "Unbalanced brackets, too many closing ']' in condition %r" % ( conditions_text , ) ) tree [ - 1 ] . append ( subtree ) else : tree [ - 1 ] . append ( term ) if len ( tree ) > 1 : raise FilterError ( "Unbalanced brackets, too many open '[' in condition %r" % ( conditions_text , ) ) conditions = tree [ 0 ] conditions = list ( conditions ) matcher = CompoundFilterAll ( ) if "OR" in conditions : root = CompoundFilterAny ( ) root . append ( matcher ) else : root = matcher for condition in conditions : if condition == "OR" : if not matcher : raise FilterError ( "Left-hand side of OR missing in %r!" % ( conditions_text , ) ) matcher = CompoundFilterAll ( ) root . append ( matcher ) elif isinstance ( condition , list ) : matcher . append ( self . parse ( condition ) ) else : matcher . append ( self . _create_filter ( condition ) ) if not matcher : raise FilterError ( "Right-hand side of OR missing in %r!" % ( conditions_text , ) ) return NegateFilter ( root ) if negate else root | Parse filter conditions . |
51,363 | def _flux_engine_data ( engine ) : data = stats . engine_data ( engine ) data [ "up_rate" ] = data [ "upload" ] [ 0 ] data [ "up_limit" ] = data [ "upload" ] [ 1 ] data [ "down_rate" ] = data [ "download" ] [ 0 ] data [ "down_limit" ] = data [ "download" ] [ 1 ] data [ "version" ] = data [ "versions" ] [ 0 ] views = data [ "views" ] del data [ "upload" ] del data [ "download" ] del data [ "versions" ] del data [ "views" ] return data , views | Return rTorrent data set for pushing to InfluxDB . |
51,364 | def run ( self ) : try : proxy = config_ini . engine . open ( ) self . LOG . info ( "Stats for %s - up %s, %s" % ( config_ini . engine . engine_id , fmt . human_duration ( proxy . system . time ( ) - config_ini . engine . startup , 0 , 2 , True ) . strip ( ) , proxy ) ) except ( error . LoggableError , xmlrpc . ERRORS ) , exc : self . LOG . warn ( str ( exc ) ) | Statistics logger job callback . |
51,365 | def _influxdb_url ( self ) : url = "{0}/db/{1}/series" . format ( self . influxdb . url . rstrip ( '/' ) , self . config . dbname ) if self . influxdb . user and self . influxdb . password : url += "?u={0}&p={1}" . format ( self . influxdb . user , self . influxdb . password ) return url | Return REST API URL to access time series . |
51,366 | def _push_data ( self ) : if not ( self . config . series or self . config . series_host ) : self . LOG . info ( "Misconfigured InfluxDB job, neither 'series' nor 'series_host' is set!" ) return fluxdata = [ ] if self . config . series : try : config_ini . engine . open ( ) data , views = _flux_engine_data ( config_ini . engine ) fluxdata . append ( dict ( name = self . config . series , columns = data . keys ( ) , points = [ data . values ( ) ] ) ) fluxdata . append ( dict ( name = self . config . series + '_views' , columns = views . keys ( ) , points = [ views . values ( ) ] ) ) except ( error . LoggableError , xmlrpc . ERRORS ) , exc : self . LOG . warn ( "InfluxDB stats: {0}" . format ( exc ) ) if not fluxdata : self . LOG . debug ( "InfluxDB stats: no data (previous errors?)" ) return fluxurl = self . _influxdb_url ( ) fluxjson = json . dumps ( fluxdata ) self . LOG . debug ( "POST to {0} with {1}" . format ( fluxurl . split ( '?' ) [ 0 ] , fluxjson ) ) try : requests . post ( fluxurl , data = fluxjson , timeout = self . influxdb . timeout ) except RequestException , exc : self . LOG . info ( "InfluxDB POST error: {0}" . format ( exc ) ) | Push stats data to InfluxDB . |
51,367 | def run ( self ) : from pyrocore import config try : config . engine . open ( ) items = [ ] self . run_filter ( items ) except ( error . LoggableError , xmlrpc . ERRORS ) as exc : self . LOG . warn ( str ( exc ) ) | Filter job callback . |
51,368 | def replace_fields ( meta , patterns ) : for pattern in patterns : try : field , regex , subst , _ = pattern . split ( pattern [ - 1 ] ) namespace = meta keypath = [ i . replace ( '\0' , '.' ) for i in field . replace ( '..' , '\0' ) . split ( '.' ) ] for key in keypath [ : - 1 ] : namespace = namespace [ key ] namespace [ keypath [ - 1 ] ] = re . sub ( regex , subst , namespace [ keypath [ - 1 ] ] ) except ( KeyError , IndexError , TypeError , ValueError ) as exc : raise error . UserError ( "Bad substitution '%s' (%s)!" % ( pattern , exc ) ) return meta | Replace patterns in fields . |
51,369 | def connect ( config_dir = None , optional_config_files = None , cron_cfg = "cron" ) : from pyrocore . scripts . base import ScriptBase from pyrocore . util import load_config ScriptBase . setup ( cron_cfg = cron_cfg ) load_config . ConfigLoader ( config_dir ) . load ( optional_config_files or [ ] ) from pyrocore import config config . engine . open ( ) return config . engine | Initialize everything for interactive use . |
51,370 | def setup ( cls , cron_cfg = "cron" ) : random . seed ( ) logging_cfg = cls . LOGGING_CFG if "%s" in logging_cfg : logging_cfg = logging_cfg % ( cron_cfg if "--cron" in sys . argv [ 1 : ] else "scripts" , ) logging_cfg = os . path . expanduser ( logging_cfg ) if os . path . exists ( logging_cfg ) : logging . HERE = os . path . dirname ( logging_cfg ) logging . config . fileConfig ( logging_cfg ) else : logging . basicConfig ( level = logging . INFO ) logging . getLogger ( ) . debug ( "Logging config read from '%s'" % logging_cfg ) | Set up the runtime environment . |
51,371 | def _get_pkg_meta ( self ) : logger = logging . getLogger ( 'pyrocore.scripts.base.version_info' ) pkg_info = None warnings = [ ] for info_ext , info_name in ( ( '.dist-info' , 'METADATA' ) , ( '.egg-info' , 'PKG-INFO' ) ) : try : pkg_path = os . path . join ( __file__ . split ( __name__ . replace ( '.' , os . sep ) ) [ 0 ] , __name__ . split ( "." ) [ 0 ] ) if os . path . exists ( pkg_path + info_ext ) : pkg_path += info_ext else : globbed_paths = glob . glob ( pkg_path + "-*-py%d.%d" % sys . version_info [ : 2 ] + info_ext ) if len ( globbed_paths ) == 1 : pkg_path = globbed_paths [ 0 ] elif globbed_paths : warnings . append ( "Found {} release-specific candidate versions in *{}" . format ( len ( globbed_paths ) , info_ext ) ) pkg_path = None else : globbed_paths = glob . glob ( pkg_path + "-*" + info_ext ) if len ( globbed_paths ) == 1 : pkg_path = globbed_paths [ 0 ] else : warnings . append ( "Found {} candidate versions in *{}" . format ( len ( globbed_paths ) , info_ext ) ) pkg_path = None if pkg_path : with open ( os . path . join ( pkg_path , info_name ) ) as handle : pkg_info = handle . read ( ) break except IOError : continue if not pkg_info : logger . warn ( "Software version cannot be determined! ({})" . format ( ', ' . join ( warnings ) ) ) return pkg_info or "Version: 0.0.0\n" | Try to find package metadata . |
51,372 | def add_bool_option ( self , * args , ** kwargs ) : dest = [ o for o in args if o . startswith ( "--" ) ] [ 0 ] . replace ( "--" , "" ) . replace ( "-" , "_" ) self . parser . add_option ( dest = dest , action = "store_true" , default = False , help = kwargs [ 'help' ] , * args ) | Add a boolean option . |
51,373 | def add_value_option ( self , * args , ** kwargs ) : kwargs [ 'metavar' ] = args [ - 1 ] if 'dest' not in kwargs : kwargs [ 'dest' ] = [ o for o in args if o . startswith ( "--" ) ] [ 0 ] . replace ( "--" , "" ) . replace ( "-" , "_" ) if 'default' in kwargs and kwargs [ 'default' ] : kwargs [ 'help' ] += " [%s]" % kwargs [ 'default' ] self . parser . add_option ( * args [ : - 1 ] , ** kwargs ) | Add a value option . |
51,374 | def handle_completion ( self ) : if len ( sys . argv ) > 1 and sys . argv [ 1 ] . startswith ( "--help-completion-" ) : handler = getattr ( self , sys . argv [ 1 ] [ 2 : ] . replace ( '-' , '_' ) , None ) if handler : print '\n' . join ( sorted ( handler ( ) ) ) self . STD_LOG_LEVEL = logging . DEBUG sys . exit ( error . EX_OK ) | Handle shell completion stuff . |
51,375 | def help_completion_options ( self ) : for opt in self . parser . option_list : for lopt in opt . _long_opts : yield lopt | Return options of this command . |
51,376 | def fatal ( self , msg , exc = None ) : if exc is not None : self . LOG . fatal ( "%s (%s)" % ( msg , exc ) ) if self . options . debug : return else : self . LOG . fatal ( msg ) sys . exit ( error . EX_SOFTWARE ) | Exit on a fatal error . |
51,377 | def run ( self ) : log_total = True try : try : self . get_options ( ) self . mainloop ( ) except error . LoggableError , exc : if self . options . debug : raise try : msg = str ( exc ) except UnicodeError : msg = unicode ( exc , "UTF-8" ) self . LOG . error ( msg ) sys . exit ( error . EX_SOFTWARE ) except KeyboardInterrupt , exc : if self . options . debug : raise sys . stderr . write ( "\n\nAborted by CTRL-C!\n" ) sys . stderr . flush ( ) sys . exit ( error . EX_TEMPFAIL ) except IOError , exc : if exc . errno == errno . EPIPE : sys . stderr . write ( "\n%s, exiting!\n" % exc ) sys . stderr . flush ( ) try : handlers = logging . _handlerList except AttributeError : pass else : for handler in handlers : try : handler . flush = lambda * _ : None except AttributeError : pass log_total = False sys . exit ( error . EX_IOERR ) else : raise finally : if log_total and self . options : running_time = time . time ( ) - self . startup self . LOG . log ( self . STD_LOG_LEVEL , "Total time: %.3f seconds." % running_time ) logging . shutdown ( ) if self . return_code : sys . exit ( self . return_code ) | The main program skeleton . |
51,378 | def add_options ( self ) : super ( ScriptBaseWithConfig , self ) . add_options ( ) self . add_value_option ( "--config-dir" , "DIR" , help = "configuration directory [{}]" . format ( os . environ . get ( 'PYRO_CONFIG_DIR' , self . CONFIG_DIR_DEFAULT ) ) ) self . add_value_option ( "--config-file" , "PATH" , action = "append" , default = [ ] , help = "additional config file(s) to read" ) self . add_value_option ( "-D" , "--define" , "KEY=VAL [-D ...]" , default = [ ] , action = "append" , dest = "defines" , help = "override configuration attributes" ) | Add configuration options . |
51,379 | def check_for_connection ( self ) : for idx , arg in enumerate ( self . args ) : if arg . startswith ( '@' ) : if arg [ 1 : ] not in config . connections : self . parser . error ( "Undefined connection '{}'!" . format ( arg [ 1 : ] ) ) config . scgi_url = config . connections [ arg [ 1 : ] ] self . LOG . debug ( "Switched to connection %s (%s)" , arg [ 1 : ] , config . scgi_url ) del self . args [ idx ] break | Scan arguments for a connection alias of the form @name and switch to it .
51,380 | def quit ( self ) : self . script . LOG . warn ( "Abort due to user choice!" ) sys . exit ( self . QUIT_RC ) | Exit the program due to user s choices . |
51,381 | def redirect ( req , _log = pymagic . get_lazy_logger ( "redirect" ) ) : log = req . environ . get ( "wsgilog.logger" , _log ) target = req . relative_url ( req . urlvars . to ) log . info ( "Redirecting '%s' to '%s'" % ( req . url , target ) ) return exc . HTTPMovedPermanently ( location = target ) | Redirect controller to emit a HTTP 301 . |
51,382 | def make_app ( httpd_config ) : htdocs_paths = [ os . path . realpath ( os . path . join ( config . config_dir , "htdocs" ) ) , os . path . join ( os . path . dirname ( config . __file__ ) , "data" , "htdocs" ) , ] return ( Router ( ) . add_route ( "/" , controller = redirect , to = "/static/index.html" ) . add_route ( "/favicon.ico" , controller = redirect , to = "/static/favicon.ico" ) . add_route ( "/static/{filepath:.+}" , controller = StaticFolders ( htdocs_paths ) ) . add_route ( "/json/{action}" , controller = JsonController ( ** httpd_config . json ) ) ) | Factory for the monitoring webapp . |
51,383 | def guarded ( self , func , * args , ** kwargs ) : try : return func ( * args , ** kwargs ) except ( EnvironmentError , error . LoggableError , xmlrpc . ERRORS ) as g_exc : if func . __name__ not in self . ERRORS_LOGGED : self . LOG . warn ( "While calling '%s': %s" % ( func . __name__ , g_exc ) ) self . ERRORS_LOGGED . add ( func . __name__ ) return None | Call a function return None on errors . |
51,384 | def json_engine ( self , req ) : try : return stats . engine_data ( config . engine ) except ( error . LoggableError , xmlrpc . ERRORS ) as torrent_exc : raise exc . HTTPInternalServerError ( str ( torrent_exc ) ) | Return torrent engine data . |
51,385 | def json_charts ( self , req ) : disk_used , disk_total , disk_detail = 0 , 0 , [ ] for disk_usage_path in self . cfg . disk_usage_path . split ( os . pathsep ) : disk_usage = self . guarded ( psutil . disk_usage , os . path . expanduser ( disk_usage_path . strip ( ) ) ) if disk_usage : disk_used += disk_usage . used disk_total += disk_usage . total disk_detail . append ( ( disk_usage . used , disk_usage . total ) ) data = dict ( engine = self . json_engine ( req ) , uptime = time . time ( ) - psutil . BOOT_TIME , fqdn = self . guarded ( socket . getfqdn ) , cpu_usage = self . guarded ( psutil . cpu_percent , 0 ) , ram_usage = self . guarded ( psutil . virtual_memory ) , swap_usage = self . guarded ( psutil . swap_memory ) , disk_usage = ( disk_used , disk_total , disk_detail ) if disk_total else None , disk_io = self . guarded ( psutil . disk_io_counters ) , net_io = self . guarded ( psutil . net_io_counters ) , ) return data | Return charting data . |
51,386 | def parse_route ( cls , template ) : regex = '' last_pos = 0 for match in cls . ROUTES_RE . finditer ( template ) : regex += re . escape ( template [ last_pos : match . start ( ) ] ) var_name = match . group ( 1 ) expr = match . group ( 2 ) or '[^/]+' expr = '(?P<%s>%s)' % ( var_name , expr ) regex += expr last_pos = match . end ( ) regex += re . escape ( template [ last_pos : ] ) regex = '^%s$' % regex return re . compile ( regex ) | Parse a route definition and return the compiled regex that matches it . |
51,387 | def add_route ( self , template , controller , ** kwargs ) : if isinstance ( controller , basestring ) : controller = pymagic . import_name ( controller ) self . routes . append ( ( self . parse_route ( template ) , controller , kwargs ) ) return self | Add a route definition |
51,388 | def _duration ( start , end ) : if start and end : if start > end : return None else : return end - start elif start : return time . time ( ) - start else : return None | Return time delta . |
51,389 | def _fmt_files ( filelist ) : depth = max ( i . path . count ( '/' ) for i in filelist ) pad = [ '\uFFFE' ] * depth base_indent = ' ' * 38 indent = 0 result = [ ] prev_path = pad sorted_files = sorted ( ( i . path . split ( '/' ) [ : - 1 ] + pad , i . path . rsplit ( '/' , 1 ) [ - 1 ] , i ) for i in filelist ) for path , name , fileinfo in sorted_files : path = path [ : depth ] if path != prev_path : common = min ( [ depth ] + [ idx for idx , ( dirname , prev_name ) in enumerate ( zip ( path , prev_path ) ) if dirname != prev_name ] ) while indent > common : indent -= 1 result . append ( "%s%s/" % ( base_indent , ' ' * indent ) ) for dirname in path [ common : ] : if dirname == '\uFFFE' : break result . append ( "%s%s\\ %s" % ( base_indent , ' ' * indent , dirname ) ) indent += 1 result . append ( " %s %s %s %s| %s" % ( { 0 : "off " , 1 : " " , 2 : "high" } . get ( fileinfo . prio , "????" ) , fmt . iso_datetime ( fileinfo . mtime ) , fmt . human_size ( fileinfo . size ) , ' ' * indent , name , ) ) prev_path = path while indent > 0 : indent -= 1 result . append ( "%s%s/" % ( base_indent , ' ' * indent ) ) result . append ( "%s= %d file(s)" % ( base_indent , len ( filelist ) ) ) return '\n' . join ( result ) | Produce a file listing . |
51,390 | def detect_traits ( item ) : return traits . detect_traits ( name = item . name , alias = item . alias , filetype = ( list ( item . fetch ( "kind_51" ) ) or [ None ] ) . pop ( ) , ) | Build traits list from attributes of the passed item . Currently kind_51 name and alias are considered . |
51,391 | def add_manifold_attribute ( cls , name ) : if name . startswith ( "custom_" ) : try : return FieldDefinition . FIELDS [ name ] except KeyError : field = OnDemandField ( fmt . to_unicode , name , "custom attribute %r" % name . split ( '_' , 1 ) [ 1 ] , matcher = matching . PatternFilter ) setattr ( cls , name , field ) return field elif name . startswith ( "kind_" ) and name [ 5 : ] . isdigit ( ) : try : return FieldDefinition . FIELDS [ name ] except KeyError : limit = int ( name [ 5 : ] . lstrip ( '0' ) or '0' , 10 ) if limit > 100 : raise error . UserError ( "kind_N: N > 100 in %r" % name ) field = OnDemandField ( set , name , "kinds of files that make up more than %d%% of this item's size" % limit , matcher = matching . TaggedAsFilter , formatter = _fmt_tags , engine_name = "kind_%d" % limit ) setattr ( cls , name , field ) return field | Register a manifold engine attribute . |
51,392 | def add_custom_fields ( cls , * args , ** kw ) : for factory in config . custom_field_factories : for field in factory ( ) : setattr ( cls , field . name , field ) | Add any custom fields defined in the configuration . |
51,393 | def _fetch_items ( self ) : if self . _items is None : self . _items = list ( self . engine . items ( self ) ) return self . _items | Fetch engine items once and cache them in the _items attribute .
51,394 | def _check_hash_view ( self ) : infohash = None if self . viewname . startswith ( '#' ) : infohash = self . viewname [ 1 : ] elif len ( self . viewname ) == 40 : try : int ( self . viewname , 16 ) except ( TypeError , ValueError ) : pass else : infohash = self . viewname return infohash | Return infohash if view name refers to a single item else None . |
51,395 | def size ( self ) : if self . _check_hash_view ( ) : return 1 else : return self . engine . open ( ) . view . size ( xmlrpc . NOHASH , self . viewname ) | Total unfiltered size of view . |
51,396 | def group_by ( self , fields , items = None ) : result = defaultdict ( list ) if items is None : items = self . items ( ) try : key = operator . attrgetter ( fields + '' ) except TypeError : def key ( obj , names = tuple ( fields ) ) : 'Helper to return group key tuple' return tuple ( getattr ( obj , x ) for x in names ) for item in items : result [ key ( item ) ] . append ( item ) return result | Returns a dict of lists of items grouped by the given fields . |
51,397 | def _set_mappings ( self ) : try : self . _versions = ( self . system . client_version ( ) , self . system . library_version ( ) , ) self . _version_info = tuple ( int ( i ) for i in self . _versions [ 0 ] . split ( '.' ) ) self . _use_deprecated = self . _version_info < ( 0 , 8 , 7 ) self . _mapping = self . _mapping . copy ( ) for key , val in sorted ( i for i in vars ( config ) . items ( ) if i [ 0 ] . startswith ( "xmlrpc_" ) ) : map_version = tuple ( int ( i ) for i in key . split ( '_' ) [ 1 : ] ) if map_version <= self . _version_info : if config . debug : self . LOG . debug ( "MAPPING for %r added: %r" % ( map_version , val ) ) self . _mapping . update ( val ) self . _fix_mappings ( ) except ERRORS as exc : raise error . LoggableError ( "Can't connect to %s (%s)" % ( self . _url , exc ) ) return self . _versions , self . _version_info | Set command mappings according to rTorrent version . |
51,398 | def _fix_mappings ( self ) : self . _mapping . update ( ( key + '=' , val + '=' ) for key , val in self . _mapping . items ( ) if not key . endswith ( '=' ) ) if config . debug : self . LOG . debug ( "CMD MAPPINGS ARE: %r" % ( self . _mapping , ) ) | Add computed stuff to mappings . |
51,399 | def _map_call ( self , cmd ) : if config . debug and cmd != self . _mapping . get ( cmd , cmd ) : self . LOG . debug ( "MAP %s ==> %s" % ( cmd , self . _mapping [ cmd ] ) ) cmd = self . _mapping . get ( cmd , cmd ) if not self . _use_deprecated and any ( cmd . startswith ( i ) for i in ( "d.get_" , "f.get_" , "p.get_" , "t.get_" ) ) : cmd = cmd [ : 2 ] + cmd [ 6 : ] return cmd | Map old to new command names . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.