Columns: idx (int64, 0 to 63k), question (string, lengths 53 to 5.28k), target (string, lengths 5 to 805)
2,100
def defaults(values={}):
    if values:
        return values
    save_types = basestring, int, float, tuple, list, dict, type(None)
    for k, v in globals().items():
        if isinstance(v, save_types) and not k.startswith("_"):
            values[k] = v
    return values

Returns a once-assembled dict of this module's storable attributes.
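A short sketch of the caching trick at work (Python 2 code, note basestring): the mutable default argument is the cache, so every call after the first returns the same dict object, assuming the module defines at least one storable global.

# First call walks globals() and fills the shared default dict; later
# calls hit the `if values` early return and hand back the same object.
first = defaults()
assert defaults() is first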
2,101
def fix_pdf(pdf_file, destination):
    tmp = tempfile.NamedTemporaryFile()
    with open(tmp.name, 'wb') as output:
        with open(pdf_file, "rb") as fh:
            for line in fh:
                output.write(line)
                if b'%%EOF' in line:
                    break
    shutil.copy(tmp.name, destination)
Fix malformed pdf files when data are present after %%EOF
2,102
def tearpage_backend(filename, teared_pages=None):
    if teared_pages is None:
        teared_pages = [0]
    with tempfile.NamedTemporaryFile() as tmp:
        shutil.copy(filename, tmp.name)
        try:
            input_file = PdfFileReader(open(tmp.name, 'rb'))
        except PdfReadError:
            fix_pdf(filename, tmp.name)
            input_file = PdfFileReader(open(tmp.name, 'rb'))
        num_pages = input_file.getNumPages()
        output_file = PdfFileWriter()
        for i in range(num_pages):
            if i in teared_pages:
                continue
            output_file.addPage(input_file.getPage(i))
        tmp.close()
        outputStream = open(filename, "wb")
        output_file.write(outputStream)

Copy filename to a tempfile and write pages back to filename, except the torn ones.
2,103
def tearpage_needed(bibtex):
    for publisher in BAD_JOURNALS:
        if publisher in bibtex.get("journal", "").lower():
            return BAD_JOURNALS[publisher]
    return []

Check whether a given paper needs some pages to be torn out.
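For illustration, a hypothetical shape of the BAD_JOURNALS constant this lookup relies on: a publisher substring mapped to the list of page indices to tear. The key and value here are invented.

BAD_JOURNALS = {'ieee': [0]}  # hypothetical stand-in for the module constant

assert tearpage_needed({'journal': 'IEEE Transactions'}) == [0]
assert tearpage_needed({'title': 'no journal field'}) == []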
2,104
def tearpage(filename, bibtex=None, force=None):
    pages_to_tear = []
    if force is not None:
        pages_to_tear = force
    elif bibtex is not None:
        pages_to_tear = tearpage_needed(bibtex)
    if len(pages_to_tear) > 0:
        tearpage_backend(filename, teared_pages=pages_to_tear)
        return True
    return False

Tear some pages of the file if needed.
2,105
def edit(filename, connection=None):
    c = connection or connect()
    rev = c.ls(filename)
    if rev:
        rev[0].edit()
Checks out a file into the default changelist
2,106
def sync(filename, connection=None):
    c = connection or connect()
    rev = c.ls(filename)
    if rev:
        rev[0].sync()
Syncs a file
2,107
def open(filename, connection=None):
    c = connection or connect()
    res = c.ls(filename)
    if res and res[0].revision:
        res[0].edit()
    else:
        c.add(filename)

Edits or adds a filename, ensuring the file is in Perforce and editable.
2,108
def is_valid(arxiv_id):
    match = REGEX.match(arxiv_id)
    return (match is not None) and (match.group(0) == arxiv_id)

Check that a given arXiv ID is a valid one.
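A quick check of the anchoring logic, with a stand-in pattern (the real REGEX is a module-level constant elsewhere in the project):

import re

# Stand-in for the module's REGEX constant, new-style arXiv IDs only.
REGEX = re.compile(r'\d{4}\.\d{4,5}(v\d+)?')

assert is_valid('1605.08386v1')
assert not is_valid('1605.08386v1 trailing')  # group(0) must equal the whole input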
2,109
def get_bibtex(arxiv_id):
    try:
        bibtex = arxiv2bib.arxiv2bib([arxiv_id])
    except HTTPError:
        bibtex = []
    for bib in bibtex:
        if isinstance(bib, arxiv2bib.ReferenceErrorInfo):
            continue
        else:
            return bib.bibtex()
    return None

Get a BibTeX entry for a given arXiv ID.
2,110
def extract_from_text(text):
    return tools.remove_duplicates(
        [re.sub("arxiv:", "", i[0], flags=re.IGNORECASE)
         for i in REGEX.findall(text)
         if i[0] != ''])

Extract arXiv IDs from a text.
2,111
def from_doi(doi):
    try:
        request = requests.get("http://export.arxiv.org/api/query",
                               params={"search_query": "doi:%s" % (doi,),
                                       "max_results": 1})
        request.raise_for_status()
    except RequestException:
        return None
    root = xml.etree.ElementTree.fromstring(request.content)
    for entry in root.iter("{http://www.w3.org/2005/Atom}entry"):
        arxiv_id = entry.find("{http://www.w3.org/2005/Atom}id").text
        return arxiv_id.split("/")[-1]
    return None

Get the arXiv eprint ID for a given DOI.
2,112
def get_sources(arxiv_id):
    try:
        request = requests.get(ARXIV_EPRINT_URL.format(arxiv_id=arxiv_id))
        request.raise_for_status()
        file_object = io.BytesIO(request.content)
        return tarfile.open(fileobj=file_object)
    except (RequestException, AssertionError, tarfile.TarError):
        return None

Download sources on arXiv for a given preprint.
2,113
def extractDates(inp, tz=None, now=None):
    service = DateService(tz=tz, now=now)
    return service.extractDates(inp)

Extract semantic date information from an input string. This is a convenience method which would only be used if you'd rather not initialize a DateService object.
2,114
def extractTimes(self, inp):
    def handleMatch(time):
        relative = False
        if not time:
            return None
        elif time.group(1) == 'morning':
            h = 8
            m = 0
        elif time.group(1) == 'afternoon':
            h = 12
            m = 0
        elif time.group(1) == 'evening':
            h = 19
            m = 0
        elif time.group(4) and time.group(5):
            h, m = 0, 0
            converter = NumberService()
            try:
                diff = converter.parse(time.group(4))
            except:
                return None
            if time.group(5) == 'hours':
                h += diff
            else:
                m += diff
            if time.group(6):
                converter = NumberService()
                try:
                    diff = converter.parse(time.group(7))
                except:
                    return None
                if time.group(8) == 'hours':
                    h += diff
                else:
                    m += diff
            relative = True
        else:
            t = time.group(2)
            h, m = int(t.split(':')[0]) % 12, int(t.split(':')[1])
            try:
                if time.group(3) == 'pm':
                    h += 12
            except IndexError:
                pass
        if relative:
            return self.now + datetime.timedelta(hours=h, minutes=m)
        else:
            return datetime.datetime(self.now.year, self.now.month,
                                     self.now.day, h, m)

    inp = self._preprocess(inp)
    return [handleMatch(time) for time in self._timeRegex.finditer(inp)]

Extracts time-related information from an input string. Ignores any information related to the specific date, focusing on the time of day.
2,115
def extractDates(self, inp):
    def merge(param):
        day, time = param
        if not (day or time):
            return None
        if not day:
            return time
        if not time:
            return day
        return datetime.datetime(day.year, day.month, day.day,
                                 time.hour, time.minute)

    days = self.extractDays(inp)
    times = self.extractTimes(inp)
    return map(merge, zip_longest(days, times, fillvalue=None))

Extract semantic date information from an input string. In effect runs both parseDay and parseTime on the input string and merges the results to produce a comprehensive datetime object.
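A worked sketch of the merge step: zip_longest pads the shorter list with None, so an unmatched day or time passes through unmerged.

from itertools import zip_longest
import datetime

days = [datetime.datetime(2024, 1, 2)]
times = [datetime.datetime(2024, 1, 1, 8, 0),
         datetime.datetime(2024, 1, 1, 19, 0)]
pairs = list(zip_longest(days, times, fillvalue=None))
# pairs is [(Jan 2, 08:00), (None, 19:00)]: the first pair merges into
# 2024-01-02 08:00, the second has no day, so the bare time is kept.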
2,116
def extractDate(self, inp):
    dates = self.extractDates(inp)
    for date in dates:
        return date
    return None

Returns the first date found in the input string, or None if not found.
2,117
def convertDay(self, day, prefix="", weekday=False):
    def sameDay(d1, d2):
        d = d1.day == d2.day
        m = d1.month == d2.month
        y = d1.year == d2.year
        return d and m and y

    tom = self.now + datetime.timedelta(days=1)
    if sameDay(day, self.now):
        return "today"
    elif sameDay(day, tom):
        return "tomorrow"
    if weekday:
        dayString = day.strftime("%A, %B %d")
    else:
        dayString = day.strftime("%B %d")
    if not int(dayString[-2]):
        dayString = dayString[:-2] + dayString[-1]
    return prefix + " " + dayString

Convert a datetime object representing a day into a human-ready string that can be read, spoken aloud, etc.
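The trailing index trick strips the leading zero that strftime puts on single-digit days; a worked check:

import datetime

s = datetime.datetime(2024, 7, 5).strftime("%B %d")   # "July 05"
# int(s[-2]) is 0, which is falsy, so the zero is dropped:
assert s[:-2] + s[-1] == "July 5"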
2,118
def convertTime(self, time):
    m_format = ""
    if time.minute:
        m_format = ":%M"
    timeString = time.strftime("%I" + m_format + " %p")
    if not int(timeString[0]):
        timeString = timeString[1:]
    return timeString

Convert a datetime object representing a time into a human-ready string that can be read, spoken aloud, etc.
2,119
def convertDate(self, date, prefix="", weekday=False):
    dayString = self.convertDay(date, prefix=prefix, weekday=weekday)
    timeString = self.convertTime(date)
    return dayString + " at " + timeString

Convert a datetime object into a human-ready string that can be read, spoken aloud, etc. In effect runs both convertDay and convertTime on the input, merging the results.
2,120
def _move(self):
    newpath = self.action['newpath']
    try:
        self.fs.move(self.fp, newpath)
    except OSError:
        raise tornado.web.HTTPError(400)
    return newpath

Called during a PUT request where the action specifies a move operation. Returns resource URI of the destination file.
2,121
def _copy(self):
    copypath = self.action['copypath']
    try:
        self.fs.copy(self.fp, copypath)
    except OSError:
        raise tornado.web.HTTPError(400)
    return copypath

Called during a PUT request where the action specifies a copy operation. Returns resource URI of the new file.
2,122
def _rename(self):
    newname = self.action['newname']
    try:
        newpath = self.fs.rename(self.fp, newname)
    except OSError:
        raise tornado.web.HTTPError(400)
    return newpath

Called during a PUT request where the action specifies a rename operation. Returns resource URI of the renamed file.
2,123
def get(self):
    res = self.fs.get_filesystem_details()
    res = res.to_dict()
    self.write(res)

Return details for the filesystem, including configured volumes.
2,124
def put(self):
    self.fp = self.get_body_argument('filepath')
    self.action = self.get_body_argument('action')
    try:
        ptype = self.fs.get_type_from_path(self.fp)
    except OSError:
        raise tornado.web.HTTPError(404)
    if ptype == 'directory':
        self.handler_name = 'filesystem:directories-details'
    else:
        self.handler_name = 'filesystem:files-details'
    if self.action['action'] == 'move':
        newpath = self._move()
        self.write({'filepath': newpath})
    elif self.action['action'] == 'copy':
        newpath = self._copy()
        self.write({'filepath': newpath})
    elif self.action['action'] == 'rename':
        newpath = self._rename()
        self.write({'filepath': newpath})
    else:
        raise tornado.web.HTTPError(400)

Provides move, copy, and rename functionality. An action must be specified when calling this method.
2,125
def post(self, *args):
    filepath = self.get_body_argument('filepath')
    if not self.fs.exists(filepath):
        raise tornado.web.HTTPError(404)
    Filewatcher.add_directory_to_watch(filepath)
    self.write({'msg': 'Watcher added for {}'.format(filepath)})

Start a new filewatcher at the specified path.
2,126
def delete(self, filepath):
    Filewatcher.remove_directory_to_watch(filepath)
    self.write({'msg': 'Watcher deleted for {}'.format(filepath)})

Stop and delete the specified filewatcher.
2,127
def get(self, filepath):
    try:
        res = self.fs.get_file_details(filepath)
        res = res.to_dict()
        self.write(res)
    except OSError:
        raise tornado.web.HTTPError(404)

Get file details for the specified file.
2,128
def put(self, filepath):
    action = self.get_body_argument('action')
    if action['action'] == 'update_group':
        newgrp = action['group']
        try:
            self.fs.update_group(filepath, newgrp)
            self.write({'msg': 'Updated group for {}'.format(filepath)})
        except OSError:
            raise tornado.web.HTTPError(404)
    elif action['action'] == 'update_permissions':
        newperms = action['permissions']
        try:
            self.fs.update_permissions(filepath, newperms)
            self.write({'msg': 'Updated permissions for {}'.format(filepath)})
        except OSError:
            raise tornado.web.HTTPError(404)
    else:
        raise tornado.web.HTTPError(400)

Change the group or permissions of the specified file. An action must be specified when calling this method.
2,129
def delete(self, filepath):
    try:
        self.fs.delete(filepath)
        self.write({'msg': 'File deleted at {}'.format(filepath)})
    except OSError:
        raise tornado.web.HTTPError(404)

Delete the specified file.
2,130
def post(self):
    filepath = self.get_body_argument('filepath')
    try:
        self.fs.create_directory(filepath)
        encoded_filepath = tornado.escape.url_escape(filepath, plus=True)
        resource_uri = self.reverse_url('filesystem:directories-details',
                                        encoded_filepath)
        self.write({'uri': resource_uri})
    except OSError:
        raise tornado.web.HTTPError(404)

Create a new directory at the specified path.
2,131
def get(self, filepath):
    exists = self.fs.exists(filepath)
    if exists:
        mime = magic.Magic(mime=True)
        mime_type = mime.from_file(filepath)
        if mime_type in self.unsupported_types:
            self.set_status(204)
            return
        else:
            contents = self.fs.read_file(filepath)
            self.write({'filepath': filepath, 'contents': contents})
    else:
        raise tornado.web.HTTPError(404)

Get the contents of the specified file.
2,132
def post(self, filepath):
    try:
        content = self.get_body_argument('content')
        self.fs.write_file(filepath, content)
        self.write({'msg': 'Updated file at {}'.format(filepath)})
    except OSError:
        raise tornado.web.HTTPError(404)

Write the given contents to the specified file. This is not an append; all file contents will be replaced by the contents given.
2,133
def get_content(self, start=None, end=None):
    with open(self.filepath, "rb") as file:
        if start is not None:
            file.seek(start)
        if end is not None:
            remaining = end - (start or 0)
        else:
            remaining = None
        while True:
            chunk_size = 64 * 1024
            if remaining is not None and remaining < chunk_size:
                chunk_size = remaining
            chunk = file.read(chunk_size)
            if chunk:
                if remaining is not None:
                    remaining -= len(chunk)
                yield chunk
            else:
                if remaining is not None:
                    assert remaining == 0
                return

Retrieve the content of the requested resource, which is located at the given absolute path. This method should either return a byte string or an iterator of byte strings. The latter is preferred for large files, as it helps reduce memory fragmentation.
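A consumption sketch. Since get_content is listed here as a plain function, a duck-typed object exposing filepath works as self; the handler class it belongs to is an assumption.

import os
import tempfile

class _Handler(object):
    def __init__(self, filepath):
        self.filepath = filepath

path = os.path.join(tempfile.gettempdir(), 'example.bin')
with open(path, 'wb') as f:
    f.write(b'x' * 200000)

# Streams the first 100000 bytes in 64 KiB chunks without loading the file.
total = sum(len(c) for c in get_content(_Handler(path), start=0, end=100000))
assert total == 100000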
2,134
def set_headers(self):
    self.set_header("Accept-Ranges", "bytes")
    content_type = self.get_content_type()
    if content_type:
        self.set_header("Content-Type", content_type)

Sets the content headers on the response.
2,135
def __deactivate_shared_objects(self, plugin, *args, **kwargs):
    shared_objects = self.get()
    for shared_object in shared_objects.keys():
        self.unregister(shared_object)

Callback which gets executed if the signal plugin_deactivate_post was sent by the plugin.
2,136
def get(self, name=None):
    return self.app.shared_objects.get(name, self.plugin)

Returns requested shared objects which were registered by the current plugin.
2,137
def get(self, name=None, plugin=None):
    if plugin is not None:
        if name is None:
            shared_objects_list = {}
            for key in self._shared_objects.keys():
                if self._shared_objects[key].plugin == plugin:
                    shared_objects_list[key] = self._shared_objects[key]
            return shared_objects_list
        else:
            if name in self._shared_objects.keys():
                if self._shared_objects[name].plugin == plugin:
                    return self._shared_objects[name]
                else:
                    return None
            else:
                return None
    else:
        if name is None:
            return self._shared_objects
        else:
            if name in self._shared_objects.keys():
                return self._shared_objects[name]
            else:
                return None

Returns requested shared objects.
2,138
def unregister(self, shared_object):
    if shared_object not in self._shared_objects.keys():
        self.log.warning("Can not unregister shared object %s" % shared_object)
    else:
        del self._shared_objects[shared_object]
        self.log.debug("Shared object %s got unregistered" % shared_object)

Unregisters an existing shared object so that this shared object is no longer available.
2,139
def list_signals(self):
    print("Signal list")
    print("***********\n")
    for key, signal in self.app.signals.signals.items():
        print("%s (%s)\n %s\n" % (signal.name, signal.plugin.name,
                                  signal.description))

Prints a list of all registered signals, including description and plugin name.
2,140
def list_receivers(self):
    print("Receiver list")
    print("*************\n")
    for key, receiver in self.app.signals.receivers.items():
        print("%s <-- %s (%s):\n %s\n" % (receiver.name, receiver.signal,
                                          receiver.plugin.name,
                                          receiver.description))

Prints a list of all registered receivers, including signal, plugin name, and description.
2,141
def toxcmd_main(args=None):
    usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..."
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser(description=inspect.getdoc(toxcmd_main),
                                     formatter_class=FORMATTER_CLASS)
    common_parser = parser.add_argument_group("Common options")
    common_parser.add_argument("--version", action="version", version=VERSION)
    subparsers = parser.add_subparsers(help="commands")
    for command in discover_commands():
        command_parser = subparsers.add_parser(
            command.name,
            usage=command.usage,
            description=command.description,
            help=command.short_description,
            formatter_class=FORMATTER_CLASS)
        command_parser.set_defaults(func=command)
        command.setup_parser(command_parser)
        command.parser = command_parser
    options = parser.parse_args(args)
    command_function = options.func
    return command_function(options)

Command util with subcommands for tox environments.
2,142
def discharge(ctx, id, caveat, key, checker, locator):
    caveat_id_prefix = []
    if caveat is None:
        caveat = id
    else:
        caveat_id_prefix = id
    cav_info = decode_caveat(key, caveat)
    cav_info = ThirdPartyCaveatInfo(
        condition=cav_info.condition,
        first_party_public_key=cav_info.first_party_public_key,
        third_party_key_pair=cav_info.third_party_key_pair,
        root_key=cav_info.root_key,
        caveat=cav_info.caveat,
        version=cav_info.version,
        id=id,
        namespace=cav_info.namespace)
    try:
        cond, arg = checkers.parse_caveat(cav_info.condition)
    except ValueError as exc:
        raise VerificationError(exc.args[0])
    if cond == checkers.COND_NEED_DECLARED:
        cav_info = cav_info._replace(condition=arg)
        caveats = _check_need_declared(ctx, cav_info, checker)
    else:
        caveats = checker.check_third_party_caveat(ctx, cav_info)
    m = Macaroon(cav_info.root_key, id, '', cav_info.version,
                 cav_info.namespace)
    m._caveat_id_prefix = caveat_id_prefix
    if caveats is not None:
        for cav in caveats:
            m.add_caveat(cav, key, locator)
    return m

Creates a macaroon to discharge a third-party caveat.
2,143
def local_third_party_caveat(key, version):
    if version >= VERSION_2:
        loc = 'local {} {}'.format(version, key)
    else:
        loc = 'local {}'.format(key)
    return checkers.Caveat(location=loc, condition='')

Returns a third-party caveat that, when added to a macaroon with add_caveat, results in a caveat with the location "local", encrypted with the given PublicKey. This can be automatically discharged by discharge_all when passed a local key.
2,144
def deserialize_namespace(data):
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    kvs = data.split()
    uri_to_prefix = {}
    for kv in kvs:
        i = kv.rfind(':')
        if i == -1:
            raise ValueError('no colon in namespace '
                             'field {}'.format(repr(kv)))
        uri, prefix = kv[0:i], kv[i + 1:]
        if not is_valid_schema_uri(uri):
            raise ValueError('invalid URI {} in namespace '
                             'field {}'.format(repr(uri), repr(kv)))
        if not is_valid_prefix(prefix):
            raise ValueError('invalid prefix {} in namespace field'
                             ' {}'.format(repr(prefix), repr(kv)))
        if uri in uri_to_prefix:
            raise ValueError('duplicate URI {} in '
                             'namespace {}'.format(repr(uri), repr(data)))
        uri_to_prefix[uri] = prefix
    return Namespace(uri_to_prefix)

Deserialize a Namespace object.
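The wire format is whitespace-separated uri:prefix pairs, split on the rightmost colon. A hypothetical example, assuming both URIs pass is_valid_schema_uri:

ns = deserialize_namespace('std:std http://example.com/v1:ex')
# Internal mapping: {'std': 'std', 'http://example.com/v1': 'ex'}.
# The rightmost-colon split keeps the scheme's colon inside the URI part.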
2,145
def serialize_text(self):
    if self._uri_to_prefix is None or len(self._uri_to_prefix) == 0:
        return b''
    od = collections.OrderedDict(sorted(self._uri_to_prefix.items()))
    data = []
    for uri in od:
        data.append(uri + ':' + od[uri])
    return ' '.join(data).encode('utf-8')

Returns a serialized form of the Namespace.
2,146
def register(self, uri, prefix):
    if not is_valid_schema_uri(uri):
        raise KeyError(
            'cannot register invalid URI {} (prefix {})'.format(uri, prefix))
    if not is_valid_prefix(prefix):
        # The original message used Go-style %q verbs with str.format, which
        # never interpolated; fixed to {} placeholders.
        raise ValueError(
            'cannot register invalid prefix {} for URI {}'.format(prefix, uri))
    if self._uri_to_prefix.get(uri) is None:
        self._uri_to_prefix[uri] = prefix

Registers the given URI and associates it with the given prefix.
2,147
def with_value(self, key, val):
    new_dict = dict(self._dict)
    new_dict[key] = val
    return AuthContext(new_dict)

Return a copy of the AuthContext object with the given key and value added.
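A small sketch of the copy-on-write contract, with a minimal stand-in for the rest of AuthContext:

class AuthContext(object):
    # Stand-in: only the pieces with_value relies on.
    def __init__(self, d=None):
        self._dict = dict(d or {})

    def with_value(self, key, val):
        new_dict = dict(self._dict)
        new_dict[key] = val
        return AuthContext(new_dict)

base = AuthContext({'user': 'bob'})
derived = base.with_value('op', 'read')
assert 'op' not in base._dict  # the original context is untouched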
2,148
def make_pattern(self, pattern, listsep=','):
    if self is Cardinality.one:
        return pattern
    elif self is Cardinality.zero_or_one:
        return self.schema % pattern
    else:
        return self.schema % (pattern, listsep, pattern)

Make pattern for a data type with the specified cardinality.
2,149
def with_cardinality(cls, cardinality, converter, pattern=None, listsep=','):
    if cardinality is Cardinality.one:
        return converter
    builder_func = getattr(cls, "with_%s" % cardinality.name)
    if cardinality is Cardinality.zero_or_one:
        return builder_func(converter, pattern)
    else:
        return builder_func(converter, pattern, listsep=listsep)

Creates a type converter for the specified cardinality by using the type converter for T.
2,150
def with_zero_or_one(cls, converter, pattern=None):
    cardinality = Cardinality.zero_or_one
    if not pattern:
        pattern = getattr(converter, "pattern", cls.default_pattern)
    optional_pattern = cardinality.make_pattern(pattern)
    group_count = cardinality.compute_group_count(pattern)

    def convert_optional(text, m=None):
        if text:
            text = text.strip()
        if not text:
            return None
        return converter(text)

    convert_optional.pattern = optional_pattern
    convert_optional.regex_group_count = group_count
    return convert_optional

Creates a type converter for T occurring 0..1 times by using the type converter for one item of T.
2,151
def server_static(filepath):
    mimetype = "image/svg+xml" if filepath.endswith(".svg") else "auto"
    return bottle.static_file(filepath, root=conf.StaticPath,
                              mimetype=mimetype)

Handler for serving static files.
2,152
def mouse(table, day=None):
    where = (("day", day),) if day else ()
    events = db.fetch(table, where=where, order="day")
    for e in events:
        e["dt"] = datetime.datetime.fromtimestamp(e["stamp"])
    stats, positions, events = stats_mouse(events, table)
    days, input = db.fetch("counts", order="day", type=table), "mouse"
    return bottle.template("heatmap.tpl", locals(), conf=conf)

Handler for showing mouse statistics for specified type and day.
2,153
def keyboard(table, day=None):
    cols, group = "realkey AS key, COUNT(*) AS count", "realkey"
    where = (("day", day),) if day else ()
    counts_display = counts = db.fetch(table, cols, where, group, "count DESC")
    if "combos" == table:
        counts_display = db.fetch(table, "key, COUNT(*) AS count", where,
                                  "key", "count DESC")
    events = db.fetch(table, where=where, order="stamp")
    for e in events:
        e["dt"] = datetime.datetime.fromtimestamp(e["stamp"])
    stats, collatedevents = stats_keyboard(events, table)
    days, input = db.fetch("counts", order="day", type=table), "keyboard"
    return bottle.template("heatmap.tpl", locals(), conf=conf)

Handler for showing the keyboard statistics page.
2,154
def inputindex(input):
    stats = {}
    countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last"
    tables = ("moves", "clicks", "scrolls") if "mouse" == input else ("keys", "combos")
    for table in tables:
        stats[table] = db.fetchone("counts", countminmax, type=table)
        stats[table]["days"] = db.fetch("counts", order="day DESC", type=table)
    return bottle.template("input.tpl", locals(), conf=conf)

Handler for showing keyboard or mouse page with day and total links.
2,155
def index():
    stats = dict((k, {"count": 0}) for k, tt in conf.InputTables)
    countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last"
    for input, table in [(x, t) for x, tt in conf.InputTables for t in tt]:
        row = db.fetchone("counts", countminmax, type=table)
        if not row["count"]:
            continue
        stats[input]["count"] += row["count"]
        for func, key in [(min, "first"), (max, "last")]:
            stats[input][key] = (row[key] if key not in stats[input]
                                 else func(stats[input][key], row[key]))
    return bottle.template("index.tpl", locals(), conf=conf)

Handler for showing the GUI index page.
2,156
def stats_keyboard(events, table):
    if len(events) < 2:
        return [], []
    deltas, prev_dt = [], None
    sessions, session = [], None
    UNBROKEN_DELTA = datetime.timedelta(seconds=conf.KeyboardSessionMaxDelta)
    blank = collections.defaultdict(lambda: collections.defaultdict(int))
    collated = [blank.copy()]
    for e in events:
        if prev_dt:
            if (prev_dt.second != e["dt"].second
                    or prev_dt.minute != e["dt"].minute
                    or prev_dt.hour != e["dt"].hour):
                collated.append(blank.copy())
            delta = e["dt"] - prev_dt
            deltas.append(delta)
            if delta > UNBROKEN_DELTA:
                session = None
            else:
                if not session:
                    session = []
                    sessions.append(session)
                session.append(delta)
        collated[-1]["dt"] = e["dt"]
        collated[-1]["keys"][e["realkey"]] += 1
        prev_dt = e["dt"]
    longest_session = max(sessions + [[datetime.timedelta()]],
                          key=lambda x: sum(x, datetime.timedelta()))
    stats = [
        ("Average interval between combos",
         sum(deltas, datetime.timedelta()) / len(deltas)),
    ] if "combos" == table else [
        ("Keys per hour",
         int(3600 * len(events) / timedelta_seconds(events[-1]["dt"]
                                                    - events[0]["dt"]))),
        ("Average interval between keys",
         sum(deltas, datetime.timedelta()) / len(deltas)),
        ("Typing sessions (key interval < %ss)" % UNBROKEN_DELTA.seconds,
         len(sessions)),
        ("Average keys in session",
         sum(len(x) + 1 for x in sessions) / len(sessions)),
        ("Average session duration",
         sum((sum(x, datetime.timedelta()) for x in sessions),
             datetime.timedelta()) / len(sessions)),
        ("Longest session duration",
         sum(longest_session, datetime.timedelta())),
        ("Keys in longest session", len(longest_session) + 1),
        ("Most keys in session", max(len(x) + 1 for x in sessions)),
    ]
    return stats, collated

Return statistics and collated events for keyboard events.
2,157
def timedelta_seconds(timedelta):
    return (timedelta.total_seconds() if hasattr(timedelta, "total_seconds")
            else timedelta.days * 24 * 3600 + timedelta.seconds
                 + timedelta.microseconds / 1000000.)

Returns the total timedelta duration in seconds.
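A worked check of the fallback arithmetic (the branch for timedelta objects without total_seconds, i.e. Python < 2.7):

import datetime

td = datetime.timedelta(days=1, seconds=30, microseconds=500000)
manual = td.days * 24 * 3600 + td.seconds + td.microseconds / 1000000.
assert manual == td.total_seconds() == 86430.5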
2,158
def init():
    global app
    if app:
        return app
    conf.init(), db.init(conf.DbPath, conf.DbStatements)
    bottle.TEMPLATE_PATH.insert(0, conf.TemplatePath)
    app = bottle.default_app()
    bottle.BaseTemplate.defaults.update(get_url=app.get_url)
    return app

Initialize configuration and web application.
2,159
def start():
    global app
    bottle.run(app, host=conf.WebHost, port=conf.WebPort,
               debug=conf.WebAutoReload, reloader=conf.WebAutoReload,
               quiet=conf.WebQuiet)

Starts the web server.
2,160
def download(url, proxies=None):
    if proxies is None:
        proxies = [""]
    for proxy in proxies:
        if proxy == "":
            socket.socket = DEFAULT_SOCKET
        elif proxy.startswith('socks'):
            if proxy[5] == '4':
                proxy_type = socks.SOCKS4
            else:
                proxy_type = socks.SOCKS5
            proxy = proxy[proxy.find('://') + 3:]
            try:
                proxy, port = proxy.split(':')
            except ValueError:
                port = None
            socks.set_default_proxy(proxy_type, proxy, port)
            socket.socket = socks.socksocket
        else:
            try:
                proxy, port = proxy.split(':')
            except ValueError:
                port = None
            socks.set_default_proxy(socks.HTTP, proxy, port)
            socket.socket = socks.socksocket
        downloaded = _download_helper(url)
        if downloaded is not None:
            return downloaded
    return (None, None)

Download a PDF or DJVU document from a URL, optionally using proxies.
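A hypothetical call: try a direct connection first, then fall back to a local SOCKS5 proxy. Reading the return value as a (data, extension) pair is an assumption based on the (None, None) fallback.

data, ext = download('https://arxiv.org/pdf/1605.08386',
                     proxies=['', 'socks5://127.0.0.1:9050'])
if data is None:
    print('download failed over every proxy')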
2,161
def make_format(format_spec):
    fill = ''
    align = ''
    zero = ''
    width = format_spec.width
    if format_spec.align:
        align = format_spec.align[0]
        if format_spec.fill:
            fill = format_spec.fill[0]
    if format_spec.zero:
        zero = '0'
    precision_part = ""
    if format_spec.precision:
        precision_part = ".%s" % format_spec.precision
    return "%s%s%s%s%s%s" % (fill, align, zero, width, precision_part,
                             format_spec.type)

Build format string from a format specification.
2,162
def extract_fields(cls, schema):
    for part in parse.PARSE_RE.split(schema):
        if not part or part == '{{' or part == '}}':
            continue
        elif part[0] == '{':
            yield cls.parse(part)

Extract fields in a parse expression schema.
2,163
def _registerHandler(self, handler):
    self._logger.addHandler(handler)
    self._handlers.append(handler)

Registers a handler.
2,164
def _unregisterHandler(self, handler, shutdown=True):
    if handler in self._handlers:
        self._handlers.remove(handler)
        self._logger.removeHandler(handler)
    if shutdown:
        try:
            handler.close()
        except KeyError:
            pass

Unregisters the logging handler.
2,165
def getLogger(cls, name=None):
    return logging.getLogger(
        "{0}.{1}".format(cls.BASENAME, name) if name else cls.BASENAME)

Retrieves the Python native logger.
2,166
def debug(cls, name, message, *args):
    cls.getLogger(name).debug(message, *args)

Convenience function to log a message at the DEBUG level.
2,167
def info(cls, name, message, *args):
    cls.getLogger(name).info(message, *args)

Convenience function to log a message at the INFO level.
2,168
def warning(cls, name, message, *args):
    cls.getLogger(name).warning(message, *args)

Convenience function to log a message at the WARNING level.
2,169
def error(cls, name, message, *args):
    cls.getLogger(name).error(message, *args)

Convenience function to log a message at the ERROR level.
2,170
def critical(cls, name, message, *args):
    cls.getLogger(name).critical(message, *args)

Convenience function to log a message at the CRITICAL level.
2,171
def exception(cls, name, message, *args):
    cls.getLogger(name).exception(message, *args)

Convenience function to log a message at the ERROR level with additional exception information.
2,172
def allow(self, ctx, ops):
    auth_info, _ = self.allow_any(ctx, ops)
    return auth_info

Checks that the authorizer's request is authorized to perform all the given operations. Note that allow does not check first-party caveats; if there is more than one macaroon that may authorize the request, it will choose the first one that does, regardless.
2,173
def allow_any(self, ctx, ops):
    authed, used = self._allow_any(ctx, ops)
    return self._new_auth_info(used), authed

Like allow, except that it will authorize as many of the operations as possible without requiring any to be authorized. If all the operations succeeded, the array will be nil.
2,174
def allow_capability(self, ctx, ops):
    nops = 0
    for op in ops:
        if op != LOGIN_OP:
            nops += 1
    if nops == 0:
        raise ValueError('no non-login operations required in capability')
    _, used = self._allow_any(ctx, ops)
    squasher = _CaveatSquasher()
    for i, is_used in enumerate(used):
        if not is_used:
            continue
        for cond in self._conditions[i]:
            squasher.add(cond)
    return squasher.final()

Checks that the user is allowed to perform all the given operations. If not, a discharge error will be raised. If allow_capability succeeds, it returns a list of first-party caveat conditions that must be applied to any macaroon granting capability to execute the operations. Those caveat conditions will not include any declarations contained in login macaroons; the caller must be careful not to mint a macaroon associated with the LOGIN_OP operation unless they add the expected declaration caveat too. In general, clients should not create capabilities that grant LOGIN_OP rights.
2,175
def register(self, name, path, description, final_words=None):
    return self.__app.recipes.register(name, path, self._plugin,
                                       description, final_words)

Registers a new recipe in the context of the current plugin.
2,176
def get(self, name=None):
    return self.__app.recipes.get(name, self._plugin)

Gets a list of all recipes which are registered by the current plugin. If a name is provided, only the requested recipe is returned, or None.
2,177
def build(self, recipe):
    return self.__app.recipes.build(recipe, self._plugin)
Builds a recipe
2,178
def register(self, name, path, plugin, description=None, final_words=None):
    if name in self.recipes.keys():
        # Fixed lookup: the original indexed self.recipes["name"] (the
        # literal string), which could never resolve the existing recipe.
        raise RecipeExistsException(
            "Recipe %s was already registered by %s"
            % (name, self.recipes[name].plugin.name))
    self.recipes[name] = Recipe(name, path, plugin, description, final_words)
    self.__log.debug("Recipe %s registered by %s" % (name, plugin.name))
    return self.recipes[name]

Registers a new recipe.
2,179
def unregister(self, recipe):
    if recipe not in self.recipes.keys():
        self.__log.warning("Can not unregister recipe %s" % recipe)
    else:
        del self.recipes[recipe]
        self.__log.debug("Recipe %s got unregistered" % recipe)

Unregisters an existing recipe so that this recipe is no longer available.
2,180
def get(self, recipe=None, plugin=None):
    if plugin is not None:
        if recipe is None:
            recipes_list = {}
            for key in self.recipes.keys():
                if self.recipes[key].plugin == plugin:
                    recipes_list[key] = self.recipes[key]
            return recipes_list
        else:
            if recipe in self.recipes.keys():
                if self.recipes[recipe].plugin == plugin:
                    return self.recipes[recipe]
                else:
                    return None
            else:
                return None
    else:
        if recipe is None:
            return self.recipes
        else:
            if recipe in self.recipes.keys():
                return self.recipes[recipe]
            else:
                return None

Get one or more recipes.
2,181
def build(self, recipe, plugin=None):
    if recipe not in self.recipes.keys():
        raise RecipeMissingException("Recipe %s unknown." % recipe)
    recipe_obj = self.recipes[recipe]
    if plugin is not None:
        if recipe_obj.plugin != plugin:
            # The original concatenated message was missing the space
            # between "Use" and "the".
            raise RecipeWrongPluginException(
                "The requested recipe does not belong to the given plugin. "
                "Use the app object to retrieve the requested recipe: "
                "my_app.recipes.get(%s)" % recipe)
    recipe_obj.build()

Executes a recipe and creates new folders and files.
2,182
def build(self, output_dir=None, **kwargs):
    if output_dir is None:
        output_dir = os.getcwd()
    target = cookiecutter(self.path, output_dir=output_dir, **kwargs)
    if self.final_words is not None and len(self.final_words) > 0:
        print("")
        print(self.final_words)
    return target

Builds the recipe and creates the needed folders and files. May ask the user for some parameter inputs.
2,183
def where_am_i():
    locations = {'Work': 0, 'Home': 0}
    for ssid in scan_for_ssids():
        for l in logged_ssids:
            if l['name'] == ssid:
                locations[l['location']] += 1
    print('Where Am I: SSIDS Matching Home = ', locations['Home'],
          ' SSIDs matching Work = ', locations['Work'])
    return max(locations.keys(), key=lambda k: locations[k])

High-level function that can estimate where the user is, based on predefined setups.
2,184
def summarise(self):
    res = ''
    if self.user == 'Developer':
        if self.host == 'Home PC':
            res += 'At Home'
        else:
            res += 'Away from PC'
    elif self.user == 'User' and self.host == 'Home PC':
        res += 'Remote desktop into home PC'
    res += '\n'
    res += self.transport
    return res

Extrapolate a human-readable summary of the contexts.
2,185
def get_host(self):
    import socket
    host_name = socket.gethostname()
    for h in hosts:
        if h['name'] == host_name:
            return h['type'], h['name']
    return dict(type='Unknown', name=host_name)

Returns the host computer running this program.
2,186
def get_user(self):
    for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
        user = os.environ.get(name)
        if user:
            break
    for u in users:
        if u['name'] == user:
            return u['type'], u['name']

Returns the username on this computer.
2,187
def get_host_usage(self):
    import psutil
    process_names = [proc.name for proc in psutil.process_iter()]
    cpu_pct = psutil.cpu_percent(interval=1)
    mem = psutil.virtual_memory()
    return str(cpu_pct), str(len(process_names)), str(mem.available), str(mem.total)

Get details of CPU and RAM usage of this PC.
2,188
def schema():
    return Schema({
        'script': And(Or(type(' '), type(u' ')), len),
        Optional('title', default=''): str,
        Optional('model', default={}): {Optional(And(str, len)): object},
        Optional('env', default={}): {Optional(And(str, len)): And(str, len)},
        Optional('item', default=None): object,
        Optional('dry_run', default=False): bool,
        Optional('debug', default=False): bool,
        Optional('strict', default=False): bool,
        Optional('variables', default={}): {
            Optional(And(Or(type(' '), type(u' ')), len,
                         Regex(r'([a-zA-Z][_a-zA-Z]*)'))):
                Or(type(' '), type(u' '))
        },
        Optional('temporary_scripts_path', default=''): Or(type(''), type(u'')),
        Optional('internal', default=False): bool
    })

Provide schema for shell configuration.
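A usage sketch, assuming Schema, And, Or, Optional, and Regex come from the schema package as the code suggests: validation fills in the declared defaults for missing optional keys.

config = schema().validate({'script': 'echo hello'})
assert config['title'] == ''
assert config['dry_run'] is False and config['variables'] == {}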
2,189
def get_by_name(self, name):
    for p in self.project_list:
        if p.nme == name:
            return p
    return None

Returns the Project object which matches name.
2,190
def execute_tasks(self):
    for t in self.tasks:
        print('RUNNING ' + str(t.task_id) + ' = ' + t.name)
        t.execute()
        if t.success != '__IGNORE__RESULT__':
            print(t)
            print('TASK RESULT :', t.result, ' but success = ', t.success)
            if t.result != t.success:
                print('ABORTING TASK EXECUTION SEQUENCE'
                      + str(t.task_id) + ' = ' + t.name)
                break

Run execute on all tasks iff the prior task was successful.
2,191
def build_report(self, op_file, tpe='md'):
    if tpe == 'md':
        res = self.get_report_md()
    elif tpe == 'rst':
        res = self.get_report_rst()
    elif tpe == 'html':
        res = self.get_report_html()
    else:
        res = 'Unknown report type passed to project.build_report'
    with open(op_file, 'w') as f:
        f.write(res)

Create a report showing all project details.
2,192
def get_report_rst(self):
    res = ''
    res += '-----------------------------------\n'
    res += self.nme + '\n'
    res += '-----------------------------------\n\n'
    res += self.desc + '\n'
    res += self.fldr + '\n\n'
    res += '.. contents:: \n\n\n'
    res += 'Overview\n' + '===========================================\n\n'
    res += 'This document contains details on the project ' + self.nme + '\n\n'
    for d in self.details:
        res += ' - ' + d[0] + ' = ' + d[1] + '\n\n'
    res += '\nTABLES\n' + '===========================================\n\n'
    for t in self.datatables:
        res += t.name + '\n'
        res += '-------------------------\n\n'
        res += t.format_rst() + '\n\n'
    return res

Formats the project into a report in RST format.
2,193
def get_report_html(self):
    res = '<h2>Project:' + self.nme + '</h2>'
    res += '<p>' + self.desc + '</p>'
    res += '<p>' + self.fldr + '</p>'
    res += '<BR><h3>TABLES</h3>'
    for t in self.datatables:
        res += '<b>' + t.name + '<b><BR>'
        res += '<p>' + str(t) + '</p>'
    return res

Formats the project into a report in HTML format. WARNING: tables missing BR.
2,194
def add_param(self, param_key, param_val):
    self.params.append([param_key, param_val])
    if param_key == '__success_test':
        self.success = param_val

Adds parameters as key/value pairs.
2,195
def execute(self):
    func_params = []
    exec_str = self.func.__name__ + '('
    for p in self.params:
        if p[0][0:2] != '__':
            exec_str += p[0] + '="' + self._force_str(p[1]) + '", '
            func_params.append(p[1])
    exec_str = exec_str[:-2]
    exec_str += ') # task' + str(self.task_id) + ': ' + self.name
    self.result = self.func(*func_params)
    print(exec_str + ' loaded ', self.result)

Executes all automatic tasks in order of task id.
2,196
def create_column_index(annotations):
    _column_index = OrderedDict({'Column Name': annotations['Column Name']})
    categorical_rows = annotation_rows('C:', annotations)
    _column_index.update(categorical_rows)
    numerical_rows = {name: [float(x) if x != '' else float('NaN')
                             for x in values]
                      for name, values in annotation_rows('N:', annotations).items()}
    _column_index.update(numerical_rows)
    column_index = pd.MultiIndex.from_tuples(list(zip(*_column_index.values())),
                                             names=list(_column_index.keys()))
    if len(column_index.names) == 1:
        name = column_index.names[0]
        column_index = column_index.get_level_values(name)
    return column_index

Create a pd.MultiIndex using the column names and any categorical rows. Note that non-main columns will also be assigned a default category.
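A standalone sketch of the index construction: with a categorical row the columns become a two-level MultiIndex; with only Column Name present, the tail of the function collapses it back to a flat index. The column and group values are invented.

from collections import OrderedDict
import pandas as pd

cols = OrderedDict([('Column Name', ['Protein', 'Intensity']),
                    ('Group', ['', 'Control'])])
idx = pd.MultiIndex.from_tuples(list(zip(*cols.values())),
                                names=list(cols.keys()))
# idx -> [('Protein', ''), ('Intensity', 'Control')],
#        names=['Column Name', 'Group']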
2,197
def read_perseus(path_or_file, **kwargs):
    annotations = read_annotations(path_or_file, separator)
    column_index = create_column_index(annotations)
    if 'usecols' in kwargs:
        usecols = kwargs['usecols']
        if type(usecols[0]) is str:
            usecols = sorted([list(column_index).index(x) for x in usecols])
        column_index = column_index[usecols]
    kwargs['dtype'] = dict(kwargs.get('dtype', {}),
                           **annotations.get('dtype', {}))
    kwargs['converters'] = dict(kwargs.get('converters', {}),
                                **annotations.get('converters', {}))
    df = pd.read_csv(path_or_file, sep=separator, comment='#', **kwargs)
    df.columns = column_index
    return df

Read a Perseus-formatted matrix into a pd.DataFrame. Annotation rows will be converted into a multi-index.
2,198
def to_perseus(df, path_or_file, main_columns=None, separator=separator,
               convert_bool_to_category=True, numerical_annotation_rows=set([])):
    _df = df.copy()
    if not _df.columns.name:
        _df.columns.name = 'Column Name'
    column_names = _df.columns.get_level_values('Column Name')
    annotations = {}
    main_columns = _infer_main_columns(_df) if main_columns is None else main_columns
    annotations['Type'] = ['E' if column_names[i] in main_columns
                           else dtype_to_perseus(dtype)
                           for i, dtype in enumerate(_df.dtypes)]
    for i, column in enumerate(_df.columns):
        valid_values = [value for value in _df[column] if value is not None]
        if len(valid_values) > 0 and all(type(value) is list
                                         for value in valid_values):
            annotations['Type'][i] = 'M'
            _df[column] = _df[column].apply(lambda xs: ';'.join(str(x) for x in xs))
    if convert_bool_to_category:
        for i, column in enumerate(_df.columns):
            if _df.dtypes[i] is np.dtype('bool'):
                values = _df[column].values
                _df[column][values] = '+'
                _df[column][~values] = ''
    annotation_row_names = set(_df.columns.names) - {'Column Name'}
    for name in annotation_row_names:
        annotation_type = 'N' if name in numerical_annotation_rows else 'C'
        annotations['{}:{}'.format(annotation_type, name)] = \
            _df.columns.get_level_values(name)
    with PathOrFile(path_or_file, 'w') as f:
        f.write(separator.join(column_names) + '\n')
        for name, values in annotations.items():
            f.write('#!{{{name}}}{values}\n'.format(
                name=name, values=separator.join([str(x) for x in values])))
        _df.to_csv(f, header=None, index=False, sep=separator)

Save a pd.DataFrame to Perseus text format.
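A hypothetical round trip through the two helpers above; the column names and file path are invented for illustration.

import pandas as pd

df = pd.DataFrame({'Protein': ['A', 'B'], 'Intensity': [1.0, 2.0]})
to_perseus(df, 'matrix.txt', main_columns={'Intensity'})
df2 = read_perseus('matrix.txt')  # annotation rows come back as column index levels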
2,199
def get_page(search_text):
    lst = search_aikif(search_text)
    txt = '<table class="as-table as-table-zebra as-table-horizontal">'
    for result in lst:
        txt += '<TR><TD>' + result + '</TD></TR>'
    txt += '</TABLE>\n\n'
    return txt

Formats the entire search result in a table output.