idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
14,700
def _deduplicate ( lst ) : out = [ ] for i in lst : if i not in out : out . append ( i ) return out
Auxiliary function to deduplicate lst .
14,701
def _join ( lst , key , sep = ";" ) : return sep . join ( [ d [ key ] for d in lst if d [ key ] ] )
Auxiliary function that joins the values stored under a given key across a list of dictionaries, skipping entries whose value is None or empty.
14,702
def authors(self):
    """A list of scopus_api._ScopusAuthor objects, or None when absent."""
    found = self.xml.find('authors', ns)
    try:
        return [_ScopusAuthor(author) for author in found]
    except TypeError:
        # find() returned None: there is no <authors> element.
        return None
A list of scopus_api . _ScopusAuthor objects .
14,703
def citedby_url(self):
    """URL of the Scopus page listing citing papers, or None when absent."""
    link = self.coredata.find('link[@rel="scopus-citedby"]', ns)
    try:
        return link.get('href')
    except AttributeError:
        # find() returned None: no cited-by link in the metadata.
        return None
URL to Scopus page listing citing papers .
14,704
def scopus_url(self):
    """URL of the abstract page on Scopus, or None when absent."""
    link = self.coredata.find('link[@rel="scopus"]', ns)
    try:
        return link.get('href')
    except AttributeError:
        # find() returned None: no scopus link in the metadata.
        return None
URL to the abstract page on Scopus .
14,705
def get_corresponding_author_info(self):
    """Try to scrape corresponding-author information from the Scopus page.

    Returns a tuple (scopus_url, name, email); any element may be None
    when it could not be found.  Decodes the Cloudflare email-protection
    obfuscation used on the page.
    """
    resp = requests.get(self.scopus_url)
    from lxml import html
    parsed_doc = html.fromstring(resp.content)
    for div in parsed_doc.body.xpath('.//div'):
        for a in div.xpath('a'):
            if '/cdn-cgi/l/email-protection' not in a.get('href', ''):
                continue
            encoded_text = a.attrib['href'].replace(
                '/cdn-cgi/l/email-protection#', '')
            # Cloudflare obfuscation: first hex byte is the XOR key
            # applied to every following hex byte.
            key = int(encoded_text[0:2], 16)
            email = ''.join([chr(int('0x{}'.format(x), 16) ^ key)
                             for x in map(''.join,
                                          zip(*[iter(encoded_text[2:])] * 2))])
            # Look for the author-profile link in the same div.
            # BUG FIX: the original reset scopus_url/name to None for every
            # non-matching anchor (clobbering an earlier match) and left
            # them unbound when the div had no anchors at all.
            scopus_url, name = None, None
            for aa in div.xpath('a'):
                if 'http://www.scopus.com/authid/detail.url' in aa.get('href', ''):
                    scopus_url = aa.attrib['href']
                    name = aa.text
                    break
            return (scopus_url, name, email)
Try to get corresponding author information .
14,706
def latex(self):
    """Return a LaTeX-formatted bibliographic entry for the abstract."""
    s = ('{authors}, \\textit{{{title}}}, {journal}, {volissue}, '
         '{pages}, ({date}). {doi}, {scopus_url}.')
    if len(self.authors) > 1:
        authors = ', '.join([str(a.given_name) + ' ' + str(a.surname)
                             for a in self.authors[0:-1]])
        authors += (' and ' + str(self.authors[-1].given_name) + ' ' +
                    str(self.authors[-1].surname))
    else:
        a = self.authors[0]
        authors = str(a.given_name) + ' ' + str(a.surname)
    title = self.title
    journal = self.publicationName
    volume = self.volume
    issue = self.issueIdentifier
    if volume and issue:
        volissue = '\\textbf{{{0}({1})}}'.format(volume, issue)
    elif volume:
        # BUG FIX: original used '\\textbf{{0}}' which emitted the
        # literal text "\textbf{0}" instead of the volume number.
        volissue = '\\textbf{{{0}}}'.format(volume)
    else:
        volissue = 'no volume'
    date = self.coverDate
    if self.pageRange:
        pages = 'p. {0}'.format(self.pageRange)
    elif self.startingPage:
        # BUG FIX: original called .format(self) positionally, which
        # raises KeyError for the named field {self.startingPage}.
        pages = 'p. {self.startingPage}'.format(self=self)
    elif self.article_number:
        pages = 'Art. No. {self.article_number}, '.format(self=self)
    else:
        pages = '(no pages found)'
    doi = '\\href{{https://doi.org/{0}}}{{doi:{0}}}'.format(self.doi)
    scopus_url = '\\href{{{0}}}{{scopus:{1}}}'.format(self.scopus_url,
                                                      self.eid)
    return s.format(**locals())
Return LaTeX representation of the abstract .
14,707
def html(self):
    """Return an HTML citation string for the abstract.

    Authors link to their Scopus profiles, the title links to the
    abstract page, and the journal links to its source page.  Any
    literal 'None' fragments are stripped from the final string.
    """
    s = (u'{authors}, {title}, {journal}, {volissue}, {pages}, '
         '({date}). {doi}.')
    au_link = ('<a href="https://www.scopus.com/authid/detail.url'
               '?origin=AuthorProfile&authorId={0}">{1}</a>')
    if len(self.authors) > 1:
        authors = u', '.join([au_link.format(a.auid,
                                             (str(a.given_name) + ' ' +
                                              str(a.surname)))
                              for a in self.authors[0:-1]])
        authors += (u' and ' +
                    au_link.format(self.authors[-1].auid,
                                   (str(self.authors[-1].given_name) + ' ' +
                                    str(self.authors[-1].surname))))
    else:
        a = self.authors[0]
        authors = au_link.format(a.auid,
                                 str(a.given_name) + ' ' + str(a.surname))
    title = u'<a href="{link}">{title}</a>'.format(link=self.scopus_url,
                                                   title=self.title)
    jname = self.publicationName
    sid = self.source_id
    jlink = ('<a href="https://www.scopus.com/source/sourceInfo.url'
             '?sourceId={sid}">{journal}</a>')
    journal = jlink.format(sid=sid, journal=jname)
    volume = self.volume
    issue = self.issueIdentifier
    if volume and issue:
        volissue = u'<b>{0}({1})</b>'.format(volume, issue)
    elif volume:
        volissue = u'<b>{0}</b>'.format(volume)
    else:
        volissue = 'no volume'
    date = self.coverDate
    if self.pageRange:
        pages = u'p. {0}'.format(self.pageRange)
    elif self.startingPage:
        pages = u'p. {self.startingPage}'.format(self=self)
    elif self.article_number:
        pages = u'Art. No. {self.article_number}, '.format(self=self)
    else:
        pages = '(no pages found)'
    doi = '<a href="https://doi.org/{0}">doi:{0}</a>'.format(self.doi)
    html = s.format(**locals())
    # Crude cleanup: drop literal 'None' left behind by missing fields.
    return html.replace('None', '')
Returns an HTML citation .
14,708
def cc(self):
    """List of (year, citation count) tuples over the configured year range."""
    years = range(self._start, self._end + 1)
    try:
        counts = [d.get('$') for d in self._citeInfoMatrix['cc']]
        return list(zip(years, counts))
    except AttributeError:
        # No citation info available: report zero citations per year.
        return list(zip(years, [0] * len(years)))
List of tuples of yearly number of citations for specified years .
14,709
def affiliation_history(self):
    """List of ScopusAffiliation objects for the author's former affiliations.

    Only affiliations whose ip-doc element contains more than one node
    (i.e. more than one publication) are included.
    """
    history = self.xml.findall('author-profile/affiliation-history/affiliation')
    aff_ids = [elem.attrib.get('affiliation-id')
               for elem in history
               if elem is not None and len(list(elem.find("ip-doc").iter())) > 1]
    return [ScopusAffiliation(aff_id) for aff_id in aff_ids]
List of ScopusAffiliation objects representing former affiliations of the author . Only affiliations with more than one publication are considered .
14,710
def get_coauthors(self):
    """Return a list of the author's coauthors.

    Each entry is a namedtuple with fields (name, scopus_id,
    affiliation, categories).  Results are fetched from the Scopus
    coauthor-search API in pages of 25 entries.
    """
    url = self.xml.find('coredata/link[@rel="coauthor-search"]').get('href')
    xml = download(url=url).text.encode('utf-8')
    xml = ET.fromstring(xml)
    coauthors = []
    # Total number of search results (0 when the field is missing).
    N = int(get_encoded_text(xml, 'opensearch:totalResults') or 0)
    AUTHOR = namedtuple('Author',
                        ['name', 'scopus_id', 'affiliation', 'categories'])
    count = 0
    while count < N:
        params = {'start': count, 'count': 25}
        xml = download(url=url, params=params).text.encode('utf-8')
        xml = ET.fromstring(xml)
        for entry in xml.findall('atom:entry', ns):
            given_name = get_encoded_text(
                entry, 'atom:preferred-name/atom:given-name')
            surname = get_encoded_text(
                entry, 'atom:preferred-name/atom:surname')
            coauthor_name = u'{0} {1}'.format(given_name, surname)
            scopus_id = get_encoded_text(
                entry, 'dc:identifier').replace('AUTHOR_ID:', '')
            affiliation = get_encoded_text(
                entry, 'atom:affiliation-current/atom:affiliation-name')
            # Subject areas with publication frequency, e.g. "Physics (12)".
            s = u', '.join(['{0} ({1})'.format(subject.text,
                                               subject.attrib['frequency'])
                            for subject in entry.findall('atom:subject-area',
                                                         ns)])
            coauthors += [AUTHOR(coauthor_name, scopus_id, affiliation, s)]
        count += 25
    return coauthors
Return a list of coauthors, including their Scopus IDs, affiliations, and research areas.
14,711
def get_document_eids(self, *args, **kwds):
    """Return the list of EIDs for this author via a ScopusSearch query."""
    query = 'au-id({})'.format(self.author_id)
    return ScopusSearch(query, *args, **kwds).get_eids()
Return list of EIDs for the author using ScopusSearch .
14,712
def get_abstracts(self, refresh=True):
    """Return a list of ScopusAbstract objects, one per document EID."""
    eids = self.get_document_eids(refresh=refresh)
    return [ScopusAbstract(eid, refresh=refresh) for eid in eids]
Return a list of ScopusAbstract objects using ScopusSearch .
14,713
def get_journal_abstracts(self, refresh=True):
    """Return the subset of the author's abstracts published in journals."""
    return [ab for ab in self.get_abstracts(refresh=refresh)
            if ab.aggregationType == 'Journal']
Return a list of ScopusAbstract objects using ScopusSearch but only if belonging to a Journal .
14,714
def get_document_summary(self, N=None, cite_sort=True, refresh=True):
    """Return a summary string of the author's documents.

    Parameters
    ----------
    N : int, optional
        Number of documents to list; defaults to all of them.
    cite_sort : bool
        When True, sort documents by citation count, descending.
    refresh : bool
        Refresh cached abstracts when True.
    """
    abstracts = self.get_abstracts(refresh=refresh)
    if cite_sort:
        ranked = [(ab, int(ab.citedby_count)) for ab in abstracts]
        ranked.sort(reverse=True, key=itemgetter(1))
        abstracts = [pair[0] for pair in ranked]
    if N is None:
        N = len(abstracts)
    lines = [u'{0} of {1} documents'.format(N, len(abstracts))]
    for i in range(N):
        lines += ['{0:2d}. {1}\n'.format(i + 1, str(abstracts[i]))]
    return '\n'.join(lines)
Return a summary string of documents .
14,715
def author_impact_factor(self, year=2014, refresh=True):
    """Compute the author's impact factor for *year*.

    Returns (total citations, paper count, citations per paper) over
    journal papers published in year-1 and year-2.
    """
    abstracts = self.get_journal_abstracts(refresh=refresh)
    cites = [int(ab.citedby_count) for ab in abstracts]
    pub_years = [int(ab.coverDate.split('-')[0]) for ab in abstracts]
    data = sorted(zip(pub_years, cites, abstracts),
                  key=itemgetter(1), reverse=True)
    aif_data = [entry for entry in data if entry[0] in (year - 1, year - 2)]
    Ncites = sum([entry[1] for entry in aif_data])
    if len(aif_data) > 0:
        return (Ncites, len(aif_data), Ncites / float(len(aif_data)))
    return (Ncites, len(aif_data), 0)
Get the author's impact factor for the given year, based on journal papers from the two preceding years.
14,716
def n_first_author_papers(self, refresh=True):
    """Return the number of journal papers where this author is listed first."""
    return sum(1 for ab in self.get_journal_abstracts(refresh=refresh)
               if ab.authors[0].scopusid == self.author_id)
Return number of papers with author as the first author .
14,717
def n_yearly_publications(self, refresh=True):
    """Return a Counter mapping year -> number of journal publications."""
    years = (int(ab.coverDate.split('-')[0])
             for ab in self.get_journal_abstracts(refresh=refresh))
    return Counter(years)
Number of journal publications in a given year .
14,718
def _get_org ( aff ) : try : org = aff [ 'organization' ] if not isinstance ( org , str ) : try : org = org [ '$' ] except TypeError : org = ', ' . join ( [ d [ '$' ] for d in org if d ] ) except KeyError : org = None return org
Auxiliary function to extract org information from affiliation for authorgroup .
14,719
def _parse_pages ( self , unicode = False ) : if self . pageRange : pages = 'pp. {}' . format ( self . pageRange ) elif self . startingPage : pages = 'pp. {}-{}' . format ( self . startingPage , self . endingPage ) else : pages = '(no pages found)' if unicode : pages = u'{}' . format ( pages ) return pages
Auxiliary function to parse and format page range of a document .
14,720
def authkeywords(self):
    """List of author-provided keywords of the abstract, or None when absent."""
    keywords = self._json['authkeywords']
    if keywords is None:
        return None
    try:
        return [d['$'] for d in keywords['author-keyword']]
    except TypeError:
        # A single keyword comes through as one dict, not a list of dicts.
        return [keywords['author-keyword']['$']]
List of author - provided keywords of the abstract .
14,721
def idxterms(self):
    """List of index terms of the abstract, or None when unavailable."""
    try:
        terms = listify(self._json.get("idxterms", {}).get('mainterm', []))
    except AttributeError:
        # "idxterms" maps to None, so the chained .get fails.
        return None
    try:
        return [d['$'] for d in terms]
    except AttributeError:
        return None
List of index terms .
14,722
def get_html(self):
    """Return the bibliographic entry in HTML format.

    Authors link to their Scopus profiles, the title links to the
    document page, and the journal name links to its source page.
    """
    au_link = ('<a href="https://www.scopus.com/authid/detail.url'
               '?origin=AuthorProfile&authorId={0}">{1}</a>')
    if len(self.authors) > 1:
        authors = u', '.join([au_link.format(a.auid,
                                             a.given_name + ' ' + a.surname)
                              for a in self.authors[0:-1]])
        authors += (u' and ' +
                    au_link.format(self.authors[-1].auid,
                                   (str(self.authors[-1].given_name) + ' ' +
                                    str(self.authors[-1].surname))))
    else:
        a = self.authors[0]
        authors = au_link.format(a.auid, a.given_name + ' ' + a.surname)
    title = u'<a href="{}">{}</a>'.format(self.scopus_link, self.title)
    if self.volume and self.issueIdentifier:
        volissue = u'<b>{}({})</b>'.format(self.volume, self.issueIdentifier)
    elif self.volume:
        volissue = u'<b>{}</b>'.format(self.volume)
    else:
        volissue = 'no volume'
    jlink = '<a href="https://www.scopus.com/source/sourceInfo.url' '?sourceId={}">{}</a>'.format(self.source_id, self.publicationName)
    pages = _parse_pages(self, unicode=True)
    s = "{auth}, {title}, {jour}, {volissue}, {pages}, ({year}).".format(
        auth=authors, title=title, jour=jlink, volissue=volissue,
        pages=pages, year=self.coverDate[:4])
    if self.doi:
        s += ' <a href="https://doi.org/{0}">doi:{0}</a>.'.format(self.doi)
    return s
Bibliographic entry in html format .
14,723
def get_latex(self):
    """Return the bibliographic entry in LaTeX format."""
    if len(self.authors) > 1:
        authors = _list_authors(self.authors)
    else:
        # BUG FIX: the original assigned the whole authors list to `a`,
        # so a.given_name raised AttributeError for single-author papers.
        a = self.authors[0]
        authors = ' '.join([a.given_name, a.surname])
    if self.volume and self.issueIdentifier:
        volissue = '\\textbf{{{}({})}}'.format(self.volume,
                                               self.issueIdentifier)
    elif self.volume:
        volissue = '\\textbf{{{}}}'.format(self.volume)
    else:
        volissue = 'no volume'
    pages = _parse_pages(self)
    s = '{auth}, \\textit{{{title}}}, {jour}, {vol}, {pages} ({year}).'.format(
        auth=authors, title=self.title, jour=self.publicationName,
        vol=volissue, pages=pages, year=self.coverDate[:4])
    if self.doi is not None:
        s += ' \\href{{https://doi.org/{0}}}{{doi:{0}}}, '.format(self.doi)
    s += '\\href{{{0}}}{{scopus:{1}}}.'.format(self.scopus_link, self.eid)
    return s
Bibliographic entry in LaTeX format .
14,724
def _parse(res, params, n, api, **kwds):
    """Download the remaining search pages and collect the parsed JSON entries.

    Parameters
    ----------
    res : dict
        JSON of the first page, already downloaded by the caller.
    params : dict
        Query parameters; mutated in place to advance cursor/start.
    n : int
        Total number of results expected.
    api : str
        Key into the module-level URL mapping.
    """
    # Two paging modes: cursor-based or start/count-based.
    cursor = "cursor" in params
    if not cursor:
        start = params["start"]
    if n == 0:
        return ""
    _json = res.get('search-results', {}).get('entry', [])
    # Fetch pages until the expected number of results has been collected.
    while n > 0:
        n -= params["count"]
        if cursor:
            pointer = res['search-results']['cursor'].get('@next')
            params.update({'cursor': pointer})
        else:
            start += params["count"]
            params.update({'start': start})
        res = download(url=URL[api], params=params, accept="json",
                       **kwds).json()
        _json.extend(res.get('search-results', {}).get('entry', []))
    return _json
Auxiliary function to download results and parse json .
14,725
def create_config():
    """Initiate the interactive process to generate the configuration file.

    Creates the per-API cache directories, prompts for the API key (and
    optional InstToken) and writes CONFIG_FILE.  Raises FileExistsError
    when the file already exists.
    """
    file_exists = exists(CONFIG_FILE)
    if not file_exists:
        config.add_section('Directories')
        # Default cache directory per retrieval/search class.
        defaults = [('AbstractRetrieval', expanduser('~/.scopus/abstract_retrieval')),
                    ('AffiliationSearch', expanduser('~/.scopus/affiliation_search')),
                    ('AuthorRetrieval', expanduser('~/.scopus/author_retrieval')),
                    ('AuthorSearch', expanduser('~/.scopus/author_search')),
                    ('CitationOverview', expanduser('~/.scopus/citation_overview')),
                    ('ContentAffiliationRetrieval', expanduser('~/.scopus/affiliation_retrieval')),
                    ('ScopusSearch', expanduser('~/.scopus/scopus_search'))]
        for key, value in defaults:
            config.set('Directories', key, value)
            if not exists(value):
                makedirs(value)
        config.add_section('Authentication')
        prompt_key = "Please enter your API Key, obtained from " "http://dev.elsevier.com/myapikey.html: \n"
        # py2/py3 compatibility: raw_input vs input.
        if py3:
            key = input(prompt_key)
        else:
            key = raw_input(prompt_key)
        config.set('Authentication', 'APIKey', key)
        prompt_token = "API Keys are sufficient for most users. If you " "have to use Authtoken authentication, please enter " "the token, otherwise press Enter: \n"
        if py3:
            token = input(prompt_token)
        else:
            token = raw_input(prompt_token)
        if len(token) > 0:
            config.set('Authentication', 'InstToken', token)
        with open(CONFIG_FILE, 'w') as f:
            config.write(f)
    else:
        text = "Configuration file already exists at {}; process to create " "the file aborted. Please open the file and edit the " "entries manually.".format(CONFIG_FILE)
        raise FileExistsError(text)
Initiates process to generate configuration file .
14,726
def get_encoded_text(container, xpath):
    """Return the joined text of the element at *xpath* in *container*.

    Returns None when the element is absent (find() returns None and
    the .itertext() access raises AttributeError).
    """
    try:
        return "".join(container.find(xpath, ns).itertext())
    except AttributeError:
        return None
Return text for element at xpath in the container xml if it is there .
14,727
def main(argv=None):
    """Invoke the cosmic-ray command-line evaluation.

    Installs signal handlers, dispatches to the requested subcommand,
    and maps known failures onto process exit codes.
    """
    signal.signal(signal.SIGINT,
                  lambda *args: sys.exit(_SIGNAL_EXIT_CODE_BASE + signal.SIGINT))
    # SIGINFO only exists on BSD/macOS; report progress when received.
    if hasattr(signal, 'SIGINFO'):
        signal.signal(getattr(signal, 'SIGINFO'),
                      lambda *args: report_progress(sys.stderr))
    try:
        return docopt_subcommands.main(commands=dsc, argv=argv,
                                       doc_template=DOC_TEMPLATE,
                                       exit_at_end=False)
    except docopt.DocoptExit as exc:
        print(exc, file=sys.stderr)
        return ExitCode.USAGE
    except FileNotFoundError as exc:
        print(exc, file=sys.stderr)
        return ExitCode.NO_INPUT
    except PermissionError as exc:
        print(exc, file=sys.stderr)
        return ExitCode.NO_PERM
    except cosmic_ray.config.ConfigError as exc:
        print(repr(exc), file=sys.stderr)
        # Also surface the underlying cause, if any.
        if exc.__cause__ is not None:
            print(exc.__cause__, file=sys.stderr)
        return ExitCode.CONFIG
    except subprocess.CalledProcessError as exc:
        print('Error in subprocess', file=sys.stderr)
        print(exc, file=sys.stderr)
        return exc.returncode
Invoke the cosmic ray evaluation .
14,728
def extend_name(suffix):
    """Class-decorator factory that appends *suffix* to the class name."""
    def decorator(cls):
        cls.__name__ = '{}{}'.format(cls.__name__, suffix)
        return cls
    return decorator
A factory for class decorators that modify the class name by appending some text to it .
14,729
def mutate(self, node, index):
    """Modify the numeric value of *node* by the offset at *index*.

    NOTE(review): `eval` on node.value relies on the parser guaranteeing
    that the value is a numeric literal — confirm no untrusted input
    reaches this path.
    """
    assert index < len(OFFSETS), 'received count with no associated offset'
    assert isinstance(node, parso.python.tree.Number)
    val = eval(node.value) + OFFSETS[index]
    # The leading space keeps the new token separated from the previous one.
    return parso.python.tree.Number(' ' + str(val), node.start_pos)
Modify the numeric value on node .
14,730
def _prohibited(from_op, to_op):
    "Determines if from_op is allowed to be mutated to to_op."
    # `not` may only mutate to `Nothing`; `+` may never mutate to `Nothing`.
    if from_op is UnaryOperators.Not and to_op is not UnaryOperators.Nothing:
        return True
    if from_op is UnaryOperators.UAdd and to_op is UnaryOperators.Nothing:
        return True
    return False
Determines if from_op is allowed to be mutated to to_op .
14,731
def load_config(filename=None):
    """Load a configuration from a file, or from stdin when *filename* is None/'-'.

    Raises ConfigError (chained to the original exception) on I/O,
    TOML-decoding or encoding errors.
    """
    try:
        with _config_stream(filename) as handle:
            # Use the stream's real name in any error message below.
            filename = handle.name
            return deserialize_config(handle.read())
    except (OSError, toml.TomlDecodeError, UnicodeDecodeError) as exc:
        raise ConfigError(
            'Error loading configuration from {}'.format(filename)) from exc
Load a configuration from a file or stdin .
14,732
def _config_stream(filename):
    """Yield a readable stream for the configuration.

    stdin is used when *filename* is None or '-'.  NOTE(review): this
    generator is used via `with` by load_config, so it is presumably
    decorated with @contextlib.contextmanager at its definition site —
    confirm in the full source.
    """
    if filename is None or filename == '-':
        log.info('Reading config from stdin')
        yield sys.stdin
    else:
        with open(filename, mode='rt') as handle:
            log.info('Reading config from %r', filename)
            yield handle
Given a configuration's filename, this returns a stream from which the configuration can be read.
14,733
def sub(self, *segments):
    """Get a sub-configuration; empty ConfigDict when any segment is missing."""
    node = self
    for segment in segments:
        try:
            node = node[segment]
        except KeyError:
            return ConfigDict({})
    return node
Get a sub - configuration .
14,734
def python_version(self):
    """Return the configured Python version.

    Falls back to the running interpreter's major.minor version when
    the 'python-version' entry is empty or missing.
    """
    version = self.get('python-version', '')
    if version == '':
        version = "{}.{}".format(sys.version_info.major,
                                 sys.version_info.minor)
    return version
Get the configured Python version .
14,735
def mutate(self, node, index):
    """Replace the for-loop's iterable with an empty list."""
    assert index == 0
    assert isinstance(node, ForStmt)
    # children[3] is the iterable expression of a parso for-statement.
    node.children[3] = parso.parse(' []')
    return node
Modify the for loop so that it iterates over an empty list.
14,736
def get_operator(name):
    """Get an operator class from a provider plugin.

    *name* has the form 'provider/operator'.
    """
    sep = name.index('/')
    provider = OPERATOR_PROVIDERS[name[:sep]]
    return provider[name[sep + 1:]]
Get an operator class from a provider plugin .
14,737
def operator_names():
    """Get all operator names as 'provider/operator' strings."""
    return tuple('{}/{}'.format(pname, oname)
                 for pname, provider in OPERATOR_PROVIDERS.items()
                 for oname in provider)
Get all operator names .
14,738
def get_execution_engine(name):
    """Get the execution engine registered under *name*.

    Loads the stevedore driver from the 'cosmic_ray.execution_engines'
    namespace and returns the instantiated driver object.
    """
    manager = driver.DriverManager(
        namespace='cosmic_ray.execution_engines',
        name=name,
        invoke_on_load=True,
        on_load_failure_callback=_log_extension_loading_failure,
    )
    return manager.driver
Get the execution engine by name .
14,739
def use_db(path, mode=WorkDB.Mode.create):
    """Open the WorkDB at *path* in *mode* as a context manager.

    The database is always closed on exit.  NOTE(review): generator used
    via `with` by callers, presumably decorated with
    @contextlib.contextmanager — confirm in the full source.
    """
    database = WorkDB(path, mode)
    try:
        yield database
    finally:
        database.close()
Open a DB in file path in mode mode as a context manager .
14,740
def work_items(self):
    """An iterator over all WorkItems stored in the database."""
    cursor = self._conn.cursor()
    for row in cursor.execute("SELECT * FROM work_items"):
        yield _row_to_work_item(row)
An iterable of all of WorkItems in the db .
14,741
def clear(self):
    """Remove all results and work items from the session database."""
    # Run both deletes in one transaction so the db never holds
    # results without their work items.
    with self._conn:
        self._conn.execute('DELETE FROM results')
        self._conn.execute('DELETE FROM work_items')
Clear all work items from the session .
14,742
def pending_work_items(self):
    "Iterable of all pending work items."
    # Pending = work items with no corresponding row in `results`.
    rows = self._conn.execute(
        "SELECT * FROM work_items WHERE job_id NOT IN (SELECT job_id FROM results)")
    return (_row_to_work_item(row) for row in rows)
Iterable of all pending work items .
14,743
def report_xml():
    # cr-xml entry point: write an XML report for the session to stdout.
    # NOTE: no docstring may be added here — docopt parses
    # report_xml.__doc__ as the CLI usage specification.
    arguments = docopt.docopt(report_xml.__doc__, version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        xml_elem = _create_xml_report(db)
        xml_elem.write(sys.stdout.buffer, encoding='utf-8',
                       xml_declaration=True)
cr - xml
14,744
def execute(db_name):
    """Execute any pending work in the database at *db_name*, recording results.

    Progress is reported before execution and after each completed job.
    Raises FileNotFoundError (reworded) when the database is missing.
    """
    try:
        with use_db(db_name, mode=WorkDB.Mode.open) as work_db:
            _update_progress(work_db)
            config = work_db.get_config()
            engine = get_execution_engine(config.execution_engine_name)

            def on_task_complete(job_id, work_result):
                # Persist each result as soon as the engine reports it.
                work_db.set_result(job_id, work_result)
                _update_progress(work_db)
                log.info("Job %s complete", job_id)

            log.info("Beginning execution")
            engine(work_db.pending_work_items, config,
                   on_task_complete=on_task_complete)
            log.info("Execution finished")
    except FileNotFoundError as exc:
        # Reword the error so it refers to the session database.
        raise FileNotFoundError(
            str(exc).replace('Requested file', 'Corresponding database',
                             1)) from exc
Execute any pending work in the database stored in db_name recording the results .
14,745
def get_ast(module_path, python_version):
    """Parse the file at *module_path* and return its parso AST."""
    with module_path.open(mode='rt', encoding='utf-8') as handle:
        return parso.parse(handle.read(), version=python_version)
Get the AST for the code in a file .
14,746
def is_none(node):
    "Determine if a node is the `None` keyword."
    if not isinstance(node, parso.python.tree.Keyword):
        return False
    return node.value == 'None'
Determine if a node is the None keyword .
14,747
def walk(self, node):
    """Walk a parse tree, calling visit for each node.

    When visit returns None the node (and its subtree) is pruned.
    """
    node = self.visit(node)
    if node is None:
        return None
    if isinstance(node, parso.tree.BaseNode):
        # Recurse into children, dropping any pruned ones.
        node.children = [child for child in map(self.walk, node.children)
                         if child is not None]
    return node
Walk a parse tree calling visit for each node .
14,748
def format_survival_rate():
    # cr-rate entry point: print the session's survival rate percentage.
    # NOTE: no docstring may be added here — docopt parses
    # format_survival_rate.__doc__ as the CLI usage specification.
    arguments = docopt.docopt(format_survival_rate.__doc__,
                              version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        rate = survival_rate(db)
        print('{:.2f}'.format(rate))
cr - rate
14,749
def survival_rate(work_db):
    """Calculate the percentage of mutants that survived in *work_db*."""
    kills = sum(result.is_killed for _, result in work_db.results)
    total = work_db.num_results
    if not total:
        return 0
    return (1 - kills / total) * 100
Calculate the survival rate for the results in a WorkDB.
14,750
def report_html():
    # cr-html entry point: print an HTML report for the session to stdout.
    # NOTE: no docstring may be added here — docopt parses
    # report_html.__doc__ as the CLI usage specification.
    arguments = docopt.docopt(report_html.__doc__, version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        doc = _generate_html_report(db)
        print(doc.getvalue())
cr - html
14,751
def report():
    # cr-report entry point: print completed (and optionally pending) jobs,
    # job counts and the survival rate.
    # NOTE: no docstring may be added here — docopt parses report.__doc__
    # as the CLI usage specification.
    arguments = docopt.docopt(report.__doc__, version='cr-format 0.1')
    show_pending = arguments['--show-pending']
    show_output = arguments['--show-output']
    show_diff = arguments['--show-diff']
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        for work_item, result in db.completed_work_items:
            print('{} {} {} {}'.format(work_item.job_id,
                                       work_item.module_path,
                                       work_item.operator_name,
                                       work_item.occurrence))
            print('worker outcome: {}, test outcome: {}'.format(
                result.worker_outcome, result.test_outcome))
            if show_output:
                print('=== OUTPUT ===')
                print(result.output)
                print('==============')
            if show_diff:
                print('=== DIFF ===')
                print(result.diff)
                print('============')
        if show_pending:
            for work_item in db.pending_work_items:
                print('{} {} {} {}'.format(work_item.job_id,
                                           work_item.module_path,
                                           work_item.operator_name,
                                           work_item.occurrence))
        num_items = db.num_work_items
        num_complete = db.num_results
        print('total jobs: {}'.format(num_items))
        if num_complete > 0:
            print('complete: {} ({:.2f}%)'.format(
                num_complete, num_complete / num_items * 100))
            print('survival rate: {:.2f}%'.format(survival_rate(db)))
        else:
            print('no jobs completed')
cr - report
14,752
def new_config():
    """Prompt the user for configuration values and return a new ConfigDict."""
    config = ConfigDict()
    config["module-path"] = qprompt.ask_str(
        "Top-level module path", blk=False, vld=os.path.exists,
        hlp=MODULE_PATH_HELP)
    python_version = qprompt.ask_str(
        'Python version (blank for auto detection)',
        vld=_validate_python_version, hlp=PYTHON_VERSION_HELP)
    config['python-version'] = python_version
    timeout = qprompt.ask_str(
        'Test execution timeout (seconds)', vld=float, blk=False,
        hlp="The number of seconds to let a test run before terminating it.")
    config['timeout'] = float(timeout)
    config['excluded-modules'] = []
    config["test-command"] = qprompt.ask_str(
        "Test command", blk=False, hlp=TEST_COMMAND_HELP)
    # Let the user pick one of the registered execution engines from a menu.
    menu = qprompt.Menu()
    for at_pos, engine_name in enumerate(execution_engine_names()):
        menu.add(str(at_pos), engine_name)
    config["execution-engine"] = ConfigDict()
    config['execution-engine']['name'] = menu.show(header="Execution engine",
                                                   returns="desc")
    config["cloning"] = ConfigDict()
    config['cloning']['method'] = 'copy'
    config['cloning']['commands'] = []
    return config
Prompt user for config variables and generate new config .
14,753
def init(module_paths, work_db, config):
    """Clear and initialize *work_db* with work items for *module_paths*.

    For every module and every registered operator, a visitor walks the
    module AST and records candidate mutation sites.  Enabled
    interceptors are applied afterwards.
    """
    operator_names = cosmic_ray.plugins.operator_names()
    work_db.set_config(config=config)
    work_db.clear()
    for module_path in module_paths:
        module_ast = get_ast(module_path,
                             python_version=config.python_version)
        for op_name in operator_names:
            # Operators are instantiated per Python version.
            operator = get_operator(op_name)(config.python_version)
            visitor = WorkDBInitVisitor(module_path, op_name, work_db,
                                        operator)
            visitor.walk(module_ast)
    apply_interceptors(work_db, config.sub('interceptors').get('enabled', ()))
Clear and initialize a work - db with work items .
14,754
def apply_interceptors(work_db, enabled_interceptors):
    """Apply each registered-and-enabled interceptor to *work_db*."""
    for name in interceptor_names():
        if name in enabled_interceptors:
            interceptor = get_interceptor(name)
            interceptor(work_db)
Apply each registered interceptor to the WorkDB .
14,755
def _allowed(to_op, from_op, rhs):
    "Determine if a mutation from `from_op` to `to_op` is allowed given a particular `rhs` node."
    if is_none(rhs):
        # Comparisons against None permit only a restricted set of mutations.
        allowed = _RHS_IS_NONE_OPS.get(from_op, ())
        return to_op in allowed
    if is_number(rhs):
        return to_op in _RHS_IS_INTEGER_OPS
    return True
Determine if a mutation from from_op to to_op is allowed given a particular rhs node .
14,756
def worker(module_path, python_version, operator_name, occurrence,
           test_command, timeout):
    """Apply the *occurrence*-th mutation for *operator_name* in *module_path*,
    run the test suite, and return a WorkResult.

    Any unexpected exception yields an INCOMPETENT/EXCEPTION result
    instead of propagating.
    """
    try:
        operator_class = cosmic_ray.plugins.get_operator(operator_name)
        operator = operator_class(python_version)
        with cosmic_ray.mutating.use_mutation(
                module_path, operator, occurrence) as (original_code,
                                                       mutated_code):
            if mutated_code is None:
                # The operator has no mutation at this occurrence.
                return WorkResult(worker_outcome=WorkerOutcome.NO_TEST)
            # Run the tests while the mutation is applied to the file.
            test_outcome, output = run_tests(test_command, timeout)
            diff = _make_diff(original_code, mutated_code, module_path)
            return WorkResult(output=output,
                              diff='\n'.join(diff),
                              test_outcome=test_outcome,
                              worker_outcome=WorkerOutcome.NORMAL)
    except Exception:
        return WorkResult(output=traceback.format_exc(),
                          test_outcome=TestOutcome.INCOMPETENT,
                          worker_outcome=WorkerOutcome.EXCEPTION)
Mutate the OCCURRENCE - th site for OPERATOR_NAME in MODULE_PATH run the tests and report the results .
14,757
def report_progress(stream=None):
    """Invoke every installed progress reporter, writing to *stream*.

    Defaults to sys.stderr when *stream* is None.
    """
    target = sys.stderr if stream is None else stream
    for reporter in _reporters:
        reporter(target)
Report progress from any currently installed reporters .
14,758
def reports_progress(reporter):
    """Decorator factory marking a function as reporting progress via *reporter*.

    While the wrapped function runs, *reporter* is installed as a
    progress reporter.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with progress_reporter(reporter):
                return func(*args, **kwargs)
        return wrapper
    return decorator
A decorator factory to mark functions which report progress .
14,759
def tags():
    "Get a set of tags for the current git repo."
    raw = subprocess.check_output(['git', 'tag']).split(b"\n")
    result = [entry.decode('ascii') for entry in raw]
    # Tag names must be unique.
    assert len(set(result)) == len(result)
    return set(result)
Get a set of tags for the current git repo .
14,760
def create_tag_and_push(version):
    "Create a git tag for `version` and push it to origin."
    # Refuse to overwrite an existing tag.
    assert version not in tags()
    git('config', 'user.name', 'Travis CI on behalf of Austin Bingham')
    git('config', 'user.email', 'austin@sixty-north.com')
    git('config', 'core.sshCommand', 'ssh -i deploy_key')
    git('remote', 'add', 'ssh-origin',
        'git@github.com:sixty-north/cosmic-ray.git')
    git('tag', version)
    # Push via ssh-agent so the deploy key can be used non-interactively.
    subprocess.check_call(
        ['ssh-agent', 'sh', '-c',
         'chmod 0600 deploy_key && ' +
         'ssh-add deploy_key && ' +
         'git push ssh-origin --tags'])
Create a git tag for version and push it to origin .
14,761
def worker_task(work_item, config):
    """Celery task: perform one mutation and run the test suite.

    Returns a (job_id, WorkResult) pair.
    """
    global _workspace
    # Lazily build (or reuse) the cloned workspace for this worker process.
    _ensure_workspace(config)
    result = worker(work_item.module_path, config.python_version,
                    work_item.operator_name, work_item.occurrence,
                    config.test_command, config.timeout)
    return work_item.job_id, result
The celery task which performs a single mutation and runs a test suite .
14,762
def execute_work_items(work_items, config):
    """Build a celery group of worker tasks for the given work items."""
    signatures = (worker_task.s(item, config) for item in work_items)
    return celery.group(signatures)
Execute a suite of tests for a given set of work items .
14,763
def cloned_workspace(clone_config, chdir=True):
    """Create a cloned workspace and yield it, cleaning up afterwards.

    Changes into the clone directory when *chdir* is True; the original
    working directory is always restored.  NOTE(review): generator used
    via `with`, presumably decorated with @contextlib.contextmanager —
    confirm in the full source.
    """
    workspace = ClonedWorkspace(clone_config)
    original_dir = os.getcwd()
    if chdir:
        os.chdir(workspace.clone_dir)
    try:
        yield workspace
    finally:
        os.chdir(original_dir)
        workspace.cleanup()
Create a cloned workspace and yield it .
14,764
def clone_with_git(repo_uri, dest_path):
    """Create a clone by cloning a git repository.

    A shallow (depth-1) clone is enough since only the tip is needed.
    """
    log.info('Cloning git repo %s to %s', repo_uri, dest_path)
    git.Repo.clone_from(repo_uri, dest_path, depth=1)
Create a clone by cloning a git repository .
14,765
def clone_with_copy(src_path, dest_path):
    """Clone a directory tree by copying it to *dest_path*."""
    log.info('Cloning directory tree %s to %s', src_path, dest_path)
    shutil.copytree(src_path, dest_path)
Clone a directory try by copying it .
14,766
def _build_env(venv_dir):
    """Create a new virtual environment in *venv_dir*.

    Uses sys.real_prefix (when running inside a virtualenv) so the venv
    is built from the real interpreter.  Logs and re-raises on failure.
    """
    prefix = getattr(sys, 'real_prefix', sys.prefix)
    python = Path(prefix) / 'bin' / 'python'
    command = '{} -m venv {}'.format(python, venv_dir)
    try:
        log.info('Creating virtual environment: %s', command)
        subprocess.run(command.split(),
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       check=True)
    except subprocess.CalledProcessError as exc:
        log.error("Error creating virtual environment: %s", exc.output)
        raise
Create a new virtual environment in venv_dir .
14,767
def replace_variables(self, text):
    """Expand placeholders in *text* with values from the virtual environment.

    Currently supports the {python-executable} placeholder.
    """
    values = {'python-executable': str(self._venv_path / 'bin' / 'python')}
    return text.format(**values)
Replace variable placeholders in text with values from the virtual env .
14,768
def cleanup(self):
    """Remove the directory containing the clone and virtual environment."""
    log.info('Removing temp dir %s', self._tempdir.name)
    self._tempdir.cleanup()
Remove the directory containing the clone and virtual environment .
14,769
def as_dict(self):
    """Return the WorkResult's fields as a plain dict."""
    field_names = ('output', 'test_outcome', 'worker_outcome', 'diff')
    return {name: getattr(self, name) for name in field_names}
Get the WorkResult as a dict .
14,770
def as_dict(self):
    """Return the WorkItem's fields as a plain dict.

    ``module_path`` is stringified so the result is JSON-serialisable.
    """
    return dict(
        module_path=str(self.module_path),
        operator_name=self.operator_name,
        occurrence=self.occurrence,
        start_pos=self.start_pos,
        end_pos=self.end_pos,
        job_id=self.job_id,
    )
Get fields as a dict .
14,771
def intercept(work_db):
    """Mark work items that spor metadata says must not be mutated as SKIPPED.

    For each WorkItem in ``work_db``, look up the spor repository for its
    module (if any) and, when an anchor covering the item carries
    ``mutate: false`` metadata, record a SKIPPED WorkResult so the item is
    never actually mutated.
    """
    @lru_cache()
    def file_contents(file_path):
        "A simple cache of file contents."
        with file_path.open(mode="rt") as handle:
            return handle.readlines()

    for item in work_db.work_items:
        try:
            repo = open_repository(item.module_path)
        except ValueError:
            # No spor repo means nothing can be excluded for this module.
            log.info("No spor repository for %s", item.module_path)
            continue

        for _, anchor in repo.items():
            # Only anchors in the same file as the work item are relevant.
            if anchor.file_path != item.module_path.absolute():
                continue
            metadata = anchor.metadata
            lines = file_contents(item.module_path)
            # Skip only when the item lies inside the anchor AND the
            # anchor explicitly opts out of mutation (default is True).
            if _item_in_context(lines, item, anchor.context) and not metadata.get("mutate", True):
                log.info(
                    "spor skipping %s %s %s %s %s %s",
                    item.job_id,
                    item.operator_name,
                    item.occurrence,
                    item.module_path,
                    item.start_pos,
                    item.end_pos,
                )
                work_db.set_result(
                    item.job_id,
                    WorkResult(
                        output=None,
                        test_outcome=None,
                        diff=None,
                        worker_outcome=WorkerOutcome.SKIPPED,
                    ),
                )
Look for WorkItems in work_db that should not be mutated due to spor metadata .
14,772
def _line_and_col_to_offset ( lines , line , col ) : offset = 0 for index , contents in enumerate ( lines , 1 ) : if index == line : return offset + col offset += len ( contents ) raise ValueError ( "Offset {}:{} not found" . format ( line , col ) )
Figure out the offset into a file for a particular line and col .
14,773
def _item_in_context(lines, item, context):
    """Report whether the item's mutation span falls within an anchor.

    The span must start at or after the context's offset and be no wider
    than the context's topic.
    """
    start = _line_and_col_to_offset(lines, item.start_pos[0], item.start_pos[1])
    stop = _line_and_col_to_offset(lines, item.end_pos[0], item.end_pos[1])
    span_width = stop - start
    return (start >= context.offset) and (span_width <= len(context.topic))
Determines if a WorkItem falls within an anchor .
14,774
def use_mutation(module_path, operator, occurrence):
    """Apply a mutation for the duration of a with-block.

    Generator intended for use as a context manager. Yields
    ``(original_code, mutated_code)``; the un-mutated source is written
    back to disk on exit, even if the block raises.
    """
    original_code, mutated_code = apply_mutation(module_path, operator, occurrence)
    try:
        yield original_code, mutated_code
    finally:
        # Always restore the original source, regardless of errors.
        with module_path.open(mode='wt', encoding='utf-8') as handle:
            handle.write(original_code)
            handle.flush()
A context manager that applies a mutation for the duration of a with - block .
14,775
def apply_mutation(module_path, operator, occurrence):
    """Apply a specific mutation to a file on disk.

    Returns ``(original_code, mutated_code)``. ``mutated_code`` is None
    when the operator produced no mutation at the given occurrence, in
    which case the file is left untouched.
    """
    module_ast = get_ast(module_path, python_version=operator.python_version)
    original_code = module_ast.get_code()

    visitor = MutationVisitor(occurrence, operator)
    mutated_ast = visitor.walk(module_ast)

    if not visitor.mutation_applied:
        return original_code, None

    mutated_code = mutated_ast.get_code()
    with module_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write(mutated_code)
        handle.flush()
    return original_code, mutated_code
Apply a specific mutation to a file on disk .
14,776
def get_requirements():
    """Extract the list of requirements from our requirements.txt.

    Returns a ``(requirements, links)`` pair: plain requirement
    specifiers and direct VCS/URL links respectively. Adjusts the list
    for Python 2.6 (adds ordereddict) and PyPy (drops psutil).
    """
    requirements_file = os.path.join(os.getcwd(), 'requirements.txt')
    requirements = []
    links = []
    link_prefixes = ('https://', 'git://', 'hg://', 'svn://')
    try:
        with open(requirements_file) as reqfile:
            for raw_line in reqfile.readlines():
                entry = raw_line.strip()
                if entry.startswith('#'):
                    continue
                if entry.startswith(link_prefixes):
                    links.append(entry)
                else:
                    requirements.append(entry)
    except (IOError, OSError) as error:
        # Best effort: a missing requirements.txt is reported, not fatal.
        print(error)

    # Python 2.6 lacks collections.OrderedDict in the stdlib.
    if python26():
        requirements.append('ordereddict==1.1#a0ed854ee442051b249bfad0f638bbec')

    # psutil is not installed on PyPy.
    if _isPyPy:
        for entry in requirements[:]:
            if entry.startswith('psutil'):
                print("Not installing %s on PyPy..." % entry)
                requirements.remove(entry)

    return requirements, links
Extract the list of requirements from our requirements . txt .
14,777
def createBatchfile(keyparams=allparams):
    """Create the batchfile for our new key.

    Only truthy entries of ``keyparams`` are forwarded to
    ``gpg.gen_key_input``.
    """
    useparams = {key: value for key, value in keyparams.items() if value}
    batchfile = gpg.gen_key_input(separate_keyring=True,
                                  save_batchfile=True,
                                  **useparams)
    log.info("Generated GnuPG batch file:\n%s" % batchfile)
    return batchfile
Create the batchfile for our new key .
14,778
def exportNewKey(fingerprint):
    """Export the new key's public, secret, and subkey material to .asc files."""
    log.info("Exporting key: %s" % fingerprint)
    base = os.path.join(gpg.homedir, fingerprint + '-8192-bit-key') + os.path.extsep
    # (suffix, armored key material) pairs, exported in the same order as
    # before: public, secret, then secret subkeys.
    exports = (
        ('pub', gpg.export_keys(fingerprint)),
        ('sec', gpg.export_keys(fingerprint, secret=True)),
        ('sub', gpg.export_keys(fingerprint, secret=True, subkeys=True)),
    )
    for suffix, material in exports:
        with open(base + suffix + os.path.extsep + 'asc', 'w') as fh:
            fh.write(material)
Export the new keys into . asc files .
14,779
def _check_keyserver(location):
    """Check that a keyserver URI uses a known protocol and sanitise it.

    Returns the cleaned ``proto + host`` string, or None when the
    protocol is unrecognised or the host sanitises to nothing.
    """
    known_protocols = ('hkp://', 'hkps://', 'http://', 'https://',
                       'ldap://', 'mailto:')
    for proto in known_protocols:
        if not location.startswith(proto):
            continue
        url = location.replace(proto, str())
        host, _, extra = url.partition('/')
        if extra:
            log.warn("URI text for %s: '%s'" % (host, extra))
        log.debug("Got host string for keyserver setting: '%s'" % host)
        host = _fix_unsafe(host)
        if host:
            log.debug("Cleaned host string: '%s'" % host)
            return proto + host
        return None
    return None
Check that a given keyserver is a known protocol and does not contain shell escape characters .
14,780
def _check_preferences ( prefs , pref_type = None ) : if prefs is None : return cipher = frozenset ( [ 'AES256' , 'AES192' , 'AES128' , 'CAMELLIA256' , 'CAMELLIA192' , 'TWOFISH' , '3DES' ] ) digest = frozenset ( [ 'SHA512' , 'SHA384' , 'SHA256' , 'SHA224' , 'RMD160' , 'SHA1' ] ) compress = frozenset ( [ 'BZIP2' , 'ZLIB' , 'ZIP' , 'Uncompressed' ] ) trust = frozenset ( [ 'gpg' , 'classic' , 'direct' , 'always' , 'auto' ] ) pinentry = frozenset ( [ 'loopback' ] ) all = frozenset ( [ cipher , digest , compress , trust , pinentry ] ) if isinstance ( prefs , str ) : prefs = set ( prefs . split ( ) ) elif isinstance ( prefs , list ) : prefs = set ( prefs ) else : msg = "prefs must be list of strings, or space-separated string" log . error ( "parsers._check_preferences(): %s" % message ) raise TypeError ( message ) if not pref_type : pref_type = 'all' allowed = str ( ) if pref_type == 'cipher' : allowed += ' ' . join ( prefs . intersection ( cipher ) ) if pref_type == 'digest' : allowed += ' ' . join ( prefs . intersection ( digest ) ) if pref_type == 'compress' : allowed += ' ' . join ( prefs . intersection ( compress ) ) if pref_type == 'trust' : allowed += ' ' . join ( prefs . intersection ( trust ) ) if pref_type == 'pinentry' : allowed += ' ' . join ( prefs . intersection ( pinentry ) ) if pref_type == 'all' : allowed += ' ' . join ( prefs . intersection ( all ) ) return allowed
Check cipher digest and compression preference settings .
14,781
def _hyphenate ( input , add_prefix = False ) : ret = '--' if add_prefix else '' ret += input . replace ( '_' , '-' ) return ret
Change underscores to hyphens so that object attributes can be easily translated to GPG option names .
14,782
def _is_allowed(input):
    """Check that a GPG option is in the set of allowed options.

    The allowed set is a strict subset of all options known to GPG; a
    sanity check enforces that invariant first.

    :returns: the original ``input`` if allowed, else None for
        non-string input that joins to a non-string (unreachable in
        practice).
    :raises UsageError: if the allowed set is not a subset of all known
        GPG options (internal consistency failure).
    :raises ProtectedOption: if the option is not in the allowed set.
    """
    gnupg_options = _get_all_gnupg_options()
    allowed = _get_options_group("allowed")
    try:
        assert allowed.issubset(gnupg_options)
    except AssertionError:
        raise UsageError("'allowed' isn't a subset of known options, diff: %s"
                         % allowed.difference(gnupg_options))
    # Non-string inputs (e.g. iterables of option fragments) are joined
    # into a single space-separated string before checking.
    if not isinstance(input, str):
        input = ' '.join([x for x in input])
    if isinstance(input, str):
        # Normalise underscores to hyphens and ensure a '--' prefix.
        # NOTE(review): `find('_') > 0` misses a leading underscore —
        # presumably no option name starts with one; confirm.
        if input.find('_') > 0:
            if not input.startswith('--'):
                hyphenated = _hyphenate(input, add_prefix=True)
            else:
                hyphenated = _hyphenate(input)
        else:
            hyphenated = input
        try:
            assert hyphenated in allowed
        except AssertionError as ae:
            dropped = _fix_unsafe(hyphenated)
            log.warn("_is_allowed(): Dropping option '%s'..." % dropped)
            raise ProtectedOption("Option '%s' not supported." % dropped)
        else:
            return input
    return None
Check that an option or argument given to GPG is in the set of allowed options the latter being a strict subset of the set of all options known to GPG .
14,783
def _is_string(thing):
    """Return True if ``thing`` is a string type on the running Python.

    On Python 3 this means ``str``; on Python 2, ``basestring`` (which
    covers both ``str`` and ``unicode``).
    """
    # `basestring` is only evaluated on the Python 2 branch, so this is
    # safe under Python 3 where the name does not exist.
    string_type = str if _util._py3k else basestring
    return isinstance(thing, string_type)
Python character arrays are a mess .
14,784
def _sanitise_list(arg_list):
    """Yield each sanitised, non-empty gpg option from ``arg_list``.

    Anything other than a list yields nothing.
    """
    if not isinstance(arg_list, list):
        return
    for arg in arg_list:
        cleaned = _sanitise(arg)
        if cleaned != "":
            yield cleaned
A generator for iterating through a list of gpg options and sanitising them .
14,785
def _get_options_group(group=None):
    """Get a specific group of allowed GPG options.

    :param str group: the name of one of the local frozensets below
        (e.g. ``"hex_options"``, ``"none_options"``, ``"allowed"``).
    :returns: the requested frozenset, or None if ``group`` is falsy or
        unknown (resolved via ``locals()``).
    """
    #: Options whose arguments must be hex strings (keyids, fingerprints).
    hex_options = frozenset(['--check-sigs',
                             '--default-key',
                             '--default-recipient',
                             '--delete-keys',
                             '--delete-secret-keys',
                             '--delete-secret-and-public-keys',
                             '--desig-revoke',
                             '--export',
                             '--export-secret-keys',
                             '--export-secret-subkeys',
                             '--fingerprint',
                             '--gen-revoke',
                             '--hidden-encrypt-to',
                             '--hidden-recipient',
                             '--list-key',
                             '--list-keys',
                             '--list-public-keys',
                             '--list-secret-keys',
                             '--list-sigs',
                             '--recipient',
                             '--recv-keys',
                             '--send-keys',
                             '--edit-key',
                             '--sign-key',
                             ])
    #: Options whose arguments are deliberately not checked here.
    unchecked_options = frozenset(['--list-options',
                                   '--passphrase-fd',
                                   '--status-fd',
                                   '--verify-options',
                                   '--command-fd',
                                   ])
    #: Miscellaneous options checked elsewhere.
    other_options = frozenset(['--debug-level',
                               '--keyserver',
                               ])
    #: Options whose argument must be a directory.
    dir_options = frozenset(['--homedir',
                             ])
    #: Options whose argument names a keyring/trustdb file.
    keyring_options = frozenset(['--keyring',
                                 '--primary-keyring',
                                 '--secret-keyring',
                                 '--trustdb-name',
                                 ])
    #: Options taking either a filename or no argument at all.
    file_or_none_options = frozenset(['--decrypt',
                                      '--decrypt-files',
                                      '--encrypt',
                                      '--encrypt-files',
                                      '--import',
                                      '--verify',
                                      '--verify-files',
                                      '--output',
                                      ])
    #: Options whose argument is an algorithm/model preference string.
    pref_options = frozenset(['--digest-algo',
                              '--cipher-algo',
                              '--compress-algo',
                              '--compression-algo',
                              '--cert-digest-algo',
                              '--personal-digest-prefs',
                              '--personal-digest-preferences',
                              '--personal-cipher-prefs',
                              '--personal-cipher-preferences',
                              '--personal-compress-prefs',
                              '--personal-compress-preferences',
                              '--pinentry-mode',
                              '--print-md',
                              '--trust-model',
                              ])
    #: Flags that take no argument. (Note: '--lock-multiple' and
    #: '--lock-never' are adjacent list items, not a concatenation.)
    none_options = frozenset(['--allow-loopback-pinentry',
                              '--always-trust',
                              '--armor',
                              '--armour',
                              '--batch',
                              '--check-sigs',
                              '--check-trustdb',
                              '--clearsign',
                              '--debug-all',
                              '--default-recipient-self',
                              '--detach-sign',
                              '--export',
                              '--export-ownertrust',
                              '--export-secret-keys',
                              '--export-secret-subkeys',
                              '--fingerprint',
                              '--fixed-list-mode',
                              '--gen-key',
                              '--import-ownertrust',
                              '--list-config',
                              '--list-key',
                              '--list-keys',
                              '--list-packets',
                              '--list-public-keys',
                              '--list-secret-keys',
                              '--list-sigs',
                              '--lock-multiple',
                              '--lock-never',
                              '--lock-once',
                              '--no-default-keyring',
                              '--no-default-recipient',
                              '--no-emit-version',
                              '--no-options',
                              '--no-tty',
                              '--no-use-agent',
                              '--no-verbose',
                              '--print-mds',
                              '--quiet',
                              '--sign',
                              '--symmetric',
                              '--throw-keyids',
                              '--use-agent',
                              '--verbose',
                              '--version',
                              '--with-colons',
                              '--yes',
                              ])
    #: Options that may take a hex argument or none.
    hex_or_none_options = hex_options.intersection(none_options)
    #: Every option this module will pass through to gpg.
    allowed = hex_options.union(unchecked_options, other_options,
                                dir_options, keyring_options,
                                file_or_none_options, pref_options,
                                none_options)

    # Resolve the requested group by name against the locals above.
    if group and group in locals().keys():
        return locals()[group]
Get a specific group of options which are allowed .
14,786
def _get_all_gnupg_options ( ) : three_hundred_eighteen = ( ) . split ( ) three_hundred_eighteen . append ( '--export-ownertrust' ) three_hundred_eighteen . append ( '--import-ownertrust' ) three_hundred_eighteen . append ( '--pinentry-mode' ) three_hundred_eighteen . append ( '--allow-loopback-pinentry' ) gnupg_options = frozenset ( three_hundred_eighteen ) return gnupg_options
Get all GnuPG options and flags .
14,787
def nodata(status_code):
    """Translate NODATA status codes from GnuPG to messages.

    :param status_code: the code emitted by gpg (int or str).
    :returns: the human-readable message, or None for unknown codes.
    """
    lookup = {
        '1': 'No armored data.',
        '2': 'Expected a packet but did not find one.',
        '3': 'Invalid packet found, this may indicate a non OpenPGP message.',
        '4': 'Signature expected but not found.',
    }
    # Direct O(1) lookup instead of scanning all items.
    return lookup.get(str(status_code))
Translate NODATA status codes from GnuPG to messages .
14,788
def progress(status_code):
    """Translate PROGRESS status codes from GnuPG to messages.

    :param status_code: the progress token emitted by gpg.
    :returns: the human-readable message, or None for unknown tokens.
    """
    lookup = {
        'pk_dsa': 'DSA key generation',
        'pk_elg': 'Elgamal key generation',
        'primegen': 'Prime generation',
        'need_entropy': 'Waiting for new entropy in the RNG',
        'tick': 'Generic tick without any special meaning - still working.',
        'starting_agent': 'A gpg-agent was started.',
        'learncard': 'gpg-agent or gpgsm is learning the smartcard data.',
        'card_busy': 'A smartcard is still working.',
    }
    # Direct O(1) lookup instead of scanning all items.
    return lookup.get(str(status_code))
Translate PROGRESS status codes from GnuPG to messages .
14,789
def _clean_key_expiration_option ( self ) : allowed_entry = re . findall ( '^(\d+)(|w|m|y)$' , self . _expiration_time ) if not allowed_entry : raise UsageError ( "Key expiration option: %s is not valid" % self . _expiration_time )
Validates the key expiration option supplied .
14,790
def _create_trustdb(cls):
    """Create ``trustdb.gpg`` in our homedir if it doesn't already exist."""
    trustdb = os.path.join(cls.homedir, 'trustdb.gpg')
    if os.path.isfile(trustdb):
        # Nothing to do: a trustdb is already present.
        return
    log.info("GnuPG complained that your trustdb file was missing. %s"
             % "This is likely due to changing to a new homedir.")
    log.info("Creating trustdb.gpg file in your GnuPG homedir.")
    cls.fix_trustdb(trustdb)
Create the trustdb file in our homedir if it doesn t exist .
14,791
def export_ownertrust(cls, trustdb=None):
    """Export ownertrust to a trustdb file.

    :param str trustdb: path of the file to write; defaults to
        ``trustdb.gpg`` in ``cls.homedir``. Any existing file is first
        renamed to ``<trustdb>.bak``.
    """
    if trustdb is None:
        trustdb = os.path.join(cls.homedir, 'trustdb.gpg')
    try:
        # Keep a backup of any pre-existing trustdb before overwriting.
        os.rename(trustdb, trustdb + '.bak')
    except (OSError, IOError) as err:
        # Best effort: there may simply be no existing trustdb to back up.
        log.debug(str(err))
    export_proc = cls._open_subprocess(['--export-ownertrust'])
    # NOTE(review): `tdb` is never closed. The copy presumably runs on a
    # separate thread (see `_threaded_copy_data`), so closing it safely
    # would require joining that thread first — TODO confirm and fix.
    tdb = open(trustdb, 'wb')
    _util._threaded_copy_data(export_proc.stdout, tdb)
    export_proc.wait()
Export ownertrust to a trustdb file .
14,792
def import_ownertrust(cls, trustdb=None):
    """Import ownertrust from a trustdb file.

    :param str trustdb: path of the file to read; defaults to
        ``trustdb.gpg`` in ``cls.homedir``. If the file cannot be
        opened, an error is logged and nothing is imported.
    """
    if trustdb is None:
        trustdb = os.path.join(cls.homedir, 'trustdb.gpg')
    # Open the trustdb BEFORE spawning gpg: the previous code started the
    # subprocess first and then hit a NameError on `tdb` when the file
    # was missing, leaving an orphaned gpg process behind.
    try:
        tdb = open(trustdb, 'rb')
    except (OSError, IOError):
        log.error("trustdb file %s does not exist!" % trustdb)
        return
    import_proc = cls._open_subprocess(['--import-ownertrust'])
    _util._threaded_copy_data(tdb, import_proc.stdin)
    import_proc.wait()
Import ownertrust from a trustdb file .
14,793
def fix_trustdb(cls, trustdb=None):
    """Attempt to repair a broken trustdb.gpg file.

    Pipes the output of ``--export-ownertrust`` straight into
    ``--import-ownertrust``.
    """
    if trustdb is None:
        # NOTE: computed for interface parity with the other trustdb
        # helpers; the path itself is not passed to gpg here.
        trustdb = os.path.join(cls.homedir, 'trustdb.gpg')
    exporter = cls._open_subprocess(['--export-ownertrust'])
    importer = cls._open_subprocess(['--import-ownertrust'])
    _util._threaded_copy_data(exporter.stdout, importer.stdin)
    exporter.wait()
    importer.wait()
Attempt to repair a broken trustdb . gpg file .
14,794
def status(self, message, *args, **kwargs):
    """Log a GnuPG internal status message at the custom GNUPG level.

    Mirrors the standard ``Logger.debug``/``info`` helpers but for the
    module's ``GNUPG_STATUS_LEVEL``.
    """
    if self.isEnabledFor(GNUPG_STATUS_LEVEL):
        self._log(GNUPG_STATUS_LEVEL, message, args, **kwargs)
LogRecord for GnuPG internal status messages .
14,795
def create_logger(level=logging.NOTSET):
    """Create a logger for python-gnupg at a specific message level.

    :param int level: a ``logging`` level; when above NOTSET, messages
        are also written to a timestamped file under the test directory
        and colourised on stderr. Otherwise a NullHandler is attached.
    :returns: the configured ``'gnupg'`` logger.
    """
    _test = os.path.join(os.path.join(os.getcwd(), 'pretty_bad_protocol'), 'test')
    _now = datetime.now().strftime("%Y-%m-%d_%H%M%S")
    _fn = os.path.join(_test, "%s_test_gnupg.log" % _now)
    _fmt = "%(relativeCreated)-4d L%(lineno)-4d:%(funcName)-18.18s %(levelname)-7.7s %(message)s"

    # Register the custom GNUPG status level and attach the `status`
    # helper method to all Logger instances.
    logging.addLevelName(GNUPG_STATUS_LEVEL, "GNUPG")
    logging.Logger.status = status

    if level > logging.NOTSET:
        logging.basicConfig(level=level, filename=_fn,
                            filemode="a", format=_fmt)
        logging.logThreads = True
        # captureWarnings is only available on newer Pythons.
        if hasattr(logging, 'captureWarnings'):
            logging.captureWarnings(True)
        # Colourise stderr output for the custom and debug levels.
        colouriser = _ansistrm.ColorizingStreamHandler
        colouriser.level_map[9] = (None, 'blue', False)
        colouriser.level_map[10] = (None, 'cyan', False)
        handler = colouriser(sys.stderr)
        handler.setLevel(level)
        formatr = logging.Formatter(_fmt)
        handler.setFormatter(formatr)
    else:
        # Logging disabled: swallow everything.
        handler = NullHandler()

    log = logging.getLogger('gnupg')
    log.addHandler(handler)
    log.setLevel(level)
    log.info("Log opened: %s UTC" % datetime.ctime(datetime.utcnow()))
    return log
Create a logger for python - gnupg at a specific message level .
14,796
def _find_agent(cls):
    """Discover if a gpg-agent process for the current user is running.

    On a match, stores the psutil process object on ``cls._agent_proc``
    and returns True. Returns False when psutil is unavailable or no
    matching agent is found.
    """
    if not psutil:
        # psutil is an optional dependency; without it we can't inspect
        # the process table at all.
        return False

    this_process = psutil.Process(os.getpid())
    ownership_match = False

    if _util._running_windows:
        identity = this_process.username()
    else:
        # NOTE: this deliberately stores the *bound method* (no call);
        # it is invoked as `identity()` in the comparison below.
        identity = this_process.uids

    for proc in psutil.process_iter():
        try:
            if (proc.name() == "gpg-agent") and proc.is_running():
                log.debug("Found gpg-agent process with pid %d" % proc.pid)
                # Only accept an agent owned by the same user as us.
                if _util._running_windows:
                    if proc.username() == identity:
                        ownership_match = True
                else:
                    if proc.uids() == identity():
                        ownership_match = True
        except psutil.Error as err:
            # Processes can vanish or deny access while we iterate.
            log.warn("Error while attempting to find gpg-agent process: %s" % err)
        if ownership_match:
            log.debug("Effective UIDs of this process and gpg-agent match")
            setattr(cls, '_agent_proc', proc)
            return True
    return False
Discover if a gpg - agent process for the current euid is running .
14,797
def default_preference_list(self, prefs):
    """Set the default preference list after validating it.

    Invalid input (None result from validation) leaves the current
    preferences untouched.
    """
    validated = _check_preferences(prefs)
    if validated is not None:
        self._prefs = validated
Set the default preference list .
14,798
def _homedir_setter(self, directory):
    """Set the directory to use as GnuPG's homedir.

    Falls back to the module default when ``directory`` is falsy,
    sanitises the path, creates it if necessary, and (unless
    ``ignore_homedir_permissions`` is set) verifies read/write access.

    :raises RuntimeError: if the directory lacks read/write permissions.
    """
    if not directory:
        log.debug("GPGBase._homedir_setter(): Using default homedir: '%s'"
                  % _util._conf)
        directory = _util._conf

    hd = _parsers._fix_unsafe(directory)
    log.debug("GPGBase._homedir_setter(): got directory '%s'" % hd)

    if hd:
        log.debug("GPGBase._homedir_setter(): Check existence of '%s'" % hd)
        _util._create_if_necessary(hd)

    if self.ignore_homedir_permissions:
        # Caller explicitly opted out of the permission check.
        self._homedir = hd
    else:
        try:
            log.debug("GPGBase._homedir_setter(): checking permissions")
            assert _util._has_readwrite(hd), \
                "Homedir '%s' needs read/write permissions" % hd
        except AssertionError as ae:
            msg = ("Unable to set '%s' as GnuPG homedir" % directory)
            log.debug("GPGBase.homedir.setter(): %s" % msg)
            log.debug(str(ae))
            raise RuntimeError(str(ae))
        else:
            log.info("Setting homedir to '%s'" % hd)
            self._homedir = hd
Set the directory to use as GnuPG s homedir .
14,799
def _generated_keys_setter(self, directory):
    """Set the directory for storing generated keys.

    Defaults to ``<homedir>/generated-keys`` when ``directory`` is
    falsy; the path is sanitised and created if necessary.

    :raises RuntimeError: if the directory lacks read/write permissions.
    """
    if not directory:
        directory = os.path.join(self.homedir, 'generated-keys')
        log.debug("GPGBase._generated_keys_setter(): Using '%s'" % directory)

    hd = _parsers._fix_unsafe(directory)
    log.debug("GPGBase._generated_keys_setter(): got directory '%s'" % hd)

    if hd:
        log.debug("GPGBase._generated_keys_setter(): Check exists '%s'" % hd)
        _util._create_if_necessary(hd)

    try:
        log.debug("GPGBase._generated_keys_setter(): check permissions")
        assert _util._has_readwrite(hd), \
            "Keys dir '%s' needs read/write permissions" % hd
    except AssertionError as ae:
        msg = ("Unable to set '%s' as generated keys dir" % directory)
        log.debug("GPGBase._generated_keys_setter(): %s" % msg)
        log.debug(str(ae))
        raise RuntimeError(str(ae))
    else:
        # Fixed copy-pasted log message: this sets the generated-keys
        # directory, not the homedir.
        log.info("Setting generated keys dir to '%s'" % hd)
        self.__generated_keys = hd
Set the directory for storing generated keys .