idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
14,700
def _deduplicate ( lst ) : out = [ ] for i in lst : if i not in out : out . append ( i ) return out
Auxiliary function to deduplicate lst .
14,701
def _join ( lst , key , sep = ";" ) : return sep . join ( [ d [ key ] for d in lst if d [ key ] ] )
Auxiliary function to join same elements of a list of dictionaries if the elements are not None .
14,702
def authors(self):
    """A list of scopus_api._ScopusAuthor objects, or None when absent."""
    found = self.xml.find('authors', ns)
    try:
        return [_ScopusAuthor(entry) for entry in found]
    except TypeError:
        # find() returned None (no <authors> element), which is not iterable
        return None
A list of scopus_api . _ScopusAuthor objects .
14,703
def citedby_url(self):
    """URL of the Scopus page listing papers that cite this one, or None."""
    link = self.coredata.find('link[@rel="scopus-citedby"]', ns)
    try:
        return link.get('href')
    except AttributeError:
        # no matching <link> element: find() returned None
        return None
URL to Scopus page listing citing papers .
14,704
def scopus_url(self):
    """URL of the abstract page on Scopus, or None when the link is missing."""
    link = self.coredata.find('link[@rel="scopus"]', ns)
    try:
        return link.get('href')
    except AttributeError:
        # find() returned None: the <link rel="scopus"> element is absent
        return None
URL to the abstract page on Scopus .
14,705
def get_corresponding_author_info ( self ) : resp = requests . get ( self . scopus_url ) from lxml import html parsed_doc = html . fromstring ( resp . content ) for div in parsed_doc . body . xpath ( './/div' ) : for a in div . xpath ( 'a' ) : if '/cdn-cgi/l/email-protection' not in a . get ( 'href' , '' ) : continue e...
Try to get corresponding author information .
14,706
def latex ( self ) : s = ( '{authors}, \\textit{{{title}}}, {journal}, {volissue}, ' '{pages}, ({date}). {doi}, {scopus_url}.' ) if len ( self . authors ) > 1 : authors = ', ' . join ( [ str ( a . given_name ) + ' ' + str ( a . surname ) for a in self . authors [ 0 : - 1 ] ] ) authors += ( ' and ' + str ( self . author...
Return LaTeX representation of the abstract .
14,707
def html ( self ) : s = ( u'{authors}, {title}, {journal}, {volissue}, {pages}, ' '({date}). {doi}.' ) au_link = ( '<a href="https://www.scopus.com/authid/detail.url' '?origin=AuthorProfile&authorId={0}">{1}</a>' ) if len ( self . authors ) > 1 : authors = u', ' . join ( [ au_link . format ( a . auid , ( str ( a . give...
Returns an HTML citation .
14,708
def cc(self):
    """List of (year, citation count) tuples spanning [_start, _end].

    Falls back to zero citations per year when the stored citation matrix
    entries do not support dict-style access.
    """
    years = list(range(self._start, self._end + 1))
    try:
        counts = [entry.get('$') for entry in self._citeInfoMatrix['cc']]
        return list(zip(years, counts))
    except AttributeError:
        # entries (or the matrix itself) lack the expected interface
        return list(zip(years, [0] * len(years)))
List of tuples of yearly number of citations for specified years .
14,709
def affiliation_history(self):
    """List of ScopusAffiliation objects for the author's former affiliations.

    Only affiliations whose <ip-doc> subtree contains more than one node
    are included.
    """
    aff_ids = [e.attrib.get('affiliation-id')
               for e in self.xml.findall('author-profile/affiliation-history/affiliation')
               # NOTE(review): e.find("ip-doc") may return None, which would
               # raise AttributeError here -- confirm <ip-doc> is always present.
               if e is not None and len(list(e.find("ip-doc").iter())) > 1]
    return [ScopusAffiliation(aff_id) for aff_id in aff_ids]
List of ScopusAffiliation objects representing former affiliations of the author . Only affiliations with more than one publication are considered .
14,710
def get_coauthors ( self ) : url = self . xml . find ( 'coredata/link[@rel="coauthor-search"]' ) . get ( 'href' ) xml = download ( url = url ) . text . encode ( 'utf-8' ) xml = ET . fromstring ( xml ) coauthors = [ ] N = int ( get_encoded_text ( xml , 'opensearch:totalResults' ) or 0 ) AUTHOR = namedtuple ( 'Author' , ...
Return a list of coauthors, with their Scopus IDs and research areas .
14,711
def get_document_eids(self, *args, **kwds):
    """Return the list of EIDs for the author's documents via ScopusSearch."""
    query = 'au-id({})'.format(self.author_id)
    return ScopusSearch(query, *args, **kwds).get_eids()
Return list of EIDs for the author using ScopusSearch .
14,712
def get_abstracts(self, refresh=True):
    """Return a list of ScopusAbstract objects, one per document EID."""
    eids = self.get_document_eids(refresh=refresh)
    return [ScopusAbstract(eid, refresh=refresh) for eid in eids]
Return a list of ScopusAbstract objects using ScopusSearch .
14,713
def get_journal_abstracts(self, refresh=True):
    """Return only those of the author's abstracts published in journals."""
    journal_only = []
    for ab in self.get_abstracts(refresh=refresh):
        if ab.aggregationType == 'Journal':
            journal_only.append(ab)
    return journal_only
Return a list of ScopusAbstract objects using ScopusSearch but only if belonging to a Journal .
14,714
def get_document_summary ( self , N = None , cite_sort = True , refresh = True ) : abstracts = self . get_abstracts ( refresh = refresh ) if cite_sort : counts = [ ( a , int ( a . citedby_count ) ) for a in abstracts ] counts . sort ( reverse = True , key = itemgetter ( 1 ) ) abstracts = [ a [ 0 ] for a in counts ] if ...
Return a summary string of documents .
14,715
def author_impact_factor ( self , year = 2014 , refresh = True ) : scopus_abstracts = self . get_journal_abstracts ( refresh = refresh ) cites = [ int ( ab . citedby_count ) for ab in scopus_abstracts ] years = [ int ( ab . coverDate . split ( '-' ) [ 0 ] ) for ab in scopus_abstracts ] data = zip ( years , cites , scop...
Get the author_impact_factor for the given year .
14,716
def n_first_author_papers(self, refresh=True):
    """Return the number of journal papers where this author is listed first."""
    return sum(ab.authors[0].scopusid == self.author_id
               for ab in self.get_journal_abstracts(refresh=refresh))
Return number of papers with author as the first author .
14,717
def n_yearly_publications(self, refresh=True):
    """Return a Counter mapping publication year -> number of journal papers."""
    years = (int(ab.coverDate.split('-')[0])
             for ab in self.get_journal_abstracts(refresh=refresh))
    return Counter(years)
Number of journal publications in a given year .
14,718
def _get_org ( aff ) : try : org = aff [ 'organization' ] if not isinstance ( org , str ) : try : org = org [ '$' ] except TypeError : org = ', ' . join ( [ d [ '$' ] for d in org if d ] ) except KeyError : org = None return org
Auxiliary function to extract org information from affiliation for authorgroup .
14,719
def _parse_pages ( self , unicode = False ) : if self . pageRange : pages = 'pp. {}' . format ( self . pageRange ) elif self . startingPage : pages = 'pp. {}-{}' . format ( self . startingPage , self . endingPage ) else : pages = '(no pages found)' if unicode : pages = u'{}' . format ( pages ) return pages
Auxiliary function to parse and format page range of a document .
14,720
def authkeywords(self):
    """List of author-provided keywords, or None when absent."""
    keywords = self._json['authkeywords']
    if keywords is None:
        return None
    try:
        return [entry['$'] for entry in keywords['author-keyword']]
    except TypeError:
        # a single keyword: a dict rather than a list of dicts
        return [keywords['author-keyword']['$']]
List of author - provided keywords of the abstract .
14,721
def idxterms(self):
    """List of index terms of the abstract, or None when unavailable."""
    try:
        terms = listify(self._json.get("idxterms", {}).get('mainterm', []))
    except AttributeError:
        # "idxterms" maps to something without .get() (e.g. None)
        return None
    try:
        return [term['$'] for term in terms]
    except AttributeError:
        return None
List of index terms .
14,722
def get_html ( self ) : au_link = ( '<a href="https://www.scopus.com/authid/detail.url' '?origin=AuthorProfile&authorId={0}">{1}</a>' ) if len ( self . authors ) > 1 : authors = u', ' . join ( [ au_link . format ( a . auid , a . given_name + ' ' + a . surname ) for a in self . authors [ 0 : - 1 ] ] ) authors += ( u' an...
Bibliographic entry in html format .
14,723
def get_latex ( self ) : if len ( self . authors ) > 1 : authors = _list_authors ( self . authors ) else : a = self . authors authors = ' ' . join ( [ a . given_name , a . surname ] ) if self . volume and self . issueIdentifier : volissue = '\\textbf{{{}({})}}' . format ( self . volume , self . issueIdentifier ) elif s...
Bibliographic entry in LaTeX format .
14,724
def _parse ( res , params , n , api , ** kwds ) : cursor = "cursor" in params if not cursor : start = params [ "start" ] if n == 0 : return "" _json = res . get ( 'search-results' , { } ) . get ( 'entry' , [ ] ) while n > 0 : n -= params [ "count" ] if cursor : pointer = res [ 'search-results' ] [ 'cursor' ] . get ( '@...
Auxiliary function to download results and parse json .
14,725
def create_config ( ) : file_exists = exists ( CONFIG_FILE ) if not file_exists : config . add_section ( 'Directories' ) defaults = [ ( 'AbstractRetrieval' , expanduser ( '~/.scopus/abstract_retrieval' ) ) , ( 'AffiliationSearch' , expanduser ( '~/.scopus/affiliation_search' ) ) , ( 'AuthorRetrieval' , expanduser ( '~/...
Initiates process to generate configuration file .
14,726
def get_encoded_text(container, xpath):
    """Return the joined text of the element at *xpath* inside *container*.

    Returns None when the element is not present.
    """
    element = container.find(xpath, ns)
    try:
        return "".join(element.itertext())
    except AttributeError:
        # find() returned None: no such element
        return None
Return text for element at xpath in the container xml if it is there .
14,727
def main ( argv = None ) : signal . signal ( signal . SIGINT , lambda * args : sys . exit ( _SIGNAL_EXIT_CODE_BASE + signal . SIGINT ) ) if hasattr ( signal , 'SIGINFO' ) : signal . signal ( getattr ( signal , 'SIGINFO' ) , lambda * args : report_progress ( sys . stderr ) ) try : return docopt_subcommands . main ( comm...
Invoke the cosmic ray evaluation .
14,728
def extend_name(suffix):
    """Class-decorator factory appending *suffix* to the decorated class's name."""
    def dec(cls):
        cls.__name__ = '{}{}'.format(cls.__name__, suffix)
        return cls
    return dec
A factory for class decorators that modify the class name by appending some text to it .
14,729
def mutate(self, node, index):
    """Return a new Number node whose value is shifted by OFFSETS[index]."""
    assert index < len(OFFSETS), 'received count with no associated offset'
    assert isinstance(node, parso.python.tree.Number)
    # eval() parses the numeric literal (ints, floats, exponents); the input
    # is a token from a parsed Number node, not arbitrary user text.
    val = eval(node.value) + OFFSETS[index]
    # leading space keeps the mutated token separated from preceding code
    return parso.python.tree.Number(' ' + str(val), node.start_pos)
Modify the numeric value on node .
14,730
def _prohibited(from_op, to_op):
    """Determine whether mutating *from_op* into *to_op* is disallowed."""
    # `not` may only be removed entirely (mutated to Nothing)
    if from_op is UnaryOperators.Not and to_op is not UnaryOperators.Nothing:
        return True
    # unary + may be mutated to anything except plain removal
    if from_op is UnaryOperators.UAdd and to_op is UnaryOperators.Nothing:
        return True
    return False
Determines if from_op is allowed to be mutated to to_op .
14,731
def load_config ( filename = None ) : try : with _config_stream ( filename ) as handle : filename = handle . name return deserialize_config ( handle . read ( ) ) except ( OSError , toml . TomlDecodeError , UnicodeDecodeError ) as exc : raise ConfigError ( 'Error loading configuration from {}' . format ( filename ) ) fr...
Load a configuration from a file or stdin .
14,732
def _config_stream(filename):
    """Yield a readable stream for a configuration.

    A *filename* of None or '-' selects stdin; anything else is opened as
    a text file and closed when the generator is finalized.
    """
    if filename in (None, '-'):
        log.info('Reading config from stdin')
        yield sys.stdin
    else:
        with open(filename, mode='rt') as stream:
            log.info('Reading config from %r', filename)
            yield stream
Given a configuration's filename , this returns a stream from which a configuration can be read .
14,733
def sub(self, *segments):
    """Get a sub-configuration by walking *segments*.

    Returns an empty ConfigDict when any segment is missing.
    """
    node = self
    for segment in segments:
        try:
            node = node[segment]
        except KeyError:
            return ConfigDict({})
    return node
Get a sub - configuration .
14,734
def python_version(self):
    """Return the configured Python version (e.g. '3.6').

    An empty string in the config means "auto-detect": fall back to the
    running interpreter's major.minor version.
    """
    configured = self.get('python-version', '')
    if configured != '':
        return configured
    return "{}.{}".format(sys.version_info.major, sys.version_info.minor)
Get the configured Python version .
14,735
def mutate(self, node, index):
    """Mutate the For loop so it iterates an empty list (body never runs)."""
    assert index == 0
    assert isinstance(node, ForStmt)
    # children[3] is presumably the iterable in `for <targets> in <iterable>:`
    # -- confirm against parso's ForStmt child layout.
    empty_list = parso.parse(' []')
    node.children[3] = empty_list
    return node
Modify the For loop to evaluate to None
14,736
def get_operator(name):
    """Look up an operator class from a 'provider/operator' qualified name.

    Raises ValueError when *name* contains no '/', mirroring the original
    str.index behaviour.
    """
    provider_name, operator_name = name.split('/', 1)
    return OPERATOR_PROVIDERS[provider_name][operator_name]
Get an operator class from a provider plugin .
14,737
def operator_names():
    """Return all qualified operator names as 'provider/operator' strings."""
    names = []
    for provider_name, provider in OPERATOR_PROVIDERS.items():
        for operator_name in provider:
            names.append('{}/{}'.format(provider_name, operator_name))
    return tuple(names)
Get all operator names .
14,738
def get_execution_engine(name):
    """Load and return the named execution engine plugin.

    Uses stevedore's DriverManager over the 'cosmic_ray.execution_engines'
    entry-point namespace; load failures are routed to the logging callback.
    """
    manager = driver.DriverManager(
        namespace='cosmic_ray.execution_engines',
        name=name,
        invoke_on_load=True,
        on_load_failure_callback=_log_extension_loading_failure,
    )
    return manager.driver
Get the execution engine by name .
14,739
def use_db(path, mode=WorkDB.Mode.create):
    """Context manager: open a WorkDB at *path* in *mode* and yield it.

    The database is always closed when the with-block exits.
    """
    database = WorkDB(path, mode)
    try:
        yield database
    finally:
        database.close()
Open a DB in file path in mode mode as a context manager .
14,740
def work_items(self):
    """Iterate over every work item in the database as WorkItem objects."""
    rows = self._conn.cursor().execute("SELECT * FROM work_items")
    for row in rows:
        yield _row_to_work_item(row)
An iterable of all of WorkItems in the db .
14,741
def clear(self):
    """Delete all results and work items from the session, in one transaction."""
    conn = self._conn
    with conn:
        conn.execute('DELETE FROM results')
        conn.execute('DELETE FROM work_items')
Clear all work items from the session .
14,742
def pending_work_items(self):
    """Iterable of work items that do not yet have a recorded result."""
    rows = self._conn.execute(
        "SELECT * FROM work_items WHERE job_id NOT IN (SELECT job_id FROM results)")
    return (_row_to_work_item(row) for row in rows)
Iterable of all pending work items .
14,743
# cr-xml command: write an XML report for the session to stdout.
# NOTE: docopt parses this function's usage docstring (report_xml.__doc__),
# so we must not add or alter a docstring here.
def report_xml():
    arguments = docopt.docopt(report_xml.__doc__, version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        xml_elem = _create_xml_report(db)
        xml_elem.write(sys.stdout.buffer, encoding='utf-8', xml_declaration=True)
cr - xml
14,744
def execute ( db_name ) : try : with use_db ( db_name , mode = WorkDB . Mode . open ) as work_db : _update_progress ( work_db ) config = work_db . get_config ( ) engine = get_execution_engine ( config . execution_engine_name ) def on_task_complete ( job_id , work_result ) : work_db . set_result ( job_id , work_result )...
Execute any pending work in the database stored in db_name recording the results .
14,745
def get_ast(module_path, python_version):
    """Parse the Python source in *module_path* and return its parso AST.

    *module_path* is a pathlib-style path; the file is read as UTF-8 text.
    """
    with module_path.open(mode='rt', encoding='utf-8') as handle:
        source = handle.read()
    return parso.parse(source, version=python_version)
Get the AST for the code in a file .
14,746
def is_none(node):
    """Determine if *node* is the `None` keyword."""
    if not isinstance(node, parso.python.tree.Keyword):
        return False
    return node.value == 'None'
Determine if a node is the None keyword .
14,747
def walk(self, node):
    """Walk a parse tree, calling visit() for each node.

    Returns the (possibly replaced) node, or None when visit() pruned it.
    Children pruned during the recursive walk are removed from their
    parent's child list.
    """
    node = self.visit(node)
    if node is None:
        return None
    if isinstance(node, parso.tree.BaseNode):
        walked = map(self.walk, node.children)
        node.children = [child for child in walked if child is not None]
    return node
Walk a parse tree calling visit for each node .
14,748
# cr-rate command: print the session's mutant survival rate as a percentage.
# NOTE: docopt parses this function's usage docstring, so no docstring is
# added here.
def format_survival_rate():
    arguments = docopt.docopt(format_survival_rate.__doc__, version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        rate = survival_rate(db)
    print('{:.2f}'.format(rate))
cr - rate
14,749
def survival_rate(work_db):
    """Calculate the percentage of mutants in *work_db* that were not killed.

    Returns 0 when there are no results at all.
    """
    num_results = work_db.num_results
    if not num_results:
        return 0
    kills = sum(result.is_killed for _, result in work_db.results)
    return (1 - kills / num_results) * 100
Calculate the survival rate for the results in a WorkDB .
14,750
# cr-html command: print an HTML report for the session to stdout.
# NOTE: docopt parses this function's usage docstring, so no docstring is
# added here.
def report_html():
    arguments = docopt.docopt(report_html.__doc__, version='cr-rate 1.0')
    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        doc = _generate_html_report(db)
    print(doc.getvalue())
cr - html
14,751
def report ( ) : arguments = docopt . docopt ( report . __doc__ , version = 'cr-format 0.1' ) show_pending = arguments [ '--show-pending' ] show_output = arguments [ '--show-output' ] show_diff = arguments [ '--show-diff' ] with use_db ( arguments [ '<session-file>' ] , WorkDB . Mode . open ) as db : for work_item , re...
cr - report
14,752
def new_config ( ) : config = ConfigDict ( ) config [ "module-path" ] = qprompt . ask_str ( "Top-level module path" , blk = False , vld = os . path . exists , hlp = MODULE_PATH_HELP ) python_version = qprompt . ask_str ( 'Python version (blank for auto detection)' , vld = _validate_python_version , hlp = PYTHON_VERSION...
Prompt user for config variables and generate new config .
14,753
def init ( module_paths , work_db , config ) : operator_names = cosmic_ray . plugins . operator_names ( ) work_db . set_config ( config = config ) work_db . clear ( ) for module_path in module_paths : module_ast = get_ast ( module_path , python_version = config . python_version ) for op_name in operator_names : operato...
Clear and initialize a work - db with work items .
14,754
def apply_interceptors(work_db, enabled_interceptors):
    """Run every enabled, registered interceptor against *work_db*.

    Interceptors run in registration order; those not listed in
    *enabled_interceptors* are skipped.
    """
    for name in interceptor_names():
        if name in enabled_interceptors:
            get_interceptor(name)(work_db)
Apply each registered interceptor to the WorkDB .
14,755
def _allowed(to_op, from_op, rhs):
    """Determine if a mutation from `from_op` to `to_op` is allowed given a
    particular `rhs` node.

    None and numeric right-hand sides each restrict the set of operators
    that may replace the original; any other rhs allows every mutation.
    """
    if is_none(rhs):
        return to_op in _RHS_IS_NONE_OPS.get(from_op, ())
    if is_number(rhs):
        return to_op in _RHS_IS_INTEGER_OPS
    return True
Determine if a mutation from from_op to to_op is allowed given a particular rhs node .
14,756
def worker ( module_path , python_version , operator_name , occurrence , test_command , timeout ) : try : operator_class = cosmic_ray . plugins . get_operator ( operator_name ) operator = operator_class ( python_version ) with cosmic_ray . mutating . use_mutation ( module_path , operator , occurrence ) as ( original_co...
Mutate the OCCURRENCE - th site for OPERATOR_NAME in MODULE_PATH run the tests and report the results .
14,757
def report_progress(stream=None):
    """Invoke every installed progress reporter, writing to *stream*.

    Defaults to sys.stderr when no stream is supplied.
    """
    target = sys.stderr if stream is None else stream
    for reporter in _reporters:
        reporter(target)
Report progress from any currently installed reporters .
14,758
def reports_progress(reporter):
    """Decorator factory marking functions which report progress.

    While the wrapped function runs, *reporter* is installed via the
    progress_reporter context manager and removed afterwards.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with progress_reporter(reporter):
                return func(*args, **kwargs)
        return wrapper
    return decorator
A decorator factory to mark functions which report progress .
14,759
def tags():
    """Get a set of tags for the current git repo.

    Asserts that `git tag` reported no duplicate names before returning.
    """
    raw = subprocess.check_output(['git', 'tag']).split(b"\n")
    result = [t.decode('ascii') for t in raw]
    assert len(set(result)) == len(result)
    return set(result)
Get a set of tags for the current git repo .
14,760
def create_tag_and_push ( version ) : "Create a git tag for `version` and push it to origin." assert version not in tags ( ) git ( 'config' , 'user.name' , 'Travis CI on behalf of Austin Bingham' ) git ( 'config' , 'user.email' , 'austin@sixty-north.com' ) git ( 'config' , 'core.sshCommand' , 'ssh -i deploy_key' ) git ...
Create a git tag for version and push it to origin .
14,761
def worker_task(work_item, config):
    """Celery task: perform one mutation and run the test suite against it.

    Returns a (job_id, result) tuple for the given work item.
    """
    global _workspace
    # make sure the cloned workspace for this worker process exists
    _ensure_workspace(config)
    result = worker(work_item.module_path, config.python_version,
                    work_item.operator_name, work_item.occurrence,
                    config.test_command, config.timeout)
    return work_item.job_id, result
The celery task which performs a single mutation and runs a test suite .
14,762
def execute_work_items(work_items, config):
    """Build a celery group running one worker_task signature per work item."""
    return celery.group(
        worker_task.s(work_item, config) for work_item in work_items)
Execute a suite of tests for a given set of work items .
14,763
def cloned_workspace(clone_config, chdir=True):
    """Create a cloned workspace and yield it.

    When *chdir* is true, the process cwd is changed into the clone for the
    duration of the with-block.  On exit the original cwd is restored and
    the clone is removed.
    """
    workspace = ClonedWorkspace(clone_config)
    original_dir = os.getcwd()
    if chdir:
        os.chdir(workspace.clone_dir)
    try:
        yield workspace
    finally:
        os.chdir(original_dir)
        workspace.cleanup()
Create a cloned workspace and yield it .
14,764
def clone_with_git(repo_uri, dest_path):
    """Create a clone by shallow-cloning (depth 1) the git repo at *repo_uri*."""
    log.info('Cloning git repo %s to %s', repo_uri, dest_path)
    git.Repo.clone_from(repo_uri, dest_path, depth=1)
Create a clone by cloning a git repository .
14,765
def clone_with_copy(src_path, dest_path):
    """Clone a directory tree by recursively copying it to *dest_path*."""
    log.info('Cloning directory tree %s to %s', src_path, dest_path)
    shutil.copytree(src_path, dest_path)
Clone a directory tree by copying it .
14,766
def _build_env ( venv_dir ) : prefix = getattr ( sys , 'real_prefix' , sys . prefix ) python = Path ( prefix ) / 'bin' / 'python' command = '{} -m venv {}' . format ( python , venv_dir ) try : log . info ( 'Creating virtual environment: %s' , command ) subprocess . run ( command . split ( ) , stdout = subprocess . PIPE...
Create a new virtual environment in venv_dir .
14,767
def replace_variables(self, text):
    """Replace {python-executable} placeholders in *text* with venv values."""
    substitutions = {
        'python-executable': str(self._venv_path / 'bin' / 'python'),
    }
    return text.format(**substitutions)
Replace variable placeholders in text with values from the virtual env .
14,768
def cleanup(self):
    """Remove the directory containing the clone and virtual environment."""
    log.info('Removing temp dir %s', self._tempdir.name)
    self._tempdir.cleanup()
Remove the directory containing the clone and virtual environment .
14,769
def as_dict(self):
    """Get the WorkResult as a dict of its four fields."""
    return dict(
        output=self.output,
        test_outcome=self.test_outcome,
        worker_outcome=self.worker_outcome,
        diff=self.diff,
    )
Get the WorkResult as a dict .
14,770
def as_dict(self):
    """Get the work item's fields as a dict (module_path stringified)."""
    fields = {
        'module_path': str(self.module_path),
        'operator_name': self.operator_name,
        'occurrence': self.occurrence,
        'start_pos': self.start_pos,
        'end_pos': self.end_pos,
        'job_id': self.job_id,
    }
    return fields
Get fields as a dict .
14,771
def intercept ( work_db ) : @ lru_cache ( ) def file_contents ( file_path ) : "A simple cache of file contents." with file_path . open ( mode = "rt" ) as handle : return handle . readlines ( ) for item in work_db . work_items : try : repo = open_repository ( item . module_path ) except ValueError : log . info ( "No spo...
Look for WorkItems in work_db that should not be mutated due to spor metadata .
14,772
def _line_and_col_to_offset ( lines , line , col ) : offset = 0 for index , contents in enumerate ( lines , 1 ) : if index == line : return offset + col offset += len ( contents ) raise ValueError ( "Offset {}:{} not found" . format ( line , col ) )
Figure out the offset into a file for a particular line and col .
14,773
def _item_in_context ( lines , item , context ) : start_offset = _line_and_col_to_offset ( lines , item . start_pos [ 0 ] , item . start_pos [ 1 ] ) stop_offset = _line_and_col_to_offset ( lines , item . end_pos [ 0 ] , item . end_pos [ 1 ] ) width = stop_offset - start_offset return start_offset >= context . offset an...
Determines if a WorkItem falls within an anchor .
14,774
def use_mutation(module_path, operator, occurrence):
    """Context manager applying a mutation for the duration of a with-block.

    Yields (original_code, mutated_code); the original file contents are
    always written back when the block exits, even on error.
    """
    original_code, mutated_code = apply_mutation(module_path, operator, occurrence)
    try:
        yield original_code, mutated_code
    finally:
        with module_path.open(mode='wt', encoding='utf-8') as handle:
            handle.write(original_code)
            handle.flush()
A context manager that applies a mutation for the duration of a with - block .
14,775
def apply_mutation ( module_path , operator , occurrence ) : module_ast = get_ast ( module_path , python_version = operator . python_version ) original_code = module_ast . get_code ( ) visitor = MutationVisitor ( occurrence , operator ) mutated_ast = visitor . walk ( module_ast ) mutated_code = None if visitor . mutati...
Apply a specific mutation to a file on disk .
14,776
def get_requirements ( ) : requirements_file = os . path . join ( os . getcwd ( ) , 'requirements.txt' ) requirements = [ ] links = [ ] try : with open ( requirements_file ) as reqfile : for line in reqfile . readlines ( ) : line = line . strip ( ) if line . startswith ( '#' ) : continue elif line . startswith ( ( 'htt...
Extract the list of requirements from our requirements . txt .
14,777
def createBatchfile ( keyparams = allparams ) : useparams = { } for key , value in keyparams . items ( ) : if value : useparams . update ( { key : value } ) batchfile = gpg . gen_key_input ( separate_keyring = True , save_batchfile = True , ** useparams ) log . info ( "Generated GnuPG batch file:\n%s" % batchfile ) ret...
Create the batchfile for our new key .
14,778
def exportNewKey ( fingerprint ) : log . info ( "Exporting key: %s" % fingerprint ) keyfn = os . path . join ( gpg . homedir , fingerprint + '-8192-bit-key' ) + os . path . extsep pubkey = gpg . export_keys ( fingerprint ) seckey = gpg . export_keys ( fingerprint , secret = True ) subkey = gpg . export_keys ( fingerpri...
Export the new keys into . asc files .
14,779
def _check_keyserver ( location ) : protocols = [ 'hkp://' , 'hkps://' , 'http://' , 'https://' , 'ldap://' , 'mailto:' ] for proto in protocols : if location . startswith ( proto ) : url = location . replace ( proto , str ( ) ) host , slash , extra = url . partition ( '/' ) if extra : log . warn ( "URI text for %s: '%...
Check that a given keyserver is a known protocol and does not contain shell escape characters .
14,780
def _check_preferences ( prefs , pref_type = None ) : if prefs is None : return cipher = frozenset ( [ 'AES256' , 'AES192' , 'AES128' , 'CAMELLIA256' , 'CAMELLIA192' , 'TWOFISH' , '3DES' ] ) digest = frozenset ( [ 'SHA512' , 'SHA384' , 'SHA256' , 'SHA224' , 'RMD160' , 'SHA1' ] ) compress = frozenset ( [ 'BZIP2' , 'ZLIB...
Check cipher digest and compression preference settings .
14,781
def _hyphenate ( input , add_prefix = False ) : ret = '--' if add_prefix else '' ret += input . replace ( '_' , '-' ) return ret
Change underscores to hyphens so that object attributes can be easily translated to GPG option names .
14,782
def _is_allowed ( input ) : gnupg_options = _get_all_gnupg_options ( ) allowed = _get_options_group ( "allowed" ) try : assert allowed . issubset ( gnupg_options ) except AssertionError : raise UsageError ( "'allowed' isn't a subset of known options, diff: %s" % allowed . difference ( gnupg_options ) ) if not isinstanc...
Check that an option or argument given to GPG is in the set of allowed options the latter being a strict subset of the set of all options known to GPG .
14,783
def _is_string(thing):
    """Return True if *thing* is a string on either Python 2 or Python 3."""
    if _util._py3k:
        return isinstance(thing, str)
    else:
        # Python 2: basestring covers both str and unicode
        return isinstance(thing, basestring)
Check whether thing is a string , bridging the Python 2 / 3 str / basestring difference .
14,784
def _sanitise_list(arg_list):
    """Generator yielding each gpg option in *arg_list* after sanitising.

    Arguments that sanitise to the empty string are dropped; non-list
    inputs yield nothing.
    """
    if isinstance(arg_list, list):
        for arg in arg_list:
            safe_arg = _sanitise(arg)
            if safe_arg != "":
                yield safe_arg
A generator for iterating through a list of gpg options and sanitising them .
14,785
def _get_options_group ( group = None ) : hex_options = frozenset ( [ '--check-sigs' , '--default-key' , '--default-recipient' , '--delete-keys' , '--delete-secret-keys' , '--delete-secret-and-public-keys' , '--desig-revoke' , '--export' , '--export-secret-keys' , '--export-secret-subkeys' , '--fingerprint' , '--gen-re...
Get a specific group of options which are allowed .
14,786
def _get_all_gnupg_options ( ) : three_hundred_eighteen = ( ) . split ( ) three_hundred_eighteen . append ( '--export-ownertrust' ) three_hundred_eighteen . append ( '--import-ownertrust' ) three_hundred_eighteen . append ( '--pinentry-mode' ) three_hundred_eighteen . append ( '--allow-loopback-pinentry' ) gnupg_option...
Get all GnuPG options and flags .
14,787
def nodata(status_code):
    """Translate a NODATA status code from GnuPG into a readable message.

    *status_code* may be an int or string; unknown codes return None
    (matching the previous linear-scan behaviour).
    """
    lookup = {
        '1': 'No armored data.',
        '2': 'Expected a packet but did not find one.',
        '3': 'Invalid packet found, this may indicate a non OpenPGP message.',
        '4': 'Signature expected but not found.',
    }
    # direct dict lookup replaces the original O(n) scan over items()
    return lookup.get(str(status_code))
Translate NODATA status codes from GnuPG to messages .
14,788
def progress ( status_code ) : lookup = { 'pk_dsa' : 'DSA key generation' , 'pk_elg' : 'Elgamal key generation' , 'primegen' : 'Prime generation' , 'need_entropy' : 'Waiting for new entropy in the RNG' , 'tick' : 'Generic tick without any special meaning - still working.' , 'starting_agent' : 'A gpg-agent was started.'...
Translate PROGRESS status codes from GnuPG to messages .
14,789
def _clean_key_expiration_option(self):
    """Validate the supplied key expiration option.

    Accepts a number optionally suffixed with w/m/y (e.g. '0', '3w', '6m',
    '1y'); raises UsageError otherwise.
    """
    allowed_entry = re.findall('^(\d+)(|w|m|y)$', self._expiration_time)
    if not allowed_entry:
        raise UsageError("Key expiration option: %s is not valid" % self._expiration_time)
validates the expiration option supplied
14,790
def _create_trustdb ( cls ) : trustdb = os . path . join ( cls . homedir , 'trustdb.gpg' ) if not os . path . isfile ( trustdb ) : log . info ( "GnuPG complained that your trustdb file was missing. %s" % "This is likely due to changing to a new homedir." ) log . info ( "Creating trustdb.gpg file in your GnuPG homedir."...
Create the trustdb file in our homedir if it doesn't exist .
14,791
def export_ownertrust ( cls , trustdb = None ) : if trustdb is None : trustdb = os . path . join ( cls . homedir , 'trustdb.gpg' ) try : os . rename ( trustdb , trustdb + '.bak' ) except ( OSError , IOError ) as err : log . debug ( str ( err ) ) export_proc = cls . _open_subprocess ( [ '--export-ownertrust' ] ) tdb = o...
Export ownertrust to a trustdb file .
14,792
def import_ownertrust ( cls , trustdb = None ) : if trustdb is None : trustdb = os . path . join ( cls . homedir , 'trustdb.gpg' ) import_proc = cls . _open_subprocess ( [ '--import-ownertrust' ] ) try : tdb = open ( trustdb , 'rb' ) except ( OSError , IOError ) : log . error ( "trustdb file %s does not exist!" % trust...
Import ownertrust from a trustdb file .
14,793
def fix_trustdb ( cls , trustdb = None ) : if trustdb is None : trustdb = os . path . join ( cls . homedir , 'trustdb.gpg' ) export_proc = cls . _open_subprocess ( [ '--export-ownertrust' ] ) import_proc = cls . _open_subprocess ( [ '--import-ownertrust' ] ) _util . _threaded_copy_data ( export_proc . stdout , import_p...
Attempt to repair a broken trustdb . gpg file .
14,794
def status(self, message, *args, **kwargs):
    """Emit a LogRecord for GnuPG internal status messages.

    Logs at the custom GNUPG_STATUS_LEVEL, only when that level is enabled.
    """
    if self.isEnabledFor(GNUPG_STATUS_LEVEL):
        self._log(GNUPG_STATUS_LEVEL, message, args, **kwargs)
LogRecord for GnuPG internal status messages .
14,795
def create_logger ( level = logging . NOTSET ) : _test = os . path . join ( os . path . join ( os . getcwd ( ) , 'pretty_bad_protocol' ) , 'test' ) _now = datetime . now ( ) . strftime ( "%Y-%m-%d_%H%M%S" ) _fn = os . path . join ( _test , "%s_test_gnupg.log" % _now ) _fmt = "%(relativeCreated)-4d L%(lineno)-4d:%(funcN...
Create a logger for python - gnupg at a specific message level .
14,796
def _find_agent ( cls ) : if not psutil : return False this_process = psutil . Process ( os . getpid ( ) ) ownership_match = False if _util . _running_windows : identity = this_process . username ( ) else : identity = this_process . uids for proc in psutil . process_iter ( ) : try : if ( proc . name ( ) == "gpg-agent" ...
Discover if a gpg - agent process for the current euid is running .
14,797
def default_preference_list(self, prefs):
    """Set the default key preference list after validating it.

    NOTE(review): _check_preferences appears to return None for rejected
    input, in which case the stored preferences are left unchanged --
    confirm against its full implementation.
    """
    prefs = _check_preferences(prefs)
    if prefs is not None:
        self._prefs = prefs
Set the default preference list .
14,798
def _homedir_setter ( self , directory ) : if not directory : log . debug ( "GPGBase._homedir_setter(): Using default homedir: '%s'" % _util . _conf ) directory = _util . _conf hd = _parsers . _fix_unsafe ( directory ) log . debug ( "GPGBase._homedir_setter(): got directory '%s'" % hd ) if hd : log . debug ( "GPGBase._...
Set the directory to use as GnuPG s homedir .
14,799
def _generated_keys_setter ( self , directory ) : if not directory : directory = os . path . join ( self . homedir , 'generated-keys' ) log . debug ( "GPGBase._generated_keys_setter(): Using '%s'" % directory ) hd = _parsers . _fix_unsafe ( directory ) log . debug ( "GPGBase._generated_keys_setter(): got directory '%s'...
Set the directory for storing generated keys .