idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
13,800
def get_rosetta_sequence_to_atom_json_map ( self ) : import json if not self . rosetta_to_atom_sequence_maps and self . rosetta_sequences : raise Exception ( 'The PDB to Rosetta mapping has not been determined. Please call construct_pdb_to_rosetta_residue_map first.' ) d = { } for c , sm in self . rosetta_to_atom_sequence_maps . iteritems ( ) : for k , v in sm . map . iteritems ( ) : d [ k ] = v return json . dumps ( d , indent = 4 , sort_keys = True )
Returns the mapping from Rosetta residue IDs to PDB ATOM residue IDs in JSON format .
13,801
def assert_wildtype_matches ( self , mutation ) : readwt = self . getAminoAcid ( self . getAtomLine ( mutation . Chain , mutation . ResidueID ) ) assert ( mutation . WildTypeAA == residue_type_3to1_map [ readwt ] )
Check that the wildtype of the Mutation object matches the PDB sequence .
13,802
def get_B_factors ( self , force = False ) : if ( not self . bfactors ) or ( force == True ) : bfactors = { } old_chain_residue_id = None for line in self . lines : if line [ 0 : 4 ] == "ATOM" : chain_residue_id = line [ 21 : 27 ] if chain_residue_id != old_chain_residue_id : bfactors [ chain_residue_id ] = [ ] old_chain_residue_id = chain_residue_id bfactors [ chain_residue_id ] . append ( float ( line [ 60 : 66 ] ) ) B_factor_per_residue = { } mean_per_residue = [ ] for chain_residue_id , bfactor_list in bfactors . iteritems ( ) : mean , stddev , variance = get_mean_and_standard_deviation ( bfactor_list ) B_factor_per_residue [ chain_residue_id ] = dict ( mean = mean , stddev = stddev ) mean_per_residue . append ( mean ) total_average , total_standard_deviation , variance = get_mean_and_standard_deviation ( mean_per_residue ) self . bfactors = dict ( Overall = dict ( mean = total_average , stddev = total_standard_deviation ) , PerResidue = B_factor_per_residue , ) return self . bfactors
This reads in all ATOM lines and compute the mean and standard deviation of each residue s B - factors . It returns a table of the mean and standard deviation per residue as well as the mean and standard deviation over all residues with each residue having equal weighting .
13,803
def validate_mutations ( self , mutations ) : resID2AA = self . get_residue_id_to_type_map ( ) badmutations = [ ] for m in mutations : wildtype = resID2AA . get ( PDB . ChainResidueID2String ( m . Chain , m . ResidueID ) , "" ) if m . WildTypeAA != wildtype : badmutations . append ( m ) if badmutations : raise PDBValidationException ( "The mutation(s) %s could not be matched against the PDB %s." % ( ", " . join ( map ( str , badmutations ) ) , self . pdb_id ) )
This function has been refactored to use the SimpleMutation class . The parameter is a list of Mutation objects . The function has no return value but raises a PDBValidationException if the wildtype in the Mutation m does not match the residue type corresponding to residue m . ResidueID in the PDB file .
13,804
def fix_chain_id ( self ) : for i in xrange ( len ( self . lines ) ) : line = self . lines [ i ] if line . startswith ( "ATOM" ) and line [ 21 ] == ' ' : self . lines [ i ] = line [ : 21 ] + 'A' + line [ 22 : ]
fill in missing chain identifier
13,805
def getAtomLine ( self , chain , resid ) : for line in self . lines : fieldtype = line [ 0 : 6 ] . strip ( ) assert ( fieldtype == "ATOM" or fieldtype == "HETATM" ) if line [ 21 : 22 ] == chain and resid == line [ 22 : 27 ] : return line raise Exception ( "Could not find the ATOM/HETATM line corresponding to chain '%(chain)s' and residue '%(resid)s'." % vars ( ) )
This function assumes that all lines are ATOM or HETATM lines . resid should have the proper PDB format i . e . an integer left - padded to length 4 followed by the insertion code which may be a blank space .
13,806
def getAtomLinesForResidueInRosettaStructure ( self , resid ) : lines = [ line for line in self . lines if line [ 0 : 4 ] == "ATOM" and resid == int ( line [ 22 : 27 ] ) ] if not lines : raise Exception ( "Could not find the ATOM/HETATM line corresponding to residue '%(resid)s'." % vars ( ) ) return lines
We assume a Rosetta - generated structure where residues are uniquely identified by number .
13,807
def stripForDDG ( self , chains = True , keepHETATM = False , numberOfModels = None , raise_exception = True ) : if raise_exception : raise Exception ( 'This code is deprecated.' ) from Bio . PDB import PDBParser resmap = { } iresmap = { } newlines = [ ] residx = 0 oldres = None model_number = 1 for line in self . lines : fieldtype = line [ 0 : 6 ] . strip ( ) if fieldtype == "ENDMDL" : model_number += 1 if numberOfModels and ( model_number > numberOfModels ) : break if not numberOfModels : raise Exception ( "The logic here does not handle multiple models yet." ) if ( fieldtype == "ATOM" or ( fieldtype == "HETATM" and keepHETATM ) ) and ( float ( line [ 54 : 60 ] ) != 0 ) : chain = line [ 21 : 22 ] if ( chains == True ) or ( chain in chains ) : resid = line [ 21 : 27 ] iCode = line [ 26 : 27 ] if resid != oldres : residx += 1 newnumbering = "%s%4.i " % ( chain , residx ) assert ( len ( newnumbering ) == 6 ) id = fieldtype + "-" + resid resmap [ id ] = residx iresmap [ residx ] = id oldres = resid oldlength = len ( line ) line = "%s%4.i %s" % ( line [ 0 : 22 ] , resmap [ fieldtype + "-" + resid ] , line [ 27 : ] ) assert ( len ( line ) == oldlength ) newlines . append ( line ) self . lines = newlines self . ddGresmap = resmap self . ddGiresmap = iresmap tmpfile = "/tmp/ddgtemp.pdb" self . lines = self . lines or [ "\n" ] F = open ( tmpfile , 'w' ) F . write ( string . join ( self . lines , "\n" ) ) F . close ( ) parser = PDBParser ( ) structure = parser . get_structure ( 'tmp' , tmpfile ) os . remove ( tmpfile ) count = 0 for residue in structure . get_residues ( ) : count += 1 assert ( count == residx ) assert ( len ( resmap ) == len ( iresmap ) )
Strips a PDB to ATOM lines . If keepHETATM is True then also retain HETATM lines . By default all PDB chains are kept . The chains parameter should be True or a list . In the latter case only those chains in the list are kept . Unoccupied ATOM lines are discarded . This function also builds maps from PDB numbering to Rosetta numbering and vice versa .
13,808
def CheckForPresenceOf ( self , reslist ) : if type ( reslist ) == type ( "" ) : reslist = [ reslist ] foundRes = { } for line in self . lines : resname = line [ 17 : 20 ] if line [ 0 : 4 ] == "ATOM" : if resname in reslist : foundRes [ resname ] = True return foundRes . keys ( )
This checks whether residues in reslist exist in the ATOM lines . It returns a list of the residues in reslist which did exist .
13,809
def fix_residue_numbering ( self ) : resid_list = self . aa_resids ( ) resid_set = set ( resid_list ) resid_lst1 = list ( resid_set ) resid_lst1 . sort ( ) map_res_id = { } x = 1 old_chain = resid_lst1 [ 0 ] [ 0 ] for resid in resid_lst1 : map_res_id [ resid ] = resid [ 0 ] + '%4.i' % x if resid [ 0 ] == old_chain : x += 1 else : x = 1 old_chain = resid [ 0 ] atomlines = [ ] for line in self . lines : if line [ 0 : 4 ] == "ATOM" and line [ 21 : 26 ] in resid_set and line [ 26 ] == ' ' : lst = [ char for char in line ] lst [ 21 : 26 ] = map_res_id [ line [ 21 : 26 ] ] atomlines . append ( string . join ( lst , '' ) ) else : atomlines . append ( line ) self . lines = atomlines return map_res_id
this function renumbers the res ids in order to avoid strange behaviour of Rosetta
13,810
def neighbors2 ( self , distance , chain_residue , atom = None , resid_list = None ) : if atom == None : lines = [ line for line in self . atomlines ( resid_list ) if line [ 17 : 20 ] in allowed_PDB_residues_types ] else : lines = [ line for line in self . atomlines ( resid_list ) if line [ 17 : 20 ] in allowed_PDB_residues_types and line [ 12 : 16 ] == atom ] shash = spatialhash . SpatialHash ( distance ) for line in lines : pos = ( float ( line [ 30 : 38 ] ) , float ( line [ 38 : 46 ] ) , float ( line [ 46 : 54 ] ) ) shash . insert ( pos , line [ 21 : 26 ] ) neighbor_list = [ ] for line in lines : resid = line [ 21 : 26 ] if resid == chain_residue : pos = ( float ( line [ 30 : 38 ] ) , float ( line [ 38 : 46 ] ) , float ( line [ 46 : 54 ] ) ) for data in shash . nearby ( pos , distance ) : if data [ 1 ] not in neighbor_list : neighbor_list . append ( data [ 1 ] ) neighbor_list . sort ( ) return neighbor_list
this one is more precise since it uses the chain identifier also
13,811
def extract_xyz_matrix_from_chain ( self , chain_id , atoms_of_interest = [ ] ) : chains = [ l [ 21 ] for l in self . structure_lines if len ( l ) > 21 ] chain_lines = [ l for l in self . structure_lines if len ( l ) > 21 and l [ 21 ] == chain_id ] return PDB . extract_xyz_matrix_from_pdb ( chain_lines , atoms_of_interest = atoms_of_interest , include_all_columns = True )
Create a pandas coordinates dataframe from the lines in the specified chain .
13,812
def create_token_file ( username = id_generator ( ) , password = id_generator ( ) ) : cozy_ds_uid = helpers . get_uid ( 'cozy-data-system' ) if not os . path . isfile ( LOGIN_FILENAME ) : with open ( LOGIN_FILENAME , 'w+' ) as token_file : token_file . write ( "{0}\n{1}" . format ( username , password ) ) helpers . file_rights ( LOGIN_FILENAME , mode = 0400 , uid = cozy_ds_uid , gid = 0 )
Store the admins password for further retrieve
13,813
def get_admin ( ) : if os . path . isfile ( LOGIN_FILENAME ) : with open ( LOGIN_FILENAME , 'r' ) as token_file : old_login , old_password = token_file . read ( ) . splitlines ( ) [ : 2 ] return old_login , old_password else : return None , None
Return the actual admin from token file
13,814
def curl_couchdb ( url , method = 'GET' , base_url = BASE_URL , data = None ) : ( username , password ) = get_admin ( ) if username is None : auth = None else : auth = ( username , password ) if method == 'PUT' : req = requests . put ( '{}{}' . format ( base_url , url ) , auth = auth , data = data ) elif method == 'DELETE' : req = requests . delete ( '{}{}' . format ( base_url , url ) , auth = auth ) else : req = requests . get ( '{}{}' . format ( base_url , url ) , auth = auth ) if req . status_code not in [ 200 , 201 ] : raise HTTPError ( '{}: {}' . format ( req . status_code , req . text ) ) return req
Launch a curl on CouchDB instance
13,815
def get_couchdb_admins ( ) : user_list = [ ] req = curl_couchdb ( '/_config/admins/' ) for user in req . json ( ) . keys ( ) : user_list . append ( user ) return user_list
Return the actual CouchDB admins
13,816
def create_couchdb_admin ( username , password ) : curl_couchdb ( '/_config/admins/{}' . format ( username ) , method = 'PUT' , data = '"{}"' . format ( password ) )
Create a CouchDB user
13,817
def is_cozy_registered ( ) : req = curl_couchdb ( '/cozy/_design/user/_view/all' ) users = req . json ( ) [ 'rows' ] if len ( users ) > 0 : return True else : return False
Check if a Cozy is registered
13,818
def unregister_cozy ( ) : req = curl_couchdb ( '/cozy/_design/user/_view/all' ) users = req . json ( ) [ 'rows' ] if len ( users ) > 0 : user = users [ 0 ] [ 'value' ] user_id = user [ '_id' ] user_rev = user [ '_rev' ] print 'Delete cozy user: {}' . format ( user_id ) req = curl_couchdb ( '/cozy/{}?rev={}' . format ( user_id , user_rev ) , method = 'DELETE' ) return req . json ( ) else : print 'Cozy not registered' return None
Unregister a cozy
13,819
def delete_all_couchdb_admins ( ) : username = get_admin ( ) [ 0 ] admins = get_couchdb_admins ( ) for admin in admins : if admin == username : print "Delete {} later..." . format ( admin ) else : print "Delete {}" . format ( admin ) delete_couchdb_admin ( admin ) admin = username print "Delete {}" . format ( admin ) delete_couchdb_admin ( admin )
Delete all CouchDB users
13,820
def delete_token ( ) : username = get_admin ( ) [ 0 ] admins = get_couchdb_admins ( ) if username in admins : print 'I delete {} CouchDB user' . format ( username ) delete_couchdb_admin ( username ) if os . path . isfile ( LOGIN_FILENAME ) : print 'I delete {} token file' . format ( LOGIN_FILENAME ) os . remove ( LOGIN_FILENAME )
Delete current token file & CouchDB admin user
13,821
def create_token ( ) : username = id_generator ( ) password = id_generator ( ) create_couchdb_admin ( username , password ) create_token_file ( username , password ) return 'Token {} created' . format ( username )
Create token file & create user
13,822
def ping ( ) : try : curl_couchdb ( '/cozy/' ) ping = True except requests . exceptions . ConnectionError , error : print error ping = False return ping
Ping CozyDB with existing credentials
13,823
def get_cozy_param ( param ) : try : req = curl_couchdb ( '/cozy/_design/cozyinstance/_view/all' ) rows = req . json ( ) [ 'rows' ] if len ( rows ) == 0 : return None else : return rows [ 0 ] . get ( 'value' , { } ) . get ( param , None ) except : return None
Get parameter in Cozy configuration
13,824
def str2type ( value ) : if not isinstance ( value , str ) : return value try : return json . loads ( value ) except JSONDecodeError : return value
Take a string and convert it to a value of proper type .
13,825
def get_authorisation_url ( self , reset = False ) : if reset : self . auth_url = None if not self . auth_url : try : oauth = OAuth2Session ( self . client_id , redirect_uri = self . redirect_url ) self . auth_url , self . state = oauth . authorization_url ( self . auth_base_url ) except Exception : return None return self . auth_url
Initialises the OAuth2 Process by asking the auth server for a login URL . Once called the user can login by being redirected to the url returned by this function . If there is an error during authorisation None is returned .
13,826
def on_callback ( self , auth_resp ) : try : oauth = OAuth2Session ( self . client_id , state = self . state , redirect_uri = self . redirect_url ) self . token = oauth . fetch_token ( self . token_url , authorization_response = auth_resp , client_secret = self . client_secret , verify = self . verifySSL ) if not self . api_key and self . API_KEY_DEFAULT : self . get_api_key ( ) if not self . api_key : self . API_KEY_DEFAULT = False except Exception : return False return True
Must be called once the authorisation server has responded after redirecting to the url provided by get_authorisation_url and completing the login there . Returns True if a token was successfully retrieved False otherwise .
13,827
def validate ( self ) : try : resp = self . request ( ) . get ( self . validate_url , verify = self . verifySSL ) . json ( ) except TokenExpiredError : return False except AttributeError : return False if 'error' in resp : return False return True
Confirms the current token is still valid . Returns True if it is valid False otherwise .
13,828
def refresh_token ( self ) : try : if self . token : self . token = self . request ( ) . refresh_token ( self . refresh_url , self . token [ 'refresh_token' ] ) return True except Exception as e : pass return False
Refreshes access token using refresh token . Returns true if successful false otherwise .
13,829
def revoke_access ( self ) : if self . token is None : return True if self . validate ( ) : data = { } data [ 'token' ] = self . token [ 'access_token' ] self . request ( ) . post ( self . revoke_url , data = data , json = None , verify = self . verifySSL ) return True
Requests that the currently used token becomes invalid . Call this should a user logout .
13,830
def request ( self ) : headers = { 'Accept' : 'application/json' } if self . api_key : headers [ 'X-API-KEY' ] = self . api_key return requests , headers else : if self . token : return OAuth2Session ( self . client_id , token = self . token ) , headers else : raise APIError ( "No API key and no OAuth session available" )
Returns an OAuth2 Session to be used to make requests . Returns None if a token hasn t yet been received .
13,831
def to_json ( self ) : data = dict ( self . __dict__ ) data . pop ( 'context' , None ) data [ 'oauth' ] = self . oauth . to_dict ( ) data [ 'cache' ] = self . cache . to_dict ( ) return json . dumps ( data )
Returns a json string containing all relevant data to recreate this pyalveo . Client .
13,832
def api_request ( self , url , data = None , method = 'GET' , raw = False , file = None ) : if method is 'GET' : response = self . oauth . get ( url ) elif method is 'POST' : if file is not None : response = self . oauth . post ( url , data = data , file = file ) else : response = self . oauth . post ( url , data = data ) elif method is 'PUT' : response = self . oauth . put ( url , data = data ) elif method is 'DELETE' : response = self . oauth . delete ( url ) else : raise APIError ( "Unknown request method: %s" % ( method , ) ) if response . status_code >= 400 : raise APIError ( response . status_code , '' , "Error accessing API (url: %s, method: %s)\nData: %s\nMessage: %s" % ( url , method , data , response . text ) ) if raw : return response . content else : return response . json ( )
Perform an API request to the given URL optionally including the specified data
13,833
def get_collections ( self ) : result = self . api_request ( '/catalog' ) return [ ( os . path . split ( x ) [ 1 ] , x ) for x in result [ 'collections' ] ]
Retrieve a list of the collection URLs for all collections hosted on the server .
13,834
def get_item ( self , item_url , force_download = False ) : item_url = str ( item_url ) if ( self . use_cache and not force_download and self . cache . has_item ( item_url ) ) : item_json = self . cache . get_item ( item_url ) else : item_json = self . api_request ( item_url , raw = True ) if self . update_cache : self . cache . add_item ( item_url , item_json ) return Item ( json . loads ( item_json . decode ( 'utf-8' ) ) , self )
Retrieve the item metadata from the server as an Item object
13,835
def get_document ( self , doc_url , force_download = False ) : doc_url = str ( doc_url ) if ( self . use_cache and not force_download and self . cache . has_document ( doc_url ) ) : doc_data = self . cache . get_document ( doc_url ) else : doc_data = self . api_request ( doc_url , raw = True ) if self . update_cache : self . cache . add_document ( doc_url , doc_data ) return doc_data
Retrieve the data for the given document from the server
13,836
def get_primary_text ( self , item_url , force_download = False ) : item_url = str ( item_url ) metadata = self . get_item ( item_url ) . metadata ( ) try : primary_text_url = metadata [ 'alveo:primary_text_url' ] except KeyError : return None if primary_text_url == 'No primary text found' : return None if ( self . use_cache and not force_download and self . cache . has_primary_text ( item_url ) ) : primary_text = self . cache . get_primary_text ( item_url ) else : primary_text = self . api_request ( primary_text_url , raw = True ) if self . update_cache : self . cache . add_primary_text ( item_url , primary_text ) return primary_text
Retrieve the primary text for an item from the server
13,837
def get_item_annotations ( self , item_url , annotation_type = None , label = None ) : item_url = str ( item_url ) metadata = self . get_item ( item_url ) . metadata ( ) try : annotation_url = metadata [ 'alveo:annotations_url' ] except KeyError : return None req_url = annotation_url if annotation_type is not None : req_url += '?' req_url += urlencode ( ( ( 'type' , annotation_type ) , ) ) if label is not None : if annotation_type is None : req_url += '?' else : req_url += '&' req_url += urlencode ( ( ( 'label' , label ) , ) ) try : return self . api_request ( req_url ) except KeyError : return None
Retrieve the annotations for an item from the server
13,838
def get_annotation_types ( self , item_url ) : req_url = item_url + "/annotations/types" resp = self . api_request ( req_url ) return resp [ 'annotation_types' ]
Retrieve the annotation types for the given item from the server
13,839
def add_annotations ( self , item_url , annotations ) : adict = { '@context' : "https://alveo-staging1.intersect.org.au/schema/json-ld" } for ann in annotations : for key in ( '@type' , 'label' , 'start' , 'end' , 'type' ) : if key not in ann . keys ( ) : raise Exception ( "required key '%s' not present in annotation" % key ) adict [ '@graph' ] = annotations resp = self . api_request ( str ( item_url ) + '/annotations' , method = 'POST' , data = json . dumps ( adict ) ) return self . __check_success ( resp )
Add annotations to the given item
13,840
def create_collection ( self , name , metadata ) : payload = { 'collection_metadata' : metadata , 'name' : name } response = self . api_request ( '/catalog' , method = 'POST' , data = json . dumps ( payload ) ) return self . __check_success ( response )
Create a new collection with the given name and attach the metadata .
13,841
def modify_collection_metadata ( self , collection_uri , metadata , replace = None , name = '' ) : payload = { 'collection_metadata' : metadata , 'name' : name } if replace is not None : payload [ 'replace' ] = replace response = self . api_request ( collection_uri , method = 'PUT' , data = json . dumps ( payload ) ) return self . __check_success ( response )
Modify the metadata for the given collection .
13,842
def get_items ( self , collection_uri ) : cname = os . path . split ( collection_uri ) [ 1 ] return self . search_metadata ( "collection_name:%s" % cname )
Return all items in this collection .
13,843
def add_text_item ( self , collection_uri , name , metadata , text , title = None ) : docname = name + ".txt" if title is None : title = name metadata [ 'dcterms:identifier' ] = name metadata [ '@type' ] = 'ausnc:AusNCObject' metadata [ 'hcsvlab:display_document' ] = { '@id' : docname } metadata [ 'hcsvlab:indexable_document' ] = { '@id' : docname } metadata [ 'ausnc:document' ] = [ { '@id' : 'document1.txt' , '@type' : 'foaf:Document' , 'dcterms:extent' : len ( text ) , 'dcterms:identifier' : docname , 'dcterms:title' : title , 'dcterms:type' : 'Text' } ] meta = { 'items' : [ { 'metadata' : { '@context' : self . context , '@graph' : [ metadata ] } , 'documents' : [ { 'content' : text , 'identifier' : docname } ] } ] } response = self . api_request ( collection_uri , method = 'POST' , data = json . dumps ( meta ) ) self . __check_success ( response ) item_uri = collection_uri + "/" + response [ 'success' ] [ 0 ] return item_uri
Add a new item to a collection containing a single text document .
13,844
def add_item ( self , collection_uri , name , metadata ) : metadata [ 'dcterms:identifier' ] = name metadata [ 'dc:identifier' ] = name metadata [ '@type' ] = 'ausnc:AusNCObject' meta = { 'items' : [ { 'metadata' : { '@context' : self . context , '@graph' : [ metadata ] } } ] } response = self . api_request ( collection_uri , method = 'POST' , data = json . dumps ( meta ) ) self . __check_success ( response ) item_uri = collection_uri + "/" + response [ 'success' ] [ 0 ] return item_uri
Add a new item to a collection
13,845
def modify_item ( self , item_uri , metadata ) : md = json . dumps ( { 'metadata' : metadata } ) response = self . api_request ( item_uri , method = 'PUT' , data = md ) return self . __check_success ( response )
Modify the metadata on an item
13,846
def delete_item ( self , item_uri ) : response = self . api_request ( item_uri , method = 'DELETE' ) return self . __check_success ( response )
Delete an item from a collection
13,847
def add_document ( self , item_uri , name , metadata , content = None , docurl = None , file = None , displaydoc = False , preferName = False , contrib_id = None ) : if not preferName and file is not None : docid = os . path . basename ( file ) else : docid = name docmeta = { "metadata" : { "@context" : self . context , "@type" : "foaf:Document" , "dcterms:identifier" : docid , } } docmeta [ "metadata" ] . update ( metadata ) if contrib_id : docmeta [ 'contribution_id' ] = contrib_id if content is not None : docmeta [ 'document_content' ] = content elif docurl is not None : docmeta [ "metadata" ] [ "dcterms:source" ] = { "@id" : docurl } elif file is not None : docmeta = docmeta [ 'metadata' ] else : raise Exception ( "One of content, docurl or file must be specified in add_document" ) if file is not None : result = self . api_request ( item_uri , method = 'POST' , data = { 'metadata' : json . dumps ( docmeta ) } , file = file ) else : result = self . api_request ( item_uri , method = 'POST' , data = json . dumps ( docmeta ) ) self . __check_success ( result ) if displaydoc : itemmeta = { "http://alveo.edu.org/vocabulary/display_document" : docid } self . modify_item ( item_uri , itemmeta ) doc_uri = item_uri + "/document/" + name return doc_uri
Add a document to an existing item
13,848
def delete_document ( self , doc_uri ) : result = self . api_request ( doc_uri , method = 'DELETE' ) return self . __check_success ( result )
Delete a document from an item
13,849
def __check_success ( resp ) : if "success" not in resp . keys ( ) : try : raise APIError ( '200' , 'Operation Failed' , resp [ "error" ] ) except KeyError : raise APIError ( '200' , 'Operation Failed' , str ( resp ) ) return resp [ "success" ]
Check a JSON server response to see if it was successful
13,850
def download_items ( self , items , file_path , file_format = 'zip' ) : download_url = '/catalog/download_items' download_url += '?' + urlencode ( ( ( 'format' , file_format ) , ) ) item_data = { 'items' : list ( items ) } data = self . api_request ( download_url , method = 'POST' , data = json . dumps ( item_data ) , raw = True ) with open ( file_path , 'w' ) as f : f . write ( data ) return file_path
Retrieve a file from the server containing the metadata and documents for the speficied items
13,851
def search_metadata ( self , query ) : query_url = ( '/catalog/search?' + urlencode ( ( ( 'metadata' , query ) , ) ) ) resp = self . api_request ( query_url ) return ItemGroup ( resp [ 'items' ] , self )
Submit a search query to the server and retrieve the results
13,852
def add_to_item_list ( self , item_urls , item_list_url ) : item_list_url = str ( item_list_url ) name = self . get_item_list ( item_list_url ) . name ( ) return self . add_to_item_list_by_name ( item_urls , name )
Instruct the server to add the given items to the specified Item List
13,853
def rename_item_list ( self , item_list_url , new_name ) : data = json . dumps ( { 'name' : new_name } ) resp = self . api_request ( str ( item_list_url ) , data , method = "PUT" ) try : return ItemList ( resp [ 'items' ] , self , item_list_url , resp [ 'name' ] ) except KeyError : try : raise APIError ( '200' , 'Rename operation failed' , resp [ 'error' ] ) except KeyError : raise APIError ( '200' , 'Rename operation failed' , resp )
Rename an Item List on the server
13,854
def delete_item_list ( self , item_list_url ) : try : resp = self . api_request ( str ( item_list_url ) , method = "DELETE" ) if 'success' in resp : return True else : raise APIError ( '200' , 'Operation Failed' , 'Delete operation failed' ) except APIError as e : if e . http_status_code == 302 : return True else : raise e
Delete an Item List on the server
13,855
def get_speakers ( self , collection_name ) : speakers_url = "/speakers/" + collection_name resp = self . api_request ( speakers_url ) if 'speakers' in resp : return resp [ 'speakers' ] else : return [ ]
Get a list of speaker URLs for this collection
13,856
def add_speaker ( self , collection_name , metadata ) : if 'dcterms:identifier' not in metadata : raise APIError ( msg = "No identifier in speaker metadata" ) if '@context' not in metadata : metadata [ '@context' ] = CONTEXT speakers_url = "/speakers/" + collection_name + "/" resp = self . api_request ( speakers_url , data = json . dumps ( metadata ) , method = "POST" ) if 'success' in resp : return resp [ 'success' ] [ 'URI' ] else : return None
Add a new speaker to this collection .
13,857
def delete_speaker ( self , speaker_uri ) : response = self . api_request ( speaker_uri , method = 'DELETE' ) return self . __check_success ( response )
Delete an speaker from a collection
13,858
def sparql_query ( self , collection_name , query ) : request_url = '/sparql/' + collection_name + '?' request_url += urlencode ( ( ( 'query' , query ) , ) ) return self . api_request ( request_url )
Submit a sparql query to the server to search metadata and annotations .
13,859
def get_contribution ( self , url ) : result = self . api_request ( url ) result [ 'id' ] = os . path . split ( result [ 'url' ] ) [ 1 ] return result
Get the details of a particular contribution given it s url
13,860
def create_contribution ( self , metadata ) : result = self . api_request ( '/contrib/' , method = 'POST' , data = json . dumps ( metadata ) ) result [ 'id' ] = os . path . split ( result [ 'url' ] ) [ 1 ] return result
Create a new contribution given a dictionary of metadata
13,861
def delete_contribution ( self , url ) : try : result = self . api_request ( url ) if 'url' in result and 'documents' in result : self . api_request ( result [ 'url' ] , method = 'DELETE' ) return True except : pass return False
Delete the contribution with this identifier
13,862
def lint ( ) : path = os . path . realpath ( os . getcwd ( ) ) cmd = 'flake8 %s' % path opt = '' print ( ">>> Linting codebase with the following command: %s %s" % ( cmd , opt ) ) try : return_code = call ( [ cmd , opt ] , shell = True ) if return_code < 0 : print ( ">>> Terminated by signal" , - return_code , file = sys . stderr ) elif return_code != 0 : sys . exit ( '>>> Lint checks failed' ) else : print ( ">>> Lint checks passed" , return_code , file = sys . stderr ) except OSError as e : print ( ">>> Execution failed:" , e , file = sys . stderr )
run linter on our code base .
13,863
def prompt_yn ( stmt ) : print ( stmt ) answer = '' while answer not in [ 'Y' , 'N' ] : sys . stdout . write ( "$ " ) answer = sys . stdin . readline ( ) . upper ( ) . strip ( ) return answer == 'Y'
Prints the statement stmt to the terminal and wait for a Y or N answer . Returns True for Y False for N .
13,864
def get_agents ( self , addr = True , agent_cls = None , include_manager = False ) : agents = list ( self . agents . dict . values ( ) ) if hasattr ( self , 'manager' ) and self . manager is not None : if not include_manager : agents = [ a for a in agents if a . addr . rsplit ( '/' , 1 ) [ 1 ] != '0' ] if agent_cls is not None : agents = [ a for a in agents if type ( a ) is agent_cls ] if addr : agents = [ agent . addr for agent in agents ] return agents
Get agents in the environment .
13,865
async def trigger_act ( self , * args , addr = None , agent = None , ** kwargs ) : if agent is None and addr is None : raise TypeError ( "Either addr or agent has to be defined." ) if agent is None : for a in self . get_agents ( addr = False ) : if addr == a . addr : agent = a self . _log ( logging . DEBUG , "Triggering agent in {}" . format ( agent . addr ) ) ret = await agent . act ( * args , ** kwargs ) return ret
Trigger agent to act .
13,866
async def trigger_all ( self , * args , ** kwargs ) : tasks = [ ] for a in self . get_agents ( addr = False , include_manager = False ) : task = asyncio . ensure_future ( self . trigger_act ( * args , agent = a , ** kwargs ) ) tasks . append ( task ) rets = await asyncio . gather ( * tasks ) return rets
Trigger all agents in the environment to act asynchronously .
13,867
def create_random_connections ( self , n = 5 ) : if type ( n ) != int : raise TypeError ( "Argument 'n' must be of type int." ) if n <= 0 : raise ValueError ( "Argument 'n' must be greater than zero." ) for a in self . get_agents ( addr = False ) : others = self . get_agents ( addr = False ) [ : ] others . remove ( a ) shuffle ( others ) for r_agent in others [ : n ] : a . add_connection ( r_agent )
Create random connections for all agents in the environment .
13,868
def create_connections(self, connection_map):
    """Add connections to agents according to *connection_map*.

    :param connection_map: mapping from agent address to the connections
        that agent should add
    :returns: list of add_connections() return values, one per agent
        whose address appears in the map
    """
    results = []
    for agent in self.get_agents(addr=False):
        if agent.addr not in connection_map:
            continue
        results.append(agent.add_connections(connection_map[agent.addr]))
    return results
Create agent connections from a given connection map .
13,869
def get_connections(self, data=True):
    """Collect (address, connections) pairs for every agent.

    :param data: forwarded to each agent's get_connections()
    :returns: list of (agent address, agent connections) tuples
    """
    return [
        (ag.addr, ag.get_connections(data=data))
        for ag in self.get_agents(addr=False)
    ]
Return connections from all the agents in the environment .
13,870
def get_random_agent(self, agent):
    """Return a random agent from the environment other than *agent*.

    The previous draw-until-different loop could spin forever when the
    given agent was the only agent present; instead, exclude it up front
    and make a single random draw.

    :param agent: the agent to exclude (matched by address)
    :raises ValueError: if there is no other agent to choose from
    """
    others = [a for a in self.get_agents(addr=False) if a.addr != agent.addr]
    if not others:
        raise ValueError("No other agent to choose from.")
    return choice(others)
Return random agent that is not the same as agent given as parameter .
13,871
def add_artifact(self, artifact):
    """Stamp *artifact* with the current environment age and store it."""
    artifact.env_time = self.age
    self.artifacts.append(artifact)
    msg = "ARTIFACTS appended: '{}', length={}".format(artifact, len(self.artifacts))
    self._log(logging.DEBUG, msg)
Add artifact with given framing to the environment .
13,872
async def get_artifacts(self, agent=None):
    """Return artifacts published to the environment.

    Delegates to the manager when one is attached; otherwise reads the
    local artifact list. If *agent* is given, only artifacts created by
    that agent (matched by name) are returned.
    """
    if getattr(self, 'manager', None) is not None:
        arts = await self.manager.get_artifacts()
    else:
        arts = self.artifacts
    if agent is None:
        return arts
    return [art for art in arts if agent.name == art.creator]
Return artifacts published to the environment .
13,873
def destroy(self, folder=None, as_coro=False):
    """Tear down the environment: save its info, close agents, shut down.

    :param folder: destination passed to save_info() and each agent's close()
    :param as_coro: if True, return a coroutine instead of running it now
    :returns: the value of save_info() (or a coroutine producing it)
    """
    async def _teardown(folder):
        info = self.save_info(folder)
        for agent in self.get_agents(addr=False):
            agent.close(folder=folder)
        await self.shutdown(as_coro=True)
        return info

    return run_or_coro(_teardown(folder), as_coro)
Destroy the environment .
13,874
def tee(*popenargs, **kwargs):
    """Run a command as if it were piped through tee.

    The child's stdout/stderr are echoed live to this process's
    stdout/stderr and also accumulated and returned.

    Fix: on Python 3 the pipes yield bytes, so appending to str
    accumulators and writing to sys.stdout raised TypeError; decode
    chunks to text before echoing/accumulating.

    :returns: (stdout_text, stderr_text) tuple
    """
    import subprocess, select, sys
    process = subprocess.Popen(stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               *popenargs, **kwargs)
    stdout, stderr = '', ''

    def read_stream(input_callback, output_stream):
        read = input_callback()
        if isinstance(read, bytes):
            # Pipes are binary on Python 3; decode leniently for display.
            read = read.decode(errors='replace')
        output_stream.write(read)
        output_stream.flush()
        return read

    while process.poll() is None:
        watch = process.stdout.fileno(), process.stderr.fileno()
        ready = select.select(watch, [], [])[0]
        for fd in ready:
            if fd == process.stdout.fileno():
                stdout += read_stream(process.stdout.readline, sys.stdout)
            if fd == process.stderr.fileno():
                stderr += read_stream(process.stderr.readline, sys.stderr)
    # Drain whatever is left in the buffers after exit.
    stdout += read_stream(process.stdout.read, sys.stdout)
    stderr += read_stream(process.stderr.read, sys.stderr)
    return stdout, stderr
Run a command as if it were piped through tee.
13,875
def save(self, user, commit=True):
    """Validate and persist *user*, emitting a save event on success.

    :param commit: when False, only stage the user in the session
    :returns: the saved user, or the failed validation result
    """
    self.is_instance(user)
    validation = UpdateSchema().process(user)
    if not validation:
        return validation
    db.session.add(user)
    if commit:
        db.session.commit()
    events.user_save_event.send(user)
    return user
Persist user and emit event
13,876
def login(self, email=None, password=None, remember=False):
    """Authenticate a user by email/password and emit login events.

    :param remember: forwarded to flask-login's remember-me cookie
    :returns: True on successful login, False on unknown email or bad
        password
    :raises x.AccountLocked: when the account is locked
    :raises x.EmailNotConfirmed: when confirmation is required but the
        address was never confirmed
    """
    from flask_login import login_user
    user = self.first(email=email)
    if user is None:
        # Fire a dedicated event so failed logins for unknown accounts
        # can still be monitored.
        events.login_failed_nonexistent_event.send()
        return False
    if user.is_locked():
        raise x.AccountLocked(locked_until=user.locked_until)

    # Only initial registrations (no pending email change) must be
    # confirmed before login, and only if confirmation is required.
    is_new = user.email and not user.email_new
    if is_new and not user.email_confirmed and self.require_confirmation:
        raise x.EmailNotConfirmed(email=user.email_secure)

    verified = user.verify_password(password)
    if not verified:
        # Count the failure so repeated attempts can lock the account.
        user.increment_failed_logins()
        self.save(user)
        events.login_failed_event.send(user)
        return False

    login_user(user=user, remember=remember)
    user.reset_login_counter()
    self.save(user)
    events.login_event.send(user)

    # Notify flask-principal that the current identity changed.
    app = current_app._get_current_object()
    identity_changed.send(app, identity=Identity(user.id))
    return True
Authenticate user and emit event .
13,877
def force_login(self, user):
    """Log *user* in without password verification.

    Still enforces account lockout and email-confirmation rules.

    :raises x.AccountLocked: when the account is locked
    :raises x.EmailNotConfirmed: when confirmation is required but missing
    :returns: True on success
    """
    from flask_login import login_user
    if user.is_locked():
        raise x.AccountLocked(locked_until=user.locked_until)

    # Initial (non-email-change) accounts must be confirmed first.
    is_new = user.email and not user.email_new
    if is_new and not user.email_confirmed and self.require_confirmation:
        raise x.EmailNotConfirmed(email=user.email_secure)

    login_user(user=user, remember=True)
    user.reset_login_counter()
    self.save(user)
    # Tell flask-principal about the identity switch.
    identity_changed.send(
        current_app._get_current_object(),
        identity=Identity(user.id),
    )
    return True
Force login a user without credentials
13,878
def logout(self):
    """Log the current user out and emit a logout event.

    :returns: True (also when nobody was logged in)
    """
    from flask_login import logout_user, current_user
    if not current_user.is_authenticated:
        return True
    events.logout_event.send(current_user)
    logout_user()
    # Reset flask-principal to the anonymous identity.
    app = current_app._get_current_object()
    identity_changed.send(app, identity=AnonymousIdentity())
    return True
Logout user and emit event .
13,879
def attempt_social_login(self, provider, id):
    """Try to log a user in via a social provider id.

    :param provider: provider name, e.g. 'facebook'
    :param id: the user's id with that provider
    :returns: True when a matching user was found and logged in
    """
    if not provider or not id:
        return False
    # Look up by the provider-specific id column, e.g. facebook_id.
    lookup = {provider.lower() + '_id': id}
    user = self.first(**lookup)
    if not user:
        return False
    self.force_login(user)
    return True
Attempt social login and return boolean result
13,880
def get_token(self, user_id):
    """Return a JWT token string for *user_id*.

    Uses the configured custom implementation when one is registered,
    otherwise falls back to the default implementation.

    :raises x.ConfigurationException: when the configured implementation
        module cannot be imported
    """
    if not self.jwt_implementation:
        return self.default_token_implementation(user_id)
    try:
        make_token = import_string(self.jwt_implementation)
    except ImportError:
        msg = 'Failed to import custom JWT implementation. '
        msg += 'Check that configured module exists [{}]'
        raise x.ConfigurationException(msg.format(self.jwt_implementation))
    return make_token(user_id)
Get user token Checks if a custom token implementation is registered and uses that . Otherwise falls back to default token implementation . Returns a string token on success .
13,881
def get_user_by_token(self, token):
    """Load the user a JWT token belongs to (used for logging in).

    Uses the configured custom loader when one is registered, otherwise
    falls back to the default loader. The default works as long as the
    token carries a user_id claim.

    :raises x.ConfigurationException: when the configured loader module
        cannot be imported
    """
    if not self.jwt_loader_implementation:
        return self.default_token_user_loader(token)
    try:
        load_user = import_string(self.jwt_loader_implementation)
    except ImportError:
        msg = 'Failed to import custom JWT user loader implementation. '
        msg += 'Check that configured module exists [{}]'
        raise x.ConfigurationException(msg.format(self.jwt_loader_implementation))
    return load_user(token)
Get user by token Using for logging in . Check to see if a custom token user loader was registered and uses that . Otherwise falls back to default loader implementation . You should be fine with default implementation as long as your token has user_id claim in it .
13,882
def default_token_implementation(self, user_id):
    """Create (or reuse) a JWT for the given user.

    Reuses the user's stored token when it has not expired; otherwise
    issues a new token with exp/nbf/iat and user_id claims, persists it
    on the user, and returns it.

    :raises x.JwtNoUser: when no user exists for user_id
    """
    user = self.get(user_id)
    if not user:
        msg = 'No user with such id [{}]'
        raise x.JwtNoUser(msg.format(user_id))

    # Reuse the cached token if it is still valid.
    if user._token:
        try:
            self.decode_token(user._token)
            return user._token
        except jwt.exceptions.ExpiredSignatureError:
            pass  # fall through and issue a fresh token

    now = datetime.datetime.utcnow()
    data = dict(
        exp=now + datetime.timedelta(seconds=self.jwt_lifetime),
        nbf=now,
        iat=now,
        user_id=user_id,
    )
    token = jwt.encode(data, self.jwt_secret, algorithm=self.jwt_algo)
    # PyJWT < 2.0 returns bytes, >= 2.0 returns str; normalize to str so
    # this works with either version (the old unconditional .decode()
    # raised AttributeError on PyJWT 2.x).
    string_token = token.decode('utf-8') if isinstance(token, bytes) else token
    user._token = string_token
    self.save(user)
    return string_token
Default JWT token implementation This is used by default for generating user tokens if custom implementation was not configured . The token will contain user_id and expiration date . If you need more information added to the token register your custom implementation .
13,883
def default_token_user_loader(self, token):
    """Load a user from a JWT token.

    Decodes and validates the token, fetches its user, and runs account
    checks: lock status, email confirmation, and that the token is the
    user's current one.

    :raises x.JwtDecodeError: malformed token
    :raises x.JwtExpired: expired signature
    :raises x.JwtNoUser: token's user does not exist
    :raises x.AccountLocked: account is locked
    :raises x.EmailNotConfirmed: confirmation required but missing
    :raises x.JwtTokenMismatch: token is not the user's stored one
    :returns: the user record
    """
    try:
        payload = self.decode_token(token)
    except jwt.exceptions.DecodeError as e:
        raise x.JwtDecodeError(str(e))
    except jwt.ExpiredSignatureError as e:
        raise x.JwtExpired(str(e))

    user = self.get(payload['user_id'])
    if not user:
        raise x.JwtNoUser('No user with such id [{}]'.format(payload['user_id']))
    if user.is_locked():
        raise x.AccountLocked(
            'This account is locked',
            locked_until=user.locked_until,
        )
    if self.require_confirmation and not user.email_confirmed:
        raise x.EmailNotConfirmed(
            'Please confirm your email address [{}]'.format(user.email_secure),
            email=user.email,
        )
    if token != user._token:
        raise x.JwtTokenMismatch('The token does not match our records')
    return user
Default token user loader Accepts a token and decodes it checking signature and expiration . Then loads user by id from the token to see if account is not locked . If all is good returns user record otherwise throws an exception .
13,884
def register(self, user_data, base_confirm_url='', send_welcome=True):
    """Validate *user_data* and create a new user account.

    Sends a welcome/confirmation message on success unless disabled.

    :returns: the new user on success, False if persisting failed, or
        the validation result when the data is invalid
    """
    user = self.__model__(**user_data)
    validation = RegisterSchema().process(user)
    if not validation:
        return validation
    db.session.add(user)
    db.session.commit()
    if not user.id:
        # Commit went through but no primary key was assigned.
        return False
    if send_welcome:
        self.send_welcome_message(user, base_confirm_url)
    events.register_event.send(user)
    return user
Register user Accepts user data validates it and performs registration . Will send a welcome message with a confirmation link on success .
13,885
def send_welcome_message(self, user, base_url):
    """Email a welcome (and optional confirmation) message to *user*.

    Skipped entirely when confirmation is off and welcome messages are
    disabled.
    """
    if not self.require_confirmation and not self.welcome_message:
        return

    # Pick a subject: a configured override wins over the default.
    subjects = self.email_subjects
    if self.require_confirmation:
        subject = subjects.get(
            'welcome_confirm', 'Welcome, please activate your account!')
    else:
        subject = subjects.get('welcome', 'Welcome to our site!')

    link = '{url}/{link}/'.format(url=base_url.rstrip('/'), link=user.email_link)
    data = dict(link=link)
    if self.require_confirmation:
        html = render_template('user/mail/account-confirm.html', **data)
        txt = render_template('user/mail/account-confirm.txt', **data)
    else:
        html = render_template('user/mail/welcome.html', **data)
        txt = render_template('user/mail/welcome.txt', **data)

    mail.send(Message(
        subject=subject,
        recipients=[user.email],
        body=txt,
        html=html,
        sender=current_app.config['MAIL_DEFAULT_SENDER'],
    ))
Send welcome mail with email confirmation link
13,886
def resend_welcome_message(self, user, base_url):
    """Regenerate the user's confirmation link and resend the welcome mail."""
    user.require_email_confirmation()  # issues a fresh email link
    self.save(user)
    self.send_welcome_message(user, base_url)
Regenerate email link and resend welcome
13,887
def confirm_email_with_link(self, link):
    """Confirm a user's email address via a confirmation link.

    Used both for initial account confirmation and for email changes.

    :returns: the user on success, True if already confirmed, or False
        when the link matches nobody
    :raises x.EmailLinkExpired: when the link is past its expiry
    """
    user = self.first(email_link=link)
    if not user:
        return False
    if user.email_confirmed:
        return True
    if user.email_link_expired():
        raise x.EmailLinkExpired('Link expired, generate a new one')

    user.confirm_email()
    db.session.add(user)
    db.session.commit()
    events.email_confirmed_event.send(user)
    return user
Confirm email with link A universal method to confirm email . used for both initial confirmation and when email is changed .
13,888
def change_email(self, user, new_email, base_confirm_url='', send_message=True):
    """Set a new email on *user* and start the confirmation flow.

    :param send_message: skip sending the confirmation mail (for tests)
    :returns: the user on success, or the failed validation result
    """
    from boiler.user.models import UpdateSchema
    schema = UpdateSchema()
    user.email = new_email
    validation = schema.validate(user)
    if not validation:
        return validation
    db.session.add(user)
    db.session.commit()
    if send_message:
        self.send_email_changed_message(user, base_confirm_url)
    events.email_update_requested_event.send(user)
    return user
Change email Saves new email and sends confirmation before doing the switch . Can optionally skip sending out message for testing purposes .
13,889
def resend_email_changed_message(self, user, base_url):
    """Regenerate the confirmation link and resend the email-change mail."""
    user.require_email_confirmation()  # fresh link + expiry
    self.save(user)
    self.send_email_changed_message(user, base_url)
Regenerate email confirmation link and resend message
13,890
def request_password_reset(self, user, base_url):
    """Generate a password-reset link for *user*, persist it, and email it.

    :param base_url: base URL the reset link is appended to
    """
    user.generate_password_link()
    db.session.add(user)
    db.session.commit()
    # Event fires before the mail so listeners see the new link first.
    events.password_change_requested_event.send(user)
    self.send_password_change_message(user, base_url)
Regenerate password link and send message
13,891
def change_password(self, user, new_password):
    """Set a new password, clear any reset link, and log the user out.

    :returns: the user on success, or the failed validation result
    """
    from boiler.user.models import UpdateSchema
    from flask_login import logout_user
    schema = UpdateSchema()
    user.password = new_password
    # Invalidate any outstanding reset link.
    user.password_link = None
    user.password_link_expires = None
    validation = schema.validate(user)
    if not validation:
        return validation
    db.session.add(user)
    db.session.commit()
    # Only log out when running inside a request (not in CLI/tasks).
    if has_request_context():
        logout_user()
    events.password_changed_event.send(user)
    return user
Change user password and logout
13,892
def send_password_change_message(self, user, base_url):
    """Email the password-change link to *user*."""
    # Configured subject override wins over the default.
    subject = self.email_subjects.get('password_change', 'Change your password here')
    link = '{url}/{link}/'.format(url=base_url.rstrip('/'), link=user.password_link)
    data = dict(link=link)
    html = render_template('user/mail/password-change.html', **data)
    txt = render_template('user/mail/password-change.txt', **data)
    mail.send(Message(
        subject=subject,
        recipients=[user.email],
        body=txt,
        html=html,
        sender=current_app.config['MAIL_DEFAULT_SENDER'],
    ))
Send password change message
13,893
def add_role_to_user(self, user, role):
    """Grant *role* to *user*, persist, and emit a role-granted event."""
    user.add_role(role)
    self.save(user)
    events.user_got_role_event.send(user, role=role)
Adds a role to user
13,894
def remove_role_from_user(self, user, role):
    """Revoke *role* from *user*, persist, and emit a role-lost event."""
    user.remove_role(role)
    self.save(user)
    events.user_lost_role_event.send(user, role=role)
Removes role from user
13,895
def random_walk(network):
    """Take one step of a random walk through the network.

    Starts from a random Source when nothing has been transmitted yet,
    otherwise continues from the latest transmission recipient, sending
    to a random outgoing Agent neighbour.
    """
    latest = network.latest_transmission_recipient()
    if not network.transmissions() or latest is None:
        sender = random.choice(network.nodes(type=Source))
    else:
        sender = latest
    receiver = random.choice(sender.neighbors(direction="to", type=Agent))
    sender.transmit(to_whom=receiver)
Take a random walk from a source .
13,896
def moran_cultural(network):
    """One step of the generalized cultural Moran process.

    On the first step a random source seeds the network; afterwards a
    random agent transmits its most recent info to a random outgoing
    neighbour, replacing that neighbour's culture.
    """
    if not network.transmissions():
        # Seed the network from a random source.
        random.choice(network.nodes(type=Source)).transmit()
        return
    from operator import attrgetter
    replacer = random.choice(network.nodes(type=Agent))
    replaced = random.choice(replacer.neighbors(direction="to", type=Agent))
    newest_info = max(replacer.infos(), key=attrgetter('creation_time'))
    replacer.transmit(what=newest_info, to_whom=replaced)
Generalized cultural Moran process .
13,897
def moran_sexual(network):
    """One step of the generalized sexual Moran process.

    The newest agent (the "baby") takes over a randomly chosen agent's
    place: it inherits that agent's connections, the replaced agent
    fails, and a random remaining agent transmits to the baby.
    """
    if not network.transmissions():
        # Seed the network from a random source on the first step.
        replacer = random.choice(network.nodes(type=Source))
        replacer.transmit()
    else:
        from operator import attrgetter
        agents = network.nodes(type=Agent)
        # The most recently created agent is the baby.
        baby = max(agents, key=attrgetter('creation_time'))
        agents = [a for a in agents if a.id != baby.id]
        replacer = random.choice(agents)
        replaced = random.choice(replacer.neighbors(direction="to", type=Agent))
        # Rewire before failing the replaced node: the baby inherits all
        # of the replaced agent's incoming and outgoing edges.
        for node in replaced.neighbors(direction="to"):
            baby.connect(direction="to", whom=node)
        for node in replaced.neighbors(direction="from"):
            node.connect(direction="to", whom=baby)
        replaced.fail()
        replacer.transmit(to_whom=baby)
The generalized sexual Moran process .
13,898
def dump_object(self, obj):
    """JSON-encoder fallback for types the stock encoder can't handle.

    UUIDs become strings, datetime-likes become ISO-8601 strings, and
    binary buffers become base64-encoded ASCII text.

    :raises TypeError: for any other type
    """
    if isinstance(obj, uuid.UUID):
        return str(obj)
    if hasattr(obj, 'isoformat'):  # datetime / date / time
        return obj.isoformat()
    if isinstance(obj, (bytes, bytearray, memoryview)):
        return base64.b64encode(obj).decode('ASCII')
    raise TypeError('{!r} is not JSON serializable'.format(obj))
Called to encode unrecognized object .
13,899
def normalize_datum(self, datum):
    """Recursively convert *datum* into msgpack-friendly primitives.

    UUIDs and datetime-likes become strings, bytearray/memoryview become
    bytes, sequences/sets become lists, and mappings become dicts with
    normalized values. Types listed in self.PACKABLE_TYPES pass through
    unchanged.

    :raises TypeError: for anything that can't be normalized
    """
    # Fix: the collections.Sequence/Set/Mapping aliases were removed in
    # Python 3.10 — use the collections.abc classes instead.
    from collections.abc import Mapping, Sequence, Set

    if datum is None:
        return datum
    if isinstance(datum, self.PACKABLE_TYPES):
        return datum
    if isinstance(datum, uuid.UUID):
        datum = str(datum)
    if isinstance(datum, bytearray):
        datum = bytes(datum)
    if isinstance(datum, memoryview):
        datum = datum.tobytes()
    if hasattr(datum, 'isoformat'):
        datum = datum.isoformat()
    if isinstance(datum, (bytes, str)):
        return datum
    if isinstance(datum, (Sequence, Set)):
        return [self.normalize_datum(item) for item in datum]
    if isinstance(datum, Mapping):
        return {key: self.normalize_datum(value)
                for key, value in datum.items()}
    raise TypeError('{} is not msgpackable'.format(datum.__class__.__name__))
Convert datum into something that umsgpack likes .