idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
14,000
def get_sequences ( self , pdb_id = None ) : sequences = { } if pdb_id : for chain_id , sequence in self . get ( pdb_id , { } ) . iteritems ( ) : sequences [ chain_id ] = Sequence . from_sequence ( chain_id , sequence ) else : for pdb_id , v in self . iteritems ( ) : sequences [ pdb_id ] = { } for chain_id , sequence i...
Create Sequence objects for each FASTA sequence .
14,001
def get_chain_ids ( self , pdb_id = None , safe_call = False ) : if pdb_id == None and len ( self . keys ( ) ) == 1 : return self [ self . keys ( ) [ 0 ] ] . keys ( ) pdbID = pdbID . upper ( ) if not self . get ( pdbID ) : if not safe_call : raise Exception ( "FASTA object does not contain sequences for PDB %s." % pdbI...
If the FASTA file only has one PDB ID pdb_id does not need to be specified . Otherwise the list of chains identifiers for pdb_id is returned .
14,002
def match ( self , other ) : colortext . message ( "FASTA Match" ) for frompdbID , fromchains in sorted ( self . iteritems ( ) ) : matched_pdbs = { } matched_chains = { } for fromchain , fromsequence in fromchains . iteritems ( ) : for topdbID , tochains in other . iteritems ( ) : for tochain , tosequence in tochains ....
This is a noisy terminal - printing function at present since there is no need to make it a proper API function .
14,003
def _process_json_data ( person_data ) : person = SwsPerson ( ) if person_data [ "BirthDate" ] : person . birth_date = parse ( person_data [ "BirthDate" ] ) . date ( ) person . directory_release = person_data [ "DirectoryRelease" ] person . email = person_data [ "Email" ] person . employee_id = person_data [ "EmployeeI...
Returns a uw_sws . models . SwsPerson object
14,004
def _make_notice_date ( self , response ) : today = date . today ( ) yesterday = today - timedelta ( days = 1 ) tomorrow = today + timedelta ( days = 1 ) week = today + timedelta ( days = 2 ) next_week = today + timedelta ( weeks = 1 ) future = today + timedelta ( weeks = 3 ) future_end = today + timedelta ( weeks = 5 ...
Set the date attribte value in the notice mock data
14,005
def relative_symlink ( target , link_name ) : link_name = os . path . abspath ( link_name ) abs_target = os . path . abspath ( target ) rel_target = os . path . relpath ( target , os . path . dirname ( link_name ) ) if os . path . exists ( link_name ) : os . remove ( link_name ) os . symlink ( rel_target , link_name )
Make a symlink to target using the shortest possible relative path .
14,006
def params ( self , dict ) : self . _configuration . update ( dict ) self . _measurements . update ( )
Set configuration variables for an OnShape part .
14,007
def update ( self , params = None , client = c ) : uri = self . parent . uri if not params or not self . res : self . get_params ( ) return d = self . payload for k , v in params . items ( ) : m = d [ "currentConfiguration" ] [ self . parameter_map [ k ] ] [ "message" ] if isinstance ( v , bool ) or isinstance ( v , st...
Push params to OnShape and synchronize the local copy
14,008
def get_params ( self ) : self . res = c . get_configuration ( self . parent . uri . as_dict ( ) )
Manually pull params defined in config from OnShape and return a python representation of the params . Quantities are converted to pint quantities Bools are converted to python bools and Enums are converted to strings . Note that Enum names are autogenerated by OnShape and do not match the name on the OnShape UI .
14,009
def params ( self ) : payload = self . payload d = { } for i , p in enumerate ( payload [ "currentConfiguration" ] ) : type_name = p [ "typeName" ] cp = payload [ "configurationParameters" ] [ i ] [ "message" ] name = cp [ "parameterName" ] if type_name == "BTMParameterQuantity" : try : v = q ( p [ "message" ] [ "expre...
Get the params of response data from the API .
14,010
def update ( self ) : uri = self . parent . uri script = r self . res = c . evaluate_featurescript ( uri . as_dict ( ) , script )
Update all local variable names to match OnShape .
14,011
def getFailedJobIDs ( self , extraLapse = TYPICAL_LAPSE ) : scriptsRun = self . scriptsRun failedJobTimestamps = [ ] nodata = [ ] for name , details in sorted ( scriptsRun . iteritems ( ) ) : if details [ "lastSuccess" ] and expectedScripts . get ( name ) : if not expectedScripts . check ( name , details [ "lastSuccess...
Returns a list of which identify failed jobs in the scriptsRun table . If a time stamp for a job can be found we return this . The time stamp can be used to index the log . If no time stamp was found return the name of the script instead .
14,012
def generateSummaryHTMLTable ( self , extraLapse = TYPICAL_LAPSE ) : scriptsRun = self . scriptsRun html = [ ] html . append ( "<table style='text-align:center;border:1px solid black;margin-left: auto;margin-right: auto;'>\n" ) html . append ( ' <tr><td colspan="4" style="text-align:center"></td></tr>\n' ) html . appen...
Generates a summary in HTML of the status of the expected scripts broken based on the log . This summary is returned as a list of strings .
14,013
def to_csv ( args ) : result_file = args . result_file output_file = args . output_file delimiter = args . delimiter if not os . path . isfile ( result_file ) : raise OSError ( "Results file does not exists" ) headers = [ 'elapsed' , 'epoch' , 'turret_name' , 'scriptrun_time' , 'error' ] headers_row = { } set_database ...
Take a sqlite filled database of results and return a csv file
14,014
def fraction_correct_fuzzy_linear_create_vector ( z , z_cutoff , z_fuzzy_range ) : assert ( z_fuzzy_range * 2 < z_cutoff ) if ( z == None or numpy . isnan ( z ) ) : return None elif ( z >= z_cutoff + z_fuzzy_range ) : return [ 0 , 0 , 1 ] elif ( z <= - z_cutoff - z_fuzzy_range ) : return [ 1 , 0 , 0 ] elif ( - z_cutoff...
A helper function for fraction_correct_fuzzy_linear .
14,015
def apply_quality_control_checks ( seq , check_gen9_seqs = True , check_short_length = True , check_local_gc_content = True , check_global_gc_content = True ) : seq = seq . upper ( ) failure_reasons = [ ] if check_short_length : if len ( seq ) < min_gene_length : failure_reasons . append ( 'minimum_length: Sequence is ...
Raise a ValueError if the given sequence doesn t pass all of the Gen9 quality control design guidelines . Certain checks can be enabled or disabled via the command line .
14,016
def get_default_values ( self ) : out = dict ( dx = 0 , dy = 0 , dz = 0 , theta = 0 , phi = 0 , psi = 0 ) dx , dy , dz , _ = np . mean ( self . coord1 - self . coord2 , axis = 1 ) out [ 'dx' ] = dx out [ 'dy' ] = dy out [ 'dz' ] = dz vec1 = self . coord1 [ : - 1 , 1 ] - self . coord1 [ : - 1 , - 1 ] vec2 = self . coord...
Make a crude estimation of the alignment using the center of mass and general C - > N orientation .
14,017
def get_matrix ( theta , phi , psi , dx , dy , dz , matrix = np . zeros ( ( 4 , 4 ) , dtype = DTYPE ) , angles = np . zeros ( 3 , dtype = DTYPE ) ) : angles [ 0 ] = theta angles [ 1 ] = phi angles [ 2 ] = psi cx , cy , cz = np . cos ( angles ) sx , sy , sz = np . sin ( angles ) rotation = matrix [ : 3 , : 3 ] rotation ...
Build the rotation - translation matrix .
14,018
def _tm ( self , theta , phi , psi , dx , dy , dz ) : matrix = self . get_matrix ( theta , phi , psi , dx , dy , dz ) coord = matrix . dot ( self . coord2 ) dist = coord - self . coord1 d_i2 = ( dist * dist ) . sum ( axis = 0 ) tm = - ( 1 / ( 1 + ( d_i2 / self . d02 ) ) ) return tm
Compute the minimisation target not normalised .
14,019
def write ( self , outputfile = 'out.pdb' , appended = False ) : matrix = self . get_matrix ( ** self . get_current_values ( ) ) out = open ( outputfile , 'w' ) atomid = 1 if appended : for line in open ( self . pdb1 ) : if not line . startswith ( 'ATOM' ) or ( line [ 21 ] != self . chain_1 and line [ 21 ] != ' ' ) : c...
Save the second PDB file aligned to the first .
14,020
def _load_data_alignment ( self , chain1 , chain2 ) : parser = PDB . PDBParser ( QUIET = True ) ppb = PDB . PPBuilder ( ) structure1 = parser . get_structure ( chain1 , self . pdb1 ) structure2 = parser . get_structure ( chain2 , self . pdb2 ) seq1 = str ( ppb . build_peptides ( structure1 ) [ 0 ] . get_sequence ( ) ) ...
Extract the sequences from the PDB file perform the alignment and load the coordinates of the CA of the common residues .
14,021
def _load_data_index ( self , chain1 , chain2 ) : parser = PDB . PDBParser ( QUIET = True ) structure1 = parser . get_structure ( chain1 , self . pdb1 ) structure2 = parser . get_structure ( chain2 , self . pdb2 ) residues1 = list ( structure1 . get_residues ( ) ) residues2 = list ( structure2 . get_residues ( ) ) inde...
Load the coordinates of the CA of the common residues .
14,022
def _json_to_sectionstatus ( section_data ) : section_status = SectionStatus ( ) if section_data [ "AddCodeRequired" ] == 'true' : section_status . add_code_required = True else : section_status . add_code_required = False section_status . current_enrollment = int ( section_data [ "CurrentEnrollment" ] ) current_period...
Returns a uw_sws . models . SectionStatus object created from the passed json .
14,023
def renumber_atoms ( lines ) : new_lines = [ ] current_number = 1 for line in lines : if line . startswith ( 'ATOM' ) or line . startswith ( 'HETATM' ) : new_lines . append ( line [ : 6 ] + string . rjust ( '%d' % current_number , 5 ) + line [ 11 : ] ) current_number += 1 else : if line . startswith ( 'TER' ) : current...
Takes in a list of PDB lines and renumbers the atoms appropriately
14,024
def clean_alternate_location_indicators ( lines ) : new_lines = [ ] previously_seen_alt_atoms = set ( ) for line in lines : if line . startswith ( 'ATOM' ) : alt_loc_id = line [ 16 ] if alt_loc_id != ' ' : atom_name = line [ 12 : 16 ] . strip ( ) res_name = line [ 17 : 20 ] . strip ( ) chain = line [ 21 ] resnum = long...
Keeps only the first atom if alternated location identifiers are being used Removes alternate location ID charactor
14,025
def parse_pdb_ligand_info ( self , pdb_ligand_info ) : mtchs = re . findall ( '(<ligand.*?</ligand>)' , pdb_ligand_info , re . DOTALL ) for m in mtchs : if m . upper ( ) . find ( 'CHEMICALID="{0}"' . format ( self . PDBCode . upper ( ) ) ) != - 1 : ligand_type = re . match ( '<ligand.*?\stype="(.*?)".*?>' , m , re . DO...
This only parses the ligand type as all the other information should be in the . cif file . The XML file has proper capitalization whereas the . cif file uses all caps for the ligand type .
14,026
def add_code_mapping ( self , from_pdb_code , to_pdb_code ) : if from_pdb_code in self . code_map : assert ( self . code_map [ from_pdb_code ] == to_pdb_code ) else : self . code_map [ from_pdb_code ] = to_pdb_code
Add a code mapping without a given instance .
14,027
def reset_password ( self , action_token , signed_data ) : try : action = "reset-password" user = get_user_by_action_token ( action , action_token ) if not user or not user . signed_data_match ( signed_data , action ) : raise mocha_exc . AppError ( "Verification Invalid!" ) if request . method == "POST" : password = re...
Reset the user password . It was triggered by LOST - PASSWORD
14,028
def verify_email ( self , action_token , signed_data ) : try : action = "verify-email" user = get_user_by_action_token ( action , action_token ) if not user or not user . signed_data_match ( signed_data , action ) : raise mocha_exc . AppError ( "Verification Invalid!" ) else : user . set_email_verified ( True ) flash_s...
Verify email account in which a link was sent to
14,029
def oauth_connect ( self , provider , action ) : valid_actions = [ "connect" , "authorized" , "test" ] _redirect = views . auth . Account . account_settings if is_authenticated ( ) else self . login if action not in valid_actions or "oauth" not in __options__ . get ( "registration_methods" ) or not __options__ . get ( ...
This endpoint doesn t check if user is logged in because it has two functions
14,030
def log ( self , message , level = None ) : if level is None : level = logging . INFO current_app . logger . log ( msg = message , level = level )
Write a message to log
14,031
def is_instance ( self , model ) : result = isinstance ( model , self . __model__ ) if result is True : return True err = 'Object {} is not of type {}' raise ValueError ( err . format ( model , self . __model__ ) )
Is instance? Checks if provided object is instance of this service s model .
14,032
def create ( self , ** kwargs ) : model = self . new ( ** kwargs ) return self . save ( model )
Create Instantiates and persists new model populated from provided arguments
14,033
def save ( self , model , commit = True ) : self . is_instance ( model ) db . session . add ( model ) if commit : db . session . commit ( ) return model
Save Puts model into unit of work for persistence . Can optionally commit transaction . Returns persisted model as a result .
14,034
def delete ( self , model , commit = True ) : self . is_instance ( model ) db . session . delete ( model ) if commit : db . session . commit ( ) return model
Delete Puts model for deletion into unit of work and optionall commits transaction
14,035
def is_connectable ( host : str , port : Union [ int , str ] ) -> bool : socket_ = None try : socket_ = socket . create_connection ( ( host , port ) , 1 ) result = True except socket . timeout : result = False finally : if socket_ : socket_ . close ( ) return result
Tries to connect to the device to see if it is connectable .
14,036
def group_lines ( lines ) : groups = [ ] group = [ ] for line in lines : if line . strip ( ) == "" : groups . append ( group [ : ] ) group = [ ] continue group . append ( line ) if group : groups . append ( group [ : ] ) return groups
Split a list of lines using empty lines as separators .
14,037
async def set_neighbors ( self ) : t = time . time ( ) self . logger . debug ( "Settings grid neighbors for the multi-environments." ) tasks = [ ] for i in range ( len ( self . grid ) ) : for j in range ( len ( self . grid [ 0 ] ) ) : addr = self . grid [ i ] [ j ] N , E , S , W = None , None , None , None if i != 0 : ...
Set neighbors for multi - environments their slave environments and agents .
14,038
async def ssh_exec ( server , cmd , timeout = 10 , ** ssh_kwargs ) : conn = await asyncio . wait_for ( asyncssh . connect ( server , ** ssh_kwargs ) , timeout = timeout ) ret = await conn . run ( cmd ) conn . close ( ) return ret
Execute a command on a given server using asynchronous SSH - connection .
14,039
async def spawn_slaves ( self , spawn_cmd , ports = None , ** ssh_kwargs ) : pool = multiprocessing . Pool ( len ( self . nodes ) ) rets = [ ] for i , node in enumerate ( self . nodes ) : server , server_port = node port = ports [ node ] if ports is not None else self . port mgr_addr = "tcp://{}:{}/0" . format ( server...
Spawn multi - environments on the nodes through SSH - connections .
14,040
def get_slave_managers ( self , as_coro = False ) : async def slave_task ( addr ) : r_manager = await self . env . connect ( addr ) return await r_manager . get_slave_managers ( ) tasks = create_tasks ( slave_task , self . addrs ) return run_or_coro ( tasks , as_coro )
Return all slave environment manager addresses .
14,041
def nodes ( self , type = None , failed = False ) : if type is None : type = Node if not issubclass ( type , Node ) : raise ( TypeError ( "{} is not a valid node type." . format ( type ) ) ) if failed not in [ "all" , False , True ] : raise ValueError ( "{} is not a valid node failed" . format ( failed ) ) if failed ==...
Get nodes associated with this participant .
14,042
def print_verbose ( self ) : print "Nodes: " for a in ( self . nodes ( failed = "all" ) ) : print a print "\nVectors: " for v in ( self . vectors ( failed = "all" ) ) : print v print "\nInfos: " for i in ( self . infos ( failed = "all" ) ) : print i print "\nTransmissions: " for t in ( self . transmissions ( failed = "...
Print a verbose representation of a network .
14,043
def vectors ( self , direction = "all" , failed = False ) : if direction not in [ "all" , "incoming" , "outgoing" ] : raise ValueError ( "{} is not a valid vector direction. " "Must be all, incoming or outgoing." . format ( direction ) ) if failed not in [ "all" , False , True ] : raise ValueError ( "{} is not a valid ...
Get vectors that connect at this node .
14,044
def transmissions ( self , direction = "outgoing" , status = "all" , failed = False ) : if direction not in [ "incoming" , "outgoing" , "all" ] : raise ( ValueError ( "You cannot get transmissions of direction {}." . format ( direction ) + "Type can only be incoming, outgoing or all." ) ) if status not in [ "all" , "pe...
Get transmissions sent to or from this node .
14,045
def receive ( self , what = None ) : if self . failed : raise ValueError ( "{} cannot receive as it has failed." . format ( self ) ) received_transmissions = [ ] if what is None : pending_transmissions = self . transmissions ( direction = "incoming" , status = "pending" ) for transmission in pending_transmissions : tra...
Receive some transmissions .
14,046
def replicate ( self , info_in ) : if self . failed : raise ValueError ( "{} cannot replicate as it has failed." . format ( self ) ) from transformations import Replication info_out = type ( info_in ) ( origin = self , contents = info_in . contents ) Replication ( info_in = info_in , info_out = info_out )
Replicate an info .
14,047
def mutate ( self , info_in ) : if self . failed : raise ValueError ( "{} cannot mutate as it has failed." . format ( self ) ) from transformations import Mutation info_out = type ( info_in ) ( origin = self , contents = info_in . _mutated_contents ( ) ) Mutation ( info_in = info_in , info_out = info_out )
Replicate an info + mutation .
14,048
def transmissions ( self , status = "all" ) : if status not in [ "all" , "pending" , "received" ] : raise ( ValueError ( "You cannot get {} transmissions." . format ( status ) + "Status can only be pending, received or all" ) ) if status == "all" : return Transmission . query . filter_by ( vector_id = self . id , faile...
Get transmissions sent along this Vector .
14,049
def serve_forever ( self , banner = None ) : if hasattr ( readline , "read_history_file" ) : try : readline . read_history_file ( self . histfile ) except IOError : pass atexit . register ( self . _save_history ) super ( Shell , self ) . serve_forever ( banner )
Interact with the user .
14,050
def complete ( self , word , state ) : try : import rl rl . completion . suppress_append = True except ImportError : pass word = transform ( word , self . transforms , word = True ) if state == 0 : self . matches = self . get_matches ( word ) try : match = self . matches [ state ] except IndexError : return None else :...
Return the next possible completion for word .
14,051
def exclude_matches ( self , matches ) : for match in matches : for exclude_pattern in self . exclude_patterns : if re . match ( exclude_pattern , match ) is not None : break else : yield match
Filter any matches that match an exclude pattern .
14,052
def gen_filename_completions ( self , word , filenames ) : if not word : return filenames else : trie = pygtrie . CharTrie ( ) for filename in filenames : trie [ filename ] = filename return trie . iterkeys ( prefix = word )
Generate a sequence of filenames that match word .
14,053
def gen_matches ( self , word ) : if word . startswith ( "$" ) : for match in self . gen_variable_completions ( word , os . environ ) : yield match else : head , tail = os . path . split ( word ) filenames = os . listdir ( head or '.' ) completions = self . gen_filename_completions ( tail , filenames ) for match in com...
Generate a sequence of possible completions for word .
14,054
def gen_variable_completions ( self , word , env ) : var = word [ 1 : ] for k in env : if k . startswith ( var ) : yield "$" + k
Generate a sequence of possible variable completions for word .
14,055
def inflect ( self , filename ) : suffix = ( "/" if os . path . isdir ( filename ) else " " ) return self . _escape ( filename ) + suffix
Inflect a filename to indicate its type .
14,056
def state ( self , time = None ) : if time is None : return max ( self . infos ( type = State ) , key = attrgetter ( 'creation_time' ) ) else : states = [ s for s in self . infos ( type = State ) if s . creation_time < time ] return max ( states , key = attrgetter ( 'creation_time' ) )
The most recently - created info of type State at the specfied time .
14,057
def sentry_feature ( app ) : sentry_public_key = app . config . get ( 'SENTRY_PUBLIC_KEY' ) sentry_project_id = app . config . get ( 'SENTRY_PROJECT_ID' ) if not sentry_public_key or not sentry_project_id : return dsn = 'https://{key}@sentry.io/{project_id}' dsn = dsn . format ( key = sentry_public_key , project_id = s...
Sentry feature Adds basic integration with Sentry via the raven library
14,058
def new_plugin ( self , config , * args , ** kwargs ) : typ = None obj = None if 'type' in config : typ = config [ 'type' ] elif isinstance ( config , collections . Mapping ) and len ( config ) == 1 : ( typ , config ) = list ( config . items ( ) ) [ 0 ] obj = self . _ctor ( typ , config , * args , ** kwargs ) if 'name'...
instantiate a plugin creates the object stores it in _instance
14,059
def to_practice_counts ( request ) : data = None if request . method == "POST" : data = json . loads ( request . body . decode ( "utf-8" ) ) [ "filters" ] if "filters" in request . GET : data = load_query_json ( request . GET , "filters" ) if data is None or len ( data ) == 0 : return render_json ( request , { } , temp...
Get number of items available to practice .
14,060
def answer ( request ) : if request . method == 'GET' : return render ( request , 'models_answer.html' , { } , help_text = answer . __doc__ ) elif request . method == 'POST' : practice_filter = get_filter ( request ) practice_context = PracticeContext . objects . from_content ( practice_filter ) saved_answers = _save_a...
Save the answer .
14,061
def user_stats ( request ) : timer ( 'user_stats' ) response = { } data = None if request . method == "POST" : data = json . loads ( request . body . decode ( "utf-8" ) ) [ "filters" ] if "filters" in request . GET : data = load_query_json ( request . GET , "filters" ) if data is None : return render_json ( request , {...
Get user statistics for selected groups of items
14,062
def add ( self , start , end , cut_point = None , skip_rate = None , extend_loop = None ) : self . data . append ( self . parse_loop_line ( [ 'LOOP' , start , end , cut_point , skip_rate , extend_loop ] ) ) assert ( start <= end )
Add a new loop definition .
14,063
def log ( self , text , key = "?????" , force = False ) : if force or self . verbose : print ">>>> {} {}" . format ( key , text ) sys . stdout . flush ( )
Print a string to the logs .
14,064
def input_yes_no ( msg = '' ) : print '\n' + msg while ( True ) : i = raw_input ( 'Input yes or no: ' ) i = i . lower ( ) if i == 'y' or i == 'yes' : return True elif i == 'n' or i == 'no' : return False else : print 'ERROR: Bad input. Must enter y/n/yes/no'
Simple helper function
14,065
def resolve_relation_type_config ( value ) : relation_types = current_app . config [ 'PIDRELATIONS_RELATION_TYPES' ] if isinstance ( value , six . string_types ) : try : obj = next ( rt for rt in relation_types if rt . name == value ) except StopIteration : raise ValueError ( "Relation name '{0}' is not configured." . ...
Resolve the relation type to config object .
14,066
def match_RCSB_pdb_chains ( pdb_id1 , pdb_id2 , cut_off = 60.0 , allow_multiple_matches = False , multiple_match_error_margin = 3.0 , use_seqres_sequences_if_possible = True , strict = True ) : try : stage = pdb_id1 pdb_1 = PDB ( retrieve_pdb ( pdb_id1 ) , strict = strict ) stage = pdb_id2 pdb_2 = PDB ( retrieve_pdb ( ...
A convenience function for match_pdb_chains . The required arguments are two PDB IDs from the RCSB .
14,067
def create_resource ( output_model , rtype , unique , links , existing_ids = None , id_helper = None ) : if isinstance ( id_helper , str ) : idg = idgen ( id_helper ) elif isinstance ( id_helper , GeneratorType ) : idg = id_helper elif id_helper is None : idg = default_idgen ( None ) else : raise ValueError ( 'id_helpe...
General - purpose routine to create a new resource in the output model based on data provided
14,068
def _read_apps ( self ) : apps = { } for cfgfile in glob . iglob ( os . path . join ( self . confdir , '*.conf' ) ) : name = os . path . basename ( cfgfile ) [ 0 : - 5 ] try : app = AppLogParser ( name , cfgfile , self . args , self . logdir , self . fields , self . name_cache , self . report ) except ( LogRaptorOption...
Read the configuration of applications returning a dictionary
14,069
def patterns ( self ) : if not self . args . patterns and not self . args . pattern_files : try : self . args . patterns . append ( self . args . files . pop ( 0 ) ) except IndexError : raise LogRaptorArgumentError ( 'PATTERN' , 'no search pattern' ) patterns = set ( ) if self . args . pattern_files : patterns . update...
A tuple with re . RegexObject objects created from regex pattern arguments .
14,070
def files ( self ) : if not self . args . files and self . recursive : return [ '.' ] else : return self . args . files
A list of input sources . Each item can be a file path a glob path or URL .
14,071
def apps ( self ) : logger . debug ( "initialize applications ..." ) enabled = None apps = self . args . apps or self . _config_apps . keys ( ) unknown = set ( apps ) - set ( self . _config_apps . keys ( ) ) if unknown : raise LogRaptorArgumentError ( "--apps" , "not found apps %r" % list ( unknown ) ) if apps or enabl...
Dictionary with loaded applications .
14,072
def apptags ( self ) : logger . debug ( "populate tags map ..." ) apps = self . _apps . keys ( ) unknown = set ( apps ) unknown . difference_update ( self . _config_apps . keys ( ) ) if unknown : raise ValueError ( "unknown apps: %r" % list ( unknown ) ) apps = [ v for v in self . _config_apps . values ( ) if v . name ...
Map from log app - name to an application .
14,073
def create_dispatcher ( self ) : before_context = max ( self . args . before_context , self . args . context ) after_context = max ( self . args . after_context , self . args . context ) if self . args . files_with_match is not None or self . args . count or self . args . only_matching or self . args . quiet : return U...
Return a dispatcher for configured channels .
14,074
def get_config ( self ) : channels = [ sect . rsplit ( '_' ) [ 0 ] for sect in self . config . sections ( suffix = '_channel' ) ] channels . sort ( ) disabled_apps = [ app for app in self . _config_apps . keys ( ) if app not in self . _apps ] return u'' . join ( [ u"\n--- %s configuration ---" % __package__ , u"\nConfi...
Return a formatted text with main configuration parameters .
14,075
def get_run_summary ( self , run_stats ) : run_stats = run_stats . copy ( ) run_stats [ 'files' ] = len ( run_stats [ 'files' ] ) summary = [ u'\n--- %s run summary ---' % __package__ , u'Number of processed files: %(files)d' , u'Total lines read: %(lines)d' , u'Total log events matched: %(matches)d' , ] if any ( [ app...
Produce a text summary from run statistics .
14,076
def add_template_dirs ( app ) : template_dir = os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'templates' ) app . jinja_loader = jinja2 . ChoiceLoader ( [ app . jinja_loader , jinja2 . FileSystemLoader ( template_dir ) ] )
Add flask_mongo_profiler s template directories .
14,077
def setup ( ) : config_name = ".wallaceconfig" config_path = os . path . join ( os . path . expanduser ( "~" ) , config_name ) if os . path . isfile ( config_path ) : log ( "Wallace config file already exists." , chevrons = False ) else : log ( "Creating Wallace config file at ~/.wallaceconfig..." , chevrons = False ) ...
Walk the user though the Wallace setup .
14,078
def summary ( app ) : r = requests . get ( 'https://{}.herokuapp.com/summary' . format ( app ) ) summary = r . json ( ) [ 'summary' ] click . echo ( "\nstatus \t| count" ) click . echo ( "----------------" ) for s in summary : click . echo ( "{}\t| {}" . format ( s [ 0 ] , s [ 1 ] ) ) num_101s = sum ( [ s [ 1 ] for s i...
Print a summary of a deployed app s status .
14,079
def scale_up_dynos ( id ) : config = PsiturkConfig ( ) config . load_config ( ) dyno_type = config . get ( 'Server Parameters' , 'dyno_type' ) num_dynos_web = config . get ( 'Server Parameters' , 'num_dynos_web' ) num_dynos_worker = config . get ( 'Server Parameters' , 'num_dynos_worker' ) log ( "Scaling up the dynos.....
Scale up the Heroku dynos .
14,080
def deploy ( verbose , app ) : config = PsiturkConfig ( ) config . load_config ( ) config . set ( "Experiment Configuration" , "mode" , "deploy" ) config . set ( "Server Parameters" , "logfile" , "-" ) config . set ( "Shell Parameters" , "launch_in_sandbox_mode" , "false" ) deploy_sandbox_shared_setup ( verbose = verbo...
Deploy app using Heroku to MTurk .
14,081
def qualify ( qualification , value , worker ) : from boto . mturk . connection import MTurkConnection config = PsiturkConfig ( ) config . load_config ( ) aws_access_key_id = config . get ( 'AWS Access' , 'aws_access_key_id' ) aws_secret_access_key = config . get ( 'AWS Access' , 'aws_secret_access_key' ) conn = MTurkC...
Assign a qualification to a worker .
14,082
def dump_database ( id ) : log ( "Generating a backup of the database on Heroku..." ) dump_filename = "data.dump" data_directory = "data" dump_dir = os . path . join ( data_directory , id ) if not os . path . exists ( dump_dir ) : os . makedirs ( dump_dir ) subprocess . call ( "heroku pg:backups capture --app " + id , ...
Backup the Postgres database locally .
14,083
def backup ( app ) : dump_path = dump_database ( app ) config = PsiturkConfig ( ) config . load_config ( ) conn = boto . connect_s3 ( config . get ( 'AWS Access' , 'aws_access_key_id' ) , config . get ( 'AWS Access' , 'aws_secret_access_key' ) , ) bucket = conn . create_bucket ( app , location = boto . s3 . connection ...
Dump the database .
14,084
def create ( example ) : try : this_dir = os . path . dirname ( os . path . realpath ( __file__ ) ) example_dir = os . path . join ( this_dir , os . pardir , "examples" , example ) shutil . copytree ( example_dir , os . path . join ( os . getcwd ( ) , example ) ) log ( "Example created." , delay = 0 ) except TypeError ...
Create a copy of the given example .
14,085
def get_datetime_interval ( timestamp , diff , offset = 0 ) : fin_datetime = datetime . datetime . fromtimestamp ( timestamp + offset ) ini_datetime = datetime . datetime . fromtimestamp ( timestamp - diff ) return ini_datetime , fin_datetime
Returns datetime interval from timestamp backward in the past computed using the milliseconds difference passed as argument . The final datetime is corrected with an optional offset .
14,086
def strftimegen ( start_dt , end_dt ) : if start_dt > end_dt : raise ValueError ( "the start datetime is after the end datetime: (%r,%r)" % ( start_dt , end_dt ) ) def iterftime ( string ) : date_subs = [ i for i in DATE_FORMATS if i [ 1 ] . search ( string ) is not None ] if not date_subs : yield string else : dt = st...
Return a generator function for datetime format strings . The generator produce a day - by - day sequence starting from the first datetime to the second datetime argument .
14,087
def setup_jobs ( outpath , options , input_files ) : job_inputs = None reverse_mapping = None fasta_file_contents = { } for input_file in input_files : assert ( not ( fasta_file_contents . get ( input_file ) ) ) if any ( fnmatch ( input_file , x ) for x in pdb_file_wildcards ) : pdb = PDB . from_filepath ( input_file ,...
This function sets up the jobs by creating the necessary input files as expected . - outpath is where the output is to be stored . - options is the optparse options object . - input_files is a list of paths to input files .
14,088
def reformat ( found_sequences ) : for ( pdb_id , chain , file_name ) , sequence in sorted ( found_sequences . iteritems ( ) ) : header = sequence [ 0 ] assert ( header [ 0 ] == '>' ) tokens = header . split ( '|' ) tokens [ 0 ] = tokens [ 0 ] [ : 5 ] assert ( len ( tokens [ 0 ] ) == 5 ) sequence [ 0 ] = "|" . join ( t...
Truncate the FASTA headers so that the first field is a 4 - character ID .
14,089
def search_configuration_files ( findstr , replacestr = None ) : F = open ( configurationFilesLocation , "r" ) lines = F . readlines ( ) F . close ( ) allerrors = { } alloutput = { } for line in lines : line = line . strip ( ) if line : if line . endswith ( "generate_fragments.py" ) : if not ( os . path . exists ( line...
This function could be used to find and replace paths in the configuration files . At present it only finds phrases .
14,090
def get_local_ip_address ( target ) : ip_adr = '' try : s = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) s . connect ( ( target , 8000 ) ) ip_adr = s . getsockname ( ) [ 0 ] s . close ( ) except : pass return ip_adr
Get the local ip address to access one specific target .
14,091
def connect ( self ) : try : self . sock . connect ( ( self . host , self . port ) ) return self . sock except socket . error as ex : logging . error ( 'Exception while connecting socket on %s:%s - Error %s' % ( self . host , self . port , ex ) ) raise except Exception as ex : logging . exception ( 'Exception while con...
Connect socket to server
14,092
def send_by_packet ( self , data ) : total_sent = 0 while total_sent < PACKET_SIZE : sent = self . sock . send ( data [ total_sent : ] ) if sent == 0 : raise RuntimeError ( "socket connection broken" ) total_sent += sent return total_sent
Send data by packet on socket
14,093
def receive ( self , siz ) : result = bytearray ( ) data = 'x' while len ( data ) > 0 : data = self . sock . recv ( siz - len ( result ) ) result += data if len ( result ) == siz : return result if len ( result ) > siz : raise Exception ( 'Received more bytes than expected' ) raise Exception ( 'Error receiving data. %d...
Receive a known length of bytes from a socket
14,094
def spawn_container ( addr , env_cls = Environment , mgr_cls = EnvManager , set_seed = True , * args , ** kwargs ) : try : import setproctitle as spt title = 'creamas: {}({})' . format ( env_cls . __class__ . __name__ , _get_base_url ( addr ) ) spt . setproctitle ( title ) except : pass if set_seed : _set_random_seeds ...
Spawn a new environment in a given address as a coroutine .
14,095
def _set_random_seeds ( ) : try : import numpy as np np . random . seed ( ) except : pass try : import scipy as sp sp . random . seed ( ) except : pass import random random . seed ( )
Set new random seeds for the process .
14,096
async def report ( self , msg , timeout = 5 ) : try : host_manager = await self . env . connect ( self . host_manager , timeout = timeout ) except : raise ConnectionError ( "Could not reach host manager ({})." . format ( self . host_manager ) ) ret = await host_manager . handle ( msg ) return ret
Report message to the host manager .
14,097
def get_agents ( self , addr = True , agent_cls = None , as_coro = False ) : return self . env . get_agents ( addr = addr , agent_cls = agent_cls )
Get agents from the managed environment .
14,098
async def get_artifacts ( self ) : host_manager = await self . env . connect ( self . _host_manager , timeout = TIMEOUT ) artifacts = await host_manager . get_artifacts ( ) return artifacts
Get all artifacts from the host environment .
14,099
async def spawn ( self , agent_cls , * args , addr = None , ** kwargs ) : _ , addr = await self . menv . spawn ( agent_cls , * args , addr = addr , ** kwargs ) return addr
Spawn an agent to the environment .