idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,900
def password_link_expired(self, now=None):
    """Return True if the password reset link has expired.

    :param now: reference time for the comparison; defaults to the
        current UTC time when falsy/omitted.
    """
    reference = now or datetime.datetime.utcnow()
    return self.password_link_expires < reference
Check if password link expired
12,901
def add_role(self, role):
    """Attach a role to this user.

    The role must pass schema validation and already be persisted
    (have an id); otherwise an x.UserException is raised.
    """
    is_valid = RoleSchema().process(role)
    if not is_valid or not role.id:
        raise x.UserException('Role must be valid and saved before adding to user')
    self.__roles.append(role)
Add role to user Role must be valid and saved first otherwise will raise an exception .
12,902
def has_role(self, role_or_handle):
    """Check whether the user has a given role.

    :param role_or_handle: a role object, or a role handle string.
    :return: True if the user has the role.
    """
    if not isinstance(role_or_handle, str):
        return role_or_handle in self.roles
    # any() replaces the manual flag-and-break loop
    return any(role.handle == role_or_handle for role in self.roles)
Checks if user has role
12,903
def push(remote='origin', branch='master'):
    """Push local commits to the given remote/branch via git."""
    # Bug fix: the message said "Pulling changes from repo" but this
    # command pushes.
    print(cyan("Pushing changes to repo ( %s / %s)..." % (remote, branch)))
    local("git push %s %s" % (remote, branch))
git push commit
12,904
def pull(remote='origin', branch='master'):
    """Pull the latest commits for the given branch from the remote."""
    message = "Pulling changes from repo ( %s / %s)..." % (remote, branch)
    print(cyan(message))
    local("git pull %s %s" % (remote, branch))
git pull commit
12,905
def sync(remote='origin', branch='master'):
    """Pull then push so local and remote branches are in sync."""
    # Bug fix: arguments were passed as (branch, remote), swapping the
    # remote and branch in both calls (pull/push take remote first).
    pull(remote, branch)
    push(remote, branch)
    print(cyan("Git Synced!"))
git pull and push commit
12,906
def update(tournament, match, attachment, **params):
    """Update the attributes of a match attachment via the API."""
    path = "tournaments/%s/matches/%s/attachments/%s" % (
        tournament, match, attachment)
    api.fetch("PUT", path, "match_attachment", **params)
Update the attributes of a match attachment .
12,907
def count_row(engine, table):
    """Return the number of rows in the given table."""
    query = select([func.count()]).select_from(table)
    first_row = engine.execute(query).fetchone()
    return first_row[0]
Return number of rows in a table .
12,908
def get_providers ( self ) : if self . providers : return self . providers providers = dict ( ) for provider in self . config : configurator = provider . lower ( ) + '_config' if not hasattr ( self , configurator ) : err = 'Provider [{}] not recognized' . format ( provider ) raise ValueError ( err ) provider_config = s...
Get OAuth providers Returns a dictionary of oauth applications ready to be registered with flask oauth extension at application bootstrap .
12,909
def token_getter(provider, token=None):
    """Generic OAuth token getter for all providers.

    Returns the given token, falling back to the provider's token
    stored in the session.
    """
    if token is not None:
        return token
    return session.get(provider + '_token')
Generic token getter for all the providers
12,910
def register_token_getter(self, provider):
    """Register a session-based token getter callback for a provider."""
    remote_app = oauth.remote_apps[provider]

    def getter(token=None):
        return self.token_getter(provider, token)

    getattr(remote_app, 'tokengetter')(getter)
Register callback to retrieve token from session
12,911
def vkontakte_config ( self , id , secret , scope = None , offline = False , ** _ ) : if scope is None : scope = 'email,offline' if offline : scope += ',offline' token_params = dict ( scope = scope ) config = dict ( request_token_url = None , access_token_url = 'https://oauth.vk.com/access_token' , authorize_url = 'htt...
Get config dictionary for vkontakte oauth
12,912
def instagram_config ( self , id , secret , scope = None , ** _ ) : scope = scope if scope else 'basic' token_params = dict ( scope = scope ) config = dict ( access_token_url = '/oauth/access_token/' , authorize_url = '/oauth/authorize/' , base_url = 'https://api.instagram.com/' , consumer_key = id , consumer_secret = ...
Get config dictionary for instagram oauth
12,913
def convert ( self , chain_id , residue_id , from_scheme , to_scheme ) : from_scheme = from_scheme . lower ( ) to_scheme = to_scheme . lower ( ) assert ( from_scheme in ResidueRelatrix . schemes ) assert ( to_scheme in ResidueRelatrix . schemes ) return self . _convert ( chain_id , residue_id , from_scheme , to_scheme ...
The API conversion function . This converts between the different residue ID schemes .
12,914
def _convert ( self , chain_id , residue_id , from_scheme , to_scheme ) : if from_scheme == 'rosetta' : atom_id = self . rosetta_to_atom_sequence_maps . get ( chain_id , { } ) [ residue_id ] if to_scheme == 'atom' : return atom_id else : return self . _convert ( chain_id , atom_id , 'atom' , to_scheme ) if from_scheme ...
The actual private conversion function .
12,915
def convert_from_rosetta ( self , residue_id , to_scheme ) : assert ( type ( residue_id ) == types . IntType ) chain_id = None for c , sequence in self . rosetta_sequences . iteritems ( ) : for id , r in sequence : if r . ResidueID == residue_id : assert ( chain_id == None ) chain_id = c if chain_id : return self . con...
A simpler conversion function to convert from Rosetta numbering without requiring the chain identifier .
12,916
def _validate(self):
    """Run every mapping validation check, in order."""
    checks = (
        self._validate_fasta_vs_seqres,
        self._validate_mapping_signature,
        self._validate_id_types,
        self._validate_residue_types,
    )
    for check in checks:
        check()
Validate the mappings .
12,917
def _validate_id_types ( self ) : for sequences in [ self . uniparc_sequences , self . fasta_sequences , self . seqres_sequences , self . rosetta_sequences ] : for chain_id , sequence in sequences . iteritems ( ) : sequence_id_types = set ( map ( type , sequence . ids ( ) ) ) if sequence_id_types : assert ( len ( seque...
Check that the ID types are integers for Rosetta SEQRES and UniParc sequences and 6 - character PDB IDs for the ATOM sequences .
12,918
def _validate_residue_types ( self ) : for chain_id , sequence_map in self . rosetta_to_atom_sequence_maps . iteritems ( ) : rosetta_sequence = self . rosetta_sequences [ chain_id ] atom_sequence = self . atom_sequences [ chain_id ] for rosetta_id , atom_id , _ in sequence_map : assert ( rosetta_sequence [ rosetta_id ]...
Make sure all the residue types map through translation .
12,919
def _create_sequences ( self ) : try : self . pdb . construct_pdb_to_rosetta_residue_map ( self . rosetta_scripts_path , rosetta_database_path = self . rosetta_database_path , cache_dir = self . cache_dir ) except PDBMissingMainchainAtomsException : self . pdb_to_rosetta_residue_map_error = True if self . pdb_id not in...
Get all of the Sequences - Rosetta ATOM SEQRES FASTA UniParc .
12,920
def search ( cls , query_string , options = None , enable_facet_discovery = False , return_facets = None , facet_options = None , facet_refinements = None , deadline = None , ** kwargs ) : search_class = cls . search_get_class_names ( ) [ - 1 ] query_string += ' ' + 'class_name:%s' % ( search_class , ) q = search . Que...
Searches the index . Conveniently searches only for documents that belong to instances of this class .
12,921
def search_update_index ( self ) : doc_id = self . search_get_document_id ( self . key ) fields = [ search . AtomField ( 'class_name' , name ) for name in self . search_get_class_names ( ) ] index = self . search_get_index ( ) if self . searchable_fields is None : searchable_fields = [ ] for field , prop in self . _pro...
Updates the search index for this instance .
12,922
def search_get_class_names(cls):
    """Return the class names used for document indexing.

    Uses the polymodel-style _class_key when available, otherwise the
    class's own name.
    """
    if hasattr(cls, '_class_key'):
        # list() over the key replaces the manual append loop
        return list(cls._class_key())
    return [cls.__name__]
Returns class names for use in document indexing .
12,923
def from_urlsafe(cls, urlsafe):
    """Return a model instance from a urlsafe key string, or None.

    Returns None when the key cannot be parsed, the entity does not
    exist, or the entity is not an instance of this class.
    """
    try:
        key = ndb.Key(urlsafe=urlsafe)
    except Exception:
        # was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt
        return None
    obj = key.get()
    if obj and isinstance(obj, cls):
        return obj
    return None
Returns an instance of the model from a urlsafe string .
12,924
def get_from_search_doc(cls, doc_id):
    """Return a model instance for a search document (or document id)."""
    # accept either a raw id string or a search document object
    raw_id = getattr(doc_id, 'doc_id', doc_id)
    return cls.from_urlsafe(raw_id)
Returns an instance of the model from a search document id .
12,925
def _pre_delete_hook(cls, key):
    """Remove the instance's search document before the entity is deleted."""
    if not cls.searching_enabled:
        return
    doc_id = cls.search_get_document_id(key)
    cls.search_get_index().delete(doc_id)
Removes instance from index .
12,926
def process_answer(self, user, item, asked, answered, time, answer, response_time, guess, **kwargs):
    """Hook invoked during answer streaming, after the predictive model
    has processed each answer.

    Default implementation is a no-op; subclasses may override to react
    to individual answers.
    """
    pass
This method is used during the answer streaming and is called after the predictive model for each answer .
12,927
def _get_sz_info ( self ) : if 'None' == self . _state : return None cmd = 'show virtual-service detail name guestshell+' got = self . cli ( cmd ) got = got [ 'TABLE_detail' ] [ 'ROW_detail' ] sz_cpu = int ( got [ 'cpu_reservation' ] ) sz_disk = int ( got [ 'disk_reservation' ] ) sz_memory = int ( got [ 'memory_reserva...
Obtains the current resource allocations assumes that the guestshell is in an Activated state
12,928
def fractal_dimension ( image ) : pixels = [ ] for i in range ( image . shape [ 0 ] ) : for j in range ( image . shape [ 1 ] ) : if image [ i , j ] > 0 : pixels . append ( ( i , j ) ) lx = image . shape [ 1 ] ly = image . shape [ 0 ] pixels = np . array ( pixels ) if len ( pixels ) < 2 : return 0 scales = np . logspace...
Estimates the fractal dimension of an image with box counting . Counts pixels with value 0 as empty and everything else as non - empty . Input image has to be grayscale .
12,929
def channel_portion ( image , channel ) : rgb = [ ] for i in range ( 3 ) : rgb . append ( image [ : , : , i ] . astype ( int ) ) ch = rgb . pop ( channel ) relative_values = ch - np . sum ( rgb , axis = 0 ) / 2 relative_values = np . maximum ( np . zeros ( ch . shape ) , relative_values ) return float ( np . average ( ...
Estimates the amount of a color relative to other colors .
12,930
def intensity(image):
    """Return the average pixel intensity of an image, in [0, 1].

    Accepts RGB (converted to grayscale) and grayscale images; integer
    images are assumed to be 8-bit and rescaled to [0, 1].
    """
    if len(image.shape) > 2:
        image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) / 255
    elif issubclass(image.dtype.type, np.integer):
        # Bug fix: `image /= 255` mutated the caller's array in place
        # and raises on integer arrays under modern numpy (unsafe cast);
        # rebind to a new float array instead.
        image = image / 255
    return float(np.sum(image) / np.prod(image.shape))
Calculates the average intensity of the pixels in an image . Accepts both RGB and grayscale images .
12,931
def sliding_window ( sequence , win_size , step = 1 ) : try : it = iter ( sequence ) except TypeError : raise ValueError ( "sequence must be iterable." ) if not isinstance ( win_size , int ) : raise ValueError ( "type(win_size) must be int." ) if not isinstance ( step , int ) : raise ValueError ( "type(step) must be in...
Returns a generator that will iterate through the defined chunks of input sequence . Input sequence must be iterable .
12,932
def dna_to_re ( seq ) : seq = seq . replace ( 'K' , '[GT]' ) seq = seq . replace ( 'M' , '[AC]' ) seq = seq . replace ( 'R' , '[AG]' ) seq = seq . replace ( 'Y' , '[CT]' ) seq = seq . replace ( 'S' , '[CG]' ) seq = seq . replace ( 'W' , '[AT]' ) seq = seq . replace ( 'B' , '[CGT]' ) seq = seq . replace ( 'V' , '[ACG]' ...
Return a compiled regular expression that will match anything described by the input sequence . For example a sequence that contains a N matched any base at that position .
12,933
def case_highlight(seq, subseq):
    """Highlight all occurrences of subseq in seq.

    Matches are uppercased and everything else is lowercased.
    """
    # Bug fix: re.subs does not exist (AttributeError at runtime); use
    # re.sub, and escape subseq so regex metacharacters match literally.
    return re.sub(re.escape(subseq.lower()), subseq.upper(), seq.lower())
Highlights all instances of subseq in seq by making them uppercase and everything else lowercase .
12,934
def index_relations ( sender , pid_type , json = None , record = None , index = None , ** kwargs ) : if not json : json = { } pid = PersistentIdentifier . query . filter ( PersistentIdentifier . object_uuid == record . id , PersistentIdentifier . pid_type == pid_type , ) . one_or_none ( ) relations = None if pid : rela...
Add relations to the indexed record .
12,935
def index_siblings ( pid , include_pid = False , children = None , neighbors_eager = False , eager = False , with_deposits = True ) : assert not ( neighbors_eager and eager ) , if children is None : parent_pid = PIDNodeVersioning ( pid = pid ) . parents . first ( ) children = PIDNodeVersioning ( pid = parent_pid ) . ch...
Send sibling records of the passed pid for indexing .
12,936
def iter_paths ( self , pathnames = None , mapfunc = None ) : pathnames = pathnames or self . _pathnames if self . recursive and not pathnames : pathnames = [ '.' ] elif not pathnames : yield [ ] if mapfunc is not None : for mapped_paths in map ( mapfunc , pathnames ) : for path in mapped_paths : if self . recursive an...
Special iteration on paths. Yields couples of path and items. If an expanded path doesn't match any files, a couple of the path and None is returned.
12,937
def check_stat ( self , path ) : statinfo = os . stat ( path ) st_mtime = datetime . fromtimestamp ( statinfo . st_mtime ) if platform . system ( ) == 'Linux' : check = st_mtime >= self . start_dt else : st_ctime = datetime . fromtimestamp ( statinfo . st_ctime ) check = st_mtime >= self . start_dt and st_ctime <= self...
Checks logfile stat information for excluding files not in datetime period . On Linux it s possible to checks only modification time because file creation info are not available so it s possible to exclude only older files . In Unix BSD systems and windows information about file creation date and times are available so...
12,938
def add(self, files, items):
    """Associate each of the given files with a list of items.

    :param files: a single pathname (str/bytes) or an iterable of them.
    :param items: items to record for every pathname.
    """
    if isinstance(files, (str, bytes)):
        files = [files]
    for pathname in files:
        try:
            self._filemap[pathname].extend(items)
        except KeyError:
            # Bug fix: previously the `items` list itself was stored,
            # so several pathnames aliased one list and a later extend
            # for one pathname silently mutated the others. Store a copy.
            self._filemap[pathname] = list(items)
Add a list of files with a reference to a list of objects .
12,939
def recruit ( self ) : participants = Participant . query . with_entities ( Participant . status ) . all ( ) if not self . networks ( full = False ) : print "All networks are full, closing recruitment." self . recruiter ( ) . close_recruitment ( ) elif [ p for p in participants if p . status < 100 ] : print "People are...
Recruit more participants .
12,940
def data_check ( self , participant ) : participant_id = participant . uniqueid nodes = Node . query . filter_by ( participant_id = participant_id ) . all ( ) if len ( nodes ) != self . experiment_repeats + self . practice_repeats : print ( "Error: Participant has {} nodes. Data check failed" . format ( len ( nodes ) )...
Check a participant's data.
12,941
def add_node_to_network ( self , node , network ) : network . add_node ( node ) node . receive ( ) environment = network . nodes ( type = Environment ) [ 0 ] environment . connect ( whom = node ) gene = node . infos ( type = LearningGene ) [ 0 ] . contents if ( gene == "social" ) : prev_agents = RogersAgent . query . f...
Add participant s node to a network .
12,942
def create_state(self, proportion):
    """Create an environmental state, flipping the proportion half the time."""
    contents = 1 - proportion if random.random() < 0.5 else proportion
    State(origin=self, contents=contents)
Create an environmental state .
12,943
def step ( self ) : current_state = max ( self . infos ( type = State ) , key = attrgetter ( 'creation_time' ) ) current_contents = float ( current_state . contents ) new_contents = 1 - current_contents info_out = State ( origin = self , contents = new_contents ) transformations . Mutation ( info_in = current_state , i...
Prompt the environment to change .
12,944
def print_subprocess_output ( subp ) : if subp : if subp . errorcode != 0 : print ( '<error errorcode="%s">' % str ( subp . errorcode ) ) print ( subp . stderr ) print ( "</error>" ) print_tag ( 'stdout' , '\n%s\n' % subp . stdout ) else : print_tag ( 'success' , '\n%s\n' % subp . stdout ) print_tag ( 'warnings' , '\n%...
Prints the stdout and stderr output .
12,945
def get_all(self, force_download=False):
    """Fetch metadata for every item in this list as Item objects."""
    fetch = self.client.get_item
    return [fetch(url, force_download) for url in self.item_urls]
Retrieve the metadata for all items in this list from the server as Item objects
12,946
def get_item(self, item_index, force_download=False):
    """Fetch metadata for the item at the given index in this ItemGroup."""
    url = self.item_urls[item_index]
    return self.client.get_item(url, force_download)
Retrieve the metadata for a specific item in this ItemGroup
12,947
def refresh(self):
    """Re-download this ItemList from the server and update it in place."""
    latest = self.client.get_item_list(self.url())
    self.item_urls = latest.urls()
    self.list_name = latest.name()
    return self
Update this ItemList by re - downloading it from the server
12,948
def append(self, items):
    """Add items to this ItemList on the server, then refresh locally."""
    response = self.client.add_to_item_list(items, self.url())
    self.refresh()
    return response
Add some items to this ItemList and save the changes to the server
12,949
def get_document(self, index=0):
    """Return metadata for the document at `index` as a Document object.

    Raises ValueError when no document exists at that index.
    """
    try:
        doc_meta = self.metadata()['alveo:documents'][index]
    except IndexError:
        raise ValueError('No document exists for this item with index: ' + str(index))
    return Document(doc_meta, self.client)
Return the metadata for the specified document as a Document object
12,950
def get_primary_text(self, force_download=False):
    """Download this item's primary text from the server."""
    item_url = self.url()
    return self.client.get_primary_text(item_url, force_download)
Retrieve the primary text for this item from the server
12,951
def get_annotations(self, atype=None, label=None):
    """Fetch this item's annotations, optionally filtered by type and label."""
    own_url = self.url()
    return self.client.get_item_annotations(own_url, atype, label)
Retrieve the annotations for this item from the server
12,952
def get_content(self, force_download=False):
    """Retrieve this Document's raw content from the server."""
    client = self.client
    return client.get_document(self.url(), force_download)
Retrieve the content for this Document from the server
12,953
def download_content ( self , dir_path = '' , filename = None , force_download = False ) : if filename is None : filename = self . get_filename ( ) path = os . path . join ( dir_path , filename ) data = self . client . get_document ( self . url ( ) , force_download ) with open ( path , 'wb' ) as f : f . write ( data ) ...
Download the content for this document to a file
12,954
def generic_ref_formatter ( view , context , model , name , lazy = False ) : try : if lazy : rel_model = getattr ( model , name ) . fetch ( ) else : rel_model = getattr ( model , name ) except ( mongoengine . DoesNotExist , AttributeError ) as e : return Markup ( '<span class="label label-danger">Error</span> <small>%s...
For GenericReferenceField and LazyGenericReferenceField
12,955
def generic_document_type_formatter(view, context, model, name):
    """Render the AdminLog.document field's type as a link to its list view."""
    doc_model = model.get('document').document_type
    link = doc_model.get_admin_list_url()
    return Markup('<a href="%s">%s</a>' % (link, doc_model.__name__))
Return AdminLog . document field wrapped in URL to its list view .
12,956
def return_page ( page ) : try : hit_id = request . args [ 'hit_id' ] assignment_id = request . args [ 'assignment_id' ] worker_id = request . args [ 'worker_id' ] mode = request . args [ 'mode' ] return render_template ( page , hit_id = hit_id , assignment_id = assignment_id , worker_id = worker_id , mode = mode ) exc...
Return a rendered template .
12,957
def quitter():
    """Override the psiTurk quitter route: log the hit and report success."""
    exp = experiment(session)
    exp.log("Quitter route was hit.")
    payload = dumps({"status": "success"})
    return Response(payload, status=200, mimetype='application/json')
Override the psiTurk quitter route.
12,958
def ad_address ( mode , hit_id ) : if mode == "debug" : address = '/complete' elif mode in [ "sandbox" , "live" ] : username = os . getenv ( 'psiturk_access_key_id' , config . get ( "psiTurk Access" , "psiturk_access_key_id" ) ) password = os . getenv ( 'psiturk_secret_access_id' , config . get ( "psiTurk Access" , "ps...
Get the address of the ad on AWS .
12,959
def connect ( node_id , other_node_id ) : exp = experiment ( session ) direction = request_parameter ( parameter = "direction" , default = "to" ) if type ( direction == Response ) : return direction node = models . Node . query . get ( node_id ) if node is None : return error_response ( error_type = "/node/connect, nod...
Connect to another node .
12,960
def get_info ( node_id , info_id ) : exp = experiment ( session ) node = models . Node . query . get ( node_id ) if node is None : return error_response ( error_type = "/info, node does not exist" ) info = models . Info . query . get ( info_id ) if info is None : return error_response ( error_type = "/info GET, info do...
Get a specific info .
12,961
def transformation_post ( node_id , info_in_id , info_out_id ) : exp = experiment ( session ) transformation_type = request_parameter ( parameter = "transformation_type" , parameter_type = "known_class" , default = models . Transformation ) if type ( transformation_type ) == Response : return transformation_type node =...
Transform an info .
12,962
def api_notifications ( ) : event_type = request . values [ 'Event.1.EventType' ] assignment_id = request . values [ 'Event.1.AssignmentId' ] db . logger . debug ( 'rq: Queueing %s with id: %s for worker_function' , event_type , assignment_id ) q . enqueue ( worker_function , event_type , assignment_id , None ) db . lo...
Receive MTurk REST notifications .
12,963
def process ( source , target , rdfsonly , base = None , logger = logging ) : for link in source . match ( ) : s , p , o = link [ : 3 ] if s == ( base or '' ) + '@docheader' : continue if p in RESOURCE_MAPPING : p = RESOURCE_MAPPING [ p ] if o in RESOURCE_MAPPING : o = RESOURCE_MAPPING [ o ] if p == VERSA_BASEIRI + 're...
Prepare a statement into a triple ready for rdflib graph
12,964
def write ( models , base = None , graph = None , rdfsonly = False , prefixes = None , logger = logging ) : prefixes = prefixes or { } g = graph or rdflib . Graph ( ) g . bind ( 'v' , VNS ) for k , v in prefixes . items ( ) : g . bind ( k , v ) for m in models : base_out = m . base process ( m , g , rdfsonly , base = b...
See the command line help
12,965
def routing_feature ( app ) : app . url_map . converters [ 'regex' ] = RegexConverter urls = app . name . rsplit ( '.' , 1 ) [ 0 ] + '.urls.urls' try : urls = import_string ( urls ) except ImportError as e : err = 'Failed to import {}. If it exists, check that it does not ' err += 'import something non-existent itself!...
Add routing feature Allows to define application routes un urls . py file and use lazy views . Additionally enables regular exceptions in route definitions
12,966
def undoable(method):
    """Decorator that makes an instance method undoable.

    The call is wrapped in a Command and executed through self.do() so
    the undo machinery can reverse it later.
    """
    from functools import wraps

    # Fix: preserve the wrapped method's __name__/__doc__ for
    # introspection (the wrapper previously hid them).
    @wraps(method)
    def undoable_method(self, *args):
        return self.do(Command(self, method, *args))
    return undoable_method
Decorator undoable allows an instance method to be undone .
12,967
def get_template_directory(self):
    """Return the path to the migration templates directory.

    Used when running the `db init` command.
    """
    # renamed local: `dir` shadowed the builtin of the same name
    templates_dir = os.path.join(os.path.dirname(__file__), 'templates')
    return templates_dir
Get path to migrations templates This will get used when you run the db init command
12,968
async def pull_metrics ( self , event_fn , loop = None ) : if self . lazy and not self . ready : return None logger = self . get_logger ( ) ts = timer ( ) logger . trace ( "Waiting for process event" ) result = await self . process ( event_fn ) td = int ( timer ( ) - ts ) logger . trace ( "It took: {}ms" . format ( td ...
Method called by core . Should not be overwritten .
12,969
def ready ( self ) : logger = self . get_logger ( ) now = current_ts ( ) logger . trace ( "Current time: {0}" . format ( now ) ) logger . trace ( "Last Run: {0}" . format ( self . _last_run ) ) delta = ( now - self . _last_run ) logger . trace ( "Delta: {0}, Interval: {1}" . format ( delta , self . interval * 1000 ) ) ...
Function used when agent is lazy . It is being processed only when ready condition is satisfied
12,970
def create_jwt(self, expires_in=None):
    """Create a signed, timed JWT carrying the user id.

    The id is later used to retrieve the user's data.
    """
    ttl = expires_in or get_jwt_ttl()
    return utils.sign_jwt(
        data={"id": self.user.id},
        secret_key=get_jwt_secret(),
        salt=get_jwt_salt(),
        expires_in=ttl,
    )
Create a secure timed JWT token that can be passed . It save the user id which later will be used to retrieve the data
12,971
def sendgmail ( self , subject , recipients , plaintext , htmltext = None , cc = None , debug = False , useMIMEMultipart = True , gmail_account = 'kortemmelab@gmail.com' , pw_filepath = None ) : smtpserver = smtplib . SMTP ( "smtp.gmail.com" , 587 ) smtpserver . ehlo ( ) smtpserver . starttls ( ) smtpserver . ehlo gmai...
For this function to work the password for the gmail user must be colocated with this file or passed in .
12,972
def show_one(request, post_process_fun, object_class, id, template='common_json.html'):
    """Return object of the given type with the specified identifier ."""
    # NOTE: the docstring above is served to clients via help_text, so
    # its text is kept unchanged.
    instance = get_object_or_404(object_class, pk=id)
    payload = post_process_fun(request, instance)
    return render_json(request, payload, template=template,
                       help_text=show_one.__doc__)
Return object of the given type with the specified identifier .
12,973
def show_more ( request , post_process_fun , get_fun , object_class , should_cache = True , template = 'common_json.html' , to_json_kwargs = None ) : if not should_cache and 'json_orderby' in request . GET : return render_json ( request , { 'error' : "Can't order the result according to the JSON field, because the cach...
Return list of objects of the given type .
12,974
def log ( request ) : if request . method == "POST" : log_dict = json_body ( request . body . decode ( "utf-8" ) ) if 'message' not in log_dict : return HttpResponseBadRequest ( 'There is no message to log!' ) levels = { 'debug' : JAVASCRIPT_LOGGER . debug , 'info' : JAVASCRIPT_LOGGER . info , 'warn' : JAVASCRIPT_LOGGE...
Log an event from the client to the server .
12,975
def custom_config ( request ) : if request . method == 'POST' : config_dict = json_body ( request . body . decode ( 'utf-8' ) ) CustomConfig . objects . try_create ( config_dict [ 'app_name' ] , config_dict [ 'key' ] , config_dict [ 'value' ] , request . user . id , config_dict . get ( 'condition_key' ) if config_dict ...
Save user - specific configuration property .
12,976
def languages(request):
    """Returns languages that are available in the system ."""
    # NOTE: the docstring above is served to clients via help_text, so
    # its text is kept unchanged.
    if hasattr(settings, 'LANGUAGE_DOMAINS'):
        payload = settings.LANGUAGE_DOMAINS
    else:
        payload = {"error": "Languages are not set. (Set LANGUAGE_DOMAINS in settings.py)"}
    return render_json(request, payload, template='common_json.html',
                       help_text=languages.__doc__)
Returns languages that are available in the system .
12,977
def channel_to_id ( slack , channel ) : channels = slack . api_call ( 'channels.list' ) . get ( 'channels' ) or [ ] groups = slack . api_call ( 'groups.list' ) . get ( 'groups' ) or [ ] if not channels and not groups : raise RuntimeError ( "Couldn't get channels and groups." ) ids = [ c [ 'id' ] for c in channels + gro...
Surely there's a better way to do this...
12,978
def send_message(slack):
    """Interactively prompt for a channel and a message, then send it."""
    channel = input('Which channel would you like to message? ')
    message = input('What should the message be? ')
    target_id = channel_to_id(slack, channel)
    print(f"Sending message to #{channel} (id: {target_id})!")
    slack.rtm_send_message(target_id, message)
Prompt for and send a message to a channel .
12,979
def parse_device ( lines ) : name , status_line , device = parse_device_header ( lines . pop ( 0 ) ) if not status_line : status_line = lines . pop ( 0 ) status = parse_device_status ( status_line , device [ "personality" ] ) bitmap = None resync = None for line in lines : if line . startswith ( " bitmap:" ) : bit...
Parse all the lines of a device block .
12,980
def match_etag ( etag , header , weak = False ) : if etag is None : return False m = etag_re . match ( etag ) if not m : raise ValueError ( "Not a well-formed ETag: '%s'" % etag ) ( is_weak , etag ) = m . groups ( ) parsed_header = parse_etag_header ( header ) if parsed_header == '*' : return True if is_weak and not we...
Try to match an ETag against a header value .
12,981
def datetime_to_httpdate(dt):
    """Convert a datetime.datetime or Unix timestamp to an HTTP date string.

    Raises TypeError for any other input type.
    """
    if isinstance(dt, datetime):
        return format_date_time(datetime_to_timestamp(dt))
    if isinstance(dt, (int, float)):
        return format_date_time(dt)
    raise TypeError("expected datetime.datetime or timestamp (int/float),"
                    " got '%s'" % dt)
Convert datetime . datetime or Unix timestamp to HTTP date .
12,982
def timedelta_to_httpdate ( td ) : if isinstance ( td , ( int , float ) ) : return format_date_time ( time . time ( ) + td ) elif isinstance ( td , timedelta ) : return format_date_time ( time . time ( ) + total_seconds ( td ) ) else : raise TypeError ( "expected datetime.timedelta or number of seconds" "(int/float), g...
Convert datetime . timedelta or number of seconds to HTTP date .
12,983
def cache_control ( max_age = None , private = False , public = False , s_maxage = None , must_revalidate = False , proxy_revalidate = False , no_cache = False , no_store = False ) : if all ( [ private , public ] ) : raise ValueError ( "'private' and 'public' are mutually exclusive" ) if isinstance ( max_age , timedelt...
Generate the value for a Cache - Control header .
12,984
def get_incidents ( self ) -> Union [ list , bool ] : brotts_entries_left = True incidents_today = [ ] url = self . url while brotts_entries_left : requests_response = requests . get ( url , params = self . parameters ) rate_limited = requests_response . headers . get ( 'x-ratelimit-reset' ) if rate_limited : print ( "...
Get today s incidents .
12,985
def from_template(args):
    """Create a new oct project from an existing template archive."""
    with tarfile.open(args.template) as tar:
        names = tar.getnames()
        prefix = os.path.commonprefix(names)
        check_template(names, prefix)
        tar.extractall(args.name, members=get_members(tar, prefix))
Create a new oct project from existing template
12,986
def from_oct ( args ) : project_name = args . name env = Environment ( loader = PackageLoader ( 'oct.utilities' , 'templates' ) ) config_content = env . get_template ( 'configuration/config.json' ) . render ( script_name = 'v_user.py' ) script_content = env . get_template ( 'scripts/v_user.j2' ) . render ( ) try : os ....
Create a new oct project
12,987
def as_data_frame ( self ) -> pandas . DataFrame : header_gene = { } header_multiplex = { } headr_transitions = { } for gene in self . influence_graph . genes : header_gene [ gene ] = repr ( gene ) header_multiplex [ gene ] = f"active multiplex on {gene!r}" headr_transitions [ gene ] = f"K_{gene!r}" columns = defaultdi...
Create a panda DataFrame representation of the resource table .
12,988
def create ( self , r , r_ , R = 200 ) : x , y = give_dots ( R , r , r_ , spins = 20 ) xy = np . array ( [ x , y ] ) . T xy = np . array ( np . around ( xy ) , dtype = np . int64 ) xy = xy [ ( xy [ : , 0 ] >= - 250 ) & ( xy [ : , 1 ] >= - 250 ) & ( xy [ : , 0 ] < 250 ) & ( xy [ : , 1 ] < 250 ) ] xy = xy + 250 img = np ...
Create new spirograph image with given arguments . Returned image is scaled to agent s preferred image size .
12,989
def hedonic_value(self, novelty):
    """Score a novelty value relative to the agent's desired novelty.

    Returns the Gaussian pdf at `novelty`, normalised so the desired
    novelty itself scores 1.0.
    """
    peak = gaus_pdf(self.desired_novelty, self.desired_novelty, 4)
    return gaus_pdf(novelty, self.desired_novelty, 4) / peak
Given the agent s desired novelty how good the novelty value is .
12,990
def evaluate(self, artifact):
    """Evaluate an artifact against the agent's short-term memory.

    :return: an (evaluation, framing) tuple; framing is always None.
    """
    if self.desired_novelty > 0:
        # Bug fix: this branch returned a bare scalar while the other
        # branch returned a (value, None) tuple, so callers unpacking
        # the result would break. Return a consistent 2-tuple.
        return self.hedonic_value(self.novelty(artifact.obj)), None
    return self.novelty(artifact.obj) / self.img_size, None
Evaluate the artifact with respect to the agents short term memory .
12,991
def learn(self, spiro, iterations=1):
    """Train the short-term memory on a spirograph a number of times."""
    for _ in range(iterations):
        self.stmem.train_cycle(spiro.obj.flatten())
Train short term memory with given spirograph .
12,992
def plot_places ( self ) : from matplotlib import pyplot as plt fig , ax = plt . subplots ( ) x = [ ] y = [ ] if len ( self . arg_history ) > 1 : xs = [ ] ys = [ ] for p in self . arg_history : xs . append ( p [ 0 ] ) ys . append ( p [ 1 ] ) ax . plot ( xs , ys , color = ( 0.0 , 0.0 , 1.0 , 0.1 ) ) for a in self . A : ...
Plot places where the agent has been and generated a spirograph .
12,993
def destroy(self, folder=None):
    """Tear down the environment and all subprocess slaves.

    :return: a list holding the save_info result.
    """
    ameans = [(0, 0, 0)] * 3
    ret = [self.save_info(folder, ameans)]
    aiomas.run(until=self.stop_slaves(folder))
    pool = self._pool
    pool.close()
    pool.terminate()
    pool.join()
    self._env.shutdown()
    return ret
Destroy the environment and the subprocesses .
12,994
def add ( self , r ) : id = r . get_residue_id ( ) if self . order : last_id = self . order [ - 1 ] if id in self . order : raise colortext . Exception ( 'Warning: using code to "allow for multiresidue noncanonicals" - check this case manually.' ) id = '%s.%d' % ( str ( id ) , self . special_insertion_count ) self . sp...
Takes an id and a Residue r and adds them to the Sequence .
12,995
def set_type(self, sequence_type):
    """Set the type of a Sequence if it has not been set yet.

    Applies the type to every residue; asserts no residue was typed
    before.
    """
    if not self.sequence_type:
        # .items() replaces the Python 2-only .iteritems(); also use
        # `is None` instead of `== None`
        for _, residue in self.sequence.items():
            assert residue.residue_type is None
            residue.residue_type = sequence_type
        self.sequence_type = sequence_type
Set the type of a Sequence if it has not been set .
12,996
def from_sequence(chain, list_of_residues, sequence_type=None):
    """Build a Sequence of Residues, indexed from 1, for the given chain."""
    seq = Sequence(sequence_type)
    for position, residue_aa in enumerate(list_of_residues, start=1):
        seq.add(Residue(chain, position, residue_aa, sequence_type))
    return seq
Takes in a chain identifier and protein sequence and returns a Sequence object of Residues indexed from 1 .
12,997
def substitution_scores_match ( self , other ) : overlap = set ( self . substitution_scores . keys ( ) ) . intersection ( set ( other . substitution_scores . keys ( ) ) ) for k in overlap : if not ( self . substitution_scores [ k ] == None or other . substitution_scores [ k ] == None ) : if self . substitution_scores [...
Check to make sure that the substitution scores agree. If one map has a null score and the other has a non-null score, we trust the other's score, and vice versa.
12,998
def merge ( self , other ) : our_element_frequencies = self . items their_element_frequencies = other . items for element_name , freq in sorted ( our_element_frequencies . iteritems ( ) ) : our_element_frequencies [ element_name ] = max ( our_element_frequencies . get ( element_name , 0 ) , their_element_frequencies . ...
Merge two element counters . For all elements we take the max count from both counters .
12,999
def dump(self, obj, fp):
    """Serialize obj as an Avro-format byte stream to the file-like fp.

    Raises AvroTypeException when obj does not match the schema.
    """
    schema = self._raw_schema
    if not validate(obj, schema):
        raise AvroTypeException(self._avro_schema, obj)
    fastavro_write_data(fp, obj, schema)
Serializes obj as an avro - format byte stream to the provided fp file - like object stream .