idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
49,000
def create_organization(self, auth, owner_name, org_name, full_name=None, description=None, website=None, location=None):
    """Create a new organization owned by *owner_name* and return it.

    Only org_name is required; the remaining descriptive fields default to None.
    """
    payload = {
        "username": org_name,
        "full_name": full_name,
        "description": description,
        "website": website,
        "location": location,
    }
    endpoint = "/admin/users/{u}/orgs".format(u=owner_name)
    resp = self.post(endpoint, auth=auth, data=payload)
    return GogsOrg.from_json(resp.json())
Creates a new organization and returns the created organization .
49,001
def create_organization_team(self, auth, org_name, name, description=None, permission="read"):
    """Create a new team inside organization *org_name* and return it."""
    payload = {
        "name": name,
        "description": description,
        "permission": permission,
    }
    endpoint = "/admin/orgs/{o}/teams".format(o=org_name)
    resp = self.post(endpoint, auth=auth, data=payload)
    return GogsTeam.from_json(resp.json())
Creates a new team of the organization .
49,002
def add_team_membership(self, auth, team_id, username):
    """Add *username* to the team identified by *team_id*."""
    self.put("/admin/teams/{t}/members/{u}".format(t=team_id, u=username), auth=auth)
Add user to team .
49,003
def remove_team_membership(self, auth, team_id, username):
    """Remove *username* from the team identified by *team_id*."""
    self.delete("/admin/teams/{t}/members/{u}".format(t=team_id, u=username), auth=auth)
Remove user from team .
49,004
def add_repo_to_team(self, auth, team_id, repo_name):
    """Add repository *repo_name* to the team, or update the association."""
    self.put("/admin/teams/{t}/repos/{r}".format(t=team_id, r=repo_name), auth=auth)
Add a repo to a team, or update the existing association.
49,005
def list_deploy_keys(self, auth, username, repo_name):
    """List the deploy keys registered for the given repository."""
    endpoint = "/repos/{u}/{r}/keys".format(u=username, r=repo_name)
    resp = self.get(endpoint, auth=auth)
    return [GogsRepo.DeployKey.from_json(key_json) for key_json in resp.json()]
List deploy keys for the specified repo .
49,006
def get_deploy_key(self, auth, username, repo_name, key_id):
    """Fetch a single deploy key of the given repository by id."""
    endpoint = "/repos/{u}/{r}/keys/{k}".format(u=username, r=repo_name, k=key_id)
    resp = self.get(endpoint, auth=auth)
    return GogsRepo.DeployKey.from_json(resp.json())
Get a deploy key for the specified repo .
49,007
def add_deploy_key(self, auth, username, repo_name, title, key_content):
    """Register a new deploy key on the given repository and return it."""
    payload = {"title": title, "key": key_content}
    endpoint = "/repos/{u}/{r}/keys".format(u=username, r=repo_name)
    resp = self.post(endpoint, auth=auth, data=payload)
    return GogsRepo.DeployKey.from_json(resp.json())
Add a deploy key to the specified repo .
49,008
def delete_deploy_key(self, auth, username, repo_name, key_id):
    """Remove the deploy key identified by *key_id* from the repository."""
    endpoint = "/repos/{u}/{r}/keys/{k}".format(u=username, r=repo_name, k=key_id)
    self.delete(endpoint, auth=auth)
Remove deploy key for the specified repo .
49,009
def delete(self, path, auth=None, **kwargs):
    """Manually issue a DELETE request and verify the response is OK."""
    resp = self._delete(path, auth=auth, **kwargs)
    return self._check_ok(resp)
Manually make a DELETE request .
49,010
def get(self, path, auth=None, **kwargs):
    """Manually issue a GET request and verify the response is OK."""
    resp = self._get(path, auth=auth, **kwargs)
    return self._check_ok(resp)
Manually make a GET request .
49,011
def patch(self, path, auth=None, **kwargs):
    """Manually issue a PATCH request and verify the response is OK."""
    resp = self._patch(path, auth=auth, **kwargs)
    return self._check_ok(resp)
Manually make a PATCH request .
49,012
def post(self, path, auth=None, **kwargs):
    """Manually issue a POST request and verify the response is OK."""
    resp = self._post(path, auth=auth, **kwargs)
    return self._check_ok(resp)
Manually make a POST request .
49,013
def put(self, path, auth=None, **kwargs):
    """Manually issue a PUT request and verify the response is OK."""
    resp = self._put(path, auth=auth, **kwargs)
    return self._check_ok(resp)
Manually make a PUT request .
49,014
def _fail(response):
    """Raise an ApiFailure describing *response*.

    Includes status code, reason and url; appends the server's JSON
    'message' field when one can be parsed out of the body.
    """
    detail = "Status code: {}-{}, url: {}".format(response.status_code, response.reason, response.url)
    try:
        detail += ", message:{}".format(response.json()["message"])
    except (ValueError, KeyError):
        # No JSON body, or no 'message' key - keep the basic detail only.
        pass
    raise ApiFailure(detail, response.status_code)
Raise an ApiFailure pertaining to the given response
49,015
def Printer(open_file=sys.stdout, closing=False):
    """Coroutine that writes each received string to *open_file*, newline-terminated.

    If *closing* is true, the file is closed when the generator is closed.
    Fix: the close-failure guard was a bare ``except:`` which also swallowed
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    try:
        while True:
            logstr = (yield)
            open_file.write(logstr)
            open_file.write('\n')
    except GeneratorExit:
        if closing:
            try:
                open_file.close()
            except Exception:
                # Best-effort close: a failure here must not propagate
                # out of generator shutdown.
                pass
Writes received items to the given file, one per line.
49,016
def FilePrinter(filename, mode='a', closing=True):
    """Open *filename* (user-expanded, absolute) and return a Printer writing to it."""
    target = os.path.abspath(os.path.expanduser(filename))
    return Printer(open(target, mode), closing)
Opens the given file and returns a printer to it .
49,017
def Emailer(recipients, sender=None):
    """Coroutine that emails each received log message to *recipients*.

    Uses the local SMTP server; the sender defaults to lggr@<hostname>.
    """
    import smtplib
    if not sender:
        sender = 'lggr@{0}'.format(socket.gethostname())
    smtp = smtplib.SMTP('localhost')
    try:
        while True:
            logstr = (yield)
            try:
                smtp.sendmail(sender, recipients, logstr)
            except smtplib.SMTPException:
                # Best-effort delivery: drop messages that fail to send.
                pass
    except GeneratorExit:
        smtp.quit()
Sends messages as emails to the given list of recipients .
49,018
def GMailer(recipients, username, password, subject='Log message from lggr.py'):
    """Coroutine that emails each received log message via a GMail account.

    Logs in as <username>@gmail.com over STARTTLS and sends one email per
    recipient per message. Closing the generator quits the SMTP session.
    """
    import smtplib
    srvr = smtplib.SMTP('smtp.gmail.com', 587)
    srvr.ehlo()
    srvr.starttls()  # GMail requires TLS on port 587
    srvr.ehlo()
    srvr.login(username, password)
    # Accept a single recipient as well as a list/tuple of recipients.
    if not (isinstance(recipients, list) or isinstance(recipients, tuple)):
        recipients = [recipients]
    gmail_sender = '{0}@gmail.com'.format(username)
    # Message template: {0} is the recipient address, {1} the log message body.
    msg = 'To: {0}\nFrom: ' + gmail_sender + '\nSubject: ' + subject + '\n'
    msg = msg + '\n{1}\n\n'
    try:
        while True:
            logstr = (yield)
            for rcp in recipients:
                message = msg.format(rcp, logstr)
                srvr.sendmail(gmail_sender, rcp, message)
    except GeneratorExit:
        srvr.quit()
Sends messages as emails to the given list of recipients from a GMail account .
49,019
def add(self, levels, logger):
    """Register *logger* under one level, or under each level in a list/tuple."""
    if not isinstance(levels, (list, tuple)):
        levels = [levels]
    for lvl in levels:
        self.config[lvl].add(logger)
Given a list or tuple of logging levels add a logger instance to each .
49,020
def remove(self, level, logger):
    """Detach *logger* from *level* (if registered) and close it either way."""
    self.config[level].discard(logger)
    logger.close()
Given a level, remove a given logger function if it is a member of that level, closing the logger function either way.
49,021
def clear(self, level):
    """Close and then remove every logger registered at *level*."""
    for member in self.config[level]:
        member.close()
    self.config[level].clear()
Remove all logger functions from a given level .
49,022
def _log(self, level, fmt, args=None, extra=None, exc_info=None, inc_stackinfo=False, inc_multiproc=False):
    """Format a record and send it to every logging coroutine for *level*.

    The formatted string is appended to self.history when keep_history is
    set. Coroutines that have exited (raise StopIteration on send) are
    removed from the level afterwards, which itself emits an info message.
    """
    if not self.enabled:
        return
    log_record = self._make_record(level, fmt, args, extra, exc_info, inc_stackinfo, inc_multiproc)
    logstr = log_record['defaultfmt'].format(**log_record)
    if self.keep_history:
        self.history.append(logstr)
    log_funcs = self.config[level]
    to_remove = []
    for lf in log_funcs:
        try:
            lf.send(logstr)
        except StopIteration:
            # The coroutine has finished; defer removal so we do not
            # mutate the set while iterating it.
            to_remove.append(lf)
    for lf in to_remove:
        self.remove(level, lf)
        self.info('Logging function {} removed from level {}', lf, level)
Send a log message to all of the logging functions for a given level as well as adding the message to this logger instance s history .
49,023
def log(self, *args, **kwargs):
    """Dispatch to _log, honouring the instance's error-suppression setting.

    Returns True when logging succeeded, False when an error was suppressed.
    Fix: the suppression clause was a bare ``except:``, which also swallowed
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    if not self.suppress_errors:
        self._log(*args, **kwargs)
        return True
    try:
        self._log(*args, **kwargs)
    except Exception:
        return False
    return True
Do logging but handle error suppression .
49,024
def debug(self, msg, *args, **kwargs):
    """Log *msg* at DEBUG level.

    Stack info is included by default; pass inc_stackinfo=False to omit it.
    """
    kwargs.setdefault('inc_stackinfo', True)
    self.log(DEBUG, msg, args, **kwargs)
Log a message with DEBUG level . Automatically includes stack info unless it is specifically not included .
49,025
def error(self, msg, *args, **kwargs):
    """Log *msg* at ERROR level.

    Stack and multiprocessing info are included by default; pass
    inc_stackinfo=False / inc_multiproc=False to omit them.
    """
    kwargs.setdefault('inc_stackinfo', True)
    kwargs.setdefault('inc_multiproc', True)
    self.log(ERROR, msg, args, **kwargs)
Log a message with ERROR level . Automatically includes stack and process info unless they are specifically not included .
49,026
def critical(self, msg, *args, **kwargs):
    """Log *msg* at CRITICAL level.

    Stack and multiprocessing info are included by default; pass
    inc_stackinfo=False / inc_multiproc=False to omit them.
    """
    kwargs.setdefault('inc_stackinfo', True)
    kwargs.setdefault('inc_multiproc', True)
    self.log(CRITICAL, msg, args, **kwargs)
Log a message with CRITICAL level . Automatically includes stack and process info unless they are specifically not included .
49,027
def multi(self, lvl_list, msg, *args, **kwargs):
    """Emit the same message once at every level in *lvl_list*."""
    for lvl in lvl_list:
        self.log(lvl, msg, args, **kwargs)
Log a message at multiple levels
49,028
def all(self, msg, *args, **kwargs):
    """Log *msg* at every known log level (delegates to multi with ALL)."""
    self.multi(ALL, msg, args, **kwargs)
Log a message at every known log level
49,029
def _map_value(self, value):
    """Recursively convert raw deserialized REST values into typed objects.

    Lists and dicts are mapped element-wise. Dicts that look like node
    payloads (metadata with labels, plus a 'self' link) become neo4j.Node;
    dicts that look like relationship payloads ('type' plus start/end URLs)
    become neo4j.Relationship. Strings are normalised through ustr; anything
    else passes through unchanged.
    """
    if isinstance(value, list):
        out = []
        for c in value:
            out.append(self._map_value(c))
        return out
    elif isinstance(value, dict) and 'metadata' in value and 'labels' in value['metadata'] and 'self' in value:
        # Node payload: id lives under metadata.
        return neo4j.Node(ustr(value['metadata']['id']), value['metadata']['labels'], value['data'])
    elif isinstance(value, dict) and 'metadata' in value and 'type' in value and 'self' in value:
        # Relationship payload: start/end are URLs whose last path segment
        # is the node id.
        return neo4j.Relationship(ustr(value['metadata']['id']), value['type'], value['start'].split('/')[-1], value['end'].split('/')[-1], value['data'])
    elif isinstance(value, dict):
        out = {}
        for k, v in value.items():
            out[k] = self._map_value(v)
        return out
    elif isinstance(value, str):
        return ustr(value)
    else:
        return value
Maps a raw deserialized row to proper types
49,030
def get_deprecated_gene_ids(filename):
    """Parse a whitespace-separated two-column file mapping deprecated gene
    IDs (first column) to their replacements (second column).
    """
    deprecated = {}
    with open(filename) as handle:
        for line in handle:
            fields = line.strip().split()
            deprecated[fields[0]] = fields[1]
    return deprecated
Gets a dict mapping gene IDs used in DDD datasets that have been deprecated to their replacement gene IDs.
49,031
def required_from_env(key):
    """Fetch *key* from the environment, raising ValueError when absent or empty."""
    val = os.environ.get(key)
    if val:
        return val
    raise ValueError("Required argument '{}' not supplied and not found in environment variables".format(key))
Retrieve a required variable from the current environment variables .
49,032
def get(self, url):
    """Make an HTTP GET request to the Reader API at *url* via the OAuth session."""
    logger.debug('Making GET request to %s', url)
    return self.oauth_session.get(url)
Make a HTTP GET request to the Reader API .
49,033
def post(self, url, post_params=None):
    """Make an HTTP POST request to the Reader API.

    post_params: optional mapping of form fields; may be omitted.
    Fix: the declared default of None previously crashed in urlencode with
    a TypeError; an absent mapping now encodes as an empty body.
    """
    params = urlencode(post_params or {})
    logger.debug('Making POST request to %s with body %s', url, params)
    return self.oauth_session.post(url, data=params)
Make a HTTP POST request to the Reader API .
49,034
def delete(self, url):
    """Make an HTTP DELETE request to the Readability API at *url*."""
    logger.debug('Making DELETE request to %s', url)
    return self.oauth_session.delete(url)
Make a HTTP DELETE request to the Readability API .
49,035
def get_article(self, article_id):
    """Fetch the single article identified by *article_id*."""
    return self.get(self._generate_url('articles/{0}'.format(article_id)))
Get a single article represented by article_id .
49,036
def get_bookmarks(self, **filters):
    """Get bookmarks for the current user, optionally filtered.

    Only filters in ACCEPTED_BOOKMARK_FILTERS are forwarded.
    """
    accepted = filter_args_to_dict(filters, ACCEPTED_BOOKMARK_FILTERS)
    return self.get(self._generate_url('bookmarks', query_params=accepted))
Get Bookmarks for the current user .
49,037
def get_bookmark(self, bookmark_id):
    """Fetch the single bookmark identified by *bookmark_id*."""
    return self.get(self._generate_url('bookmarks/{0}'.format(bookmark_id)))
Get a single bookmark represented by bookmark_id .
49,038
def add_bookmark(self, url, favorite=False, archive=False, allow_duplicates=True):
    """Add *url* as a bookmark for the authenticated user.

    Boolean flags are sent as 0/1 form fields.
    """
    payload = {
        "url": url,
        "favorite": int(favorite),
        "archive": int(archive),
        "allow_duplicates": int(allow_duplicates),
    }
    return self.post(self._generate_url('bookmarks'), payload)
Adds given bookmark to the authenticated user .
49,039
def update_bookmark(self, bookmark_id, favorite=None, archive=None, read_percent=None):
    """Update the given bookmark; it must belong to the current user.

    Only parameters that are not None are sent. A read_percent that cannot
    be converted to float is silently dropped.
    """
    rdb_url = self._generate_url('bookmarks/{0}'.format(bookmark_id))
    params = {}
    if favorite is not None:
        # NOTE(review): `== True` means truthy non-True values (e.g. "yes")
        # map to 0, not 1 - confirm this strictness is intentional.
        params['favorite'] = 1 if favorite == True else 0
    if archive is not None:
        params['archive'] = 1 if archive == True else 0
    if read_percent is not None:
        try:
            params['read_percent'] = float(read_percent)
        except ValueError:
            pass
    return self.post(rdb_url, params)
Updates given bookmark . The requested bookmark must belong to the current user .
49,040
def delete_bookmark(self, bookmark_id):
    """Delete the single bookmark identified by *bookmark_id*."""
    return self.delete(self._generate_url('bookmarks/{0}'.format(bookmark_id)))
Delete a single bookmark represented by bookmark_id .
49,041
def get_bookmark_tags(self, bookmark_id):
    """Retrieve the tags that have been applied to the given bookmark."""
    return self.get(self._generate_url('bookmarks/{0}/tags'.format(bookmark_id)))
Retrieve tags that have been applied to a bookmark .
49,042
def add_tags_to_bookmark(self, bookmark_id, tags):
    """Apply *tags* to the given bookmark."""
    endpoint = self._generate_url('bookmarks/{0}/tags'.format(bookmark_id))
    return self.post(endpoint, {'tags': tags})
Add tags to a bookmark.
49,043
def delete_tag_from_bookmark(self, bookmark_id, tag_id):
    """Remove a single tag from the given bookmark."""
    endpoint = self._generate_url('bookmarks/{0}/tags/{1}'.format(bookmark_id, tag_id))
    return self.delete(endpoint)
Remove a single tag from a bookmark .
49,044
def get_tag(self, tag_id):
    """Fetch the single tag identified by *tag_id*."""
    return self.get(self._generate_url('tags/{0}'.format(tag_id)))
Get a single tag represented by tag_id .
49,045
def post(self, url, post_params=None):
    """Make an HTTP POST request to the Parser API.

    The instance's API token is injected into the form body.
    Fixes: the caller's mapping was mutated in place (the token leaked back
    into the caller's dict), and the declared None default crashed with a
    TypeError on item assignment. The mapping is now copied first.
    """
    payload = dict(post_params or {})
    payload['token'] = self.token
    params = urlencode(payload)
    logger.debug('Making POST request to %s with body %s', url, params)
    return requests.post(url, data=params)
Make an HTTP POST request to the Parser API .
49,046
def _generate_url(self, resource, query_params=None):
    """Build the full API URL for *resource*, appending the token and any params."""
    qualified = '{resource}?token={token}'.format(resource=resource, token=self.token)
    if query_params:
        qualified = qualified + "&{}".format(urlencode(query_params))
    return self.base_url_template.format(qualified)
Build the url to resource .
49,047
def get_article(self, url=None, article_id=None, max_pages=25):
    """GET the parsed representation of an article from the parser endpoint.

    Identify the article by *url* or *article_id*; max_pages caps parsing.
    """
    query_params = {key: val for key, val in (('url', url), ('article_id', article_id)) if val is not None}
    query_params['max_pages'] = max_pages
    return self.get(self._generate_url('parser', query_params=query_params))
Send a GET request to the parser endpoint of the parser API to get back the representation of an article .
49,048
def post_article_content(self, content, url, max_pages=25):
    """POST raw article *content* to the Parser API to have it parsed."""
    payload = {'doc': content, 'max_pages': max_pages}
    endpoint = self._generate_url('parser', {"url": url})
    return self.post(endpoint, post_params=payload)
POST content to be parsed to the Parser API .
49,049
def get_article_status(self, url=None, article_id=None):
    """HEAD the parser endpoint to get the article's parse status."""
    candidates = (('url', url), ('article_id', article_id))
    query_params = {key: val for key, val in candidates if val is not None}
    return self.head(self._generate_url('parser', query_params=query_params))
Send a HEAD request to the parser endpoint of the parser API to get the article's status.
49,050
def get_confidence(self, url=None, article_id=None):
    """GET the parsing confidence for an article from the confidence endpoint."""
    candidates = (('url', url), ('article_id', article_id))
    query_params = {key: val for key, val in candidates if val is not None}
    return self.get(self._generate_url('confidence', query_params=query_params))
Send a GET request to the confidence endpoint of the Parser API .
49,051
def save(f, arr, vocab):
    """Write the embedding rows of *arr* to *f* in vocab order.

    *vocab* yields (word, row-index) pairs; lines are newline-separated
    with no trailing newline.
    """
    pairs = iter(vocab)
    first_word, first_idx = next(pairs)
    _write_line(f, arr[first_idx], first_word)
    for word, idx in pairs:
        f.write(b'\n')
        _write_line(f, arr[idx], word)
Save word embedding file .
49,052
def convert(outputfile, inputfile, to_format, from_format):
    """Convert a pretrained word-embedding file from one format to another.

    Format/binary flags are looked up in the _input_choices/_output_choices
    registries.
    """
    in_fmt, in_binary = _input_choices[from_format][1], _input_choices[from_format][2]
    emb = word_embedding.WordEmbedding.load(inputfile, format=in_fmt, binary=in_binary)
    out_fmt, out_binary = _output_choices[to_format][1], _output_choices[to_format][2]
    emb.save(outputfile, format=out_fmt, binary=out_binary)
Convert pretrained word embedding file in one format to another .
49,053
def check_format(inputfile):
    """Detect and report the word-embedding format of *inputfile*."""
    fmt = word_embedding.classify_format(inputfile)
    if fmt == word_embedding._glove:
        _echo_format_result('glove')
    elif fmt == word_embedding._word2vec_bin:
        _echo_format_result('word2vec-binary')
    elif fmt == word_embedding._word2vec_text:
        _echo_format_result('word2vec-text')
    else:
        assert not "Should not get here!"
Check format of inputfile .
49,054
def list():
    """Print the available input format choices, aligned in a column."""
    choice_len = max(map(len, _input_choices.keys()))
    tmpl = " {:<%d}: {}\n" % choice_len
    lines = [tmpl.format(key, val[0]) for key, val in six.iteritems(_input_choices)]
    click.echo(''.join(lines))
List available format .
49,055
def main():
    """Run the default action of the twitter command line tool."""
    from twitter.cmdline import Action, OPTIONS
    client = Twitter.from_oauth_file()
    Action()(client, OPTIONS)
Do the default action of twitter command .
49,056
def from_oauth_file(cls, filepath=None):
    """Build a Twitter API client from credentials stored in an OAuth file.

    Defaults to ~/.twitter_oauth (USERPROFILE is honoured on Windows).
    """
    if filepath is None:
        home = os.environ.get('HOME', os.environ.get('USERPROFILE', ''))
        filepath = os.path.join(home, '.twitter_oauth')
    oauth_token, oauth_token_secret = read_token_file(filepath)
    credentials = OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET)
    return cls(auth=credentials, api_version='1.1', domain='api.twitter.com')
Get an object bound to the Twitter API using your own credentials .
49,057
def get_version(filepath='src/birding/version.py'):
    """Extract the package version from *filepath* without importing it,
    which avoids dependency issues at setup time.
    """
    with open(get_abspath(filepath)) as version_file:
        # NOTE(review): the search pattern here is a bare name `r`, which
        # looks like a garbled raw-string literal; it presumably was a regex
        # with a (?P<version>...) named group - restore before use.
        return re.search(r, version_file.read()).group('version')
Get version without import which avoids dependency issues .
49,058
def search(self, q, **kw):
    """Query Gnip for *q* and return the deserialized JSON response.

    Instance params are merged in first, then any extra keyword arguments.
    """
    endpoint = '{base_url}/search/{stream}'.format(**vars(self))
    query = {'q': q}
    query.update(self.params)
    query.update(kw)
    response = self.session.get(endpoint, params=query)
    response.raise_for_status()
    return response.json()
Search Gnip for given query returning deserialized response .
49,059
def dump(result):
    """Render a search result (dict with 'results' or a plain status list)
    into a debugging string, one formatted status per blank-line-separated
    entry.
    """
    if isinstance(result, dict):
        statuses = result['results']
    else:
        statuses = result
    status_str_list = []
    for status in statuses:
        # NOTE(review): the dedent argument here is a bare name `u`, which
        # looks like a garbled unicode template literal containing
        # {screen_name} / {text} placeholders - restore before use.
        status_str_list.append(textwrap.dedent(u).strip().format(screen_name=status['actor']['preferredUsername'], text=status['body']))
    return u'\n\n'.join(status_str_list)
Dump result into a string useful for debugging .
49,060
def shelf_from_config(config, **default_init):
    """Instantiate a Shelf dynamically from *config*.

    shelf_init values override *default_init*; an optional shelf_expiration
    is applied when the shelf supports set_expiration.
    """
    shelf_cls = import_name(config['shelf_class'], default_ns='birding.shelf')
    init_kwargs = dict(default_init)
    init_kwargs.update(config['shelf_init'])
    shelf = shelf_cls(**init_kwargs)
    if hasattr(shelf, 'set_expiration') and 'shelf_expiration' in config:
        shelf.set_expiration(config['shelf_expiration'])
    return shelf
Get a Shelf instance dynamically based on config .
49,061
def unpack(self, key, value):
    """Return the stored value, raising KeyError when its freshness expired.

    *value* is a (stored_value, freshness) pair.
    """
    stored, freshness = value
    if not self.is_fresh(freshness):
        raise KeyError('{} (stale)'.format(key))
    return stored
Unpack and return value only if it is fresh .
49,062
def is_fresh(self, freshness):
    """Return True while *freshness* is within the expiry window.

    Always True when no expiry is configured (expire_after is None).
    """
    if self.expire_after is None:
        return True
    age = self.freshness() - freshness
    return age <= self.expire_after
Return False if given freshness value has expired else True .
49,063
def is_first_instance_aws():
    """Return True if this EC2 instance is the first (lowest instance id)
    in-service member of its auto-scaling group.

    Raises StackInterrogationException on metadata/AWS API failures and
    InstanceNotInAsgException when the instance is not in exactly one ASG.
    """
    # Query the EC2 instance metadata service for identity details.
    try:
        instance_details = requests.get('http://169.254.169.254/latest/dynamic/instance-identity/document', timeout=5).json()
        instance_id = instance_details['instanceId']
        instance_region = instance_details['region']
    except (requests.RequestException, ValueError, KeyError) as e:
        raise StackInterrogationException(e)
    # Resolve which auto-scaling group this instance belongs to.
    try:
        autoscaling_client = boto3.client('autoscaling', region_name=instance_region)
        response = autoscaling_client.describe_auto_scaling_instances(InstanceIds=[instance_id])
        assert len(response['AutoScalingInstances']) == 1
        autoscaling_group = response['AutoScalingInstances'][0]['AutoScalingGroupName']
    except ClientError as e:
        raise StackInterrogationException(e)
    except AssertionError:
        raise InstanceNotInAsgException()
    # Collect ids of all InService instances in the group, sorted so the
    # "first" instance is well defined.
    try:
        response = autoscaling_client.describe_auto_scaling_groups(AutoScalingGroupNames=[autoscaling_group])
        assert len(response['AutoScalingGroups']) == 1
        autoscaling_group_instance_ids = sorted(instance['InstanceId'] for instance in response['AutoScalingGroups'][0]['Instances'] if instance['LifecycleState'] == 'InService')
    except (ClientError, AssertionError) as e:
        raise StackInterrogationException(e)
    return bool(autoscaling_group_instance_ids and autoscaling_group_instance_ids[0] == instance_id)
Returns True if the current instance is the first instance in the ASG group sorted by instance_id .
49,064
def is_first_instance_k8s(current_pod_name=None):
    """Return True if the current pod is the first (lowest name) running
    replica of this app in the Kubernetes cluster.

    The pod name defaults to the POD_NAME environment variable; raises
    StackInterrogationException when it is unknown or the API fails.
    """
    current_pod_name = current_pod_name or os.environ.get('POD_NAME')
    if not current_pod_name:
        raise StackInterrogationException('Pod name not known')
    namespace = 'money-to-prisoners-%s' % settings.ENVIRONMENT
    try:
        # Use the in-cluster service-account configuration.
        load_incluster_config()
    except ConfigException as e:
        raise StackInterrogationException(e)
    try:
        response = k8s_client.CoreV1Api().list_namespaced_pod(
            namespace=namespace,
            label_selector='app=%s' % settings.APP,
            watch=False,
        )
    except ApiException as e:
        raise StackInterrogationException(e)
    # Only Running pods count; sorting makes "first" deterministic.
    pod_names = sorted(pod.metadata.name for pod in filter(lambda pod: pod.status.phase == 'Running', response.items))
    return bool(pod_names and pod_names[0] == current_pod_name)
Returns True if the current pod is the first replica in Kubernetes cluster .
49,065
def splitarg(args):
    """Split comma-separated arguments for backwards compatibility with the
    original ArcGet command line tool.

    Empty fragments are dropped; a falsy *args* is returned unchanged.
    """
    if not args:
        return args
    split = []
    for arg in args:
        if ',' in arg:
            split.extend(piece for piece in arg.split(',') if piece)
        elif arg:
            split.append(arg)
    return split
This function will split arguments separated by spaces or commas to be backwards compatible with the original ArcGet command line tool
49,066
def logout(request, template_name=None, next_page=None, redirect_field_name=REDIRECT_FIELD_NAME, current_app=None, extra_context=None):
    """Log out the user, then redirect (when a destination is known) or
    render *template_name* with a 'Logged out' context.

    The redirect target comes from *next_page*, overridden by the redirect
    field in POST/GET data; unsafe URLs fall back to the current path.
    """
    auth_logout(request)
    if next_page is not None:
        next_page = resolve_url(next_page)
    if (redirect_field_name in request.POST or redirect_field_name in request.GET):
        next_page = request.POST.get(redirect_field_name, request.GET.get(redirect_field_name))
        # Security check: never redirect to another host.
        if not is_safe_url(url=next_page, host=request.get_host()):
            next_page = request.path
    if next_page:
        return HttpResponseRedirect(next_page)
    current_site = get_current_site(request)
    context = {
        'site': current_site,
        'site_name': current_site.name,
        'title': _('Logged out'),
    }
    if extra_context is not None:
        context.update(extra_context)
    if current_app is not None:
        request.current_app = current_app
    return TemplateResponse(request, template_name, context)
Logs out the user .
49,067
def get_config(filepath=None, default_loader=None, on_missing=None):
    """Return the current birding configuration dict.

    Loads defaults via *default_loader*, overlays the YAML file at
    *filepath* when it exists, validates against SCHEMA, and caches the
    result keyed on the argument triple.
    """
    cache_key = (filepath, default_loader, on_missing)
    if CACHE.get(cache_key) is not None:
        return CACHE.get(cache_key)
    logger = logging.getLogger('birding')
    if filepath is None:
        filepath = BIRDING_CONF
    if default_loader is None:
        default_loader = get_defaults_file
    if on_missing is None:
        on_missing = logger.info
    logger.info('Looking for configuration file: {}'.format(os.path.abspath(filepath)))
    if not os.path.exists(filepath):
        on_missing('No {} configuration file found.'.format(filepath))
        if filepath != BIRDING_CONF_DEFAULT:
            # An explicitly requested non-default path must exist: os.stat
            # raises to fail loudly.
            os.stat(filepath)
    config = yaml.safe_load(default_loader())
    tv.validate(SCHEMA, config)
    if os.path.exists(filepath):
        file_config = yaml.safe_load(open(filepath))
        if file_config:
            config = overlay(file_config, config)
            # Re-validate after the user file is overlaid onto defaults.
            tv.validate(SCHEMA, config)
    CACHE.put(cache_key, config)
    return config
Get a dict for the current birding configuration .
49,068
def get_defaults_file(*a, **kw):
    """Return a rewound in-memory file containing the default YAML config."""
    buf = StringIO()
    buf.write(get_defaults_str(*a, **kw))
    buf.seek(0)
    return buf
Get a file object with YAML data of configuration defaults .
49,069
def get_defaults_str(raw=None, after='Defaults::'):
    """Return the YAML text of configuration defaults.

    Takes the text following *after* from *raw* (the module docstring by
    default), dedents and strips it.

    NOTE(review): uses the Python 2 `unicode` builtin; under Python 3 this
    raises NameError unless `unicode` is aliased elsewhere in the module.
    """
    if raw is None:
        raw = __doc__
    return unicode(textwrap.dedent(raw.split(after)[-1]).strip())
Get the string YAML representation of configuration defaults .
49,070
def overlay(upper, lower):
    """Recursively merge *upper* onto *lower*; upper values win.

    Nested mappings are merged; overlaying a mapping onto a non-mapping
    raises ValueError. Keys only in *lower* are carried through.
    """
    merged = {}
    for key, value in upper.items():
        if is_mapping(value):
            base = lower.get(key, {})
            if not is_mapping(base):
                msg = 'Attempting to overlay a mapping on a non-mapping: {}'
                raise ValueError(msg.format(key))
            merged[key] = overlay(value, base)
        else:
            merged[key] = value
    for key in lower:
        if key not in merged:
            merged[key] = lower[key]
    return merged
Return the overlay of upper dict onto lower dict .
49,071
def import_name(name, default_ns=None):
    """Import and return the object referenced by dotted string *name*.

    A bare (dot-free) name imports a module, unless *default_ns* is given,
    in which case it is resolved as an attribute of that namespace.
    """
    if '.' not in name:
        if default_ns is None:
            return importlib.import_module(name)
        name = default_ns + '.' + name
    module_name, object_name = name.rsplit('.', 1)
    module = importlib.import_module(module_name)
    return getattr(module, object_name)
Import an object based on the dotted string .
49,072
def follow_topic_from_config():
    """Read the ResultTopicBolt kafka config, then dispatch to follow_topic."""
    config = get_config()['ResultTopicBolt']
    kafka_class = import_name(config['kafka_class'])
    return follow_topic(kafka_class, config['topic'], **config['kafka_init'])
Read kafka config then dispatch to follow_topic .
49,073
def follow_topic(kafka_class, name, retry_interval=1, **kafka_init):
    """Consume kafka topic *name* and dump each message to stdio.

    Connection attempts are retried every *retry_interval* seconds while
    the failure looks transient (see should_try_kafka_again); other errors
    propagate.
    """
    while True:
        try:
            client = kafka_class(**kafka_init)
            topic = client.topics[name]
            consumer = topic.get_simple_consumer(reset_offset_on_start=True)
        except Exception as e:
            if not should_try_kafka_again(e):
                raise
            with flushing(sys.stderr):
                print('Failed attempt to connect to Kafka. Will retry ...', file=sys.stderr)
            sleep(retry_interval)
        else:
            with flushing(sys.stdout):
                print('Connected to Kafka.')
            break
    dump = Dump()
    for message in consumer:
        with flushing(sys.stdout, sys.stderr):
            # Only dump messages that deserialize to a truthy status.
            status = load(message.value)
            if status:
                dump(status)
Dump each message from kafka topic to stdio .
49,074
def follow_fd(fd):
    """Dump each non-blank line read from *fd* to stdio."""
    dump = Dump()
    for line in fd:
        if not line.strip():
            continue
        with flushing(sys.stdout, sys.stderr):
            status = load(line)
            if status:
                dump(status)
Dump each line of input to stdio .
49,075
def should_try_kafka_again(error):
    """Return True when *error* is a retryable KafkaException.

    Retryable errors are identified by their message prefix.
    """
    retryable_prefix = 'Unable to retrieve'
    return isinstance(error, KafkaException) and str(error).startswith(retryable_prefix)
Determine if the error means to retry or fail True to retry .
49,076
def check_valid(line0, line1):
    """Check whether *line0* looks like a valid GloVe data line.

    A valid line has at least a token plus two components, where the
    trailing fields parse as floats. *line1* is accepted for interface
    compatibility but is not inspected.

    Fix: the original wrapped a bare ``map(float, ...)`` in try/except -
    ``map`` is lazy in Python 3, so the floats were never evaluated and
    the validation could never fail; the loop below forces evaluation.
    """
    data = line0.strip().split(b' ')
    if len(data) <= 2:
        return False
    try:
        for field in data[2:]:
            float(field)
    except ValueError:
        return False
    return True
Check if a file is valid Glove format .
49,077
def load_with_vocab(fin, vocab, dtype=np.float32):
    """Load a word-embedding text file, keeping only tokens present in *vocab*.

    Returns an array of shape (len(vocab), dim) where row vocab[token]
    holds that token's vector; rows for tokens never seen remain NaN.
    Returns None when no known token appears. Raises ParseError on
    malformed lines or inconsistent vector sizes.

    Fix: ``np.NaN`` was removed in NumPy 2.0; use ``np.nan``.
    """
    arr = None
    for line in fin:
        try:
            token, v = _parse_line(line, dtype)
        except (ValueError, IndexError):
            raise ParseError(b'Parsing error in line: ' + line)
        if token in vocab:
            if arr is None:
                # Allocate lazily once the vector dimensionality is known.
                arr = np.empty((len(vocab), len(v)), dtype=dtype)
                arr.fill(np.nan)
            elif arr.shape[1] != len(v):
                raise ParseError(b'Vector size did not match in line: ' + line)
            arr[vocab[token], :] = np.array(v, dtype=dtype).reshape(1, -1)
    return arr
Load word embedding file with predefined vocabulary
49,078
def load(fin, dtype=np.float32, max_vocab=None):
    """Load a word-embedding text file into an (array, vocab dict) pair.

    Duplicate tokens are warned about and skipped; at most *max_vocab*
    entries are read. Returns (None, {}) for empty input. Raises ParseError
    on malformed lines or inconsistent vector sizes.

    Fix: the original grew the matrix with ``np.append`` per row, copying
    the whole array each time (O(n^2)); rows are now collected in a list
    and the matrix is built once at the end.
    """
    vocab = {}
    rows = []
    dim = None
    for line in fin:
        if max_vocab is not None and len(rows) >= max_vocab:
            break
        try:
            token, v = _parse_line(line, dtype)
        except (ValueError, IndexError):
            raise ParseError(b'Parsing error in line: ' + line)
        if token in vocab:
            parse_warn(b'Duplicated vocabulary ' + token)
            continue
        if dim is None:
            dim = len(v)
        elif dim != len(v):
            raise ParseError(b'Vector size did not match in line: ' + line)
        vocab[token] = len(rows)
        rows.append(v)
    arr = np.array(rows, dtype=dtype) if rows else None
    return arr, vocab
Load word embedding file .
49,079
def set_version(context: Context, version=None, bump=False):
    """Update the MTP-common version recorded in source and package files.

    Either bump the last component of the current VERSION or set an
    explicit 'N.N.N' string. Raises TaskError on conflicting or malformed
    input.
    """
    if bump and version:
        raise TaskError('You cannot bump and set a specific version')
    if bump:
        from mtp_common import VERSION
        version = list(VERSION)
        version[-1] += 1
    else:
        try:
            version = list(map(int, version.split('.')))
            assert len(version) == 3
        except (AttributeError, ValueError, AssertionError):
            raise TaskError('Version must be in the form N.N.N')
    dotted_version = '.'.join(map(str, version))
    # (pattern, replacement, file) triples for each place the version lives.
    replacements = [
        (r'^VERSION =.*$', 'VERSION = (%s)' % ', '.join(map(str, version)), 'mtp_common/__init__.py'),
        (r'^ "version":.*$', ' "version": "%s",' % dotted_version, 'package.json'),
    ]
    for search, replacement, path in replacements:
        with open(os.path.join(root_path, path)) as f:
            content = f.read()
        content = re.sub(search, replacement, content, flags=re.MULTILINE)
        with open(os.path.join(root_path, path), 'w') as f:
            f.write(content)
    context.debug('Updated version to %s' % dotted_version)
Updates the version of MTP - common
49,080
def docs(context: Context):
    """Build the HTML documentation with Sphinx, installing it on demand."""
    try:
        from sphinx.application import Sphinx
    except ImportError:
        # Sphinx is not a hard dependency; install it for this build.
        context.pip_command('install', 'Sphinx')
        from sphinx.application import Sphinx
    context.shell('cp', 'README.rst', 'docs/README.rst')
    builder = Sphinx('docs', 'docs', 'docs/build', 'docs/build/.doctrees', buildername='html', parallel=True, verbosity=context.verbosity)
    builder.build()
Generates static documentation
49,081
def authenticate(self, username=None, password=None):
    """Return a valid User when authentication succeeds, otherwise None."""
    data = api_client.authenticate(username, password)
    if data:
        return User(data.get('pk'), data.get('token'), data.get('user_data'))
    return None
Returns a valid MojUser if the authentication is successful or None if the credentials were wrong .
49,082
def get_client_token():
    """Request and cache the NOMIS API client token from mtp-api.

    Returns the token string, or None when it cannot be loaded or has
    expired. A settings override (NOMIS_API_CLIENT_TOKEN) short-circuits
    the lookup; otherwise the module-level cache is refreshed when the
    token is missing or expires within a day.
    """
    if getattr(settings, 'NOMIS_API_CLIENT_TOKEN', ''):
        return settings.NOMIS_API_CLIENT_TOKEN
    global client_token
    # Refresh when no token is cached, or it expires in under a day.
    if not client_token or client_token['expires'] and client_token['expires'] - now() < datetime.timedelta(days=1):
        session = None
        try:
            session = api_client.get_authenticated_api_session(settings.TOKEN_RETRIEVAL_USERNAME, settings.TOKEN_RETRIEVAL_PASSWORD)
            client_token = session.get('/tokens/nomis/').json()
        except (requests.RequestException, HttpNotFoundError, ValueError, AttributeError):
            logger.exception('Cannot load NOMIS API client token')
            return None
        finally:
            # Revoke the temporary session's token regardless of outcome.
            if session and getattr(session, 'access_token', None):
                api_client.revoke_token(session.access_token)
        if client_token.get('expires'):
            client_token['expires'] = parse_datetime(client_token['expires'])
            # NOTE(review): nesting reconstructed from flattened source -
            # confirm the expiry check only applies when 'expires' is set.
            if client_token['expires'] < now():
                logger.error('NOMIS API client token from mtp-api had expired')
                return None
    return client_token['token']
Requests and stores the NOMIS API client token from mtp - api
49,083
def include(self, *fields, **kwargs):
    """Return a new QuerySet instance that will include related objects.

    Each field may be a ``__``-separated path through related models.
    Calling ``include(None)`` clears every previously requested include.
    The only accepted keyword argument is ``limit_includes``.
    """
    clone = self._clone()
    if self.query.filter_is_sticky:
        clone.query.filter_is_sticky = True
    clone._include_limit = kwargs.pop('limit_includes', None)
    assert not kwargs, '"limit_includes" is the only accepted kwargs. Eat your heart out 2.7'
    if fields == (None,):
        # reset requested: drop the annotations added for earlier includes
        for field in clone._includes.keys():
            clone.query._annotations.pop('__{}'.format(field.name), None)
        clone._includes.clear()
        return clone
    for name in fields:
        ctx, model = clone._includes, clone.model
        # walk the relation path, building a nested include tree
        for part in name.split('__'):
            field = model._meta.get_field(part)
            if isinstance(field, ForeignObjectRel) and field.is_hidden():
                raise ValueError(
                    'Hidden field "{!r}" has no descriptor and therefore cannot be included'.format(field))
            model = field.related_model
            ctx = ctx.setdefault(field, OrderedDict())
    for field in clone._includes.keys():
        clone._include(field)
    return clone
Return a new QuerySet instance that will include related objects .
49,084
def revoke_token(access_token):
    """Ask the API to delete this access token and its refresh token.

    Returns ``True`` when the API acknowledged the revocation with HTTP 200.
    """
    payload = {
        'token': access_token,
        'client_id': settings.API_CLIENT_ID,
        'client_secret': settings.API_CLIENT_SECRET,
    }
    response = requests.post(get_revoke_token_url(), data=payload, timeout=15)
    return response.status_code == 200
Instructs the API to delete this access token and associated refresh token
49,085
def lru_cache(fn):
    """Memoise ``fn`` keyed on the pickled form of its positional arguments.

    Unlike ``functools.lru_cache`` this copes with mutable (but picklable)
    arguments.  Attributes already set on ``fn`` are copied onto the wrapper
    so it can be applied either as a decorator or at runtime.  The cache is
    exposed as ``memoized_fn.cache`` and is unbounded.
    """
    @wraps(fn)
    def memoized_fn(*args):
        key = pickle.dumps(args)
        if key not in memoized_fn.cache:
            memoized_fn.cache[key] = fn(*args)
        return memoized_fn.cache[key]

    # preserve any attributes previously attached to the wrapped function
    for attr, value in fn.__dict__.items():
        setattr(memoized_fn, attr, value)
    memoized_fn.cache = {}
    return memoized_fn
Memoization wrapper that can handle function attributes and mutable arguments, and can be applied either as a decorator or at runtime.
49,086
def auth(alias=None, url=None, cfg="~/.xnat_auth"):
    """Read connection details from an xnat_auth XML file.

    Exactly one of ``alias`` (the element name) or ``url`` (matched against
    a ``<url>`` child) must be supplied.  Returns an ``XnatAuth`` carrying
    url, username and password.  Raises ``ValueError`` on bad arguments and
    ``AuthError`` when the file, or a unique set of credentials with exactly
    one url/username/password each, cannot be found.
    """
    if not alias and not url:
        raise ValueError('you must provide an alias or url argument')
    if alias and url:
        raise ValueError('cannot provide both alias and url arguments')
    cfg = os.path.expanduser(cfg)
    if not os.path.exists(cfg):
        raise AuthError("could not locate auth file %s" % cfg)
    tree = etree.parse(cfg)
    if alias:
        res = tree.findall("./%s" % alias)
    else:
        res = tree.findall("./*/[url='%s']" % url)
    if not res:
        raise AuthError("failed to locate xnat credentials within %s" % cfg)
    elif len(res) > 1:
        raise AuthError("found too many sets of credentials within %s" % cfg)
    res = res.pop()

    def _single(tag):
        # the credentials element must contain exactly one <tag> child
        found = res.findall(tag)
        if not found:
            raise AuthError("no %s for %s in %s" % (tag, alias, cfg))
        elif len(found) > 1:
            raise AuthError("too many %ss for %s in %s" % (tag, alias, cfg))
        return found.pop().text

    return XnatAuth(url=_single("url"), username=_single("username"),
                    password=_single("password"))
Read connection details from an xnat_auth XML file
49,087
def accession(auth, label, project=None):
    """Return the Accession ID for an Experiment label.

    Raises ``IndexError`` when no matching experiment exists.
    """
    matches = list(experiments(auth, label, project))
    return matches[0].id
Get the Accession ID for any Experiment label.
49,088
def extract(zf, content, out_dir='.'):
    """Extract a Java 1.6 XNAT ZIP archive in Python.

    Java can emit archives whose member header offsets wrapped past 4 GiB;
    this rebases offsets on the first member and adds 2**32 each time they
    go backwards.  Members gzip-compressed without a ``.gz`` suffix are
    transparently decompressed.  Every member's basename is written flat
    into ``out_dir``.  On a bad archive, the raw ``content`` is dumped to a
    temporary file and ``DownloadError`` is raised.
    """
    previous_header_offset = 0
    # each backwards jump in offsets means they wrapped past 2**32 once more
    compensation = Namespace(value=2 ** 32, factor=0)
    for i, member in enumerate(zf.infolist()):
        if i == 0:
            concat = member.header_offset
        member.header_offset -= concat
        if previous_header_offset > member.header_offset:
            compensation.factor += 1
        previous_header_offset = member.header_offset
        member.header_offset += compensation.value * compensation.factor
        try:
            buf = io.BytesIO(zf.read(member.filename))
        except zipfile.BadZipfile:
            # keep the raw payload around for post-mortem inspection
            with tf.NamedTemporaryFile(dir=out_dir, prefix="xnat",
                                       suffix=".zip", delete=False) as dump:
                content.seek(0)
                dump.write(content.read())
                raise DownloadError("bad zip file, written to %s" % dump.name)
        if not member.filename.endswith(".gz"):
            # some members are gzipped despite the missing suffix; probe them
            try:
                gz = gzip.GzipFile(fileobj=buf, mode="rb")
                gz.read()
                buf = gz
            except IOError:
                pass
        buf.seek(0)
        target = os.path.join(out_dir, os.path.basename(member.filename))
        with open(target, "wb") as dest:
            dest.write(buf.read())
Extract a Java 1.6 XNAT ZIP archive in Python.
49,089
def __quick_validate(r, check=('ResultSet', 'Result', 'totalRecords')):
    """Quickly validate the structure of a JSON result set returned by XNAT.

    Each name listed in ``check`` enables one structural requirement; the
    first missing piece raises ``ResultSetError``, otherwise ``True`` is
    returned.
    """
    requirements = (
        ('ResultSet', lambda: 'ResultSet' in r),
        ('Result', lambda: 'Result' in r['ResultSet']),
        ('totalRecords', lambda: 'totalRecords' in r['ResultSet']),
    )
    for name, present in requirements:
        if name in check and not present():
            raise ResultSetError('no %s in server response' % name)
    return True
Quick validation of JSON result set returned by XNAT .
49,090
def scansearch(auth, label, filt, project=None, aid=None):
    """Search for scans by supplying SQL-based conditionals.

    The session's scan listing is fetched as CSV and loaded into an
    in-memory sqlite table named ``scans``; each value of ``filt`` (a
    mapping of token -> SQL conditional) is run as a WHERE clause.  Returns
    a defaultdict mapping each token to the list of matching scan IDs.
    """
    if not aid:
        aid = accession(auth, label, project)
    url = "%s/data/experiments/%s/scans?format=csv" % (auth.url.rstrip('/'), aid)
    logger.debug("issuing http request %s", url)
    r = requests.get(url, auth=(auth.username, auth.password),
                     verify=CHECK_CERTIFICATE)
    if r.status_code != requests.codes.ok:
        raise ScanSearchError("response not ok (%s) from %s" % (r.status_code, r.url))
    if not r.content:
        raise ScanSearchError("response is empty from %s" % r.url)
    reader = csv.reader(io.StringIO(r.content.decode()))
    columns = next(reader)
    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE scans (%s)" % ','.join(columns))
    insert = "INSERT INTO scans VALUES (%s)" % ','.join('?' * len(columns))
    for row in reader:
        cursor.execute(insert, list(row))
    conn.commit()
    result = col.defaultdict(list)
    for token, cond in filt.items():
        try:
            # NOTE: cond is interpolated into SQL by design (caller-supplied filter)
            result[token] = [hit[0] for hit in
                             cursor.execute("SELECT ID FROM scans where %s" % cond)]
        except sqlite3.OperationalError:
            logger.critical("something is wrong with the filter: %s", cond)
            raise
    return result
Search for scans by supplying a set of SQL-based conditionals.
49,091
def scans(auth, label=None, scan_ids=None, project=None, experiment=None):
    """Yield scan information for an MR Session as dictionaries.

    Identify the session either by ``label`` (with optional ``project``) or
    by an ``experiment`` object, never both.  ``scan_ids`` optionally
    restricts which scans are yielded.  Column mapping comes from the
    ``scans.columns`` attribute (set elsewhere on this function).
    """
    if experiment and (label or project):
        raise ValueError('cannot supply experiment with label or project')
    if experiment:
        label, project = experiment.label, experiment.project
    aid = accession(auth, label, project)
    path = '/data/experiments'
    params = {
        'xsiType': 'xnat:mrSessionData',
        'columns': ','.join(scans.columns.keys()),
    }
    params['xnat:mrSessionData/ID'] = aid
    _, payload = _get(auth, path, 'json', autobox=True, params=params)
    for row in payload['ResultSet']['Result']:
        if scan_ids is None or row['xnat:mrscandata/id'] in scan_ids:
            yield {v: row[k] for k, v in scans.columns.items()}
Get scan information for a MR Session as a sequence of dictionaries .
49,092
def extendedboldqc(auth, label, scan_ids=None, project=None, aid=None):
    """Yield ExtendedBOLDQC records as dictionaries.

    Looks up the accession ID from ``label`` unless ``aid`` is supplied;
    ``scan_ids`` optionally restricts which scans' records are yielded.
    Column mapping comes from the ``extendedboldqc.columns`` attribute
    (set elsewhere on this function).
    """
    if not aid:
        aid = accession(auth, label, project)
    path = '/data/experiments'
    params = {
        'xsiType': 'neuroinfo:extendedboldqc',
        'columns': ','.join(extendedboldqc.columns.keys()),
    }
    if project:
        params['project'] = project
    params['xnat:mrSessionData/ID'] = aid
    _, payload = _get(auth, path, 'json', autobox=True, params=params)
    for row in payload['ResultSet']['Result']:
        if scan_ids is None or row['neuroinfo:extendedboldqc/scan/scan_id'] in scan_ids:
            yield {v: row[k] for k, v in extendedboldqc.columns.items()}
Get ExtendedBOLDQC data as a sequence of dictionaries .
49,093
def _autobox(content, format):
    """Autobox response content into a parsed object.

    JSON becomes a dict/list, XML an element, CSV a ``csv.reader``; any
    other format raises ``AutoboxError``.
    """
    if format == Format.JSON:
        return json.loads(content)
    elif format == Format.XML:
        return etree.fromstring(content)
    elif format == Format.CSV:
        # fix: csv.reader needs text on Python 3 — the previous BytesIO
        # path produced a reader that failed on iteration, and its fallback
        # referenced the undefined name `unicode`
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        return csv.reader(io.StringIO(content))
    else:
        raise AutoboxError("unknown autobox format %s" % format)
Autobox response content .
49,094
def get_user(request):
    """Return a cached copy of the user, calling ``auth_get_user`` only once.

    The resolved user is memoised on ``request._cached_user``.
    """
    try:
        return request._cached_user
    except AttributeError:
        request._cached_user = auth_get_user(request)
        return request._cached_user
Returns a cached copy of the user if it exists or calls auth_get_user otherwise .
49,095
def ensure_compatible_admin(view):
    """Decorator ensuring the user is in exactly one role before calling ``view``.

    Users with zero or multiple roles are shown the "incompatible admin"
    page instead.  Other checks could be added, such as requiring one
    prison when in the prison-clerk role.
    """
    from functools import wraps

    @wraps(view)  # fix: preserve the view's __name__/docs for URL resolvers and debugging
    def wrapper(request, *args, **kwargs):
        user_roles = request.user.user_data.get('roles', [])
        if len(user_roles) != 1:
            context = {
                'message': 'I need to be able to manage user accounts. '
                           'My username is %s' % request.user.username,
            }
            return render(request, 'mtp_common/user_admin/incompatible-admin.html', context=context)
        return view(request, *args, **kwargs)

    return wrapper
Ensures that the user is in exactly one role. Other checks could be added, such as requiring one prison if in the prison-clerk role.
49,096
def fault_barrier(fn):
    """Method decorator to catch and log errors, then send a fail message.

    Errors raised by ``fn`` are printed to stderr and the tuple is marked
    failed via ``self.fail`` instead of crashing the bolt.
    """
    @functools.wraps(fn)
    def process(self, tup):
        try:
            return fn(self, tup)
        except Exception as exc:
            # NOTE(review): on Python 3 KeyboardInterrupt is not an Exception
            # subclass, so this branch looks vestigial — confirm intent
            if isinstance(exc, KeyboardInterrupt):
                return
            print(str(exc), file=sys.stderr)
            self.fail(tup)

    return process
Method decorator to catch and log errors, then send a fail message.
49,097
def search_manager_from_config(config, **default_init):
    """Get a SearchManager instance dynamically based on config.

    ``config['class']`` is imported (defaulting to the ``birding.search``
    namespace) and instantiated with ``default_init`` overridden by
    ``config['init']``.
    """
    manager_cls = import_name(config['class'], default_ns='birding.search')
    init_kwargs = {**default_init, **config['init']}
    return manager_cls(**init_kwargs)
Get a SearchManager instance dynamically based on config .
49,098
def bids_from_config(sess, scans_metadata, config, out_base):
    """Create a BIDS output directory from a configuration file.

    Subject and session labels are taken from the first scan-metadata
    record; functional, anatomical and fieldmap series are then downloaded
    and converted under ``out_base``.
    """
    first = next(iter(scans_metadata))
    session, subject = first['session_label'], first['subject_label']
    sub_dir = 'sub-{0}'.format(legal.sub('', subject))
    ses_dir = 'ses-{0}'.format(legal.sub('', session))
    sourcedata_base = os.path.join(out_base, 'sourcedata', sub_dir, ses_dir)
    bids_base = os.path.join(out_base, sub_dir, ses_dir)
    args = commons.struct(
        xnat=sess,
        subject=subject,
        session=session,
        bids=bids_base,
        sourcedata=sourcedata_base,
    )
    func_refs = proc_func(config, args)
    # NOTE(review): anat/fmap refs are computed but not returned — presumably
    # the processing happens for its side effects; confirm
    anat_refs = proc_anat(config, args)
    fmap_refs = proc_fmap(config, args, func_refs)
Create a BIDS output directory from configuration file
49,099
def proc_anat(config, args):
    """Download anatomical data and convert it to BIDS.

    For every 'anat' entry in ``config``: download the scan's DICOMs into
    the sourcedata tree, convert them to NIfTI under the BIDS tree, and
    record the relative output path keyed by the entry's ``id``.
    """
    refs = dict()
    for scan in iterconfig(config, 'anat'):
        ref = scan.get('id', None)
        # assemble the BIDS file name from whichever entities are configured
        parts = ['sub-${sub}_ses-${ses}']
        if 'acquisition' in scan:
            parts.append('_acq-${acquisition}')
        if 'run' in scan:
            parts.append('_run-${run}')
        parts.append('_${modality}')
        template = string.Template(''.join(parts))
        fbase = template.safe_substitute(
            sub=legal.sub('', args.subject),
            ses=legal.sub('', args.session),
            acquisition=scan.get('acquisition', None),
            run=scan.get('run', None),
            modality=scan.get('modality', None),
        )
        sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
        if not os.path.exists(sourcedata_dir):
            os.makedirs(sourcedata_dir)
        dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
        logger.info('downloading session=%s, scan=%s, loc=%s',
                    args.session, scan['scan'], dicom_dir)
        args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
        fname = '{0}.nii.gz'.format(fbase)
        refs[ref] = os.path.join(scan['type'], fname)
        fullfile = os.path.join(args.bids, scan['type'], fname)
        logger.info('converting %s to %s', dicom_dir, fullfile)
        convert(dicom_dir, fullfile)
    return refs
Download anatomical data and convert to BIDS