idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
10,100
def _reconnect ( self ) : log . debug ( "Reconnecting to JLigier..." ) self . _disconnect ( ) self . _connect ( ) self . _update_subscriptions ( )
Reconnect to JLigier and subscribe to the tags .
10,101
def data ( self , value ) : if not value : value = b'' if len ( value ) > self . SIZE : raise ValueError ( "The maximum tag size is {0}" . format ( self . SIZE ) ) self . _data = value while len ( self . _data ) < self . SIZE : self . _data += b'\x00'
Set the byte data and fill up the bytes to fit the size .
10,102
def add ( self , name , attr = None , value = None ) : "Set values in constant" if isinstance ( name , tuple ) or isinstance ( name , list ) : name , attr , value = self . __set_iter_value ( name ) if attr is None : attr = name if value is None : value = attr self . __data += ( self . get_const_string ( name = name , value = value ) , ) self . __dict__ [ s_attr ( attr ) ] = self . __data [ - 1 ]
Set values in constant
10,103
def start ( self ) : assert self . _thread is None , 'thread already started' self . _thread = Thread ( target = self . _start_io_loop ) self . _thread . daemon = True self . _thread . start ( ) self . _ready . wait ( )
Start IOLoop in daemonized thread .
10,104
def _start_io_loop ( self ) : def mark_as_ready ( ) : self . _ready . set ( ) if not self . _io_loop : self . _io_loop = ioloop . IOLoop ( ) self . _io_loop . add_callback ( mark_as_ready ) self . _io_loop . start ( )
Start IOLoop then set ready threading . Event .
10,105
def is_ready ( self ) : if not self . _thread : return False if not self . _ready . is_set ( ) : return False return True
Is thread & ioloop ready .
10,106
def submit ( self , fn , * args , ** kwargs ) : if not self . is_ready ( ) : raise ThreadNotStartedError ( "The thread has not been started yet, " "make sure you call start() first" ) future = Future ( ) def execute ( ) : try : result = gen . maybe_future ( fn ( * args , ** kwargs ) ) except Exception : f = gen . Future ( ) f . set_exc_info ( sys . exc_info ( ) ) on_done ( f ) else : result . add_done_callback ( on_done ) def on_done ( f ) : if not f . exception ( ) : future . set_result ( f . result ( ) ) return if hasattr ( f , 'exc_info' ) : exception , traceback = f . exc_info ( ) [ 1 : ] else : if hasattr ( f , 'exception_info' ) : exception , traceback = f . exception_info ( ) else : exception = f . exception ( ) traceback = None if _FUTURE_HAS_EXC_INFO : future . set_exception_info ( exception , traceback ) return future . set_exception ( exception ) self . _io_loop . add_callback ( execute ) return future
Submit Tornado Coroutine to IOLoop in daemonized thread .
10,107
def peak_memory_usage ( ) : if sys . platform . startswith ( 'win' ) : p = psutil . Process ( ) return p . memory_info ( ) . peak_wset / 1024 / 1024 mem = resource . getrusage ( resource . RUSAGE_SELF ) . ru_maxrss factor_mb = 1 / 1024 if sys . platform == 'darwin' : factor_mb = 1 / ( 1024 * 1024 ) return mem * factor_mb
Return peak memory usage in MB
10,108
def getPreferenceCounts ( self ) : preferenceCounts = [ ] for preference in self . preferences : preferenceCounts . append ( preference . count ) return preferenceCounts
Returns a list of the number of times each preference is given .
10,109
def getRankMaps ( self ) : rankMaps = [ ] for preference in self . preferences : rankMaps . append ( preference . getRankMap ( ) ) return rankMaps
Returns a list of dictionaries one for each preference that associates the integer representation of each candidate with its position in the ranking starting from 1 .
10,110
def getReverseRankMaps ( self ) : reverseRankMaps = [ ] for preference in self . preferences : reverseRankMaps . append ( preference . getReverseRankMap ( ) ) return reverseRankMaps
Returns a list of dictionaries one for each preference that associates each position in the ranking with a list of integer representations of the candidates ranked at that position .
10,111
def exportPreflibFile ( self , fileName ) : elecType = self . getElecType ( ) if elecType != "soc" and elecType != "toc" and elecType != "soi" and elecType != "toi" : print ( "ERROR: printing current type to preflib format is not supported" ) exit ( ) reverseRankMaps = self . getReverseRankMaps ( ) outfileObj = open ( fileName , 'w' ) outfileObj . write ( str ( self . numCands ) ) for candInt , cand in self . candMap . items ( ) : outfileObj . write ( "\n" + str ( candInt ) + "," + cand ) preferenceCount = 0 for preference in self . preferences : preferenceCount += preference . count outfileObj . write ( "\n" + str ( self . numVoters ) + "," + str ( preferenceCount ) + "," + str ( len ( self . preferences ) ) ) for i in range ( 0 , len ( reverseRankMaps ) ) : outfileObj . write ( "\n" + str ( self . preferences [ i ] . count ) ) reverseRankMap = reverseRankMaps [ i ] sortedKeys = sorted ( reverseRankMap . keys ( ) ) for key in sortedKeys : cands = reverseRankMap [ key ] if len ( cands ) == 1 : outfileObj . write ( "," + str ( cands [ 0 ] ) ) elif len ( cands ) > 1 : outfileObj . write ( ",{" + str ( cands [ 0 ] ) ) for j in range ( 1 , len ( cands ) ) : outfileObj . write ( "," + str ( cands [ j ] ) ) outfileObj . write ( "}" ) outfileObj . close ( )
Exports a preflib format file that contains all the information of the current Profile .
10,112
def importPreflibFile ( self , fileName ) : elecFileObj = open ( fileName , 'r' ) self . candMap , rankMaps , wmgMapsCounts , self . numVoters = prefpy_io . read_election_file ( elecFileObj ) elecFileObj . close ( ) self . numCands = len ( self . candMap . keys ( ) ) self . preferences = [ ] for i in range ( 0 , len ( rankMaps ) ) : wmgMap = self . genWmgMapFromRankMap ( rankMaps [ i ] ) self . preferences . append ( Preference ( wmgMap , wmgMapsCounts [ i ] ) )
Imports a preflib format file that contains all the information of a Profile . This function will completely override all members of the current Profile object . Currently we assume that in an election where incomplete orderings are allowed if a voter ranks only one candidate then the voter did not prefer any candidates over another . This may lead to some discrepancies when importing and exporting a . toi preflib file or a . soi preflib file .
10,113
def exportJsonFile ( self , fileName ) : data = dict ( ) for key in self . __dict__ . keys ( ) : if key != "preferences" : data [ key ] = self . __dict__ [ key ] preferenceDicts = [ ] for preference in self . preferences : preferenceDict = dict ( ) for key in preference . __dict__ . keys ( ) : preferenceDict [ key ] = preference . __dict__ [ key ] preferenceDicts . append ( preferenceDict ) data [ "preferences" ] = preferenceDicts outfile = open ( fileName , 'w' ) json . dump ( data , outfile ) outfile . close ( )
Exports a json file that contains all the information of the current Profile .
10,114
def importJsonFile ( self , fileName ) : infile = open ( fileName ) data = json . load ( infile ) infile . close ( ) self . numCands = int ( data [ "numCands" ] ) self . numVoters = int ( data [ "numVoters" ] ) candMap = dict ( ) for key in data [ "candMap" ] . keys ( ) : candMap [ int ( key ) ] = data [ "candMap" ] [ key ] . encode ( "ascii" ) self . candMap = candMap self . preferences = [ ] for preferenceMap in data [ "preferences" ] : count = int ( preferenceMap [ "count" ] ) preferenceWmgMap = preferenceMap [ "wmgMap" ] wmgMap = dict ( ) for key in preferenceWmgMap . keys ( ) : wmgMap [ int ( key ) ] = dict ( ) for key2 in preferenceWmgMap [ key ] . keys ( ) : wmgMap [ int ( key ) ] [ int ( key2 ) ] = int ( preferenceWmgMap [ key ] [ key2 ] ) self . preferences . append ( Preference ( wmgMap , count ) )
Imports a json file that contains all the information of a Profile . This function will completely override all members of the current Profile object .
10,115
def main ( ) : cand_set = [ 0 , 1 , 2 ] votes = [ [ 0 , 1 , 2 ] , [ 1 , 2 , 0 ] ] mmagg = MMPLAggregator ( cand_set ) gamma = mmagg . aggregate ( votes , epsilon = 1e-7 , max_iters = 20 ) print ( mmagg . alts_to_ranks , mmagg . ranks_to_alts ) assert ( [ mmagg . get_ranking ( i ) for i in cand_set ] == [ 1 , 0 , 2 ] ) print ( gamma )
Driver function for the computation of the MM algorithm
10,116
def get_login_url ( self , state = None ) : payload = { 'response_type' : 'code' , 'client_id' : self . _client_id , 'redirect_uri' : self . _redirect_uri , } if state is not None : payload [ 'state' ] = state return "%s?%s" % ( settings . API_AUTHORIZATION_URL , urllib . urlencode ( payload ) )
Generates and returns URL for redirecting to Login Page of RunKeeper which is the Authorization Endpoint of Health Graph API .
10,117
def get_login_button_url ( self , button_color = None , caption_color = None , button_size = None ) : if not button_color in settings . LOGIN_BUTTON_COLORS : button_color = settings . LOGIN_BUTTON_COLORS [ 0 ] if not caption_color in settings . LOGIN_BUTTON_CAPTION_COLORS : caption_color = settings . LOGIN_BUTTON_CAPTION_COLORS [ 0 ] if settings . LOGIN_BUTTON_SIZES . has_key ( button_size ) : button_size = settings . LOGIN_BUTTON_SIZES [ button_size ] else : button_size = settings . LOGIN_BUTTON_SIZES [ 'None' ] return settings . LOGIN_BUTTON_URL % ( button_color , caption_color , button_size )
Return URL for image used for RunKeeper Login button .
10,118
def get_access_token ( self , code ) : payload = { 'grant_type' : 'authorization_code' , 'code' : code , 'client_id' : self . _client_id , 'client_secret' : self . _client_secret , 'redirect_uri' : self . _redirect_uri , } req = requests . post ( settings . API_ACCESS_TOKEN_URL , data = payload ) data = req . json ( ) return data . get ( 'access_token' )
Returns Access Token retrieved from the Health Graph API Token Endpoint following the login to RunKeeper .
10,119
def revoke_access_token ( self , access_token ) : payload = { 'access_token' : access_token , } req = requests . post ( settings . API_DEAUTHORIZATION_URL , data = payload )
Revokes the Access Token by accessing the De - authorization Endpoint of Health Graph API .
10,120
def split ( self , points ) : for p in points : for i in range ( len ( self . intervals ) ) : if ( self . intervals [ i ] . start < p ) and ( self . intervals [ i ] . end > p ) : self . intervals = ( self . intervals [ : i ] + [ TimeInterval ( self . intervals [ i ] . start , p ) , TimeInterval ( p , self . intervals [ i ] . end ) ] + self . intervals [ ( i + 1 ) : ] ) break
Splits the list of time intervals in the specified points
10,121
def create ( cls , data , ** kwargs ) : with db . session . begin_nested ( ) : model = cls . dbmodel ( ** kwargs ) model . data = data obj = cls ( model ) db . session . add ( obj . model ) return obj
Create a new Workflow Object with given content .
10,122
def get ( cls , id_ ) : with db . session . no_autoflush : query = cls . dbmodel . query . filter_by ( id = id_ ) try : model = query . one ( ) except NoResultFound : raise WorkflowsMissingObject ( "No object for for id {0}" . format ( id_ ) ) return cls ( model )
Return a workflow object from id .
10,123
def query ( cls , * criteria , ** filters ) : query = cls . dbmodel . query . filter ( * criteria ) . filter_by ( ** filters ) return [ cls ( obj ) for obj in query . all ( ) ]
Wrap sqlalchemy query methods .
10,124
def delete ( self , force = False ) : if self . model is None : raise WorkflowsMissingModel ( ) with db . session . begin_nested ( ) : db . session . delete ( self . model ) return self
Delete a workflow object .
10,125
def set_action ( self , action , message ) : self . extra_data [ "_action" ] = action self . extra_data [ "_message" ] = message
Set the action to be taken for this object .
10,126
def start_workflow ( self , workflow_name , delayed = False , ** kwargs ) : from . tasks import start if delayed : self . save ( ) db . session . commit ( ) return start . delay ( workflow_name , object_id = self . id , ** kwargs ) else : return start ( workflow_name , data = [ self ] , ** kwargs )
Run the workflow specified on the object .
10,127
def continue_workflow ( self , start_point = "continue_next" , delayed = False , ** kwargs ) : from . tasks import resume self . save ( ) if not self . id_workflow : raise WorkflowAPIError ( "No workflow associated with object: %r" % ( repr ( self ) , ) ) if delayed : db . session . commit ( ) return resume . delay ( self . id , start_point , ** kwargs ) else : return resume ( self . id , start_point , ** kwargs )
Continue the workflow for this object .
10,128
def get_current_task_info ( self ) : name = self . model . workflow . name if not name : return current_task = workflows [ name ] . workflow for step in self . callback_pos : current_task = current_task [ step ] if callable ( current_task ) : return get_func_info ( current_task )
Return dictionary of current task function info for this object .
10,129
def canned_handlers ( self , environ , start_response , code = '200' , headers = [ ] ) : headerbase = [ ( 'Content-Type' , 'text/plain' ) ] if headers : hObj = Headers ( headerbase ) for header in headers : hObj [ header [ 0 ] ] = '; ' . join ( header [ 1 : ] ) start_response ( self . canned_collection [ code ] , headerbase ) return [ '' ]
We convert an error code into a certain action over start_response and return a WSGI - compliant payload .
10,130
def info_shell_scope ( self ) : Console . ok ( "{:>20} = {:}" . format ( "ECHO" , self . echo ) ) Console . ok ( "{:>20} = {:}" . format ( "DEBUG" , self . debug ) ) Console . ok ( "{:>20} = {:}" . format ( "LOGLEVEL" , self . loglevel ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPE" , self . active_scope ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPES" , self . scopes ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPELESS" , self . scopeless ) ) Console . ok ( "{:>20} = {:}" . format ( "prompt" , self . prompt ) ) Console . ok ( "{:>20} = {:}" . format ( "scripts" , self . scripts ) ) Console . ok ( "{:>20} = {:}" . format ( "variables" , self . variables ) )
prints some information about the shell scope
10,131
def activate_shell_scope ( self ) : self . variables = { } self . prompt = 'cm> ' self . active_scope = "" self . scopes = [ ] self . scopeless = [ 'load' , 'info' , 'var' , 'use' , 'quit' , 'q' , 'help' ]
activates the shell scope
10,132
def _build_stack ( self ) -> List [ Callable ] : stack = [ ] for m in self . manager . middlewares : try : stack . append ( getattr ( m ( self ) , self . name ) ) except AttributeError : pass return stack
Generates the stack of functions to call . It looks at the ordered list of all middlewares and only keeps those which have the method we're trying to call .
10,133
def instance ( cls ) -> 'MiddlewareManager' : if cls . _instance is None : cls . _instance = cls ( ) cls . _instance . init ( ) return cls . _instance
Creates initializes and returns a unique MiddlewareManager instance .
10,134
def health_check ( cls ) : try : assert isinstance ( settings . MIDDLEWARES , list ) except AssertionError : yield HealthCheckFail ( '00005' , 'The "MIDDLEWARES" configuration key should be assigned ' 'to a list' , ) return for m in settings . MIDDLEWARES : try : c = import_class ( m ) except ( TypeError , ValueError , AttributeError , ImportError ) : yield HealthCheckFail ( '00005' , f'Cannot import middleware "{m}"' , ) else : if not issubclass ( c , BaseMiddleware ) : yield HealthCheckFail ( '00005' , f'Middleware "{m}" does not implement ' f'"BaseMiddleware"' , )
Checks that the configuration makes sense .
10,135
def get ( self , name : Text , final : C ) -> C : return Caller ( self , name , final )
Get the function to call which will run all middlewares .
10,136
def load_from_args ( args ) : if not args . locus : return None loci_iterator = ( Locus . parse ( locus ) for locus in args . locus ) return Loci ( loci_iterator )
Return a Loci object giving the loci specified on the command line .
10,137
def format_date ( cls , timestamp ) : if not timestamp : raise DateTimeFormatterException ( 'timestamp must a valid string {}' . format ( timestamp ) ) return timestamp . strftime ( cls . DATE_FORMAT )
Creates a string representing the date information provided by the given timestamp object .
10,138
def format_datetime ( cls , timestamp ) : if not timestamp : raise DateTimeFormatterException ( 'timestamp must a valid string {}' . format ( timestamp ) ) return timestamp . strftime ( cls . DATETIME_FORMAT )
Creates a string representing the date and time information provided by the given timestamp object .
10,139
def extract_date ( cls , date_str ) : if not date_str : raise DateTimeFormatterException ( 'date_str must a valid string {}.' . format ( date_str ) ) try : return cls . _extract_timestamp ( date_str , cls . DATE_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid date string {}.' . format ( date_str ) )
Tries to extract a datetime object from the given string expecting date information only .
10,140
def extract_datetime ( cls , datetime_str ) : if not datetime_str : raise DateTimeFormatterException ( 'datetime_str must a valid string' ) try : return cls . _extract_timestamp ( datetime_str , cls . DATETIME_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid datetime string {}.' . format ( datetime_str ) )
Tries to extract a datetime object from the given string including time information .
10,141
def extract_datetime_hour ( cls , datetime_str ) : if not datetime_str : raise DateTimeFormatterException ( 'datetime_str must a valid string' ) try : return cls . _extract_timestamp ( datetime_str , cls . DATETIME_HOUR_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid datetime string {}.' . format ( datetime_str ) )
Tries to extract a datetime object from the given string including only hours .
10,142
def extract ( cls , timestamp_str ) : if not timestamp_str : raise DateTimeFormatterException ( 'timestamp_str must a valid string {}' . format ( timestamp_str ) ) if isinstance ( timestamp_str , ( date , datetime ) ) : return timestamp_str try : return cls . extract_datetime ( timestamp_str ) except DateTimeFormatterException : pass try : return cls . extract_datetime_hour ( timestamp_str ) except DateTimeFormatterException : pass try : return cls . extract_date ( timestamp_str ) except DateTimeFormatterException as e : raise DateTimeFormatterException ( e )
Tries to extract a datetime object from the given string . First the datetime format is tried if it fails the date format is used for extraction .
10,143
def restart ( uuid , ** kwargs ) : from . worker_engine import restart_worker return text_type ( restart_worker ( uuid , ** kwargs ) . uuid )
Restart the workflow from a given workflow engine UUID .
10,144
def import_submodules ( context , root_module , path ) : for _ , module_name , _ in pkgutil . walk_packages ( path , root_module + '.' ) : module = __import__ ( module_name , globals ( ) , locals ( ) , [ '__name__' ] ) for k , v in vars ( module ) . items ( ) : if not k . startswith ( '_' ) : context [ k ] = v context [ module_name ] = module
Import all submodules and register them in the context namespace .
10,145
def command ( func ) : classname = inspect . getouterframes ( inspect . currentframe ( ) ) [ 1 ] [ 3 ] name = func . __name__ help_name = name . replace ( "do_" , "help_" ) doc = textwrap . dedent ( func . __doc__ ) def new ( instance , args ) : try : argv = shlex . split ( args ) arguments = docopt ( doc , help = True , argv = argv ) func ( instance , args , arguments ) except SystemExit : if args not in ( '-h' , '--help' ) : Console . error ( "Could not execute the command." ) print ( doc ) new . __doc__ = doc return new
A decorator to create a function with docopt arguments . It also generates a help function
10,146
def addFile ( self , path , msg = "" ) : item = Item . from_path ( repo = self . repo , path = path ) self . addItem ( item )
Adds a file to the version
10,147
def addItem ( self , item ) : try : self . tree . addItem ( item ) except AttributeError , e : raise VersionError ( 'Saved versions are immutable' )
Adds an item if the tree is mutable
10,148
def removeItem ( self , item ) : try : self . tree . removeItem ( item ) except AttributeError , e : raise VersionError ( 'Saved versions are immutable' )
Removes an item if the tree is mutable
10,149
def iteritems ( self ) : if self . type in [ 'blob' ] : raise StopIteration for path , mode , sha in self . tree . iteritems ( ) : item = Item ( self , sha , path , mode ) yield item for i in item . iteritems ( ) : yield i
Generator that yields Items
10,150
def items ( self , path = None ) : items = list ( self . iteritems ( ) ) if path is not None : path += '$' regex = re . compile ( path ) items = [ i for i in items if regex . match ( i . path ) ] return items
Returns set of items .
10,151
def _get_blob ( self ) : if not self . __blob : self . __blob = self . repo . get_object ( self . id ) return self . __blob
read blob on access only because get_object is slow
10,152
def from_path ( self , repo , path , name = None ) : if name is None : name = os . path . basename ( path ) return Item . from_string ( repo = repo , name = name , string = open ( path ) . read ( ) )
Create a new Item from a file path .
10,153
def from_string ( self , repo , name , string ) : try : log . debug ( 'Creating new item: %s' % name ) blob = Blob . from_string ( string ) item = Item ( parent = repo , sha = blob . sha , path = name ) item . blob = blob return item except AssertionError , e : raise ItemError ( e )
Create a new Item from a data stream .
10,154
def save ( self , msg = None ) : if msg is None : msg = 'Saving %s' % self . name log . debug ( msg ) self . repo . addItem ( self , msg )
Modify item data and commit to repo . Git objects are immutable so to save means adding a new item
10,155
def checkout ( self , path ) : if os . path . isdir ( path ) : path = os . path . join ( path , self . name ) try : log . debug ( 'Checking out %s to %s' % ( self . path , path ) ) f = open ( path , 'w' ) f . write ( self . data ( ) ) f . close ( ) return True except Exception , e : raise ItemError ( e )
Check out file data to path .
10,156
def save ( self , message ) : self . commit . message = message self . commit . tree = self . tree for item in self . tree . items ( ) : self . repo . object_store . add_object ( item . blob ) self . repo . object_store . add_object ( self . tree ) self . repo . object_store . add_object ( self . commit ) self . repo . refs [ 'refs/heads/master' ] = self . commit . id
Add version to repo object store set repo head to version sha .
10,157
def new ( self , repo ) : try : commit = Commit ( ) author = os . environ . get ( 'USER' ) commit . author = commit . committer = author commit . commit_time = commit . author_time = int ( time ( ) ) tz = parse_timezone ( '-0200' ) [ 0 ] commit . commit_timezone = commit . author_timezone = tz commit . encoding = "UTF-8" commit . message = '' parent = repo . versions ( - 1 ) if parent : commit . parents = [ parent . id ] tree = Tree ( ) curr = repo . versions ( - 1 ) if curr : for item in curr . items ( ) : tree . addItem ( item ) commit . tree = tree . id version = Version ( repo = repo , commit = commit , tree = tree ) return version except Exception , e : traceback . print_exc ( ) return VersionError ( e )
Create a new version of a repo . Local object .
10,158
def confirm ( prompt = None , resp = False ) : if prompt is None : prompt = 'Confirm' if resp : prompt = '%s [%s]|%s: ' % ( prompt , 'y' , 'n' ) else : prompt = '%s [%s]|%s: ' % ( prompt , 'n' , 'y' ) while True : ans = raw_input ( prompt ) if not ans : return resp if ans not in [ 'y' , 'Y' , 'n' , 'N' ] : print 'please enter y or n.' continue if ans == 'y' or ans == 'Y' : return True if ans == 'n' or ans == 'N' : return False
Prompts user for confirmation .
10,159
def prompt ( name , default ) : value = raw_input ( '%s [%s]: ' % ( name , default ) ) if not value : value = default return value
Prompts user for raw input .
10,160
def new ( url ) : from grit import Repo return Repo . new ( url = url , bare = True )
Creates a new Repo class instance at url .
10,161
def checkout ( url , version = None ) : from grit import Repo r = Repo ( url ) def _write ( item ) : log . debug ( 'writing: %s' % item . name ) if item . type != 'blob' : return if r . type in [ 'repo' , 'proxy' , 'local' ] : path = os . path . join ( r . name , item . path ) pdir = os . path . dirname ( path ) if not os . path . isdir ( pdir ) : os . makedirs ( pdir ) else : path = item . name f = open ( path , 'w' ) f . write ( item . data ( ) ) f . close ( ) if r . type == 'blob' : _write ( r ) else : items = r . items ( ) count = 1 total = len ( items ) while count <= total : print '[%s/%s] %0.2f%%' % ( count , total , ( float ( count ) / total ) * 100 ) , '*' * count , '\r' , _write ( items [ count - 1 ] ) count += 1 sys . stdout . flush ( ) print
Checks out latest version of item or repository .
10,162
def checkin ( url , files , message = None ) : from grit import Repo , Item r = Repo ( url ) if not files : raise GritError ( 'No files' ) def _write ( path ) : item = Item . from_path ( repo = r , path = path ) if r . isLocal ( ) : v . addItem ( item = item ) else : r . upload ( filename = os . path . basename ( path ) , filedata = open ( path , 'r' ) . read ( ) ) if r . isLocal ( ) : v = r . addVersion ( ) count = 1 total = len ( files ) while count <= total : print '[%s/%s] %0.2f%%' % ( count , total , ( float ( count ) / total ) * 100 ) , '*' * count , '\r' , _write ( os . path . abspath ( files [ count - 1 ] ) ) count += 1 sys . stdout . flush ( ) if message is None : message = 'Publishing %s' % ', ' . join ( files ) if r . isLocal ( ) : v . save ( message = message ) print
Check in files to a repository .
10,163
def get ( cls , ** kwargs ) : fields = { } for field in cls . url_fields : value = kwargs . pop ( field , None ) if value is None : cls . _handle_wrong_field ( field , ATTR_TYPE_URL ) fields [ field ] = value model = cls ( ** fields ) model . _populate ( ** kwargs ) return model
Retrieve an object by making a GET request to Transifex .
10,164
def save ( self , ** fields ) : for field in fields : if field in self . writable_fields : setattr ( self , field , fields [ field ] ) else : self . _handle_wrong_field ( field , ATTR_TYPE_WRITE ) if self . _populated_fields : self . _update ( ** self . _modified_fields ) else : self . _create ( ** self . _modified_fields )
Save the instance to the remote Transifex server .
10,165
def _get ( self , ** kwargs ) : path = self . _construct_path_to_item ( ) return self . _http . get ( path )
Get the resource from a remote Transifex server .
10,166
def _create ( self , ** kwargs ) : path = self . _construct_path_to_collection ( ) for field in self . writable_fields : try : value = getattr ( self , field ) kwargs [ field ] = value except AttributeError : pass return self . _http . post ( path , json . dumps ( kwargs ) )
Create a resource in the remote Transifex server .
10,167
def _update ( self , ** kwargs ) : path = self . _construct_path_to_item ( ) if not kwargs : return return self . _http . put ( path , json . dumps ( kwargs ) )
Update a resource in a remote Transifex server .
10,168
def _delete ( self , ** kwargs ) : path = self . _construct_path_to_item ( ) return self . _http . delete ( path )
Delete a resource from a remote Transifex server .
10,169
def get_url_parameters ( self ) : url_fields = { } for field in self . url_fields : url_fields [ field ] = getattr ( self , field ) return url_fields
Create a dictionary of parameters used in URLs for this model .
10,170
def _handle_wrong_field ( cls , field_name , field_type ) : if field_type == ATTR_TYPE_READ : field_type = 'readable' elif field_type == ATTR_TYPE_WRITE : field_type = 'writable' elif field_type == ATTR_TYPE_URL : field_type = 'URL' else : raise AttributeError ( 'Invalid attribute type: {}' . format ( field_type ) ) msg = '{} has no {} attribute "{}"' . format ( cls . __name__ , field_type , field_name ) _logger . error ( msg ) raise AttributeError ( msg )
Raise an exception whenever an invalid attribute with the given name was attempted to be set to or retrieved from this model class .
10,171
def update_http_rules ( rules , content_type = 'text/plain' ) : for kw in deepcopy ( rules ) : kw [ 'url' ] = re . compile ( kw [ 'url' ] ) if 'Content-Type' not in kw . get ( 'headers' , { } ) : kw [ 'headers' ] = dict ( kw . get ( 'headers' , { } ) , ** { 'Content-Type' : content_type , } ) method = kw . pop ( 'method' ) url = kw . pop ( 'url' ) http_mock . register_uri ( method , url , ** kw )
Adds rules to global http mock .
10,172
def get_task_history ( last_task ) : if hasattr ( last_task , 'branch' ) and last_task . branch : return elif hasattr ( last_task , 'hide' ) and last_task . hide : return else : return get_func_info ( last_task )
Append last task to task history .
10,173
def get_func_info ( func ) : name = func . __name__ doc = func . __doc__ or "" try : nicename = func . description except AttributeError : if doc : nicename = doc . split ( '\n' ) [ 0 ] if len ( nicename ) > 80 : nicename = name else : nicename = name parameters = [ ] try : closure = func . func_closure except AttributeError : closure = func . __closure__ try : varnames = func . func_code . co_freevars except AttributeError : varnames = func . __code__ . co_freevars if closure : for index , arg in enumerate ( closure ) : if not callable ( arg . cell_contents ) : parameters . append ( ( varnames [ index ] , text_type ( arg . cell_contents ) ) ) return ( { "nicename" : nicename , "doc" : doc , "parameters" : parameters , "name" : name , "time" : str ( datetime . datetime . now ( ) ) , "hostname" : socket . gethostname ( ) , } )
Retrieve a function's information .
10,174
def get_workflow_info ( func_list ) : funcs = [ ] for item in func_list : if item is None : continue if isinstance ( item , list ) : funcs . append ( get_workflow_info ( item ) ) else : funcs . append ( get_func_info ( item ) ) return funcs
Return function info go through lists recursively .
10,175
def _copy_context_into_mutable ( context ) : def make_mutable ( val ) : if isinstance ( val , Mapping ) : return dict ( val ) else : return val if not isinstance ( context , ( str , Mapping ) ) : try : return [ make_mutable ( val ) for val in context ] except TypeError : pass return make_mutable ( context )
Copy a properly formatted context into a mutable data structure .
10,176
def make_dataset_models ( dataset , schemas_and_tables , metadata_dict = None , version : int = 1 , include_contacts = False ) : if metadata_dict is None : metadata_dict = { } validate_types ( schemas_and_tables ) dataset_dict = { } cell_segment_model = make_cell_segment_model ( dataset , version = version ) dataset_dict [ root_model_name . lower ( ) ] = cell_segment_model for schema_name , table_name in schemas_and_tables : model_key = table_name metadata = metadata_dict . get ( table_name , None ) dataset_dict [ model_key ] = make_annotation_model ( dataset , schema_name , table_name , table_metadata = metadata , version = version ) if include_contacts : contact_model = make_annotation_model_from_schema ( dataset , 'contact' , Contact , version = version ) dataset_dict [ 'contact' ] = contact_model return dataset_dict
make all the models for a dataset
10,177
def _key_name ( self ) : if self . _key is not None : return self . _key return self . __class__ . __name__ . lower ( )
Return the key referring to this object
10,178
def _path ( self ) : if self . _parent : return '{}.{}' . format ( self . _parent . _path ( ) , self . _key_name ( ) ) return self . _key_name ( )
Return the dotted path representation of this object
10,179
def _add_error(self, *args, **kwargs):
    """Convenience helper: record a ConfigError on this object.

    Uses the YAML node passed in *kwargs* (or, failing that, this
    object's own value node) so the error carries line-number info
    when available.
    """
    node = kwargs.get('node', None)
    if node:
        error = ConfigError.create_from_yaml_node(*args, **kwargs)
    elif self._value_node:
        error = ConfigError.create_from_yaml_node(
            node=self._value_node, *args, **kwargs)
    else:
        # No node information at all -- build a plain error.
        error = ConfigError(*args, **kwargs)
    self._errors.append(error)
Convenience function to add an error to this object with line numbers
10,180
def _get_descendants_errors ( self ) : descendants_errors = [ ] if hasattr ( self , '_children' ) : if isinstance ( self . _children , ( list , tuple ) ) : for c in self . _children : descendants_errors += c . _get_all_errors ( ) elif isinstance ( self . _children , dict ) : for c in self . _children . values ( ) : descendants_errors += c . _get_all_errors ( ) return descendants_errors
Recursively get errors from descendants
10,181
def _validate ( self ) : self . _errors = [ ] self . _validate_type ( ) if self . is_valid ( ) : self . _validate_value ( )
Run validation; save errors to the object in self._errors.
10,182
def _validate_type ( self ) : if not isinstance ( self . _value , self . _type ) : title = '{} has an invalid type' . format ( self . _key_name ( ) ) description = '{} must be a {}' . format ( self . _key_name ( ) , self . _type . __name__ ) self . _add_error ( title = title , description = description )
Validation to ensure value is the correct type
10,183
def haveSnapshots(self):
    """Check if we have at least one snapshot.

    True exactly when the 'latest' link is a symlink that resolves to a
    directory.
    """
    link = self.latestLink
    return os.path.islink(link) and os.path.isdir(link)
Check if we have at least one snapshot .
10,184
def fromScratch(self):
    """Start a fresh experiment from scratch.  Returns self.

    Removes the 'latest' symlink; refuses (via assert) to clobber a
    non-symlink entry, which would likely be real data.
    """
    assert (not os.path.lexists(self.latestLink)
            or os.path.islink(self.latestLink))
    self.rmR(self.latestLink)
    return self
Start a fresh experiment from scratch . Returns self .
10,185
def snapshot(self):
    """Take a snapshot of the experiment.  Returns self."""
    num = self.nextSnapshotNum
    path = self.getFullPathToSnapshot(num)
    # Clobber any stale snapshot directory left under the same number.
    if os.path.lexists(path):
        self.rmR(path)
    self.mkdirp(os.path.join(path, ".experiment"))
    return self.dump(path).__markLatest(num)
Take a snapshot of the experiment . Returns self .
10,186
def rollback(self, n=None):
    """Roll back the experiment to snapshot *n*.  Returns self.

    When *n* is None the latest snapshot is restored, or, if no snapshot
    exists, the experiment restarts from scratch.

    :raises ValueError: if *n* is neither None nor an int.
    """
    if n is None:
        # BUG FIX: haveSnapshots is a method; the old code tested the
        # bound-method object itself (`self.haveSnapshots`), which is
        # always truthy, so fromScratch() was unreachable.
        if self.haveSnapshots():
            return self.fromSnapshot(self.latestLink)
        return self.fromScratch()
    if isinstance(n, int):
        loadSnapshotPath = self.getFullPathToSnapshot(n)
        assert os.path.isdir(loadSnapshotPath)
        return self.__markLatest(n).fromSnapshot(loadSnapshotPath)
    raise ValueError("n must be int, or None!")
Roll back the experiment to the given snapshot number . Returns self .
10,187
def getFullPathToSnapshot(self, n):
    """Return the full filesystem path of snapshot number *n*."""
    snapshot_name = str(n)
    return os.path.join(self.snapDir, snapshot_name)
Get the full path to snapshot n .
10,188
def strategyLastK(kls, n, k=10):
    """Return the directory names to preserve under the LastK purge
    strategy: the *k* most recent snapshot numbers (non-negative only),
    as strings."""
    return {str(num) for num in range(n, n - k, -1) if num >= 0}
Return the directory names to preserve under the LastK purge strategy .
10,189
def strategyKLogN(kls, n, k=4):
    """Return the directory names to preserve under the KLogN purge
    strategy: geometrically thinned snapshot numbers, densest near *n*.

    Non-negative numbers only, as strings.  Requires ``k > 1``.
    """
    assert k > 1
    keep = {n}
    level = 0
    while k ** level <= n:
        stride = k ** level
        # Keep k consecutive snapshots at this stride, counting down from n.
        keep.update(range(n, n - k * stride, -stride))
        level += 1
        # Snap n down to a multiple of the next (larger) stride.
        n -= n % k ** level
    return {str(num) for num in keep if num >= 0}
Return the directory names to preserve under the KLogN purge strategy .
10,190
def listSnapshotDir(kls, path):
    """Return (snapshot_names, other_names) for the entries under *path*.

    Entries whose names are integers (per ``kls.isFilenameInteger``) count
    as snapshots.  A missing directory yields two empty sets.

    BUG FIX: the old code had ``return`` inside ``finally``, which
    silently swallowed *every* exception (e.g. PermissionError), not just
    FileNotFoundError.  Unexpected OS errors now propagate.
    """
    snapshotSet = set()
    nonsnapshotSet = set()
    try:
        entries = os.listdir(path)
    except FileNotFoundError:
        entries = []
    for entry in entries:
        if kls.isFilenameInteger(entry):
            snapshotSet.add(entry)
        else:
            nonsnapshotSet.add(entry)
    return snapshotSet, nonsnapshotSet
Return the set of snapshot directories and non - snapshot directories under the given path .
10,191
def rmR(kls, path):
    """rm -R *path*.

    Symlinks and regular files are unlinked; directories are removed
    bottom-up without following symlinks.  A nonexistent path is
    silently ignored.
    """
    if os.path.islink(path) or os.path.isfile(path):
        os.unlink(path)
        return
    if not os.path.isdir(path):
        return  # nothing there -- nothing to do
    for dirpath, dirnames, filenames in os.walk(path, topdown=False,
                                                followlinks=False):
        for name in filenames:
            os.unlink(os.path.join(dirpath, name))
        for name in dirnames:
            os.rmdir(os.path.join(dirpath, name))
    os.rmdir(path)
rm - R path . Deletes but does not recurse into symlinks . If the path does not exist silently return .
10,192
def atomicSymlink(kls, target, name):
    """Atomically (re)point the symlink *name* at *target*.

    Same syntax as ``os.symlink``: the link is first created as
    ``name.ATOMIC`` and then renamed over *name* (rename is atomic on
    POSIX), so readers never observe a missing link.  Any pre-existing
    ``name.ATOMIC`` entry is forcibly removed first.
    """
    staging = name + ".ATOMIC"
    if os.path.lexists(staging):
        # Leftover from an earlier interrupted run -- clear it out.
        kls.rmR(staging)
    os.symlink(target, staging)
    os.rename(staging, name)
Same syntax as os . symlink except that the new link called name will first be created with the name and target name . ATOMIC - > target then be atomically renamed to name - > target thus overwriting any previous symlink there . If a filesystem entity called name . ATOMIC already exists it will be forcibly removed .
10,193
def _compensate_temperature ( self , adc_t ) : var_1 = ( ( adc_t / 16384.0 - self . _calibration_t [ 0 ] / 1024.0 ) * self . _calibration_t [ 1 ] ) var_2 = ( ( adc_t / 131072.0 - self . _calibration_t [ 0 ] / 8192.0 ) * ( adc_t / 131072.0 - self . _calibration_t [ 0 ] / 8192.0 ) * self . _calibration_t [ 2 ] ) self . _temp_fine = var_1 + var_2 if self . _delta_temp != 0. : temp = self . _temp_fine / 5120.0 + self . _delta_temp self . _temp_fine = temp * 5120.0 else : temp = self . _temp_fine / 5120.0 return temp
Compensate temperature .
10,194
def _compensate_pressure(self, adc_p):
    """Compensate pressure.

    Floating-point compensation in the style of the Bosch BME280
    reference code -- verify constants against the datasheet.  Requires
    ``self._temp_fine`` to be current (set by ``_compensate_temperature``).
    Returns pressure divided by 100 (presumably Pa -> hPa), or 0 when the
    intermediate divisor is zero.
    """
    var_1 = (self._temp_fine / 2.0) - 64000.0
    var_2 = ((var_1 / 4.0) * (var_1 / 4.0)) / 2048
    var_2 *= self._calibration_p[5]
    var_2 += ((var_1 * self._calibration_p[4]) * 2.0)
    var_2 = (var_2 / 4.0) + (self._calibration_p[3] * 65536.0)
    var_1 = (((self._calibration_p[2] *
               (((var_1 / 4.0) * (var_1 / 4.0)) / 8192)) / 8) +
             ((self._calibration_p[1] * var_1) / 2.0))
    var_1 /= 262144
    var_1 = ((32768 + var_1) * self._calibration_p[0]) / 32768
    if var_1 == 0:
        return 0  # guard: avoid division by zero below
    pressure = ((1048576 - adc_p) - (var_2 / 4096)) * 3125
    # Two code paths to avoid overflow-style issues near 2**31.
    if pressure < 0x80000000:
        pressure = (pressure * 2.0) / var_1
    else:
        pressure = (pressure / var_1) * 2
    var_1 = (self._calibration_p[8] *
             (((pressure / 8.0) * (pressure / 8.0)) / 8192.0)) / 4096
    var_2 = ((pressure / 4.0) * self._calibration_p[7]) / 8192.0
    pressure += ((var_1 + var_2 + self._calibration_p[6]) / 16.0)
    return pressure / 100
Compensate pressure .
10,195
def _compensate_humidity(self, adc_h):
    """Compensate humidity.

    Floating-point compensation in the style of the Bosch BME280
    reference code -- verify constants against the datasheet.  Requires
    ``self._temp_fine`` (set by ``_compensate_temperature``).  Returns
    relative humidity clamped to [0, 100]; returns 0 when
    ``self._temp_fine`` equals exactly 76800.0 (degenerate-case guard --
    NOTE(review): confirm intended semantics).
    """
    var_h = self._temp_fine - 76800.0
    if var_h == 0:
        return 0
    var_h = ((adc_h - (self._calibration_h[3] * 64.0 +
                       self._calibration_h[4] / 16384.0 * var_h)) *
             (self._calibration_h[1] / 65536.0 *
              (1.0 + self._calibration_h[5] / 67108864.0 * var_h *
               (1.0 + self._calibration_h[2] / 67108864.0 * var_h))))
    var_h *= 1.0 - self._calibration_h[0] * var_h / 524288.0
    # Clamp to the physically meaningful range.
    if var_h > 100.0:
        var_h = 100.0
    elif var_h < 0.0:
        var_h = 0.0
    return var_h
Compensate humidity .
10,196
def _take_forced_measurement ( self ) : self . _bus . write_byte_data ( self . _i2c_add , 0xF4 , self . ctrl_meas_reg ) while self . _bus . read_byte_data ( self . _i2c_add , 0xF3 ) & 0x08 : sleep ( 0.005 )
Take a forced measurement .
10,197
def update(self, first_reading=False):
    """Read raw data from the sensor and update compensated variables.

    On the first reading (or after a previously failed update, tracked
    via ``self._ok``) the configuration registers are rewritten and the
    calibration data reloaded.  Sets ``self._ok`` to reflect whether all
    enabled measurements yielded plausible values.
    """
    try:
        if first_reading or not self._ok:
            # (Re)configure the device: humidity ctrl (0xF2), config
            # (0xF5), measurement ctrl (0xF4), then reload calibration.
            self._bus.write_byte_data(self._i2c_add, 0xF2, self.ctrl_hum_reg)
            self._bus.write_byte_data(self._i2c_add, 0xF5, self.config_reg)
            self._bus.write_byte_data(self._i2c_add, 0xF4, self.ctrl_meas_reg)
            self._populate_calibration_data()
        if self.mode == 2:  # forced mode: trigger a one-shot measurement
            self._take_forced_measurement()
        data = []
        # Read the 8 measurement registers starting at 0xF7.
        for i in range(0xF7, 0xF7 + 8):
            data.append(self._bus.read_byte_data(self._i2c_add, i))
    except OSError as exc:
        # I2C bus error: mark the reading bad and bail out.
        self.log_error("Bad update: %s", exc)
        self._ok = False
        return
    # Assemble 20-bit pressure/temperature and 16-bit humidity values.
    pres_raw = (data[0] << 12) | (data[1] << 4) | (data[2] >> 4)
    temp_raw = (data[3] << 12) | (data[4] << 4) | (data[5] >> 4)
    hum_raw = (data[6] << 8) | data[7]
    self._ok = False
    temperature = self._compensate_temperature(temp_raw)
    # Accept only plausible values for each enabled quantity.
    if (temperature >= -20) and (temperature < 80):
        self._temperature = temperature
        self._ok = True
    if self._with_humidity:
        humidity = self._compensate_humidity(hum_raw)
        if (humidity >= 0) and (humidity <= 100):
            self._humidity = humidity
        else:
            self._ok = False
    if self._with_pressure:
        pressure = self._compensate_pressure(pres_raw)
        if pressure > 100:
            self._pressure = pressure
        else:
            self._ok = False
Read raw data and update compensated variables .
10,198
def append(self, element):
    """Append a PileupElement to this Pileup.

    If an identical PileupElement is already part of this Pileup, do
    nothing: ``self.elements`` is a dict used as an ordered set, keyed
    on the elements themselves.
    """
    assert element.locus == self.locus, (
        "Element locus (%s) != Pileup locus (%s)"
        % (element.locus, self.locus))
    # The value is irrelevant; only the key's presence matters.
    self.elements[element] = None
Append a PileupElement to this Pileup . If an identical PileupElement is already part of this Pileup do nothing .
10,199
def update(self, other):
    """Add all pileup elements from *other* into self.

    Both pileups must describe the same locus.
    """
    assert self.locus == other.locus
    self.elements.update(other.elements)
Add all pileup elements from other into self .