idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
6,600
def generate_xliff(entry_dict):
    """Generate an xliff document string to send to unbabel.

    Given a dictionary with keys = ids and values = strings, wraps each
    entry in a trans-unit element between the xliff head and tail.

    :param entry_dict: mapping of id -> translatable string
    :returns: the complete xliff document as a single string
    """
    # .items() instead of py2-only .iteritems(); join instead of
    # quadratic string concatenation in a loop.
    entries = "".join(
        create_trans_unit(key, value).strip() + "\n"
        for key, value in entry_dict.items()
    )
    return get_head_xliff().strip() + "\n" + entries + get_tail_xliff().strip()
Given a dictionary with keys = ids and values equal to strings, generates an xliff file to send to unbabel .
6,601
def Get(self, key):
    """Get alert by providing name, ID, or other unique key.

    Returns the first alert whose id or name equals *key*, else None.
    """
    for candidate in self.alerts:
        if key in (candidate.id, candidate.name):
            return candidate
    return None
Get alert by providing name ID or other unique key .
6,602
def Search(self, key):
    """Search alert list by partial name, ID, or other key (case-insensitive)."""
    needle = key.lower()
    return [alert for alert in self.alerts
            if needle in alert.id.lower() or needle in alert.name.lower()]
Search alert list by providing partial name ID or other key .
6,603
def _Login():
    """Login to retrieve a bearer token and set default account/location aliases.

    Reads clc.v2.V2_API_USERNAME / V2_API_PASSWD, POSTs them to the v2
    authentication endpoint, and on success caches the bearer token plus
    the account and location aliases on the global ``clc`` module.

    Raises:
        clc.APIV2NotEnabled: if credentials are not configured.
        Exception: on a 400 (bad credentials) or any other error response.
    """
    if not clc.v2.V2_API_USERNAME or not clc.v2.V2_API_PASSWD:
        clc.v1.output.Status('ERROR', 3, 'V2 API username and password not provided')
        raise (clc.APIV2NotEnabled)
    session = clc._REQUESTS_SESSION
    session.headers['content-type'] = "application/json"
    # Authenticate using the CA bundle shipped with the package.
    r = session.request("POST",
                        "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2, "authentication/login"),
                        json={"username": clc.v2.V2_API_USERNAME,
                              "password": clc.v2.V2_API_PASSWD},
                        verify=API._ResourcePath('clc/cacert.pem'))
    if r.status_code == 200:
        # Cache token and default aliases for subsequent API calls.
        clc._LOGIN_TOKEN_V2 = r.json()['bearerToken']
        clc.ALIAS = r.json()['accountAlias']
        clc.LOCATION = r.json()['locationAlias']
    elif r.status_code == 400:
        raise (Exception("Invalid V2 API login. %s" % (r.json()['message'])))
    else:
        raise (Exception("Error logging into V2 API. Response code %s. message %s" % (r.status_code, r.json()['message'])))
Login to retrieve bearer token and set default account and location aliases .
6,604
def Call(method, url, payload=None, session=None, debug=False):
    """Execute a v2 API call and return the parsed JSON response.

    :param method: HTTP verb, e.g. "GET" or "POST"
    :param url: absolute path (leading '/') or path relative to the /v2/ root
    :param payload: query params (GET) or request body (other verbs)
    :param session: optional dict with 'token' and 'http_session' overrides
    :param debug: when True, dump the prepared request and response
    :returns: decoded JSON dict ({} if the body is not JSON)
    :raises clc.APIFailedResponse: for any non-2xx status code
    """
    if session is not None:
        token = session['token']
        http_session = session['http_session']
    else:
        # Lazily log in the module-level session on first use.
        if not clc._LOGIN_TOKEN_V2:
            API._Login()
        token = clc._LOGIN_TOKEN_V2
        http_session = clc._REQUESTS_SESSION
    if payload is None:
        payload = {}
    # Leading '/' means the caller supplied a fully-qualified v2 path.
    if url[0] == '/':
        fq_url = "%s%s" % (clc.defaults.ENDPOINT_URL_V2, url)
    else:
        fq_url = "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2, url)
    http_session.headers.update({'Authorization': "Bearer %s" % token})
    # NOTE: basestring is Python-2-only; this module targets py2.
    if isinstance(payload, basestring):
        # Pre-serialized JSON body.
        http_session.headers['content-type'] = "Application/json"
    else:
        http_session.headers['content-type'] = "application/x-www-form-urlencoded"
    if method == "GET":
        r = http_session.request(method, fq_url,
                                 params=payload,
                                 verify=API._ResourcePath('clc/cacert.pem'))
    else:
        r = http_session.request(method, fq_url,
                                 data=payload,
                                 verify=API._ResourcePath('clc/cacert.pem'))
    if debug:
        API._DebugRequest(request=requests.Request(method, fq_url, data=payload, headers=http_session.headers).prepare(),
                          response=r)
    if r.status_code >= 200 and r.status_code < 300:
        try:
            return (r.json())
        except:
            # Successful response with a non-JSON (e.g. empty) body.
            return ({})
    else:
        try:
            # Prefer the server-supplied JSON error message.
            e = clc.APIFailedResponse("Response code %s. %s %s %s" % (r.status_code, r.json()['message'], method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = r.json()
            e.response_text = r.text
            raise (e)
        except clc.APIFailedResponse:
            raise
        except:
            # Body was not JSON; fall back to the raw response text.
            e = clc.APIFailedResponse("Response code %s. %s. %s %s" % (r.status_code, r.text, method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = {}
            e.response_text = r.text
            raise (e)
Execute v2 API call .
6,605
def get_external_references(self):
    """Yield the external references of the element (nothing if absent)."""
    refs_node = self.node.find('externalReferences')
    if refs_node is None:
        return
    for ext_ref in CexternalReferences(refs_node):
        yield ext_ref
Returns the external references of the element
6,606
def add_external_reference(self, ext_ref):
    """Add an external reference to the role, creating the container if needed."""
    existing = self.node.find('externalReferences')
    if existing is None:
        container = CexternalReferences()
        self.node.append(container.get_node())
    else:
        container = CexternalReferences(existing)
    container.add_external_reference(ext_ref)
Adds an external reference to the role
6,607
def remove_external_references(self):
    """Remove every external reference container from this element."""
    for refs_node in self.node.findall('externalReferences'):
        self.node.remove(refs_node)
Removes any external reference from the role
6,608
def remove_external_references_from_roles(self):
    """Strip external references from every role of the predicate."""
    for child in self.node.findall('role'):
        Crole(child).remove_external_references()
Removes any external references on any of the roles from the predicate
6,609
def add_roles(self, list_of_roles):
    """Append each role object's node to the predicate node."""
    for role_obj in list_of_roles:
        self.node.append(role_obj.get_node())
Adds a list of roles to the predicate
6,610
def add_role(self, role_obj):
    """Append a single role object's node to the predicate node."""
    self.node.append(role_obj.get_node())
Add a role to the predicate
6,611
def add_external_reference_to_role(self, role_id, ext_ref):
    """Attach an external reference to the role with the given identifier."""
    role_node = self.map_roleid_node[role_id]
    Crole(role_node).add_external_reference(ext_ref)
Adds an external reference to a role identifier
6,612
def add_predicate(self, pred_obj):
    """Add a predicate object to the layer; ids must be unique."""
    pred_id = pred_obj.get_id()
    if pred_id in self.idx:
        print('Error: trying to add new element, but id has already been given')
        return
    pred_node = pred_obj.get_node()
    self.node.append(pred_node)
    self.idx[pred_id] = pred_node
Adds a predicate object to the layer
6,613
def display_dp_matrix_attr(dp_matrix, attr_name):
    """Print the value of *attr_name* for each DataProperty in the matrix."""
    print()
    print("---------- {:s} ----------".format(attr_name))
    for row in dp_matrix:
        print([getattr(item, attr_name) for item in row])
show a value associated with an attribute for each DataProperty instance in the dp_matrix
6,614
def _query(self, sql, *args):
    """Execute *sql* and return the cursor, lazily opening the connection.

    Whitespace in *sql* is normalized before execution; sqlite errors are
    re-raised as InvalidFormatError.
    """
    if not self._con:
        logger.debug(("Open MBTiles file '%s'") % self.filename)
        self._con = sqlite3.connect(self.filename)
        self._cur = self._con.cursor()
    normalized = ' '.join(sql.split())
    logger.debug(("Execute query '%s' %s") % (normalized, args))
    try:
        self._cur.execute(normalized, *args)
    except (sqlite3.OperationalError, sqlite3.DatabaseError) as exc:
        raise InvalidFormatError(("%s while reading %s") % (exc, self.filename))
    return self._cur
Executes the specified sql query and returns the cursor
6,615
def set_comment(self, c):
    """Insert an XML comment at the top of the node ('-' removed: illegal in comments)."""
    text = ' ' + c.replace('-', '').strip() + ' '
    self.node.insert(0, etree.Comment(text))
Sets the comment for the element
6,616
def set_id(self, my_id):
    """Set the opinion identifier ('id' for NAF, 'oid' for KAF)."""
    attr_by_type = {'NAF': 'id', 'KAF': 'oid'}
    attr = attr_by_type.get(self.type)
    if attr is not None:
        self.node.set(attr, my_id)
Sets the opinion identifier
6,617
def to_kaf(self):
    """Convert the opinion layer to KAF (rename the 'id' attribute to 'oid')."""
    if self.type != 'NAF':
        return
    for opinion_node in self.__get_opinion_nodes():
        opinion_node.set('oid', opinion_node.get('id'))
        del opinion_node.attrib['id']
Converts the opinion layer to KAF
6,618
def to_naf(self):
    """Convert the opinion layer to NAF (rename the 'oid' attribute to 'id')."""
    if self.type != 'KAF':
        return
    for opinion_node in self.__get_opinion_nodes():
        opinion_node.set('id', opinion_node.get('oid'))
        del opinion_node.attrib['oid']
Converts the opinion layer to NAF
6,619
def remove_this_opinion(self, opinion_id):
    """Remove the first opinion that matches the given identifier."""
    match = next((o for o in self.get_opinions() if o.get_id() == opinion_id), None)
    if match is not None:
        self.node.remove(match.get_node())
Removes the opinion for the given opinion identifier
6,620
def GetAccountDetails(alias=None):
    """Return the account details dict for the provided alias.

    :param alias: account alias; defaults to the account's own alias.
    :returns: the 'AccountDetails' dict with 'Status' mapped to its string form.
    :raises Exception: when the API reports failure.
    """
    if not alias:
        alias = Account.GetAlias()
    r = clc.v1.API.Call('post', 'Account/GetAccountDetails', {'AccountAlias': alias})
    if r['Success'] != True:
        if clc.args:
            clc.v1.output.Status('ERROR', 3, 'Error calling %s. Status code %s. %s' % ('Account/GetAccountDetails', r['StatusCode'], r['Message']))
        raise Exception('Error calling %s. Status code %s. %s' % ('Account/GetAccountDetails', r['StatusCode'], r['Message']))
    elif int(r['StatusCode']) == 0:
        # Map the numeric status code to its human-readable name.
        r['AccountDetails']['Status'] = Account.account_status_itos[r['AccountDetails']['Status']]
        return (r['AccountDetails'])
Return account details dict associated with the provided alias .
6,621
def GetAccounts(alias=None):
    """Return account inventory (all subaccounts) for the given alias.

    If *alias* is None the API searches from the default alias. As a side
    effect, seeds the global clc.ALIAS / clc.LOCATION defaults from the
    first returned account when they are unset.
    """
    if alias is not None:
        payload = {'AccountAlias': alias}
    else:
        payload = {}
    r = clc.v1.API.Call('post', 'Account/GetAccounts', payload)
    if int(r['StatusCode']) == 0:
        # Seed module-level defaults from the first account returned.
        if not clc.ALIAS:
            clc.ALIAS = r['Accounts'][0]['AccountAlias']
        if not clc.LOCATION:
            clc.LOCATION = r['Accounts'][0]['Location']
        return (r['Accounts'])
Return account inventory dict containing all subaccounts for the given alias . If None search from default alias .
6,622
def assure_cache(project_path=None):
    """Ensure the project directory contains a cache folder, creating it if missing."""
    project_dir = path(project_path, ISDIR)
    cache_dir = os.path.join(project_dir, CACHE_NAME)
    if not os.path.isdir(cache_dir):
        os.mkdir(cache_dir)
Assure that a project directory has a cache folder . If not it will create it .
6,623
def purge_project():
    """Purge the current directory of anything cyther-related.

    Interactively lists the local cache directory and, after two
    confirmations, deletes its contents and removes the directory.
    """
    print('Current Directory: {}'.format(os.getcwd()))
    directories = os.listdir(os.getcwd())
    if CACHE_NAME in directories:
        # NOTE(review): adjacent literals lack a separating space, so the
        # prompt reads "...cache andeverything..." — confirm intended.
        response = get_input("Would you like to delete the cache and" "everything in it? [y/n]: ", ('y', 'n'))
        if response == 'y':
            print("Listing local '__cythercache__':")
            cache_dir = os.path.join(os.getcwd(), "__cythercache__")
            to_delete = []
            contents = os.listdir(cache_dir)
            if contents:
                for filename in contents:
                    print('\t' + filename)
                    filepath = os.path.join(cache_dir, filename)
                    to_delete.append(filepath)
            else:
                print("\tNothing was found in the cache")
            # Second confirmation before actually removing anything.
            check_response = get_input("Delete all these files? (^)" "[y/n]: ", ('y', 'n'))
            if check_response == 'y':
                for filepath in to_delete:
                    os.remove(filepath)
                os.rmdir(cache_dir)
            else:
                print("Skipping the deletion... all files are fine!")
        else:
            print("Skipping deletion of the cache")
    else:
        print("Couldn't find a cache file ('{}') in this " "directory".format(CACHE_NAME))
Purge a directory of anything cyther related
6,624
def map(func, items, pool_size=10):
    """A parallelized work-alike to the built-in map function.

    Feeds every item to an OrderedPool and yields results in order.

    :param func: callable applied to each item by the pool workers
    :param items: iterable of inputs
    :param pool_size: number of pool workers
    """
    with OrderedPool(func, pool_size) as pool:
        # Track how many items were submitted; the original referenced the
        # loop variable after the loop, which raised NameError for an
        # empty iterable. Counting from 1 also removes the old count+1.
        total = 0
        for total, item in enumerate(items, 1):
            pool.put(item)
        for _ in range(total):
            yield pool.get()
a parallelized work - alike to the built - in map function
6,625
def start(self):
    """Start the pool's workers and mark the pool as open."""
    for _ in xrange(self.size):
        scheduler.schedule(self._runner)
    self._closing = False
start the pool s workers
6,626
def put(self, *args, **kwargs):
    """Place a new item into the pool to be handled by the workers."""
    sequence_number = self._putcount
    self.inq.put((sequence_number, (args, kwargs)))
    self._putcount = sequence_number + 1
place a new item into the pool to be handled by the workers
6,627
def get_entity(self, entity_id):
    """Return the Centity for the given identifier, or None if not found.

    Tries the id->node map first, then falls back to a linear scan over
    the entity nodes.
    """
    cached_node = self.map_entity_id_to_node.get(entity_id)
    if cached_node is not None:
        return Centity(node=cached_node, type=self.type)
    for candidate in self.__get_entity_nodes():
        if self.type == 'NAF':
            label = 'id'
        elif self.type == 'KAF':
            label = 'eid'
        if candidate.get(label) == entity_id:
            return Centity(node=candidate, type=self.type)
    return None
Returns the entity object for the given entity identifier
6,628
def add_external_reference_to_entity(self, entity_id, ext_ref):
    """Add an external reference to the entity with the given identifier.

    Logs a warning to stderr when the entity is not present in this file.
    """
    node_entity = self.map_entity_id_to_node.get(entity_id)
    if node_entity is not None:
        entity = Centity(node_entity, self.type)
        entity.add_external_reference(ext_ref)
    else:
        # The original used the py2 "print >> sys.stderr" statement,
        # which is a syntax error on Python 3.
        print('Trying to add a reference to the entity', entity_id,
              'but can not be found in this file', file=sys.stderr)
Adds an external reference to a entity specified by the entity identifier
6,629
def to_kaf(self):
    """Convert the entity layer to KAF (rename the 'id' attribute to 'eid')."""
    if self.type != 'NAF':
        return
    for entity_node in self.__get_entity_nodes():
        entity_node.set('eid', entity_node.get('id'))
        del entity_node.attrib['id']
Converts the layer from NAF to KAF
6,630
def to_naf(self):
    """Convert the entity layer to NAF (rename the 'eid' attribute to 'id')."""
    if self.type != 'KAF':
        return
    for entity_node in self.__get_entity_nodes():
        entity_node.set('id', entity_node.get('eid'))
        del entity_node.attrib['eid']
Converts the layer from KAF to NAF
6,631
def updateImage(self, val):
    """Main update method (translated from the Czech summary).

    Runs Gaussian filtering, thresholding, binary closing/opening and
    keeps the largest or seeded objects, then refreshes the visualization
    when running interactively.

    :param val: slider callback value (unused directly; state is read
        from the slider widgets)
    """
    # Choose sigma: the configured input value on first run or in
    # non-interactive mode, otherwise the current slider value.
    if (self.firstRun == True and self.inputSigma >= 0):
        sigma = np.round(self.inputSigma, 2)
    elif self.interactivity:
        sigma = np.round(self.ssigma.val, 2)
    else:
        sigma = np.round(self.inputSigma, 2)
    if self.interactivity:
        # Round the slider values and mirror them into the widget labels.
        self.smin.val = (np.round(self.smin.val, 2))
        self.smin.valtext.set_text('{}'.format(self.smin.val))
        self.smax.val = (np.round(self.smax.val, 2))
        self.smax.valtext.set_text('{}'.format(self.smax.val))
        self.threshold = self.smin.val
        self.threshold_upper = self.smax.val
        closeNum = int(np.round(self.sclose.val, 0))
        openNum = int(np.round(self.sopen.val, 0))
        self.sclose.valtext.set_text('{}'.format(closeNum))
        self.sopen.valtext.set_text('{}'.format(openNum))
    else:
        closeNum = self.ICBinaryClosingIterations
        openNum = self.ICBinaryOpeningIterations
    # Delegate the actual image processing pipeline.
    self.imgFiltering, self.threshold = make_image_processing(data=self.data, voxelsize_mm=self.voxelsize_mm, seeds=self.seeds, sigma_mm=sigma, min_threshold=self.threshold, max_threshold=self.threshold_upper, closeNum=closeNum, openNum=openNum, min_threshold_auto_method=self.auto_method, fill_holes=self.fillHoles, get_priority_objects=self.get_priority_objects, nObj=self.nObj)
    if (self.interactivity == True):
        self.drawVisualization()
    self.firstRun = False
    # NOTE(review): 'garbage' is presumably an alias of the gc module —
    # confirm against the file's imports.
    garbage.collect()
    self.debugInfo()
Hlavni update metoda . Cinny kod pro gaussovske filtrovani prahovani binarni uzavreni a otevreni a vraceni nejvetsich nebo oznacenych objektu .
6,632
def GetAll(alias=None, location=None, session=None):
    """Get a list of anti-affinity policies within a given account.

    :param alias: account alias; defaults to the session account's alias.
    :param location: optional location filter (case-insensitive).
    :param session: optional API session override.
    :returns: list of AntiAffinity objects.
    """
    if not alias:
        alias = clc.v2.Account.GetAlias(session=session)
    policies = []
    policy_resp = clc.v2.API.Call('GET', 'antiAffinityPolicies/%s' % alias, {}, session=session)
    for k in policy_resp:
        r_val = policy_resp[k]
        for r in r_val:
            # Skip entries without a location and entries that do not
            # match the requested location filter.
            if r.get('location'):
                if location and r['location'].lower() != location.lower():
                    continue
                # Extract the server ids from the policy's link list.
                servers = [obj['id'] for obj in r['links'] if obj['rel'] == "server"]
                policies.append(AntiAffinity(id=r['id'], name=r['name'], location=r['location'], servers=servers, session=session))
    return (policies)
Gets a list of anti - affinity policies within a given account .
6,633
def GetLocation(location=None, alias=None, session=None):
    """Return anti-affinity policies within a specific location (default: account location)."""
    loc = location or clc.v2.Account.GetLocation(session=session)
    return AntiAffinity.GetAll(alias=alias, location=loc, session=session)
Returns a list of anti - affinity policies within a specific location .
6,634
def Create(name, alias=None, location=None, session=None):
    """Create a new anti-affinity policy within a given account."""
    alias = alias or clc.v2.Account.GetAlias(session=session)
    location = location or clc.v2.Account.GetLocation(session=session)
    payload = json.dumps({'name': name, 'location': location})
    r = clc.v2.API.Call('POST', 'antiAffinityPolicies/%s' % alias, payload, session=session)
    return AntiAffinity(id=r['id'], name=r['name'], location=r['location'],
                        servers=[], session=session)
Creates a new anti - affinity policy within a given account .
6,635
def Update(self, name):
    """Change the policy's name via the API and mirror it locally."""
    clc.v2.API.Call('PUT',
                    'antiAffinityPolicies/%s/%s' % (self.alias, self.id),
                    {'name': name},
                    session=self.session)
    self.name = name
Change the policy s name .
6,636
def _node(handler, single=None, multi=None):
    """Return an _AbstractSyntaxTreeNode with the list fields defaulted to []."""
    return _AbstractSyntaxTreeNode(handler=handler,
                                   single=single or [],
                                   multi=multi or [])
Return an _AbstractSyntaxTreeNode with some elements defaulted .
6,637
def _recurse(node, *args, **kwargs):
    """Recursive worker: visit *node*, invoke its handler, then descend.

    Nodes without an entry in _NODE_INFO_TABLE are silently skipped.
    """
    try:
        info = _NODE_INFO_TABLE[node.__class__.__name__]
    except KeyError:
        return
    depth = kwargs["depth"]
    action = kwargs[info.handler]
    if action is not None:
        action(node.__class__.__name__, node, depth)
    child_kwargs = dict(kwargs, depth=depth + 1)
    for attr in info.single:
        _recurse(getattr(node, attr), *args, **child_kwargs)
    for attr in info.multi:
        for child in getattr(node, attr):
            _recurse(child, *args, **child_kwargs)
Recursive print worker - recurses the AST and prints each node .
6,638
def recurse(node, *args, **kwargs):
    """Entry point for AST recursion: seed handler kwargs and start at depth 0."""
    fwd = {info.handler: kwargs.get(info.handler)
           for info in _NODE_INFO_TABLE.values()}
    fwd["depth"] = 0
    _recurse(node, *args, **fwd)
Entry point for AST recursion .
6,639
def get_filename_filled_with_checked_labels(self, labels=None):
    """Build the vtk filename from the (checked) labels joined with '-'."""
    if labels is None:
        labels = self.slab_wg.action_check_slab_ui()
    label_strings = imma.get_nlabels(slab=self.slab_wg.slab, labels=labels,
                                     return_mode="str")
    return self.vtk_file.format("-".join(label_strings))
Fill used labels into filename
6,640
def patched(module_name):
    """Import and return *module_name* with greenhouse patches applied locally only.

    Temporarily installs patched copies of all patchable modules into
    sys.modules, imports the target, then restores the previous state.
    """
    if module_name in _patchers:
        return _patched_copy(module_name, _patchers[module_name])
    # Remember what was in sys.modules so it can be restored afterwards.
    old_module = sys.modules.pop(module_name, None)
    saved = [(module_name, old_module)]
    # .items() instead of py2-only .iteritems().
    for name, patch in _patchers.items():
        new_mod = _patched_copy(name, patch)
        # NOTE(review): pop() without a default assumes every patchable
        # module is already imported — confirm against callers.
        saved.append((name, sys.modules.pop(name)))
        sys.modules[name] = new_mod
    try:
        result = __import__(module_name, {}, {}, module_name.rsplit(".", 1)[0])
    finally:
        # Restore sys.modules exactly as it was.
        for name, old_mod in saved:
            if old_mod is None:
                sys.modules.pop(name, None)
            else:
                sys.modules[name] = old_mod
    return result
import and return a named module with patches applied locally only
6,641
def patched_context(*module_names, **kwargs):
    """Apply emulation patches only for a specific context (generator-based).

    :param module_names: modules to patch for the duration of the context
    :param local: keyword-only flag; when True, patches are toggled on
        every scheduler switch into/out of the coroutine
    """
    local = kwargs.pop('local', False)
    if kwargs:
        # list(kwargs)[0] works on both py2 and py3; the original
        # kwargs.keys()[0] raises TypeError on py3 (views aren't indexable).
        raise TypeError("patched_context() got an unexpected keyword " +
                        "argument %r" % list(kwargs)[0])
    patch(*module_names)
    if local:
        @scheduler.local_incoming_hook
        @scheduler.local_outgoing_hook
        def hook(direction, target):
            # direction 1 = switching in (re-patch), 2 = switching out (unpatch).
            {1: patch, 2: unpatch}[direction](*module_names)
    yield
    unpatch(*module_names)
    if local:
        scheduler.remove_local_incoming_hook(hook)
        scheduler.remove_local_outgoing_hook(hook)
apply emulation patches only for a specific context
6,642
def patch(*module_names):
    """Apply greenhouse monkey-patches to stdlib modules in place.

    With no arguments, patches every patchable module. Raises ValueError
    for a name that has no registered patches.
    """
    if not module_names:
        module_names = _patchers.keys()
    log.info("monkey-patching in-place (%d modules)" % len(module_names))
    # Validate everything up front so nothing is patched on a bad name.
    for module_name in module_names:
        if module_name not in _patchers:
            raise ValueError("'%s' is not greenhouse-patchable" % module_name)
    for module_name in module_names:
        if module_name in sys.modules:
            module = sys.modules[module_name]
        else:
            module = __import__(module_name, {}, {}, module_name.rsplit(".", 1)[0])
        for attr, replacement in _patchers[module_name].items():
            setattr(module, attr, replacement)
apply monkey - patches to stdlib modules in - place
6,643
def from_json(data):
    """Decode an Event that the processor serialized as JSON."""
    parsed = json.loads(data)
    trigger_info = parsed['trigger']
    trigger = TriggerInfo(trigger_info['class'], trigger_info['kind'])
    content_type = parsed['content_type']
    return Event(body=Event.decode_body(parsed['body'], content_type),
                 content_type=content_type,
                 trigger=trigger,
                 fields=parsed.get('fields'),
                 headers=parsed.get('headers'),
                 _id=parsed['id'],
                 method=parsed['method'],
                 path=parsed['path'],
                 size=parsed['size'],
                 timestamp=datetime.datetime.utcfromtimestamp(parsed['timestamp']),
                 url=parsed['url'],
                 _type=parsed['type'],
                 type_version=parsed['type_version'],
                 version=parsed['version'])
Decode event encoded as JSON by processor
6,644
def decode_body(body, content_type):
    """Decode an event body.

    Dicts pass through unchanged; otherwise the body is base64-decoded,
    and JSON bodies are additionally parsed. On any decode failure the
    least-processed form is returned.
    """
    if isinstance(body, dict):
        return body
    try:
        decoded_body = base64.b64decode(body)
    except (binascii.Error, TypeError, ValueError):
        # Not valid base64 (or not bytes-like) — return as-is.
        # Narrowed from the original bare except.
        return body
    if content_type == 'application/json':
        try:
            return json.loads(decoded_body)
        except ValueError:
            # Covers JSONDecodeError and undecodable bytes.
            pass
    return decoded_body
Decode event body
6,645
def furtherArgsProcessing(args):
    """Normalize args (str | argparse.Namespace | dict) into a plain dict.

    Also deals with incongruities argparse couldn't handle: watch mode
    forces timestamping, stats tracking and arg printing.
    """
    if isinstance(args, str):
        tokens = args.strip().split(' ')
        if tokens[0] == 'cyther':
            tokens = tokens[1:]
        args = parser.parse_args(tokens).__dict__
    elif isinstance(args, argparse.Namespace):
        args = args.__dict__
    elif not isinstance(args, dict):
        raise CytherError("Args must be a instance of str or argparse.Namespace, not '{}'".format(str(type(args))))
    if args['watch']:
        args['timestamp'] = True
        args['watch_stats'] = {'counter': 0, 'errors': 0, 'compiles': 0, 'polls': 0}
        args['print_args'] = True
    return args
Converts args and deals with incongruities that argparse couldn t handle
6,646
def processFiles(args):
    """Generate and error-check each file's build info before compilation starts.

    :param args: normalized options dict (filenames, include, local,
        watch, output_name).
    :returns: list of per-file dicts with resolved paths and names.
    :raises CytherError: for non-cython files or a bad output directory.
    """
    to_process = []
    for filename in args['filenames']:
        file = dict()
        # Extra -I include flags appended to the base include string.
        if args['include']:
            file['include'] = INCLUDE_STRING + ''.join(['-I' + item for item in args['include']])
        else:
            file['include'] = INCLUDE_STRING
        file['file_path'] = getPath(filename)
        file['file_base_name'] = os.path.splitext(os.path.basename(file['file_path']))[0]
        file['no_extension'], file['extension'] = os.path.splitext(file['file_path'])
        if file['extension'] not in CYTHONIZABLE_FILE_EXTS:
            raise CytherError("The file '{}' is not a designated cython file".format(file['file_path']))
        base_path = os.path.dirname(file['file_path'])
        local_build = args['local']
        if not local_build:
            # Non-local builds write the intermediate C file into a cache dir.
            cache_name = os.path.join(base_path, '__cythercache__')
            os.makedirs(cache_name, exist_ok=True)
            file['c_name'] = os.path.join(cache_name, file['file_base_name']) + '.c'
        else:
            file['c_name'] = file['no_extension'] + '.c'
        file['object_file_name'] = os.path.splitext(file['c_name'])[0] + '.o'
        output_name = args['output_name']
        if args['watch']:
            file['output_name'] = file['no_extension'] + DEFAULT_OUTPUT_EXTENSION
        elif output_name:
            if os.path.exists(output_name) and os.path.isfile(output_name):
                file['output_name'] = output_name
            else:
                dirname = os.path.dirname(output_name)
                if not dirname:
                    dirname = os.getcwd()
                if os.path.exists(dirname):
                    file['output_name'] = output_name
                else:
                    # NOTE(review): adjacent literals lack a separating
                    # space ("write" "the") — message reads "writethe".
                    raise CytherError('The directory specified to write' 'the output file in does not exist')
        else:
            file['output_name'] = file['no_extension'] + DEFAULT_OUTPUT_EXTENSION
        file['stamp_if_error'] = 0
        to_process.append(file)
    return to_process
Generates and error checks each file s information before the compilation actually starts
6,647
def makeCommands(file):
    """Build the three command lines (cythonize, compile, link) for one file."""
    cythonize_cmd = ['cython', '-a', '-p', '-o', file['c_name'], file['file_path']]
    compile_cmd = ['gcc', '-DNDEBUG', '-g', '-fwrapv', '-O3', '-Wall', '-Wextra',
                   '-pthread', '-fPIC', '-c', file['include'], '-o',
                   file['object_file_name'], file['c_name']]
    link_cmd = ['gcc', '-g', '-Wall', '-Wextra', '-pthread', '-shared',
                RUNTIME_STRING, '-o', file['output_name'],
                file['object_file_name'], L_OPTION]
    return [cythonize_cmd, compile_cmd, link_cmd]
Given a high level preset it will construct the basic args to pass over . ninja beast minimal swift
6,648
def collection(et_model, variable, collections, start_date, end_date,
               t_interval, geometry, **kwargs):
    """Generic OpenET collection builder.

    Imports the requested ET model package and delegates to its
    ``collection`` function.

    :param et_model: model name, 'ndvi' or 'ssebop' (case-insensitive)
    :returns: the model's variable collection, or False when the model
        package is not installed
    :raises ValueError: for an unsupported et_model
    """
    model_key = et_model.lower()
    if model_key not in ('ndvi', 'ssebop'):
        raise ValueError('unsupported et_model type')
    # Single import/except block instead of one duplicated per model.
    try:
        if model_key == 'ndvi':
            import openet.ndvi as model
        else:
            import openet.ssebop as model
    except ModuleNotFoundError:
        print('\nThe ET model {} could not be imported'.format(et_model) +
              '\nPlease ensure that the model has been installed')
        return False
    except Exception as e:
        print('Unhandled Exception: {}'.format(e))
        raise
    return model.collection(variable, collections, start_date, end_date,
                            t_interval, geometry, **kwargs)
Generic OpenET Collection
6,649
def get_terminals_as_list(self):
    """Return all terminal objects as a list."""
    return [Cterminal(t_node) for t_node in self.__get_t_nodes()]
Iterator that returns all the terminal objects
6,650
def get_edges_as_list(self):
    """Return all edge objects as a list."""
    return [Cedge(edge_node) for edge_node in self.__get_edge_nodes()]
Iterator that returns all the edge objects
6,651
def select_labels(self, labels=None):
    """Prepare binary segmentation based on the input segmentation and labels."""
    self._resize_if_required()
    self.resized_binar_segmentation = self._select_labels(
        self.resized_segmentation, labels)
Prepare binar segmentation based on input segmentation and labels .
6,652
def _select_labels(self, segmentation, labels=None):
    """Return the selection of the given labels from the input segmentation.

    Falls back to thresholding above the smallest value when no slab or
    labels are available.
    """
    logger.debug("select_labels() started with labels={}".format(labels))
    if self.slab is None or labels is None:
        logger.warning("Nothing found for labels " + str(labels))
        unique_values = np.unique(segmentation)
        if len(unique_values) < 2:
            logger.error("Just one label found in input segmenation")
        return (segmentation > unique_values[0]).astype(segmentation.dtype)
    return select_labels(segmentation, labels, slab=self.slab)
Get selection of labels from input segmentation
6,653
def Get(self, key):
    """Get template by providing name, ID, or other unique key.

    The docstring (and the sibling alerts.Get) promises lookup by name
    as well, but only the id was checked — the name comparison is added
    for consistency. Returns None when nothing matches.
    """
    for template in self.templates:
        if template.id == key:
            return (template)
        elif template.name == key:
            return (template)
Get template by providing name ID or other unique key .
6,654
def Search(self, key):
    """Search template list by partial name, ID, or other key (case-insensitive)."""
    needle = key.lower()
    return [tpl for tpl in self.templates
            if needle in tpl.id.lower() or needle in tpl.name.lower()]
Search template list by providing partial name ID or other key .
6,655
def SecondsToZuluTS(secs=None):
    """Return a Zulu (UTC, 'Z'-suffixed ISO) timestamp from unix seconds.

    :param secs: unix epoch seconds; defaults to the current time.

    The original used ``if not secs``, which wrongly replaced a valid
    epoch of 0 with the current time; only None now means "use now".
    """
    if secs is None:
        secs = int(time.time())
    return (datetime.utcfromtimestamp(secs).strftime("%Y-%m-%dT%H:%M:%SZ"))
Returns Zulu TS from unix time seconds .
6,656
def main():
    """Command-line entry point for DistanceClassifier.

    Parses CLI options, loads the input file, fits a DistanceClassifier
    on all rows and prints its training-set score.
    """
    parser = argparse.ArgumentParser(description='DistanceClassifier for classification based on distance measure in feature space.', add_help=False)
    parser.add_argument('INPUT_FILE', type=str, help='Data file to perform DistanceClassifier on; ensure that the class label column is labeled as "class".')
    parser.add_argument('-h', '--help', action='help', help='Show this help message and exit.')
    parser.add_argument('-is', action='store', dest='INPUT_SEPARATOR', default='\t', type=str, help='Character used to separate columns in the input file.')
    parser.add_argument('-d', action='store', dest='D', default='mahalanobis', choices=['mahalanobis', 'euclidean'], type=str, help='Distance metric to use.')
    parser.add_argument('-v', action='store', dest='VERBOSITY', default=1, choices=[0, 1, 2], type=int, help='How much information DistanceClassifier communicates while it is running: 0 = none, 1 = minimal, 2 = all.')
    parser.add_argument('-s', action='store', dest='RANDOM_STATE', default=0, type=int, help='Random state for train/test split.')
    parser.add_argument('--version', action='version', version='DistanceClassifier {version}'.format(version=__version__), help='Show DistanceClassifier\'s version number and exit.')
    args = parser.parse_args()
    if args.VERBOSITY >= 2:
        print('\nDistanceClassifier settings:')
        for arg in sorted(args.__dict__):
            print('{}\t=\t{}'.format(arg, args.__dict__[arg]))
        print('')
    input_data = pd.read_csv(args.INPUT_FILE, sep=args.INPUT_SEPARATOR)
    # NOTE(review): the condition tests for a 'Class' column but renames
    # 'Label' -> 'label'; the code below requires a 'label' column —
    # confirm the intended column names.
    if 'Class' in input_data.columns.values:
        input_data.rename(columns={'Label': 'label'}, inplace=True)
    # NOTE(review): RANDOM_STATE is computed but never used below.
    RANDOM_STATE = args.RANDOM_STATE if args.RANDOM_STATE > 0 else None
    dc = DistanceClassifier(d=args.D)
    dc.fit(input_data.drop('label', axis=1).values, input_data['label'].values)
    # Score on the same data that was used for fitting.
    print(dc.score(input_data.drop('label', axis=1).values, input_data['label'].values))
Main function that is called when DistanceClassifier is run on the command line
6,657
def fit(self, features, classes):
    """Construct the DistanceClassifier from the provided training data.

    Stores the per-class mean (self.mu) and, for the mahalanobis metric,
    the per-class covariance (self.Z). Returns self.
    """
    encoded = self.le.fit_transform(classes)
    self.mu = []
    self.Z = []
    for label in np.unique(encoded):
        members = features[encoded == label]
        self.mu.append(np.mean(members, axis=0))
        if self.d == 'mahalanobis':
            self.Z.append(np.cov(members.transpose()))
    return self
Constructs the DistanceClassifier from the provided training data
6,658
def predict(self, features):
    """Predict class outputs for an unlabelled feature set."""
    picks = [np.argmin(self._distance(sample)) for sample in features]
    return self.le.inverse_transform(picks)
Predict class outputs for an unlabelled feature set
6,659
def _distance ( self , x ) : distance = np . empty ( [ len ( self . mu ) ] ) for i in np . arange ( len ( self . mu ) ) : if self . d == 'mahalanobis' and self . is_invertible ( self . Z [ i ] ) : distance [ i ] = ( x - self . mu [ i ] ) . dot ( np . linalg . inv ( self . Z [ i ] ) ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) else : distance [ i ] = ( x - self . mu [ i ] ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) return distance
returns distance measures for features
6,660
def score(self, features, classes, scoring_function=accuracy_score, **scoring_function_kwargs):
    """Estimate the accuracy of predictions against the given classes.

    :raises ValueError: if fit() has not been called yet.
    """
    if not self.mu:
        raise ValueError('The DistanceClassifier model must be fit before score() can be called')
    predictions = self.predict(features)
    return scoring_function(classes, predictions, **scoring_function_kwargs)
Estimates the accuracy of the predictions from the constructed feature
6,661
def is_invertible(self, X):
    """Check whether X is a square, full-rank (hence invertible) matrix."""
    if len(X.shape) != 2:
        return False
    rows, cols = X.shape
    return rows == cols and np.linalg.matrix_rank(X) == rows
checks if Z is invertible
6,662
def get_span_ids(self):
    """Return the list of span ids of the term ([] when it has no span)."""
    span_node = self.node.find('span')
    if span_node is None:
        return []
    return Cspan(span_node).get_span_ids()
Returns the span object of the term
6,663
def set_span_from_ids(self, span_list):
    """Create a span from the given ids and attach it to the term node."""
    span = Cspan()
    span.create_from_ids(span_list)
    self.node.append(span.get_node())
Sets the span for the term from list of ids
6,664
def get_term(self, term_id):
    """Return the Cterm for the supplied identifier, or None when absent."""
    if term_id not in self.idx:
        return None
    return Cterm(self.idx[term_id], self.type)
Returns the term object for the supplied identifier
6,665
def add_term(self, term_obj):
    """Append *term_obj* to the layer; raise ValueError on a duplicate id."""
    term_id = term_obj.get_id()
    if term_id in self.idx:
        raise ValueError("Term with id {} already exists!".format(term_id))
    self.node.append(term_obj.get_node())
    self.idx[term_id] = term_obj
Adds a term object to the layer
6,666
def add_external_reference(self, term_id, external_ref):
    """Attach *external_ref* to the term identified by *term_id*.

    Prints a diagnostic message when the term id is unknown.
    """
    node = self.idx.get(term_id)
    if node is not None:
        Cterm(node, self.type).add_external_reference(external_ref)
    else:
        print('{term_id} not in self.idx'.format(**locals()))
Adds an external reference for the given term
6,667
def init_slab(self, slab=None, segmentation=None, voxelsize_mm=None, show_ok_button=False):
    """Populate the widget with segmentation label info used to select labels."""
    self.segmentation = segmentation
    self.voxelsize_mm = voxelsize_mm
    from . import show_segmentation
    self.slab = show_segmentation.create_slab_from_segmentation(
        self.segmentation, slab=slab)
    if show_ok_button:
        # Optional confirmation button wired to the widget's handler.
        btn = QPushButton("Ok")
        btn.clicked.connect(self._action_ok_button)
        self.superMainScrollLayout.addWidget(btn)
Create widget with segmentation labels information used to select labels .
6,668
def GetServers(location, group=None, alias=None, name_groups=False):
    """Deep-list all servers for a hardware group (and sub-groups) or location."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    request = {'AccountAlias': alias}
    if group:
        request['HardwareGroupUUID'] = clc.v1.Group.GetGroupUUID(group, alias, location)
    else:
        request['Location'] = location
    try:
        response = clc.v1.API.Call('post', 'Server/GetAllServers', request)
        if name_groups:
            response['Servers'] = clc.v1.Group.NameGroups(
                response['Servers'], 'HardwareGroupUUID')
        if int(response['StatusCode']) == 0:
            return response['Servers']
    except Exception as e:
        # The API reports an empty group/location as an error; map it to [].
        if str(e) == "Hardware does not exist for location":
            return []
        raise
Gets a deep list of all Servers for a given Hardware Group and its sub groups or all Servers for a given location .
6,669
def GetAllServers(alias=None, name_groups=False):
    """Gets a deep list of all servers in all groups and datacenters.

    Queries every known location; a location that errors out is skipped
    so one bad datacenter does not abort the whole listing.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    servers = []
    clc.v1.Account.GetLocations()
    for location in clc.LOCATIONS:
        try:
            r = clc.v1.API.Call('post', 'Server/GetAllServers',
                                {'AccountAlias': alias, 'Location': location},
                                hide_errors=[5, ])
            if name_groups:
                r['Servers'] = clc.v1.Group.NameGroups(r['Servers'], 'HardwareGroupUUID')
            if int(r['StatusCode']) == 0:
                servers += r['Servers']
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  Per-location best-effort kept.
            pass
    return servers
Gets a deep list of all Servers in all groups and datacenters .
6,670
def GetTemplateID(alias, location, name):
    """Given a template name, return its unique OperatingSystem id.

    Raises Exception when no template with that name exists.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    for template in Server.GetTemplates(alias, location):
        if template['Name'].lower() == name.lower():
            return template['OperatingSystem']
    # Reached only when no template matched (original used for/else).
    if clc.args:
        clc.v1.output.Status("ERROR", 3,
            "Template %s not found in account %s datacenter %s" % (name, alias, location))
    raise Exception("Template not found")
Given a template name return the unique OperatingSystem ID .
6,671
def ConvertToTemplate(server, template, password=None, alias=None):
    """Converts an existing server into a template."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if password is None:
        # Look up the server's stored credentials when none supplied.
        password = clc.v1.Server.GetCredentials([server, ], alias)[0]['Password']
    return clc.v1.API.Call('post', 'Server/ConvertServerToTemplate',
                           {'AccountAlias': alias, 'Name': server,
                            'Password': password, 'TemplateAlias': template})
Converts an existing server into a template .
6,672
def RestoreServer(server, group, alias, location):
    """Restores an archived server into the given hardware group."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    # Accept either a 32-hex-char UUID (hyphens optional) or a group name.
    if re.match("([a-zA-Z0-9]){32}", group.replace("-", "")):
        group_uuid = group
    else:
        group_uuid = clc.v1.Group.GetGroupUUID(group, alias, location)
    response = clc.v1.API.Call('post', 'Server/RestoreServer',
                               {'AccountAlias': alias, 'Name': server,
                                'HardwareGroupUUID': group_uuid})
    if int(response['StatusCode']) == 0:
        return response
Restores an archived server .
6,673
def _ServerActions(action, alias, servers):
    """Apply the named server action endpoint to each server.

    Returns the list of responses whose StatusCode was 0.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    results = []
    for server_name in servers:
        response = clc.v1.API.Call('post', 'Server/%sServer' % (action),
                                   {'AccountAlias': alias, 'Name': server_name})
        if int(response['StatusCode']) == 0:
            results.append(response)
    return results
Applies the specified action to each of the given servers .
6,674
def GetDisks(server, alias=None, guest_names=True):
    """Return the list of disks configured for *server*."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    response = clc.v1.API.Call('post', 'Server/ListDisks',
                               {'AccountAlias': alias, 'Name': server,
                                'QueryGuestDiskNames': guest_names})
    return response['Disks']
Returns list of disks configured for the server
6,675
def DeleteDisk(server, scsi_bus_id, scsi_device_id, alias=None):
    """Deletes the disk at the given SCSI bus/device from *server*."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    payload = {'AccountAlias': alias,
               'Name': server,
               'OverrideFailsafes': True,
               'ScsiBusID': scsi_bus_id,
               'ScsiDeviceID': scsi_device_id}
    return clc.v1.API.Call('post', 'Server/DeleteDisk', payload)
Deletes the specified disk .
6,676
def get_external_references(self):
    """Iterate over every external reference object in this element."""
    for ref_node in self.node.findall('externalRef'):
        for ref in CexternalReference(ref_node):
            yield ref
Iterator that returns all the external reference objects of the external references object
6,677
def set_id(self, this_id):
    """Set the token identifier attribute ('id' for NAF, 'wid' for KAF)."""
    if self.type == 'NAF':
        attr = 'id'
    elif self.type == 'KAF':
        attr = 'wid'
    else:
        return None
    return self.node.set(attr, this_id)
Set the identifier for the token
6,678
def to_naf(self):
    """Convert a KAF text layer to NAF by renaming 'wid' attributes to 'id'."""
    if self.type != 'KAF':
        return
    self.type = 'NAF'
    for wf_node in self.__get_wf_nodes():
        wf_node.set('id', wf_node.get('wid'))
        del wf_node.attrib['wid']
Converts the object to NAF
6,679
def get_wf(self, token_id):
    """Return the Cwf for *token_id*; None when no token matches.

    Fast path uses the id index; falls back to a linear scan of the
    raw wf nodes (the index may not cover everything).
    """
    node = self.idx.get(token_id)
    if node is not None:
        return Cwf(node=node, type=self.type)
    for wf_node in self.__get_wf_nodes():
        if self.type == 'NAF':
            attr = 'id'
        elif self.type == 'KAF':
            attr = 'wid'
        if wf_node.get(attr) == token_id:
            return Cwf(node=wf_node, type=self.type)
    return None
Returns the token object for the given token identifier
6,680
def add_wf(self, wf_obj):
    """Append a token object to the text layer; raise on a duplicate id."""
    new_id = wf_obj.get_id()
    if new_id in self.idx:
        raise ValueError("Text node (wf) with id {} already exists!".format(new_id))
    self.node.append(wf_obj.get_node())
    self.idx[new_id] = wf_obj
Adds a token object to the text layer
6,681
def remove_tokens_of_sentence(self, sentence_id):
    """Remove every token whose sentence id equals *sentence_id*."""
    # Collect first, then remove, to avoid mutating while iterating.
    doomed = {wf.get_node() for wf in self if wf.get_sent() == sentence_id}
    for node in doomed:
        self.node.remove(node)
Removes the tokens of the given sentence
6,682
def aggregate_daily(image_coll, start_date=None, end_date=None, agg_type='mean'):
    """Aggregate images by day without using joins.

    Parameters
    ----------
    image_coll : ee.ImageCollection
    start_date : str, optional
        Inclusive lower bound (ISO date string).
    end_date : str, optional
        Exclusive upper bound (ISO date string).
    agg_type : str
        Daily reducer: 'mean', 'median', 'min', 'max', or 'sum'.
        Fix: this parameter was previously accepted but ignored; the
        collection was always averaged regardless of its value.

    Returns
    -------
    ee.ImageCollection with one image per distinct day.
    """
    if agg_type not in ('mean', 'median', 'min', 'max', 'sum'):
        raise ValueError('unsupported agg_type: {}'.format(agg_type))

    if start_date and end_date:
        test_coll = image_coll.filterDate(ee.Date(start_date), ee.Date(end_date))
    elif start_date:
        test_coll = image_coll.filter(ee.Filter.greaterThanOrEquals(
            'system:time_start', ee.Date(start_date).millis()))
    elif end_date:
        test_coll = image_coll.filter(ee.Filter.lessThan(
            'system:time_start', ee.Date(end_date).millis()))
    else:
        test_coll = image_coll

    def get_date(time):
        return ee.Date(ee.Number(time)).format('yyyy-MM-dd')

    # Distinct, sorted list of day strings present in the collection.
    date_list = ee.List(test_coll.aggregate_array('system:time_start')) \
        .map(get_date).distinct().sort()

    def aggregate_func(date_str):
        start_date = ee.Date(ee.String(date_str))
        end_date = start_date.advance(1, 'day')
        agg_coll = image_coll.filterDate(start_date, end_date)
        # Apply the requested daily reducer (was hard-coded to mean()).
        agg_img = getattr(agg_coll, agg_type)()
        return agg_img.set({
            'system:index': start_date.format('yyyyMMdd'),
            'system:time_start': start_date.millis(),
            'date': start_date.format('yyyy-MM-dd'),
        })

    return ee.ImageCollection(date_list.map(aggregate_func))
Aggregate images by day without using joins
6,683
def remove_this_clink(self, clink_id):
    """Remove the clink whose identifier equals *clink_id*, if present."""
    match = next((c for c in self.get_clinks() if c.get_id() == clink_id), None)
    if match is not None:
        self.node.remove(match.get_node())
Removes the clink for the given clink identifier
6,684
def GetGroupEstimate(group, alias=None, location=None):
    """Gets estimated costs for a group of servers."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    uuid = clc.v1.Group.GetGroupUUID(group, alias, location)
    response = clc.v1.API.Call('post', 'Billing/GetGroupEstimate',
                               {'AccountAlias': alias, 'HardwareGroupUUID': uuid})
    if int(response['StatusCode']) == 0:
        return response
Gets estimated costs for a group of servers .
6,685
def GetGroupSummaries(alias=None, date_start=None, date_end=None):
    """Charges for groups and servers in an account over an optional date range."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    payload = {'AccountAlias': alias}
    if date_start is not None:
        payload['StartDate'] = date_start
    if date_end is not None:
        payload['EndDate'] = date_end
    response = clc.v1.API.Call('post', 'Billing/GetGroupSummaries', payload)
    if int(response['StatusCode']) == 0:
        return response['GroupTotals']
Gets the charges for groups and servers within a given account and for any date range .
6,686
def GetServerEstimate(server, alias=None):
    """Gets the estimated monthly cost for a given server."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    response = clc.v1.API.Call('post', 'Billing/GetServerEstimate',
                               {'AccountAlias': alias, 'ServerName': server})
    if int(response['StatusCode']) == 0:
        return response
Gets the estimated monthly cost for a given server .
6,687
def display_direct():
    """Pretty-print the configuration returned by get_direct_config()."""
    include_dirs, runtime_dirs, runtime = get_direct_config()
    for label, dirs in (("Include Search Dirs", include_dirs),
                        ("Runtime Search Dirs", runtime_dirs)):
        print("{}: {}".format(label, dirs))
        print("\tContents: {}\n".format(get_dir_contents(dirs)))
    print("Runtime Libs: '{}'".format(runtime))
Displays the output of get_direct_config formatted nicely
6,688
def save(self, filename='saved.ol.p'):
    """Persist the trained classifier to *filename* via a dill pickle.

    Fix: the original opened the file without ever closing it (handle
    leak; data may not be flushed promptly).  A `with` block now
    guarantees the file is closed.
    """
    import dill as pickle
    sv = {'cl': self.cl}
    with open(filename, "wb") as fh:
        pickle.dump(sv, fh)
Save model to pickle file
6,689
def GetGroupUUID(group, alias=None, location=None):
    """Given a group name, return the unique group UUID.

    Raises Exception when no group with that name exists.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    for row in Group.GetGroups(location, alias):
        if row['Name'] == group:
            return row['UUID']
    # Reached only when no group matched (original used for/else).
    if clc.args:
        clc.v1.output.Status("ERROR", 3,
            "Group %s not found in account %s datacenter %s" % (group, alias, location))
    raise Exception("Group not found")
Given a group name return the unique group ID .
6,690
def NameGroups(data_arr, id_key):
    """Replace group UUIDs under *id_key* with group names from the cache.

    Fix: the original assigned into ``data_arr[id_key]`` (indexing the
    *list* with a string) instead of ``data[id_key]`` (the row dict).
    The resulting TypeError was silently swallowed by a bare ``except``,
    so the substitution never took effect.  Rows whose UUID is not in
    the cache are left untouched.
    """
    new_data_arr = []
    for data in data_arr:
        try:
            data[id_key] = clc._GROUP_MAPPING[data[id_key]]
        except KeyError:
            # Unknown UUID - keep the original value.
            pass
        new_data_arr.append(data)
    if clc.args:
        clc.v1.output.Status("ERROR", 2, "Group name conversion not yet implemented")
    return new_data_arr
Get group name associated with ID .
6,691
def GetGroups(location=None, alias=None):
    """Return all of the alias' hardware groups in the given location."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    response = clc.v1.API.Call('post', 'Group/GetGroups',
                               {'AccountAlias': alias, 'Location': location})
    # Cache UUID->name so later calls can translate ids to names.
    for grp in response['HardwareGroups']:
        clc._GROUP_MAPPING[grp['UUID']] = grp['Name']
    if int(response['StatusCode']) == 0:
        return response['HardwareGroups']
Return all of alias groups in the given location .
6,692
def _GroupActions(action, group, alias, location):
    """Resolve the group UUID and apply the named hardware-group action."""
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    uuid = Group.GetGroupUUID(group, alias, location)
    return clc.v1.API.Call('post', 'Group/%sHardwareGroup' % (action),
                           {'UUID': uuid, 'AccountAlias': alias})
Applies group level actions .
6,693
def get_endpoint_and_path(environ):
    """Extract (endpoint, path) from the WSGI request URL.

    Raises HttpError 400 when the path contains a '..' component.
    """
    components = environ['PATH_INFO'].split('/')
    if '..' in components:
        raise HttpError('400 Bad Request', 'Path cannot contain "..".')
    # Drop the trailing empty component produced by a trailing slash.
    if components and components[-1] == '':
        components.pop()
    # Keep only what follows the last empty component (leading/double slash).
    try:
        components = components[_rindex(components, '') + 1:]
    except ValueError:
        pass
    if not components:
        return '', ''
    return components[0], '/'.join(components[1:])
Extracts endpoint and path from the request URL .
6,694
def pack(self):
    """Convenience wrapper: allocate a buffer of self.size and pack into it."""
    buffer = bytearray(self.size)
    self.pack_into(buffer)
    return buffer
convenience function for packing
6,695
def encoded_class(block, offset=0):
    """Return the class registered for the magic number found at *offset*.

    Raises InvalidFileFormatNull for an empty/None block and
    InvalidFileFormat when no known magic number matches.
    """
    if not block:
        raise InvalidFileFormatNull
    for magic, cls in __magicmap__.items():
        if block.find(magic, offset, offset + len(magic)) > -1:
            return cls
    raise InvalidFileFormat
Returns the class registered for the magic number found in a block of memory
6,696
def _copy_stream(src, dest, length=0):
    """Copy *length* bytes from *src* to *dest* (all remaining data if 0).

    Similar to shutil.copyfileobj, but supports limiting the amount of
    data copied.  Fix: the remaining-byte counter previously decreased
    by the *requested* chunk size rather than the number of bytes
    actually read - file-like objects are allowed to return short
    reads, which made the copy stop early.  The loop now decrements by
    ``len(buf)`` and breaks cleanly at EOF.
    """
    if length == 0:
        shutil.copyfileobj(src, dest)
        return
    bytes_left = length
    while bytes_left > 0:
        buf = src.read(min(_BUFFER_SIZE, bytes_left))
        if not buf:
            break  # EOF before `length` bytes were available
        dest.write(buf)
        bytes_left -= len(buf)
Similar to shutil . copyfileobj but supports limiting data size .
6,697
def _path_exists ( path ) : return os . path . exists ( path ) or os . path . islink ( path )
Checks if the path exists - is a file a directory or a symbolic link that may be broken .
6,698
def _exclusive_lock(path):
    """A simple wrapper for fcntl exclusive lock (generator context manager).

    Retries a non-blocking flock up to _LOCK_RETRIES times, sleeping
    _LOCK_SLEEP_TIME_S between attempts, and raises
    ConcurrentModificationError when the lock cannot be obtained.
    The lock file (and its parent dirs) is created if missing.
    """
    _create_file_dirs(path)
    fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)
    try:
        retries_left = _LOCK_RETRIES
        success = False
        while retries_left > 0:
            try:
                # Non-blocking attempt; EAGAIN/EWOULDBLOCK means another
                # holder currently owns the lock.
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                success = True
                break
            except IOError as e:
                if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
                    # Yield to other greenlets while waiting for the lock.
                    gevent.sleep(_LOCK_SLEEP_TIME_S)
                    retries_left -= 1
                else:
                    raise
        if success:
            # Caller's critical section runs here.
            yield
        else:
            raise ConcurrentModificationError(path)
    finally:
        # Only unlock if we actually acquired; always close the fd.
        if success:
            fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)
A simple wrapper for fcntl exclusive lock .
6,699
def delete(self, name, version, _lock=True):
    """Removes a file from the storage.

    Returns True when the link was deleted, False when a newer version
    exists (the delete is then ignored).  Raises
    FiletrackerFileNotFoundError when the link does not exist.
    ``_lock=False`` lets a caller that already holds the link lock
    reuse it instead of acquiring a fresh one.
    """
    link_path = self._link_path(name)
    if _lock:
        file_lock = _exclusive_lock(self._lock_path('links', name))
    else:
        # Caller already holds the lock; use a no-op context manager.
        file_lock = _no_lock()
    with file_lock:
        logger.debug('Acquired or inherited lock for link %s.', name)
        if not _path_exists(link_path):
            raise FiletrackerFileNotFoundError
        if _file_version(link_path) > version:
            logger.info(
                'Tried to delete newer version of %s (%d < %d), ignoring.',
                name, version, _file_version(link_path))
            return False
        digest = self._digest_for_link(name)
        with _exclusive_lock(self._lock_path('blobs', digest)):
            logger.debug('Acquired lock for blob %s.', digest)
            should_delete_blob = False
            with self._db_transaction() as txn:
                logger.debug('Started DB transaction (deleting link).')
                digest_bytes = digest.encode()
                link_count = self.db.get(digest_bytes, txn=txn)
                if link_count is None:
                    raise RuntimeError("File exists but has no key in db")
                link_count = int(link_count)
                if link_count == 1:
                    # Last reference: drop both DB entries and mark the
                    # blob file itself for deletion after the txn commits.
                    logger.debug('Deleting last link to blob %s.', digest)
                    self.db.delete(digest_bytes, txn=txn)
                    self.db.delete(
                        '{}:logical_size'.format(digest).encode(), txn=txn)
                    should_delete_blob = True
                else:
                    # Other links remain: just decrement the refcount.
                    new_count = str(link_count - 1).encode()
                    self.db.put(digest_bytes, new_count, txn=txn)
                logger.debug('Committing DB transaction (deleting link).')
            logger.debug('Committed DB transaction (deleting link).')
            os.unlink(link_path)
            logger.debug('Deleted link %s.', name)
            if should_delete_blob:
                os.unlink(self._blob_path(digest))
        logger.debug('Released lock for blob %s.', digest)
    logger.debug('Released (or gave back) lock for link %s.', name)
    return True
Removes a file from the storage .