idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
52,100 | def eval ( self , command ) : if self . logger . handlers : self . logger . debug ( command . decode ( 'utf-8' ) ) if self . tcl_script : self . tcl_script . info ( command ) self . rc = self . tcl_interp . eval ( command ) if self . logger . handlers : self . logger . debug ( '\t' + self . rc . decode ( 'utf-8' ) ) return self . rc | Execute Tcl command . |
52,101 | def value_change ( self , vcdId , value ) : self . idcode2series [ vcdId ] . append ( ( self . now , value ) ) | append change from VCD file signal data series |
52,102 | def parse ( self , file_handle ) : lineIterator = iter ( enumerate ( file_handle ) ) tokeniser = ( ( lineNo , word ) for lineNo , line in lineIterator for word in line . split ( ) if word ) while True : token = next ( tokeniser ) self . keyword_dispatch [ token [ 1 ] ] ( tokeniser , token [ 1 ] ) if self . end_of_definitions : break while True : try : lineNo , token = next ( lineIterator ) except StopIteration : break c = token [ 0 ] if c == '$' : continue elif c == '#' : self . now = int ( token [ 1 : ] ) else : sp = token . split ( ) sp_len = len ( sp ) if sp_len == 1 : value = c vcdId = token [ 1 : ] elif sp_len == 2 : value , vcdId = sp else : raise VcdSyntaxError ( "Line %d: Don't understand: %s " % ( lineNo , token ) ) self . value_change ( vcdId . strip ( ) , value . strip ( ) ) | Tokenize and parse the VCD file |
52,103 | def edit_record ( self , new_record ) : try : record_id = new_record [ "id" ] except KeyError : raise ValueError ( "No record ID provided!" ) record_type = self . resource_type ( record_id ) if record_type is None : raise ArchivistsToolkitError ( "Could not determine type for record with ID {}; not in database?" . format ( record_id ) ) clause = [ ] values = [ ] if "title" in new_record : clause . append ( "title=%s" ) values . append ( new_record [ "title" ] ) if "levelOfDescription" in new_record : clause . append ( "resourceLevel=%s" ) values . append ( new_record [ "levelOfDescription" ] ) if not clause : raise ValueError ( "No fields to update specified!" ) clause = ", " . join ( clause ) if record_type == ArchivistsToolkitClient . RESOURCE : db_type = "Resources" db_id_field = "resourceId" else : db_type = "ResourcesComponents" db_id_field = "resourceComponentId" sql = "UPDATE {} SET {} WHERE {}=%s" . format ( db_type , clause , db_id_field ) cursor = self . db . cursor ( ) cursor . execute ( sql , tuple ( values ) ) | Update a record in Archivist s Toolkit using the provided new_record . |
52,104 | def get_levels_of_description ( self ) : if not hasattr ( self , "levels_of_description" ) : cursor = self . db . cursor ( ) levels = set ( ) cursor . execute ( "SELECT distinct(resourceLevel) FROM Resources" ) for row in cursor : levels . add ( row ) cursor . execute ( "SELECT distinct(resourceLevel) FROM ResourcesComponents" ) for row in cursor : levels . add ( row ) self . levels_of_description = list ( levels ) return self . levels_of_description | Returns an array of all levels of description defined in this Archivist s Toolkit instance . |
52,105 | def collection_list ( self , resource_id , resource_type = "collection" ) : ret = [ ] cursor = self . db . cursor ( ) if resource_type == "collection" : cursor . execute ( "SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId IS NULL AND resourceId=%s" , ( resource_id ) , ) else : ret . append ( resource_id ) cursor . execute ( "SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId=%s" , ( resource_id ) , ) rows = cursor . fetchall ( ) if len ( rows ) : for row in rows : ret . extend ( self . collection_list ( row [ 0 ] , "description" ) ) return ret | Fetches a list of all resource and component IDs within the specified resource . |
52,106 | def find_resource_id_for_component ( self , component_id ) : cursor = self . db . cursor ( ) sql = "SELECT resourceId, parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s" cursor . execute ( sql , ( component_id , ) ) resource_id , parent_id = cursor . fetchone ( ) if resource_id is None : return self . find_resource_id_for_component ( parent_id ) else : return resource_id | Given the ID of a component returns the parent resource ID . |
52,107 | def find_parent_id_for_component ( self , component_id ) : cursor = self . db . cursor ( ) sql = "SELECT parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s" count = cursor . execute ( sql , ( component_id , ) ) if count > 0 : return ( ArchivistsToolkitClient . RESOURCE_COMPONENT , cursor . fetchone ( ) ) return ( ArchivistsToolkitClient . RESOURCE , self . find_resource_id_for_component ( component_id ) , ) | Given the ID of a component returns the parent component s ID . |
52,108 | def evaluate ( self , flux , xo , yo , a , b , c ) : dx = self . x - xo dy = self . y - yo psf = tf . exp ( - ( a * dx ** 2 + 2 * b * dx * dy + c * dy ** 2 ) ) psf_sum = tf . reduce_sum ( psf ) return flux * psf / psf_sum | Evaluate the Gaussian model |
52,109 | def parse_log_line ( line ) : matches = re . search ( r'^\[([^\]]+)\] ([^:]+: .*)' , line ) error = re . search ( r'Traceback' , line ) if error : return { 'line' : line , 'step' : 'error' } if not matches : return { 'line' : line , 'step' : None } tstamp = matches . group ( 1 ) msg = matches . group ( 2 ) if not msg . find ( 'Timing: ' ) >= 0 : return { 'line' : line , 'step' : None } when = datetime . strptime ( tstamp , '%Y-%m-%dT%H:%MZ' ) . replace ( tzinfo = pytz . timezone ( 'UTC' ) ) step = msg . split ( ":" ) [ - 1 ] . strip ( ) return { 'line' : line , 'step' : step , 'when' : when } | Parses a log line and returns it with more information |
52,110 | def message ( self ) : return { 'timestamp' : time . mktime ( self . timestamp . timetuple ( ) ) * 1e3 + self . timestamp . microsecond / 1e3 , 'group' : self . group_pk , 'participant' : None if not self . participant else self . participant . code , 'channel' : self . channel , 'value' : self . value } | Dictionary representation of the Event appropriate for JSON - encoding . |
52,111 | def save ( self , * args , ** kwargs ) : if self . timestamp is None : self . timestamp = timezone . now ( ) super ( ) . save ( * args , ** kwargs ) | Saving an Event automatically sets the timestamp if not already set . |
52,112 | def send ( self , channel , payload ) : with track ( 'send_channel=' + channel ) : with track ( 'create event' ) : Event . objects . create ( group = self , channel = channel , value = payload ) ChannelGroup ( str ( self . pk ) ) . send ( { 'text' : json . dumps ( { 'channel' : channel , 'payload' : payload } ) } ) | Send a message with the given payload on the given channel . Messages are broadcast to all players in the group . |
52,113 | def _subperiod_tick ( self , current_interval , intervals ) : self . refresh_from_db ( ) for key , value in self . group_decisions . items ( ) : self . subperiod_group_decisions [ key ] = value self . send ( 'group_decisions' , self . subperiod_group_decisions ) self . save ( update_fields = [ 'subperiod_group_decisions' ] ) | Tick each sub - period copying group_decisions to subperiod_group_decisions . |
52,114 | def _on_decisions_event ( self , event = None , ** kwargs ) : if not self . ran_ready_function : logger . warning ( 'ignoring decision from {} before when_all_players_ready: {}' . format ( event . participant . code , event . value ) ) return with track ( '_on_decisions_event' ) : self . group_decisions [ event . participant . code ] = event . value self . _group_decisions_updated = True self . save ( update_fields = [ 'group_decisions' , '_group_decisions_updated' ] ) if not self . num_subperiods ( ) and not self . rate_limit ( ) : self . send ( 'group_decisions' , self . group_decisions ) | Called when an Event is received on the decisions channel . Saves the value in group_decisions . If num_subperiods is None immediately broadcasts the event back out on the group_decisions channel . |
52,115 | def clean ( matrix ) : return defaultdict ( lambda : ' ' , { k : v for k , v in matrix . items ( ) if v != ' ' } ) | Return a copy of given matrix where keys associated to space values are discarded |
52,116 | def build ( matrix ) : max_x = max ( matrix , key = lambda t : t [ 0 ] ) [ 0 ] min_x = min ( matrix , key = lambda t : t [ 0 ] ) [ 0 ] max_y = max ( matrix , key = lambda t : t [ 1 ] ) [ 1 ] min_y = min ( matrix , key = lambda t : t [ 1 ] ) [ 1 ] yield from ( '' . join ( matrix [ i , j ] for i in range ( min_x , max_x + 1 ) ) for j in range ( min_y , max_y + 1 ) ) | Yield lines generated from given matrix |
52,117 | def _build_base_url ( self , host , port ) : parsed = urlparse ( host ) if not parsed . scheme : parsed = parsed . _replace ( scheme = "http" ) parsed = parsed . _replace ( path = "" ) netloc , parts = host , host . partition ( ":" ) if parts [ 1 ] == "" and port is not None : netloc = "{}:{}" . format ( parts [ 0 ] , port ) parsed = parsed . _replace ( netloc = netloc ) parsed = parsed . _replace ( path = parsed . path . rstrip ( "/" ) ) return parsed . geturl ( ) | Return the API base URL string based on host and port . |
52,118 | def _process_notes ( record , new_record ) : if "notes" not in new_record or not new_record [ "notes" ] : return False new_notes = [ ] for note in new_record [ "notes" ] : if note [ "type" ] in ( "odd" , "accessrestrict" ) and note . get ( "content" ) : new_notes . append ( { "jsonmodel_type" : "note_multipart" , "publish" : True , "subnotes" : [ { "content" : note [ "content" ] , "jsonmodel_type" : "note_text" , "publish" : True , } ] , "type" : note [ "type" ] , } ) record [ "notes" ] = new_notes return True | Populate the notes property using the provided new_record . |
52,119 | def resource_type ( self , resource_id ) : match = re . search ( r"repositories/\d+/(resources|archival_objects)/\d+" , resource_id ) if match and match . groups ( ) : type_ = match . groups ( ) [ 0 ] return "resource" if type_ == "resources" else "resource_component" else : raise ArchivesSpaceError ( "Unable to determine type of provided ID: {}" . format ( resource_id ) ) | Given an ID determines whether a given resource is a resource or a resource_component . |
52,120 | def edit_record ( self , new_record ) : try : record_id = new_record [ "id" ] except KeyError : raise ValueError ( "No record ID provided!" ) record = self . get_record ( record_id ) field_map = { "title" : "title" , "level" : "levelOfDescription" } fields_updated = False for field , targetfield in field_map . items ( ) : try : record [ targetfield ] = new_record [ field ] fields_updated = True except KeyError : continue if self . _process_notes ( record , new_record ) : fields_updated = True if ( "start_date" in new_record or "end_date" in new_record or "date_expression" in new_record ) : date = { "jsonmodel_type" : "date" , "date_type" : "inclusive" , "label" : "creation" , } if "date_expression" in new_record : date [ "expression" ] = new_record [ "date_expression" ] if "start_date" in new_record : date [ "begin" ] = new_record [ "start_date" ] if "end_date" in new_record : date [ "end" ] = new_record [ "end_date" ] if len ( record [ "dates" ] ) == 0 : record [ "dates" ] = [ date ] else : record [ "dates" ] [ 0 ] = date fields_updated = True if not fields_updated : raise ValueError ( "No fields to update specified!" ) self . _post ( record_id , data = json . dumps ( record ) ) | Update a record in ArchivesSpace using the provided new_record . |
52,121 | def get_levels_of_description ( self ) : if not hasattr ( self , "levels_of_description" ) : self . levels_of_description = self . _get ( "/config/enumerations/32" ) . json ( ) [ "values" ] return self . levels_of_description | Returns an array of all levels of description defined in this ArchivesSpace instance . |
52,122 | def get_resource_component_children ( self , resource_component_id ) : resource_type = self . resource_type ( resource_component_id ) return self . get_resource_component_and_children ( resource_component_id , resource_type ) | Given a resource component fetches detailed metadata for it and all of its children . |
52,123 | def find_parent_id_for_component ( self , component_id ) : response = self . get_record ( component_id ) if "parent" in response : return ( ArchivesSpaceClient . RESOURCE_COMPONENT , response [ "parent" ] [ "ref" ] ) elif "resource" in response : return ( ArchivesSpaceClient . RESOURCE , response [ "resource" ] [ "ref" ] ) else : return ( ArchivesSpaceClient . RESOURCE , component_id ) | Given the URL to a component returns the parent component s URL . |
52,124 | def find_collection_ids ( self , search_pattern = "" , identifier = "" , fetched = 0 , page = 1 ) : params = { "page" : page , "q" : "primary_type:resource" } if search_pattern != "" : search_pattern = self . _escape_solr_query ( search_pattern , field = "title" ) params [ "q" ] = params [ "q" ] + " AND title:{}" . format ( search_pattern ) if identifier != "" : identifier = self . _escape_solr_query ( identifier , field = "identifier" ) params [ "q" ] = params [ "q" ] + " AND identifier:{}" . format ( identifier ) response = self . _get ( self . repository + "/search" , params = params ) hits = response . json ( ) results = [ r [ "uri" ] for r in hits [ "results" ] ] results_so_far = fetched + hits [ "this_page" ] if hits [ "total_hits" ] > results_so_far : results . extend ( self . find_collection_ids ( fetched = results_so_far , page = page + 1 ) ) return results | Fetches a list of resource URLs for every resource in the database . |
52,125 | def find_by_id ( self , object_type , field , value ) : def format_record ( record ) : resolved = record [ "_resolved" ] identifier = ( resolved [ "ref_id" ] if "ref_id" in resolved else resolved . get ( "component_id" , "" ) ) return { "id" : record [ "ref" ] , "type" : self . resource_type ( record [ "ref" ] ) , "identifier" : identifier , "title" : resolved . get ( "title" , "" ) , "levelOfDescription" : resolved . get ( "level" , "" ) , "fullrecord" : resolved , } if object_type not in ( "digital_object_components" , "archival_objects" ) : raise ValueError ( "object_type must be 'digital_object_components' or 'archival_objects'" ) if field not in ( "ref_id" , "component_id" ) : raise ValueError ( "field must be 'component_id' or 'ref_id'" ) params = { field + "[]" : value , "resolve[]" : object_type } url = self . repository + "/find_by_id/" + object_type response = self . _get ( url , params = params ) hits = response . json ( ) return [ format_record ( r ) for r in hits [ object_type ] ] | Find resource by a specific ID . |
52,126 | def attribute ( func ) : attr = abc . abstractmethod ( func ) attr . __iattribute__ = True attr = _property ( attr ) return attr | Wrap a function as an attribute . |
52,127 | def property ( func ) : attr = abc . abstractmethod ( func ) attr . __iproperty__ = True attr = Property ( attr ) return attr | Wrap a function as a property . |
52,128 | def classattribute ( func ) : attr = abc . abstractmethod ( func ) attr . __iclassattribute__ = True attr = _property ( attr ) return attr | Wrap a function as a class attribute . |
52,129 | def method ( func ) : attr = abc . abstractmethod ( func ) attr . __imethod__ = True return attr | Wrap a function as a method . |
52,130 | def classmethod ( func ) : attr = abc . abstractmethod ( func ) attr . __iclassmethod__ = True attr = _classmethod ( attr ) return attr | Wrap a function as a classmethod . |
52,131 | def parse_config ( config_file ) : try : with open ( config_file , 'r' ) as f : return yaml . load ( f ) except IOError : print "Configuration file {} not found or not readable." . format ( config_file ) raise | Parse a YAML configuration file |
52,132 | def _ensure_ifaces_tuple ( ifaces ) : try : ifaces = tuple ( ifaces ) except TypeError : ifaces = ( ifaces , ) for iface in ifaces : if not _issubclass ( iface , ibc . Iface ) : raise TypeError ( 'Can only compare against interfaces.' ) return ifaces | Convert to a tuple of interfaces and raise if not interfaces . |
52,133 | def _check_for_definition ( iface , cls , tag , defines ) : attributes = ( attr for attr in iface . __abstractmethods__ if hasattr ( getattr ( iface , attr ) , tag ) ) for attribute in attributes : for node in cls . __mro__ : if hasattr ( node , attribute ) and defines ( getattr ( node , attribute ) ) : return True try : attribute return False except NameError : return True | Check for a valid definition of a value . |
52,134 | def issubclass ( cls , ifaces ) : ifaces = _ensure_ifaces_tuple ( ifaces ) for iface in ifaces : return all ( ( _check_for_definition ( iface , cls , '__iclassattribute__' , _is_attribute , ) , _check_for_definition ( iface , cls , '__iproperty__' , _is_property , ) , _check_for_definition ( iface , cls , '__imethod__' , _is_method , ) , _check_for_definition ( iface , cls , '__iclassmethod__' , _is_classmethod , ) , ) ) | Check if the given class is an implementation of the given iface . |
52,135 | def isinstance ( instance , ifaces ) : ifaces = _ensure_ifaces_tuple ( ifaces ) for iface in ifaces : attributes = ( attr for attr in iface . __abstractmethods__ if hasattr ( getattr ( iface , attr ) , '__iattribute__' ) ) for attribute in attributes : if not hasattr ( instance , attribute ) : return False if not issubclass ( type ( instance ) , ifaces ) : return False return True | Check if a given instance is an implementation of the interface . |
52,136 | def _get_call_class ( method ) : call_base , call_name = method . split ( '.' , 1 ) mod = __import__ ( 'ubersmith.calls.{0}' . format ( call_base ) , fromlist = [ '' ] ) gen = ( getattr ( mod , x ) for x in dir ( mod ) if not x . startswith ( '_' ) ) gen = ( x for x in gen if type ( x ) is type and issubclass ( x , BaseCall ) ) for call_class in gen : if call_class . method == method : return call_class else : class GenericCall ( BaseCall ) : method = '.' . join ( ( call_base , call_name ) ) return GenericCall | Find the call class for method if it exists else create one . |
52,137 | def render ( self ) : if not self . validate ( ) : raise ValidationError self . process_request ( ) self . clean ( ) return self . response | Validate process clean and return the result of the call . |
52,138 | def process_request ( self ) : self . response = self . request_handler . process_request ( self . method , self . request_data ) | Processing the call and set response_data . |
52,139 | def clean ( self ) : if self . response . type == 'application/json' : cleaned = copy . deepcopy ( self . response . data ) if self . cleaner is not None : cleaned = self . cleaner ( cleaned ) typed_response = { dict : DictResponse , int : IntResponse , } . get ( type ( cleaned ) , BaseResponse ) self . response = typed_response . from_cleaned ( self . response , cleaned ) else : self . response = FileResponse ( self . response . response ) | Clean response . |
52,140 | def edit_record ( self , new_record ) : try : record_id = new_record [ "slug" ] except KeyError : raise ValueError ( "No slug provided!" ) record = self . get_record ( record_id ) field_map = { "title" : "title" , "level" : "levelOfDescription" } fields_updated = False for field , targetfield in field_map . items ( ) : try : record [ targetfield ] = new_record [ field ] fields_updated = True except KeyError : continue if "notes" in new_record and new_record [ "notes" ] : note = new_record [ "notes" ] [ 0 ] new_note = { "content" : note [ "content" ] , "type" : note [ "type" ] } if "notes" not in record or record [ "notes" ] == [ ] : record [ "notes" ] = [ new_note ] else : record [ "notes" ] [ 0 ] = new_note fields_updated = True else : record [ "notes" ] = [ ] updated_date = { } if "dates" in new_record and type ( new_record [ "dates" ] ) is list : new_record [ "dates" ] = new_record [ "dates" ] [ 0 ] date_mapping = { "start_date" : "start_date" , "end_date" : "end_date" , "date_expression" : "date" , } for date_field in date_mapping : if date_field in new_record : updated_date [ date_mapping [ date_field ] ] = new_record [ date_field ] if updated_date != { } : record [ "dates" ] = [ updated_date ] fields_updated = True if not fields_updated : raise ValueError ( "No fields to update specified!" ) self . _put ( urljoin ( self . base_url , "informationobjects/{}" . format ( record_id ) ) , data = json . dumps ( record ) , ) | Update a record in AtoM using the provided new_record . |
52,141 | def get_levels_of_description ( self ) : if not hasattr ( self , "levels_of_description" ) : self . levels_of_description = [ item [ "name" ] for item in self . _get ( urljoin ( self . base_url , "taxonomies/34" ) ) . json ( ) ] return self . levels_of_description | Returns an array of all levels of description defined in this AtoM instance . |
52,142 | def collection_list ( self , resource_id , resource_type = "collection" ) : def fetch_children ( children ) : results = [ ] for child in children : results . append ( child [ "slug" ] ) if "children" in child : results . extend ( fetch_children ( child [ "children" ] ) ) return results response = self . _get ( urljoin ( self . base_url , "informationobjects/tree/{}" . format ( resource_id ) ) ) tree = response . json ( ) return fetch_children ( tree [ "children" ] ) | Fetches a list of slug representing descriptions within the specified parent description . |
52,143 | def find_parent_id_for_component ( self , slug ) : response = self . get_record ( slug ) if "parent" in response : return response [ "parent" ] else : return slug | Given the slug of a description returns the parent description s slug . |
52,144 | def augment_resource_ids ( self , resource_ids ) : resources_augmented = [ ] for id in resource_ids : resources_augmented . append ( self . get_resource_component_and_children ( id , recurse_max_level = 2 ) ) return resources_augmented | Given a list of resource IDs returns a list of dicts containing detailed information about the specified resources and their children . |
52,145 | def delete_record ( self , record_id ) : self . _delete ( urljoin ( self . base_url , "informationobjects/{}" . format ( record_id ) ) , expected_response = 204 , ) return { "status" : "Deleted" } | Delete a record with record_id . |
52,146 | def version_check ( self ) : try : version_info = self [ 'Version' ] except KeyError : raise ValidateError ( 'Config file has to have a Version section' ) try : float ( version_info [ 'version' ] ) except KeyError : raise ValidateError ( 'Config file has to have a version section' ) except ValueError : raise ValidateError ( 'Version has to be a float.' ) try : version_info [ 'name' ] except KeyError : raise ValidateError ( "Config file has to have a name" ) return | Check if the version entry is in the proper format |
52,147 | def check_plugin ( self , plugin ) : vcf_section = self [ plugin ] try : vcf_field = vcf_section [ 'field' ] if not vcf_field in self . vcf_columns : raise ValidateError ( "field has to be in {0}\n" "Wrong field name in plugin: {1}" . format ( self . vcf_columns , plugin ) ) if vcf_field == 'INFO' : try : info_key = vcf_section [ 'info_key' ] if info_key == 'CSQ' : try : csq_key = vcf_section [ 'csq_key' ] except KeyError : raise ValidateError ( "CSQ entrys has to refer to an csq field.\n" "Refer with keyword 'csq_key'\n" "csq_key is missing in section: {0}" . format ( plugin ) ) except KeyError : raise ValidateError ( "INFO entrys has to refer to an INFO field.\n" "Refer with keyword 'info_key'\n" "info_key is missing in section: {0}" . format ( plugin ) ) except KeyError : raise ValidateError ( "Vcf entrys have to refer to a field in the VCF with keyword" " 'field'.\nMissing keyword 'field' in plugin: {0}" . format ( plugin ) ) try : data_type = vcf_section [ 'data_type' ] if not data_type in self . data_types : raise ValidateError ( "data_type has to be in {0}\n" "Wrong data_type in plugin: {1}" . format ( self . data_types , plugin ) ) except KeyError : raise ValidateError ( "Vcf entrys have to refer to a data type in the VCF with " "keyword 'data_type'.\n" "Missing data_type in plugin: {0}" . format ( plugin ) ) separators = vcf_section . get ( 'separators' , None ) if separators : if len ( separators ) == 1 : self [ plugin ] [ 'separators' ] = list ( separators ) else : if data_type != 'flag' : raise ValidateError ( "If data_type != flag the separators have to be defined" "Missing separators in plugin: {0}" . format ( plugin ) ) record_rule = vcf_section . get ( 'record_rule' , None ) if record_rule : if not record_rule in [ 'min' , 'max' ] : raise ValidateError ( "Record rules have to be in {0}\n" "Wrong record_rule in plugin: {1}" . format ( [ 'min' , 'max' ] , plugin ) ) else : self . logger . info ( "Setting record rule to default: 'max'" ) return True | Check if the section is in the proper format vcf format . |
52,148 | def span ( route ) : @ wraps ( route ) def route_decorator ( * args , ** kwargs ) : start_span ( ) try : return route ( * args , ** kwargs ) finally : end_span ( ) return route_decorator | Optional decorator for Flask routes . |
52,149 | def _start_subspan ( headers = None ) : b3 = values ( ) g . subspan = { b3_trace_id : b3 [ b3_trace_id ] , b3_span_id : _generate_identifier ( ) , b3_parent_span_id : b3 [ b3_span_id ] , b3_sampled : b3 [ b3_sampled ] , b3_flags : b3 [ b3_flags ] , } result = dict ( headers or { } ) result . update ( { b3_trace_id : g . subspan [ b3_trace_id ] , b3_span_id : g . subspan [ b3_span_id ] , b3_parent_span_id : g . subspan [ b3_parent_span_id ] , } ) if g . subspan [ b3_sampled ] : result [ b3_sampled ] = g . subspan [ b3_sampled ] if g . subspan [ b3_flags ] : result [ b3_flags ] = g . subspan [ b3_flags ] _info ( "Client start. Starting sub-span" ) _log . debug ( "B3 values for sub-span: {b3_headers}" . format ( b3_headers = values ( ) ) ) _log . debug ( "All headers for downstream request: {b3_headers}" . format ( b3_headers = result ) ) return result | Sets up a new span to contact a downstream service . This is used when making a downstream service call . It returns a dict containing the required sub - span headers . Each downstream call you make is handled as a new span so call this every time you need to contact another service . |
52,150 | def _info ( message ) : span = values ( ) _log . debug ( message + ": {span} in trace {trace}. (Parent span: {parent})." . format ( span = span . get ( b3_span_id ) , trace = span . get ( b3_trace_id ) , parent = span . get ( b3_parent_span_id ) , ) ) | Convenience function to log current span values . |
52,151 | def checkversion ( version ) : try : for refversion , responseversion in zip ( [ int ( x ) for x in REQUIREFOLIADOCSERVE . split ( '.' ) ] , [ int ( x ) for x in version . split ( '.' ) ] ) : if responseversion > refversion : return 1 elif responseversion < refversion : return - 1 return 0 except ValueError : raise ValueError ( "Unable to parse version, invalid syntax" ) | Checks foliadocserve version returns 1 if the document is newer than the library - 1 if it is older 0 if it is equal |
52,152 | def pub_poll ( request , docid ) : try : r = flat . comm . get ( request , '/poll/pub/' + docid + '/' , False ) except URLError : return HttpResponseForbidden ( "Unable to connect to the document server [viewer/poll]" ) return HttpResponse ( r , content_type = 'application/json' ) | The initial viewer does not provide the document content yet |
52,153 | def prepare ( _next , self ) : _next ( self ) if not self . autoincrement_support : return id_field = self . form [ 'id' ] del id_field . attrs [ 'required' ] id_field . attrs [ 'disabled' ] = 'disabled' id_field . getter = _ ( 'auto_incremented' , default = 'auto incremented' ) | Hook after prepare and set id disabled . |
52,154 | def delete_user_action ( model , request ) : try : users = model . parent . backend uid = model . model . name del users [ uid ] users ( ) model . parent . invalidate ( ) localizer = get_localizer ( request ) message = localizer . translate ( _ ( 'delete_user_from_database' , default = "Deleted user '${uid}' from database." , mapping = { 'uid' : uid } ) ) return { 'success' : True , 'message' : message } except Exception as e : return { 'success' : False , 'message' : str ( e ) } | Delete user from database . |
52,155 | def delete_group_action ( model , request ) : try : groups = model . parent . backend uid = model . model . name del groups [ uid ] groups ( ) model . parent . invalidate ( ) except Exception as e : return { 'success' : False , 'message' : str ( e ) } localizer = get_localizer ( request ) message = localizer . translate ( _ ( 'deleted_group' , default = 'Deleted group from database' ) ) return { 'success' : True , 'message' : message } | Delete group from database . |
52,156 | def prepare ( _next , self ) : _next ( self ) if not self . roles_support : return if not self . request . has_permission ( 'manage' , self . model . parent ) : return value = [ ] if self . action_resource == 'edit' : value = self . model . model . roles roles_widget = factory ( 'field:label:select' , name = 'principal_roles' , value = value , props = { 'label' : _ ( 'roles' , default = 'Roles' ) , 'multivalued' : True , 'vocabulary' : self . roles_vocab , 'format' : 'single' , 'listing_tag' : 'ul' , 'listing_label_position' : 'after' , } ) save_widget = self . form [ 'save' ] self . form . insertbefore ( roles_widget , save_widget ) | Hook after prepare and set principal_roles as selection to self . form . |
52,157 | def initialize_ugm ( config , global_config , local_config ) : cfg . merged . css . protected . append ( ( static_resources , 'styles.css' ) ) cfg . merged . js . protected . append ( ( static_resources , 'ugm.js' ) ) register_config ( 'ugm_general' , GeneralSettings ) register_config ( 'ugm_server' , ServerSettings ) register_config ( 'ugm_users' , UsersSettings ) register_config ( 'ugm_groups' , GroupsSettings ) register_config ( 'ugm_roles' , RolesSettings ) register_config ( 'ugm_localmanager' , LocalManagerSettings ) register_entry ( 'users' , users_factory ) register_entry ( 'groups' , groups_factory ) acl_registry . register ( ugm_user_acl , User , 'user' ) acl_registry . register ( ugm_default_acl , Users , 'users' ) acl_registry . register ( ugm_default_acl , Group , 'group' ) acl_registry . register ( ugm_default_acl , Groups , 'groups' ) lm_config = local_config . get ( 'ugm.localmanager_config' , '' ) os . environ [ 'LOCAL_MANAGER_CFG_FILE' ] = lm_config config . add_translation_dirs ( 'cone.ugm:locale/' ) config . add_view ( static_resources , name = 'cone.ugm.static' ) config . scan ( 'cone.ugm.browser' ) | Initialize UGM . |
52,158 | def expiration_extractor ( widget , data ) : active = int ( data . request . get ( '%s.active' % widget . name , '0' ) ) if not active : return 0 expires = data . extracted if expires : return time . mktime ( expires . utctimetuple ( ) ) return UNSET | Extract expiration information . |
52,159 | def prepare ( _next , self ) : _next ( self ) cfg = ugm_general ( self . model ) if cfg . attrs [ 'users_account_expiration' ] != 'True' : return mode = 'edit' if not self . request . has_permission ( 'manage_expiration' , self . model . parent ) : mode = 'display' if self . action_resource == 'edit' : attr = cfg . attrs [ 'users_expires_attr' ] unit = int ( cfg . attrs [ 'users_expires_unit' ] ) value = int ( self . model . attrs . get ( attr , 0 ) ) if unit == 0 : value *= 86400 else : value = UNSET expires_widget = factory ( 'field:label:expiration' , name = 'active' , value = value , props = { 'label' : _ ( 'active' , default = 'Active' ) } , mode = mode ) save_widget = self . form [ 'save' ] self . form . insertbefore ( expires_widget , save_widget ) | Hook after prepare and set expiration widget to self . form . |
52,160 | def prepare ( _next , self ) : _next ( self ) if not self . portrait_support : return model = self . model request = self . request if request . has_permission ( 'edit_user' , model . parent ) : mode = 'edit' else : mode = 'display' cfg = ugm_general ( model ) image_attr = cfg . attrs [ 'users_portrait_attr' ] image_accept = cfg . attrs [ 'users_portrait_accept' ] image_width = int ( cfg . attrs [ 'users_portrait_width' ] ) image_height = int ( cfg . attrs [ 'users_portrait_height' ] ) image_data = model . attrs . get ( image_attr ) if image_data : image_value = { 'file' : BytesIO ( image_data ) , 'mimetype' : 'image/jpeg' , } image_url = make_url ( request , node = model , resource = 'portrait_image' ) else : image_value = UNSET resource = 'cone.ugm.static/images/default_portrait.jpg' image_url = make_url ( request , node = model . root , resource = resource ) portrait_widget = factory ( 'field:label:error:image' , name = 'portrait' , value = image_value , props = { 'label' : _ ( 'portrait' , default = 'Portrait' ) , 'src' : image_url , 'alt' : _ ( 'portrait' , default = 'Portrait' ) , 'accept' : image_accept , 'minsize' : ( image_width , image_height ) , 'crop' : { 'size' : ( image_width , image_height ) , 'fitting' : True , } } , mode = mode ) save_widget = self . form [ 'save' ] self . form . insertbefore ( portrait_widget , save_widget ) | Hook after prepare and set portrait as image widget to self . form . |
52,161 | def local_manager_consider_for_user ( self ) : if not self . local_management_enabled : return False request = get_current_request ( ) if authenticated_userid ( request ) == security . ADMIN_USER : return False roles = security . authenticated_user ( request ) . roles if 'admin' in roles or 'manager' in roles : return False return True | Flag whether local manager ACL should be considered for current authenticated user . |
52,162 | def local_manager_gid ( self ) : config = self . root [ 'settings' ] [ 'ugm_localmanager' ] . attrs user = security . authenticated_user ( get_current_request ( ) ) if not user : return None gids = user . group_ids adm_gids = list ( ) for gid in gids : rule = config . get ( gid ) if rule : adm_gids . append ( gid ) if len ( adm_gids ) == 0 : return None if len ( adm_gids ) > 1 : msg = ( u"Authenticated member defined in local manager " u"groups %s but only one management group allowed for " u"each user. Please contact System Administrator in " u"order to fix this problem." ) exc = msg % ', ' . join ( [ "'%s'" % gid for gid in adm_gids ] ) raise Exception ( exc ) return adm_gids [ 0 ] | Group id of local manager group of current authenticated member . |
52,163 | def local_manager_rule ( self ) : adm_gid = self . local_manager_gid if not adm_gid : return None config = self . root [ 'settings' ] [ 'ugm_localmanager' ] . attrs return config [ adm_gid ] | Return rule for local manager . |
52,164 | def local_manager_target_uids ( self ) : groups = self . root [ 'groups' ] . backend managed_uids = set ( ) for gid in self . local_manager_target_gids : group = groups . get ( gid ) if group : managed_uids . update ( group . member_ids ) return list ( managed_uids ) | Target uid s for local manager . |
52,165 | def local_manager_is_default ( self , adm_gid , gid ) : config = self . root [ 'settings' ] [ 'ugm_localmanager' ] . attrs rule = config [ adm_gid ] if gid not in rule [ 'target' ] : raise Exception ( u"group '%s' not managed by '%s'" % ( gid , adm_gid ) ) return gid in rule [ 'default' ] | Check whether gid is default group for local manager group . |
52,166 | def form_field_definitions ( self ) : schema = copy . deepcopy ( form_field_definitions . user ) uid , login = self . _get_auth_attrs ( ) if uid != login : field = schema . get ( login , schema [ 'default' ] ) if field [ 'chain' ] . find ( '*optional_login' ) == - 1 : field [ 'chain' ] = '%s:%s' % ( '*optional_login' , field [ 'chain' ] ) if not field . get ( 'custom' ) : field [ 'custom' ] = dict ( ) field [ 'custom' ] [ 'optional_login' ] = ( [ 'context.optional_login' ] , [ ] , [ ] , [ ] , [ ] ) schema [ login ] = field return schema | Hook optional_login extractor if necessary for form defaults . |
52,167 | def remote_add_user ( model , request ) : params = request . params uid = params . get ( 'id' ) if not uid : return { 'success' : False , 'message' : u"No user ID given." , } users = model . backend if uid in users : return { 'success' : False , 'message' : u"User with given ID already exists." , } password = params . get ( 'password' ) add_roles = params . get ( 'roles' , '' ) add_roles = [ val . strip ( ) for val in add_roles . split ( ',' ) if val ] add_groups = params . get ( 'groups' , '' ) add_groups = [ val . strip ( ) for val in add_groups . split ( ',' ) if val ] attrs = dict ( ) for key , val in params . items ( ) : if not key . startswith ( 'attr.' ) : continue key = key [ key . find ( '.' ) + 1 : ] attrs [ key ] = val settings = ugm_users ( model ) attrmap = settings . attrs . users_form_attrmap exposed = settings . attrs . users_exposed_attributes if not exposed : exposed = list ( ) valid_attrs = attrmap . keys ( ) + exposed checked_attrs = dict ( ) for key in valid_attrs : val = attrs . get ( key ) if not val : continue checked_attrs [ key ] = val try : user = users . create ( uid , ** checked_attrs ) message = u"" from cone . app . security import DEFAULT_ROLES available_roles = [ role [ 0 ] for role in DEFAULT_ROLES ] for role in add_roles : if role not in available_roles : message += u"Role '%s' given but inexistent. " % role continue user . add_role ( role ) groups = users . parent . groups for group in add_groups : if group not in groups : message += u"Group '%s' given but inexistent. " % group continue groups [ group ] . add ( uid ) users . parent ( ) if password is not None : users . passwd ( uid , None , password ) message += u"Created user with ID '%s'." % uid return { 'success' : True , 'message' : message , } except Exception as e : return { 'success' : False , 'message' : str ( e ) , } finally : model . invalidate ( ) | Add user via remote service . |
52,168 | def remote_delete_user ( model , request ) : params = request . params uid = params . get ( 'id' ) if not uid : return { 'success' : False , 'message' : u"No user ID given." , } users = model . backend if uid not in users : return { 'success' : False , 'message' : u"User with given ID not exists." , } try : del users [ uid ] users . parent ( ) message = u"Deleted user with ID '%s'." % uid return { 'success' : True , 'message' : message , } except Exception as e : return { 'success' : False , 'message' : str ( e ) , } finally : model . invalidate ( ) | Remove user via remote service . |
52,169 | def get_formset ( self , request , obj = None , ** kwargs ) : if obj is not None : try : obj . external except LayerExternal . DoesNotExist : pass return super ( LayerExternalInline , self ) . get_formset ( request , obj = None , ** kwargs ) | Load Synchronizer schema to display specific fields in admin |
52,170 | def public_broadcaster ( ) : while __websocket_server_running__ : pipein = open ( PUBLIC_PIPE , 'r' ) line = pipein . readline ( ) . replace ( '\n' , '' ) . replace ( '\r' , '' ) if line != '' : WebSocketHandler . broadcast ( line ) print line remaining_lines = pipein . read ( ) pipein . close ( ) pipeout = open ( PUBLIC_PIPE , 'w' ) pipeout . write ( remaining_lines ) pipeout . close ( ) else : pipein . close ( ) time . sleep ( 0.05 ) | Thread which runs in parallel and constantly checks for new messages in the public pipe and broadcasts them publicly to all connected clients . |
52,171 | def private_messenger ( ) : while __websocket_server_running__ : pipein = open ( PRIVATE_PIPE , 'r' ) line = pipein . readline ( ) . replace ( '\n' , '' ) . replace ( '\r' , '' ) if line != '' : message = json . loads ( line ) WebSocketHandler . send_private_message ( user_id = message [ 'user_id' ] , message = message ) print line remaining_lines = pipein . read ( ) pipein . close ( ) pipeout = open ( PRIVATE_PIPE , 'w' ) pipeout . write ( remaining_lines ) pipeout . close ( ) else : pipein . close ( ) time . sleep ( 0.05 ) | Thread which runs in parallel and constantly checks for new messages in the private pipe and sends them to the specific client . If client is not connected the message is discarded . |
52,172 | def write ( self , values , timestamp = None , database = None , async = True ) : func = write_async if async else write return func ( name = self . name , values = values , tags = self . tags , timestamp = timestamp , database = database ) | write metric point |
52,173 | def get ( self , request , * args , ** kwargs ) : serializer_class = self . get_serializer_class ( ) context = self . get_serializer_context ( ) services = [ ] for service_type in SERVICES . keys ( ) : services . append ( serializer_class ( object ( ) , context = context , service_type = service_type ) . data ) return Response ( services ) | return list of open 311 services |
52,174 | def get ( self , request , * args , ** kwargs ) : if 'service_code' not in request . GET . keys ( ) : return Response ( { 'detail' : _ ( 'A service code must be inserted' ) } , status = 404 ) service_code = request . GET [ 'service_code' ] if service_code not in SERVICES . keys ( ) : return Response ( { 'detail' : _ ( 'Service not found' ) } , status = 404 ) start_date = None end_date = None status = None layer = None STATUSES = { } for status_type in ( 'open' , 'closed' ) : STATUSES [ status_type ] = [ k for k , v in STATUS . items ( ) if v == status_type ] if 'start_date' in request . GET . keys ( ) : start_date = request . GET [ 'start_date' ] if iso8601_REGEXP . match ( start_date ) is None : return Response ( { 'detail' : _ ( 'Invalid date inserted' ) } , status = 404 ) if 'end_date' in request . GET . keys ( ) : end_date = request . GET [ 'end_date' ] if iso8601_REGEXP . match ( end_date ) is None : return Response ( { 'detail' : _ ( 'Invalid date inserted' ) } , status = 404 ) if 'status' in request . GET . keys ( ) : if request . GET [ 'status' ] not in ( 'open' , 'closed' ) : return Response ( { 'detail' : _ ( 'Invalid status inserted' ) } , status = 404 ) status = request . GET [ 'status' ] if 'layer' in request . GET . keys ( ) : layer = request . GET [ 'layer' ] node_layer = get_object_or_404 ( Layer , slug = layer ) service_model = MODELS [ service_code ] if service_code in ( 'vote' , 'comment' , 'rate' ) : self . queryset = service_model . objects . none ( ) else : self . queryset = service_model . objects . all ( ) if layer is not None : self . queryset = self . queryset . filter ( layer = node_layer ) if start_date is not None and end_date is not None : self . queryset = self . queryset . filter ( added__gte = start_date ) . filter ( added__lte = end_date ) if start_date is not None and end_date is None : self . queryset = self . queryset . filter ( added__gte = start_date ) if start_date is None and end_date is not None : self . 
queryset = self . queryset . filter ( added__lte = end_date ) if status is not None : q_list = [ Q ( status__slug__exact = s ) for s in STATUSES [ status ] ] self . queryset = self . queryset . filter ( reduce ( operator . or_ , q_list ) ) return self . list ( request , * args , ** kwargs ) | Retrieve list of service requests |
52,175 | def node_created_handler ( sender , ** kwargs ) : if kwargs [ 'created' ] : obj = kwargs [ 'instance' ] queryset = exclude_owner_of_node ( obj ) create_notifications . delay ( ** { "users" : queryset , "notification_model" : Notification , "notification_type" : "node_created" , "related_object" : obj } ) | send notification when a new node is created according to users s settings |
52,176 | def node_status_changed_handler ( ** kwargs ) : obj = kwargs [ 'instance' ] obj . old_status = kwargs [ 'old_status' ] . name obj . new_status = kwargs [ 'new_status' ] . name queryset = exclude_owner_of_node ( obj ) create_notifications . delay ( ** { "users" : queryset , "notification_model" : Notification , "notification_type" : "node_status_changed" , "related_object" : obj } ) if obj . user is not None : create_notifications . delay ( ** { "users" : [ obj . user ] , "notification_model" : Notification , "notification_type" : "node_own_status_changed" , "related_object" : obj } ) | send notification when the status of a node changes according to users s settings |
52,177 | def diff ( self ) : latest = self . latest current = NetJsonParser ( self . json ( ) ) return diff ( current , latest ) | shortcut to netdiff . diff |
52,178 | def json ( self ) : nodes = [ ] links = [ ] for link in self . link_set . all ( ) : if self . is_layer2 : source = link . interface_a . mac destination = link . interface_b . mac else : source = str ( link . interface_a . ip_set . first ( ) . address ) destination = str ( link . interface_b . ip_set . first ( ) . address ) nodes . append ( { 'id' : source } ) nodes . append ( { 'id' : destination } ) links . append ( OrderedDict ( ( ( 'source' , source ) , ( 'target' , destination ) , ( 'cost' , link . metric_value ) ) ) ) return OrderedDict ( ( ( 'type' , 'NetworkGraph' ) , ( 'protocol' , self . parser . protocol ) , ( 'version' , self . parser . version ) , ( 'metric' , self . parser . metric ) , ( 'nodes' , nodes ) , ( 'links' , links ) ) ) | returns a dict that represents a NetJSON NetworkGraph object |
52,179 | def update ( self ) : from . link import Link diff = self . diff ( ) status = { 'added' : 'active' , 'removed' : 'disconnected' , 'changed' : 'active' } for section in [ 'added' , 'removed' , 'changed' ] : if not diff [ section ] : continue for link_dict in diff [ section ] [ 'links' ] : try : link = Link . get_or_create ( source = link_dict [ 'source' ] , target = link_dict [ 'target' ] , cost = link_dict [ 'cost' ] , topology = self ) except ( LinkDataNotFound , ValidationError ) as e : msg = 'Exception while updating {0}' . format ( self . __repr__ ( ) ) logger . exception ( msg ) print ( '{0}\n{1}\n' . format ( msg , e ) ) continue link . ensure ( status = status [ section ] , cost = link_dict [ 'cost' ] ) | Updates topology Links are not deleted straightaway but set as disconnected |
52,180 | def clean ( self , * args , ** kwargs ) : if self . synchronizer_path != 'None' and self . config : try : self . synchronizer . load_config ( self . config ) self . synchronizer . clean ( ) except ImproperlyConfigured as e : raise ValidationError ( e . message ) | Call self . synchronizer . clean method |
52,181 | def save ( self , * args , ** kwargs ) : after_save = kwargs . pop ( 'after_save' , True ) super ( LayerExternal , self ) . save ( * args , ** kwargs ) if after_save : try : synchronizer = self . synchronizer except ImproperlyConfigured : pass else : if synchronizer : synchronizer . after_external_layer_saved ( self . config ) self . _reload_schema ( ) | call synchronizer after_external_layer_saved method for any additional operation that must be executed after save |
52,182 | def synchronizer_class ( self ) : if not self . synchronizer_path or self . synchronizer_path == 'None' or not self . layer : return False if ( self . _synchronizer_class is not None and self . _synchronizer_class . __name__ not in self . synchronizer_path ) : self . _synchronizer = None self . _synchronizer_class = None if not self . _synchronizer_class : self . _synchronizer_class = import_by_path ( self . synchronizer_path ) return self . _synchronizer_class | returns synchronizer class |
52,183 | def get_can_edit ( self , obj ) : view = self . context . get ( 'view' ) request = copy ( self . context . get ( 'request' ) ) request . _method = 'PUT' try : view . check_object_permissions ( request , obj ) except ( PermissionDenied , NotAuthenticated ) : return False else : return True | returns true if user has permission to edit false otherwise |
52,184 | def get_details ( self , obj ) : args = { 'slug' : obj . node . slug , 'pk' : obj . pk } return reverse ( 'api_node_image_detail' , kwargs = args , request = self . context . get ( 'request' , None ) ) | returns uri of API image resource |
52,185 | def has_permission ( self , request , view ) : if request . method == 'POST' : user = Profile . objects . only ( 'id' , 'username' ) . get ( username = view . kwargs [ 'username' ] ) return request . user . id == user . id return True | applies to social - link - list |
52,186 | def delete ( self , * args , ** kwargs ) : try : os . remove ( self . file . file . name ) except ( OSError , IOError ) : pass super ( Image , self ) . delete ( * args , ** kwargs ) | delete image when an image record is deleted |
52,187 | def get_quality ( self , type = 'etx' ) : if type == 'etx' : if 0 < self . etx < 1.5 : quality = 1 elif self . etx < 3 : quality = 2 else : quality = 3 elif type == 'dbm' : if - 83 < self . dbm < 0 : quality = 1 elif self . dbm > - 88 : quality = 2 else : quality = 3 return quality | used to determine color of links |
52,188 | def new_nodes_allowed_for_layer ( self ) : if not self . pk and self . layer and not self . layer . new_nodes_allowed : raise ValidationError ( _ ( 'New nodes are not allowed for this layer' ) ) | ensure new nodes are allowed for this layer |
52,189 | def nodes_minimum_distance_validation ( self ) : if self . layer and self . layer . nodes_minimum_distance : minimum_distance = self . layer . nodes_minimum_distance near_nodes = Node . objects . exclude ( pk = self . id ) . filter ( geometry__distance_lte = ( self . geometry , D ( m = minimum_distance ) ) ) . count ( ) if near_nodes > 0 : raise ValidationError ( _ ( 'Distance between nodes cannot be less than %s meters' ) % minimum_distance ) | if minimum distance is specified ensure node is not too close to other nodes ; |
52,190 | def node_contained_in_layer_area_validation ( self ) : if self . layer and isinstance ( self . layer . area , Polygon ) and not self . layer . area . contains ( self . geometry ) : raise ValidationError ( _ ( 'Node must be inside layer area' ) ) | if layer defines an area ensure node coordinates are contained in the area |
52,191 | def save ( self , * args , ** kwargs ) : super ( Layer , self ) . save ( * args , ** kwargs ) if self . pk and self . is_published != self . _current_is_published : layer_is_published_changed . send ( sender = self . __class__ , instance = self , old_is_published = self . _current_is_published , new_is_published = self . is_published ) self . update_nodes_published ( ) self . _current_is_published = self . is_published | intercepts changes to is_published and fires layer_is_published_changed signal |
52,192 | def update_nodes_published ( self ) : if self . pk : self . node_set . all ( ) . update ( is_published = self . is_published ) | publish or unpublish nodes of current layer |
52,193 | def get ( self , request , format = None ) : action = request . query_params . get ( 'action' , 'unread' ) action = action if action == 'count' or action == 'all' else 'unread' mark_as_read = request . query_params . get ( 'read' , 'true' ) == 'true' notifications = self . get_queryset ( ) . filter ( to_user = request . user ) return getattr ( self , 'get_%s' % action ) ( request , notifications , mark_as_read ) | get HTTP method |
52,194 | def get_count ( self , request , notifications , mark_as_read = False ) : return Response ( { 'count' : notifications . filter ( is_read = False ) . count ( ) } ) | return count of unread notification |
52,195 | def get_all ( self , request , notifications , mark_as_read = False ) : return self . list ( request , notifications ) | return all notifications with pagination |
52,196 | def get_object ( self , queryset = None ) : try : obj = self . get_queryset ( ) except self . model . DoesNotExist : raise Http404 ( ) self . check_object_permissions ( self . request , obj ) return obj | get privacy settings of current user |
52,197 | def clean ( self , * args , ** kwargs ) : if self . status != LINK_STATUS . get ( 'planned' ) : if self . interface_a is None or self . interface_b is None : raise ValidationError ( _ ( 'fields "from interface" and "to interface" are mandatory in this case' ) ) if ( self . interface_a_id == self . interface_b_id ) or ( self . interface_a == self . interface_b ) : msg = _ ( 'link cannot have same "from interface" and "to interface: %s"' ) % self . interface_a raise ValidationError ( msg ) if self . status == LINK_STATUS . get ( 'planned' ) and ( self . node_a is None or self . node_b is None ) : raise ValidationError ( _ ( 'fields "from node" and "to node" are mandatory for planned links' ) ) if self . type != LINK_TYPES . get ( 'radio' ) and ( self . dbm is not None or self . noise is not None ) : raise ValidationError ( _ ( 'Only links of type "radio" can contain "dbm" and "noise" information' ) ) | Custom validation 1 . interface_a and interface_b mandatory except for planned links 2 . planned links should have at least node_a and node_b filled in 3 . dbm and noise fields can be filled only for radio links 4 . interface_a and interface_b must differ 5 . interface a and b type must match |
52,198 | def get_or_create ( cls , source , target , cost , topology = None ) : try : return cls . get_link ( source , target , topology ) except LinkNotFound as e : pass link = Link ( interface_a = e . interface_a , interface_b = e . interface_b , status = LINK_STATUS [ 'active' ] , metric_value = cost , topology = topology ) link . full_clean ( ) link . save ( ) return link | Tries to find a link with get_link creates a new link if link not found . |
52,199 | def ensure ( self , status , cost ) : changed = False status_id = LINK_STATUS [ status ] if self . status != status_id : self . status = status_id changed = True if self . metric_value != cost : self . metric_value = cost changed = True if changed : self . save ( ) | ensure link properties correspond to the specified ones perform save operation only if necessary |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.