idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
51,700
def _make_inputnode ( self , frequency ) : inputs = list ( self . frequency_inputs ( frequency ) ) input_names = [ i . name for i in inputs ] input_names . extend ( self . study . FREQUENCIES [ frequency ] ) if not input_names : raise ArcanaError ( "No inputs to '{}' pipeline for requested freqency '{}'" . format ( self . name , frequency ) ) inputnode = self . add ( '{}_inputnode' . format ( frequency ) , IdentityInterface ( fields = input_names ) ) for input in inputs : prev_conv_nodes = { } for ( node , node_in , format , conv_kwargs ) in self . _input_conns [ input . name ] : if self . requires_conversion ( input , format ) : try : conv = format . converter_from ( input . format , ** conv_kwargs ) except ArcanaNoConverterError as e : e . msg += ( "which is required to convert '{}' from {} to {} " "for '{}' input of '{}' node" . format ( input . name , input . format , format , node_in , node . name ) ) raise e try : in_node = prev_conv_nodes [ format . name ] except KeyError : in_node = prev_conv_nodes [ format . name ] = self . add ( 'conv_{}_to_{}_format' . format ( input . name , format . name ) , conv . interface , inputs = { conv . input : ( inputnode , input . name ) } , requirements = conv . requirements , mem_gb = conv . mem_gb , wall_time = conv . wall_time ) in_node_out = conv . output else : in_node = inputnode in_node_out = input . name self . connect ( in_node , in_node_out , node , node_in ) for iterator , conns in self . _iterator_conns . items ( ) : if self . study . FREQUENCIES [ frequency ] == ( iterator , ) : for ( node , node_in , format ) in conns : self . connect ( inputnode , iterator , node , node_in ) return inputnode
Generates an input node for the given frequency . It also adds implicit file format conversion nodes to the pipeline .
51,701
def _make_outputnode(self, frequency):
    """Generate the output node for the given frequency.

    Implicit file-format conversion nodes are added for any output whose
    produced format differs from the format it is to be stored in.

    Parameters
    ----------
    frequency : str
        The data frequency (e.g. 'per_session') to build the node for.

    Returns
    -------
    The added IdentityInterface node exposing one field per output.

    Raises
    ------
    ArcanaError
        If the pipeline has no outputs at the requested frequency.
    """
    outputs = list(self.frequency_outputs(frequency))
    if not outputs:
        raise ArcanaError(
            "No outputs to '{}' pipeline for requested freqency '{}'"
            .format(self.name, frequency))
    output_names = [o.name for o in outputs]
    outputnode = self.add('{}_outputnode'.format(frequency),
                          IdentityInterface(fields=output_names))
    for output in outputs:
        (node, node_out, format, conv_kwargs) = self._output_conns[output.name]
        if self.requires_conversion(output, format):
            # Insert a converter between the producing node and the outputnode
            conv = output.format.converter_from(format, **conv_kwargs)
            node = self.add(
                'conv_{}_from_{}_format'.format(output.name, format.name),
                conv.interface,
                inputs={conv.input: (node, node_out)},
                requirements=conv.requirements,
                mem_gb=conv.mem_gb,
                wall_time=conv.wall_time)
            node_out = conv.output
        self.connect(node, node_out, outputnode, output.name)
    return outputnode
Generates an output node for the given frequency . It also adds implicit file format conversion nodes to the pipeline .
51,702
def _gen_prov(self):
    """Extract provenance information from the pipeline.

    Serialises the workflow graph (node names, their individual ``prov``
    dicts and the links between them) together with package versions and
    study provenance into a plain JSON-serialisable dict.

    Returns
    -------
    dict
        Provenance dictionary with keys '__prov_version__', 'name',
        'workflow', 'study', 'pkg_versions', 'python_version' and
        'joined_ids'.
    """
    wf_dict = nx_json.node_link_data(self.workflow._graph)
    for link in wf_dict['links']:
        # networkx < 2 stores link endpoints as indices into the node list;
        # networkx >= 2 stores the node objects themselves.
        if int(networkx_version.split('.')[0]) < 2:
            link['source'] = wf_dict['nodes'][link['source']]['id'].name
            link['target'] = wf_dict['nodes'][link['target']]['id'].name
        else:
            link['source'] = link['source'].name
            link['target'] = link['target'].name
    # Replace node objects with a mapping from node name to its provenance
    wf_dict['nodes'] = {n['id'].name: n['id'].prov for n in wf_dict['nodes']}
    # Round-trip through JSON to guarantee pure-JSON types throughout
    wf_dict = json.loads(json.dumps(wf_dict))
    dependency_versions = {d: extract_package_version(d)
                           for d in ARCANA_DEPENDENCIES}
    pkg_versions = {'arcana': __version__}
    # Only record dependencies that are actually installed
    pkg_versions.update((k, v) for k, v in dependency_versions.items()
                        if v is not None)
    prov = {
        '__prov_version__': PROVENANCE_VERSION,
        'name': self.name,
        'workflow': wf_dict,
        'study': self.study.prov,
        'pkg_versions': pkg_versions,
        'python_version': sys.version,
        'joined_ids': self._joined_ids()}
    return prov
Extracts provenance information from the pipeline into a PipelineProv object
51,703
def expected_record(self, node):
    """Construct the provenance record that would be saved in ``node``
    if the pipeline were run on the current state of the repository.

    Parameters
    ----------
    node : tree node (session/subject/visit/study)
        The node the record would be stored at.

    Returns
    -------
    Record
        The expected provenance record, including input/output checksums.
    """
    exp_inputs = {}
    for inpt in self.inputs:
        # Iterators present in the input's frequency but not the node's
        # need to be "joined" over (e.g. per_session input -> per_subject node)
        iterators_to_join = (self.iterators(inpt.frequency) -
                             self.iterators(node.frequency))
        if not iterators_to_join:
            # Same frequency: a single item's checksums
            exp_inputs[inpt.name] = inpt.collection.item(
                node.subject_id, node.visit_id).checksums
        elif len(iterators_to_join) == 1:
            # One joined iterator: a flat list of checksums
            exp_inputs[inpt.name] = [
                inpt.collection.item(n.subject_id, n.visit_id).checksums
                for n in node.nodes(inpt.frequency)]
        else:
            # Two joined iterators: a list (per subject) of lists (per session)
            exp_inputs[inpt.name] = []
            for subj in node.subjects:
                exp_inputs[inpt.name].append([
                    inpt.collection.item(s.subject_id, s.visit_id).checksums
                    for s in subj.sessions])
    exp_outputs = {
        o.name: o.collection.item(node.subject_id, node.visit_id).checksums
        for o in self.outputs}
    exp_prov = copy(self.prov)
    if PY2:
        # Round-trip to normalise unicode/str differences on Python 2
        exp_inputs = json.loads(json.dumps(exp_inputs))
        exp_outputs = json.loads(json.dumps(exp_outputs))
    exp_prov['inputs'] = exp_inputs
    exp_prov['outputs'] = exp_outputs
    exp_prov['joined_ids'] = self._joined_ids()
    return Record(self.name, node.frequency, node.subject_id,
                  node.visit_id, self.study.name, exp_prov)
Constructs the provenance record that would be saved in the given node if the pipeline was run on the current state of the repository
51,704
def tree(self, subject_ids=None, visit_ids=None, **kwargs):
    """Return the tree of subject and session information within a
    project in the XNAT repository, optionally filtered by subject and
    visit IDs."""
    found = self.find_data(subject_ids=subject_ids, visit_ids=visit_ids)
    return Tree.construct(self, *found, **kwargs)
Return the tree of subject and sessions information within a project in the XNAT repository
51,705
def cached_tree(self, subject_ids=None, visit_ids=None, fill=False):
    """Access the repository tree, caching it for subsequent accesses.

    Parameters
    ----------
    subject_ids : Iterable[str] | None
        Subject IDs to filter by (None means all).
    visit_ids : Iterable[str] | None
        Visit IDs to filter by (None means all).
    fill : bool
        If True, pass the requested IDs through as fill_subjects/
        fill_visits when constructing the tree.

    Returns
    -------
    The (possibly cached) tree.
    """
    # Freeze the ID sets so they can serve as (hashable) cache keys
    if subject_ids is not None:
        subject_ids = frozenset(subject_ids)
    if visit_ids is not None:
        visit_ids = frozenset(visit_ids)
    try:
        tree = self._cache[subject_ids][visit_ids]
    except KeyError:
        if fill:
            fill_subjects = subject_ids
            fill_visits = visit_ids
        else:
            fill_subjects = fill_visits = None
        tree = self.tree(subject_ids=subject_ids, visit_ids=visit_ids,
                         fill_visits=fill_visits,
                         fill_subjects=fill_subjects)
        # Cache under both the requested keys and the IDs actually found,
        # so an unfiltered request can later be satisfied by either key.
        self._cache[subject_ids][visit_ids] = self._cache[
            frozenset(tree.subject_ids)][frozenset(tree.visit_ids)] = tree
    return tree
Access the repository tree and caches it for subsequent accesses
51,706
def resource_names(self, repo_type):
    """Return the names of resources used to store this format on the
    given repository type.

    Falls back to the format's name and its upper-case form when no
    explicit mapping is registered for ``repo_type``.
    """
    if repo_type in self._resource_names:
        return self._resource_names[repo_type]
    return [self.name, self.name.upper()]
Names of resources used to store the format on a given repository type. Defaults to the name of the format and its upper-case form.
51,707
def default_aux_file_paths(self, primary_path):
    """Return the default paths for auxiliary files relative to the
    primary file's path, i.e. the same base name with each auxiliary
    extension substituted for the primary extension."""
    base = primary_path[:-len(self.ext)]
    return {aux_name: base + aux_ext
            for aux_name, aux_ext in self.aux_files.items()}
Get the default paths for auxiliary files relative to the path of the primary file i . e . the same name as the primary path with a different extension
51,708
def matches(self, fileset):
    """Check whether this format matches the given fileset.

    Matching is attempted in order: by explicit resource name, then (for
    directory formats) by directory contents, then by primary-file path.

    Returns
    -------
    bool
    """
    if fileset._resource_name is not None:
        # An explicit resource name takes precedence over path inspection
        return (fileset._resource_name in
                self.resource_names(fileset.repository.type))
    elif self.directory:
        if op.isdir(fileset.path):
            if self.within_dir_exts is None:
                return True
            else:
                # Match only if the directory's (non-hidden) file
                # extensions are exactly the expected set
                return self.within_dir_exts == frozenset(
                    split_extension(f)[1]
                    for f in os.listdir(fileset.path)
                    if not f.startswith('.'))
        else:
            return False
    else:
        if op.isfile(fileset.path):
            all_paths = [fileset.path] + fileset._potential_aux_files
            try:
                # assort_files raises if the paths don't fit this format
                primary_path = self.assort_files(all_paths)[0]
            except ArcanaFileFormatError:
                return False
            else:
                return primary_path == fileset.path
        else:
            return False
Checks to see whether the format matches the given fileset
51,709
def set_converter(self, file_format, converter):
    """Register a converter together with the file format it is able to
    convert from."""
    entry = (file_format, converter)
    self._converters[file_format.name] = entry
Register a Converter and the FileFormat that it is able to convert from
51,710
def parse_url(url):
    """Parse an Elk connection string.

    Returns a ``(scheme, host, port, ssl_context)`` tuple. Supported
    schemes and default ports: elk (2101), elks (2601, with a permissive
    TLS context), serial (115200 baud).

    Raises
    ------
    ValueError
        If the scheme is not one of the supported ones.
    """
    scheme, dest = url.split('://')
    default_ports = {'elk': 2101, 'elks': 2601, 'serial': 115200}
    if scheme not in default_ports:
        raise ValueError("Invalid scheme '%s'" % scheme)
    if ':' in dest:
        host, port = dest.split(':')
    else:
        host, port = dest, default_ports[scheme]
    ssl_context = None
    if scheme == 'elks':
        # Panel certs are self-signed, so verification is disabled
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        ssl_context.verify_mode = ssl.CERT_NONE
    return (scheme, host, int(port), ssl_context)
Parse an Elk connection string.
51,711
def pretty_const(value):
    """Make a constant name pretty for printing in a GUI, e.g.
    'FIRE_ALARM' -> 'Fire alarm'."""
    first, *rest = value.split('_')
    return ' '.join([first.capitalize()] + [word.lower() for word in rest])
Make a constant pretty for printing in GUI
51,712
def username(elk, user_number):
    """Return the display name of a user.

    Regular user numbers are looked up in ``elk.users``; the reserved
    numbers 201-203 map to fixed pseudo-user labels; anything else
    yields an empty string.
    """
    if 0 <= user_number < elk.users.max_elements:
        return elk.users[user_number].name
    reserved = {201: "*Program*", 202: "*Elk RP*", 203: "*Quick arm*"}
    return reserved.get(user_number, "")
Return name of user .
51,713
async def _connect(self, connection_lost_callbk=None):
    """Open the asyncio connection to the Elk panel.

    Connects over serial or TCP (optionally TLS) depending on the
    configured URL. On failure, schedules a reconnect with exponential
    backoff (doubling up to a 60 second ceiling).
    """
    self.connection_lost_callbk = connection_lost_callbk
    url = self._config['url']
    LOG.info("Connecting to ElkM1 at %s", url)
    scheme, dest, param, ssl_context = parse_url(url)
    # Protocol factory pre-bound to this instance's callbacks
    conn = partial(Connection, self.loop, self._connected,
                   self._disconnected, self._got_data, self._timeout)
    try:
        if scheme == 'serial':
            # ``param`` is the baud rate for serial connections
            await serial_asyncio.create_serial_connection(
                self.loop, conn, dest, baudrate=param)
        else:
            # ``param`` is the TCP port for network connections
            await asyncio.wait_for(
                self.loop.create_connection(
                    conn, host=dest, port=param, ssl=ssl_context),
                timeout=30)
    except (ValueError, OSError, asyncio.TimeoutError) as err:
        LOG.warning(
            "Could not connect to ElkM1 (%s). Retrying in %d seconds",
            err, self._connection_retry_timer)
        self.loop.call_later(self._connection_retry_timer, self.connect)
        # Exponential backoff, capped at 60 seconds
        self._connection_retry_timer = (
            2 * self._connection_retry_timer
            if self._connection_retry_timer < 32 else 60)
Asyncio connection to Elk .
51,714
def _connected(self, transport, conn):
    """Handle a newly-established panel connection: log in if the URL
    scheme is secure, then kick off the memory sync and (for network
    connections) the heartbeat timer."""
    LOG.info("Connected to ElkM1")
    self._conn = conn
    self._transport = transport
    # Reset backoff now that a connection succeeded
    self._connection_retry_timer = 1
    panel_url = self._config['url']
    if url_scheme_is_secure(panel_url):
        # Secure connections require credentials before any other traffic
        self._conn.write_data(self._config['userid'], raw=True)
        self._conn.write_data(self._config['password'], raw=True)
    self.call_sync_handlers()
    if not panel_url.startswith('serial://'):
        self._heartbeat = self.loop.call_later(120, self._reset_connection)
Login and sync the ElkM1 panel to memory .
51,715
def send(self, msg):
    """Send a message to the Elk panel; silently drops the message when
    no connection is open."""
    connection = self._conn
    if connection:
        connection.write_data(msg.message, msg.response_command)
Send a message to Elk panel .
51,716
def sync(self):
    """Retrieve zone state from the ElkM1 by requesting alarm-by-zone,
    definition, partition and status data, then the zone descriptions."""
    for encoder in (az_encode, zd_encode, zp_encode, zs_encode):
        self.elk.send(encoder())
    self.get_descriptions(TextDescriptions.ZONE.value)
Retrieve zones from ElkM1
51,717
def has_offline_historical_manager_or_raise(self):
    """Raise OfflineHistoricalManagerError if the instance's historical
    model has a ``history_id`` field that is not a UUIDField.

    Models whose history manager uses an auto-incrementing history_id
    cannot be synced offline, hence the check.
    """
    try:
        model = self.instance.__class__.history.model
    except AttributeError:
        # No history manager; fall back to the model class itself
        model = self.instance.__class__
    field = [field for field in model._meta.fields
             if field.name == "history_id"]
    if field and not isinstance(field[0], UUIDField):
        raise OfflineHistoricalManagerError(
            f"Field 'history_id' of historical model "
            f"'{model._meta.app_label}.{model._meta.model_name}' "
            "must be an UUIDfield. "
            "For history = HistoricalRecords() use edc_model.HistoricalRecords instead of "
            "simple_history.HistoricalRecords(). "
            f"See '{self.instance._meta.app_label}.{self.instance._meta.model_name}'."
        )
Raises an exception if model uses a history manager and historical model history_id is not a UUIDField .
51,718
def primary_key_field(self):
    """Return the model instance's primary-key field."""
    pk_fields = [fld for fld in self.instance._meta.fields
                 if fld.primary_key]
    return pk_fields[0]
Return the primary key field .
51,719
def to_outgoing_transaction(self, using, created=None, deleted=None):
    """Serialize the model instance to an AES-encrypted JSON object and
    save it to the OutgoingTransaction model.

    Parameters
    ----------
    using : str
        Database alias to write the transaction to.
    created : bool | None
        True for an insert, False for an update; defaults to True.
    deleted : bool | None
        If truthy, records a DELETE action timestamped now.

    Returns
    -------
    OutgoingTransaction instance, or None if this wrapper is not
    serialized (``self.is_serialized`` is falsy).
    """
    OutgoingTransaction = django_apps.get_model(
        "django_collect_offline", "OutgoingTransaction")
    created = True if created is None else created
    action = INSERT if created else UPDATE
    # Use the model's own audit timestamps where available
    timestamp_datetime = (
        self.instance.created if created else self.instance.modified)
    if not timestamp_datetime:
        timestamp_datetime = get_utcnow()
    if deleted:
        # Deletion overrides both the action and the timestamp
        timestamp_datetime = get_utcnow()
        action = DELETE
    outgoing_transaction = None
    if self.is_serialized:
        hostname = socket.gethostname()
        outgoing_transaction = OutgoingTransaction.objects.using(
            using).create(
            tx_name=self.instance._meta.label_lower,
            tx_pk=getattr(self.instance, self.primary_key_field.name),
            tx=self.encrypted_json(),
            timestamp=timestamp_datetime.strftime("%Y%m%d%H%M%S%f"),
            producer=f"{hostname}-{using}",
            action=action,
            using=using,
        )
    return outgoing_transaction
Serialize the model instance to an AES encrypted json object and saves the json object to the OutgoingTransaction model .
51,720
def encrypted_json(self):
    """Return an AES-encrypted JSON serialization of ``self.instance``."""
    payload = serialize(objects=[self.instance])
    return Cryptor().aes_encrypt(payload, LOCAL_MODE)
Returns an encrypted json serialized from self .
51,721
def map_req ( self , requirement ) : if isinstance ( self . _packages_map , dict ) : local_name = self . _packages_map . get ( requirement , requirement . name ) else : local_name = self . _packages_map ( requirement ) return local_name
Maps the name of an Requirement class to the name of the corresponding module in the environment
51,722
def map_version(self, requirement, local_version):
    """Map a locally-reported version string to one recognised by the
    Requirement class, via either a nested dict lookup (defaulting to
    the local version itself) or a mapping callable."""
    mapping = self._versions_map
    if isinstance(mapping, dict):
        return mapping.get(requirement, {}).get(local_version,
                                                local_version)
    return mapping(requirement, local_version)
Maps a local version name to one recognised by the Requirement class
51,723
def init_tag_processors(self):
    """Register the tag processors that are bundled with
    doxytag2zealdb, one per supported Doxygen tag kind."""
    bundled = (
        ('class', classTagProcessor),
        ('file', fileTagProcessor),
        ('namespace', namespaceTagProcessor),
        ('struct', structTagProcessor),
        ('union', unionTagProcessor),
        ('function', functionTagProcessor),
        ('define', defineTagProcessor),
        ('enumeration', enumerationTagProcessor),
        ('enumvalue', enumvalueTagProcessor),
        ('typedef', typedefTagProcessor),
        ('variable', variableTagProcessor),
    )
    for tag_kind, processor_cls in bundled:
        self.register_tag_processor(tag_kind, processor_cls(**self.opts))
Register the TagProcessors that are bundled with doxytag2zealdb .
51,724
def process(self):
    """Run every registered tag processor, optionally reporting the
    per-processor and overall entry counts to stderr."""
    for tag_proc in self.tag_procs:
        count_before = self.entry_count
        self.run_tag_processor(tag_proc)
        if self.verbose:
            inserted = self.entry_count - count_before
            print('Inserted %d entries for "%s" tag processor' % (
                inserted, tag_proc), file=sys.stderr)
    if self.verbose:
        print('Inserted %d entries overall' % self.entry_count,
              file=sys.stderr)
Run all tag processors .
51,725
def run_tag_processor(self, tag_proc_name):
    """Run a single named tag processor over the parsed tag-file soup,
    processing every tag it finds."""
    processor = self.tag_procs[tag_proc_name]
    for found in processor.find(self.soup):
        self.process_tag(tag_proc_name, found)
Run a tag processor .
51,726
def process_tag(self, tag_proc_name, tag):
    """Extract (name, entry type, filename) from ``tag`` using the named
    tag processor, insert it into the Zeal DB, and bump the counter."""
    processor = self.tag_procs[tag_proc_name]
    entry_name = processor.get_name(tag)
    entry_type = processor.get_entry_type(tag)
    entry_file = processor.get_filename(tag)
    self.zeal_db.insert(entry_name, entry_type, entry_file)
    self.entry_count += 1
Process a tag with a tag processor and insert a DB entry .
51,727
def _get_args(self, node, keywords):
    """Intercept the plugin's argument lookup so that the 'template'
    keyword is answered with our node-specific SLURM template."""
    args = super(ArcanaSlurmGraphPlugin, self)._get_args(node, keywords)
    return tuple(
        self._processor.slurm_template(node) if name == 'template' else arg
        for name, arg in zip(keywords, args))
Intercept calls to get template and return our own node - specific template
51,728
def wall_time_str(self, wall_time):
    """Format a wall time given in (possibly fractional) minutes as
    'D-HH:MM:SS', the format required for an sbatch script."""
    whole_minutes = int(math.floor(wall_time))
    seconds = int((wall_time - math.floor(wall_time)) * 60)
    days, rem_minutes = divmod(whole_minutes, 1440)
    hours, minutes = divmod(rem_minutes, 60)
    return "{}-{:0>2}:{:0>2}:{:0>2}".format(days, hours, minutes, seconds)
Returns the wall time in the format required for the sbatch script
51,729
def sync(self):
    """Retrieve panel information from the ElkM1: register the message
    handlers, then request version, log and status data."""
    handler_map = (
        ('VN', self._vn_handler),
        ('XK', self._xk_handler),
        ('RP', self._rp_handler),
        ('IE', self._elk.call_sync_handlers),
        ('SS', self._ss_handler),
    )
    for msg_code, handler in handler_map:
        self._elk.add_handler(msg_code, handler)
    for encoder in (vn_encode, lw_encode, ss_encode):
        self._elk.send(encoder())
Retrieve panel information from ElkM1
51,730
def renamed(self, name):
    """Return a (shallow) copy of this datum with its name replaced."""
    clone = copy(self)
    clone._name = name
    return clone
Duplicate the datum and rename it
51,731
def compare(self, other):
    """Compare this version with another of the same requirement.

    Returns -1, 0 or 1 for less-than, equal and greater-than. Missing
    prerelease segments sort after any real prerelease (treated as
    ('z',)); missing post/dev segments are treated as 0.

    Raises
    ------
    ArcanaUsageError
        If the two versions belong to different requirements.
    """
    if self._req != other._req:
        raise ArcanaUsageError(
            "Can't compare versions of different requirements {} and {}"
            .format(self._req, other._req))
    def _key(ver):
        return (ver._seq,
                ver._prerelease if ver._prerelease is not None else ('z',),
                ver._post if ver._post is not None else 0,
                ver._dev if ver._dev is not None else 0)
    for mine, theirs in zip(_key(self), _key(other)):
        if mine < theirs:
            return -1
        if mine > theirs:
            return 1
    assert self == other
    return 0
Compares the version with another
51,732
def v(self, version, max_version=None, **kwargs):
    """Return a single requirement version, or a VersionRange when a
    maximum version is also supplied. Plain values are wrapped in this
    requirement's version class."""
    if not isinstance(version, Version):
        version = self.version_cls(self, version, **kwargs)
    if max_version is None:
        return version
    if not isinstance(max_version, Version):
        max_version = self.version_cls(self, max_version, **kwargs)
    return VersionRange(version, max_version)
Returns either a single requirement version or a requirement version range depending on whether two arguments are supplied or one
51,733
def open(self):
    """Open (or re-open) a connection to the docset SQLite database.

    Any existing ``searchIndex`` table is dropped and recreated so the
    index is rebuilt from scratch.

    Raises
    ------
    sqlite3.Error
        On any database failure.
    """
    if self.conn is not None:
        self.close()
    self.conn = sqlite3.connect(self.filename)
    self.cursor = self.conn.cursor()
    c = self.cursor
    c.execute('SELECT name FROM sqlite_master WHERE type="table"')
    # Iterating the cursor consumes the SELECT results
    if (u'searchIndex',) in c:
        c.execute('DROP TABLE searchIndex')
        if self.verbose:
            print('Dropped existing table', file=sys.stderr)
    # BUG FIX: the original called ``c.executescript()`` with no
    # argument, which raises TypeError. Restore the conventional
    # Dash/Zeal docset schema for the search index.
    c.executescript(
        'CREATE TABLE searchIndex('
        'id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT); '
        'CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);')
Open a connection to the database .
51,734
def serialize(objects=None):
    """A thin wrapper around Django's serializer with project defaults:
    JSON output, ASCII-safe, natural foreign keys (but numeric primary
    keys)."""
    options = dict(
        ensure_ascii=True,
        use_natural_foreign_keys=True,
        use_natural_primary_keys=False,
    )
    return serializers.serialize("json", objects, **options)
A simple wrapper of Django's serializer with defaults for JSON and natural keys.
51,735
def branch(self, name, values=None):
    """Check whether the named switch matches the provided value(s).

    Parameters
    ----------
    name : str
        Name of a SwitchSpec parameter.
    values : str | Iterable[str] | None
        Value(s) to match a non-boolean switch against. Must be None
        for boolean switches.

    Returns
    -------
    bool
        For boolean switches, the switch value itself; otherwise whether
        the switch value is one of ``values``.

    Raises
    ------
    ArcanaUsageError
        If ``name`` refers to a standard (non-switch) parameter.
    ArcanaDesignError
        If values are supplied for a boolean switch, omitted for a
        non-boolean switch, or not among the switch's choices.
    """
    # Allow a single value to be passed without wrapping in a list
    if isinstance(values, basestring):
        values = [values]
    spec = self.parameter_spec(name)
    if not isinstance(spec, SwitchSpec):
        raise ArcanaUsageError(
            "{} is standard parameter not a switch".format(spec))
    switch = self._get_parameter(name)
    if spec.is_boolean:
        if values is not None:
            raise ArcanaDesignError(
                "Should not provide values ({}) to boolean switch "
                "'{}' in {}".format(
                    values, name, self._param_error_location))
        in_branch = switch.value
    else:
        if values is None:
            raise ArcanaDesignError(
                "Value(s) need(s) to be provided non-boolean switch"
                " '{}' in {}".format(
                    name, self._param_error_location))
        # Validate requested values against the declared choices
        unrecognised_values = set(values) - set(spec.choices)
        if unrecognised_values:
            raise ArcanaDesignError(
                "Provided value(s) ('{}') for switch '{}' in {} "
                "is not a valid option ('{}')".format(
                    "', '".join(unrecognised_values), name,
                    self._param_error_location,
                    "', '".join(spec.choices)))
        in_branch = switch.value in values
    return in_branch
Checks whether the given switch matches the value provided
51,736
def unhandled_branch(self, name):
    """Convenience method for raising an error when a pipeline does not
    handle a particular switch value."""
    message = "'{}' value of '{}' switch in {} is not handled".format(
        self._get_parameter(name), name, self._param_error_location)
    raise ArcanaDesignError(message)
Convenient method for raising an exception if a pipeline doesn't handle a particular switch value.
51,737
def save_workflow_graph_for(self, spec_name, fname, full=False,
                            style='flat', **kwargs):
    """Save a PNG graph of the workflow that generates ``spec_name``.

    When ``full`` is True the pipeline is connected into a complete
    processing workflow first; otherwise only the bare pipeline
    workflow is drawn.
    """
    pipeline = self.spec(spec_name).pipeline
    if full:
        workflow = pe.Workflow(name='{}_gen'.format(spec_name),
                               base_dir=self.processor.work_dir)
        self.processor._connect_pipeline(pipeline, workflow, **kwargs)
    else:
        workflow = pipeline._workflow
    fname = op.expanduser(fname)
    if not fname.endswith('.png'):
        fname += '.png'
    # write_graph renders the PNG alongside the generated dot file
    dotfilename = fname[:-4] + '.dot'
    workflow.write_graph(graph2use=style, dotfilename=dotfilename)
Saves a graph of the workflow to generate the requested spec_name
51,738
def spec(self, name):
    """Return the parameter spec or bound data spec matching ``name``
    (which may itself be a data/parameter object, in which case its
    name is used)."""
    if isinstance(name, (BaseData, Parameter)):
        name = name.name
    try:
        return self._param_specs[name]
    except KeyError:
        return self.bound_spec(name)
Returns either the input corresponding to a fileset or field field spec or a spec or parameter that has either been passed to the study as an input or can be derived .
51,739
def bound_spec(self, name):
    """Return an input selector or derived spec bound to this study
    (i.e. one for which the repository tree is checked for existing
    outputs).

    Raises
    ------
    ArcanaMissingDataException
        If the spec is acquired (non-derived), has no default and was
        not supplied as an input when the study was initiated.
    """
    if isinstance(name, BaseData):
        name = name.name
    spec = self.data_spec(name)
    try:
        # Explicitly-supplied inputs take precedence
        bound = self._inputs[name]
    except KeyError:
        if not spec.derived and spec.default is None:
            raise ArcanaMissingDataException(
                "Acquired (i.e. non-generated) fileset '{}' "
                "was not supplied when the study '{}' was "
                "initiated".format(name, self.name))
        else:
            try:
                # Bound specs are cached per name
                bound = self._bound_specs[name]
            except KeyError:
                bound = self._bound_specs[name] = spec.bind(self)
    return bound
Returns an input selector or derived spec bound to the study i . e . where the repository tree is checked for existing outputs
51,740
def data_spec(cls, name):
    """Return the data spec (the template for a fileset expected to be
    supplied or generated) registered under ``name``.

    Raises
    ------
    ArcanaNameError
        If no spec with that name exists on the class.
    """
    if isinstance(name, BaseData):
        name = name.name
    if name not in cls._data_specs:
        raise ArcanaNameError(
            name,
            "No fileset spec named '{}' in {}, available:\n{}".format(
                name, cls.__name__,
                "\n".join(list(cls._data_specs.keys()))))
    return cls._data_specs[name]
Return the fileset_spec i . e . the template of the fileset expected to be supplied or generated corresponding to the fileset_spec name .
51,741
def cache_inputs(self):
    """Run the study's repository source node for every input, caching
    any data required from remote repositories.

    Useful before launching many parallel jobs that would otherwise all
    hit the remote repository concurrently and likely time out.
    """
    workflow = pe.Workflow(name='cache_download',
                           base_dir=self.processor.work_dir)
    subjects = pe.Node(IdentityInterface(['subject_id']),
                       name='subjects', environment=self.environment)
    sessions = pe.Node(IdentityInterface(['subject_id', 'visit_id']),
                       name='sessions', environment=self.environment)
    # Iterate over the full subject x visit grid
    subjects.iterables = ('subject_id', tuple(self.subject_ids))
    sessions.iterables = ('visit_id', tuple(self.visit_ids))
    source = pe.Node(
        RepositorySource(
            self.bound_spec(i).collection for i in self.inputs),
        name='source')
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')
    workflow.connect(sessions, 'subject_id', source, 'subject_id')
    workflow.connect(sessions, 'visit_id', source, 'visit_id')
    workflow.run()
Runs the Study's repository source node for each of the inputs of the study, thereby caching any data required from remote repositories. Useful when launching many parallel jobs that would all try to concurrently access the remote repository and probably lead to timeout errors.
51,742
def provided(self, spec_name, default_okay=True):
    """Return True if the named data spec was provided as an explicit
    input (or, when ``default_okay``, has a default), as opposed to
    being a derivative or a missing optional input."""
    try:
        spec = self.bound_spec(spec_name)
    except ArcanaMissingDataException:
        return False
    if not isinstance(spec, BaseInputSpec):
        return True
    return spec.default is not None and default_okay
Checks to see whether the corresponding data spec was provided an explicit input as opposed to derivatives or missing optional inputs
51,743
def freq_from_iterators(cls, iterators):
    """Return the frequency whose iterator tuple matches ``iterators``
    (order-insensitively).

    Parameters
    ----------
    iterators : Iterable[str]
        Iterator names, in any order.

    Returns
    -------
    str
        The matching frequency key of ``cls.FREQUENCIES``.

    Raises
    ------
    KeyError
        If no frequency uses exactly this set of iterators.
    """
    # BUG FIX: the original used ``set(it)`` as a dict key, but a plain
    # set is unhashable, so the lookup always raised TypeError.
    # frozenset is hashable and keeps the intended order-insensitive
    # matching.
    lookup = {frozenset(it): f for f, it in cls.FREQUENCIES.items()}
    return lookup[frozenset(iterators)]
Returns the frequency corresponding to the given iterators
51,744
def prov(self):
    """Extract provenance information from the study for storage
    alongside generated derivatives.

    Typically for reference purposes only, as only the pipeline
    workflow inputs and outputs are checked by default when deciding
    which sessions require reprocessing.

    Returns
    -------
    dict
        Study provenance including per-input item names keyed by
        subject/visit ID according to each input's frequency.
    """
    # Deduplicated list of repositories so inputs can reference by index
    input_repos = list(set((i.repository for i in self.inputs)))
    inputs = {}
    for input in self.inputs:
        inputs[input.name] = {
            'repository_index': input_repos.index(input.repository)}
        if input.frequency == 'per_study':
            # Single item per study; record its name directly.
            # NOTE(review): ``next(input.collection)`` assumes the
            # collection is an iterator/iterable of items -- confirm.
            inputs[input.name]['names'] = next(input.collection).name
        elif input.frequency == 'per_subject':
            inputs[input.name]['names'] = {
                i.subject_id: i.name for i in input.collection}
        elif input.frequency == 'per_visit':
            inputs[input.name]['names'] = {
                i.visit_id: i.name for i in input.collection}
        elif input.frequency == 'per_session':
            # Nested mapping: subject_id -> visit_id -> item name
            names = defaultdict(dict)
            for item in input.collection:
                names[item.subject_id][item.visit_id] = item.name
            inputs[input.name]['names'] = dict(names.items())
    return {
        'name': self.name,
        'type': get_class_info(type(self)),
        'parameters': {p.name: p.value for p in self.parameters},
        'inputs': inputs,
        'environment': self.environment.prov,
        'repositories': [r.prov for r in input_repos],
        'processor': self.processor.prov,
        'subject_ids': self.subject_ids,
        'visit_ids': self.visit_ids}
Extracts provenance information from the study for storage alongside generated derivatives . Typically for reference purposes only as only the pipeline workflow inputs and outputs are checked by default when determining which sessions require reprocessing .
51,745
def display_message(self, clear, beep, timeout, line1, line2):
    """Display a two-line message on all of the keypads in this area."""
    message = dm_encode(self._index, clear, beep, timeout, line1, line2)
    self._elk.send(message)
Display a message on all of the keypads in this area .
51,746
def sync(self):
    """Retrieve light status from the ElkM1 (four PLC status banks),
    then request the light descriptions."""
    for bank in range(4):
        self.elk.send(ps_encode(bank))
    self.get_descriptions(TextDescriptions.LIGHT.value)
Retrieve lights from ElkM1
51,747
def split_extension(path):
    """Split ``path`` into (base, extension), recognising the compound
    extensions listed in ``double_exts`` (e.g. '.nii.gz'). Returns
    ``(path, None)`` when the file name contains no dot."""
    for compound in double_exts:
        if path.endswith(compound):
            return path[:-len(compound)], compound
    directory = os.path.dirname(path)
    filename = os.path.basename(path)
    if '.' not in filename:
        return os.path.join(directory, filename), None
    base, _, last = filename.rpartition('.')
    return os.path.join(directory, base), '.' + last
An extension splitter that checks for compound extensions such as file.nii.gz
51,748
def parse_single_value(value):
    """Coerce a string to int, then float, falling back to str; values
    wrapped in double quotes are always treated as strings. Numeric
    inputs pass through unchanged.

    Raises
    ------
    ArcanaUsageError
        For values that are neither strings nor numbers.
    """
    if isinstance(value, (int, float)):
        return value
    if not isinstance(value, basestring):
        raise ArcanaUsageError(
            "Unrecognised type for single value {}".format(value))
    try:
        if value.startswith('"') and value.endswith('"'):
            return str(value[1:-1])
        if '.' in value:
            return float(value)
        return int(value)
    except ValueError:
        # Not numeric after all; keep it as a plain string
        return str(value)
Tries to convert to int, then float, and then gives up and assumes the value is of type string. Useful when accepting values that may be string representations of numerical values.
51,749
def serialize_m2m_on_save(sender, action, instance, using, **kwargs):
    """Part of the serialize-transaction process ensuring m2m relations
    are serialized correctly; only reacts to 'post_add' signals."""
    if action != "post_add":
        return
    try:
        wrapped = site_offline_models.get_wrapped_instance(instance)
    except ModelNotRegistered:
        pass
    else:
        wrapped.to_outgoing_transaction(using, created=True)
Part of the serialize transaction process that ensures m2m are serialized correctly .
51,750
def serialize_on_save(sender, instance, raw, created, using, **kwargs):
    """Serialize the saved model instance as an OutgoingTransaction,
    skipping raw (fixture) saves and historical models."""
    if raw or "historical" in instance._meta.label_lower:
        return
    try:
        wrapped = site_offline_models.get_wrapped_instance(instance)
    except ModelNotRegistered:
        pass
    else:
        wrapped.to_outgoing_transaction(using, created=created)
Serialize the model instance as an OutgoingTransaction .
51,751
def serialize_history_on_post_create(history_instance, using, **kwargs):
    """Serialize a newly-created history instance as an
    OutgoingTransaction; unregistered models are silently ignored."""
    try:
        wrapped = site_offline_models.get_wrapped_instance(history_instance)
    except ModelNotRegistered:
        return
    wrapped.to_outgoing_transaction(using, created=True)
Serialize the history instance as an OutgoingTransaction .
51,752
def serialize_on_post_delete(sender, instance, using, **kwargs):
    """Create a serialized OutgoingTransaction when a model instance is
    deleted; unregistered models are silently ignored."""
    try:
        wrapped = site_offline_models.get_wrapped_instance(instance)
    except ModelNotRegistered:
        return
    wrapped.to_outgoing_transaction(using, created=False, deleted=True)
Creates a serialized OutgoingTransaction when a model instance is deleted .
51,753
def sync(self):
    """Retrieve the custom (setting) values from the ElkM1, then
    request their descriptions."""
    request = cp_encode()
    self.elk.send(request)
    self.get_descriptions(TextDescriptions.SETTING.value)
Retrieve custom values from ElkM1
51,754
def paths(self):
    """Iterate over every file path in the fileset: all files under the
    directory for directory formats, otherwise the primary path
    followed by any auxiliary files.

    Raises
    ------
    ArcanaFileFormatError
        If the fileset's format has not been set.
    """
    if self.format is None:
        raise ArcanaFileFormatError(
            "Cannot get paths of fileset ({}) that hasn't had its format "
            "set".format(self))
    if not self.format.directory:
        return chain([self.path], self.aux_files.values())
    walked = ((op.join(root, f) for f in files)
              for root, _, files in os.walk(self.path))
    return chain(*walked)
Iterates through all files in the set
51,755
def contents_equal(self, other, **kwargs):
    """Test equality of this fileset's contents with another's.

    Delegates to the format's ``contents_equal`` when it provides one;
    otherwise falls back to a straight checksum comparison.
    """
    fmt = self.format
    if hasattr(fmt, 'contents_equal'):
        return fmt.contents_equal(self, other, **kwargs)
    return self.checksums == other.checksums
Test the equality of the fileset contents with another fileset. If the fileset's format implements a contents_equal method then that is used to determine the equality, otherwise a straight comparison of the checksums is used.
51,756
def bind(self, study, **kwargs):
    """Return a copy of this acquired spec bound to ``study``.

    Already-bound specs are returned as-is; otherwise a copy is made
    with its study set and its default value bound to the study too.

    Raises
    ------
    ArcanaError
        If the spec has no default value (only acquired specs with a
        default should be bound to studies).
    """
    if self.default is None:
        # BUG FIX: the original message contained three '{}'
        # placeholders (including a stray '{})') with only two format
        # arguments, so raising it crashed with IndexError instead of
        # the intended ArcanaError.
        raise ArcanaError(
            "Attempted to bind '{}' to {} but only acquired specs with "
            "a default value should be bound to studies".format(
                self.name, study))
    if self._study is not None:
        bound = self
    else:
        bound = copy(self)
        bound._study = study
        bound._default = bound.default.bind(study)
    return bound
Returns a copy of the AcquiredSpec bound to the given study
51,757
def bind(self, study, **kwargs):
    """Return a copy of this spec bound to ``study``; an already-bound
    spec is returned unchanged.

    Raises
    ------
    ArcanaError
        If the study lacks the pipeline-getter method needed to derive
        this spec.
    """
    if self._study is not None:
        return self
    bound = copy(self)
    bound._study = study
    if not hasattr(study, self.pipeline_getter):
        raise ArcanaError(
            "{} does not have a method named '{}' required to "
            "derive {}".format(study, self.pipeline_getter, self))
    bound._bind_tree(study.tree)
    return bound
Returns a copy of the Spec bound to the given study
51,758
def nodes(self, tree):
    """Return the data-tree nodes relevant to this spec's frequency."""
    freq = self.frequency
    if freq == 'per_session':
        # Flatten sessions across all subjects.
        return [sess for subject in tree.subjects
                for sess in subject.sessions]
    if freq == 'per_subject':
        return tree.subjects
    if freq == 'per_visit':
        return tree.visits
    if freq == 'per_study':
        return [tree]
    assert False, "Unrecognised frequency '{}'".format(freq)
Returns the relevant nodes for the spec's frequency.
51,759
def reconnect_all(self):
    """Re-establish the connection to every registered instance."""
    for connections in self.Instances.values():
        for connection in connections:
            connection.reconnect()
Re - establish connection to all instances
51,760
def add_instance(self, role, instance, username='root', key_filename=None,
                 output_shell=False):
    """Add an instance to the setup under the given role.

    Parameters:
        role: grouping key for the connection (e.g. a server role name).
        instance: mapping with at least 'private_hostname' and
            'public_hostname' keys.
        username: ssh user for the connection (default 'root').
        key_filename: optional ssh private-key path.
        output_shell: echo shell output when True.
    """
    # Lazily create the role bucket on first use.
    if not role in self.Instances.keys():
        self.Instances[role] = []
    self.logger.debug('Adding ' + role + ' with private_hostname ' +
                      instance['private_hostname'] + ', public_hostname ' +
                      instance['public_hostname'])
    # Connection is a project class -- presumably wraps an ssh session;
    # TODO confirm against its definition.
    self.Instances[role].append(Connection(instance, username, key_filename,
                                           output_shell=output_shell))
Add instance to the setup
51,761
def setup_from_yamlfile(self, yamlfile, output_shell=False):
    """Set up instances and config from a YAML config file.

    The file is expected to contain an 'Instances' list and an
    optional 'Config' mapping.
    """
    self.logger.debug('Loading config from ' + yamlfile)
    with open(yamlfile, 'r') as yamlfd:
        # SECURITY FIX: yaml.load() without an explicit Loader can
        # construct arbitrary Python objects from untrusted input;
        # safe_load only builds plain YAML types.
        yamlconfig = yaml.safe_load(yamlfd)
    for instance in yamlconfig['Instances']:
        self.add_instance(instance['role'].upper(), instance,
                          output_shell=output_shell)
    if 'Config' in yamlconfig.keys():
        self.logger.debug('Config found: ' + str(yamlconfig['Config']))
        self.config = yamlconfig['Config'].copy()
Setup from yaml config
51,762
def process_iter(proc, cmd=""):
    """Iterate over a subprocess's stdout lines, raising on bad exit.

    Yields each line of ``proc.stdout``. When iteration finishes (or
    the consumer stops early), the process's return code is checked and
    ProcessException is raised for unexpected failures.
    """
    try:
        for l in proc.stdout:
            yield l
    finally:
        if proc.poll() is None:
            # Process still running (e.g. the consumer broke out of the
            # loop early). NOTE(review): this returns silently without
            # waiting for or terminating the process -- confirm intended.
            return
        else:
            proc.wait()
            # SIGPIPE (raw, or the 128+signal shell convention) is
            # tolerated because downstream may close the pipe early.
            if proc.returncode not in (0, None, signal.SIGPIPE,
                                       signal.SIGPIPE + 128):
                sys.stderr.write("cmd was:%s\n" % cmd)
                sys.stderr.write("return code was:%s\n" % proc.returncode)
                raise ProcessException(cmd)
helper function to iterate over a process stdout and report error messages when done
51,763
def header(fname, sep="\t"):
    """Return the header row of *fname* as a list of column names.

    Uses project helpers ``nopen`` (presumably opens plain or compressed
    files -- confirm) and ``tokens`` (splits a line on *sep*). A leading
    '#' is stripped from the first column name.
    """
    fh = iter(nopen(fname))
    h = tokens(next(fh), sep)
    h[0] = h[0].lstrip("#")
    return h
just grab the header from a given file
51,764
def is_newer_b(a, bfiles):
    """Check that every file in *bfiles* was modified more recently than *a*.

    Returns False when *a* or any b-file does not exist, or when *a*'s
    mtime is greater than any b-file's mtime.
    """
    # BUG FIX: `basestring` does not exist on Python 3; accept a single
    # path given as str or bytes and normalise it to a list.
    if isinstance(bfiles, (str, bytes)):
        bfiles = [bfiles]
    if not op.exists(a):
        return False
    if not all(op.exists(b) for b in bfiles):
        return False
    atime = os.stat(a).st_mtime
    for b in bfiles:
        if atime > os.stat(b).st_mtime:
            return False
    return True
check that all b files have been modified more recently than a
51,765
def expect_list(connection, regexp_list, timeout=10):
    """Wait for any of a list of (regexp, retvalue) pairs to match.

    Polls the connection channel roughly once per second for up to
    *timeout* seconds, accumulating decoded output. Returns the
    retvalue of the first matching regexp; raises ExpectFailed
    (carrying the text received so far) on timeout.
    """
    result = ""
    count = 0
    while count < timeout:
        try:
            recv_part = connection.channel.recv(32768).decode()
            logging.getLogger('stitches.expect').debug("RCV: " + recv_part)
            if connection.output_shell:
                sys.stdout.write(recv_part)
            result += recv_part
        except socket.timeout:
            # No data this round; keep polling until timeout.
            pass
        for (regexp, retvalue) in regexp_list:
            if re.match(regexp, result):
                return retvalue
        time.sleep(1)
        count += 1
    raise ExpectFailed(result)
Expect a list of expressions
51,766
def expect(connection, strexp, timeout=10):
    """Wait for one expression to appear in the connection's output.

    Wraps *strexp* in a DOTALL '.*...*' regexp and delegates to
    Expect.expect_list. Returns True on match; raises ExpectFailed on
    timeout.
    """
    return Expect.expect_list(
        connection,
        [(re.compile(".*" + strexp + ".*", re.DOTALL), True)],
        timeout)
Expect one expression
51,767
def match(connection, regexp, grouplist=None, timeout=10):
    """Match accumulated connection output against a compiled regexp.

    Polls the connection channel for up to *timeout* seconds,
    accumulating decoded text. On a match, returns the captured groups
    named by *grouplist* (defaults to group 1); otherwise raises
    ExpectFailed with the text received so far.
    """
    # FIX: the original used a mutable default argument (grouplist=[1]);
    # default to None and build the list per call instead.
    if grouplist is None:
        grouplist = [1]
    logging.getLogger('stitches.expect').debug("MATCHING: " + regexp.pattern)
    result = ""
    count = 0
    while count < timeout:
        try:
            recv_part = connection.channel.recv(32768).decode()
            logging.getLogger('stitches.expect').debug("RCV: " + recv_part)
            if connection.output_shell:
                sys.stdout.write(recv_part)
            result += recv_part
        except socket.timeout:
            # No data this round; keep polling.
            pass
        match = regexp.match(result)
        if match:
            ret_list = []
            for group in grouplist:
                logging.getLogger('stitches.expect').debug(
                    "matched: " + match.group(group))
                ret_list.append(match.group(group))
            return ret_list
        time.sleep(1)
        count += 1
    raise ExpectFailed(result)
Match against an expression
51,768
def expect_retval(connection, command, expected_status=0, timeout=10):
    """Run *command* over the connection and require a specific exit status.

    Raises ExpectFailed on timeout, or when the exit status differs from
    *expected_status* (including the command/stdout/stderr in the
    message); otherwise returns the status.
    """
    retval = connection.recv_exit_status(command, timeout)
    if retval is None:
        raise ExpectFailed(
            "Got timeout (%i seconds) while executing '%s'"
            % (timeout, command))
    elif retval != expected_status:
        raise ExpectFailed(
            "Got %s exit status (%s expected)\ncmd: %s\nstdout: %s\nstderr: %s"
            % (retval, expected_status, connection.last_command,
               connection.last_stdout, connection.last_stderr))
    if connection.output_shell:
        sys.stdout.write("Run '%s', got %i return value\n"
                         % (command, retval))
    return retval
Run a command and expect the specified return value.
51,769
def find(self, soup):
    """Yield every tag from *soup* that satisfies this criterion."""
    candidates = soup.recursiveChildGenerator()
    for candidate in filter(self.match_criterion, candidates):
        yield candidate
Yield tags matching the tag criterion from a soup .
51,770
def get_name(self, tag):
    """Extract and return a representative name from a tag.

    Reads the tag's child <name> element. When include_parent_scopes is
    set and the parent tag's kind is class/struct/namespace, the
    parent's name is prepended with '::' (C++-style qualification).
    """
    name = tag.findChild('name').contents[0]
    if self.include_parent_scopes:
        # Qualify with the enclosing scope's name when relevant.
        parent_tag = tag.findParent()
        if parent_tag.get('kind') in ['class', 'struct', 'namespace']:
            name = parent_tag.findChild('name').contents[0] + '::' + name
    return name
Extract and return a representative name from a tag .
51,771
def get_filename(self, tag):
    """Extract and return a documentation filename from a tag.

    Prefers a direct <filename> child; otherwise combines <anchorfile>
    and <anchor> as 'file#anchor'. Implicitly returns None when neither
    element is present.
    """
    if tag.find('filename', recursive=False) is not None:
        return tag.filename.contents[0]
    elif tag.find('anchorfile', recursive=False) is not None:
        return tag.anchorfile.contents[0] + '#' + tag.anchor.contents[0]
Extract and return a documentation filename from a tag .
51,772
def match_criterion(self, tag):
    """Return True when *tag* has the reference name and kind attribute."""
    if tag.name != self.reference_tag_name:
        return False
    return tag.attrs.get('kind', '') == self.reference_tag_kind
Override . Determine if a tag has the desired name and kind attribute value .
51,773
def get_name(self, tag):
    """Extract a representative name from a function tag.

    Extends the base-class name with the function's argument list and,
    when recorded, an ' -> returntype' suffix (both controlled by
    include_function_signatures).
    """
    name = super(functionTagProcessor, self).get_name(tag)
    if self.include_function_signatures:
        # Append '(arg, ...)' when the tag records an arglist.
        func_args = tag.findChild('arglist')
        if func_args and len(func_args.contents):
            name += func_args.contents[0]
        # Append the return type when recorded.
        ret_type = tag.findChild('type')
        if ret_type and len(ret_type.contents):
            name += ' -> ' + ret_type.contents[0]
    return name
Override . Extract a representative name from a function tag .
51,774
def pool(n=None, dummy=False):
    """Create a multiprocessing Pool.

    Parameters:
        n: number of workers; defaults to cpu_count() - 1, but never
           fewer than one.
        dummy: use the thread-backed multiprocessing.dummy pool.

    NOTE(review): the original docstring claimed the pool "responds to
    interrupts", but no interrupt handling is set up here -- confirm.
    """
    if dummy:
        from multiprocessing.dummy import Pool
    else:
        from multiprocessing import Pool
    if n is None:
        import multiprocessing
        # BUG FIX: on a single-core machine cpu_count() - 1 == 0, and
        # Pool(0) raises ValueError; keep at least one worker.
        n = max(1, multiprocessing.cpu_count() - 1)
    return Pool(n)
create a multiprocessing pool that responds to interrupts .
51,775
def _call_callbacks ( self ) : for callback in self . _callbacks : callback ( self , self . _changeset ) self . _changeset = { }
Callbacks when attribute of element changes
51,776
def setattr(self, attr, new_value, close_the_changeset=True):
    """Set *attr* to *new_value* if it changed, recording the change.

    Changed values are stored in self._changeset; when
    close_the_changeset is True and changes are pending, the registered
    callbacks are fired via _call_callbacks().

    NOTE(review): this method shadows the builtin setattr(); the inner
    call below still resolves to the builtin because the method name
    lives only in the class namespace, not the module scope.
    """
    existing_value = getattr(self, attr, None)
    if existing_value != new_value:
        setattr(self, attr, new_value)
        self._changeset[attr] = new_value
    if close_the_changeset and self._changeset:
        self._call_callbacks()
If attribute value has changed then set it and call the callbacks
51,777
def default_name(self, separator='-'):
    """Return a default name built from the class name and the element's
    1-based, zero-padded index, e.g. 'Widget-001'."""
    suffix = '{}{:03d}'.format(separator, self._index + 1)
    return self.__class__.__name__ + suffix
Return a default name based on the class and index of the element.
51,778
def as_dict(self):
    """Package up the public (non-underscore) attributes as a dict."""
    return {name: value
            for name, value in vars(self).items()
            if not name.startswith('_')}
Package up the public attributes as a dict .
51,779
def get_descriptions(self, description_type):
    """Request all descriptions of the given type from the Elk M1.

    *description_type* is a (desc_type, max_units) pair. Results are
    collected asynchronously; when complete the registered callback is
    called with the list of descriptions.
    """
    (desc_type, max_units) = description_type
    # Pre-size the result list; entries fill in as responses arrive.
    results = [None] * max_units
    # Track in-progress state so incoming responses can be routed.
    self.elk._descriptions_in_progress[desc_type] = (
        max_units, results, self._got_desc)
    self.elk.send(sd_encode(desc_type=desc_type, unit=0))
Gets the descriptions for specified type . When complete the callback is called with a list of descriptions
51,780
def normalize_locale_code(locale: Union[Locale, str]) -> str:
    """Determine the normalized locale code string.

    Accepts a Locale instance (presumably babel's -- confirm) or a
    locale string; hyphens in strings are converted to underscores
    before parsing (e.g. 'en-US' -> 'en_US').
    """
    if not isinstance(locale, Locale):
        locale = Locale.parse(locale.replace('-', '_'))
    return str(locale)
Determine the normalized locale code string .
51,781
def set(self, key: CacheKey, value: Optional[CacheValue]) -> None:
    r"""Create or update a cache entry for *key*.

    Abstract method: concrete subclasses of CachePolicy must override
    it; calling it on the base class raises NotImplementedError.
    """
    # FIX: corrected the "Concreate" typo in the error message.
    raise NotImplementedError(
        'Concrete subclasses of {0.__module__}.{0.__qualname__} have to '
        'override .set() method'.format(CachePolicy))
r Create or update a cache .
51,782
def getlist(self, key: 'Entity') -> Sequence[object]:
    r"""Return all values associated with *key* (a property entity).

    Returns an empty list unless *key* is an Entity whose type is
    property. Claims are sorted by their 'rank' field (descending,
    lexicographic on the rank strings) before the main-snak datavalues
    are decoded via the client.
    """
    if not (isinstance(key, type(self)) and
            key.type is EntityType.property):
        return []
    claims_map = self.attributes.get('claims') or {}
    assert isinstance(claims_map, collections.abc.Mapping)
    claims = claims_map.get(key.id, [])
    # Highest-ranked claims first (in-place sort of the claim list).
    claims.sort(key=lambda claim: claim['rank'], reverse=True)
    logger = logging.getLogger(__name__ + '.Entity.getitem')
    if logger.isEnabledFor(logging.DEBUG):
        # pprint is imported lazily to avoid the cost unless debugging.
        logger.debug('claim data: %s',
                     __import__('pprint').pformat(claims))
    decode = self.client.decode_datavalue
    return [decode(snak['datatype'], snak['datavalue'])
            for snak in (claim['mainsnak'] for claim in claims)]
r Return all values associated to the given key property in sequence .
51,783
def make_user_role_table(table_name='user', id_column_name='id'):
    """Create the user-role association table so that it correctly
    references your own UserMixin subclass.

    Parameters:
        table_name: name of the user table to reference.
        id_column_name: primary-key column of the user table.
    """
    return db.Table(
        'fp_user_role',
        db.Column('user_id', db.Integer,
                  db.ForeignKey('{}.{}'.format(table_name, id_column_name))),
        db.Column('role_id', db.Integer, db.ForeignKey('fp_role.id')),
        extend_existing=True)
Create the user - role association table so that it correctly references your own UserMixin subclass .
51,784
def combo_serve(request, path, client):
    """Generate and serve a combo (joined) file for the given path.

    Mirrors what happens on upload to S3: the configured source files
    are combined and processors applied, so combo files can be verified
    before rolling out to production.
    """
    joinfile = path
    sourcefiles = msettings['JOINED'][path]
    combo_data, dirname = combine_files(joinfile, sourcefiles, client)
    # BUG FIX: the original left mime_type unbound (NameError) for any
    # extension other than .css/.js, and inconsistently tested `path`
    # vs `joinfile` (they are the same value); default to a generic
    # binary type.
    if joinfile.endswith('.css'):
        mime_type = 'text/css'
    elif joinfile.endswith('.js'):
        mime_type = 'application/javascript'
    else:
        mime_type = 'application/octet-stream'
    return HttpResponse(combo_data, mimetype=mime_type)
Handles generating a combo file for the given path . This is similar to what happens when we upload to S3 . Processors are applied and we get the value that we would if we were serving from S3 . This is a good way to make sure combo files work as intended before rolling out to production .
51,785
def static_serve(request, path, client):
    """Serve a media asset, emulating remote/combo behaviour locally.

    When SERVE_REMOTE is on, permanently redirects to the remote media
    URL. Otherwise optionally serves combo files (EMULATE_COMBO), then
    serves the file from disk and runs the client's content processors
    over the response.
    """
    if msettings['SERVE_REMOTE']:
        # Redirect to the remote (e.g. S3) copy of the asset.
        url = client.remote_media_url().strip('/') + '/%(path)s'
        return redirect(url, permanent=True)
    if not msettings['SERVE_REMOTE'] and msettings['EMULATE_COMBO']:
        combo_match = _find_combo_match(path)
        if combo_match:
            return combo_serve(request, combo_match, client)
    resp = serve(request, path, document_root=client.media_root,
                 show_indexes=True)
    try:
        resp.content = client.process(resp.content,
                                      resp['Content-Type'], path)
    except KeyError:
        # No Content-Type header -- leave the response untouched.
        pass
    return resp
Given a request for a media asset this view does the necessary wrangling to get the correct thing delivered to the user . This can also emulate the combo behavior seen when SERVE_REMOTE == False and EMULATE_COMBO == True .
51,786
def start_server(self, port_number, number_of_procs=0):
    """Create the tornado HTTP server and start listening.

    In debug mode a single process listens directly (so autoreload
    works); otherwise the port is bound with reuse_port and
    *number_of_procs* worker processes are started (0 lets tornado
    choose -- presumably one per CPU; confirm against tornado docs).
    """
    # Install shutdown handlers before accepting traffic.
    signal.signal(signal.SIGTERM, self._on_signal)
    signal.signal(signal.SIGINT, self._on_signal)
    xheaders = self.application.settings.get('xheaders', False)
    max_body_size = self.application.settings.get('max_body_size', None)
    max_buffer_size = self.application.settings.get('max_buffer_size', None)
    self.server = httpserver.HTTPServer(
        self.application.tornado_application,
        xheaders=xheaders,
        max_body_size=max_body_size,
        max_buffer_size=max_buffer_size)
    if self.application.settings.get('debug', False):
        self.logger.info('starting 1 process on port %d', port_number)
        self.server.listen(port_number)
    else:
        self.logger.info('starting processes on port %d', port_number)
        self.server.bind(port_number, reuse_port=True)
        self.server.start(number_of_procs)
Create a HTTP server and start it .
51,787
def run(self, port_number, number_of_procs=0):
    """Create the server and run the tornado IOLoop.

    Exits the process with code 70 (EX_SOFTWARE) when the application
    fails during startup.
    """
    self.start_server(port_number, number_of_procs)
    iol = ioloop.IOLoop.instance()
    try:
        self.application.start(iol)
    except Exception:
        self.logger.exception('application terminated during start, '
                              'exiting')
        sys.exit(70)
    iol.start()
Create the server and run the IOLoop .
51,788
def add_module(self, module):
    """Add configuration parameters from a Python module.

    Copies every top-level name that does not start with '__' from
    *module* onto this object.
    """
    # BUG FIX: dict.iteritems() is Python 2 only; vars(module).items()
    # works on both. The attribute is also set positionally via the
    # builtin setattr(), which honours any custom __setattr__ signature
    # (the original's keyword call `__setattr__(attr=..., value=...)`
    # fails against object.__setattr__).
    for key, value in vars(module).items():
        if not key.startswith('__'):
            setattr(self, key, value)
Adds configuration parameters from a Python module .
51,789
def _check_permission(self, name, obj=None):
    """Check the named permission, translating failures to HTTP responses.

    Behaviour matrix:
      * unauthenticated user: redirect to login (if auto_login_redirect)
        or raise 401 -- object existence is never revealed.
      * authenticated user, object missing: the Http404 propagates.
      * authenticated user, permission denied: raise 403.
      * permission granted: return normally (caller proceeds with 200).
    """
    def redirect_or_exception(ex):
        # Anonymous users get a login redirect / 401 instead of the
        # real error so object existence is not leaked.
        if not self.request.user or not self.request.user.is_authenticated:
            if self.auto_login_redirect:
                redirect_to_login(self.request.get_full_path())
            else:
                raise HTTPUnauthorizedResponseException
        else:
            raise ex
    try:
        if not self._has_permission(name, obj):
            redirect_or_exception(HTTPForbiddenResponseException)
    except Http404 as ex:
        # Permission check itself may 404 (object lookup); route through
        # the same anonymity-preserving handler.
        redirect_or_exception(ex)
If the customer is not authorized, they should not learn whether the object exists; therefore 403 is returned if the object was not found, or the request is redirected to the login page. If the customer is authorized and the object was not found, 404 is returned. If the object was found and the user is not authorized, 403 is returned or the request is redirected to the login page. If the object was found and the user is authorized, 403 or 200 is returned according to the result of the _has_permission method.
51,790
def plot_and_save(self, **kwargs):
    """Create a figure, draw the subclass's plot() into it and save it.

    Used when the plot() implementation neither creates a figure nor
    calls save_plot itself; plot() then has to draw on self.fig.
    """
    self.fig = pyplot.figure()
    self.plot()
    self.axes = pyplot.gca()
    self.save_plot(self.fig, self.axes, **kwargs)
    # Close the figure to release its memory.
    pyplot.close(self.fig)
Used when the plot method defined does not create a figure nor calls save_plot Then the plot method has to use self . fig
51,791
def plot(self, bins=250, **kwargs):
    """Example plot: histogram of total nucleotide counts per bin.

    Subclasses are expected to override this. When a log x-scale is
    requested via kwargs, the histogram bin edges are spaced
    logarithmically.
    """
    # Total nucleotide count per parent bin.
    counts = [sum(map(len, b.contigs)) for b in self.parent.bins]
    if 'log' in kwargs.get('x_scale', ''):
        # Log-spaced edges, plus a leading 0 edge so zero counts are
        # representable.
        start, stop = numpy.log10(1), numpy.log10(max(counts))
        bins = list(numpy.logspace(start=start, stop=stop, num=bins))
        bins.insert(0, 0)
    fig = pyplot.figure()
    pyplot.hist(counts, bins=bins, color='gray')
    axes = pyplot.gca()
    title = 'Distribution of the total nucleotide count in the bins'
    axes.set_title(title)
    axes.set_xlabel('Number of nucleotides in a bin')
    axes.set_ylabel('Number of bins with that many nucleotides in them')
    self.save_plot(fig, axes, **kwargs)
    pyplot.close(fig)
    return self
An example plot function . You have to subclass this method .
51,792
def count_processors():
    """Return the number of processors available to the current job.

    Prefers SLURM environment variables (SLURM_NTASKS, then
    SLURM_JOB_CPUS_PER_NODE, which may be either an integer or a
    'n(xN)' pattern), falling back to multiprocessing.cpu_count().
    """
    if 'SLURM_NTASKS' in os.environ:
        return int(os.environ['SLURM_NTASKS'])
    elif 'SLURM_JOB_CPUS_PER_NODE' in os.environ:
        text = os.environ['SLURM_JOB_CPUS_PER_NODE']
        if is_integer(text):
            return int(text)
        else:
            # BUG FIX: the original character class [1-9] excluded '0',
            # so values containing it (e.g. "10(x2)") failed to parse
            # and raised IndexError.
            n, N = re.findall(r"(\d+)\(x(\d+)\)", text)[0]
            return int(n) * int(N)
    else:
        return multiprocessing.cpu_count()
How many cores does the current computer have ?
51,793
def guess_server_name():
    """We often use the same servers; figure out which one we are
    running on now from environment variables."""
    env = os.environ.get
    if env('CSCSERVICE') == 'sisu':
        return "sisu"
    if env('SLURM_JOB_PARTITION') == 'halvan':
        return "halvan"
    if env('SNIC_RESOURCE') == 'milou':
        return "milou"
    if env('LAPTOP') == 'macbook_air':
        return "macbook_air"
    return "unknown"
We often use the same servers which one are we running on now ?
51,794
def get_instance(cls, instance_or_pk):
    """Return a model instance attached to db.session.

    Accepts either an instance or a primary key. An instance already in
    the session is returned as-is; otherwise it is looked up by primary
    key via the query.
    """
    if isinstance(instance_or_pk, cls):
        if instance_or_pk in cls._flask_signalbus_sa.session:
            return instance_or_pk
        # Detached instance: fall back to a primary-key lookup.
        instance_or_pk = inspect(cls).primary_key_from_instance(
            instance_or_pk)
    return cls.query.get(instance_or_pk)
Return a model instance in db . session .
51,795
def lock_instance(cls, instance_or_pk, read=False):
    """Return a row-locked model instance in db.session, or None.

    Issues SELECT ... FOR UPDATE (or a shared/read lock when
    read=True) on the row identified by the instance or primary key.
    """
    mapper = inspect(cls)
    pk_attrs = [mapper.get_property_by_column(c).class_attribute
                for c in mapper.primary_key]
    pk_values = cls.get_pk_values(instance_or_pk)
    # Build `pk1 == v1 AND pk2 == v2 ...` -- handles composite keys.
    clause = and_(*[attr == value
                    for attr, value in zip(pk_attrs, pk_values)])
    return cls.query.filter(clause).with_for_update(read=read).one_or_none()
Return a locked model instance in db . session .
51,796
def get_pk_values(cls, instance_or_pk):
    """Return the primary key as a tuple.

    Accepts an instance, a scalar key, or a composite-key tuple. For
    instances the session is flushed first so database-generated keys
    are populated.
    """
    if isinstance(instance_or_pk, cls):
        cls._flask_signalbus_sa.session.flush()
        instance_or_pk = inspect(cls).primary_key_from_instance(
            instance_or_pk)
    # Normalise scalar keys to a 1-tuple.
    return instance_or_pk if isinstance(instance_or_pk, tuple) else (instance_or_pk,)
Return a primary key as a tuple .
51,797
def atomic(self, func):
    """Decorator that wraps *func* in an atomic database block.

    Nested use is a no-op: a flag in ``session.info`` marks that an
    atomic block is already active. The outermost call adds
    deadlock-retry behaviour, then on success flushes, expunges all
    instances and commits; on any exception it rolls back and
    re-raises.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        session = self.session
        session_info = session.info
        if session_info.get(_ATOMIC_FLAG_SESSION_INFO_KEY):
            # Already inside an atomic block -- just run the function.
            return func(*args, **kwargs)
        # Outermost block: retry the whole function on deadlock.
        f = retry_on_deadlock(session)(func)
        session_info[_ATOMIC_FLAG_SESSION_INFO_KEY] = True
        try:
            result = f(*args, **kwargs)
            session.flush()
            # Detach everything so stale state is not reused after
            # the commit.
            session.expunge_all()
            session.commit()
            return result
        except Exception:
            session.rollback()
            raise
        finally:
            # Always clear the flag, even on failure.
            session_info[_ATOMIC_FLAG_SESSION_INFO_KEY] = False
    return wrapper
A decorator that wraps a function in an atomic block .
51,798
def _save_translations(sender, instance, *args, **kwargs):
    """Signal handler that saves model field translations.

    Returns False (no-op) for monolingual sites and for models without
    translatable fields. For each translatable field, stores an MD5
    checksum of the value on the instance and updates a
    FieldTranslation for every non-default language.
    """
    if site_is_monolingual():
        return False
    cls = sender
    if not hasattr(cls._meta, "translatable_fields"):
        return False
    for field in cls._meta.translatable_fields:
        value = getattr(instance, field)
        # Idiom fix: `value is not None` replaces `not value is None`.
        if value is not None:
            md5_value = checksum(value)
            setattr(instance, u"md5" + field, md5_value)
        for lang in settings.LANGUAGES:
            lang = lang[0]
            if lang != settings.LANGUAGE_CODE:
                context = u"Updating from object"
                if hasattr(instance, "trans_context"):
                    context = getattr(instance, "trans_context")
                # Return value intentionally ignored (the original bound
                # it to an unused local).
                FieldTranslation.update(instance, field, lang, context)
This signal saves model translations .
51,799
def _get_fieldtranslations(instance, field=None, lang=None):
    """Get the translation for this object.

    Filters FieldTranslation rows by module/model/object id, optionally
    narrowed by *field* and *lang*. Returns the single matching
    FieldTranslation, or False when none exists.
    """
    _filter = {"module": instance.__module__,
               "model": instance.__class__.__name__,
               "object_id": instance.id}
    if lang:
        _filter["lang"] = lang
    if field:
        _filter["field"] = field
    try:
        return FieldTranslation.objects.get(**_filter)
    except FieldTranslation.DoesNotExist:
        return False
    # NOTE(review): unreachable -- both branches above return. The
    # summary says "all the translations", so a .filter() path was
    # probably intended when field/lang do not pin a unique row;
    # confirm intent before fixing.
    return FieldTranslation.objects.filter(**_filter)
Get all the translations for this object .