idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
6,700
def stored_version(self, name):
    """Return the stored version of file *name*, or None if no link for it exists."""
    link_path = self._link_path(name)
    if not _path_exists(link_path):
        return None
    return _file_version(link_path)
Returns the version of the file `name`, or None if it doesn't exist.
6,701
def save(self, new_path=None):
    """Move or create the file with the stream contents at *new_path*.

    If *new_path* is None a temporary file is created (and ``saved_in_temp``
    records that).  An already-materialized file is moved; otherwise the
    backing stream is copied into the destination.
    """
    self.saved_in_temp = new_path is None
    if new_path is None:
        fd, new_path = tempfile.mkstemp()
        os.close(fd)
    if self.current_path:
        shutil.move(self.current_path, new_path)
    else:
        with open(new_path, 'wb') as dest:
            _copy_stream(self._data, dest, self._size)
    self.current_path = new_path
Moves or creates the file with stream contents to a new location .
6,702
def get_lps(self):
    """Iterate over all linguistic processor (lp) objects from every
    linguisticProcessors layer in the header."""
    if self.header is not None:
        for linguisticProcessor in self.header:
            for lp in linguisticProcessor:
                yield lp
Iterator that returns all the lp objects from linguistic processors layers from the header
6,703
def get_trees_as_list(self):
    """Return the constituency trees as a list.

    Returns an empty list when there is no constituency layer.  (The manual
    append loop was replaced by the idiomatic ``list()`` construction.)
    """
    if self.constituency_layer is None:
        return []
    return list(self.constituency_layer.get_trees())
Returns a list of the constituency trees (empty if there is no constituency layer).
6,704
def convert_factualitylayer_to_factualities(self):
    """Convert the old 'factualitylayer' representation into the new
    factualities representation and remove the old layer node."""
    if self.factuality_layer is not None:
        this_node = self.factuality_layer.get_node()
        if this_node.tag == 'factualitylayer':
            new_node = Cfactualities()
            # map each token id to the id of the term that spans it
            token2term = {}
            for t in self.get_terms():
                s = t.get_span()
                for w in s.get_span_ids():
                    token2term[w] = t.get_id()
            fnr = 0
            for fv in self.get_factvalues():
                fnr += 1
                conf = fv.get_confidence()
                wid = fv.get_id()
                # old factvalues point at tokens; the new layer spans terms
                tid = token2term.get(wid)
                fnode = Cfactuality()
                fspan = Cspan()
                fspan.add_target_id(tid)
                fnode.set_span(fspan)
                fVal = Cfactval()
                fVal.set_resource('factbank')
                fVal.set_value(fv.get_prediction())
                if conf:
                    fVal.set_confidence(conf)
                fnode.set_id('f' + str(fnr))
                fnode.add_factval(fVal)
                new_node.add_factuality(fnode)
            self.root.remove(this_node)
            self.root.append(new_node.get_node())
            self.factuality_layer = new_node
Takes information from factuality layer in old representation Creates new factuality representation and removes the old layer
6,705
def get_constituency_extractor(self):
    """Return a (lazily created, cached) constituency extractor, or None when
    the document has no constituency layer."""
    if self.constituency_layer is None:
        return None
    if self.my_constituency_extractor is None:
        self.my_constituency_extractor = Cconstituency_extractor(self)
    return self.my_constituency_extractor
Returns a constituency extractor object
6,706
def get_dependency_extractor(self):
    """Return a (lazily created, cached) dependency extractor, or None when
    the document has no dependency layer."""
    if self.dependency_layer is None:
        return None
    if self.my_dependency_extractor is None:
        self.my_dependency_extractor = Cdependency_extractor(self)
    return self.my_dependency_extractor
Returns a dependency extractor object
6,707
def add_wf(self, wf_obj):
    """Add a token to the text layer, creating the layer first if needed."""
    if self.text_layer is None:
        self.text_layer = Ctext(type=self.type)
        self.root.append(self.text_layer.get_node())
    self.text_layer.add_wf(wf_obj)
Adds a token to the text layer
6,708
def add_term(self, term_obj):
    """Add a term to the term layer, creating the layer first if needed."""
    if self.term_layer is None:
        self.term_layer = Cterms(type=self.type)
        self.root.append(self.term_layer.get_node())
    self.term_layer.add_term(term_obj)
Adds a term to the term layer
6,709
def add_chunk(self, chunk_obj):
    """Add a chunk to the chunk layer, creating the layer first if needed."""
    if self.chunk_layer is None:
        self.chunk_layer = Cchunks(type=self.type)
        self.root.append(self.chunk_layer.get_node())
    self.chunk_layer.add_chunk(chunk_obj)
Adds a chunk to the chunk layer
6,710
def create_term(self, lemma, pos, morphofeat, tokens, id=None):
    """Create a new term, add it to the term layer and return it.

    If *id* is None an identifier of the form ``t<n>`` is generated from the
    current number of terms in the layer.  The term's span is built from the
    ids of *tokens*.  (The fragile ``"t{n}".format(**locals())`` idiom was
    replaced with an explicit format argument.)
    """
    if id is None:
        n = 1 if self.term_layer is None else len(self.term_layer.idx) + 1
        id = "t{}".format(n)
    new_term = Cterm(type=self.type)
    new_term.set_id(id)
    new_term.set_lemma(lemma)
    new_term.set_pos(pos)
    new_term.set_morphofeat(morphofeat)
    new_span = Cspan()
    for token in tokens:
        new_span.add_target_id(token.get_id())
    new_term.set_span(new_span)
    self.add_term(new_term)
    return new_term
Create a new term and add it to the term layer
6,711
def add_markable(self, markable_obj):
    """Add a markable to the markable layer, creating the layer first if needed."""
    if self.markable_layer is None:
        self.markable_layer = Cmarkables(type=self.type)
        self.root.append(self.markable_layer.get_node())
    self.markable_layer.add_markable(markable_obj)
Adds a markable to the markable layer
6,712
def add_opinion(self, opinion_obj):
    """Add an opinion to the opinion layer, creating the layer first if needed."""
    if self.opinion_layer is None:
        self.opinion_layer = Copinions()
        self.root.append(self.opinion_layer.get_node())
    self.opinion_layer.add_opinion(opinion_obj)
Adds an opinion to the opinion layer
6,713
def add_statement(self, statement_obj):
    """Add a statement to the attribution layer, creating the layer first if needed."""
    if self.attribution_layer is None:
        self.attribution_layer = Cattribution()
        self.root.append(self.attribution_layer.get_node())
    self.attribution_layer.add_statement(statement_obj)
Adds a statement to the attribution layer
6,714
def add_predicate(self, predicate_obj):
    """Add a predicate to the SRL (semantic role) layer, creating the layer first if needed."""
    if self.srl_layer is None:
        self.srl_layer = Csrl()
        self.root.append(self.srl_layer.get_node())
    self.srl_layer.add_predicate(predicate_obj)
Adds a predicate to the semantic layer
6,715
def add_timex(self, time_obj):
    """Add a timex entry to the time-expressions layer, creating the layer first if needed."""
    if self.timex_layer is None:
        self.timex_layer = CtimeExpressions()
        self.root.append(self.timex_layer.get_node())
    self.timex_layer.add_timex(time_obj)
Adds a timex entry to the time layer
6,716
def set_header(self, header):
    """Set the header of the object; its node is inserted first under the root."""
    self.header = header
    self.root.insert(0, header.get_node())
Sets the header of the object
6,717
def add_linguistic_processor(self, layer, my_lp):
    """Add a linguistic processor for *layer* to the header, creating the
    header (inserted first under the root) if needed."""
    if self.header is None:
        self.header = CHeader(type=self.type)
        self.root.insert(0, self.header.get_node())
    self.header.add_linguistic_processor(layer, my_lp)
Adds a linguistic processor to the header
6,718
def create_linguistic_processor(self, layer, name, version, **kwargs):
    """Create a new linguistic processor element, add it to the header and return it."""
    lp = Clp(name=name, version=version, **kwargs)
    self.add_linguistic_processor(layer, lp)
    return lp
Create a new linguistic processor element and add it to the header
6,719
def add_dependency(self, my_dep):
    """Add a dependency to the dependency layer, creating the layer first if needed."""
    if self.dependency_layer is None:
        self.dependency_layer = Cdependencies()
        self.root.append(self.dependency_layer.get_node())
    self.dependency_layer.add_dependency(my_dep)
Adds a dependency to the dependency layer
6,720
def create_dependency(self, _from, to, function, comment=None):
    """Create a new dependency object, add it to the dependency layer and return it."""
    new_dependency = Cdependency()
    new_dependency.set_from(_from)
    new_dependency.set_to(to)
    new_dependency.set_function(function)
    if comment:
        new_dependency.set_comment(comment)
    self.add_dependency(new_dependency)
    return new_dependency
Create a new dependency object and add it to the dependency layer
6,721
def add_tlink(self, my_tlink):
    """Add a tlink to the temporalRelations layer, creating the layer first if needed."""
    if self.temporalRelations_layer is None:
        self.temporalRelations_layer = CtemporalRelations()
        self.root.append(self.temporalRelations_layer.get_node())
    self.temporalRelations_layer.add_tlink(my_tlink)
Adds a tlink to the temporalRelations layer
6,722
def add_predicateAnchor(self, my_predAnch):
    """Add a predicate anchor to the temporalRelations layer, creating the layer first if needed."""
    if self.temporalRelations_layer is None:
        self.temporalRelations_layer = CtemporalRelations()
        self.root.append(self.temporalRelations_layer.get_node())
    self.temporalRelations_layer.add_predicateAnchor(my_predAnch)
Adds a predAnch to the temporalRelations layer
6,723
def add_clink(self, my_clink):
    """Add a clink to the causalRelations layer, creating the layer first if needed."""
    if self.causalRelations_layer is None:
        self.causalRelations_layer = CcausalRelations()
        self.root.append(self.causalRelations_layer.get_node())
    self.causalRelations_layer.add_clink(my_clink)
Adds a clink to the causalRelations layer
6,724
def add_factuality(self, my_fact):
    """Add a factuality to the factuality layer, creating the layer first if needed."""
    if self.factuality_layer is None:
        self.factuality_layer = Cfactualities()
        self.root.append(self.factuality_layer.get_node())
    self.factuality_layer.add_factuality(my_fact)
Adds a factuality to the factuality layer
6,725
def add_entity(self, entity):
    """Add an entity to the entity layer, creating the layer first if needed."""
    if self.entity_layer is None:
        self.entity_layer = Centities(type=self.type)
        self.root.append(self.entity_layer.get_node())
    self.entity_layer.add_entity(entity)
Adds an entity to the entity layer
6,726
def add_coreference(self, coreference):
    """Add a coreference to the coreference layer, creating the layer first if needed."""
    if self.coreference_layer is None:
        self.coreference_layer = Ccoreferences(type=self.type)
        self.root.append(self.coreference_layer.get_node())
    self.coreference_layer.add_coreference(coreference)
Adds a coreference to the coreference layer
6,727
def create_coreference(self, coref_type, term_ids, id=None):
    """Create a new coreference object, add it to the coreferences layer and
    return it.

    If *id* is None an identifier of the form ``co<type><i>`` is generated,
    where ``i`` counts existing coreferences of the same type.

    Bug fix: the original compared the *bound method* ``l.get_type`` against
    *coref_type* (always False, so ``i`` was always 1); the method must be
    called.  The ``format(**locals())`` idiom was also replaced with explicit
    arguments.
    """
    if id is None:
        if self.coreference_layer is None:
            i = 1
        else:
            same_type = [l for l in self.coreference_layer.get_corefs()
                         if l.get_type() == coref_type]
            i = len(same_type) + 1
        id = "co{}{}".format(coref_type, i)
    new_coref = Ccoreference(type=self.type)
    new_coref.set_id(id)
    new_coref.set_type(coref_type)
    new_coref.add_span(term_ids)
    self.add_coreference(new_coref)
    return new_coref
Create a new coreference object and add it to the coreferences layer
6,728
def add_constituency_tree(self, my_tree):
    """Add a constituency tree to the constituency layer, creating the layer first if needed."""
    if self.constituency_layer is None:
        self.constituency_layer = Cconstituency()
        self.root.append(self.constituency_layer.get_node())
    self.constituency_layer.add_tree(my_tree)
Adds a constituency tree to the constituency layer
6,729
def add_property(self, label, term_span, pid=None):
    """Add a property (with optional id *pid*) to the features layer,
    creating the layer first if needed."""
    if self.features_layer is None:
        self.features_layer = Cfeatures(type=self.type)
        self.root.append(self.features_layer.get_node())
    self.features_layer.add_property(pid, label, term_span)
Adds a property to the property layer
6,730
def get_dict_tokens_for_termid(self, term_id):
    """Return the token ids spanned by *term_id* (empty list if unknown).

    The term-id -> token-ids mapping is built lazily on first call and cached
    in ``self.dict_tokens_for_tid``.
    """
    if self.dict_tokens_for_tid is None:
        self.dict_tokens_for_tid = {}
        for term in self.get_terms():
            self.dict_tokens_for_tid[term.get_id()] = term.get_span().get_span_ids()
    return self.dict_tokens_for_tid.get(term_id, [])
Returns the tokens ids that are the span of the term specified
6,731
def map_tokens_to_terms(self, list_tokens):
    """Map a list of token ids to the sorted list of term ids whose spans
    cover them.  The token -> terms index is built lazily and cached."""
    if self.terms_for_token is None:
        index = {}
        for term in self.get_terms():
            term_id = term.get_id()
            for token_id in term.get_span().get_span_ids():
                index.setdefault(token_id, []).append(term_id)
        self.terms_for_token = index
    matched = set()
    for token_id in list_tokens:
        matched.update(self.terms_for_token.get(token_id, []))
    return sorted(matched)
Maps a list of token ids to the corresponding term ids
6,732
def add_external_reference_to_term(self, term_id, external_ref):
    """Add an external reference to the given term id (no-op without a term layer)."""
    if self.term_layer is not None:
        self.term_layer.add_external_reference(term_id, external_ref)
Adds an external reference to the given term identifier
6,733
def add_external_reference_to_role(self, role_id, external_ref):
    """Add an external reference to the given role id in the SRL layer (no-op without one)."""
    if self.srl_layer is not None:
        self.srl_layer.add_external_reference_to_role(role_id, external_ref)
Adds an external reference to the given role identifier in the SRL layer
6,734
def remove_external_references_from_srl_layer(self):
    """Remove all external references from the SRL layer's predicates and their roles."""
    if self.srl_layer is not None:
        for pred in self.srl_layer.get_predicates():
            pred.remove_external_references()
            pred.remove_external_references_from_roles()
Removes all external references present in the SRL layer (from predicates and their roles)
6,735
def add_external_reference_to_entity(self, entity_id, external_ref):
    """Add an external reference to the given entity id in the entity layer (no-op without one)."""
    if self.entity_layer is not None:
        self.entity_layer.add_external_reference_to_entity(entity_id, external_ref)
Adds an external reference to the given entity identifier in the entity layer
6,736
def read(self, size=-1):
    """Read up to *size* bytes (all remaining data if *size* is negative).

    Data is accumulated in the read buffer ``self._rbuf``; any bytes beyond
    *size* are pushed back into the buffer for the next call.  A None return
    from ``_read_chunk`` means "retry" (presumably a would-block condition —
    TODO confirm against _read_chunk), while an empty chunk means EOF.
    """
    chunksize = size < 0 and self.CHUNKSIZE or min(self.CHUNKSIZE, size)
    buf = self._rbuf
    buf.seek(0, os.SEEK_END)
    collected = buf.tell()
    while 1:
        if size >= 0 and collected >= size:
            break
        output = self._read_chunk(chunksize)
        if output is None:
            continue
        if not output:
            break
        collected += len(output)
        buf.write(output)
    rc = buf.getvalue()
    buf.seek(0)
    buf.truncate()
    if size >= 0:
        # keep the surplus bytes buffered for the next read
        buf.write(rc[size:])
        return rc[:size]
    return rc
read a number of bytes from the file and return it as a string
6,737
def readline(self, max_len=-1):
    """Read from the file until a newline is encountered (or *max_len* bytes).

    Buffered data in ``self._rbuf`` is consumed first; surplus bytes beyond
    the returned line are pushed back into the buffer.  The while/else form
    is intentional: the ``else`` branch handles the "newline found" case,
    the trailing code handles EOF with no newline.
    """
    buf = self._rbuf
    newline, chunksize = self.NEWLINE, self.CHUNKSIZE
    buf.seek(0)
    text = buf.read()
    # buffered data alone already satisfies max_len
    if len(text) >= max_len >= 0:
        buf.seek(0)
        buf.truncate()
        buf.write(text[max_len:])
        return text[:max_len]
    while text.find(newline) < 0:
        text = self._read_chunk(chunksize)
        if text is None:
            # retry (presumably a would-block condition — TODO confirm)
            text = ''
            continue
        if buf.tell() + len(text) >= max_len >= 0:
            text = buf.getvalue() + text
            buf.seek(0)
            buf.truncate()
            buf.write(text[max_len:])
            return text[:max_len]
        if not text:
            break
        buf.write(text)
    else:
        # newline found: return through it, keep the rest buffered
        rc = buf.getvalue()
        index = rc.find(newline) + len(newline)
        buf.seek(0)
        buf.truncate()
        buf.write(rc[index:])
        return rc[:index]
    # EOF reached with no newline: return whatever was collected
    rc = buf.getvalue()
    buf.seek(0)
    buf.truncate()
    return rc
read from the file until a newline is encountered
6,738
def write(self, data):
    """Write *data* to the file, looping until every byte has been handed off.

    A None return from ``_write_chunk`` means no progress this round
    (presumably a would-block condition — TODO confirm); the loop retries.
    """
    while data:
        went = self._write_chunk(data)
        if went is None:
            continue
        data = data[went:]
write data to the file
6,739
def _wait_event(self, reading):
    """Wait on our readiness events: readable when *reading*, else writable.

    Raises IOError(EINTR) when the scheduler reports an interruption.
    """
    with self._registered(reading, not reading):
        (self._readable if reading else self._writable).wait()
    if scheduler.state.interrupted:
        raise IOError(errno.EINTR, "interrupted system call")
wait on our events
6,740
def fromfd(cls, fd, mode='rb', bufsize=-1):
    """Create a cooperating file object from an existing descriptor.

    ``object.__new__`` bypasses the normal constructor (which would open a
    path); the descriptor's flags are adjusted to match *mode*.  Note that
    *bufsize* is accepted for API compatibility but not used in this body.
    """
    fp = object.__new__(cls)
    fp._rbuf = StringIO()
    fp.encoding = None
    fp.mode = mode
    fp._fileno = fd
    fp._closed = False
    cls._add_flags(fd, cls._mode_to_flags(mode))
    fp._set_up_waiting()
    return fp
create a cooperating greenhouse file from an existing descriptor
6,741
def isatty(self):
    """Return whether the underlying descriptor is connected to a tty.

    OSError from ``os.isatty`` is re-raised as IOError to match the rest of
    the file API.  Fix: the original used the Python-2-only
    ``except OSError, e`` syntax; ``except OSError as e`` works on both
    Python 2.6+ and Python 3.
    """
    try:
        return os.isatty(self._fileno)
    except OSError as e:
        raise IOError(*e.args)
return whether the file is connected to a tty or not
6,742
def seek(self, position, modifier=0):
    """Move the descriptor's cursor with lseek and drop buffered read-ahead.

    The read buffer no longer reflects the data at the new offset, so it is
    emptied.
    """
    os.lseek(self._fileno, position, modifier)
    buf = self._rbuf
    buf.seek(0)
    buf.truncate()
move the cursor on the file descriptor to a different location
6,743
def tell(self):
    """Return the descriptor's position relative to the file's beginning.

    The fd is duplicated so querying the position does not disturb the
    original descriptor's state.
    """
    with _fdopen(os.dup(self._fileno)) as fp:
        return fp.tell()
get the file descriptor s position relative to the file s beginning
6,744
def fit(self, labels, samples, pstates):
    """Fit one POHMM per distinct label and return self.

    *labels*, *samples* and *pstates* are parallel sequences; samples that
    share a label are fitted together.  (The original re-scanned all three
    sequences once per distinct label — O(labels * n); this groups them in a
    single pass.)
    """
    assert len(labels) == len(samples) == len(pstates)
    grouped = {}
    for label, sample, pstate in zip(labels, samples, pstates):
        entry = grouped.setdefault(label, ([], []))
        entry[0].append(sample)
        entry[1].append(pstate)
    for label, (label_samples, label_pstates) in grouped.items():
        pohmm = self.pohmm_factory()
        pohmm.fit(label_samples, label_pstates)
        self.pohmms[label] = pohmm
    return self
Fit the classifier with labels y and observations X
6,745
def fit_df(self, labels, dfs, pstate_col=PSTATE_COL):
    """Fit one POHMM per distinct label from per-sample DataFrames and return self.

    *labels* and *dfs* are parallel sequences; DataFrames sharing a label are
    fitted together.  (The original re-scanned both sequences once per
    distinct label; this groups them in a single pass.)
    """
    assert len(labels) == len(dfs)
    grouped = {}
    for label, df in zip(labels, dfs):
        grouped.setdefault(label, []).append(df)
    for label, label_dfs in grouped.items():
        pohmm = self.pohmm_factory()
        pohmm.fit_df(label_dfs, pstate_col=pstate_col)
        self.pohmms[label] = pohmm
    return self
Fit the classifier with labels y and DataFrames dfs
6,746
def predict(self, sample, pstates):
    """Score *sample* under every fitted POHMM and return the label with the
    highest score together with the full label -> score mapping."""
    scores = {label: model.score(sample, pstates)
              for label, model in self.pohmms.items()}
    best_label = max(scores, key=scores.get)
    return best_label, scores
Predict the class label of X
6,747
def predict_df(self, df, pstate_col=PSTATE_COL):
    """Score DataFrame *df* under every fitted POHMM and return the label
    with the highest score together with the full label -> score mapping."""
    scores = {label: model.score_df(df, pstate_col=pstate_col)
              for label, model in self.pohmms.items()}
    best_label = max(scores, key=scores.get)
    return best_label, scores
Predict the class label of DataFrame df
6,748
def load_secrets(self, secret_path):
    """Render the secrets at *secret_path* into the cached config object."""
    self._config = p_config.render_secrets(self.config_path, secret_path)
render secrets into config object
6,749
def logger(self):
    """Return the lazily built global logger.

    On first use a ProsperLogger is configured: debug handlers when
    ``self.verbose`` is set, otherwise any webhook handlers (discord, slack,
    hipchat) that the LOGGING config section enables.
    """
    if self._logger:
        return self._logger
    else:
        log_builder = p_logging.ProsperLogger(
            self.PROGNAME,
            self.config.get_option('LOGGING', 'log_path'),
            config_obj=self.config
        )
        if self.verbose:
            log_builder.configure_debug_logger()
        else:
            id_string = '({platform}--{version})'.format(
                platform=platform.node(),
                version=self.VERSION
            )
            if self.config.get_option('LOGGING', 'discord_webhook'):
                log_builder.configure_discord_logger(custom_args=id_string)
            if self.config.get_option('LOGGING', 'slack_webhook'):
                log_builder.configure_slack_logger(custom_args=id_string)
            if self.config.get_option('LOGGING', 'hipchat_webhook'):
                log_builder.configure_hipchat_logger(custom_args=id_string)
        self._logger = log_builder.get_logger()
        return self._logger
uses global logger for logging
6,750
def config(self):
    """Return the global config object, building it lazily on first access."""
    if not self._config:
        self._config = p_config.ProsperConfig(self.config_path)
    return self._config
uses global config for cfg
6,751
def notify_launch(self, log_level='ERROR'):
    """Log a launch message (at *log_level*, skipped in debug mode) and the
    effective Flask options before startup."""
    if not self.debug:
        self.logger.log(
            logging.getLevelName(log_level),
            'LAUNCHING %s -- %s', self.PROGNAME, platform.node())
    flask_options = {key: getattr(self, key) for key in OPTION_ARGS}
    flask_options['host'] = self.get_host()
    self.logger.info('OPTIONS: %s', flask_options)
logs launcher message before startup
6,752
def landsat_c1_toa_cloud_mask(input_img, snow_flag=False, cirrus_flag=False,
                              cloud_confidence=2, shadow_confidence=3,
                              snow_confidence=3, cirrus_confidence=3):
    """Build a clear-pixel mask from the Landsat Collection 1 TOA BQA band.

    A pixel is flagged when the cloud bit (bit 4) is set and the two-bit
    cloud confidence (bits 5-6) reaches *cloud_confidence*, or the shadow
    confidence (bits 7-8) reaches *shadow_confidence*; snow (bits 9-10) and
    cirrus (bits 11-12) tests are enabled by the corresponding flags.  The
    mask is inverted with .Not() before returning, so 1 marks clear pixels.
    """
    qa_img = input_img.select(['BQA'])
    cloud_mask = qa_img.rightShift(4).bitwiseAnd(1).neq(0) \
        .And(qa_img.rightShift(5).bitwiseAnd(3).gte(cloud_confidence)) \
        .Or(qa_img.rightShift(7).bitwiseAnd(3).gte(shadow_confidence))
    if snow_flag:
        cloud_mask = cloud_mask.Or(
            qa_img.rightShift(9).bitwiseAnd(3).gte(snow_confidence))
    if cirrus_flag:
        cloud_mask = cloud_mask.Or(
            qa_img.rightShift(11).bitwiseAnd(3).gte(cirrus_confidence))
    return cloud_mask.Not()
Extract cloud mask from the Landsat Collection 1 TOA BQA band
6,753
def landsat_c1_sr_cloud_mask(input_img, cloud_confidence=3, snow_flag=False):
    """Build a clear-pixel mask from the Landsat Collection 1 SR pixel_qa band.

    A pixel is flagged when the cloud bit (bit 5) is set and the two-bit
    cloud confidence (bits 6-7) reaches *cloud_confidence*, or the shadow
    bit (bit 3) is set; the snow bit (bit 4) is tested when *snow_flag* is
    set.  The mask is inverted with .Not(), so 1 marks clear pixels.
    """
    qa_img = input_img.select(['pixel_qa'])
    cloud_mask = qa_img.rightShift(5).bitwiseAnd(1).neq(0) \
        .And(qa_img.rightShift(6).bitwiseAnd(3).gte(cloud_confidence)) \
        .Or(qa_img.rightShift(3).bitwiseAnd(1).neq(0))
    if snow_flag:
        cloud_mask = cloud_mask.Or(qa_img.rightShift(4).bitwiseAnd(1).neq(0))
    return cloud_mask.Not()
Extract cloud mask from the Landsat Collection 1 SR pixel_qa band
6,754
def sentinel2_toa_cloud_mask(input_img):
    """Build a clear-pixel mask from the Sentinel-2 TOA QA60 band.

    A pixel is flagged when the opaque-cloud bit (bit 10) or the cirrus bit
    (bit 11) is set; the mask is inverted with .Not(), so 1 marks clear
    pixels.
    """
    qa_img = input_img.select(['QA60'])
    cloud_mask = qa_img.rightShift(10).bitwiseAnd(1).neq(0) \
        .Or(qa_img.rightShift(11).bitwiseAnd(1).neq(0))
    return cloud_mask.Not()
Extract cloud mask from the Sentinel 2 TOA QA60 band
6,755
def where(cmd, path=None):
    """Locate *cmd* on the search path (a thin wrapper over shutil.which).

    Returns the absolute path to the executable; raises ValueError when the
    command cannot be found.
    """
    found = shutil.which(cmd, os.X_OK, path)
    if not found:
        raise ValueError("Could not find '{}' in the path".format(cmd))
    return os.path.abspath(found)
A function to wrap shutil . which for universal usage
6,756
def search_file(pattern, file_path):
    """Return every regex match of *pattern* found in the contents of
    *file_path*; an unreadable (permission-denied) file yields []."""
    try:
        with open(file_path) as handle:
            contents = handle.read()
    except PermissionError:
        return []
    return re.findall(pattern, contents)
Search a given file's contents for the regex pattern given as `pattern`
6,757
def call(commands, *, print_result=False, raise_exception=False, print_commands=False):
    """Run *commands* in a subprocess and wrangle the output as requested.

    *commands* may be a string (split on whitespace), tuple or list; anything
    else raises ValueError.  Popen failures are caught and wrapped in a
    Result with returncode 1.  When *raise_exception* is set, a failing
    command raises instead of printing.

    Fixes: the bare ``except:`` (which would also swallow KeyboardInterrupt
    and SystemExit) is narrowed to ``except Exception``, and a missing space
    in the ValueError message ("'commands'argument") is restored.
    """
    if isinstance(commands, str):
        commands = commands.split()
    if not isinstance(commands, (tuple, list)):
        raise ValueError("Function 'call' does not accept a 'commands' "
                         "argument of type '{}'".format(type(commands)))
    if raise_exception:
        print_result = False
    try:
        process = subprocess.Popen(commands, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        if print_commands:
            _print_commands(commands)
    except Exception:
        output = traceback.format_exc()
        result = Result(1, stderr=output)
        if print_result and not raise_exception:
            print(output, file=sys.stderr)
    else:
        result = _extract_output(process, print_result, raise_exception)
    if raise_exception and (result.returncode == 1):
        message = "An error occurred in an external process:\n\n{}"
        raise Exception(message.format(result.getStderr()))
    return result
Will call a set of commands and wrangle the output how you choose
6,758
def multiCall(*commands, dependent=True, bundle=False, print_result=False, print_commands=False):
    """Run several command sets through call().

    With *dependent* (default), commands after the first failure are skipped
    and recorded as None.  With *bundle*, all responses are merged into one
    Result whose returncode is 1 if any command failed; otherwise the list of
    per-command responses is returned.
    """
    results = []
    dependent_failed = False
    for command in commands:
        if not dependent_failed:
            response = call(command, print_result=print_result,
                            print_commands=print_commands)
            if (response.returncode == 1) and dependent:
                dependent_failed = True
        else:
            # a previous dependent command failed; skip this one
            response = None
        results.append(response)
    if bundle:
        result = Result()
        for response in results:
            if not response:
                continue
            elif response.returncode == 1:
                result.returncode = 1
            result.extendInformation(response)
        processed_response = result
    else:
        processed_response = results
    return processed_response
Calls the function call multiple times given sets of commands
6,759
def distribute(function, iterable, *, workers=4):
    """A multiprocessing.Pool.map variant that pickles (function, item) pairs
    with dill so that otherwise-unpicklable callables survive the trip to
    the worker processes."""
    with multiprocessing.Pool(workers) as pool:
        processes = []
        for item in iterable:
            pickled = dill.dumps((function, item))
            process = pool.apply_async(_run_pickled, (pickled,))
            processes.append(process)
        results = [process.get() for process in processes]
        return results
A version of multiprocessing . Pool . map that works using dill to pickle the function and iterable
6,760
def getOutput(self):
    """Return stdout and stderr combined; a CRLF separates them only when
    stdout is non-empty."""
    if self.stdout:
        return self.stdout + '\r\n' + self.stderr
    return self.stdout + self.stderr
Returns the combined output of stdout and stderr
6,761
def extendInformation(self, response):
    """Append *response*'s stdout and stderr (each preceded by a CRLF) to
    this object's corresponding streams, skipping empty ones."""
    for stream in ('stdout', 'stderr'):
        addition = getattr(response, stream)
        if addition:
            setattr(self, stream, getattr(self, stream) + '\r\n' + addition)
Extends the object's stdout and stderr with the response's stdout and stderr
6,762
def wait_socks(sock_events, inmask=1, outmask=2, timeout=None):
    """Wait on a mixture of zeromq sockets, normal sockets and raw fds.

    First pass: zmq sockets that are already ready are returned immediately.
    Otherwise every entry is reduced to an fd and handed to
    descriptor.wait_fds; zmq results are re-checked because fd readiness on
    a zmq socket does not guarantee message readiness.
    """
    results = []
    for sock, mask in sock_events:
        if isinstance(sock, zmq.backend.Socket):
            mask = _check_events(sock, mask, inmask, outmask)
            if mask:
                results.append((sock, mask))
    if results:
        return results
    fd_map = {}
    fd_events = []
    for sock, mask in sock_events:
        if isinstance(sock, zmq.backend.Socket):
            fd = sock.getsockopt(zmq.FD)
        elif isinstance(sock, int):
            fd = sock
        else:
            fd = sock.fileno()
        fd_map[fd] = sock
        fd_events.append((fd, mask))
    while 1:
        started = time.time()
        active = descriptor.wait_fds(fd_events, inmask, outmask, timeout)
        if not active:
            # timed out
            return []
        results = []
        for fd, mask in active:
            sock = fd_map[fd]
            if isinstance(sock, zmq.backend.Socket):
                # fd readiness on a zmq socket is only a hint; re-check
                mask = _check_events(sock, mask, inmask, outmask)
                if not mask:
                    continue
            results.append((sock, mask))
        if results:
            return results
        # NOTE(review): when timeout is None this subtraction would raise
        # TypeError if the loop repeats -- confirm wait_fds blocks forever
        # in that case
        timeout -= time.time() - started
wait on a combination of zeromq sockets normal sockets and fds
6,763
def remove_this_factuality(self, factuality_id):
    """Remove the factuality whose id equals *factuality_id*; no-op when no
    such factuality exists."""
    match = next((fact for fact in self.get_factualities()
                  if fact.get_id() == factuality_id), None)
    if match is not None:
        self.node.remove(match.get_node())
Removes the factuality for the given factuality identifier
6,764
def remove_factuality(self, fid):
    """Remove any <factuality> element with id *fid* from this layer.

    findall() returns a list, so removing while iterating is safe here.
    """
    for node_pre in self.node.findall('factuality'):
        if node_pre.get('id') == fid:
            self.node.remove(node_pre)
Removes a factuality element with a specific id from the layer
6,765
def remove_this_factvalue(self, factvalue_id):
    """Remove the factvalue whose id equals *factvalue_id*; no-op when no
    such factvalue exists."""
    match = next((fv for fv in self.get_factvalues()
                  if fv.get_id() == factvalue_id), None)
    if match is not None:
        self.node.remove(match.get_node())
Removes the factvalue for the given factvalue identifier
6,766
def _add_version_to_request(self, url, headers, version):
    """Attach *version* to a request and return the (url, headers) pair.

    When the server requires the version header it goes into a copied
    headers dict as Last-Modified; otherwise it is appended to the url as a
    ``last_modified`` query parameter.
    """
    if self._has_capability(SERVER_REQUIRES_VERSION_HEADER):
        new_headers = headers.copy()
        new_headers['Last-Modified'] = email.utils.formatdate(version)
        return url, new_headers
    else:
        url_params = {'last_modified': email.utils.formatdate(version)}
        new_url = url + "?" + urlencode(url_params)
        return new_url, headers
Adds version to either url or headers depending on protocol .
6,767
def _protocol_version(self):
    """Return (and cache in ``_protocol_ver``) the protocol version to use.

    A 404 from /version/ implies the legacy protocol {1}; a 200 provides the
    server's list.  The highest version supported by both sides wins.
    """
    if hasattr(self, '_protocol_ver'):
        return self._protocol_ver
    response = requests.get(self.base_url + '/version/')
    if response.status_code == 404:
        server_versions = {1}
    elif response.status_code == 200:
        server_versions = set(response.json()['protocol_versions'])
        if not server_versions:
            raise FiletrackerError('Server hasn\'t reported any supported protocols')
    else:
        # NOTE(review): a status that raise_for_status() lets through (e.g.
        # 204) would leave server_versions unbound below -- confirm intended
        response.raise_for_status()
    common_versions = _SUPPORTED_VERSIONS.intersection(server_versions)
    if not common_versions:
        # NOTE(review): message interpolates _PROTOCOL_CAPABILITIES; possibly
        # _SUPPORTED_VERSIONS was meant -- confirm
        raise FiletrackerError(
            'Couldn\'t agree on protocol version: client supports '
            '{}, server supports {}.'.format(_PROTOCOL_CAPABILITIES, server_versions))
    self._protocol_ver = max(common_versions)
    print('Settled for protocol version {}'.format(self._protocol_ver))
    return self._protocol_ver
Returns the protocol version that should be used .
6,768
def run(self):
    """Clean the cache in an infinite loop, sleeping *scan_interval* between
    passes.  Exceptions are logged and swallowed so one failing pass does
    not kill the daemon."""
    logger.info("Starting daemon.")
    while True:
        try:
            self._scan_disk()
            do_cleaning, delete_from_index = self._analyze_file_index()
            if do_cleaning:
                self._clean_cache(delete_from_index)
        except Exception:
            logger.exception("Following exception occurred:")
        sleeping_until_time = datetime.datetime.now() + self.scan_interval
        logger.info("Sleeping until %s.", sleeping_until_time)
        time.sleep(self.scan_interval.total_seconds())
Starts cleaning cache in infinite loop .
6,769
def normalize(path_name, override=None):
    """Expand the user directory and normalize *path_name*; return the
    (normalized_path, identity) pair.  *override* is forwarded to identify()
    to force the path's identity."""
    identity = identify(path_name, override=override)
    new_path_name = os.path.normpath(os.path.expanduser(path_name))
    return new_path_name, identity
Prepares a path name to be worked with . Path name must not be empty . This function will return the normpath ed path and the identity of the path . This function takes an optional overriding argument for the identity .
6,770
def join_ext(name, extension):
    """Join *name* with *extension*, inserting the extension separator (EXT)
    when the extension does not already start with it.

    Fix: the original indexed ``extension[0]``, which raises IndexError on an
    empty extension; ``startswith`` handles that case (yielding
    ``name + EXT``).
    """
    if extension.startswith(EXT):
        return name + extension
    return name + EXT + extension
Joins a given name with an extension. If the extension doesn't start with a '.', one is added for you
6,771
def has_ext(path_name, *, multiple=None, if_all_ext=False):
    """Determine whether *path_name* has an extension.

    With multiple=None any number (>= 1) counts; multiple=True requires more
    than one; multiple=False requires exactly one.  A leading EXT (hidden
    file style) is not counted unless *if_all_ext* is set.
    """
    base = os.path.basename(path_name)
    count = base.count(EXT)
    # NOTE(review): an empty basename (path ending in a separator) would
    # raise IndexError here -- confirm callers never pass one
    if not if_all_ext and base[0] == EXT and count != 0:
        count -= 1
    if multiple is None:
        return count >= 1
    elif multiple:
        return count > 1
    else:
        return count == 1
Determine if the given path name has an extension
6,772
def get_ext(path_name, *, if_all_ext=False):
    """Return the extension of *path_name*, or '' when none can be found.

    With *if_all_ext*, names that are entirely extension (hidden-file style)
    are resolved through the NAME slot of splitext instead.
    """
    if has_ext(path_name):
        return os.path.splitext(path_name)[EXTENSION]
    elif if_all_ext and has_ext(path_name, if_all_ext=True):
        return os.path.splitext(path_name)[NAME]
    else:
        return ''
Get an extension from the given path name . If an extension cannot be found it will return an empty string
6,773
def get_dir(path_name, *, greedy=False, override=None, identity=None):
    """Return the directory part of *path_name*.

    With *greedy*, a path identified as a directory is returned whole
    (normalized) rather than being reduced to its parent via dirname.
    *identity* may be supplied to skip the identify() call.
    """
    if identity is None:
        identity = identify(path_name, override=override)
    path_name = os.path.normpath(path_name)
    if greedy and identity == ISDIR:
        return path_name
    else:
        return os.path.dirname(path_name)
Gets the directory path of the given path name . If the argument greedy is specified as True then if the path name represents a directory itself the function will return the whole path
6,774
def get_system_drives():
    """Return the available drive names on the system as a list.

    On Windows the kernel's logical-drive bitmask is walked letter by
    letter; elsewhere the current drive (or the root separator) is returned.
    """
    drives = []
    if os.name == 'nt':
        import ctypes
        bitmask = ctypes.windll.kernel32.GetLogicalDrives()
        letter = ord('A')
        while bitmask > 0:
            if bitmask & 1:
                name = chr(letter) + ':' + os.sep
                if os.path.isdir(name):
                    drives.append(name)
            bitmask >>= 1
            letter += 1
    else:
        current_drive = get_drive(os.getcwd())
        if current_drive:
            drive = current_drive
        else:
            drive = os.sep
        drives.append(drive)
    return drives
Get the available drive names on the system . Always returns a list .
6,775
def has_suffix(path_name, suffix):
    """Determine whether *path_name* ends with all the components of *suffix*.

    Both arguments are split into components via disintegrate() (a string
    suffix is split first); components are compared from the end backwards,
    and the for/else returns True only when every one matched.
    """
    if isinstance(suffix, str):
        suffix = disintegrate(suffix)
    components = disintegrate(path_name)
    for i in range(-1, -(len(suffix) + 1), -1):
        if components[i] != suffix[i]:
            break
    else:
        return True
    return False
Determines if path_name has a suffix of at least suffix
6,776
def path(path_name=None, override=None, *, root=None, name=None, ext=None,
         inject=None, relpath=None, reduce=False):
    """Path manipulation entry point: rebuild *path_name* with the requested
    name/extension/root/injection changes and return the formatted result.

    The heavy lifting is delegated to the _initialize / _process_name /
    _process_directory / _format_path helpers.
    """
    path_name, identity, root = _initialize(path_name, override, root, inject)
    new_name = _process_name(path_name, identity, name, ext)
    new_directory = _process_directory(path_name, identity, root, inject)
    full_path = os.path.normpath(os.path.join(new_directory, new_name))
    # directories may be marked with a trailing separator
    if APPEND_SEP_TO_DIRS and not new_name and full_path[-1] != os.sep:
        full_path += os.sep
    final_path = _format_path(full_path, root, relpath, reduce)
    return final_path
Path manipulation black magic
6,777
def path(self, **kwargs):
    """Return a new File with the given path changes applied; this object
    itself is not modified."""
    new_path = path(self.getPath(), **kwargs)
    return File(new_path)
Returns a different object with the specified changes applied to it . This object is not changed in the process .
6,778
def isOutDated(self, output_file):
    """Decide whether *output_file* must be (re)built: True when it does not
    exist, or when this source was modified after it."""
    if not output_file.exists():
        return True
    return self.getmtime() > output_file.getmtime()
Figures out if Cyther should compile the given FileInfo object by checking the both of the modified times
6,779
def isUpdated(self):
    """Return True when the file's modification time is newer than the
    recorded stamp."""
    return self.getmtime() > self.__stamp
Figures out if the file had previously errored and hasn t been fixed since given a numerical time
6,780
def get_translations(self, status=None):
    """Return the translations requested by the user, optionally filtered by
    *status*.  A non-200 response is logged and yields an empty list."""
    if status is not None:
        result = self.api_call('translation/?status=%s' % status)
    else:
        result = self.api_call('translation/')
    if result.status_code == 200:
        translations_json = json.loads(result.content)["objects"]
        translations = [Translation(**tj) for tj in translations_json]
    else:
        log.critical('Error status when fetching translation from server: {'
                     '}!'.format(result.status_code))
        translations = []
    return translations
Returns the translations requested by the user
6,781
def get_translation(self, uid):
    """Fetch a single translation by its uid.

    :param uid: identifier interpolated into the ``translation/{uid}/`` endpoint.
    :return: a ``Translation`` built from the JSON response body.
    :raises ValueError: on any non-200 response (the raw response content is
        used as the error message, after logging).
    """
    result = self.api_call('translation/{}/'.format(uid))
    if result.status_code == 200:
        translation = Translation(**json.loads(result.content))
    else:
        log.critical('Error status when fetching translation from server: {'
                     '}!'.format(result.status_code))
        raise ValueError(result.content)
    return translation
Returns a translation with the given id
6,782
def get_language_pairs(self, train_langs=None):
    """Fetch the language pairs available on the service.

    :param train_langs: optional filter forwarded as the ``train_langs``
        query parameter.
    :return: list of ``LangPair`` objects; an empty list when the response
        JSON carries an ``error`` key.
    :raises Exception: re-raises any decoding/parsing error after logging it.
    """
    if train_langs is None:
        result = self.api_call('language_pair/')
    else:
        result = self.api_call(
            'language_pair/?train_langs={}'.format(train_langs))
    try:
        langs_json = json.loads(result.content)
        if 'error' in langs_json:
            return []
        languages = [
            LangPair(
                Language(
                    shortname=lang_json["lang_pair"]["source_language"]["shortname"],
                    name=lang_json["lang_pair"]["source_language"]["name"]),
                Language(
                    shortname=lang_json["lang_pair"]["target_language"]["shortname"],
                    name=lang_json["lang_pair"]["target_language"]["name"]))
            for lang_json in langs_json["objects"]
        ]
    # Fixed: the original used the Python-2-only ``except Exception, e``
    # form, which is a SyntaxError on Python 3.
    except Exception as e:
        log.exception("Error decoding get language pairs")
        # Bare raise preserves the original traceback (``raise e`` resets it).
        raise
    return languages
Returns the language pairs available on unbabel
6,783
def get_tones(self):
    """Fetch the tones available on the service.

    :return: list of ``Tone`` objects built from the ``tone/`` endpoint.
        NOTE(review): unlike get_translations, no status-code check is done
        here — a non-200 response would fail inside json.loads; confirm
        whether that is intended.
    """
    result = self.api_call('tone/')
    tones_json = json.loads(result.content)
    tones = [Tone(name=tone_json["tone"]["name"],
                  description=tone_json["tone"]["description"])
             for tone_json in tones_json["objects"]]
    return tones
Returns the tones available on unbabel
6,784
def get_topics(self):
    """Fetch the topics available on the service.

    :return: list of ``Topic`` objects built from the ``topic/`` endpoint.
        NOTE(review): as with get_tones, no HTTP status check is performed
        before decoding — verify error handling expectations.
    """
    result = self.api_call('topic/')
    topics_json = json.loads(result.content)
    topics = [Topic(name=topic_json["topic"]["name"])
              for topic_json in topics_json["objects"]]
    return topics
Returns the topics available on unbabel
6,785
def rand(self, unique_pstates, random_state=None):
    """Randomize the POHMM parameters in place.

    :param unique_pstates: the distinct partial/observed states used to size
        the parameter arrays.
    :param random_state: optional seed/state forwarded to the RNG setup.
    :return: self, for call chaining.
    """
    self._init_pstates(unique_pstates)
    self._init_random(random_state=random_state)
    self._compute_marginals()
    return self
Randomize the POHMM parameters
6,786
def score_events(self, obs, pstates):
    """Compute the per-event log probability of ``obs`` under the model.

    :param obs: observation sequence.
    :param pstates: parallel sequence of pstate labels, mapped to indices
        through ``self.e``.
    :return: array of length len(obs): the first entry is the log-likelihood
        of the first event, and subsequent entries are successive differences
        of the cumulative forward log-likelihood (i.e. per-event increments).
    """
    pstates_idx = np.array([self.e[p] for p in pstates])
    framelogprob = self._compute_log_likelihood(obs, pstates_idx)
    _, fwdlattice = self._do_forward_pass(framelogprob, pstates_idx)
    # Cumulative log-likelihood up to each event (sum over hidden states).
    L = logsumexp(fwdlattice, axis=1)
    # Convert cumulative values into per-event contributions.
    return np.concatenate([L[[0]], np.diff(L)])
Compute the log probability of each event under the model .
6,787
def predict(self, obs, pstates, next_pstate=None):
    """Predict the expected next observation given a history and next pstate.

    :param obs: observation history (may be empty).
    :param pstates: pstate labels parallel to ``obs``.
    :param next_pstate: the pstate of the event to predict.
    :return: array of expected values, one per feature in
        ``self.emission_name``.
    """
    assert len(obs) == len(pstates)
    pstates_idx = np.array([self.e[ei] for ei in pstates])
    next_pstate_idx = self.e[next_pstate]
    if len(obs) == 0:
        # No history: fall back to the prior over hidden states.
        next_hstate_prob = self.startprob[next_pstate_idx]
    else:
        framelogprob = self._compute_log_likelihood(obs, pstates_idx)
        _, fwdlattice = self._do_forward_pass(framelogprob, pstates_idx)
        next_hstate_prob = np.zeros(self.n_hidden_states)
        # Normalize the final forward-lattice row in probability space;
        # subtracting the max first avoids overflow in exp().
        alpha_n = fwdlattice[-1]
        vmax = alpha_n.max(axis=0)
        alpha_n = np.exp(alpha_n - vmax)
        alpha_n = alpha_n / alpha_n.sum()
        # Mix through the transition matrix for (last pstate -> next pstate).
        trans = self.transmat[pstates_idx[-1], next_pstate_idx]
        for i in range(self.n_hidden_states):
            next_hstate_prob[i] = np.sum([alpha_n[j] * trans[j, i]
                                          for j in range(self.n_hidden_states)])
        # NOTE(review): this only checks the sum does not exceed 1+TOLERANCE;
        # a sum far below 1 would pass — confirm whether abs() was intended.
        assert next_hstate_prob.sum() - 1 < TOLERANCE
    prediction = np.array([self.expected_value(feature, pstate=next_pstate,
                                               hstate_prob=next_hstate_prob)
                           for feature in self.emission_name])
    return prediction
Predict the next observation
6,788
def fit_df(self, dfs, pstate_col=PSTATE_COL):
    """Fit the model from a list of dataframes (convenience wrapper).

    :param dfs: list of pandas DataFrames, each containing the pstate column
        plus the emission feature columns named in ``self.emission_name``.
    :param pstate_col: name of the pstate column to split out.
    :return: whatever ``self.fit`` returns.
    """
    obs_cols = list(self.emission_name)
    # Drop the pstate column, then select the emission columns as raw arrays.
    obs = [df[df.columns.difference([pstate_col])][obs_cols].values
           for df in dfs]
    pstates = [df[pstate_col].values for df in dfs]
    return self.fit(obs, pstates)
Convenience function to fit a model from a list of dataframes
6,789
def sample_df(self, pstates=None, n_obs=None, random_state=None,
              pstate_col=PSTATE_COL, hstate_col=HSTATE_COL):
    """Sample from the model and return the result as a pandas DataFrame.

    :param pstates: optional pstate sequence to condition on.
    :param n_obs: optional number of observations to draw.
    :param random_state: optional RNG seed/state.
    :param pstate_col: column name for pstates; pass None to omit the column.
    :param hstate_col: column name for hidden states; None to omit.
    :return: DataFrame with the optional pstate/hstate columns followed by
        one column per emission feature, in that order.
    :raises ImportError: if pandas is not installed.
    """
    import pandas as pd

    obs, pstates, hstates = self.sample(pstates, n_obs, random_state)
    items = []
    if pstate_col is not None:
        items.append((pstate_col, pstates))
    if hstate_col is not None:
        items.append((hstate_col, hstates))
    items = items + [(self.emission_name[i], obs[:, i])
                     for i in range(self.n_features)]
    # Fixed: pd.DataFrame.from_items was deprecated in pandas 0.23 and
    # removed in 1.0; build the frame directly and pin the column order
    # explicitly (dict alone is insertion-ordered only on Python 3.7+).
    df = pd.DataFrame(dict(items), columns=[name for name, _ in items])
    return df
Convenience function to generate samples a model and create a dataframe
6,790
def main(args=None):
    """Command-line entry point: parse arguments and dispatch to the chosen
    sub-command's handler.

    :param args: argument list; defaults to ``sys.argv[1:]``. An empty list
        is replaced by ``['-h']`` so running with no arguments prints help.
    :return: whatever the dispatched handler returns.
    """
    argv = sys.argv[1:] if args is None else args
    if not argv:
        argv = ['-h']
    ns = parser.parse_args(argv)
    # 'func' is set by the sub-parser configuration; everything else in the
    # namespace becomes keyword arguments for it.
    func = ns.func
    del ns.func
    return func(**vars(ns))
Entry point for cyther - script generated by setup . py on installation
6,791
def get_ldap_users(self):
    """Retrieve user data from the LDAP server.

    :return: ``(uri, users)`` tuple from ``ldap_search``, or ``(None, None)``
        when user synchronization is disabled by configuration.
    """
    if (not self.conf_LDAP_SYNC_USER):
        return (None, None)
    # Requested attributes = mapped attributes + extra passthrough attributes.
    user_keys = set(self.conf_LDAP_SYNC_USER_ATTRIBUTES.keys())
    user_keys.update(self.conf_LDAP_SYNC_USER_EXTRA_ATTRIBUTES)
    uri_users_server, users = self.ldap_search(
        self.conf_LDAP_SYNC_USER_FILTER, user_keys,
        self.conf_LDAP_SYNC_USER_INCREMENTAL,
        self.conf_LDAP_SYNC_USER_FILTER_INCREMENTAL)
    logger.debug("Retrieved %d users from %s LDAP server"
                 % (len(users), uri_users_server))
    return (uri_users_server, users)
Retrieve user data from LDAP server .
6,792
def get_ldap_groups(self):
    """Retrieve groups from the LDAP server.

    :return: ``(uri, groups)`` tuple from ``ldap_search``, or ``(None, None)``
        when group synchronization is disabled by configuration.
    """
    if (not self.conf_LDAP_SYNC_GROUP):
        return (None, None)
    uri_groups_server, groups = self.ldap_search(
        self.conf_LDAP_SYNC_GROUP_FILTER,
        self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys(),
        self.conf_LDAP_SYNC_GROUP_INCREMENTAL,
        self.conf_LDAP_SYNC_GROUP_FILTER_INCREMENTAL)
    logger.debug("Retrieved %d groups from %s LDAP server"
                 % (len(groups), uri_groups_server))
    return (uri_groups_server, groups)
Retrieve groups from LDAP server .
6,793
def get_ldap_user_membership(self, user_dn):
    """Retrieve a user's group membership from the LDAP server.

    :param user_dn: the user's distinguished name, substituted into the
        configured membership filter template.
    :return: ``(uri, groups)`` tuple on success, or ``None`` when the search
        fails (the error is logged).
    """
    # Backslash-escape parentheses in the DN before embedding it in the
    # filter. Fixed: the original wrote "\(" / "\)", which are invalid
    # escape sequences (DeprecationWarning today, error in future Pythons);
    # "\\(" produces the identical runtime string.
    # NOTE(review): full RFC 4515 filter escaping would use \28/\29 and also
    # cover '*' and '\' — confirm whether DNs here can contain those.
    membership_filter = self.conf_LDAP_SYNC_GROUP_MEMBERSHIP_FILTER.replace(
        '{distinguishedName}',
        user_dn.replace('(', "\\(").replace(')', "\\)"))
    try:
        uri, groups = self.ldap_search(
            membership_filter,
            self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys(),
            False, membership_filter)
    except Exception:
        logger.error("Error reading membership: Filter %s, Keys %s"
                     % (membership_filter,
                        str(self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys())))
        return None
    return (uri, groups)
Retrieve user membership from LDAP server .
6,794
def sync_ldap_user_membership(self, user, ldap_groups):
    """Synchronize a Django user's group membership with LDAP membership.

    Adds the user to every LDAP-derived group (creating missing groups when
    configured to), removes the user from Django groups no longer present in
    LDAP, and accumulates counters into ``self.stats_membership_*``.

    :param user: Django User instance to synchronize.
    :param ldap_groups: list of ``(cname, attributes)`` tuples from LDAP.
    """
    groupname_field = 'name'
    actualGroups = user.groups.values_list('name', flat=True)
    user_Membership_total = len(ldap_groups)
    user_Membership_added = 0
    user_Membership_deleted = 0
    user_Membership_errors = 0
    # Configured default groups are always treated as LDAP membership.
    ldap_groups += self.conf_LDAP_SYNC_GROUP_MEMBERSHIP_ADD_DEFAULT
    ldap_name_groups = []
    for cname, ldap_attributes in ldap_groups:
        defaults = {}
        try:
            # Map raw LDAP attributes onto Django Group field names.
            for name, attribute in ldap_attributes.items():
                defaults[self.conf_LDAP_SYNC_GROUP_ATTRIBUTES[name]] = \
                    attribute[0].decode('utf-8')
        except AttributeError:
            # Entry without a usable attributes dict (e.g. a referral); skip.
            continue
        try:
            groupname = defaults[groupname_field]
            ldap_name_groups.append(groupname)
        except KeyError:
            logger.warning("Group is missing a required attribute '%s'"
                           % groupname_field)
            user_Membership_errors += 1
            continue
        if (groupname not in actualGroups):
            kwargs = {
                groupname_field + '__iexact': groupname,
                'defaults': defaults,
            }
            try:
                if (self.conf_LDAP_SYNC_GROUP_MEMBERSHIP_CREATE_IF_NOT_EXISTS):
                    group, created = Group.objects.get_or_create(**kwargs)
                else:
                    group = Group.objects.get(name=groupname)
                    created = False
            except (ObjectDoesNotExist):
                # Group missing and creation disabled: skip silently.
                continue
            except (IntegrityError, DataError) as e:
                logger.error("Error creating group %s: %s" % (groupname, e))
                user_Membership_errors += 1
            else:
                if created:
                    logger.debug("Created group %s" % groupname)
                group.user_set.add(user)
                user_Membership_added += 1
    # Remove membership that exists in Django but not in LDAP.
    for check_group in actualGroups:
        if (check_group not in ldap_name_groups):
            group = Group.objects.get(name=check_group)
            group.user_set.remove(user)
            user_Membership_deleted += 1
    if ((user_Membership_deleted > 0) or (user_Membership_added > 0)):
        # NOTE(review): this saves whichever 'group' was bound last in the
        # loops above, not every touched group — confirm this is intended
        # (m2m add/remove normally persist without an explicit save()).
        group.save()
        logger.info("Group membership for user %s synchronized: "
                    "%d Added, %d Removed"
                    % (user.username, user_Membership_added,
                       user_Membership_deleted))
    self.stats_membership_total += user_Membership_total
    self.stats_membership_added += user_Membership_added
    self.stats_membership_deleted += user_Membership_deleted
    self.stats_membership_errors += user_Membership_errors
Synchronize LDAP membership to Django membership
6,795
def ldap_search(self, filter, attributes, incremental, incremental_filter):
    """Query the configured LDAP servers with the given filter and attributes.

    Tries each URI in ``conf_LDAP_SYNC_BIND_URI`` in order and returns results
    from the first server that accepts the bind; the working URI is promoted
    to the front of the list for subsequent calls.

    :param filter: the full LDAP search filter.
    :param attributes: attribute names to request.
    :param incremental: when True and a full sync has already happened,
        use ``incremental_filter`` instead of ``filter``.
    :param incremental_filter: filter template whose '?' placeholder is
        replaced with the last-changed timestamp.
    :return: ``(uri, results)`` for the first reachable server.
    """
    for uri in self.conf_LDAP_SYNC_BIND_URI:
        # Reuse the cached sync record for the known-working server.
        if (self.working_uri == uri):
            adldap_sync = self.working_adldap_sync
            created = False
        else:
            adldap_sync, created = ADldap_Sync.objects.get_or_create(
                ldap_sync_uri=uri)
        if ((adldap_sync.syncs_to_full > 0) and incremental):
            filter_to_use = incremental_filter.replace(
                '?', self.whenchanged.strftime(
                    self.conf_LDAP_SYNC_INCREMENTAL_TIMESTAMPFORMAT))
            logger.debug("Using an incremental search. Filter is:'%s'"
                         % filter_to_use)
        else:
            filter_to_use = filter
        ldap.set_option(ldap.OPT_REFERRALS, 0)
        l = PagedLDAPObject(uri)
        l.protocol_version = 3
        # TLS options depend on the URI scheme: demand TLS for ldaps://,
        # disable it otherwise.
        if (uri.startswith('ldaps:')):
            l.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_DEMAND)
            l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND)
            l.set_option(ldap.OPT_X_TLS_DEMAND, True)
        else:
            l.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_NEVER)
            l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
            l.set_option(ldap.OPT_X_TLS_DEMAND, False)
        try:
            l.simple_bind_s(self.conf_LDAP_SYNC_BIND_DN,
                            self.conf_LDAP_SYNC_BIND_PASS)
        except ldap.LDAPError as e:
            # Bind failed: log and fall through to the next configured URI.
            logger.error("Error connecting to LDAP server %s : %s" % (uri, e))
            continue
        results = l.paged_search_ext_s(self.conf_LDAP_SYNC_BIND_SEARCH,
                                       ldap.SCOPE_SUBTREE, filter_to_use,
                                       attrlist=attributes, serverctrls=None)
        l.unbind_s()
        if (self.working_uri is None):
            # Remember this server and try it first next time.
            self.working_uri = uri
            self.conf_LDAP_SYNC_BIND_URI.insert(0, uri)
            self.working_adldap_sync = adldap_sync
        return (uri, results)
    # NOTE(review): a bare 'raise' with no active exception produces a
    # RuntimeError when every server fails to bind — likely a placeholder;
    # consider raising an explicit "no LDAP server reachable" error.
    raise
Query the configured LDAP server with the provided search filter and attribute list .
6,796
def sliver_reader(filename_end_mask="*[0-9].mhd",
                  sliver_reference_dir="~/data/medical/orig/sliver07/training/",
                  read_orig=True, read_seg=False):
    """Generator yielding Sliver07 cases from a directory structure.

    :param filename_end_mask: glob suffix appended after "*orig"/"*seg".
    :param sliver_reference_dir: directory containing the dataset; '~' is
        expanded.
    :param read_orig: when True, load each original volume via io3d.
    :param read_seg: when True, load each segmentation volume via io3d.
    :yield: ``(numeric_label, vs_mm, oname, orig_data, rname, ref_data)``;
        unloaded data and voxel size are None.
    """
    # Hoisted out of the loop (was re-imported every iteration).
    import re

    sliver_reference_dir = op.expanduser(sliver_reference_dir)
    # Sort so orig/seg files pair up by case number; the two lists are
    # expected to have matching lengths.
    orig_fnames = sorted(glob.glob(
        sliver_reference_dir + "*orig" + filename_end_mask))
    ref_fnames = sorted(glob.glob(
        sliver_reference_dir + "*seg" + filename_end_mask))
    for oname, rname in zip(orig_fnames, ref_fnames):
        vs_mm = None
        ref_data = None
        orig_data = None
        if read_orig:
            orig_data, metadata = io3d.datareader.read(oname)
            vs_mm = metadata['voxelsize_mm']
        if read_seg:
            ref_data, metadata = io3d.datareader.read(rname)
            vs_mm = metadata['voxelsize_mm']
        # Case number = digits following the last 'g' (…orig<NNN>.mhd).
        # Raw string fixes the invalid "\d" escape in the original.
        numeric_label = re.search(r".*g(\d+)", oname).group(1)
        yield (numeric_label, vs_mm, oname, orig_data, rname, ref_data)
Generator for reading sliver data from directory structure .
6,797
def make_gunicorn_config(_gunicorn_config_path='', ):
    """Write the gunicorn config file used for launching in docker.

    :param _gunicorn_config_path: destination path; when empty, the file is
        written as 'gunicorn.conf' in the current directory.
    """
    content = 'gunicorn.conf'
    target = _gunicorn_config_path or 'gunicorn.conf'
    with open(target, 'w') as cfg:
        cfg.write(content)
makes gunicorn . conf file for launching in docker
6,798
def _print_details ( extra = None ) : def print_node_handler ( name , node , depth ) : line = "{0}{1} {2} ({3}:{4})" . format ( depth , ( " " * depth ) , name , node . line , node . col ) if extra is not None : line += " [{0}]" . format ( extra ( node ) ) sys . stdout . write ( line + "\n" ) return print_node_handler
Return a function that prints node details .
6,799
def do_print(filename):
    """Parse a CMake file and print its AST, one line per visited node.

    :param filename: path of the CMake file to parse.
    """
    with open(filename) as cmake_file:
        body = ast.parse(cmake_file.read())
    # Words additionally show their token type and contents.
    word_print = _print_details(lambda n: "{0} {1}".format(n.type,
                                                           n.contents))
    ast_visitor.recurse(body,
                        while_stmnt=_print_details(),
                        foreach=_print_details(),
                        function_def=_print_details(),
                        macro_def=_print_details(),
                        if_block=_print_details(),
                        if_stmnt=_print_details(),
                        elseif_stmnt=_print_details(),
                        else_stmnt=_print_details(),
                        function_call=_print_details(lambda n: n.name),
                        word=word_print)
Print the AST of filename .