idx | question | target |
|---|---|---|
14,300 | def get_t ( self ) : if isinstance ( self . __t , int ) is False : raise TypeError ( "The type of __t must be int." ) return self . __t | getter Time . |
14,301 | def set_t ( self , value ) : if isinstance ( value , int ) is False : raise TypeError ( "The type of __t must be int." ) self . __t = value | setter Time . |
14,302 | def update_q ( self , state_key , action_key , reward_value , next_max_q ) : q = self . extract_q_df ( state_key , action_key ) new_q = q + self . alpha_value * ( reward_value + ( self . gamma_value * next_max_q ) - q ) self . save_q_df ( state_key , action_key , new_q ) | Update Q - Value . |
14,303 | def predict_next_action ( self , state_key , next_action_list ) : if self . q_df is not None : next_action_q_df = self . q_df [ self . q_df . state_key == state_key ] next_action_q_df = next_action_q_df [ next_action_q_df . action_key . isin ( next_action_list ) ] if next_action_q_df . shape [ 0 ] == 0 : return random ... | Predict next action by Q - Learning . |
14,304 | def pull ( self , arm_id , success , failure ) : self . __beta_dist_dict [ arm_id ] . observe ( success , failure ) | Pull arms . |
14,305 | def recommend ( self , limit = 10 ) : expected_list = [ ( arm_id , beta_dist . expected_value ( ) ) for arm_id , beta_dist in self . __beta_dist_dict . items ( ) ] expected_list = sorted ( expected_list , key = lambda x : x [ 1 ] , reverse = True ) return expected_list [ : limit ] | List up arms and their expected values . |
14,306 | def get_time_rate ( self ) : if isinstance ( self . __time_rate , float ) is False : raise TypeError ( "The type of __time_rate must be float." ) if self . __time_rate <= 0.0 : raise ValueError ( "The value of __time_rate must be greater than 0.0" ) return self . __time_rate | getter Time rate . |
14,307 | def set_time_rate ( self , value ) : if isinstance ( value , float ) is False : raise TypeError ( "The type of __time_rate must be float." ) if value <= 0.0 : raise ValueError ( "The value of __time_rate must be greater than 0.0" ) self . __time_rate = value | setter Time rate . |
14,308 | def __calculate_sigmoid ( self ) : sigmoid = 1 / np . log ( self . t * self . time_rate + 1.1 ) return sigmoid | Function of temperature . |
14,309 | def __calculate_boltzmann_factor ( self , state_key , next_action_list ) : sigmoid = self . __calculate_sigmoid ( ) q_df = self . q_df [ self . q_df . state_key == state_key ] q_df = q_df [ q_df . isin ( next_action_list ) ] q_df [ "boltzmann_factor" ] = q_df [ "q_value" ] / sigmoid q_df [ "boltzmann_factor" ] = q_df [... | Calculate Boltzmann factor . |
14,310 | def get_model ( self ) : class Model ( object ) : def __init__ ( self , lstm_model ) : self . lstm_model = lstm_model return Model ( self . __lstm_model ) | object of model as a function approximator which has lstm_model whose type is pydbm . rnn . lstm_model . LSTMModel . |
14,311 | def filter ( self , scored_list ) : if len ( scored_list ) > 0 : avg = np . mean ( [ s [ 1 ] for s in scored_list ] ) std = np . std ( [ s [ 1 ] for s in scored_list ] ) else : avg = 0 std = 0 limiter = avg + 0.5 * std mean_scored = [ ( sent_idx , score ) for ( sent_idx , score ) in scored_list if score > limiter ] ret... | Filtering with std . |
14,312 | def search ( self , query , fields = None , page = 1 , max_records = None , flatten = True ) : if fields is None : fields = [ ] page = int ( page ) pages = float ( 'inf' ) data = { "query" : query , "page" : page , "fields" : fields , "flatten" : flatten } count = 0 while page <= pages : payload = self . _post ( self .... | returns iterator over all records that match the given query |
14,313 | def adjustColors ( self , mode = 'dark' ) : rp = Game . __color_modes . get ( mode , { } ) for k , color in self . __colors . items ( ) : self . __colors [ k ] = rp . get ( color , color ) | Change a few colors depending on the mode to use . The default mode doesn't assume anything and avoids using white & black colors . The dark mode uses white and avoids dark blue , while the light mode uses black and avoids yellow , to give a few examples . |
14,314 | def loadBestScore ( self ) : try : with open ( self . scores_file , 'r' ) as f : self . best_score = int ( f . readline ( ) , 10 ) except : return False return True | load local best score from the default file |
14,315 | def saveBestScore ( self ) : if self . score > self . best_score : self . best_score = self . score try : with open ( self . scores_file , 'w' ) as f : f . write ( str ( self . best_score ) ) except : return False return True | save current best score in the default file |
14,316 | def incScore ( self , pts ) : self . score += pts if self . score > self . best_score : self . best_score = self . score | update the current score by adding the specified number of points to it |
14,317 | def store ( self ) : size = self . board . SIZE cells = [ ] for i in range ( size ) : for j in range ( size ) : cells . append ( str ( self . board . getCell ( j , i ) ) ) score_str = "%s\n%d" % ( ' ' . join ( cells ) , self . score ) try : with open ( self . store_file , 'w' ) as f : f . write ( score_str ) except : r... | save the current game session's score and data for further use |
14,318 | def restore ( self ) : size = self . board . SIZE try : with open ( self . store_file , 'r' ) as f : lines = f . readlines ( ) score_str = lines [ 0 ] self . score = int ( lines [ 1 ] ) except : return False score_str_list = score_str . split ( ' ' ) count = 0 for i in range ( size ) : for j in range ( size ) : value =... | restore the saved game score and data |
14,319 | def loop ( self ) : pause_key = self . board . PAUSE margins = { 'left' : 4 , 'top' : 4 , 'bottom' : 4 } atexit . register ( self . showCursor ) try : self . hideCursor ( ) while True : self . clearScreen ( ) print ( self . __str__ ( margins = margins ) ) if self . board . won ( ) or not self . board . canMove ( ) : br... | main game loop . returns the final score . |
14,320 | def getCellStr ( self , x , y ) : c = self . board . getCell ( x , y ) if c == 0 : return '.' if self . __azmode else ' .' elif self . __azmode : az = { } for i in range ( 1 , int ( math . log ( self . board . goal ( ) , 2 ) ) ) : az [ 2 ** i ] = chr ( i + 96 ) if c not in az : return '?' s = az [ c ] elif c == 1024 :... | return a string representation of the cell located at x y . |
14,321 | def boardToString ( self , margins = None ) : if margins is None : margins = { } b = self . board rg = range ( b . size ( ) ) left = ' ' * margins . get ( 'left' , 0 ) s = '\n' . join ( [ left + ' ' . join ( [ self . getCellStr ( x , y ) for x in rg ] ) for y in rg ] ) return s | return a string representation of the current board . |
14,322 | def canMove ( self ) : if not self . filled ( ) : return True for y in self . __size_range : for x in self . __size_range : c = self . getCell ( x , y ) if ( x < self . __size - 1 and c == self . getCell ( x + 1 , y ) ) or ( y < self . __size - 1 and c == self . getCell ( x , y + 1 ) ) : return True return False | test if a move is possible |
14,323 | def setCell ( self , x , y , v ) : self . cells [ y ] [ x ] = v | set the cell value at x y |
14,324 | def getCol ( self , x ) : return [ self . getCell ( x , i ) for i in self . __size_range ] | return the x - th column starting at 0 |
14,325 | def setCol ( self , x , l ) : for i in xrange ( 0 , self . __size ) : self . setCell ( x , i , l [ i ] ) | set the x - th column starting at 0 |
14,326 | def __collapseLineOrCol ( self , line , d ) : if ( d == Board . LEFT or d == Board . UP ) : inc = 1 rg = xrange ( 0 , self . __size - 1 , inc ) else : inc = - 1 rg = xrange ( self . __size - 1 , 0 , inc ) pts = 0 for i in rg : if line [ i ] == 0 : continue if line [ i ] == line [ i + inc ] : v = line [ i ] * 2 if v == ... | Merge tiles in a line or column according to a direction and return a tuple with the new line and the score for the move on this line |
14,327 | def move ( self , d , add_tile = True ) : if d == Board . LEFT or d == Board . RIGHT : chg , get = self . setLine , self . getLine elif d == Board . UP or d == Board . DOWN : chg , get = self . setCol , self . getCol else : return 0 moved = False score = 0 for i in self . __size_range : origin = get ( i ) line = self .... | move and return the move score |
14,328 | def parse_cli_args ( ) : parser = argparse . ArgumentParser ( description = '2048 in your terminal' ) parser . add_argument ( '--mode' , dest = 'mode' , type = str , default = None , help = 'colors mode (dark or light)' ) parser . add_argument ( '--az' , dest = 'azmode' , action = 'store_true' , help = 'Use the letters... | parse args from the CLI and return a dict |
14,329 | def start_game ( debug = False ) : args = parse_cli_args ( ) if args [ 'version' ] : print_version_and_exit ( ) if args [ 'rules' ] : print_rules_and_exit ( ) game = Game ( ** args ) if args [ 'resume' ] : game . restore ( ) if debug : return game return game . loop ( ) | Start a new game . If debug is set to True , the game object is returned and the game loop isn't fired . |
14,330 | def on_message ( self , message ) : message = ObjectDict ( escape . json_decode ( message ) ) if message . command == 'hello' : handshake = { 'command' : 'hello' , 'protocols' : [ 'http://livereload.com/protocols/official-7' , ] , 'serverName' : 'livereload-tornado' , } self . send_message ( handshake ) if message . co... | Handshake with livereload . js |
14,331 | def get_content_modified_time ( cls , abspath ) : stat_result = os . stat ( abspath ) modified = datetime . datetime . utcfromtimestamp ( stat_result [ stat . ST_MTIME ] ) return modified | Returns the time that abspath was last modified . |
14,332 | def ignore ( self , filename ) : _ , ext = os . path . splitext ( filename ) return ext in [ '.pyc' , '.pyo' , '.o' , '.swp' ] | Ignore a given filename or not . |
14,333 | def watch ( self , path , func = None , delay = 0 , ignore = None ) : self . _tasks [ path ] = { 'func' : func , 'delay' : delay , 'ignore' : ignore , } | Add a task to watcher . |
14,334 | def already_coords ( self , address ) : m = re . search ( self . COORD_MATCH , address ) return ( m != None ) | test whether we already have coordinates or an address |
14,335 | def coords_string_parser ( self , coords ) : lat , lon = coords . split ( ',' ) return { "lat" : lat . strip ( ) , "lon" : lon . strip ( ) , "bounds" : { } } | Parses the coordinate string into coordinates to match the address_to_coords return object |
14,336 | def address_to_coords ( self , address ) : base_coords = self . BASE_COORDS [ self . region ] get_cord = self . COORD_SERVERS [ self . region ] url_options = { "q" : address , "lang" : "eng" , "origin" : "livemap" , "lat" : base_coords [ "lat" ] , "lon" : base_coords [ "lon" ] } response = requests . get ( self . WAZE_... | Convert address to coordinates |
14,337 | def get_route ( self , npaths = 1 , time_delta = 0 ) : routing_server = self . ROUTING_SERVERS [ self . region ] url_options = { "from" : "x:%s y:%s" % ( self . start_coords [ "lon" ] , self . start_coords [ "lat" ] ) , "to" : "x:%s y:%s" % ( self . end_coords [ "lon" ] , self . end_coords [ "lat" ] ) , "at" : time_del... | Get route data from waze |
14,338 | def _add_up_route ( self , results , real_time = True , stop_at_bounds = False ) : start_bounds = self . start_coords [ 'bounds' ] end_bounds = self . end_coords [ 'bounds' ] def between ( target , min , max ) : return target > min and target < max time = 0 distance = 0 for segment in results : if stop_at_bounds and se... | Calculate route time and distance . |
14,339 | def calc_route_info ( self , real_time = True , stop_at_bounds = False , time_delta = 0 ) : route = self . get_route ( 1 , time_delta ) results = route [ 'results' ] route_time , route_distance = self . _add_up_route ( results , real_time = real_time , stop_at_bounds = stop_at_bounds ) self . log . info ( 'Time %.2f mi... | Calculate best route info . |
14,340 | def calc_all_routes_info ( self , npaths = 3 , real_time = True , stop_at_bounds = False , time_delta = 0 ) : routes = self . get_route ( npaths , time_delta ) results = { route [ 'routeName' ] : self . _add_up_route ( route [ 'results' ] , real_time = real_time , stop_at_bounds = stop_at_bounds ) for route in routes }... | Calculate all route infos . |
14,341 | def _initialize ( self ) : self . _key_prefix = self . _config . get ( 'redis' , 'key_prefix' ) self . _job_expire_interval = int ( self . _config . get ( 'sharq' , 'job_expire_interval' ) ) self . _default_job_requeue_limit = int ( self . _config . get ( 'sharq' , 'default_job_requeue_limit' ) ) redis_connection_type ... | Read the SharQ configuration and set appropriate variables . Open a redis connection pool and load all the Lua scripts . |
14,342 | def _load_config ( self ) : self . _config = ConfigParser . SafeConfigParser ( ) self . _config . read ( self . config_path ) | Read the configuration file and load it into memory . |
14,343 | def _load_lua_scripts ( self ) : lua_script_path = os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'scripts/lua' ) with open ( os . path . join ( lua_script_path , 'enqueue.lua' ) , 'r' ) as enqueue_file : self . _lua_enqueue_script = enqueue_file . read ( ) self . _lua_enqueue = self . _r... | Loads all lua scripts required by SharQ . |
14,344 | def enqueue ( self , payload , interval , job_id , queue_id , queue_type = 'default' , requeue_limit = None ) : if not is_valid_interval ( interval ) : raise BadArgumentException ( '`interval` has an invalid value.' ) if not is_valid_identifier ( job_id ) : raise BadArgumentException ( '`job_id` has an invalid value.' ... | Enqueues the job into the specified queue_id of a particular queue_type |
14,345 | def dequeue ( self , queue_type = 'default' ) : if not is_valid_identifier ( queue_type ) : raise BadArgumentException ( '`queue_type` has an invalid value.' ) timestamp = str ( generate_epoch ( ) ) keys = [ self . _key_prefix , queue_type ] args = [ timestamp , self . _job_expire_interval ] dequeue_response = self . _... | Dequeues a job from any of the ready queues based on the queue_type . If no job is ready returns a failure status . |
14,346 | def interval ( self , interval , queue_id , queue_type = 'default' ) : if not is_valid_interval ( interval ) : raise BadArgumentException ( '`interval` has an invalid value.' ) if not is_valid_identifier ( queue_id ) : raise BadArgumentException ( '`queue_id` has an invalid value.' ) if not is_valid_identifier ( queue_... | Updates the interval for a specific queue_id of a particular queue type . |
14,347 | def is_valid_identifier ( identifier ) : if not isinstance ( identifier , basestring ) : return False if len ( identifier ) > 100 or len ( identifier ) < 1 : return False condensed_form = set ( list ( identifier . lower ( ) ) ) return condensed_form . issubset ( VALID_IDENTIFIER_SET ) | Checks if the given identifier is valid or not . A valid identifier may consist of the following characters , with a maximum length of 100 characters and a minimum of 1 character . |
14,348 | def is_valid_interval ( interval ) : if not isinstance ( interval , ( int , long ) ) : return False if interval <= 0 : return False return True | Checks if the given interval is valid . A valid interval is always a positive non - zero integer value . |
14,349 | def is_valid_requeue_limit ( requeue_limit ) : if not isinstance ( requeue_limit , ( int , long ) ) : return False if requeue_limit <= - 2 : return False return True | Checks if the given requeue limit is valid . A valid requeue limit is always greater than or equal to - 1 . |
14,350 | def get_search_names ( name ) : parts = re . split ( '[-_.]' , name ) if len ( parts ) == 1 : return parts result = set ( ) for i in range ( len ( parts ) - 1 , 0 , - 1 ) : for s1 in '-_.' : prefix = s1 . join ( parts [ : i ] ) for s2 in '-_.' : suffix = s2 . join ( parts [ i : ] ) for s3 in '-_.' : result . add ( s3 .... | Return a list of values to search on when we are looking for a package with the given name . |
14,351 | def alter_old_distutils_request ( request : WSGIRequest ) : body = request . body if request . POST or request . FILES : return new_body = BytesIO ( ) content_type , opts = parse_header ( request . META [ 'CONTENT_TYPE' ] . encode ( 'ascii' ) ) parts = body . split ( b'\n--' + opts [ 'boundary' ] + b'\n' ) for part in ... | Alter the request body for compatibility with older distutils clients |
14,352 | def delete_files ( sender , ** kwargs ) : instance = kwargs [ 'instance' ] if not hasattr ( instance . distribution , 'path' ) : return if not os . path . exists ( instance . distribution . path ) : return is_referenced = ( instance . __class__ . objects . filter ( distribution = instance . distribution ) . exclude ( p... | Signal callback for deleting old files when database item is deleted |
14,353 | def md5_hash_file ( fh ) : md5 = hashlib . md5 ( ) while True : data = fh . read ( 8192 ) if not data : break md5 . update ( data ) return md5 . hexdigest ( ) | Return the md5 hash of the given file - object |
14,354 | def get_versio_versioning_scheme ( full_class_path ) : module_path = '.' . join ( full_class_path . split ( '.' ) [ 0 : - 1 ] ) class_name = full_class_path . split ( '.' ) [ - 1 ] try : module = importlib . import_module ( module_path ) except ImportError : raise RuntimeError ( 'Invalid specified Versio schema {}' . f... | Return a class based on its full path |
14,355 | def search ( spec , operator = 'and' ) : field_map = { 'name' : 'name__icontains' , 'summary' : 'releases__summary__icontains' , } query_filter = None for field , values in spec . items ( ) : for value in values : if field not in field_map : continue field_filter = Q ( ** { field_map [ field ] : value } ) if not query_... | Implement xmlrpc search command . |
14,356 | def credentials_required ( view_func ) : @ wraps ( view_func , assigned = available_attrs ( view_func ) ) def decorator ( request , * args , ** kwargs ) : if settings . LOCALSHOP_USE_PROXIED_IP : try : ip_addr = request . META [ 'HTTP_X_FORWARDED_FOR' ] except KeyError : return HttpResponseForbidden ( 'No permission' )... | This decorator should be used with views that need simple authentication against Django's authentication framework . |
14,357 | def no_duplicates ( function , * args , ** kwargs ) : @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : key = generate_key ( function , * args , ** kwargs ) try : function ( self , * args , ** kwargs ) finally : logging . info ( 'Removing key %s' , key ) cache . delete ( key ) return wrapper | Makes sure that no duplicated tasks are enqueued . |
14,358 | def download_file ( pk ) : release_file = models . ReleaseFile . objects . get ( pk = pk ) logging . info ( "Downloading %s" , release_file . url ) proxies = None if settings . LOCALSHOP_HTTP_PROXY : proxies = settings . LOCALSHOP_HTTP_PROXY response = requests . get ( release_file . url , stream = True , proxies = pro... | Download the file referenced in models . ReleaseFile with the given pk . |
14,359 | def handle_register_or_upload ( post_data , files , user , repository ) : name = post_data . get ( 'name' ) version = post_data . get ( 'version' ) if settings . LOCALSHOP_VERSIONING_TYPE : scheme = get_versio_versioning_scheme ( settings . LOCALSHOP_VERSIONING_TYPE ) try : Version ( version , scheme = scheme ) except ... | Process a register or upload command issued via distutils . |
14,360 | def download ( self ) : from . tasks import download_file if not settings . LOCALSHOP_ISOLATED : download_file . delay ( pk = self . pk ) else : download_file ( pk = self . pk ) | Start a celery task to download the release file from pypi . |
14,361 | def dispatch_queue ( loader ) : queue = loader . _queue loader . _queue = [ ] max_batch_size = loader . max_batch_size if max_batch_size and max_batch_size < len ( queue ) : chunks = get_chunks ( queue , max_batch_size ) for chunk in chunks : dispatch_queue_batch ( loader , chunk ) else : dispatch_queue_batch ( loader ... | Given the current state of a Loader instance perform a batch load from its current queue . |
14,362 | def failed_dispatch ( loader , queue , error ) : for l in queue : loader . clear ( l . key ) l . reject ( error ) | Do not cache individual loads if the entire batch dispatch fails but still reject each request so they do not hang . |
14,363 | def load ( self , key = None ) : if key is None : raise TypeError ( ( "The loader.load() function must be called with a value," + "but got: {}." ) . format ( key ) ) cache_key = self . get_cache_key ( key ) if self . cache : cached_promise = self . _promise_cache . get ( cache_key ) if cached_promise : return cached_pr... | Loads a key returning a Promise for the value represented by that key . |
14,364 | def load_many ( self , keys ) : if not isinstance ( keys , Iterable ) : raise TypeError ( ( "The loader.loadMany() function must be called with Array<key> " + "but got: {}." ) . format ( keys ) ) return Promise . all ( [ self . load ( key ) for key in keys ] ) | Loads multiple keys promising an array of values |
14,365 | def clear ( self , key ) : cache_key = self . get_cache_key ( key ) self . _promise_cache . pop ( cache_key , None ) return self | Clears the value at key from the cache if it exists . Returns itself for method chaining . |
14,366 | def prime ( self , key , value ) : cache_key = self . get_cache_key ( key ) if cache_key not in self . _promise_cache : if isinstance ( value , Exception ) : promise = Promise . reject ( value ) else : promise = Promise . resolve ( value ) self . _promise_cache [ cache_key ] = promise return self | Adds the provided key and value to the cache . If the key already exists , no change is made . Returns itself for method chaining . |
14,367 | def get_complete_version ( version = None ) : if version is None : from promise import VERSION return VERSION else : assert len ( version ) == 5 assert version [ 3 ] in ( "alpha" , "beta" , "rc" , "final" ) return version | Returns a tuple of the promise version . If version argument is non - empty then checks for correctness of the tuple provided . |
14,368 | def _xcorr_interp ( ccc , dt ) : if ccc . shape [ 0 ] == 1 : cc = ccc [ 0 ] else : cc = ccc cc_curvature = np . concatenate ( ( np . zeros ( 1 ) , np . diff ( cc , 2 ) , np . zeros ( 1 ) ) ) cc_t = np . arange ( 0 , len ( cc ) * dt , dt ) peak_index = cc . argmax ( ) first_sample = peak_index while first_sample > 0 and... | Interpolate around the maximum correlation value for sub - sample precision . |
14,369 | def _day_loop ( detection_streams , template , min_cc , detections , horizontal_chans , vertical_chans , interpolate , cores , parallel , debug = 0 ) : if len ( detection_streams ) == 0 : return Catalog ( ) if not cores : num_cores = cpu_count ( ) else : num_cores = cores if num_cores > len ( detection_streams ) : num_... | Function to loop through multiple detections for one template . |
14,370 | def read_trigger_parameters ( filename ) : parameters = [ ] f = open ( filename , 'r' ) print ( 'Reading parameters with the following header:' ) for line in f : if line [ 0 ] == '#' : print ( line . rstrip ( '\n' ) . lstrip ( '\n' ) ) else : parameter_dict = ast . literal_eval ( line ) trig_par = TriggerParameters ( p... | Read the trigger parameters into trigger_parameter classes . |
14,371 | def _channel_loop ( tr , parameters , max_trigger_length = 60 , despike = False , debug = 0 ) : for par in parameters : if par [ 'station' ] == tr . stats . station and par [ 'channel' ] == tr . stats . channel : parameter = par break else : msg = 'No parameters set for station ' + str ( tr . stats . station ) warnings... | Internal loop for parallel processing . |
14,372 | def write ( self , filename , append = True ) : header = ' ' . join ( [ '# User:' , getpass . getuser ( ) , '\n# Creation date:' , str ( UTCDateTime ( ) ) , '\n# EQcorrscan version:' , str ( eqcorrscan . __version__ ) , '\n\n\n' ] ) if append : f = open ( filename , 'a' ) else : f = open ( filename , 'w' ) f . write ( ... | Write the parameters to a file as a human - readable series of dicts . |
14,373 | def _get_lib_name ( lib ) : ext_suffix = sysconfig . get_config_var ( "EXT_SUFFIX" ) if not ext_suffix : try : ext_suffix = sysconfig . get_config_var ( "SO" ) except Exception as e : msg = ( "Empty 'EXT_SUFFIX' encountered while building CDLL " "filename and fallback to 'SO' variable failed " "(%s)." % str ( e ) ) war... | Helper function to get an architecture and Python version specific library filename . |
14,374 | def _load_cdll ( name ) : libname = _get_lib_name ( name ) libdir = os . path . join ( os . path . dirname ( __file__ ) , 'lib' ) libpath = os . path . join ( libdir , libname ) static_fftw = os . path . join ( libdir , 'libfftw3-3.dll' ) static_fftwf = os . path . join ( libdir , 'libfftw3f-3.dll' ) try : fftw_lib = c... | Helper function to load a shared library built during installation with ctypes . |
14,375 | def cross_net ( stream , env = False , debug = 0 , master = False ) : event = Event ( ) event . origins . append ( Origin ( ) ) event . creation_info = CreationInfo ( author = 'EQcorrscan' , creation_time = UTCDateTime ( ) ) event . comments . append ( Comment ( text = 'cross_net' ) ) samp_rate = stream [ 0 ] . stats .... | Generate picks using a simple envelope cross - correlation . |
14,376 | def cross_chan_coherence ( st1 , st2 , allow_shift = False , shift_len = 0.2 , i = 0 , xcorr_func = 'time_domain' ) : cccoh = 0.0 kchan = 0 array_xcorr = get_array_xcorr ( xcorr_func ) for tr in st1 : tr2 = st2 . select ( station = tr . stats . station , channel = tr . stats . channel ) if len ( tr2 ) > 0 and tr . stat... | Calculate cross - channel coherency . |
14,377 | def distance_matrix ( stream_list , allow_shift = False , shift_len = 0 , cores = 1 ) : dist_mat = np . array ( [ np . array ( [ 0.0 ] * len ( stream_list ) ) ] * len ( stream_list ) ) for i , master in enumerate ( stream_list ) : pool = Pool ( processes = cores ) results = [ pool . apply_async ( cross_chan_coherence ,... | Compute distance matrix for waveforms based on cross - correlations . |
14,378 | def cluster ( template_list , show = True , corr_thresh = 0.3 , allow_shift = False , shift_len = 0 , save_corrmat = False , cores = 'all' , debug = 1 ) : if cores == 'all' : num_cores = cpu_count ( ) else : num_cores = cores stream_list = [ x [ 0 ] for x in template_list ] if debug >= 1 : print ( 'Computing the distan... | Cluster template waveforms based on average correlations . |
14,379 | def SVD ( stream_list , full = False ) : warnings . warn ( 'Depreciated, use svd instead.' ) return svd ( stream_list = stream_list , full = full ) | Deprecated . Use svd . |
14,380 | def svd ( stream_list , full = False ) : stachans = list ( set ( [ ( tr . stats . station , tr . stats . channel ) for st in stream_list for tr in st ] ) ) stachans . sort ( ) svalues = [ ] svectors = [ ] uvectors = [ ] for stachan in stachans : lengths = [ ] for st in stream_list : tr = st . select ( station = stachan... | Compute the SVD of a number of templates . |
14,381 | def empirical_SVD ( stream_list , linear = True ) : warnings . warn ( 'Depreciated, use empirical_svd instead.' ) return empirical_svd ( stream_list = stream_list , linear = linear ) | Deprecated . Use empirical_svd . |
14,382 | def empirical_svd ( stream_list , linear = True ) : stachans = list ( set ( [ ( tr . stats . station , tr . stats . channel ) for st in stream_list for tr in st ] ) ) for stachan in stachans : lengths = [ ] for st in stream_list : lengths . append ( len ( st . select ( station = stachan [ 0 ] , channel = stachan [ 1 ] ... | Empirical subspace detector generation function . |
14,383 | def SVD_2_stream ( uvectors , stachans , k , sampling_rate ) : warnings . warn ( 'Depreciated, use svd_to_stream instead.' ) return svd_to_stream ( uvectors = uvectors , stachans = stachans , k = k , sampling_rate = sampling_rate ) | Deprecated . Use svd_to_stream |
14,384 | def svd_to_stream ( uvectors , stachans , k , sampling_rate ) : svstreams = [ ] for i in range ( k ) : svstream = [ ] for j , stachan in enumerate ( stachans ) : if len ( uvectors [ j ] ) <= k : warnings . warn ( 'Too few traces at %s for a %02d dimensional ' 'subspace. Detector streams will not include ' 'this channel... | Convert the singular vectors output by SVD to streams . |
14,385 | def corr_cluster ( trace_list , thresh = 0.9 ) : stack = stacking . linstack ( [ Stream ( tr ) for tr in trace_list ] ) [ 0 ] output = np . array ( [ False ] * len ( trace_list ) ) group1 = [ ] array_xcorr = get_array_xcorr ( ) for i , tr in enumerate ( trace_list ) : if array_xcorr ( np . array ( [ tr . data ] ) , sta... | Group traces based on correlations above threshold with the stack . |
14,386 | def dist_mat_km ( catalog ) : dist_mat = np . array ( [ np . array ( [ 0.0 ] * len ( catalog ) ) ] * len ( catalog ) ) for i , master in enumerate ( catalog ) : mast_list = [ ] if master . preferred_origin ( ) : master_ori = master . preferred_origin ( ) else : master_ori = master . origins [ - 1 ] master_tup = ( maste... | Compute the distance matrix for a whole catalog using epicentral separation . |
14,387 | def space_cluster ( catalog , d_thresh , show = True ) : dist_mat = dist_mat_km ( catalog ) dist_vec = squareform ( dist_mat ) Z = linkage ( dist_vec , method = 'average' ) indices = fcluster ( Z , t = d_thresh , criterion = 'distance' ) group_ids = list ( set ( indices ) ) indices = [ ( indices [ i ] , i ) for i in ra... | Cluster a catalog by distance only . |
14,388 | def space_time_cluster ( catalog , t_thresh , d_thresh ) : initial_spatial_groups = space_cluster ( catalog = catalog , d_thresh = d_thresh , show = False ) initial_spatial_lists = [ ] for group in initial_spatial_groups : initial_spatial_lists . append ( list ( group ) ) groups = [ ] for group in initial_spatial_lists... | Cluster detections in space and time . |
14,389 | def re_thresh_csv ( path , old_thresh , new_thresh , chan_thresh ) : from eqcorrscan . core . match_filter import read_detections warnings . warn ( 'Legacy function, please use ' 'eqcorrscan.core.match_filter.Party.rethreshold.' ) old_detections = read_detections ( path ) old_thresh = float ( old_thresh ) new_thresh = ... | Remove detections by changing the threshold . |
14,390 | def pool_boy ( Pool , traces , ** kwargs ) : n_cores = kwargs . get ( 'cores' , cpu_count ( ) ) if n_cores is None : n_cores = cpu_count ( ) if n_cores > traces : n_cores = traces pool = Pool ( n_cores ) yield pool pool . close ( ) pool . join ( ) | A context manager for handling the setup and cleanup of a pool object . |
14,391 | def _general_multithread ( func ) : def multithread ( templates , stream , * args , ** kwargs ) : with pool_boy ( ThreadPool , len ( stream ) , ** kwargs ) as pool : return _pool_normxcorr ( templates , stream , pool = pool , func = func ) return multithread | return the general multithreading function using func |
14,392 | def register_array_xcorr ( name , func = None , is_default = False ) : valid_methods = set ( list ( XCOR_ARRAY_METHODS ) + list ( XCORR_STREAM_METHODS ) ) cache = { } def register ( register_str ) : if register_str not in valid_methods : msg = 'register_name must be in %s' % valid_methods raise ValueError ( msg ) def _... | Decorator for registering correlation functions . |
14,393 | def _get_registerd_func ( name_or_func ) : if callable ( name_or_func ) : func = register_array_xcorr ( name_or_func ) else : func = XCOR_FUNCS [ name_or_func or 'default' ] assert callable ( func ) , 'func is not callable' if not hasattr ( func , 'registered' ) : func = register_array_xcorr ( func ) return func | get an xcorr function from a str or callable . |
14,394 | def numpy_normxcorr ( templates , stream , pads , * args , ** kwargs ) : import bottleneck from scipy . signal . signaltools import _centered used_chans = ~ np . isnan ( templates ) . any ( axis = 1 ) stream = stream . astype ( np . float64 ) templates = templates . astype ( np . float64 ) template_length = templates .... | Compute the normalized cross - correlation using numpy and bottleneck . |
14,395 | def time_multi_normxcorr ( templates , stream , pads , threaded = False , * args , ** kwargs ) : used_chans = ~ np . isnan ( templates ) . any ( axis = 1 ) utilslib = _load_cdll ( 'libutils' ) argtypes = [ np . ctypeslib . ndpointer ( dtype = np . float32 , ndim = 1 , flags = native_str ( 'C_CONTIGUOUS' ) ) , ctypes . ... | Compute cross - correlations in the time - domain using C routine . |
14,396 | def _time_threaded_normxcorr ( templates , stream , * args , ** kwargs ) : no_chans = np . zeros ( len ( templates ) ) chans = [ [ ] for _ in range ( len ( templates ) ) ] array_dict_tuple = _get_array_dicts ( templates , stream ) stream_dict , template_dict , pad_dict , seed_ids = array_dict_tuple cccsums = np . zeros... | Use the threaded time - domain routine for concurrency |
14,397 | def _fftw_stream_xcorr ( templates , stream , * args , ** kwargs ) : num_cores_inner = kwargs . get ( 'cores' ) num_cores_outer = kwargs . get ( 'cores_outer' ) if num_cores_inner is None and num_cores_outer is None : num_cores_inner = int ( os . getenv ( "OMP_NUM_THREADS" , cpu_count ( ) ) ) num_cores_outer = 1 elif n... | Apply fftw normxcorr routine concurrently . |
14,398 | def get_stream_xcorr ( name_or_func = None , concurrency = None ) : func = _get_registerd_func ( name_or_func ) concur = concurrency or 'stream_xcorr' if not hasattr ( func , concur ) : msg = '%s does not support concurrency %s' % ( func . __name__ , concur ) raise ValueError ( msg ) return getattr ( func , concur ) | Return a function for performing normalized cross correlation on lists of streams . |
14,399 | def _get_array_dicts ( templates , stream , copy_streams = True ) : template_dict = { } stream_dict = { } pad_dict = { } t_starts = [ ] stream . sort ( [ 'network' , 'station' , 'location' , 'channel' ] ) for template in templates : template . sort ( [ 'network' , 'station' , 'location' , 'channel' ] ) t_starts . appen... | prepare templates and stream , return dicts |
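
For illustration, row 14,302's `update_q` applies the standard Q-learning update, Q(s, a) ← Q(s, a) + α · (r + γ · max Q(s', a') − Q(s, a)). Below is a minimal, self-contained sketch of that rule; the dict-based Q-table and the `alpha`/`gamma` values are illustrative assumptions (the row's own version stores Q-values in a pandas DataFrame via `extract_q_df`/`save_q_df`):

```python
# Minimal Q-learning update sketch (assumed values; the original stores
# Q-values in a pandas DataFrame rather than a plain dict).
alpha, gamma = 0.1, 0.9  # assumed learning rate and discount factor
q_table = {}             # (state_key, action_key) -> Q-value

def update_q(state_key, action_key, reward_value, next_max_q):
    q = q_table.get((state_key, action_key), 0.0)
    new_q = q + alpha * (reward_value + gamma * next_max_q - q)
    q_table[(state_key, action_key)] = new_q

update_q("s0", "a1", reward_value=1.0, next_max_q=0.5)
print(q_table[("s0", "a1")])  # 0.1 * (1.0 + 0.9 * 0.5) = 0.145
```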
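
Similarly, rows 14,304 and 14,305 describe a Beta-distribution bandit: `pull` records successes and failures per arm, and `recommend` ranks arms by expected value. A minimal sketch, assuming the mean of Beta(α, β) is α / (α + β), a uniform Beta(1, 1) prior, and hypothetical arm IDs:

```python
# Beta-bandit sketch: each arm keeps a Beta(success, failure) posterior.
class BetaDist:
    def __init__(self):
        self.success, self.failure = 1, 1  # assumed uniform Beta(1, 1) prior

    def observe(self, success, failure):
        self.success += success
        self.failure += failure

    def expected_value(self):
        # Mean of Beta(a, b) is a / (a + b).
        return self.success / (self.success + self.failure)

arms = {"arm_a": BetaDist(), "arm_b": BetaDist()}  # hypothetical arms
arms["arm_a"].observe(success=8, failure=2)
arms["arm_b"].observe(success=3, failure=7)

# Rank arms by expected value, highest first, as recommend() does.
ranking = sorted(((arm_id, d.expected_value()) for arm_id, d in arms.items()),
                 key=lambda x: x[1], reverse=True)
print(ranking)  # [('arm_a', 0.75), ('arm_b', 0.333...)]
```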