idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
14,100 | def create_subscription ( self , client_id , client_secret , callback_url , object_type = model . Subscription . OBJECT_TYPE_ACTIVITY , aspect_type = model . Subscription . ASPECT_TYPE_CREATE , verify_token = model . Subscription . VERIFY_TOKEN_DEFAULT ) : params = dict ( client_id = client_id , client_secret = client_secret , object_type = object_type , aspect_type = aspect_type , callback_url = callback_url , verify_token = verify_token ) raw = self . protocol . post ( '/push_subscriptions' , use_webhook_server = True , ** params ) return model . Subscription . deserialize ( raw , bind_client = self ) | Creates a webhook event subscription . |
14,101 | def handle_subscription_callback ( self , raw , verify_token = model . Subscription . VERIFY_TOKEN_DEFAULT ) : callback = model . SubscriptionCallback . deserialize ( raw ) callback . validate ( verify_token ) response_raw = { 'hub.challenge' : callback . hub_challenge } return response_raw | Validate callback request and return valid response with challenge . |
14,102 | def list_subscriptions ( self , client_id , client_secret ) : result_fetcher = functools . partial ( self . protocol . get , '/push_subscriptions' , client_id = client_id , client_secret = client_secret , use_webhook_server = True ) return BatchedResultsIterator ( entity = model . Subscription , bind_client = self , result_fetcher = result_fetcher ) | List current webhook event subscriptions in place for the current application . |
14,103 | def delete_subscription ( self , subscription_id , client_id , client_secret ) : self . protocol . delete ( '/push_subscriptions/{id}' , id = subscription_id , client_id = client_id , client_secret = client_secret , use_webhook_server = True ) | Unsubscribe from webhook events for an existing subscription . |
14,104 | def _fill_buffer ( self ) : if self . _all_results_fetched : self . _eof ( ) raw_results = self . result_fetcher ( page = self . _page , per_page = self . per_page ) entities = [ ] for raw in raw_results : entities . append ( self . entity . deserialize ( raw , bind_client = self . bind_client ) ) self . _buffer = collections . deque ( entities ) self . log . debug ( "Requested page {0} (got: {1} items)" . format ( self . _page , len ( self . _buffer ) ) ) if len ( self . _buffer ) < self . per_page : self . _all_results_fetched = True self . _page += 1 | Fills the internal size - 50 buffer from Strava API . |
14,105 | def update_from_response ( self , response , raise_exc = True ) : self . upload_id = response . get ( 'id' ) self . external_id = response . get ( 'external_id' ) self . activity_id = response . get ( 'activity_id' ) self . status = response . get ( 'status' ) or response . get ( 'message' ) if response . get ( 'error' ) : self . error = response . get ( 'error' ) elif response . get ( 'errors' ) : self . error = str ( response . get ( 'errors' ) ) else : self . error = None if raise_exc : self . raise_for_error ( ) | Updates internal state of object . |
14,106 | def wait ( self , timeout = None , poll_interval = 1.0 ) : start = time . time ( ) while self . activity_id is None : self . poll ( ) time . sleep ( poll_interval ) if timeout and ( time . time ( ) - start ) > timeout : raise exc . TimeoutExceeded ( ) return self . client . get_activity ( self . activity_id ) | Wait for the upload to complete or to err out . |
14,107 | def full_photos ( self ) : if self . _photos is None : if self . total_photo_count > 0 : self . assert_bind_client ( ) self . _photos = self . bind_client . get_activity_photos ( self . id , only_instagram = False ) else : self . _photos = [ ] return self . _photos | Gets a list of photos using default options . |
14,108 | def get ( self , key , default = None ) : key = self . make_key ( key ) if self . debug : return default try : value = self . database [ key ] except KeyError : self . metrics [ 'misses' ] += 1 return default else : self . metrics [ 'hits' ] += 1 return pickle . loads ( value ) | Retreive a value from the cache . In the event the value does not exist return the default . |
14,109 | def set ( self , key , value , timeout = None ) : key = self . make_key ( key ) if timeout is None : timeout = self . default_timeout if self . debug : return True pickled_value = pickle . dumps ( value ) self . metrics [ 'writes' ] += 1 if timeout : return self . database . setex ( key , int ( timeout ) , pickled_value ) else : return self . database . set ( key , pickled_value ) | Cache the given value in the specified key . If no timeout is specified the default timeout will be used . |
14,110 | def delete ( self , key ) : if not self . debug : self . database . delete ( self . make_key ( key ) ) | Remove the given key from the cache . |
14,111 | def flush ( self ) : keys = list ( self . keys ( ) ) if keys : return self . database . delete ( * keys ) | Remove all cached objects from the database . |
14,112 | def cached_property ( self , key_fn = _key_fn , timeout = None ) : this = self class _cached_property ( object ) : def __init__ ( self , fn ) : self . _fn = this . cached ( key_fn , timeout ) ( fn ) def __get__ ( self , instance , instance_type = None ) : if instance is None : return self return self . _fn ( instance ) def __delete__ ( self , obj ) : self . _fn . bust ( obj ) def __set__ ( self , instance , value ) : raise ValueError ( 'Cannot set value of a cached property.' ) def decorator ( fn ) : return _cached_property ( fn ) return decorator | Decorator that will transparently cache calls to the wrapped method . The method will be exposed as a property . |
14,113 | def cache_async ( self , key_fn = _key_fn , timeout = 3600 ) : def decorator ( fn ) : wrapped = self . cached ( key_fn , timeout ) ( fn ) @ wraps ( fn ) def inner ( * args , ** kwargs ) : q = Queue ( ) def _sub_fn ( ) : q . put ( wrapped ( * args , ** kwargs ) ) def _get_value ( block = True , timeout = None ) : if not hasattr ( _get_value , '_return_value' ) : result = q . get ( block = block , timeout = timeout ) _get_value . _return_value = result return _get_value . _return_value thread = threading . Thread ( target = _sub_fn ) thread . start ( ) return _get_value return inner return decorator | Decorator that will execute the cached function in a separate thread . The function will immediately return returning a callable to the user . This callable can be used to check for a return value . |
14,114 | def store ( self , obj_id , title = None , data = None , obj_type = None ) : if title is None : title = obj_id if data is None : data = title obj_type = obj_type or '' if self . _use_json : data = json . dumps ( data ) combined_id = self . object_key ( obj_id , obj_type ) if self . exists ( obj_id , obj_type ) : stored_title = self . _title_data [ combined_id ] if stored_title == title : self . _data [ combined_id ] = data return else : self . remove ( obj_id , obj_type ) self . _data [ combined_id ] = data self . _title_data [ combined_id ] = title clean_title = ' ' . join ( self . tokenize_title ( title ) ) title_score = self . score_token ( clean_title ) for idx , word in enumerate ( self . tokenize_title ( title ) ) : word_score = self . score_token ( word ) position_score = word_score + ( self . _offset * idx ) key_score = position_score + title_score for substring in self . substrings ( word ) : self . database . zadd ( self . word_key ( substring ) , { combined_id : key_score } ) return True | Store data in the autocomplete index . |
14,115 | def exists ( self , obj_id , obj_type = None ) : return self . object_key ( obj_id , obj_type ) in self . _data | Return whether the given object exists in the search index . |
14,116 | def boost_object ( self , obj_id = None , obj_type = None , multiplier = 1.1 , relative = True ) : combined_id = self . object_key ( obj_id or '' , obj_type or '' ) if relative : current = float ( self . _boosts [ combined_id ] or 1.0 ) self . _boosts [ combined_id ] = current * multiplier else : self . _boosts [ combined_id ] = multiplier | Boost search results for the given object or type by the amount specified . When the multiplier is greater than 1 the results will percolate to the top . Values between 0 and 1 will percolate results to the bottom . |
14,117 | def list_data ( self ) : fn = ( lambda v : json . loads ( decode ( v ) ) ) if self . _use_json else decode return map ( fn , self . _data . values ( ) ) | Return all the data stored in the autocomplete index . If the data was stored as serialized JSON then it will be de - serialized before being returned . |
14,118 | def flush ( self , batch_size = 1000 ) : keys = self . database . keys ( self . namespace + ':*' ) for i in range ( 0 , len ( keys ) , batch_size ) : self . database . delete ( * keys [ i : i + batch_size ] ) | Delete all autocomplete indexes and metadata . |
14,119 | def metaphone ( self , words ) : for word in words : r = 0 for w in double_metaphone ( word ) : if w : w = w . strip ( ) if w : r += 1 yield w if not r : yield word | Apply the double metaphone algorithm to the given words . Using metaphone allows the search index to tolerate misspellings and small typos . |
14,120 | def tokenize ( self , value ) : words = self . split_phrase ( decode ( value ) . lower ( ) ) if self . _stopwords : words = [ w for w in words if w not in self . _stopwords ] if self . _min_word_length : words = [ w for w in words if len ( w ) >= self . _min_word_length ] fraction = 1. / ( len ( words ) + 1 ) if self . _use_stemmer : words = self . stem ( words ) if self . _use_metaphone : words = self . metaphone ( words ) scores = { } for word in words : scores . setdefault ( word , 0 ) scores [ word ] += fraction return scores | Split the incoming value into tokens and process each token optionally stemming or running metaphone . |
14,121 | def expire ( self , ttl = None ) : if ttl is not None : self . database . expire ( self . key , ttl ) else : self . database . persist ( self . key ) | Expire the given key in the given number of seconds . If ttl is None then any expiry will be cleared and key will be persisted . |
14,122 | def pexpire ( self , ttl = None ) : if ttl is not None : self . database . pexpire ( self . key , ttl ) else : self . database . persist ( self . key ) | Expire the given key in the given number of milliseconds . If ttl is None then any expiry will be cleared and key will be persisted . |
14,123 | def search ( self , pattern , count = None ) : return self . _scan ( match = pattern , count = count ) | Search the keys of the given hash using the specified pattern . |
14,124 | def from_dict ( cls , database , key , data , clear = False ) : hsh = cls ( database , key ) if clear : hsh . clear ( ) hsh . update ( data ) return hsh | Create and populate a Hash object from a data dictionary . |
14,125 | def as_list ( self , decode = False ) : items = self . database . lrange ( self . key , 0 , - 1 ) return [ _decode ( item ) for item in items ] if decode else items | Return a list containing all the items in the list . |
14,126 | def from_list ( cls , database , key , data , clear = False ) : lst = cls ( database , key ) if clear : lst . clear ( ) lst . extend ( data ) return lst | Create and populate a List object from a data list . |
14,127 | def diffstore ( self , dest , * others ) : keys = [ self . key ] keys . extend ( [ other . key for other in others ] ) self . database . sdiffstore ( dest , keys ) return self . database . Set ( dest ) | Store the set difference of the current set and one or more others in a new key . |
14,128 | def interstore ( self , dest , * others ) : keys = [ self . key ] keys . extend ( [ other . key for other in others ] ) self . database . sinterstore ( dest , keys ) return self . database . Set ( dest ) | Store the intersection of the current set and one or more others in a new key . |
14,129 | def as_set ( self , decode = False ) : items = self . database . smembers ( self . key ) return set ( _decode ( item ) for item in items ) if decode else items | Return a Python set containing all the items in the collection . |
14,130 | def from_set ( cls , database , key , data , clear = False ) : s = cls ( database , key ) if clear : s . clear ( ) s . add ( * data ) return s | Create and populate a Set object from a data set . |
14,131 | def rank ( self , item , reverse = False ) : fn = reverse and self . database . zrevrank or self . database . zrank return fn ( self . key , item ) | Return the rank of the given item . |
14,132 | def count ( self , low , high = None ) : if high is None : high = low return self . database . zcount ( self . key , low , high ) | Return the number of items between the given bounds . |
14,133 | def range ( self , low , high , with_scores = False , desc = False , reverse = False ) : if reverse : return self . database . zrevrange ( self . key , low , high , with_scores ) else : return self . database . zrange ( self . key , low , high , desc , with_scores ) | Return a range of items between low and high . By default scores will not be included but this can be controlled via the with_scores parameter . |
14,134 | def remove_by_score ( self , low , high = None ) : if high is None : high = low return self . database . zremrangebyscore ( self . key , low , high ) | Remove elements from the ZSet by their score . |
14,135 | def incr ( self , key , incr_by = 1. ) : return self . database . zincrby ( self . key , incr_by , key ) | Increment the score of an item in the ZSet . |
14,136 | def interstore ( self , dest , * others , ** kwargs ) : keys = [ self . key ] keys . extend ( [ other . key for other in others ] ) self . database . zinterstore ( dest , keys , ** kwargs ) return self . database . ZSet ( dest ) | Store the intersection of the current zset and one or more others in a new key . |
14,137 | def from_dict ( cls , database , key , data , clear = False ) : zset = cls ( database , key ) if clear : zset . clear ( ) zset . add ( data ) return zset | Create and populate a ZSet object from a data dictionary . |
14,138 | def append ( self , value ) : self . database . run_script ( 'array_append' , keys = [ self . key ] , args = [ value ] ) | Append a new value to the end of the array . |
14,139 | def extend ( self , values ) : self . database . run_script ( 'array_extend' , keys = [ self . key ] , args = values ) | Extend the array appending the given values . |
14,140 | def pop ( self , idx = None ) : if idx is not None : return self . database . run_script ( 'array_remove' , keys = [ self . key ] , args = [ idx ] ) else : return self . database . run_script ( 'array_pop' , keys = [ self . key ] , args = [ ] ) | Remove an item from the array . By default this will be the last item by index but any index can be specified . |
14,141 | def as_list ( self , decode = False ) : return [ _decode ( i ) for i in self ] if decode else list ( self ) | Return a list of items in the array . |
14,142 | def from_list ( cls , database , key , data , clear = False ) : arr = cls ( database , key ) if clear : arr . clear ( ) arr . extend ( data ) return arr | Create and populate an Array object from a data dictionary . |
14,143 | def add ( self , data , id = '*' , maxlen = None , approximate = True ) : return self . database . xadd ( self . key , data , id , maxlen , approximate ) | Add data to a stream . |
14,144 | def range ( self , start = '-' , stop = '+' , count = None ) : return self . database . xrange ( self . key , start , stop , count ) | Read a range of values from a stream . |
14,145 | def revrange ( self , start = '+' , stop = '-' , count = None ) : return self . database . xrevrange ( self . key , start , stop , count ) | Read a range of values from a stream in reverse . |
14,146 | def read ( self , count = None , block = None , last_id = None ) : if last_id is None : last_id = '0-0' resp = self . database . xread ( { self . key : _decode ( last_id ) } , count , block ) return resp [ 0 ] [ 1 ] if resp else [ ] | Monitor stream for new data . |
14,147 | def trim ( self , count , approximate = True ) : return self . database . xtrim ( self . key , count , approximate ) | Trim the stream to the given count of messages discarding the oldest messages first . |
14,148 | def pending ( self , start = '-' , stop = '+' , count = 1000 , consumer = None ) : return self . database . xpending_range ( self . key , self . group , start , stop , count , consumer ) | List pending messages within the consumer group for this stream . |
14,149 | def delete_consumer ( self , consumer = None ) : if consumer is None : consumer = self . _consumer return self . database . xgroup_delconsumer ( self . key , self . group , consumer ) | Remove a specific consumer from a consumer group . |
14,150 | def create ( self , ensure_keys_exist = True , mkstream = False ) : if ensure_keys_exist : for key in self . keys : if not self . database . exists ( key ) : msg_id = self . database . xadd ( key , { '' : '' } , id = b'0-1' ) self . database . xdel ( key , msg_id ) elif self . database . type ( key ) != b'stream' : raise ValueError ( 'Consumer group key "%s" exists and is ' 'not a stream. To prevent data-loss ' 'this key will not be deleted.' ) resp = { } for key , value in self . keys . items ( ) : try : resp [ key ] = self . database . xgroup_create ( key , self . name , value , mkstream ) except ResponseError as exc : if exception_message ( exc ) . startswith ( 'BUSYGROUP' ) : resp [ key ] = False else : raise return resp | Create the consumer group and register it with the group s stream keys . |
14,151 | def destroy ( self ) : resp = { } for key in self . keys : resp [ key ] = self . database . xgroup_destroy ( key , self . name ) return resp | Destroy the consumer group . |
14,152 | def get ( self , fmt , offset ) : bfo = BitFieldOperation ( self . database , self . key ) return bfo . get ( fmt , offset ) | Get the value of a given bitfield . |
14,153 | def set ( self , fmt , offset , value ) : bfo = BitFieldOperation ( self . database , self . key ) return bfo . set ( fmt , offset , value ) | Set the value of a given bitfield . |
14,154 | def add ( self , data ) : bfo = BitFieldOperation ( self . database , self . key ) for bit_index in self . _get_seeds ( data ) : bfo . set ( 'u1' , bit_index , 1 ) bfo . execute ( ) | Add an item to the bloomfilter . |
14,155 | def contains ( self , data ) : bfo = BitFieldOperation ( self . database , self . key ) for bit_index in self . _get_seeds ( data ) : bfo . get ( 'u1' , bit_index ) return all ( bfo . execute ( ) ) | Check if an item has been added to the bloomfilter . |
14,156 | def store_many ( self , items ) : with self . walrus . atomic ( ) : for item in items : self . store ( * item ) | Store multiple subject - predicate - object triples in the database . |
14,157 | def delete ( self , s , p , o ) : with self . walrus . atomic ( ) : for key in self . keys_for_values ( s , p , o ) : del self . _z [ key ] | Remove the given subj - pred - obj triple from the database . |
14,158 | def search ( self , * conditions ) : results = { } for condition in conditions : if isinstance ( condition , tuple ) : query = dict ( zip ( 'spo' , condition ) ) else : query = condition . copy ( ) materialized = { } targets = [ ] for part in ( 's' , 'p' , 'o' ) : if isinstance ( query [ part ] , Variable ) : variable = query . pop ( part ) materialized [ part ] = set ( ) targets . append ( ( variable , part ) ) for result in self . query ( ** query ) : ok = True for var , part in targets : if var in results and result [ part ] not in results [ var ] : ok = False break if ok : for var , part in targets : materialized [ part ] . add ( result [ part ] ) for var , part in targets : if var in results : results [ var ] &= materialized [ part ] else : results [ var ] = materialized [ part ] return dict ( ( var . name , vals ) for ( var , vals ) in results . items ( ) ) | Given a set of conditions return all values that satisfy the conditions for a given set of variables . |
14,159 | def run_script ( self , script_name , keys = None , args = None ) : return self . _scripts [ script_name ] ( keys , args ) | Execute a walrus script with the given arguments . |
14,160 | def rate_limit ( self , name , limit = 5 , per = 60 , debug = False ) : return RateLimit ( self , name , limit , per , debug ) | Rate limit implementation . Allows up to limit of events every per seconds . |
14,161 | def cas ( self , key , value , new_value ) : return self . run_script ( 'cas' , keys = [ key ] , args = [ value , new_value ] ) | Perform an atomic compare - and - set on the value in key using a prefix match on the provided value . |
14,162 | def listener ( self , channels = None , patterns = None , is_async = False ) : def decorator ( fn ) : _channels = channels or [ ] _patterns = patterns or [ ] @ wraps ( fn ) def inner ( ) : pubsub = self . pubsub ( ) def listen ( ) : for channel in _channels : pubsub . subscribe ( channel ) for pattern in _patterns : pubsub . psubscribe ( pattern ) for data_dict in pubsub . listen ( ) : try : ret = fn ( ** data_dict ) except StopIteration : pubsub . close ( ) break if is_async : worker = threading . Thread ( target = listen ) worker . start ( ) return worker else : listen ( ) return inner return decorator | Decorator for wrapping functions used to listen for Redis pub - sub messages . |
14,163 | def stream_log ( self , callback , connection_id = 'monitor' ) : conn = self . connection_pool . get_connection ( connection_id , None ) conn . send_command ( 'monitor' ) while callback ( conn . read_response ( ) ) : pass | Stream Redis activity one line at a time to the given callback . |
14,164 | def make_key ( self , * parts ) : separator = getattr ( self . model_class , 'index_separator' , '.' ) parts = map ( decode , parts ) return '%s%s' % ( self . _base_key , separator . join ( map ( str , parts ) ) ) | Generate a namespaced key for the given path . |
14,165 | def get ( cls , expression ) : executor = Executor ( cls . __database__ ) result = executor . execute ( expression ) if len ( result ) != 1 : raise ValueError ( 'Got %s results, expected 1.' % len ( result ) ) return cls . load ( result . _first_or_any ( ) , convert_key = False ) | Retrieve the model instance matching the given expression . If the number of matching results is not equal to one then a ValueError will be raised . |
14,166 | def load ( cls , primary_key , convert_key = True ) : if convert_key : primary_key = cls . _query . get_primary_hash_key ( primary_key ) if not cls . __database__ . hash_exists ( primary_key ) : raise KeyError ( 'Object not found.' ) raw_data = cls . __database__ . hgetall ( primary_key ) if PY3 : raw_data = decode_dict_keys ( raw_data ) data = { } for name , field in cls . _fields . items ( ) : if isinstance ( field , _ContainerField ) : continue elif name in raw_data : data [ name ] = field . python_value ( raw_data [ name ] ) else : data [ name ] = None return cls ( ** data ) | Retrieve a model instance by primary key . |
14,167 | def delete ( self , for_update = False ) : hash_key = self . get_hash_id ( ) try : original_instance = self . load ( hash_key , convert_key = False ) except KeyError : return all_index = self . _query . all_index ( ) all_index . remove ( hash_key ) for field in self . _indexes : for index in field . get_indexes ( ) : index . remove ( original_instance ) if not for_update : for field in self . _fields . values ( ) : if isinstance ( field , _ContainerField ) : field . _delete ( self ) self . __database__ . delete ( hash_key ) | Delete the given model instance . |
14,168 | def limit ( self , key ) : if self . _debug : return False counter = self . database . List ( self . name + ':' + key ) n = len ( counter ) is_limited = False if n < self . _limit : counter . prepend ( str ( time . time ( ) ) ) else : oldest = float ( counter [ - 1 ] ) if time . time ( ) - oldest < self . _per : is_limited = True else : counter . prepend ( str ( time . time ( ) ) ) del counter [ : self . _limit ] counter . pexpire ( int ( self . _per * 2000 ) ) return is_limited | Function to log an event with the given key . If the key has not exceeded their alotted events then the function returns False to indicate that no limit is being imposed . |
14,169 | def rate_limited ( self , key_function = None ) : if key_function is None : def key_function ( * args , ** kwargs ) : data = pickle . dumps ( ( args , sorted ( kwargs . items ( ) ) ) ) return hashlib . md5 ( data ) . hexdigest ( ) def decorator ( fn ) : @ wraps ( fn ) def inner ( * args , ** kwargs ) : key = key_function ( * args , ** kwargs ) if self . limit ( key ) : raise RateLimitException ( 'Call to %s exceeded %s events in %s seconds.' % ( fn . __name__ , self . _limit , self . _per ) ) return fn ( * args , ** kwargs ) return inner return decorator | Function or method decorator that will prevent calls to the decorated function when the number of events has been exceeded for the given time period . |
14,170 | def is_monotonic ( df , items = None , increasing = None , strict = False ) : if items is None : items = { k : ( increasing , strict ) for k in df } for col , ( increasing , strict ) in items . items ( ) : s = pd . Index ( df [ col ] ) if increasing : good = getattr ( s , 'is_monotonic_increasing' ) elif increasing is None : good = getattr ( s , 'is_monotonic' ) | getattr ( s , 'is_monotonic_decreasing' ) else : good = getattr ( s , 'is_monotonic_decreasing' ) if strict : if increasing : good = good & ( s . to_series ( ) . diff ( ) . dropna ( ) > 0 ) . all ( ) elif increasing is None : good = good & ( ( s . to_series ( ) . diff ( ) . dropna ( ) > 0 ) . all ( ) | ( s . to_series ( ) . diff ( ) . dropna ( ) < 0 ) . all ( ) ) else : good = good & ( s . to_series ( ) . diff ( ) . dropna ( ) < 0 ) . all ( ) if not good : raise AssertionError return df | Asserts that the DataFrame is monotonic . |
14,171 | def is_shape ( df , shape ) : try : check = np . all ( np . equal ( df . shape , shape ) | ( np . equal ( shape , [ - 1 , - 1 ] ) | np . equal ( shape , [ None , None ] ) ) ) assert check except AssertionError as e : msg = ( "Expected shape: {}\n" "\t\tActual shape: {}" . format ( shape , df . shape ) ) e . args = ( msg , ) raise return df | Asserts that the DataFrame is of a known shape . |
14,172 | def unique ( df , columns = None ) : if columns is None : columns = df . columns for col in columns : if not df [ col ] . is_unique : raise AssertionError ( "Column {!r} contains non-unique values" . format ( col ) ) return df | Asserts that columns in the DataFrame only have unique values . |
14,173 | def unique_index ( df ) : try : assert df . index . is_unique except AssertionError as e : e . args = df . index . get_duplicates ( ) raise return df | Assert that the index is unique |
14,174 | def within_n_std ( df , n = 3 ) : means = df . mean ( ) stds = df . std ( ) inliers = ( np . abs ( df [ means . index ] - means ) < n * stds ) if not np . all ( inliers ) : msg = generic . bad_locations ( ~ inliers ) raise AssertionError ( msg ) return df | Assert that every value is within n standard deviations of its column s mean . |
14,175 | def has_dtypes ( df , items ) : dtypes = df . dtypes for k , v in items . items ( ) : if not dtypes [ k ] == v : raise AssertionError ( "{} has the wrong dtype. Should be ({}), is ({})" . format ( k , v , dtypes [ k ] ) ) return df | Assert that a DataFrame has dtypes |
14,176 | def one_to_many ( df , unitcol , manycol ) : subset = df [ [ manycol , unitcol ] ] . drop_duplicates ( ) for many in subset [ manycol ] . unique ( ) : if subset [ subset [ manycol ] == many ] . shape [ 0 ] > 1 : msg = "{} in {} has multiple values for {}" . format ( many , manycol , unitcol ) raise AssertionError ( msg ) return df | Assert that a many - to - one relationship is preserved between two columns . For example a retail store will have have distinct departments each with several employees . If each employee may only work in a single department then the relationship of the department to the employees is one to many . |
14,177 | def is_same_as ( df , df_to_compare , ** kwargs ) : try : tm . assert_frame_equal ( df , df_to_compare , ** kwargs ) except AssertionError as exc : six . raise_from ( AssertionError ( "DataFrames are not equal" ) , exc ) return df | Assert that two pandas dataframes are the equal |
14,178 | def ensure_pyplot ( self ) : if not self . _pyplot_imported : if 'matplotlib.backends' not in sys . modules : import matplotlib matplotlib . use ( 'agg' ) self . process_input_line ( 'import matplotlib.pyplot as plt' , store_history = False ) self . _pyplot_imported = True | Ensures that pyplot has been imported into the embedded IPython shell . |
14,179 | def _fetch_remote_json ( service_url , params = None , use_http_post = False ) : if not params : params = { } request_url , response = _fetch_remote ( service_url , params , use_http_post ) if six . PY3 : str_response = response . read ( ) . decode ( 'utf-8' ) return ( request_url , json . loads ( str_response , parse_float = Decimal ) ) return ( request_url , json . load ( response , parse_float = Decimal ) ) | Retrieves a JSON object from a URL . |
14,180 | def _fetch_remote_file ( service_url , params = None , use_http_post = False ) : if not params : params = { } request_url , response = _fetch_remote ( service_url , params , use_http_post ) dummy , params = cgi . parse_header ( response . headers . get ( 'Content-Disposition' , '' ) ) fn = params [ 'filename' ] return ( response . headers . get ( 'content-type' ) , fn , response . read ( ) , response . geturl ( ) ) | Retrieves a file from a URL . |
14,181 | def geocode_location ( location , sensor = False , api_key = None ) : params = { 'address' : location , 'sensor' : str ( sensor ) . lower ( ) } if api_key is not None : params [ 'key' ] = api_key url , geo_response = _fetch_remote_json ( GooglePlaces . GEOCODE_API_URL , params ) _validate_response ( url , geo_response ) if geo_response [ 'status' ] == GooglePlaces . RESPONSE_STATUS_ZERO_RESULTS : error_detail = ( 'Lat/Lng for location \'%s\' can\'t be determined.' % location ) raise GooglePlacesError ( error_detail ) return geo_response [ 'results' ] [ 0 ] [ 'geometry' ] [ 'location' ] | Converts a human - readable location to lat - lng . |
14,182 | def _get_place_details ( place_id , api_key , sensor = False , language = lang . ENGLISH ) : url , detail_response = _fetch_remote_json ( GooglePlaces . DETAIL_API_URL , { 'placeid' : place_id , 'sensor' : str ( sensor ) . lower ( ) , 'key' : api_key , 'language' : language } ) _validate_response ( url , detail_response ) return detail_response [ 'result' ] | Gets a detailed place response . |
14,183 | def _validate_response ( url , response ) : if response [ 'status' ] not in [ GooglePlaces . RESPONSE_STATUS_OK , GooglePlaces . RESPONSE_STATUS_ZERO_RESULTS ] : error_detail = ( 'Request to URL %s failed with response code: %s' % ( url , response [ 'status' ] ) ) raise GooglePlacesError ( error_detail ) | Validates that the response from Google was successful . |
14,184 | def nearby_search ( self , language = lang . ENGLISH , keyword = None , location = None , lat_lng = None , name = None , radius = 3200 , rankby = ranking . PROMINENCE , sensor = False , type = None , types = [ ] , pagetoken = None ) : if location is None and lat_lng is None and pagetoken is None : raise ValueError ( 'One of location, lat_lng or pagetoken must be passed in.' ) if rankby == 'distance' : if keyword is None and types == [ ] and name is None : raise ValueError ( 'When rankby = googleplaces.ranking.DISTANCE, ' + 'name, keyword or types kwargs ' + 'must be specified.' ) self . _sensor = sensor radius = ( radius if radius <= GooglePlaces . MAXIMUM_SEARCH_RADIUS else GooglePlaces . MAXIMUM_SEARCH_RADIUS ) lat_lng_str = self . _generate_lat_lng_string ( lat_lng , location ) self . _request_params = { 'location' : lat_lng_str } if rankby == 'prominence' : self . _request_params [ 'radius' ] = radius else : self . _request_params [ 'rankby' ] = rankby if type : self . _request_params [ 'type' ] = type elif types : if len ( types ) == 1 : self . _request_params [ 'type' ] = types [ 0 ] elif len ( types ) > 1 : self . _request_params [ 'types' ] = '|' . join ( types ) if keyword is not None : self . _request_params [ 'keyword' ] = keyword if name is not None : self . _request_params [ 'name' ] = name if pagetoken is not None : self . _request_params [ 'pagetoken' ] = pagetoken if language is not None : self . _request_params [ 'language' ] = language self . _add_required_param_keys ( ) url , places_response = _fetch_remote_json ( GooglePlaces . NEARBY_SEARCH_API_URL , self . _request_params ) _validate_response ( url , places_response ) return GooglePlacesSearchResult ( self , places_response ) | Perform a nearby search using the Google Places API . |
def text_search(self, query=None, language=lang.ENGLISH, lat_lng=None,
                radius=3200, type=None, types=None, location=None,
                pagetoken=None):
    """Perform a text search using the Google Places API.

    keyword arguments:
    query     -- The text string to search on, e.g. 'Restaurant in London'.
    language  -- Language code for the results, if possible
                 (default lang.ENGLISH).
    lat_lng   -- A dict with 'lat' and 'lng' keys to bias results
                 (default None).
    radius    -- Distance in meters within which to bias results
                 (default 3200).
    type      -- Optional single type restriction (default None).
    types     -- Optional list of type restrictions (default None).
    location  -- A human readable location, e.g 'London, England'
                 (default None).
    pagetoken -- Optional token for the next page of results (default None).
    """
    # NOTE(fix): 'types' previously defaulted to a shared mutable list.
    if types is None:
        types = []
    self._request_params = {'query': query}
    if lat_lng is not None or location is not None:
        lat_lng_str = self._generate_lat_lng_string(lat_lng, location)
        self._request_params['location'] = lat_lng_str
        self._request_params['radius'] = radius
    if type:
        self._request_params['type'] = type
    elif types:
        if len(types) == 1:
            self._request_params['type'] = types[0]
        else:
            self._request_params['types'] = '|'.join(types)
    if language is not None:
        self._request_params['language'] = language
    if pagetoken is not None:
        self._request_params['pagetoken'] = pagetoken
    self._add_required_param_keys()
    url, places_response = _fetch_remote_json(
        GooglePlaces.TEXT_SEARCH_API_URL, self._request_params)
    _validate_response(url, places_response)
    return GooglePlacesSearchResult(self, places_response)
def autocomplete(self, input, lat_lng=None, location=None, radius=3200,
                 language=lang.ENGLISH, types=None, components=None):
    """Perform an autocomplete search using the Google Places API.

    keyword arguments:
    input      -- The text string to complete.
    lat_lng    -- A dict with 'lat' and 'lng' keys to bias results
                  (default None).
    location   -- A human readable location, e.g 'London, England'
                  (default None).
    radius     -- Distance in meters within which to bias results
                  (default 3200).
    language   -- Language code for the results, if possible
                  (default lang.ENGLISH).
    types      -- Restricts the results to a certain type (default None).
    components -- Optional list of (component, value) pairs used to restrict
                  results, e.g. [('country', 'fr')] (default None).
    """
    # NOTE(fix): 'components' previously defaulted to a shared mutable list.
    if components is None:
        components = []
    self._request_params = {'input': input}
    if lat_lng is not None or location is not None:
        lat_lng_str = self._generate_lat_lng_string(lat_lng, location)
        self._request_params['location'] = lat_lng_str
        self._request_params['radius'] = radius
    if types:
        self._request_params['types'] = types
    if components:
        self._request_params['components'] = '|'.join(
            ['{}:{}'.format(c[0], c[1]) for c in components])
    if language is not None:
        self._request_params['language'] = language
    self._add_required_param_keys()
    url, places_response = _fetch_remote_json(
        GooglePlaces.AUTOCOMPLETE_API_URL, self._request_params)
    _validate_response(url, places_response)
    return GoogleAutocompleteSearchResult(self, places_response)
def radar_search(self, sensor=False, keyword=None, name=None,
                 language=lang.ENGLISH, lat_lng=None, opennow=False,
                 radius=3200, type=None, types=None, location=None):
    """Perform a radar search using the Google Places API.

    One of keyword, name or types is required, as is one of location or
    lat_lng.

    Raises ValueError when those requirements are not met, when radius is
    not convertible to int, or when sensor is not a boolean.
    """
    # NOTE(fix): 'types' previously defaulted to a shared mutable list.
    if types is None:
        types = []
    # NOTE(fix): the original tested "len(types) is 0" — an identity
    # comparison against an int literal that only works via CPython's
    # small-int caching; use truthiness instead.
    if keyword is None and name is None and not types:
        raise ValueError('One of keyword, name or types must be supplied.')
    if location is None and lat_lng is None:
        raise ValueError('One of location or lat_lng must be passed in.')
    try:
        radius = int(radius)
    except (TypeError, ValueError):
        # Narrowed from a bare "except:", which also swallowed
        # KeyboardInterrupt / SystemExit.
        raise ValueError('radius must be passed supplied as an integer.')
    if sensor not in (True, False):
        raise ValueError('sensor must be passed in as a boolean value.')
    self._request_params = {'radius': radius}
    self._sensor = sensor
    self._request_params['location'] = self._generate_lat_lng_string(
        lat_lng, location)
    if keyword is not None:
        self._request_params['keyword'] = keyword
    if name is not None:
        self._request_params['name'] = name
    if type:
        self._request_params['type'] = type
    elif types:
        if len(types) == 1:
            self._request_params['type'] = types[0]
        else:
            self._request_params['types'] = '|'.join(types)
    if language is not None:
        self._request_params['language'] = language
    if opennow is True:
        self._request_params['opennow'] = 'true'
    self._add_required_param_keys()
    url, places_response = _fetch_remote_json(
        GooglePlaces.RADAR_SEARCH_API_URL, self._request_params)
    _validate_response(url, places_response)
    return GooglePlacesSearchResult(self, places_response)
def checkin(self, place_id, sensor=False):
    """Checks in a user to a place via the Places check-in endpoint."""
    payload = json.dumps({'placeid': place_id})
    endpoint = GooglePlaces.CHECKIN_API_URL % (str(sensor).lower(),
                                               self.api_key)
    url, checkin_response = _fetch_remote_json(endpoint, payload,
                                               use_http_post=True)
    _validate_response(url, checkin_response)
def get_place(self, place_id, sensor=False, language=lang.ENGLISH):
    """Gets a detailed Place object for the given place_id."""
    details = _get_place_details(place_id, self.api_key, sensor,
                                 language=language)
    return Place(self, details)
def add_place(self, **kwargs):
    """Adds a place to the Google Places database.

    Required keyword arguments:
    name     -- The full text name of the Place (max 255 characters).
    lat_lng  -- A dict with 'lat' and 'lng' keys.
    accuracy -- The accuracy of the location signal, in meters (int).
    types    -- The category (str) or categories (list) of the Place.

    Optional keyword arguments:
    language -- Language for the name (default lang.ENGLISH).
    sensor   -- Whether the request used a location sensor (default False).

    Returns a dict holding the 'place_id' and 'id' of the new Place.

    Raises ValueError when a required argument is missing or malformed.
    """
    required_kwargs = {'name': [str], 'lat_lng': [dict],
                       'accuracy': [int], 'types': [str, list]}
    request_params = {}
    for key, expected_types in required_kwargs.items():
        if kwargs.get(key) is None:
            raise ValueError('The %s argument is required.' % key)
        if not isinstance(kwargs[key], tuple(expected_types)):
            raise ValueError('Invalid value for %s' % key)
        # NOTE(fix): the original compared strings with "is not", an
        # identity check that only worked via CPython string interning.
        if key != 'lat_lng':
            request_params[key] = kwargs[key]
    if len(kwargs['name']) > 255:
        raise ValueError('The place name must not exceed 255 characters ' +
                         'in length.')
    try:
        # Probe both keys before accepting the mapping.
        kwargs['lat_lng']['lat']
        kwargs['lat_lng']['lng']
        request_params['location'] = kwargs['lat_lng']
    except KeyError:
        raise ValueError('Invalid keys for lat_lng.')
    request_params['language'] = (kwargs.get('language')
                                  if kwargs.get('language') is not None
                                  else lang.ENGLISH)
    sensor = (kwargs.get('sensor')
              if kwargs.get('sensor') is not None else False)
    # The API always expects a list of types.
    if isinstance(kwargs['types'], str):
        request_params['types'] = [kwargs['types']]
    else:
        request_params['types'] = kwargs['types']
    url, add_response = _fetch_remote_json(
        GooglePlaces.ADD_API_URL % (str(sensor).lower(), self.api_key),
        json.dumps(request_params), use_http_post=True)
    _validate_response(url, add_response)
    return {'place_id': add_response['place_id'], 'id': add_response['id']}
def delete_place(self, place_id, sensor=False):
    """Deletes a place from the Google Places database."""
    payload = json.dumps({'place_id': place_id})
    endpoint = GooglePlaces.DELETE_API_URL % (str(sensor).lower(),
                                              self.api_key)
    url, delete_response = _fetch_remote_json(endpoint, payload,
                                              use_http_post=True)
    _validate_response(url, delete_response)
def types(self):
    """Returns a list of feature types describing the given result.

    Lazily populated (and cached) from the full details payload once it
    is available.
    """
    # NOTE(fix): the original assigned the looked-up value to self._icon
    # (a copy/paste slip from the icon accessor), so self._types was never
    # actually populated from the details payload.
    if self._types == '' and self.details is not None and 'types' in self.details:
        self._types = self.details['types']
    return self._types
def icon(self):
    """Returns the URL of a recommended icon for display, cached after it
    has first been extracted from the place details."""
    details = self.details
    if self._icon == '' and details is not None and 'icon' in details:
        self._icon = details['icon']
    return self._icon
def name(self):
    """Returns the human-readable name of the place, lazily filled in
    from the details payload and cached thereafter."""
    if self._name == '' and self.details is not None:
        if 'name' in self.details:
            self._name = self.details['name']
    return self._name
def vicinity(self):
    """Returns a feature name of a nearby location, lazily cached from
    the details payload."""
    details = self.details
    available = details is not None and 'vicinity' in details
    if self._vicinity == '' and available:
        self._vicinity = details['vicinity']
    return self._vicinity
def rating(self):
    """Returns the Place's rating, from 0.0 to 5.0, based on user
    reviews; cached after the first successful lookup."""
    if self._rating == '':
        details = self.details
        if details is not None and 'rating' in details:
            self._rating = details['rating']
    return self._rating
def checkin(self):
    """Checks the anonymous user in to this place, delegating to the
    owning query instance's checkin endpoint."""
    query = self._query_instance
    query.checkin(self.place_id, query.sensor)
def get(self, maxheight=None, maxwidth=None, sensor=False):
    """Fetch the photo from the Places Photo API, storing the resulting
    mimetype, filename, data and url on this object.

    At least one of maxheight/maxwidth must be supplied, otherwise
    GooglePlacesError is raised.
    """
    if not (maxheight or maxwidth):
        raise GooglePlacesError('You must specify maxheight or maxwidth!')
    (self.mimetype, self.filename, self.data, self.url) = _get_place_photo(
        self.photo_reference, self._query_instance.api_key,
        maxheight=maxheight, maxwidth=maxwidth, sensor=sensor)
def to_png(data, size, level=6, output=None):
    """Dump raw 8-bit RGB pixel data to a PNG file.

    data   -- raw pixel bytes, 3 bytes (RGB) per pixel, row-major.
    size   -- (width, height) tuple.
    level  -- zlib compression level (default 6).
    output -- target filename; when None, return the whole PNG as bytes
              instead of writing a file.
    """
    width, height = size
    stride = width * 3  # bytes per scanline (8-bit RGB, no alpha)

    def chunk(tag, payload):
        # PNG chunk layout: 4-byte length, tag, payload, CRC over tag+payload.
        crc = zlib.crc32(tag + payload) & 0xFFFFFFFF
        return (struct.pack(">I", len(payload)) + tag + payload +
                struct.pack(">I", crc))

    # Each scanline is prefixed with filter type 0 (no filtering).
    filter_byte = struct.pack(">B", 0)
    raw = b"".join(filter_byte + data[row * stride:(row + 1) * stride]
                   for row in range(height))

    magic = struct.pack(">8B", 137, 80, 78, 71, 13, 10, 26, 10)
    # IHDR: width, height, bit depth 8, colour type 2 (truecolour),
    # default compression/filter/interlace.
    ihdr = chunk(b"IHDR", struct.pack(">2I5B", width, height, 8, 2, 0, 0, 0))
    idat = chunk(b"IDAT", zlib.compress(raw, level))
    iend = chunk(b"IEND", b"")

    if not output:
        return magic + ihdr + idat + iend
    with open(output, "wb") as fileh:
        fileh.write(magic)
        fileh.write(ihdr)
        fileh.write(idat)
        fileh.write(iend)
    return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.