code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def init_config(self, app):
    """Initialize configuration.

    :param app: The Flask application.
    """
    # Enable Celery-based account tasks only when celery is installed and
    # the app is not in debug/testing mode.
    try:
        pkg_resources.get_distribution('celery')
        app.config.setdefault(
            "ACCOUNTS_USE_CELERY", not (app.debug or app.testing))
    except pkg_resources.DistributionNotFound:  # pragma: no cover
        app.config.setdefault("ACCOUNTS_USE_CELERY", False)
    # Register Invenio legacy password hashing
    register_crypt_handler(InvenioAesEncryptedEmail)
    # Change Flask defaults
    app.config.setdefault('SESSION_COOKIE_SECURE', not app.debug)
    # Change Flask-Security defaults
    app.config.setdefault('SECURITY_PASSWORD_SALT', app.config['SECRET_KEY'])
    # Set JWT secret key (falls back to the app's SECRET_KEY)
    app.config.setdefault(
        'ACCOUNTS_JWT_SECRET_KEY',
        app.config.get(
            'ACCOUNTS_JWT_SECRET_KEY',
            app.config.get('SECRET_KEY')
        )
    )
    # Copy every ACCOUNTS*/SECURITY_* default from the config module.
    config_apps = ['ACCOUNTS', 'SECURITY_']
    for k in dir(config):
        if any([k.startswith(prefix) for prefix in config_apps]):
            app.config.setdefault(k, getattr(config, k))
    # Set Session KV store: Redis when configured, in-memory otherwise.
    if app.config.get('ACCOUNTS_SESSION_REDIS_URL'):
        import redis
        from simplekv.memory.redisstore import RedisStore
        session_kvstore = RedisStore(redis.StrictRedis.from_url(
            app.config['ACCOUNTS_SESSION_REDIS_URL']))
    else:
        from simplekv.memory import DictStore
        session_kvstore = DictStore()
    self.kvsession_extension = KVSessionExtension(session_kvstore, app)
3.11204
3.22873
0.963859
def _enable_session_activity(self, app):
    """Enable session activity.

    Connects login/logout signal listeners and exposes the security and
    session-revocation views on the settings blueprint.
    """
    user_logged_in.connect(login_listener, app)
    user_logged_out.connect(logout_listener, app)
    # Imported here rather than at module top — presumably to avoid a
    # circular import; confirm before moving.
    from .views.settings import blueprint
    from .views.security import security, revoke_session
    blueprint.route('/security/', methods=['GET'])(security)
    blueprint.route('/sessions/revoke/', methods=['POST'])(revoke_session)
4.615284
4.660278
0.990345
def init_app(self, app, sessionstore=None, register_blueprint=False):
    """Flask application initialization.

    :param app: The Flask application.
    :param sessionstore: store for sessions. Passed to ``flask-kvsession``.
        If ``None`` then Redis is configured. (Default: ``None``)
    :param register_blueprint: If ``True``, the application registers the
        blueprints. (Default: ``False``)
    """
    # NOTE: the original docstring claimed the default was ``True``; the
    # signature default for this REST variant is ``False``.
    return super(InvenioAccountsREST, self).init_app(
        app,
        sessionstore=sessionstore,
        register_blueprint=register_blueprint,
    )
3.627901
5.088491
0.712962
def init_app(self, app, sessionstore=None, register_blueprint=True):
    """Flask application initialization.

    :param app: The Flask application.
    :param sessionstore: store for sessions. Passed to ``flask-kvsession``.
        If ``None`` then Redis is configured. (Default: ``None``)
    :param register_blueprint: If ``True``, the application registers the
        blueprints. (Default: ``True``)
    """
    # Make sessions permanent before delegating to the base extension.
    self.make_session_permanent(app)
    return super(InvenioAccountsUI, self).init_app(
        app, sessionstore=sessionstore,
        register_blueprint=register_blueprint
    )
3.974547
4.830162
0.82286
def deactivate_user(self, user):
    """Deactivate a user and drop its active sessions.

    :param user: A :class:`invenio_accounts.models.User` instance.
    :returns: The result of the underlying datastore's ``deactivate_user``
        call (truthy on success) — not the datastore instance, as the
        original docstring claimed.
    """
    res = super(SessionAwareSQLAlchemyUserDatastore, self).deactivate_user(
        user)
    if res:
        # Revoke all sessions once the account is deactivated.
        delete_user_sessions(user)
    return res
6.861957
7.379364
0.929885
def _initialize_attributes(model_class, name, bases, attrs):
    """Initialize the attributes of the model.

    Collects every ``Attribute`` descriptor declared in ``attrs`` into
    ``model_class._attributes`` and names unnamed descriptors after the
    class attribute they are bound to.
    """
    model_class._attributes = {}
    # ``iteritems`` is Python 2 only; ``items`` works on both.
    for k, v in attrs.items():
        if isinstance(v, Attribute):
            model_class._attributes[k] = v
            v.name = v.name or k
2.686706
2.375508
1.131003
# this should be a descriptor def _related_objects(self): return (model_class.objects .filter(**{attribute.attname: self.id})) klass = attribute._target_type if isinstance(klass, basestring): return (klass, model_class, attribute) else: related_name = (attribute.related_name or model_class.__name__.lower() + '_set') setattr(klass, related_name, property(_related_objects))
def _initialize_referenced(model_class, attribute)
Adds a property to the target of a reference field that returns the list of associated objects.
4.937054
4.627656
1.066859
def _initialize_lists(model_class, name, bases, attrs):
    """Store the list field descriptors of a model in ``_lists``."""
    model_class._lists = {}
    # ``iteritems`` is Python 2 only; ``items`` works on both.
    for k, v in attrs.items():
        if isinstance(v, ListField):
            model_class._lists[k] = v
            v.name = v.name or k
2.891577
2.248547
1.285976
def _initialize_references(model_class, name, bases, attrs):
    """Store the reference field descriptors of a model.

    For every ``ReferenceField`` an auxiliary ``Attribute`` holding the
    referenced id is installed under the field's ``attname``.  Targets
    still given as strings are collected and returned for deferred wiring.
    """
    model_class._references = {}
    h = {}
    deferred = []
    # ``iteritems`` is Python 2 only; ``items`` works on both.
    for k, v in attrs.items():
        if isinstance(v, ReferenceField):
            model_class._references[k] = v
            v.name = v.name or k
            att = Attribute(name=v.attname)
            h[v.attname] = att
            setattr(model_class, v.attname, att)
            refd = _initialize_referenced(model_class, v)
            if refd:
                deferred.append(refd)
    # Merge the auxiliary id attributes after the loop so the dict is not
    # mutated while being iterated.
    attrs.update(h)
    return deferred
3.295097
3.06276
1.075859
def _initialize_indices(model_class, name, bases, attrs):
    """Store the list of indexed attribute names on the model class.

    Includes every indexed ``Attribute``/``ListField`` plus any extra
    indices declared in ``_meta['indices']``.
    """
    model_class._indices = []
    # ``iteritems`` is Python 2 only; ``items`` works on both.
    for k, v in attrs.items():
        if isinstance(v, (Attribute, ListField)) and v.indexed:
            model_class._indices.append(k)
    if model_class._meta['indices']:
        model_class._indices.extend(model_class._meta['indices'])
3.030425
2.646106
1.145239
def _initialize_counters(model_class, name, bases, attrs):
    """Store the names of the model's ``Counter`` fields."""
    model_class._counters = []
    # ``iteritems`` is Python 2 only; ``items`` works on both.
    for k, v in attrs.items():
        if isinstance(v, Counter):
            model_class._counters.append(k)
2.745583
2.100676
1.307
def get_model_from_key(key):
    """Return the model class whose name prefixes ``key``.

    Returns ``None`` when no direct ``Model`` subclass matches.
    """
    registry = {klass.__name__: klass for klass in Model.__subclasses__()}
    model_name = key.split(':', 2)[0]
    return registry.get(model_name, None)
4.084025
3.954953
1.032635
def from_key(key):
    """Return the model instance for ``key``.

    Raises ``BadKeyError`` if the key is not recognized by redisco or no
    defined model can be found.  Returns ``None`` if the key could not be
    found in the datastore.
    """
    model = get_model_from_key(key)
    if model is None:
        raise BadKeyError
    try:
        _, id = key.split(':', 2)
        id = int(id)
    # The original ``except ValueError, TypeError:`` (Python 2 syntax)
    # caught only ValueError and bound it to the name TypeError; both
    # exception types must be caught.
    except (ValueError, TypeError):
        raise BadKeyError
    return model.objects.get_by_id(id)
3.590904
3.133842
1.145847
def is_valid(self):
    """Return True if all the fields are valid.

    Validates each field (required, unique, etc.), then calls the
    model-level ``validate`` hook; errors accumulate in ``self._errors``.
    """
    self._errors = []
    for field in self.fields:
        try:
            field.validate(self)
        # ``except E, e`` is a Python 3 SyntaxError; use ``as``.
        except FieldValidationError as e:
            self._errors.extend(e.errors)
    self.validate()
    return not bool(self._errors)
3.235064
2.579498
1.254145
def update_attributes(self, **kwargs):
    """Update the model's fields from keyword arguments.

    Only keys matching a declared attribute, list or reference descriptor
    are applied; unknown keys are silently ignored.
    """
    # On Python 3 dict views cannot be concatenated with ``+``; convert
    # each group to a list first.
    attrs = (list(self.attributes.values()) + list(self.lists.values())
             + list(self.references.values()))
    for att in attrs:
        if att.name in kwargs:
            att.__set__(self, kwargs[att.name])
4.32256
3.932058
1.099312
def save(self):
    """Save the instance to the datastore.

    :returns: ``True`` on success, or the list of validation errors when
        the instance is invalid.
    """
    if not self.is_valid():
        return self._errors
    _new = self.is_new()
    if _new:
        # Fresh objects get an id from the class counter first.
        self._initialize_id()
    # Mutex guards against concurrent writers of the same instance.
    with Mutex(self):
        self._write(_new)
    return True
7.601161
6.646096
1.143703
def key(self, att=None):
    """Return the Redis key where the values are stored.

    With ``att`` given, returns the key for that single attribute;
    otherwise the key of the whole object hash.
    """
    base = self._key[self.id]
    return base if att is None else base[att]
3.249271
3.133296
1.037014
def delete(self):
    """Delete the object from the datastore.

    Removes the id from its indices and the class membership set, then
    deletes the object's hash — all queued on one pipeline.
    """
    pipe = self.db.pipeline()
    self._delete_from_indices(pipe)
    self._delete_membership(pipe)
    pipe.delete(self.key())
    pipe.execute()
5.540074
4.447626
1.245625
def incr(self, att, val=1):
    """Increment the counter field ``att`` by ``val`` (default 1).

    :raises ValueError: if ``att`` is not a declared counter.
    """
    if att not in self.counters:
        # The original message lacked the ``% att`` interpolation and
        # printed the literal "%s".
        raise ValueError("%s is not a counter." % att)
    self.db.hincrby(self.key(), att, val)
5.781158
4.163088
1.388671
def attributes_dict(self):
    """Return a mapping of the model's field names to their values."""
    result = {}
    for group in (self.attributes, self.lists, self.references):
        for name in group:
            result[name] = getattr(self, name)
    return result
2.324596
2.241327
1.037152
def fields(self):
    """Return the list of field descriptors of the model.

    Concatenates attribute, list and reference descriptors, in that order.
    """
    # dict.values() returns a view on Python 3 and cannot be concatenated
    # with ``+``; convert each group to a list first.
    return (list(self.attributes.values()) + list(self.lists.values())
            + list(self.references.values()))
10.220922
8.133998
1.256568
def exists(cls, id):
    """Check whether a model instance with ``id`` exists in Redis.

    True when either the object hash or its membership entry is present.
    """
    client = redisco.get_client()
    sid = str(id)
    found = (client.exists(cls._key[sid]) or
             client.sismember(cls._key['all'], sid))
    return bool(found)
8.022629
7.199047
1.114402
self.id = str(self.db.incr(self._key['id']))
def _initialize_id(self)
Initializes the id of the instance.
14.527751
10.475152
1.386877
def _write(self, _new=False):
    """Write the attribute values of the instance to the datastore.

    Also (re)creates the indices and saves the lists associated with the
    object; everything is queued on one pipeline and executed at the end.

    NOTE(review): uses ``iteritems``/``unicode`` — Python 2 only code.
    """
    pipeline = self.db.pipeline()
    self._create_membership(pipeline)
    self._update_indices(pipeline)
    h = {}
    # attributes
    for k, v in self.attributes.iteritems():
        # auto_now timestamps refresh on every save; auto_now_add only
        # when the object is new.
        if isinstance(v, DateTimeField):
            if v.auto_now:
                setattr(self, k, datetime.now())
            if v.auto_now_add and _new:
                setattr(self, k, datetime.now())
        elif isinstance(v, DateField):
            if v.auto_now:
                setattr(self, k, date.today())
            if v.auto_now_add and _new:
                setattr(self, k, date.today())
        for_storage = getattr(self, k)
        if for_storage is not None:
            h[k] = v.typecast_for_storage(for_storage)
    # indices
    for index in self.indices:
        if index not in self.lists and index not in self.attributes:
            v = getattr(self, index)
            if callable(v):
                v = v()
            if v:
                try:
                    h[index] = unicode(v)
                except UnicodeError:
                    h[index] = unicode(v.decode('utf-8'))
    # Delete first so fields removed from the object disappear from Redis.
    pipeline.delete(self.key())
    if h:
        pipeline.hmset(self.key(), h)
    # lists
    for k, v in self.lists.iteritems():
        l = List(self.key()[k], pipeline=pipeline)
        l.clear()
        values = getattr(self, k)
        if values:
            if v._redisco_model:
                # Lists of model instances store only the ids.
                l.extend([item.id for item in values])
            else:
                l.extend(values)
    pipeline.execute()
2.79117
2.692048
1.03682
def _create_membership(self, pipeline=None):
    """Add this object's id to the set of all ids of its class."""
    all_ids = Set(self._key['all'], pipeline=pipeline)
    all_ids.add(self.id)
41.294647
19.797241
2.085879
def _delete_membership(self, pipeline=None):
    """Remove this object's id from the set of all ids of its class."""
    all_ids = Set(self._key['all'], pipeline=pipeline)
    all_ids.remove(self.id)
35.428768
21.381968
1.656946
for att in self.indices: self._add_to_index(att, pipeline=pipeline)
def _add_to_indices(self, pipeline)
Adds the base64 encoded values of the indices.
7.503694
5.250398
1.429167
def _add_to_index(self, att, val=None, pipeline=None):
    """Add the instance id to the index for attribute ``att``.

    The index key is also recorded in the object's ``_indices`` (or
    ``_zindices``) housekeeping set so it can be cleaned up later.
    NOTE(review): ``pipeline.zadd(zindex, self.id, score)`` uses the
    pre-3.0 redis-py positional calling convention — confirm the client
    version in use.
    """
    index = self._index_key_for(att)
    if index is None:
        return
    t, index = index
    if t == 'attribute':
        pipeline.sadd(index, self.id)
        pipeline.sadd(self.key()['_indices'], index)
    elif t == 'list':
        # A list attribute contributes one index key per element.
        for i in index:
            pipeline.sadd(i, self.id)
            pipeline.sadd(self.key()['_indices'], i)
    elif t == 'sortedset':
        zindex, index = index
        pipeline.sadd(index, self.id)
        pipeline.sadd(self.key()['_indices'], index)
        descriptor = self.attributes[att]
        score = descriptor.typecast_for_storage(getattr(self, att))
        pipeline.zadd(zindex, self.id, score)
        pipeline.sadd(self.key()['_zindices'], zindex)
3.065551
3.032485
1.010904
def _delete_from_indices(self, pipeline):
    """Remove this id from every index it was added to.

    Also deletes the housekeeping sets that track those indices.
    """
    plain = Set(self.key()['_indices'])
    scored = Set(self.key()['_zindices'])
    for index_key in plain.members:
        pipeline.srem(index_key, self.id)
    for index_key in scored.members:
        pipeline.zrem(index_key, self.id)
    pipeline.delete(plain.key)
    pipeline.delete(scored.key)
3.872491
3.270473
1.184077
if value is None: value = getattr(self, att) if callable(value): value = value() if value is None: return None if att not in self.lists: return self._get_index_key_for_non_list_attr(att, value) else: return self._tuple_for_index_key_attr_list(att, value)
def _index_key_for(self, att, value=None)
Returns a key based on the attribute and its value. The key is used for indexing.
3.082154
3.371199
0.91426
def isdisjoint(self, other):
    """Return True if the set has no elements in common with ``other``."""
    common = self.db.sinter([self.key, other.key])
    return not bool(common)
9.793269
9.656387
1.014175
def union(self, key, *others):
    """Store the union of this set and ``others`` at ``key``.

    :returns: a new ``Set`` wrapping ``key``.
    :raises ValueError: when ``key`` is not a string.
    """
    if not isinstance(key, str):
        raise ValueError("String expected.")
    source_keys = [self.key] + [o.key for o in others]
    self.db.sunionstore(key, source_keys)
    return Set(key)
5.203618
4.919995
1.057647
def intersection(self, key, *others):
    """Store the intersection of this set and ``others`` at ``key``.

    :returns: a new ``Set`` wrapping ``key``.
    :raises ValueError: when ``key`` is not a string.
    """
    if not isinstance(key, str):
        raise ValueError("String expected.")
    source_keys = [self.key] + [o.key for o in others]
    self.db.sinterstore(key, source_keys)
    return Set(key)
4.71969
4.732172
0.997362
def difference(self, key, *others):
    """Store at ``key`` the elements of this set that are in none of
    ``others``.

    :returns: a new ``Set`` wrapping ``key``.
    :raises ValueError: when ``key`` is not a string.
    """
    if not isinstance(key, str):
        raise ValueError("String expected.")
    source_keys = [self.key] + [o.key for o in others]
    self.db.sdiffstore(key, source_keys)
    return Set(key)
5.102868
5.00702
1.019143
def update(self, *others):
    """Update the set in place, adding elements from all ``others``."""
    keys = [self.key] + [o.key for o in others]
    self.db.sunionstore(self.key, keys)
6.413672
5.478011
1.170803
def intersection_update(self, *others):
    """Update the set in place, keeping only elements found in it and all
    ``others``.
    """
    # The original iterated ``[self.key] + others`` — a list + tuple
    # TypeError — and then read ``.key`` off the raw key string.  Build
    # the key list the same way the other set operations do.
    keys = [self.key] + [o.key for o in others]
    self.db.sinterstore(self.key, keys)
7.989538
7.305034
1.093703
def difference_update(self, *others):
    """Update the set in place, removing elements found in ``others``."""
    # Same fix as intersection_update: the original mixed the raw key
    # string into the ``o.key`` comprehension and concatenated a list
    # with the ``others`` tuple — both TypeErrors.
    keys = [self.key] + [o.key for o in others]
    self.db.sdiffstore(self.key, keys)
9.360932
8.872879
1.055005
def copy(self, key):
    """Copy the set to ``key`` and return the new Set.

    WARNING: an existing set at ``key`` is overwritten.
    """
    duplicate = Set(key=key, db=self.db)
    duplicate.clear()
    duplicate |= self
    return duplicate
7.473859
7.826428
0.954951
def sinter(self, *other_sets):
    """Return the members common to this set and ``other_sets``.

    Uses Redis ``SINTER``.
    """
    keys = [self.key]
    keys.extend(s.key for s in other_sets)
    return self.db.sinter(keys)
4.380291
4.664478
0.939074
def sunion(self, *other_sets):
    """Return the union of this set with ``other_sets``.

    Uses Redis ``SUNION``.  (The original docstring incorrectly described
    the result as the *common* members.)
    """
    return self.db.sunion([self.key] + [s.key for s in other_sets])
4.641077
4.266571
1.087777
def sdiff(self, *other_sets):
    """Return the members of this set that are in none of ``other_sets``.

    Uses Redis ``SDIFF``.  (The original docstring incorrectly said
    "Union".)
    """
    return self.db.sdiff([self.key] + [s.key for s in other_sets])
4.847445
4.539817
1.067762
def reverse(self):
    """Reverse the list in place."""
    items_reversed = self[:]
    items_reversed.reverse()
    self.clear()
    self.extend(items_reversed)
5.071305
3.734762
1.357866
def copy(self, key):
    """Copy this list into ``key`` and return the new List.

    WARNING: an existing list at ``key`` is cleared before copying.
    """
    duplicate = List(key, self.db)
    duplicate.clear()
    duplicate.extend(self)
    return duplicate
6.741955
7.494527
0.899584
def lt(self, v, limit=None, offset=None):
    """Return the members whose scores are strictly less than ``v``.

    ``limit``/``offset`` paginate the result; a limit without an offset
    implies offset 0.
    """
    if offset is None and limit is not None:
        offset = 0
    upper_bound = "(%f" % v
    return self.zrangebyscore(self._min_score, upper_bound,
                              start=offset, num=limit)
5.840318
5.004502
1.167013
def gt(self, v, limit=None, offset=None):
    """Return the members whose scores are strictly greater than ``v``.

    ``limit``/``offset`` paginate the result; a limit without an offset
    implies offset 0.
    """
    if offset is None and limit is not None:
        offset = 0
    lower_bound = "(%f" % v
    return self.zrangebyscore(lower_bound, self._max_score,
                              start=offset, num=limit)
6.324362
5.610454
1.127246
def between(self, min, max, limit=None, offset=None):
    """Return the members whose scores lie between ``min`` and ``max``.

    ``limit``/``offset`` paginate the result; a limit without an offset
    implies offset 0.
    """
    if offset is None and limit is not None:
        offset = 0
    return self.zrangebyscore(min, max, start=offset, num=limit)
3.911377
3.398615
1.150874
def hex_to_bytes(s):
    """Convert a hex string to bytes, zero-padding odd-length input."""
    if len(s) % 2:
        s = b'0' + s
    byte_values = [int(s[i:i + 2], 16) for i in range(0, len(s), 2)]
    if PYTHON_MAJOR_VER == 3:
        return bs(byte_values)
    return b''.join([chr(c) for c in byte_values])
2.988513
2.807219
1.064581
def client_seed(a=None):
    """Generate the client's SRP key pair.

    :param a: optional explicit private key; drawn at random when omitted.
    :returns: ``(A, a)`` — client public and private keys.
    """
    # The original signature put ``random.randrange(...)`` in the default
    # argument, which is evaluated once at def time — every defaulted
    # call reused the same "random" private key.  Draw it per call.
    if a is None:
        a = random.randrange(0, 1 << SRP_KEY_SIZE)
    if DEBUG:
        a = DEBUG_PRIVATE_KEY
    N, g, k = get_prime()
    A = pow(g, a, N)
    if DEBUG_PRINT:
        print('a=', binascii.b2a_hex(long2bytes(a)), end='\n')
        print('A=', binascii.b2a_hex(long2bytes(A)), end='\n')
    return A, a
3.976847
3.962
1.003747
def server_seed(v, b=None):
    """Generate the server's SRP key pair from the verifier ``v``.

    :param v: the password verifier.
    :param b: optional explicit private key; drawn at random when omitted.
    :returns: ``(B, b)`` — server public and private keys.
    """
    # Draw the private key per call: a ``random.randrange`` default
    # argument is evaluated only once at import time.
    if b is None:
        b = random.randrange(0, 1 << SRP_KEY_SIZE)
    N, g, k = get_prime()
    if DEBUG:
        b = DEBUG_PRIVATE_KEY
    gb = pow(g, b, N)
    kv = (k * v) % N
    B = (kv + gb) % N
    if DEBUG_PRINT:
        print("v", binascii.b2a_hex(long2bytes(v)), end='\n')
        print('b=', binascii.b2a_hex(long2bytes(b)), end='\n')
        print("gb", binascii.b2a_hex(long2bytes(gb)), end='\n')
        print("k", binascii.b2a_hex(long2bytes(k)), end='\n')
        print("v", binascii.b2a_hex(long2bytes(v)), end='\n')
        print("kv", binascii.b2a_hex(long2bytes(kv)), end='\n')
        print('B=', binascii.b2a_hex(long2bytes(B)), end='\n')
    return B, b
2.382942
2.382256
1.000288
def client_session(user, password, salt, A, B, a):
    """Compute the client's session key.

    Both: u = H(A, B)
    User: x = H(s, p)                 (user enters password)
    User: S = (B - kg^x) ^ (a + ux)   (computes session key)
    User: K = H(S)
    """
    N, g, k = get_prime()
    u = get_scramble(A, B)
    x = getUserHash(salt, user, password)  # x
    gx = pow(g, x, N)  # g^x
    kgx = (k * gx) % N  # kg^x
    diff = (B - kgx) % N  # B - kg^x
    ux = (u * x) % N
    aux = (a + ux) % N
    session_secret = pow(diff, aux, N)  # (B - kg^x) ^ (a + ux)
    K = hash_digest(hashlib.sha1, session_secret)
    if DEBUG_PRINT:
        print('B=', binascii.b2a_hex(long2bytes(B)), end='\n')
        print('u=', binascii.b2a_hex(long2bytes(u)), end='\n')
        print('x=', binascii.b2a_hex(long2bytes(x)), end='\n')
        print('gx=', binascii.b2a_hex(long2bytes(gx)), end='\n')
        print('kgx=', binascii.b2a_hex(long2bytes(kgx)), end='\n')
        print('diff=', binascii.b2a_hex(long2bytes(diff)), end='\n')
        print('ux=', binascii.b2a_hex(long2bytes(ux)), end='\n')
        print('aux=', binascii.b2a_hex(long2bytes(aux)), end='\n')
        print('session_secret=',
              binascii.b2a_hex(long2bytes(session_secret)), end='\n')
        print('session_key:K=', binascii.b2a_hex(K))
    return K
2.371457
2.297375
1.032246
def server_session(user, password, salt, A, B, b):
    """Compute the server's session key.

    Both: u = H(A, B)
    Host: S = (Av^u) ^ b   (computes session key)
    Host: K = H(S)
    """
    N, g, k = get_prime()
    u = get_scramble(A, B)
    v = get_verifier(user, password, salt)
    vu = pow(v, u, N)  # v^u
    Avu = (A * vu) % N  # Av^u
    session_secret = pow(Avu, b, N)  # (Av^u) ^ b
    K = hash_digest(hashlib.sha1, session_secret)
    if DEBUG_PRINT:
        print('server session_secret=',
              binascii.b2a_hex(long2bytes(session_secret)), end='\n')
        print('server session hash K=', binascii.b2a_hex(K))
    return K
5.199374
4.991116
1.041726
def client_proof(user, password, salt, A, B, a, hash_algo):
    """Compute the client proof M and session key K.

    M = H(H(N) xor H(g), H(I), s, A, B, K)

    NOTE(review): the "xor" of the formula is implemented here as modular
    exponentiation ``pow(n1, n2, N)`` — confirm this matches the server's
    proof computation.
    """
    N, g, k = get_prime()
    K = client_session(user, password, salt, A, B, a)
    n1 = bytes2long(hash_digest(hashlib.sha1, N))
    n2 = bytes2long(hash_digest(hashlib.sha1, g))
    if DEBUG_PRINT:
        print('n1-1=', binascii.b2a_hex(long2bytes(n1)), end='\n')
        print('n2-1=', binascii.b2a_hex(long2bytes(n2)), end='\n')
    n1 = pow(n1, n2, N)
    n2 = bytes2long(hash_digest(hashlib.sha1, user))
    M = hash_digest(hash_algo, n1, n2, salt, A, B, K)
    if DEBUG_PRINT:
        print('n1-2=', binascii.b2a_hex(long2bytes(n1)), end='\n')
        print('n2-2=', binascii.b2a_hex(long2bytes(n2)), end='\n')
        print('client_proof:M=', binascii.b2a_hex(M), end='\n')
    return M, K
2.405763
2.284963
1.052867
def calc_significand(prefix, dpd_bits, num_bits):
    """Decode a densely-packed-decimal significand.

    :param prefix: high-bits integer value
    :param dpd_bits: DPD encoded bits
    :param num_bits: bit length of ``dpd_bits``
    """
    # https://en.wikipedia.org/wiki/Decimal128_floating-point_format#Densely_packed_decimal_significand_field
    segments = []
    for _ in range(num_bits // 10):
        segments.append(dpd_bits & 0b1111111111)
        dpd_bits >>= 10
    # Segments were collected low-to-high; consume them high-to-low.
    value = prefix
    for dpd in reversed(segments):
        value = value * 1000 + dpd_to_int(dpd)
    return value
3.478464
3.402912
1.022202
"decimal128 bytes to Decimal" v = decimal128_to_sign_digits_exponent(b) if isinstance(v, Decimal): return v sign, digits, exponent = v return Decimal((sign, Decimal(digits).as_tuple()[1], exponent))
def decimal128_to_decimal(b)
decimal128 bytes to Decimal
4.982069
4.502043
1.106624
def error_response(response):
    """Raise the geocod.io exception matching the response status code."""
    status = response.status_code
    if status >= 500:
        raise exceptions.GeocodioServerError
    if status == 403:
        raise exceptions.GeocodioAuthError
    if status == 422:
        raise exceptions.GeocodioDataError(response.json()["error"])
    raise exceptions.GeocodioError(
        "Unknown service error (HTTP {0})".format(status)
    )
2.840019
2.714736
1.046149
def _req(self, method="get", verb=None, headers=None, params=None, data=None):
    """Build and send an API request.

    :return: a Response object based on the specified method and request
        values.
    """
    # The original used mutable ``{}`` defaults, which are shared across
    # calls; ``None`` sentinels avoid accidental cross-call state.
    url = self.BASE_URL.format(verb=verb)
    request_headers = {"content-type": "application/json"}
    request_params = {"api_key": self.API_KEY}
    request_headers.update(headers or {})
    request_params.update(params or {})
    return getattr(requests, method)(
        url, params=request_params, headers=request_headers, data=data
    )
2.305299
2.472615
0.932333
def parse(self, address):
    """Return an Address dictionary with the components of the queried
    address.

    >>> client = GeocodioClient('some_api_key')
    >>> client.parse("1600 Pennsylvania Ave, Washington DC")
    {
        "address_components": {
            "number": "1600",
            "street": "Pennsylvania",
            "suffix": "Ave",
            "city": "Washington",
            "state": "DC"
        },
        "formatted_address": "1600 Pennsylvania Ave, Washington DC"
    }
    """
    response = self._req(verb="parse", params={"q": address})
    if response.status_code == 200:
        return Address(response.json())
    return error_response(response)
5.498417
0.981907
fields = ",".join(kwargs.pop("fields", [])) response = self._req( "post", verb="geocode", params={"fields": fields}, data=json.dumps(addresses), ) if response.status_code != 200: return error_response(response) return LocationCollection(response.json()["results"])
def batch_geocode(self, addresses, **kwargs)
Returns an Address dictionary with the components of the queried address.
4.046661
3.978974
1.017011
fields = ",".join(kwargs.pop("fields", [])) response = self._req(verb="geocode", params={"q": address, "fields": fields}) if response.status_code != 200: return error_response(response) return Location(response.json())
def geocode_address(self, address, **kwargs)
Returns a Location dictionary with the components of the queried address and the geocoded location. >>> client = GeocodioClient('some_api_key') >>> client.geocode("1600 Pennsylvania Ave, Washington DC") { "input": { "address_components": { "number": "1600", "street": "Pennsylvania", "suffix": "Ave", "city": "Washington", "state": "DC" }, "formatted_address": "1600 Pennsylvania Ave, Washington DC" }, "results": [ { "address_components": { "number": "1600", "street": "Pennsylvania", "suffix": "Ave", "city": "Washington", "state": "DC", "zip": "20500" }, "formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500", "location": { "lat": 38.897700000000, "lng": -77.03650000000, }, "accuracy": 1 }, { "address_components": { "number": "1600", "street": "Pennsylvania", "suffix": "Ave", "city": "Washington", "state": "DC", "zip": "20500" }, "formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500", "location": { "lat": 38.897700000000, "lng": -77.03650000000, }, "accuracy": 0.8 } ] }
4.223374
4.764266
0.886469
def geocode(self, address_data, **kwargs):
    """Geocode a single address string or a list of addresses.

    Single entry point for end users: lists are dispatched to
    ``batch_geocode``, everything else to ``geocode_address``.
    """
    handler = (self.batch_geocode if isinstance(address_data, list)
               else self.geocode_address)
    return handler(address_data, **kwargs)
2.53559
2.633379
0.962866
fields = ",".join(kwargs.pop("fields", [])) point_param = "{0},{1}".format(latitude, longitude) response = self._req( verb="reverse", params={"q": point_param, "fields": fields} ) if response.status_code != 200: return error_response(response) return Location(response.json())
def reverse_point(self, latitude, longitude, **kwargs)
Method for identifying an address from a geographic point
4.029677
3.947361
1.020853
fields = ",".join(kwargs.pop("fields", [])) response = self._req( "post", verb="reverse", params={"fields": fields}, data=json_points(points) ) if response.status_code != 200: return error_response(response) logger.debug(response) return LocationCollection(response.json()["results"])
def batch_reverse(self, points, **kwargs)
Method for identifying the addresses from a list of lat/lng tuples
5.991525
5.171449
1.158578
def reverse(self, points, **kwargs):
    """Reverse-geocode one point or a list of points.

    A single point is a pair ordered per ``self.order`` ("lat" means
    (latitude, longitude)); lists are dispatched to ``batch_reverse``::

        >>> multiple_locations = reverse([(40, -19), (43, 112)])
        >>> single_location = reverse((40, -19))
    """
    if isinstance(points, list):
        return self.batch_reverse(points, **kwargs)
    if self.order == "lat":
        lat, lng = points
    else:
        lng, lat = points
    return self.reverse_point(lat, lng, **kwargs)
3.691916
4.643907
0.795002
"convert str to bytes" if (PYTHON_MAJOR_VER == 3 or (PYTHON_MAJOR_VER == 2 and type(s) == unicode)): return s.encode(charset_map.get(self.charset, self.charset)) return s
def str_to_bytes(self, s)
convert str to bytes
4.221365
4.024836
1.048829
"convert bytes array to raw string" if PYTHON_MAJOR_VER == 3: return b.decode(charset_map.get(self.charset, self.charset)) return b
def bytes_to_str(self, b)
convert bytes array to raw string
6.10984
5.394749
1.132553
"convert bytes array to unicode string" return b.decode(charset_map.get(self.charset, self.charset))
def bytes_to_ustr(self, b)
convert bytes array to unicode string
6.610196
6.474426
1.02097
x, y = ("lat", "lng") if self.order == "lat" else ("lng", "lat") try: return (self["location"][x], self["location"][y]) except KeyError: return None
def coords(self)
Returns a tuple representing the location of the address in a GIS coords format, i.e. (longitude, latitude).
4.813442
4.984734
0.965637
def get(self, key):
    """Return an individual Location by query lookup.

    ``key`` is either the original address string or a coordinate pair.
    """
    if isinstance(key, tuple):
        # TODO handle different ordering
        try:
            pair = (float(key[0]), float(key[1]))
        except IndexError:
            raise ValueError("Two values are required for a coordinate pair")
        except ValueError:
            raise ValueError("Only float or float-coercable values can be passed")
        key = "{0},{1}".format(*pair)
    return self[self.lookups[key]]
6.3988
5.876751
1.088833
"Calculate BLR from XSQLVAR array." ln = len(xsqlda) * 2 blr = [5, 2, 4, 0, ln & 255, ln >> 8] for x in xsqlda: sqltype = x.sqltype if sqltype == SQL_TYPE_VARYING: blr += [37, x.sqllen & 255, x.sqllen >> 8] elif sqltype == SQL_TYPE_TEXT: blr += [14, x.sqllen & 255, x.sqllen >> 8] elif sqltype == SQL_TYPE_LONG: blr += [8, x.sqlscale] elif sqltype == SQL_TYPE_SHORT: blr += [7, x.sqlscale] elif sqltype == SQL_TYPE_INT64: blr += [16, x.sqlscale] elif sqltype == SQL_TYPE_QUAD: blr += [9, x.sqlscale] elif sqltype == SQL_TYPE_DEC_FIXED: blr += [26, x.sqlscale] else: blr += sqltype2blr[sqltype] blr += [7, 0] # [blr_short, 0] blr += [255, 76] # [blr_end, blr_eoc] # x.sqlscale value shoud be negative, so b convert to range(0, 256) return bs(256 + b if b < 0 else b for b in blr)
def calc_blr(xsqlda)
Calculate BLR from XSQLVAR array.
3.728809
3.487613
1.069158
"Convert raw data to datetime.date" nday = bytes_to_bint(raw_value) + 678882 century = (4 * nday - 1) // 146097 nday = 4 * nday - 1 - 146097 * century day = nday // 4 nday = (4 * day + 3) // 1461 day = 4 * day + 3 - 1461 * nday day = (day + 4) // 4 month = (5 * day - 3) // 153 day = 5 * day - 3 - 153 * month day = (day + 5) // 5 year = 100 * century + nday if month < 10: month += 3 else: month -= 9 year += 1 return year, month, day
def _parse_date(self, raw_value)
Convert raw data to datetime.date
2.912012
2.802736
1.038989
"Convert raw data to datetime.time" n = bytes_to_bint(raw_value) s = n // 10000 m = s // 60 h = m // 60 m = m % 60 s = s % 60 return (h, m, s, (n % 10000) * 100)
def _parse_time(self, raw_value)
Convert raw data to datetime.time
3.426155
3.176691
1.078529
def execute_update(args):
    """Execute the update based on command line args.

    :returns: a dict with 'exec_result', 'response_code', 'response_text'
        and a user-friendly 'process_message'.
    """
    provider_class = getattr(dnsupdater,
                             dnsupdater.AVAILABLE_PLUGINS.get(args.provider))
    updater_options = {}
    process_message = None
    auth = None
    if args.store:  # --store argument
        # Prompt for whatever credential pieces were not given on the CLI.
        if provider_class.auth_type == 'T':
            user_arg = args.usertoken or utils.read_input(
                "Paste your auth token: ")
            auth = authinfo.ApiAuth(usertoken=user_arg)
        else:
            user_arg = args.usertoken or utils.read_input(
                "Type your username: ")
            pass_arg = args.password or getpass.getpass("Type your password: ")
            auth = authinfo.ApiAuth(user_arg, pass_arg)
        authinfo.store(auth, args.provider, args.config)
        exec_result = EXECUTION_RESULT_OK
        if not args.hostname:
            update_ddns = False
            process_message = "Auth info stored."
        else:
            update_ddns = True
    # informations arguments
    elif args.usertoken and args.hostname:
        if provider_class.auth_type == 'T':
            auth = authinfo.ApiAuth(args.usertoken)
        else:
            auth = authinfo.ApiAuth(args.usertoken, args.password)
        update_ddns = True
        exec_result = EXECUTION_RESULT_OK
    elif args.hostname:
        # No credentials on the CLI: fall back to previously stored auth.
        if authinfo.exists(args.provider, args.config):
            auth = authinfo.load(args.provider, args.config)
            update_ddns = True
            exec_result = EXECUTION_RESULT_OK
        else:
            update_ddns = False
            exec_result = EXECUTION_RESULT_NOK
            process_message = "No stored auth information found for " \
                              "provider: '%s'" % args.provider
    else:  # no arguments
        update_ddns = False
        exec_result = EXECUTION_RESULT_NOK
        process_message = "Warning: The hostname to be updated must be " \
                          "provided.\nUsertoken and password can be either " \
                          "provided via command line or stored with --store " \
                          "option.\nExecute noipy --help for more details."
    # The generic provider additionally requires a well-formed --url.
    if update_ddns and args.provider == 'generic':
        if args.url:
            if not URL_RE.match(args.url):
                process_message = "Malformed URL."
                exec_result = EXECUTION_RESULT_NOK
                update_ddns = False
            else:
                updater_options['url'] = args.url
        else:
            process_message = "Must use --url if --provider is 'generic' " \
                              "(default)"
            exec_result = EXECUTION_RESULT_NOK
            update_ddns = False
    response_code = None
    response_text = None
    if update_ddns:
        ip_address = args.ip if args.ip else utils.get_ip()
        if not ip_address:
            process_message = "Unable to get IP address. Check connection."
            exec_result = EXECUTION_RESULT_NOK
        elif ip_address == utils.get_dns_ip(args.hostname):
            # DNS already points at this IP; skip the provider call.
            process_message = "No update required."
        else:
            updater = provider_class(auth, args.hostname, updater_options)
            print("Updating hostname '%s' with IP address %s "
                  "[provider: '%s']..."
                  % (args.hostname, ip_address, args.provider))
            response_code, response_text = updater.update_dns(ip_address)
            process_message = updater.status_message
    proc_result = {
        'exec_result': exec_result,
        'response_code': response_code,
        'response_text': response_text,
        'process_message': process_message,
    }
    return proc_result
2.922363
2.871948
1.017554
def update_dns(self, new_ip):
    """Call the provider's update API and return ``(status_code, text)``.

    Token-authenticated providers ('T') embed the token in the URL;
    otherwise HTTP Basic auth headers are sent.
    """
    headers = None
    if self.auth_type == 'T':
        api_call_url = self._base_url.format(hostname=self.hostname,
                                             token=self.auth.token,
                                             ip=new_ip)
    else:
        api_call_url = self._base_url.format(hostname=self.hostname,
                                             ip=new_ip)
        headers = {
            'Authorization':
                "Basic %s" % self.auth.base64key.decode('utf-8'),
            'User-Agent': "%s/%s %s" % (__title__, __version__, __email__)
        }
    r = requests.get(api_call_url, headers=headers)
    # Cache the trimmed body; status_message reads it later.
    self.last_ddns_response = str(r.text).strip()
    return r.status_code, r.text
3.156777
3.060696
1.031392
def status_message(self):
    """Return a friendly message for the last DDNS API response."""
    response = self.last_ddns_response
    if response in response_messages.keys():
        return response_messages.get(response)
    if 'good' in response:
        ip = re.search(r'(\d{1,3}\.?){4}', response).group()
        return "SUCCESS: DNS hostname IP (%s) successfully updated." % ip
    if 'nochg' in response:
        ip = re.search(r'(\d{1,3}\.?){4}', response).group()
        return ("SUCCESS: IP address (%s) is up to date, nothing was changed. "
                "Additional 'nochg' updates may be considered abusive." % ip)
    return "ERROR: Ooops! Something went wrong !!!"
3.92711
3.66349
1.071959
def get_ip():
    """Return the machine's origin IP address.

    :returns: the IP string, or ``None`` when the service is unreachable
        or replies with a non-200 status.
    """
    try:
        r = requests.get(HTTPBIN_URL)
        if r.status_code != 200:
            return None
        # httpbin's "origin" may be a single IP or "ip, ip"; the original
        # ``ip, _ = origin.split(',')`` raised ValueError when there was
        # no comma.  Take the first entry either way.
        return r.json()['origin'].split(',')[0].strip()
    except requests.exceptions.ConnectionError:
        return None
3.677704
3.893425
0.944593
auth_file = None try: # only for custom locations _create_config_dir(config_location, "Creating custom config directory [%s]... ") config_dir = os.path.join(config_location, NOIPY_CONFIG) _create_config_dir(config_dir, "Creating directory [%s]... ") auth_file = os.path.join(config_dir, provider) print("Creating auth info file [%s]... " % auth_file, end="") with open(auth_file, 'w') as f: buff = auth.base64key.decode('utf-8') f.write(buff) print("OK.") except IOError as e: print('{0}: "{1}"'.format(e.strerror, auth_file)) raise e
def store(auth, provider, config_location=DEFAULT_CONFIG_DIR)
Store auth info in file for specified provider
4.18837
3.972243
1.054409
auth = None auth_file = None try: config_dir = os.path.join(config_location, NOIPY_CONFIG) print("Loading stored auth info [%s]... " % config_dir, end="") auth_file = os.path.join(config_dir, provider) with open(auth_file) as f: auth_key = f.read() auth = ApiAuth.get_instance(auth_key.encode('utf-8')) print("OK.") except IOError as e: print('{0}: "{1}"'.format(e.strerror, auth_file)) raise e return auth
def load(provider, config_location=DEFAULT_CONFIG_DIR)
Load provider specific auth info from file
3.888122
3.62799
1.071701
config_dir = os.path.join(config_location, NOIPY_CONFIG) auth_file = os.path.join(config_dir, provider) return os.path.exists(auth_file)
def exists(provider, config_location=DEFAULT_CONFIG_DIR)
Check whether provider info is already stored
3.831116
3.524332
1.087048
login_str = base64.b64decode(encoded_key).decode('utf-8') usertoken, password = login_str.strip().split(':', 1) instance = cls(usertoken, password) return instance
def get_instance(cls, encoded_key)
Return an ApiAuth instance from an encoded key
3.930762
3.646552
1.077939
''' Deletes an item from the index. :param item: must be a serializable object. :param model_name: doctype, which must also be the model name. :param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True. ''' src = Bungiesearch() logger.info('Getting index for model {}.'.format(model_name)) for index_name in src.get_index(model_name): index_instance = src.get_model_index(model_name) item_es_id = index_instance.fields['_id'].value(item) try: src.get_es_instance().delete(index_name, model_name, item_es_id) except NotFoundError as e: logger.warning('NotFoundError: could not delete {}.{} from index {}: {}.'.format(model_name, item_es_id, index_name, str(e))) if refresh: src.get_es_instance().indices.refresh(index=index_name)
def delete_index_item(item, model_name, refresh=True)
Deletes an item from the index. :param item: must be a serializable object. :param model_name: doctype, which must also be the model name. :param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True.
4.694196
2.389341
1.964641
''' Creates the document that will be passed into the bulk index function. Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete. ''' data = [] if action == 'delete': for pk in model_items: data.append({'_id': pk, '_op_type': action}) else: for doc in model_items: if index_instance.matches_indexing_condition(doc): data.append(index_instance.serialize_object(doc)) return data
def create_indexed_document(index_instance, model_items, action)
Creates the document that will be passed into the bulk index function. Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete.
5.092267
2.302465
2.211658
''' Filters the model items queryset based on start and end date.''' if index_instance.updated_field is None: logger.warning("No updated date field found for {} - not restricting with start and end date".format(model_name)) else: if start_date: model_items = model_items.filter(**{'{}__gte'.format(index_instance.updated_field): __str_to_tzdate__(start_date)}) if end_date: model_items = model_items.filter(**{'{}__lte'.format(index_instance.updated_field): __str_to_tzdate__(end_date)}) return model_items
def filter_model_items(index_instance, model_items, model_name, start_date, end_date)
Filters the model items queryset based on start and end date.
2.997404
2.767363
1.083126
words = txt.replace(',', ' ').split() version = None for x in reversed(words): if len(x) > 2: if x[0].lower() == 'v': x = x[1:] if '.' in x and x[0].isdigit(): version = x break return version
def extract_version(txt)
This function tries to extract the version from the help text of any program.
3.13032
3.098012
1.010428
ret = self.call().return_code ok = ret == return_code if not ok: raise EasyProcessError( self, 'check error, return code is not {0}!'.format(return_code)) return self
def check(self, return_code=0)
Run command with arguments. Wait for command to complete. If the exit code was as expected and there is no exception then return, otherwise raise EasyProcessError. :param return_code: int, expected return code :rtype: self
7.016392
5.905854
1.18804
self.start().wait(timeout=timeout) if self.is_alive(): self.stop() return self
def call(self, timeout=None)
Run command with arguments. Wait for command to complete. same as: 1. :meth:`start` 2. :meth:`wait` 3. :meth:`stop` :rtype: self
5.132267
6.089052
0.842868
if self.is_started: raise EasyProcessError(self, 'process was started twice!') if self.use_temp_files: self._stdout_file = tempfile.TemporaryFile(prefix='stdout_') self._stderr_file = tempfile.TemporaryFile(prefix='stderr_') stdout = self._stdout_file stderr = self._stderr_file else: stdout = subprocess.PIPE stderr = subprocess.PIPE cmd = list(map(uniencode, self.cmd)) try: self.popen = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, cwd=self.cwd, env=self.env, ) except OSError as oserror: log.debug('OSError exception: %s', oserror) self.oserror = oserror raise EasyProcessError(self, 'start error') self.is_started = True log.debug('process was started (pid=%s)', self.pid) return self
def start(self)
start command in background and does not wait for it. :rtype: self
2.96494
2.935021
1.010194
if timeout is not None: if not self._thread: self._thread = threading.Thread(target=self._wait4process) self._thread.daemon = 1 self._thread.start() if self._thread: self._thread.join(timeout=timeout) self.timeout_happened = self.timeout_happened or self._thread.isAlive() else: # no timeout and no existing thread self._wait4process() return self
def wait(self, timeout=None)
Wait for command to complete. Timeout: - discussion: http://stackoverflow.com/questions/1191374/subprocess-with-timeout - implementation: threading :rtype: self
3.361272
3.089758
1.087875
''' Kill process (:meth:`subprocess.Popen.terminate`). Do not wait for command to complete. :rtype: self ''' if not self.is_started: raise EasyProcessError(self, 'process was not started!') log.debug('stopping process (pid=%s cmd="%s")', self.pid, self.cmd) if self.popen: if self.is_alive(): log.debug('process is active -> sending SIGTERM') try: try: self.popen.terminate() except AttributeError: os.kill(self.popen.pid, signal.SIGKILL) except OSError as oserror: log.debug('exception in terminate:%s', oserror) else: log.debug('process was already stopped') else: log.debug('process was not started') return self
def sendstop(self)
Kill process (:meth:`subprocess.Popen.terminate`). Do not wait for command to complete. :rtype: self
3.959
2.917974
1.356764
''' returns a function which: 1. start process 2. call func, save result 3. stop process 4. returns result similar to :keyword:`with` statement :rtype: ''' def wrapped(): self.start() if delay: self.sleep(delay) x = None try: x = func() except OSError as oserror: log.debug('OSError exception:%s', oserror) self.oserror = oserror raise EasyProcessError(self, 'wrap error!') finally: self.stop() return x return wrapped
def wrap(self, func, delay=0)
returns a function which: 1. start process 2. call func, save result 3. stop process 4. returns result similar to :keyword:`with` statement :rtype:
6.672537
3.286036
2.030573
return SendMessage(settings=self.settings, **kwargs).call( message=message, room_id=room_id, **kwargs )
def send_message(self, message, room_id, **kwargs)
Send a message to a given room
4.523521
4.60559
0.98218
return GetPrivateRooms(settings=self.settings, **kwargs).call(**kwargs)
def get_private_rooms(self, **kwargs)
Get a listing of all private rooms with their names and IDs
12.245225
11.066603
1.106503
return GetPrivateRoomHistory(settings=self.settings, **kwargs).call( room_id=room_id, oldest=oldest, **kwargs )
def get_private_room_history(self, room_id, oldest=None, **kwargs)
Get various history of specific private group in this case private :param room_id: :param kwargs: :return:
4.485701
5.801224
0.773234
return GetPublicRooms(settings=self.settings, **kwargs).call(**kwargs)
def get_public_rooms(self, **kwargs)
Get a listing of all public rooms with their names and IDs
12.268001
10.973272
1.117989
return GetRoomInfo(settings=self.settings, **kwargs).call( room_id=room_id, **kwargs )
def get_room_info(self, room_id, **kwargs)
Get various information about a specific channel/room :param room_id: :param kwargs: :return:
5.863688
8.221437
0.713219
return UploadFile(settings=self.settings, **kwargs).call( room_id=room_id, description=description, file=file, message=message, **kwargs )
def upload_file(self, room_id, description, file, message, **kwargs)
Upload file to room :param room_id: :param description: :param file: :param kwargs: :return:
2.975234
3.876838
0.767438
return GetPrivateRoomInfo(settings=self.settings, **kwargs).call( room_id=room_id, **kwargs )
def get_private_room_info(self, room_id, **kwargs)
Get various information about a specific private group :param room_id: :param kwargs: :return:
5.864043
8.356936
0.701698
return GetRoomId(settings=self.settings, **kwargs).call( room_name=room_name, **kwargs )
def get_room_id(self, room_name, **kwargs)
Get room ID :param room_name: :param kwargs: :return:
5.593689
7.440295
0.75181
return GetRoomHistory(settings=self.settings, **kwargs).call( room_id=room_id, oldest=oldest, latest=latest, inclusive=inclusive, count=count, unreads=unreads, **kwargs )
def get_room_history( self, room_id, oldest=None, latest=datetime.now(), inclusive=False, count=20, unreads=False, **kwargs )
Get various history of specific channel/room :param room_id: :param kwargs: :return:
2.308338
2.999074
0.769684
return CreatePublicRoom(settings=self.settings, **kwargs).call(name=name, **kwargs)
def create_public_room(self, name, **kwargs)
Create room with given name :param name: Room name :param kwargs: members: The users to add to the channel when it is created. Optional; Ex.: ["rocket.cat"], Default: [] read_only: Set if the channel is read only or not. Optional; Ex.: True, Default: False :return:
8.387897
14.450068
0.580475
return DeletePublicRoom(settings=self.settings, **kwargs).call(room_id=room_id, **kwargs)
def delete_public_room(self, room_id, **kwargs)
Delete room with given ID :param room_id: Room ID :param kwargs: :return:
6.019956
8.6373
0.696972