sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def security():
"""View for security page."""
sessions = SessionActivity.query_by_user(
user_id=current_user.get_id()
).all()
master_session = None
for index, session in enumerate(sessions):
if SessionActivity.is_current(session.sid_s):
master_session = session
del sessions[index]
return render_template(
current_app.config['ACCOUNTS_SETTINGS_SECURITY_TEMPLATE'],
formclass=RevokeForm,
sessions=[master_session] + sessions,
is_current=SessionActivity.is_current
) | View for security page. | entailment |
def revoke_session():
"""Revoke a session."""
form = RevokeForm(request.form)
if not form.validate_on_submit():
abort(403)
sid_s = form.data['sid_s']
if SessionActivity.query.filter_by(
user_id=current_user.get_id(), sid_s=sid_s).count() == 1:
delete_session(sid_s=sid_s)
db.session.commit()
if not SessionActivity.is_current(sid_s=sid_s):
# if it's the same session doesn't show the message, otherwise
# the session will be still open without the database record
flash('Session {0} successfully removed.'.format(sid_s), 'success')
else:
flash('Unable to remove the session {0}.'.format(sid_s), 'error')
return redirect(url_for('invenio_accounts.security')) | Revoke a session. | entailment |
def upgrade():
"""Upgrade database."""
with op.batch_alter_table('accounts_user_session_activity') as batch_op:
batch_op.add_column(sa.Column('browser', sa.String(80), nullable=True))
batch_op.add_column(
sa.Column('browser_version', sa.String(30), nullable=True))
batch_op.add_column(
sa.Column('country', sa.String(3), nullable=True))
batch_op.add_column(
sa.Column('device', sa.String(80), nullable=True))
batch_op.add_column(
sa.Column('ip', sa.String(80), nullable=True))
batch_op.add_column(
sa.Column('os', sa.String(80), nullable=True)) | Upgrade database. | entailment |
def query_by_expired(cls):
"""Query to select all expired sessions."""
lifetime = current_app.permanent_session_lifetime
expired_moment = datetime.utcnow() - lifetime
return cls.query.filter(cls.created < expired_moment) | Query to select all expired sessions. | entailment |
def monkey_patch_flask_security():
"""Monkey-patch Flask-Security."""
if utils.get_hmac != get_hmac:
utils.get_hmac = get_hmac
if utils.hash_password != hash_password:
utils.hash_password = hash_password
changeable.hash_password = hash_password
recoverable.hash_password = hash_password
registerable.hash_password = hash_password
# Disable remember me cookie generation as it does not work with
# session activity tracking (a remember me token will bypass revoking
# of a session).
def patch_do_nothing(*args, **kwargs):
pass
LoginManager._set_cookie = patch_do_nothing
# Disable loading user from headers and object because we want to be
# sure we can load user only through the login form.
def patch_reload_anonym(self, *args, **kwargs):
self.reload_user()
LoginManager._load_from_header = patch_reload_anonym
LoginManager._load_from_request = patch_reload_anonym | Monkey-patch Flask-Security. | entailment |
def init_app(self, app, sessionstore=None, register_blueprint=True):
"""Flask application initialization.
The following actions are executed:
#. Initialize the configuration.
#. Monkey-patch Flask-Security.
#. Create the user datastore.
#. Create the sessionstore.
#. Initialize the extension, the forms to register users and
confirms their emails, the CLI and, if ``ACCOUNTS_USE_CELERY`` is
``True``, register a celery task to send emails.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``)
"""
self.init_config(app)
# Monkey-patch Flask-Security
InvenioAccounts.monkey_patch_flask_security()
# Create user datastore
if not self.datastore:
self.datastore = SessionAwareSQLAlchemyUserDatastore(
db, User, Role)
if app.config['ACCOUNTS_SESSION_ACTIVITY_ENABLED']:
self._enable_session_activity(app=app)
# Initialize extension.
_register_blueprint = app.config.get('ACCOUNTS_REGISTER_BLUEPRINT')
if _register_blueprint is not None:
register_blueprint = _register_blueprint
state = self.security.init_app(app, datastore=self.datastore,
register_blueprint=register_blueprint)
self.register_anonymous_identity_loader(state)
app.extensions['security'].register_form = register_form_factory(
app.extensions['security'].register_form, app)
app.extensions['security'].confirm_register_form = \
confirm_register_form_factory(
app.extensions['security'].confirm_register_form, app
)
app.extensions['security'].login_form = login_form_factory(
app.extensions['security'].login_form, app)
if app.config['ACCOUNTS_USE_CELERY']:
from invenio_accounts.tasks import send_security_email
@state.send_mail_task
def delay_security_email(msg):
send_security_email.delay(msg.__dict__)
# Register context processor
if app.config['ACCOUNTS_JWT_DOM_TOKEN']:
from invenio_accounts.context_processors.jwt import \
jwt_proccessor
app.context_processor(jwt_proccessor)
# Register signal receiver
if app.config.get('ACCOUNTS_USERINFO_HEADERS'):
request_finished.connect(set_session_info, app)
app.extensions['invenio-accounts'] = self | Flask application initialization.
The following actions are executed:
#. Initialize the configuration.
#. Monkey-patch Flask-Security.
#. Create the user datastore.
#. Create the sessionstore.
#. Initialize the extension, the forms to register users and
confirms their emails, the CLI and, if ``ACCOUNTS_USE_CELERY`` is
``True``, register a celery task to send emails.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``) | entailment |
def init_config(self, app):
"""Initialize configuration.
:param app: The Flask application.
"""
try:
pkg_resources.get_distribution('celery')
app.config.setdefault(
"ACCOUNTS_USE_CELERY", not (app.debug or app.testing))
except pkg_resources.DistributionNotFound: # pragma: no cover
app.config.setdefault("ACCOUNTS_USE_CELERY", False)
# Register Invenio legacy password hashing
register_crypt_handler(InvenioAesEncryptedEmail)
# Change Flask defaults
app.config.setdefault(
'SESSION_COOKIE_SECURE',
not app.debug
)
# Change Flask-Security defaults
app.config.setdefault(
'SECURITY_PASSWORD_SALT',
app.config['SECRET_KEY']
)
# Set JWT secret key
app.config.setdefault(
'ACCOUNTS_JWT_SECRET_KEY',
app.config.get(
'ACCOUNTS_JWT_SECRET_KEY',
app.config.get('SECRET_KEY')
)
)
config_apps = ['ACCOUNTS', 'SECURITY_']
for k in dir(config):
if any([k.startswith(prefix) for prefix in config_apps]):
app.config.setdefault(k, getattr(config, k))
# Set Session KV store
if app.config.get('ACCOUNTS_SESSION_REDIS_URL'):
import redis
from simplekv.memory.redisstore import RedisStore
session_kvstore = RedisStore(redis.StrictRedis.from_url(
app.config['ACCOUNTS_SESSION_REDIS_URL']))
else:
from simplekv.memory import DictStore
session_kvstore = DictStore()
self.kvsession_extension = KVSessionExtension(
session_kvstore, app) | Initialize configuration.
:param app: The Flask application. | entailment |
def _enable_session_activity(self, app):
"""Enable session activity."""
user_logged_in.connect(login_listener, app)
user_logged_out.connect(logout_listener, app)
from .views.settings import blueprint
from .views.security import security, revoke_session
blueprint.route('/security/', methods=['GET'])(security)
blueprint.route('/sessions/revoke/', methods=['POST'])(revoke_session) | Enable session activity. | entailment |
def init_app(self, app, sessionstore=None, register_blueprint=False):
"""Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``)
"""
return super(InvenioAccountsREST, self).init_app(
app, sessionstore=sessionstore,
register_blueprint=register_blueprint,
) | Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``) | entailment |
def init_app(self, app, sessionstore=None, register_blueprint=True):
"""Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``)
"""
self.make_session_permanent(app)
return super(InvenioAccountsUI, self).init_app(
app, sessionstore=sessionstore,
register_blueprint=register_blueprint
) | Flask application initialization.
:param app: The Flask application.
:param sessionstore: store for sessions. Passed to
``flask-kvsession``. If ``None`` then Redis is configured.
(Default: ``None``)
:param register_blueprint: If ``True``, the application registers the
blueprints. (Default: ``True``) | entailment |
def deactivate_user(self, user):
"""Deactivate a user.
:param user: A :class:`invenio_accounts.models.User` instance.
:returns: The datastore instance.
"""
res = super(SessionAwareSQLAlchemyUserDatastore, self).deactivate_user(
user)
if res:
delete_user_sessions(user)
return res | Deactivate a user.
:param user: A :class:`invenio_accounts.models.User` instance.
:returns: The datastore instance. | entailment |
def _initialize_attributes(model_class, name, bases, attrs):
"""Initialize the attributes of the model."""
model_class._attributes = {}
for k, v in attrs.iteritems():
if isinstance(v, Attribute):
model_class._attributes[k] = v
v.name = v.name or k | Initialize the attributes of the model. | entailment |
def _initialize_referenced(model_class, attribute):
"""Adds a property to the target of a reference field that
returns the list of associated objects.
"""
# this should be a descriptor
def _related_objects(self):
return (model_class.objects
.filter(**{attribute.attname: self.id}))
klass = attribute._target_type
if isinstance(klass, basestring):
return (klass, model_class, attribute)
else:
related_name = (attribute.related_name or
model_class.__name__.lower() + '_set')
setattr(klass, related_name,
property(_related_objects)) | Adds a property to the target of a reference field that
returns the list of associated objects. | entailment |
def _initialize_lists(model_class, name, bases, attrs):
"""Stores the list fields descriptors of a model."""
model_class._lists = {}
for k, v in attrs.iteritems():
if isinstance(v, ListField):
model_class._lists[k] = v
v.name = v.name or k | Stores the list fields descriptors of a model. | entailment |
def _initialize_references(model_class, name, bases, attrs):
"""Stores the list of reference field descriptors of a model."""
model_class._references = {}
h = {}
deferred = []
for k, v in attrs.iteritems():
if isinstance(v, ReferenceField):
model_class._references[k] = v
v.name = v.name or k
att = Attribute(name=v.attname)
h[v.attname] = att
setattr(model_class, v.attname, att)
refd = _initialize_referenced(model_class, v)
if refd:
deferred.append(refd)
attrs.update(h)
return deferred | Stores the list of reference field descriptors of a model. | entailment |
def _initialize_indices(model_class, name, bases, attrs):
"""Stores the list of indexed attributes."""
model_class._indices = []
for k, v in attrs.iteritems():
if isinstance(v, (Attribute, ListField)) and v.indexed:
model_class._indices.append(k)
if model_class._meta['indices']:
model_class._indices.extend(model_class._meta['indices']) | Stores the list of indexed attributes. | entailment |
def _initialize_counters(model_class, name, bases, attrs):
"""Stores the list of counter fields."""
model_class._counters = []
for k, v in attrs.iteritems():
if isinstance(v, Counter):
model_class._counters.append(k) | Stores the list of counter fields. | entailment |
def get_model_from_key(key):
"""Gets the model from a given key."""
_known_models = {}
model_name = key.split(':', 2)[0]
# populate
for klass in Model.__subclasses__():
_known_models[klass.__name__] = klass
return _known_models.get(model_name, None) | Gets the model from a given key. | entailment |
def from_key(key):
"""Returns the model instance based on the key.
Raises BadKeyError if the key is not recognized by
redisco or no defined model can be found.
Returns None if the key could not be found.
"""
model = get_model_from_key(key)
if model is None:
raise BadKeyError
try:
_, id = key.split(':', 2)
id = int(id)
except ValueError, TypeError:
raise BadKeyError
return model.objects.get_by_id(id) | Returns the model instance based on the key.
Raises BadKeyError if the key is not recognized by
redisco or no defined model can be found.
Returns None if the key could not be found. | entailment |
def is_valid(self):
"""Returns True if all the fields are valid.
It first validates the fields (required, unique, etc.)
and then calls the validate method.
"""
self._errors = []
for field in self.fields:
try:
field.validate(self)
except FieldValidationError, e:
self._errors.extend(e.errors)
self.validate()
return not bool(self._errors) | Returns True if all the fields are valid.
It first validates the fields (required, unique, etc.)
and then calls the validate method. | entailment |
def update_attributes(self, **kwargs):
"""Updates the attributes of the model."""
attrs = self.attributes.values() + self.lists.values() \
+ self.references.values()
for att in attrs:
if att.name in kwargs:
att.__set__(self, kwargs[att.name]) | Updates the attributes of the model. | entailment |
def save(self):
"""Saves the instance to the datastore."""
if not self.is_valid():
return self._errors
_new = self.is_new()
if _new:
self._initialize_id()
with Mutex(self):
self._write(_new)
return True | Saves the instance to the datastore. | entailment |
def key(self, att=None):
"""Returns the Redis key where the values are stored."""
if att is not None:
return self._key[self.id][att]
else:
return self._key[self.id] | Returns the Redis key where the values are stored. | entailment |
def delete(self):
"""Deletes the object from the datastore."""
pipeline = self.db.pipeline()
self._delete_from_indices(pipeline)
self._delete_membership(pipeline)
pipeline.delete(self.key())
pipeline.execute() | Deletes the object from the datastore. | entailment |
def incr(self, att, val=1):
"""Increments a counter."""
if att not in self.counters:
raise ValueError("%s is not a counter.")
self.db.hincrby(self.key(), att, val) | Increments a counter. | entailment |
def attributes_dict(self):
"""Returns the mapping of the model attributes and their
values.
"""
h = {}
for k in self.attributes.keys():
h[k] = getattr(self, k)
for k in self.lists.keys():
h[k] = getattr(self, k)
for k in self.references.keys():
h[k] = getattr(self, k)
return h | Returns the mapping of the model attributes and their
values. | entailment |
def fields(self):
"""Returns the list of field names of the model."""
return (self.attributes.values() + self.lists.values()
+ self.references.values()) | Returns the list of field names of the model. | entailment |
def exists(cls, id):
"""Checks if the model with id exists."""
return bool(redisco.get_client().exists(cls._key[str(id)]) or
redisco.get_client().sismember(cls._key['all'], str(id))) | Checks if the model with id exists. | entailment |
def _initialize_id(self):
"""Initializes the id of the instance."""
self.id = str(self.db.incr(self._key['id'])) | Initializes the id of the instance. | entailment |
def _write(self, _new=False):
"""Writes the values of the attributes to the datastore.
This method also creates the indices and saves the lists
associated to the object.
"""
pipeline = self.db.pipeline()
self._create_membership(pipeline)
self._update_indices(pipeline)
h = {}
# attributes
for k, v in self.attributes.iteritems():
if isinstance(v, DateTimeField):
if v.auto_now:
setattr(self, k, datetime.now())
if v.auto_now_add and _new:
setattr(self, k, datetime.now())
elif isinstance(v, DateField):
if v.auto_now:
setattr(self, k, date.today())
if v.auto_now_add and _new:
setattr(self, k, date.today())
for_storage = getattr(self, k)
if for_storage is not None:
h[k] = v.typecast_for_storage(for_storage)
# indices
for index in self.indices:
if index not in self.lists and index not in self.attributes:
v = getattr(self, index)
if callable(v):
v = v()
if v:
try:
h[index] = unicode(v)
except UnicodeError:
h[index] = unicode(v.decode('utf-8'))
pipeline.delete(self.key())
if h:
pipeline.hmset(self.key(), h)
# lists
for k, v in self.lists.iteritems():
l = List(self.key()[k], pipeline=pipeline)
l.clear()
values = getattr(self, k)
if values:
if v._redisco_model:
l.extend([item.id for item in values])
else:
l.extend(values)
pipeline.execute() | Writes the values of the attributes to the datastore.
This method also creates the indices and saves the lists
associated to the object. | entailment |
def _create_membership(self, pipeline=None):
"""Adds the id of the object to the set of all objects of the same
class.
"""
Set(self._key['all'], pipeline=pipeline).add(self.id) | Adds the id of the object to the set of all objects of the same
class. | entailment |
def _delete_membership(self, pipeline=None):
"""Removes the id of the object to the set of all objects of the
same class.
"""
Set(self._key['all'], pipeline=pipeline).remove(self.id) | Removes the id of the object to the set of all objects of the
same class. | entailment |
def _add_to_indices(self, pipeline):
"""Adds the base64 encoded values of the indices."""
for att in self.indices:
self._add_to_index(att, pipeline=pipeline) | Adds the base64 encoded values of the indices. | entailment |
def _add_to_index(self, att, val=None, pipeline=None):
"""
Adds the id to the index.
This also adds to the _indices set of the object.
"""
index = self._index_key_for(att)
if index is None:
return
t, index = index
if t == 'attribute':
pipeline.sadd(index, self.id)
pipeline.sadd(self.key()['_indices'], index)
elif t == 'list':
for i in index:
pipeline.sadd(i, self.id)
pipeline.sadd(self.key()['_indices'], i)
elif t == 'sortedset':
zindex, index = index
pipeline.sadd(index, self.id)
pipeline.sadd(self.key()['_indices'], index)
descriptor = self.attributes[att]
score = descriptor.typecast_for_storage(getattr(self, att))
pipeline.zadd(zindex, self.id, score)
pipeline.sadd(self.key()['_zindices'], zindex) | Adds the id to the index.
This also adds to the _indices set of the object. | entailment |
def _delete_from_indices(self, pipeline):
"""Deletes the object's id from the sets(indices) it has been added
to and removes its list of indices (used for housekeeping).
"""
s = Set(self.key()['_indices'])
z = Set(self.key()['_zindices'])
for index in s.members:
pipeline.srem(index, self.id)
for index in z.members:
pipeline.zrem(index, self.id)
pipeline.delete(s.key)
pipeline.delete(z.key) | Deletes the object's id from the sets(indices) it has been added
to and removes its list of indices (used for housekeeping). | entailment |
def _index_key_for(self, att, value=None):
"""Returns a key based on the attribute and its value.
The key is used for indexing.
"""
if value is None:
value = getattr(self, att)
if callable(value):
value = value()
if value is None:
return None
if att not in self.lists:
return self._get_index_key_for_non_list_attr(att, value)
else:
return self._tuple_for_index_key_attr_list(att, value) | Returns a key based on the attribute and its value.
The key is used for indexing. | entailment |
def isdisjoint(self, other):
"""Return True if the set has no elements in common with other."""
return not bool(self.db.sinter([self.key, other.key])) | Return True if the set has no elements in common with other. | entailment |
def union(self, key, *others):
"""Return a new set with elements from the set and all others."""
if not isinstance(key, str):
raise ValueError("String expected.")
self.db.sunionstore(key, [self.key] + [o.key for o in others])
return Set(key) | Return a new set with elements from the set and all others. | entailment |
def intersection(self, key, *others):
"""Return a new set with elements common to the set and all others."""
if not isinstance(key, str):
raise ValueError("String expected.")
self.db.sinterstore(key, [self.key] + [o.key for o in others])
return Set(key) | Return a new set with elements common to the set and all others. | entailment |
def difference(self, key, *others):
"""Return a new set with elements in the set that are not in the others."""
if not isinstance(key, str):
raise ValueError("String expected.")
self.db.sdiffstore(key, [self.key] + [o.key for o in others])
return Set(key) | Return a new set with elements in the set that are not in the others. | entailment |
def update(self, *others):
"""Update the set, adding elements from all others."""
self.db.sunionstore(self.key, [self.key] + [o.key for o in others]) | Update the set, adding elements from all others. | entailment |
def intersection_update(self, *others):
"""Update the set, keeping only elements found in it and all others."""
self.db.sinterstore(self.key, [o.key for o in [self.key] + others]) | Update the set, keeping only elements found in it and all others. | entailment |
def difference_update(self, *others):
"""Update the set, removing elements found in others."""
self.db.sdiffstore(self.key, [o.key for o in [self.key] + others]) | Update the set, removing elements found in others. | entailment |
def copy(self, key):
"""Copy the set to another key and return the new Set.
WARNING: If the key exists, it overwrites it.
"""
copy = Set(key=key, db=self.db)
copy.clear()
copy |= self
return copy | Copy the set to another key and return the new Set.
WARNING: If the key exists, it overwrites it. | entailment |
def sinter(self, *other_sets):
"""Performs an intersection between Sets.
Returns a set of common members. Uses Redis.sinter.
"""
return self.db.sinter([self.key] + [s.key for s in other_sets]) | Performs an intersection between Sets.
Returns a set of common members. Uses Redis.sinter. | entailment |
def sunion(self, *other_sets):
"""Union between Sets.
Returns a set of common members. Uses Redis.sunion.
"""
return self.db.sunion([self.key] + [s.key for s in other_sets]) | Union between Sets.
Returns a set of common members. Uses Redis.sunion. | entailment |
def sdiff(self, *other_sets):
"""Union between Sets.
Returns a set of common members. Uses Redis.sdiff.
"""
return self.db.sdiff([self.key] + [s.key for s in other_sets]) | Union between Sets.
Returns a set of common members. Uses Redis.sdiff. | entailment |
def reverse(self):
"""Reverse in place."""
r = self[:]
r.reverse()
self.clear()
self.extend(r) | Reverse in place. | entailment |
def copy(self, key):
"""Copy the list to a new list.
WARNING: If key exists, it clears it before copying.
"""
copy = List(key, self.db)
copy.clear()
copy.extend(self)
return copy | Copy the list to a new list.
WARNING: If key exists, it clears it before copying. | entailment |
def lt(self, v, limit=None, offset=None):
"""Returns the list of the members of the set that have scores
less than v.
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore(self._min_score, "(%f" % v,
start=offset, num=limit) | Returns the list of the members of the set that have scores
less than v. | entailment |
def gt(self, v, limit=None, offset=None):
"""Returns the list of the members of the set that have scores
greater than v.
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("(%f" % v, self._max_score,
start=offset, num=limit) | Returns the list of the members of the set that have scores
greater than v. | entailment |
def between(self, min, max, limit=None, offset=None):
"""Returns the list of the members of the set that have scores
between min and max.
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore(min, max,
start=offset, num=limit) | Returns the list of the members of the set that have scores
between min and max. | entailment |
def hex_to_bytes(s):
"""
convert hex string to bytes
"""
if len(s) % 2:
s = b'0' + s
ia = [int(s[i:i+2], 16) for i in range(0, len(s), 2)] # int array
return bs(ia) if PYTHON_MAJOR_VER == 3 else b''.join([chr(c) for c in ia]) | convert hex string to bytes | entailment |
def client_seed(a=random.randrange(0, 1 << SRP_KEY_SIZE)):
"""
A: Client public key
a: Client private key
"""
if DEBUG:
a = DEBUG_PRIVATE_KEY
N, g, k = get_prime()
A = pow(g, a, N)
if DEBUG_PRINT:
print('a=', binascii.b2a_hex(long2bytes(a)), end='\n')
print('A=', binascii.b2a_hex(long2bytes(A)), end='\n')
return A, a | A: Client public key
a: Client private key | entailment |
def server_seed(v, b=random.randrange(0, 1 << SRP_KEY_SIZE)):
"""
B: Server public key
b: Server private key
"""
N, g, k = get_prime()
if DEBUG:
b = DEBUG_PRIVATE_KEY
gb = pow(g, b, N)
kv = (k * v) % N
B = (kv + gb) % N
if DEBUG_PRINT:
print("v", binascii.b2a_hex(long2bytes(v)), end='\n')
print('b=', binascii.b2a_hex(long2bytes(b)), end='\n')
print("gb", binascii.b2a_hex(long2bytes(gb)), end='\n')
print("k", binascii.b2a_hex(long2bytes(k)), end='\n')
print("v", binascii.b2a_hex(long2bytes(v)), end='\n')
print("kv", binascii.b2a_hex(long2bytes(kv)), end='\n')
print('B=', binascii.b2a_hex(long2bytes(B)), end='\n')
return B, b | B: Server public key
b: Server private key | entailment |
def client_session(user, password, salt, A, B, a):
"""
Client session secret
Both: u = H(A, B)
User: x = H(s, p) (user enters password)
User: S = (B - kg^x) ^ (a + ux) (computes session key)
User: K = H(S)
"""
N, g, k = get_prime()
u = get_scramble(A, B)
x = getUserHash(salt, user, password) # x
gx = pow(g, x, N) # g^x
kgx = (k * gx) % N # kg^x
diff = (B - kgx) % N # B - kg^x
ux = (u * x) % N
aux = (a + ux) % N
session_secret = pow(diff, aux, N) # (B - kg^x) ^ (a + ux)
K = hash_digest(hashlib.sha1, session_secret)
if DEBUG_PRINT:
print('B=', binascii.b2a_hex(long2bytes(B)), end='\n')
print('u=', binascii.b2a_hex(long2bytes(u)), end='\n')
print('x=', binascii.b2a_hex(long2bytes(x)), end='\n')
print('gx=', binascii.b2a_hex(long2bytes(gx)), end='\n')
print('kgx=', binascii.b2a_hex(long2bytes(kgx)), end='\n')
print('diff=', binascii.b2a_hex(long2bytes(diff)), end='\n')
print('ux=', binascii.b2a_hex(long2bytes(ux)), end='\n')
print('aux=', binascii.b2a_hex(long2bytes(aux)), end='\n')
print('session_secret=', binascii.b2a_hex(long2bytes(session_secret)), end='\n')
print('session_key:K=', binascii.b2a_hex(K))
return K | Client session secret
Both: u = H(A, B)
User: x = H(s, p) (user enters password)
User: S = (B - kg^x) ^ (a + ux) (computes session key)
User: K = H(S) | entailment |
def server_session(user, password, salt, A, B, b):
"""
Server session secret
Both: u = H(A, B)
Host: S = (Av^u) ^ b (computes session key)
Host: K = H(S)
"""
N, g, k = get_prime()
u = get_scramble(A, B)
v = get_verifier(user, password, salt)
vu = pow(v, u, N) # v^u
Avu = (A * vu) % N # Av^u
session_secret = pow(Avu, b, N) # (Av^u) ^ b
K = hash_digest(hashlib.sha1, session_secret)
if DEBUG_PRINT:
print('server session_secret=', binascii.b2a_hex(long2bytes(session_secret)), end='\n')
print('server session hash K=', binascii.b2a_hex(K))
return K | Server session secret
Both: u = H(A, B)
Host: S = (Av^u) ^ b (computes session key)
Host: K = H(S) | entailment |
def client_proof(user, password, salt, A, B, a, hash_algo):
"""
M = H(H(N) xor H(g), H(I), s, A, B, K)
"""
N, g, k = get_prime()
K = client_session(user, password, salt, A, B, a)
n1 = bytes2long(hash_digest(hashlib.sha1, N))
n2 = bytes2long(hash_digest(hashlib.sha1, g))
if DEBUG_PRINT:
print('n1-1=', binascii.b2a_hex(long2bytes(n1)), end='\n')
print('n2-1=', binascii.b2a_hex(long2bytes(n2)), end='\n')
n1 = pow(n1, n2, N)
n2 = bytes2long(hash_digest(hashlib.sha1, user))
M = hash_digest(hash_algo, n1, n2, salt, A, B, K)
if DEBUG_PRINT:
print('n1-2=', binascii.b2a_hex(long2bytes(n1)), end='\n')
print('n2-2=', binascii.b2a_hex(long2bytes(n2)), end='\n')
print('client_proof:M=', binascii.b2a_hex(M), end='\n')
return M, K | M = H(H(N) xor H(g), H(I), s, A, B, K) | entailment |
def dpd_to_int(dpd):
"""
Convert DPD encodined value to int (0-999)
dpd: DPD encoded value. 10bit unsigned int
"""
b = [None] * 10
b[9] = 1 if dpd & 0b1000000000 else 0
b[8] = 1 if dpd & 0b0100000000 else 0
b[7] = 1 if dpd & 0b0010000000 else 0
b[6] = 1 if dpd & 0b0001000000 else 0
b[5] = 1 if dpd & 0b0000100000 else 0
b[4] = 1 if dpd & 0b0000010000 else 0
b[3] = 1 if dpd & 0b0000001000 else 0
b[2] = 1 if dpd & 0b0000000100 else 0
b[1] = 1 if dpd & 0b0000000010 else 0
b[0] = 1 if dpd & 0b0000000001 else 0
d = [None] * 3
if b[3] == 0:
d[2] = b[9] * 4 + b[8] * 2 + b[7]
d[1] = b[6] * 4 + b[5] * 2 + b[4]
d[0] = b[2] * 4 + b[1] * 2 + b[0]
elif (b[3], b[2], b[1]) == (1, 0, 0):
d[2] = b[9] * 4 + b[8] * 2 + b[7]
d[1] = b[6] * 4 + b[5] * 2 + b[4]
d[0] = 8 + b[0]
elif (b[3], b[2], b[1]) == (1, 0, 1):
d[2] = b[9] * 4 + b[8] * 2 + b[7]
d[1] = 8 + b[4]
d[0] = b[6] * 4 + b[5] * 2 + b[0]
elif (b[3], b[2], b[1]) == (1, 1, 0):
d[2] = 8 + b[7]
d[1] = b[6] * 4 + b[5] * 2 + b[4]
d[0] = b[9] * 4 + b[8] * 2 + b[0]
elif (b[6], b[5], b[3], b[2], b[1]) == (0, 0, 1, 1, 1):
d[2] = 8 + b[7]
d[1] = 8 + b[4]
d[0] = b[9] * 4 + b[8] * 2 + b[0]
elif (b[6], b[5], b[3], b[2], b[1]) == (0, 1, 1, 1, 1):
d[2] = 8 + b[7]
d[1] = b[9] * 4 + b[8] * 2 + b[4]
d[0] = 8 + b[0]
elif (b[6], b[5], b[3], b[2], b[1]) == (1, 0, 1, 1, 1):
d[2] = b[9] * 4 + b[8] * 2 + b[7]
d[1] = 8 + b[4]
d[0] = 8 + b[0]
elif (b[6], b[5], b[3], b[2], b[1]) == (1, 1, 1, 1, 1):
d[2] = 8 + b[7]
d[1] = 8 + b[4]
d[0] = 8 + b[0]
else:
raise ValueError('Invalid DPD encoding')
return d[2] * 100 + d[1] * 10 + d[0] | Convert DPD encodined value to int (0-999)
dpd: DPD encoded value. 10bit unsigned int | entailment |
def calc_significand(prefix, dpd_bits, num_bits):
"""
prefix: High bits integer value
dpd_bits: dpd encoded bits
num_bits: bit length of dpd_bits
"""
# https://en.wikipedia.org/wiki/Decimal128_floating-point_format#Densely_packed_decimal_significand_field
num_segments = num_bits // 10
segments = []
for i in range(num_segments):
segments.append(dpd_bits & 0b1111111111)
dpd_bits >>= 10
segments.reverse()
v = prefix
for dpd in segments:
v = v * 1000 + dpd_to_int(dpd)
return v | prefix: High bits integer value
dpd_bits: dpd encoded bits
num_bits: bit length of dpd_bits | entailment |
def decimal128_to_decimal(b):
"decimal128 bytes to Decimal"
v = decimal128_to_sign_digits_exponent(b)
if isinstance(v, Decimal):
return v
sign, digits, exponent = v
return Decimal((sign, Decimal(digits).as_tuple()[1], exponent)) | decimal128 bytes to Decimal | entailment |
def error_response(response):
"""
Raises errors matching the response code
"""
if response.status_code >= 500:
raise exceptions.GeocodioServerError
elif response.status_code == 403:
raise exceptions.GeocodioAuthError
elif response.status_code == 422:
raise exceptions.GeocodioDataError(response.json()["error"])
else:
raise exceptions.GeocodioError(
"Unknown service error (HTTP {0})".format(response.status_code)
) | Raises errors matching the response code | entailment |
def _req(self, method="get", verb=None, headers={}, params={}, data={}):
"""
Method to wrap all request building
:return: a Response object based on the specified method and request values.
"""
url = self.BASE_URL.format(verb=verb)
request_headers = {"content-type": "application/json"}
request_params = {"api_key": self.API_KEY}
request_headers.update(headers)
request_params.update(params)
return getattr(requests, method)(
url, params=request_params, headers=request_headers, data=data
) | Method to wrap all request building
:return: a Response object based on the specified method and request values. | entailment |
def parse(self, address):
"""
Returns an Address dictionary with the components of the queried
address.
>>> client = GeocodioClient('some_api_key')
>>> client.parse("1600 Pennsylvania Ave, Washington DC")
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC"
}
"""
response = self._req(verb="parse", params={"q": address})
if response.status_code != 200:
return error_response(response)
return Address(response.json()) | Returns an Address dictionary with the components of the queried
address.
>>> client = GeocodioClient('some_api_key')
>>> client.parse("1600 Pennsylvania Ave, Washington DC")
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC"
} | entailment |
def batch_geocode(self, addresses, **kwargs):
"""
Returns an Address dictionary with the components of the queried
address.
"""
fields = ",".join(kwargs.pop("fields", []))
response = self._req(
"post",
verb="geocode",
params={"fields": fields},
data=json.dumps(addresses),
)
if response.status_code != 200:
return error_response(response)
return LocationCollection(response.json()["results"]) | Returns an Address dictionary with the components of the queried
address. | entailment |
def geocode_address(self, address, **kwargs):
"""
Returns a Location dictionary with the components of the queried
address and the geocoded location.
>>> client = GeocodioClient('some_api_key')
>>> client.geocode("1600 Pennsylvania Ave, Washington DC")
{
"input": {
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC"
},
"results": [
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC",
"zip": "20500"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500",
"location": {
"lat": 38.897700000000,
"lng": -77.03650000000,
},
"accuracy": 1
},
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC",
"zip": "20500"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500",
"location": {
"lat": 38.897700000000,
"lng": -77.03650000000,
},
"accuracy": 0.8
}
]
}
"""
fields = ",".join(kwargs.pop("fields", []))
response = self._req(verb="geocode", params={"q": address, "fields": fields})
if response.status_code != 200:
return error_response(response)
return Location(response.json()) | Returns a Location dictionary with the components of the queried
address and the geocoded location.
>>> client = GeocodioClient('some_api_key')
>>> client.geocode("1600 Pennsylvania Ave, Washington DC")
{
"input": {
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC"
},
"results": [
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC",
"zip": "20500"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500",
"location": {
"lat": 38.897700000000,
"lng": -77.03650000000,
},
"accuracy": 1
},
{
"address_components": {
"number": "1600",
"street": "Pennsylvania",
"suffix": "Ave",
"city": "Washington",
"state": "DC",
"zip": "20500"
},
"formatted_address": "1600 Pennsylvania Ave, Washington DC, 20500",
"location": {
"lat": 38.897700000000,
"lng": -77.03650000000,
},
"accuracy": 0.8
}
]
} | entailment |
def geocode(self, address_data, **kwargs):
"""
Returns geocoding data for either a list of addresses or a single
address represented as a string.
Provides a single point of access for end users.
"""
if isinstance(address_data, list):
return self.batch_geocode(address_data, **kwargs)
return self.geocode_address(address_data, **kwargs) | Returns geocoding data for either a list of addresses or a single
address represented as a string.
Provides a single point of access for end users. | entailment |
def reverse_point(self, latitude, longitude, **kwargs):
"""
Method for identifying an address from a geographic point
"""
fields = ",".join(kwargs.pop("fields", []))
point_param = "{0},{1}".format(latitude, longitude)
response = self._req(
verb="reverse", params={"q": point_param, "fields": fields}
)
if response.status_code != 200:
return error_response(response)
return Location(response.json()) | Method for identifying an address from a geographic point | entailment |
def batch_reverse(self, points, **kwargs):
"""
Method for identifying the addresses from a list of lat/lng tuples
"""
fields = ",".join(kwargs.pop("fields", []))
response = self._req(
"post", verb="reverse", params={"fields": fields}, data=json_points(points)
)
if response.status_code != 200:
return error_response(response)
logger.debug(response)
return LocationCollection(response.json()["results"]) | Method for identifying the addresses from a list of lat/lng tuples | entailment |
def reverse(self, points, **kwargs):
"""
General method for reversing addresses, either a single address or
multiple.
*args should either be a longitude/latitude pair or a list of
such pairs::
>>> multiple_locations = reverse([(40, -19), (43, 112)])
>>> single_location = reverse((40, -19))
"""
if isinstance(points, list):
return self.batch_reverse(points, **kwargs)
if self.order == "lat":
x, y = points
else:
y, x = points
return self.reverse_point(x, y, **kwargs) | General method for reversing addresses, either a single address or
multiple.
*args should either be a longitude/latitude pair or a list of
such pairs::
>>> multiple_locations = reverse([(40, -19), (43, 112)])
>>> single_location = reverse((40, -19)) | entailment |
def str_to_bytes(self, s):
"convert str to bytes"
if (PYTHON_MAJOR_VER == 3 or
(PYTHON_MAJOR_VER == 2 and type(s) == unicode)):
return s.encode(charset_map.get(self.charset, self.charset))
return s | convert str to bytes | entailment |
def bytes_to_str(self, b):
"convert bytes array to raw string"
if PYTHON_MAJOR_VER == 3:
return b.decode(charset_map.get(self.charset, self.charset))
return b | convert bytes array to raw string | entailment |
def bytes_to_ustr(self, b):
"convert bytes array to unicode string"
return b.decode(charset_map.get(self.charset, self.charset)) | convert bytes array to unicode string | entailment |
def params_to_blr(self, trans_handle, params):
"Convert parameter array to BLR and values format."
ln = len(params) * 2
blr = bs([5, 2, 4, 0, ln & 255, ln >> 8])
if self.accept_version < PROTOCOL_VERSION13:
values = bs([])
else:
# start with null indicator bitmap
null_indicator = 0
for i, p in enumerate(params):
if p is None:
null_indicator |= (1 << i)
n = len(params) // 8
if len(params) % 8 != 0:
n += 1
if n % 4: # padding
n += 4 - n % 4
null_indicator_bytes = []
for i in range(n):
null_indicator_bytes.append(null_indicator & 255)
null_indicator >>= 8
values = bs(null_indicator_bytes)
for p in params:
if (
(PYTHON_MAJOR_VER == 2 and type(p) == unicode) or
(PYTHON_MAJOR_VER == 3 and type(p) == str)
):
p = self.str_to_bytes(p)
t = type(p)
if p is None:
v = bs([])
blr += bs([14, 0, 0])
elif (
(PYTHON_MAJOR_VER == 2 and t == str) or
(PYTHON_MAJOR_VER == 3 and t == bytes)
):
if len(p) > MAX_CHAR_LENGTH:
v = self._create_blob(trans_handle, p)
blr += bs([9, 0])
else:
v = p
nbytes = len(v)
pad_length = ((4-nbytes) & 3)
v += bs([0]) * pad_length
blr += bs([14, nbytes & 255, nbytes >> 8])
elif t == int:
v = bint_to_bytes(p, 4)
blr += bs([8, 0]) # blr_long
elif t == float and p == float("inf"):
v = b'\x7f\x80\x00\x00'
blr += bs([10])
elif t == decimal.Decimal or t == float:
if t == float:
p = decimal.Decimal(str(p))
(sign, digits, exponent) = p.as_tuple()
v = 0
ln = len(digits)
for i in range(ln):
v += digits[i] * (10 ** (ln - i - 1))
if sign:
v *= -1
v = bint_to_bytes(v, 8)
if exponent < 0:
exponent += 256
blr += bs([16, exponent])
elif t == datetime.date:
v = convert_date(p)
blr += bs([12])
elif t == datetime.time:
if p.tzinfo:
v = convert_time_tz(p)
blr += bs([28])
else:
v = convert_time(p)
blr += bs([13])
elif t == datetime.datetime:
if p.tzinfo:
v = convert_timestamp_tz(p)
blr += bs([29])
else:
v = convert_timestamp(p)
blr += bs([35])
elif t == bool:
v = bs([1, 0, 0, 0]) if p else bs([0, 0, 0, 0])
blr += bs([23])
else: # fallback, convert to string
p = p.__repr__()
if PYTHON_MAJOR_VER == 3 or (PYTHON_MAJOR_VER == 2 and type(p) == unicode):
p = self.str_to_bytes(p)
v = p
nbytes = len(v)
pad_length = ((4-nbytes) & 3)
v += bs([0]) * pad_length
blr += bs([14, nbytes & 255, nbytes >> 8])
blr += bs([7, 0])
values += v
if self.accept_version < PROTOCOL_VERSION13:
values += bs([0]) * 4 if not p is None else bs([0xff, 0xff, 0xff, 0xff])
blr += bs([255, 76]) # [blr_end, blr_eoc]
return blr, values | Convert parameter array to BLR and values format. | entailment |
def coords(self):
"""
Returns a tuple representing the location of the address in a
GIS coords format, i.e. (longitude, latitude).
"""
x, y = ("lat", "lng") if self.order == "lat" else ("lng", "lat")
try:
return (self["location"][x], self["location"][y])
except KeyError:
return None | Returns a tuple representing the location of the address in a
GIS coords format, i.e. (longitude, latitude). | entailment |
def get(self, key):
"""
Returns an individual Location by query lookup, e.g. address or point.
"""
if isinstance(key, tuple):
# TODO handle different ordering
try:
x, y = float(key[0]), float(key[1])
except IndexError:
raise ValueError("Two values are required for a coordinate pair")
except ValueError:
raise ValueError("Only float or float-coercable values can be passed")
key = "{0},{1}".format(x, y)
return self[self.lookups[key]] | Returns an individual Location by query lookup, e.g. address or point. | entailment |
def calc_blr(xsqlda):
"Calculate BLR from XSQLVAR array."
ln = len(xsqlda) * 2
blr = [5, 2, 4, 0, ln & 255, ln >> 8]
for x in xsqlda:
sqltype = x.sqltype
if sqltype == SQL_TYPE_VARYING:
blr += [37, x.sqllen & 255, x.sqllen >> 8]
elif sqltype == SQL_TYPE_TEXT:
blr += [14, x.sqllen & 255, x.sqllen >> 8]
elif sqltype == SQL_TYPE_LONG:
blr += [8, x.sqlscale]
elif sqltype == SQL_TYPE_SHORT:
blr += [7, x.sqlscale]
elif sqltype == SQL_TYPE_INT64:
blr += [16, x.sqlscale]
elif sqltype == SQL_TYPE_QUAD:
blr += [9, x.sqlscale]
elif sqltype == SQL_TYPE_DEC_FIXED:
blr += [26, x.sqlscale]
else:
blr += sqltype2blr[sqltype]
blr += [7, 0] # [blr_short, 0]
blr += [255, 76] # [blr_end, blr_eoc]
# x.sqlscale value shoud be negative, so b convert to range(0, 256)
return bs(256 + b if b < 0 else b for b in blr) | Calculate BLR from XSQLVAR array. | entailment |
def _parse_date(self, raw_value):
"Convert raw data to datetime.date"
nday = bytes_to_bint(raw_value) + 678882
century = (4 * nday - 1) // 146097
nday = 4 * nday - 1 - 146097 * century
day = nday // 4
nday = (4 * day + 3) // 1461
day = 4 * day + 3 - 1461 * nday
day = (day + 4) // 4
month = (5 * day - 3) // 153
day = 5 * day - 3 - 153 * month
day = (day + 5) // 5
year = 100 * century + nday
if month < 10:
month += 3
else:
month -= 9
year += 1
return year, month, day | Convert raw data to datetime.date | entailment |
def _parse_time(self, raw_value):
"Convert raw data to datetime.time"
n = bytes_to_bint(raw_value)
s = n // 10000
m = s // 60
h = m // 60
m = m % 60
s = s % 60
return (h, m, s, (n % 10000) * 100) | Convert raw data to datetime.time | entailment |
def execute_update(args):
"""Execute the update based on command line args and returns a dictionary
with 'execution result, ''response code', 'response info' and
'process friendly message'.
"""
provider_class = getattr(dnsupdater,
dnsupdater.AVAILABLE_PLUGINS.get(args.provider))
updater_options = {}
process_message = None
auth = None
if args.store: # --store argument
if provider_class.auth_type == 'T':
user_arg = args.usertoken or utils.read_input(
"Paste your auth token: ")
auth = authinfo.ApiAuth(usertoken=user_arg)
else:
user_arg = args.usertoken or utils.read_input(
"Type your username: ")
pass_arg = args.password or getpass.getpass("Type your password: ")
auth = authinfo.ApiAuth(user_arg, pass_arg)
authinfo.store(auth, args.provider, args.config)
exec_result = EXECUTION_RESULT_OK
if not args.hostname:
update_ddns = False
process_message = "Auth info stored."
else:
update_ddns = True
# informations arguments
elif args.usertoken and args.hostname:
if provider_class.auth_type == 'T':
auth = authinfo.ApiAuth(args.usertoken)
else:
auth = authinfo.ApiAuth(args.usertoken, args.password)
update_ddns = True
exec_result = EXECUTION_RESULT_OK
elif args.hostname:
if authinfo.exists(args.provider, args.config):
auth = authinfo.load(args.provider, args.config)
update_ddns = True
exec_result = EXECUTION_RESULT_OK
else:
update_ddns = False
exec_result = EXECUTION_RESULT_NOK
process_message = "No stored auth information found for " \
"provider: '%s'" % args.provider
else: # no arguments
update_ddns = False
exec_result = EXECUTION_RESULT_NOK
process_message = "Warning: The hostname to be updated must be " \
"provided.\nUsertoken and password can be either " \
"provided via command line or stored with --store " \
"option.\nExecute noipy --help for more details."
if update_ddns and args.provider == 'generic':
if args.url:
if not URL_RE.match(args.url):
process_message = "Malformed URL."
exec_result = EXECUTION_RESULT_NOK
update_ddns = False
else:
updater_options['url'] = args.url
else:
process_message = "Must use --url if --provider is 'generic' " \
"(default)"
exec_result = EXECUTION_RESULT_NOK
update_ddns = False
response_code = None
response_text = None
if update_ddns:
ip_address = args.ip if args.ip else utils.get_ip()
if not ip_address:
process_message = "Unable to get IP address. Check connection."
exec_result = EXECUTION_RESULT_NOK
elif ip_address == utils.get_dns_ip(args.hostname):
process_message = "No update required."
else:
updater = provider_class(auth, args.hostname, updater_options)
print("Updating hostname '%s' with IP address %s "
"[provider: '%s']..."
% (args.hostname, ip_address, args.provider))
response_code, response_text = updater.update_dns(ip_address)
process_message = updater.status_message
proc_result = {
'exec_result': exec_result,
'response_code': response_code,
'response_text': response_text,
'process_message': process_message,
}
return proc_result | Execute the update based on command line args and returns a dictionary
with 'execution result, ''response code', 'response info' and
'process friendly message'. | entailment |
def update_dns(self, new_ip):
"""Call No-IP API based on dict login_info and return the status code.
"""
headers = None
if self.auth_type == 'T':
api_call_url = self._base_url.format(hostname=self.hostname,
token=self.auth.token,
ip=new_ip)
else:
api_call_url = self._base_url.format(hostname=self.hostname,
ip=new_ip)
headers = {
'Authorization': "Basic %s" %
self.auth.base64key.decode('utf-8'),
'User-Agent': "%s/%s %s" % (__title__, __version__, __email__)
}
r = requests.get(api_call_url, headers=headers)
self.last_ddns_response = str(r.text).strip()
return r.status_code, r.text | Call No-IP API based on dict login_info and return the status code. | entailment |
def status_message(self):
"""Return friendly response from API based on response code. """
msg = None
if self.last_ddns_response in response_messages.keys():
return response_messages.get(self.last_ddns_response)
if 'good' in self.last_ddns_response:
ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group()
msg = "SUCCESS: DNS hostname IP (%s) successfully updated." % ip
elif 'nochg' in self.last_ddns_response:
ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group()
msg = "SUCCESS: IP address (%s) is up to date, nothing was changed. " \
"Additional 'nochg' updates may be considered abusive." % ip
else:
msg = "ERROR: Ooops! Something went wrong !!!"
return msg | Return friendly response from API based on response code. | entailment |
def get_ip():
"""Return machine's origin IP address.
"""
try:
r = requests.get(HTTPBIN_URL)
ip, _ = r.json()['origin'].split(',')
return ip if r.status_code == 200 else None
except requests.exceptions.ConnectionError:
return None | Return machine's origin IP address. | entailment |
def store(auth, provider, config_location=DEFAULT_CONFIG_DIR):
"""Store auth info in file for specified provider """
auth_file = None
try:
# only for custom locations
_create_config_dir(config_location,
"Creating custom config directory [%s]... ")
config_dir = os.path.join(config_location, NOIPY_CONFIG)
_create_config_dir(config_dir, "Creating directory [%s]... ")
auth_file = os.path.join(config_dir, provider)
print("Creating auth info file [%s]... " % auth_file, end="")
with open(auth_file, 'w') as f:
buff = auth.base64key.decode('utf-8')
f.write(buff)
print("OK.")
except IOError as e:
print('{0}: "{1}"'.format(e.strerror, auth_file))
raise e | Store auth info in file for specified provider | entailment |
def load(provider, config_location=DEFAULT_CONFIG_DIR):
"""Load provider specific auth info from file """
auth = None
auth_file = None
try:
config_dir = os.path.join(config_location, NOIPY_CONFIG)
print("Loading stored auth info [%s]... " % config_dir, end="")
auth_file = os.path.join(config_dir, provider)
with open(auth_file) as f:
auth_key = f.read()
auth = ApiAuth.get_instance(auth_key.encode('utf-8'))
print("OK.")
except IOError as e:
print('{0}: "{1}"'.format(e.strerror, auth_file))
raise e
return auth | Load provider specific auth info from file | entailment |
def exists(provider, config_location=DEFAULT_CONFIG_DIR):
"""Check whether provider info is already stored """
config_dir = os.path.join(config_location, NOIPY_CONFIG)
auth_file = os.path.join(config_dir, provider)
return os.path.exists(auth_file) | Check whether provider info is already stored | entailment |
def get_instance(cls, encoded_key):
"""Return an ApiAuth instance from an encoded key """
login_str = base64.b64decode(encoded_key).decode('utf-8')
usertoken, password = login_str.strip().split(':', 1)
instance = cls(usertoken, password)
return instance | Return an ApiAuth instance from an encoded key | entailment |
def update_index(model_items, model_name, action='index', bulk_size=100, num_docs=-1, start_date=None, end_date=None, refresh=True):
'''
Updates the index for the provided model_items.
:param model_items: a list of model_items (django Model instances, or proxy instances) which are to be indexed/updated or deleted.
If action is 'index', the model_items must be serializable objects. If action is 'delete', the model_items must be primary keys
corresponding to obects in the index.
:param model_name: doctype, which must also be the model name.
:param action: the action that you'd like to perform on this group of data. Must be in ('index', 'delete') and defaults to 'index.'
:param bulk_size: bulk size for indexing. Defaults to 100.
:param num_docs: maximum number of model_items from the provided list to be indexed.
:param start_date: start date for indexing. Must be as YYYY-MM-DD.
:param end_date: end date for indexing. Must be as YYYY-MM-DD.
:param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh
immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True.
:note: If model_items contain multiple models, then num_docs is applied to *each* model. For example, if bulk_size is set to 5,
and item contains models Article and Article2, then 5 model_items of Article *and* 5 model_items of Article2 will be indexed.
'''
src = Bungiesearch()
if action == 'delete' and not hasattr(model_items, '__iter__'):
raise ValueError("If action is 'delete', model_items must be an iterable of primary keys.")
logger.info('Getting index for model {}.'.format(model_name))
for index_name in src.get_index(model_name):
index_instance = src.get_model_index(model_name)
model = index_instance.get_model()
if num_docs == -1:
if isinstance(model_items, (list, tuple)):
num_docs = len(model_items)
else:
model_items = filter_model_items(index_instance, model_items, model_name, start_date, end_date)
num_docs = model_items.count()
if not model_items.ordered:
model_items = model_items.order_by('pk')
else:
logger.warning('Limiting the number of model_items to {} to {}.'.format(action, num_docs))
logger.info('{} {} documents on index {}'.format(action, num_docs, index_name))
prev_step = 0
max_docs = num_docs + bulk_size if num_docs > bulk_size else bulk_size + 1
for next_step in range(bulk_size, max_docs, bulk_size):
logger.info('{}: documents {} to {} of {} total on index {}.'.format(action.capitalize(), prev_step, next_step, num_docs, index_name))
data = create_indexed_document(index_instance, model_items[prev_step:next_step], action)
bulk_index(src.get_es_instance(), data, index=index_name, doc_type=model.__name__, raise_on_error=True)
prev_step = next_step
if refresh:
src.get_es_instance().indices.refresh(index=index_name) | Updates the index for the provided model_items.
:param model_items: a list of model_items (django Model instances, or proxy instances) which are to be indexed/updated or deleted.
If action is 'index', the model_items must be serializable objects. If action is 'delete', the model_items must be primary keys
corresponding to obects in the index.
:param model_name: doctype, which must also be the model name.
:param action: the action that you'd like to perform on this group of data. Must be in ('index', 'delete') and defaults to 'index.'
:param bulk_size: bulk size for indexing. Defaults to 100.
:param num_docs: maximum number of model_items from the provided list to be indexed.
:param start_date: start date for indexing. Must be as YYYY-MM-DD.
:param end_date: end date for indexing. Must be as YYYY-MM-DD.
:param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh
immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True.
:note: If model_items contain multiple models, then num_docs is applied to *each* model. For example, if bulk_size is set to 5,
and item contains models Article and Article2, then 5 model_items of Article *and* 5 model_items of Article2 will be indexed. | entailment |
def delete_index_item(item, model_name, refresh=True):
'''
Deletes an item from the index.
:param item: must be a serializable object.
:param model_name: doctype, which must also be the model name.
:param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh
immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True.
'''
src = Bungiesearch()
logger.info('Getting index for model {}.'.format(model_name))
for index_name in src.get_index(model_name):
index_instance = src.get_model_index(model_name)
item_es_id = index_instance.fields['_id'].value(item)
try:
src.get_es_instance().delete(index_name, model_name, item_es_id)
except NotFoundError as e:
logger.warning('NotFoundError: could not delete {}.{} from index {}: {}.'.format(model_name, item_es_id, index_name, str(e)))
if refresh:
src.get_es_instance().indices.refresh(index=index_name) | Deletes an item from the index.
:param item: must be a serializable object.
:param model_name: doctype, which must also be the model name.
:param refresh: a boolean that determines whether to refresh the index, making all operations performed since the last refresh
immediately available for search, instead of needing to wait for the scheduled Elasticsearch execution. Defaults to True. | entailment |
def create_indexed_document(index_instance, model_items, action):
'''
Creates the document that will be passed into the bulk index function.
Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete.
'''
data = []
if action == 'delete':
for pk in model_items:
data.append({'_id': pk, '_op_type': action})
else:
for doc in model_items:
if index_instance.matches_indexing_condition(doc):
data.append(index_instance.serialize_object(doc))
return data | Creates the document that will be passed into the bulk index function.
Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete. | entailment |
def filter_model_items(index_instance, model_items, model_name, start_date, end_date):
''' Filters the model items queryset based on start and end date.'''
if index_instance.updated_field is None:
logger.warning("No updated date field found for {} - not restricting with start and end date".format(model_name))
else:
if start_date:
model_items = model_items.filter(**{'{}__gte'.format(index_instance.updated_field): __str_to_tzdate__(start_date)})
if end_date:
model_items = model_items.filter(**{'{}__lte'.format(index_instance.updated_field): __str_to_tzdate__(end_date)})
return model_items | Filters the model items queryset based on start and end date. | entailment |
def extract_version(txt):
"""This function tries to extract the version from the help text of any
program."""
words = txt.replace(',', ' ').split()
version = None
for x in reversed(words):
if len(x) > 2:
if x[0].lower() == 'v':
x = x[1:]
if '.' in x and x[0].isdigit():
version = x
break
return version | This function tries to extract the version from the help text of any
program. | entailment |
def check(self, return_code=0):
"""Run command with arguments. Wait for command to complete. If the
exit code was as expected and there is no exception then return,
otherwise raise EasyProcessError.
:param return_code: int, expected return code
:rtype: self
"""
ret = self.call().return_code
ok = ret == return_code
if not ok:
raise EasyProcessError(
self, 'check error, return code is not {0}!'.format(return_code))
return self | Run command with arguments. Wait for command to complete. If the
exit code was as expected and there is no exception then return,
otherwise raise EasyProcessError.
:param return_code: int, expected return code
:rtype: self | entailment |
def call(self, timeout=None):
"""Run command with arguments. Wait for command to complete.
same as:
1. :meth:`start`
2. :meth:`wait`
3. :meth:`stop`
:rtype: self
"""
self.start().wait(timeout=timeout)
if self.is_alive():
self.stop()
return self | Run command with arguments. Wait for command to complete.
same as:
1. :meth:`start`
2. :meth:`wait`
3. :meth:`stop`
:rtype: self | entailment |
def start(self):
"""start command in background and does not wait for it.
:rtype: self
"""
if self.is_started:
raise EasyProcessError(self, 'process was started twice!')
if self.use_temp_files:
self._stdout_file = tempfile.TemporaryFile(prefix='stdout_')
self._stderr_file = tempfile.TemporaryFile(prefix='stderr_')
stdout = self._stdout_file
stderr = self._stderr_file
else:
stdout = subprocess.PIPE
stderr = subprocess.PIPE
cmd = list(map(uniencode, self.cmd))
try:
self.popen = subprocess.Popen(cmd,
stdout=stdout,
stderr=stderr,
cwd=self.cwd,
env=self.env,
)
except OSError as oserror:
log.debug('OSError exception: %s', oserror)
self.oserror = oserror
raise EasyProcessError(self, 'start error')
self.is_started = True
log.debug('process was started (pid=%s)', self.pid)
return self | start command in background and does not wait for it.
:rtype: self | entailment |
def wait(self, timeout=None):
"""Wait for command to complete.
Timeout:
- discussion: http://stackoverflow.com/questions/1191374/subprocess-with-timeout
- implementation: threading
:rtype: self
"""
if timeout is not None:
if not self._thread:
self._thread = threading.Thread(target=self._wait4process)
self._thread.daemon = 1
self._thread.start()
if self._thread:
self._thread.join(timeout=timeout)
self.timeout_happened = self.timeout_happened or self._thread.isAlive()
else:
# no timeout and no existing thread
self._wait4process()
return self | Wait for command to complete.
Timeout:
- discussion: http://stackoverflow.com/questions/1191374/subprocess-with-timeout
- implementation: threading
:rtype: self | entailment |
def sendstop(self):
'''
Kill process (:meth:`subprocess.Popen.terminate`).
Do not wait for command to complete.
:rtype: self
'''
if not self.is_started:
raise EasyProcessError(self, 'process was not started!')
log.debug('stopping process (pid=%s cmd="%s")', self.pid, self.cmd)
if self.popen:
if self.is_alive():
log.debug('process is active -> sending SIGTERM')
try:
try:
self.popen.terminate()
except AttributeError:
os.kill(self.popen.pid, signal.SIGKILL)
except OSError as oserror:
log.debug('exception in terminate:%s', oserror)
else:
log.debug('process was already stopped')
else:
log.debug('process was not started')
return self | Kill process (:meth:`subprocess.Popen.terminate`).
Do not wait for command to complete.
:rtype: self | entailment |
def wrap(self, func, delay=0):
'''
returns a function which:
1. start process
2. call func, save result
3. stop process
4. returns result
similar to :keyword:`with` statement
:rtype:
'''
def wrapped():
self.start()
if delay:
self.sleep(delay)
x = None
try:
x = func()
except OSError as oserror:
log.debug('OSError exception:%s', oserror)
self.oserror = oserror
raise EasyProcessError(self, 'wrap error!')
finally:
self.stop()
return x
return wrapped | returns a function which:
1. start process
2. call func, save result
3. stop process
4. returns result
similar to :keyword:`with` statement
:rtype: | entailment |
def send_message(self, message, room_id, **kwargs):
"""
Send a message to a given room
"""
return SendMessage(settings=self.settings, **kwargs).call(
message=message,
room_id=room_id,
**kwargs
) | Send a message to a given room | entailment |
def get_private_rooms(self, **kwargs):
"""
Get a listing of all private rooms with their names and IDs
"""
return GetPrivateRooms(settings=self.settings, **kwargs).call(**kwargs) | Get a listing of all private rooms with their names and IDs | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.