sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def change_owner(ctx, owner, uuid):
    """Changes the ownership of objects"""
    items = ctx.obj['objects']
    database = ctx.obj['db']
    # The owner argument is either a uuid or a user name, depending on flag
    lookup = {'uuid': owner} if uuid is True else {'name': owner}
    owner = database.objectmodels['user'].find_one(lookup)
    if owner is None:
        log('User unknown.', lvl=error)
        return
    for obj in items:
        obj.owner = owner.uuid
        obj.save()
    log('Done')
Changes the ownership of objects
entailment
def notify(self, event):
    """Notify a user

    :param event: notification event to be logged
    """
    self.log('Got a notification event!')
    # Dump the event both pretty-printed and as its raw attribute dict
    self.log(event, pretty=True)
    self.log(event.__dict__)
Notify a user
entailment
def clientdisconnect(self, event):
    """Handler to deal with a possibly disconnected remote controlling
    client

    :param event: ClientDisconnect Event
    """
    try:
        was_controller = event.clientuuid == self.remote_controller
        if was_controller:
            self.log("Remote controller disconnected!", lvl=critical)
            self.remote_controller = None
    except Exception as e:
        self.log("Strange thing while client disconnected", e, type(e))
Handler to deal with a possibly disconnected remote controlling client :param event: ClientDisconnect Event
entailment
def getlist(self, event):
    """Processes configuration list requests

    :param event:
    """
    try:
        data = []
        for comp in model_factory(Schema).find({}):
            try:
                entry = {
                    'name': comp.name,
                    'uuid': comp.uuid,
                    'class': comp.componentclass,
                    'active': comp.active
                }
                data.append(entry)
            except AttributeError:
                self.log('Bad component without component class encountered:', lvl=warn)
                self.log(comp.serializablefields(), pretty=True, lvl=warn)
        data.sort(key=lambda entry: entry['name'])
        response = {
            'component': 'hfos.ui.configurator',
            'action': 'getlist',
            'data': data
        }
        self.fireEvent(send(event.client.uuid, response))
        return
    except Exception as e:
        self.log("List error: ", e, type(e), lvl=error, exc=True)
Processes configuration list requests :param event:
entailment
def put(self, event):
    """Store a given configuration"""
    self.log("Configuration put request ", event.user)
    success = True
    try:
        component = model_factory(Schema).find_one({'uuid': event.data['uuid']})
        component.update(event.data)
        component.save()
        self.log('Updated component configuration:', component.name)
        self.fireEvent(reload_configuration(component.name))
    except (KeyError, ValueError, ValidationError, PermissionError) as e:
        success = False
        self.log('Storing component configuration failed: ', type(e), e, exc=True, lvl=error)
    response = {
        'component': 'hfos.ui.configurator',
        'action': 'put',
        'data': success
    }
    self.fireEvent(send(event.client.uuid, response))
    return
Store a given configuration
entailment
def get(self, event):
    """Get a stored configuration"""
    try:
        uuid = event.data['uuid']
    except KeyError:
        uuid = None
    if not uuid:
        self.log('Invalid get request without schema or component', lvl=error)
        return
    self.log("Config data get request for ", event.data, "from", event.user)
    component = model_factory(Schema).find_one({'uuid': uuid})
    payload = component.serializablefields()
    self.fireEvent(send(event.client.uuid, {
        'component': 'hfos.ui.configurator',
        'action': 'get',
        'data': payload
    }))
Get a stored configuration
entailment
def rec(self):
    """Records a single snapshot"""
    try:
        self._snapshot()
    except Exception as e:
        # Timer-driven entry point: never let a snapshot failure propagate
        self.log("Timer error: ", e, type(e), lvl=error)
Records a single snapshot
entailment
def _toggle_filming(self):
    """Toggles the camera system recording state"""
    if not self._filming:
        self.log("Starting operation")
        self._filming = True
        self.timer.start()
    else:
        self.log("Stopping operation")
        self._filming = False
        self.timer.stop()
Toggles the camera system recording state
entailment
def client_disconnect(self, event):
    """
    A client has disconnected, update possible subscriptions accordingly.

    :param event:
    """
    self.log("Removing disconnected client from subscriptions", lvl=debug)
    self._unsubscribe(event.clientuuid)
A client has disconnected, update possible subscriptions accordingly. :param event:
entailment
def get(self, event):
    """Get a specified object

    Responds with the serialized object (hidden fields stripped) and
    optionally subscribes the client to future changes of it.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    object_filter = self._get_filter(event)
    # The client may ask to be subscribed to future changes of this object
    if 'subscribe' in data:
        do_subscribe = data['subscribe'] is True
    else:
        do_subscribe = False
    try:
        uuid = str(data['uuid'])
    except (KeyError, TypeError):
        uuid = ""
    opts = schemastore[schema].get('options', {})
    # Fields that must never be handed out to clients
    hidden = opts.get('hidden', [])
    if object_filter == {}:
        if uuid == "":
            self.log('Object with no filter/uuid requested:', schema, data, lvl=warn)
            return
        # Fall back to looking the object up by its uuid
        object_filter = {'uuid': uuid}
    storage_object = None
    storage_object = objectmodels[schema].find_one(object_filter)
    if not storage_object:
        self._cancel_by_error(event, uuid + '(' + str(object_filter) + ') of ' + schema + ' unavailable')
        return
    if storage_object:
        self.log("Object found, checking permissions: ", data, lvl=verbose)
        if not self._check_permissions(user, 'read', storage_object):
            self._cancel_by_permission(schema, data, event)
            return
        # Strip hidden fields before serializing for the client
        for field in hidden:
            storage_object._fields.pop(field, None)
        if do_subscribe and uuid != "":
            self._add_subscription(uuid, event)
        result = {
            'component': 'hfos.events.objectmanager',
            'action': 'get',
            'data': {
                'schema': schema,
                'uuid': uuid,
                'object': storage_object.serializablefields()
            }
        }
        self._respond(None, result, event)
Get a specified object
entailment
def search(self, event):
    """Search for an object

    Supports a fulltext regex search on the name field or an arbitrary
    filter dict, optional field selection, paging (skip/limit), sorting
    and subscription to the found objects.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    # object_filter['$text'] = {'$search': str(data['search'])}
    if data.get('fulltext', False) is True:
        # Case-insensitive regex match on the object name
        object_filter = {
            'name': {
                '$regex': str(data['search']),
                '$options': '$i'
            }
        }
    else:
        if isinstance(data['search'], dict):
            object_filter = data['search']
        else:
            object_filter = {}
    # Optional list of field names to include per result ('*' means all)
    if 'fields' in data:
        fields = data['fields']
    else:
        fields = []
    skip = data.get('skip', 0)
    limit = data.get('limit', 0)
    sort = data.get('sort', None)
    # page = data.get('page', 0)
    # count = data.get('count', 0)
    #
    # if page > 0 and count > 0:
    #     skip = page * count
    #     limit = count
    if 'subscribe' in data:
        self.log('Subscription:', data['subscribe'], lvl=verbose)
        do_subscribe = data['subscribe'] is True
    else:
        do_subscribe = False
    object_list = []
    size = objectmodels[schema].count(object_filter)
    if size > WARNSIZE and (limit > 0 and limit > WARNSIZE):
        self.log("Getting a very long (", size, ") list of items for ", schema, lvl=warn)
    opts = schemastore[schema].get('options', {})
    # Fields that must never be handed out to clients
    hidden = opts.get('hidden', [])
    self.log("object_filter: ", object_filter, ' Schema: ', schema, "Fields: ", fields, lvl=verbose)
    # Translate paging/sorting request data into store find() options
    options = {}
    if skip > 0:
        options['skip'] = skip
    if limit > 0:
        options['limit'] = limit
    if sort is not None:
        options['sort'] = []
        for item in sort:
            key = item[0]
            direction = item[1]
            direction = ASCENDING if direction == 'asc' else DESCENDING
            options['sort'].append([key, direction])
    cursor = objectmodels[schema].find(object_filter, **options)
    for item in cursor:
        # Silently skip objects the requesting user may not list
        if not self._check_permissions(user, 'list', item):
            continue
        self.log("Search found item: ", item, lvl=verbose)
        try:
            list_item = {'uuid': item.uuid}
            if fields in ('*', ['*']):
                # Full serialization minus hidden fields
                item_fields = item.serializablefields()
                for field in hidden:
                    item_fields.pop(field, None)
                object_list.append(item_fields)
            else:
                if 'name' in item._fields:
                    list_item['name'] = item.name
                for field in fields:
                    if field in item._fields and \
                            field not in hidden:
                        list_item[field] = item._fields[field]
                    else:
                        list_item[field] = None
                object_list.append(list_item)
            if do_subscribe:
                self._add_subscription(item.uuid, event)
        except Exception as e:
            self.log("Faulty object or field: ", e, type(e), item._fields, fields, lvl=error, exc=True)
    # self.log("Generated object search list: ", object_list)
    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'search',
        'data': {
            'schema': schema,
            'list': object_list,
            'size': size
        }
    }
    self._respond(None, result, event)
Search for an object
entailment
def objectlist(self, event):
    """Get a list of objects

    Legacy request handler (see the warning log below); filters objects
    by the event's filter, checks per-object list permission and strips
    hidden fields before responding.
    """
    self.log('LEGACY LIST FUNCTION CALLED!', lvl=warn)
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    object_filter = self._get_filter(event)
    self.log('Object list for', schema, 'requested from', user.account.name, lvl=debug)
    # Optional list of field names to include per item ('*' means all)
    if 'fields' in data:
        fields = data['fields']
    else:
        fields = []
    object_list = []
    opts = schemastore[schema].get('options', {})
    # Fields that must never be handed out to clients
    hidden = opts.get('hidden', [])
    if objectmodels[schema].count(object_filter) > WARNSIZE:
        self.log("Getting a very long list of items for ", schema, lvl=warn)
    try:
        for item in objectmodels[schema].find(object_filter):
            try:
                # Silently skip objects the requesting user may not list
                if not self._check_permissions(user, 'list', item):
                    continue
                if fields in ('*', ['*']):
                    # Full serialization minus hidden fields
                    item_fields = item.serializablefields()
                    for field in hidden:
                        item_fields.pop(field, None)
                    object_list.append(item_fields)
                else:
                    list_item = {'uuid': item.uuid}
                    if 'name' in item._fields:
                        list_item['name'] = item._fields['name']
                    for field in fields:
                        if field in item._fields and field not in hidden:
                            list_item[field] = item._fields[field]
                        else:
                            list_item[field] = None
                    object_list.append(list_item)
            except Exception as e:
                self.log("Faulty object or field: ", e, type(e), item._fields, fields, lvl=error, exc=True)
    except ValidationError as e:
        self.log('Invalid object in database encountered!', e, exc=True, lvl=warn)
    # self.log("Generated object list: ", object_list)
    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'getlist',
        'data': {
            'schema': schema,
            'list': object_list
        }
    }
    self._respond(None, result, event)
Get a list of objects
entailment
def change(self, event):
    """Change an existing object

    Applies a single field change ('field' -> 'value') to the object
    identified by uuid, after permission and validation checks.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    try:
        uuid = data['uuid']
        change = data['change']
        field = change['field']
        new_data = change['value']
    except KeyError as e:
        self.log("Update request with missing arguments!", data, e, lvl=critical)
        self._cancel_by_error(event, 'missing_args')
        return
    storage_object = None
    try:
        storage_object = objectmodels[schema].find_one({'uuid': uuid})
    except Exception as e:
        self.log('Change for unknown object requested:', schema, data, lvl=warn)
    if storage_object is None:
        self._cancel_by_error(event, 'not_found')
        return
    if not self._check_permissions(user, 'write', storage_object):
        self._cancel_by_permission(schema, data, event)
        return
    self.log("Changing object:", storage_object._fields, lvl=debug)
    storage_object._fields[field] = new_data
    self.log("Storing object:", storage_object._fields, lvl=debug)
    try:
        # Make sure the change did not break the object's schema
        storage_object.validate()
    except ValidationError:
        self.log("Validation of changed object failed!", storage_object, lvl=warn)
        self._cancel_by_error(event, 'invalid_object')
        return
    storage_object.save()
    self.log("Object stored.")
    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'change',
        'data': {
            'schema': schema,
            'uuid': uuid
        }
    }
    self._respond(None, result, event)
Change an existing object
entailment
def put(self, event):
    """Put an object

    Creates a new object (uuid == 'create' or unknown uuid) or updates
    an existing one, after permission checks, then notifies backend
    listeners and subscribed clients.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    try:
        clientobject = data['obj']
        uuid = clientobject['uuid']
    except KeyError as e:
        self.log("Put request with missing arguments!", e, data, lvl=critical)
        return
    try:
        model = objectmodels[schema]
        created = False
        storage_object = None
        if uuid != 'create':
            storage_object = model.find_one({'uuid': uuid})
        if uuid == 'create' or model.count({'uuid': uuid}) == 0:
            # Object does not exist yet - create it, owned by the requester
            if uuid == 'create':
                uuid = str(uuid4())
            created = True
            clientobject['uuid'] = uuid
            clientobject['owner'] = user.uuid
            storage_object = model(clientobject)
            if not self._check_create_permission(user, schema):
                self._cancel_by_permission(schema, data, event)
                return
        if storage_object is not None:
            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return
            self.log("Updating object:", storage_object._fields, lvl=debug)
            storage_object.update(clientobject)
        else:
            storage_object = model(clientobject)
            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return
            self.log("Storing object:", storage_object._fields, lvl=debug)
            try:
                storage_object.validate()
            except ValidationError:
                self.log("Validation of new object failed!", clientobject, lvl=warn)
        storage_object.save()
        self.log("Object %s stored." % schema)
        # Notify backend listeners
        if created:
            notification = objectcreation(
                storage_object.uuid, schema, client
            )
        else:
            notification = objectchange(
                storage_object.uuid, schema, client
            )
        self._update_subscribers(schema, storage_object)
        result = {
            'component': 'hfos.events.objectmanager',
            'action': 'put',
            'data': {
                'schema': schema,
                'object': storage_object.serializablefields(),
                'uuid': storage_object.uuid,
            }
        }
        self._respond(notification, result, event)
    except Exception as e:
        self.log("Error during object storage:", e, type(e), data, lvl=error, exc=True, pretty=True)
Put an object
entailment
def delete(self, event):
    """Delete an existing object

    Accepts a single uuid or a list of uuids; each object is deleted
    after a write-permission check and all its subscribers are informed.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return
    try:
        uuids = data['uuid']
        # Normalize to a list so single and bulk deletes share one path
        if not isinstance(uuids, list):
            uuids = [uuids]
        if schema not in objectmodels.keys():
            self.log("Unknown schema encountered: ", schema, lvl=warn)
            return
        for uuid in uuids:
            self.log("Looking for object to be deleted:", uuid, lvl=debug)
            storage_object = objectmodels[schema].find_one({'uuid': uuid})
            if not storage_object:
                self._cancel_by_error(event, 'not found')
                return
            self.log("Found object.", lvl=debug)
            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return
            # self.log("Fields:", storage_object._fields, "\n\n\n",
            #          storage_object.__dict__)
            storage_object.delete()
            self.log("Deleted. Preparing notification.", lvl=debug)
            notification = objectdeletion(uuid, schema, client)
            if uuid in self.subscriptions:
                deletion = {
                    'component': 'hfos.events.objectmanager',
                    'action': 'deletion',
                    'data': {
                        'schema': schema,
                        'uuid': uuid,
                    }
                }
                # Tell every subscriber that the object is gone, then
                # drop the subscription entry itself
                for recipient in self.subscriptions[uuid]:
                    self.fireEvent(send(recipient, deletion))
                del (self.subscriptions[uuid])
            result = {
                'component': 'hfos.events.objectmanager',
                'action': 'delete',
                'data': {
                    'schema': schema,
                    'uuid': storage_object.uuid
                }
            }
            self._respond(notification, result, event)
    except Exception as e:
        self.log("Error during delete request: ", e, type(e), lvl=error)
Delete an existing object
entailment
def subscribe(self, event):
    """Subscribe to an object's future changes"""
    requested = event.data if isinstance(event.data, list) else [event.data]
    successful = []
    for uuid in requested:
        try:
            self._add_subscription(uuid, event)
        except KeyError:
            continue
        successful.append(uuid)
    self._respond(None, {
        'component': 'hfos.events.objectmanager',
        'action': 'subscribe',
        'data': {
            'uuid': successful,
            'success': True
        }
    }, event)
Subscribe to an object's future changes
entailment
def unsubscribe(self, event):
    """Unsubscribe from an object's future changes"""
    # TODO: Automatic Unsubscription
    requested = event.data if isinstance(event.data, list) else [event.data]
    removed = []
    for uuid in requested:
        if uuid not in self.subscriptions:
            continue
        self.subscriptions[uuid].pop(event.client.uuid)
        # Drop the whole entry once the last subscriber is gone
        if len(self.subscriptions[uuid]) == 0:
            del self.subscriptions[uuid]
        removed.append(uuid)
    self._respond(None, {
        'component': 'hfos.events.objectmanager',
        'action': 'unsubscribe',
        'data': {
            'uuid': removed,
            'success': True
        }
    }, event)
Unsubscribe from an object's future changes
entailment
def update_subscriptions(self, event): """OM event handler for to be stored and client shared objectmodels :param event: OMRequest with uuid, schema and object data """ # self.log("Event: '%s'" % event.__dict__) try: self._update_subscribers(event.schema, event.data) except Exception as e: self.log("Error during subscription update: ", type(e), e, exc=True)
OM event handler for to be stored and client shared objectmodels :param event: OMRequest with uuid, schema and object data
entailment
def GithubImporter(ctx, repository, all, owner, project, ignore_labels, no_tags, username, password):
    """Project Importer for Github Repository Issues

    Argument REPOSITORY must be given as 'username/repository'

    Owner and project have to be UUIDs
    """
    db = ctx.obj['db']
    # Resolve the target project by uuid first, then by name as fallback
    if project is not None:
        project_obj = db.objectmodels['project'].find_one({'uuid': project})
        if project_obj is None:
            project_obj = db.objectmodels['project'].find_one({'name': project})
        if project_obj is None:
            log('Project not found.', lvl=error)
            return
        else:
            project_uuid = project_obj.uuid
    else:
        project_uuid = None
    # Cache known tags by lowercase name so labels can be mapped to them
    tags = {}
    if not ignore_labels:
        for tag in db.objectmodels['tag'].find():
            tags[tag.name.lower()] = tag

    def write_issue(issue):
        """Stores a single github issue as task"""
        # Pull requests appear in the issues API but are not tasks
        if 'pull_request' not in issue:
            issue_tags = []
            if not ignore_labels:
                for l in issue['labels']:
                    if l['name'].lower() not in tags:
                        # Unknown label: create a matching tag on the fly
                        initial = {
                            'uuid': std_uuid(),
                            'name': l['name']
                        }
                        new_tag = db.objectmodels['tag'](initial)
                        new_tag.save()
                        tags[new_tag.name] = new_tag
                        issue_tags.append(new_tag.uuid)
                    else:
                        issue_tags.append(tags[l['name'].lower()].uuid)
            # Keep only the date part of the ISO timestamp
            date = issue['created_at'].split('T')[0]
            initial = {
                'uuid': std_uuid(),
                'name': issue['title'],
                'notes': str(issue['state']) + "\n\n" + issue['html_url'],
                'created': date,
                'project': project_uuid
            }
            if len(issue_tags) > 0:
                initial['tags'] = issue_tags
            task = db.objectmodels['task'](initial)
            task.save()
        else:
            log('Pull request issue:', issue, lvl=debug)

    def write_issues(r):
        """Parses JSON response and stores all issues."""
        if r.status_code != 200:
            raise Exception(r.status_code)
        for issue in r.json():
            write_issue(issue)

    def get_issues(name, state, auth):
        """Requests issues from GitHub API"""
        url = 'https://api.github.com/repos/{}/issues?state={}'.format(name, state)
        r = requests.get(url, auth=auth)
        write_issues(r)
        # Multiple requests are required if response is paged
        if 'link' in r.headers:
            # Parse the RFC 5988 Link header into {rel: url}
            pages = {rel[6:-1]: url[url.index('<') + 1:-1]
                     for url, rel in
                     (link.split(';') for link in r.headers['link'].split(','))}
            while 'last' in pages and 'next' in pages:
                # NOTE(review): pages is recomputed from the PREVIOUS
                # response's headers before fetching the next page --
                # verify this terminates correctly on multi-page results
                pages = {rel[6:-1]: url[url.index('<') + 1:-1]
                         for url, rel in
                         (link.split(';') for link in r.headers['link'].split(','))}
                r = requests.get(pages['next'], auth=auth)
                write_issues(r)
                if pages['next'] == pages['last']:
                    break

    # username = input("Username for 'https://github.com': ")
    # password = getpass("Password for 'https://{}@github.com': ".format(username))
    # auth = (username, password)
    # for repository in args.repositories:
    #     get_issues(repository)
    if all:
        state = 'all'
    else:
        state = 'open'
    auth = (username, password)
    get_issues(repository, state, auth)
Project Importer for Github Repository Issues Argument REPOSITORY must be given as 'username/repository' Owner and project have to be UUIDs
entailment
def all_languages():
    """Compile a list of all available language translations"""
    found = []
    for entry in os.listdir(localedir):
        # Reduce e.g. 'de_DE.UTF-8@euro' to its bare language code
        code = entry.split('_')[0].split('.')[0].split('@')[0]
        looks_like_code = 2 <= len(code) <= 3 and all(c.islower() for c in code)
        if looks_like_code and code != 'all':
            found.append(entry)
    found.sort()
    # English is always available as the untranslated default
    found.append('en')
    l10n_log('Registered languages:', found, lvl=verbose)
    return found
Compile a list of all available language translations
entailment
def language_token_to_name(languages):
    """Get a descriptive title for all languages"""
    with open(os.path.join(localedir, 'languages.json'), 'r') as f:
        lookup = json.load(f)
    result = {}
    for token in languages:
        token = token.lower()
        if token in lookup:
            result[token] = lookup[token]
        else:
            # Unknown token: fall back to the token itself as its title
            l10n_log('Language token lookup not found:', token, lvl=warn)
            result[token] = token
    return result
Get a descriptive title for all languages
entailment
def print_messages(domain, msg):
    """Debugging function to print all message language variants"""
    translator = Domain(domain)
    for language in all_languages():
        print(language, ':', translator.get(language, msg))
Debugging function to print all message language variants
entailment
def i18n(msg, event=None, lang='en', domain='backend'):
    """Gettext function wrapper to return a message in a specified language
    by domain

    To use internationalization (i18n) on your messages, import it as '_'
    and use as usual. Do not forget to supply the client's language setting.
    """
    # An event's client language takes precedence over the lang argument
    language = event.client.language if event is not None else lang
    return Domain(domain).get(language, msg)
Gettext function wrapper to return a message in a specified language by domain To use internationalization (i18n) on your messages, import it as '_' and use as usual. Do not forget to supply the client's language setting.
entailment
def std_hash(word, salt):
    """Generates a cryptographically strong (sha512) hash with this node's
    salt added.

    :param word: phrase to hash, as str or bytes
    :param salt: salt (bytes) mixed into the digest
    :return: hexadecimal sha512 digest string
    """
    # The original caught UnicodeDecodeError around str.encode - a
    # Python 2 relic: in Python 3, str.encode('utf-8') never raises that,
    # and a bytes input raised an uncaught AttributeError instead.
    if isinstance(word, bytes):
        password = word
    else:
        password = word.encode('utf-8')
    word_hash = sha512(password)
    word_hash.update(salt)
    hex_hash = word_hash.hexdigest()
    return hex_hash
Generates a cryptographically strong (sha512) hash with this nodes salt added.
entailment
def std_human_uid(kind=None):
    """Return a random generated human-friendly phrase as low-probability
    unique id"""
    # Pick the noun list according to the requested kind
    if kind == 'animal':
        kind_list = animals
    elif kind == 'place':
        kind_list = places
    else:
        kind_list = alphabet
    return "{color} {adjective} {kind} of {attribute}".format(
        color=choice(colors),
        adjective=choice(adjectives),
        kind=choice(kind_list),
        attribute=choice(attributes)
    )
Return a random generated human-friendly phrase as low-probability unique id
entailment
def std_table(rows):
    """Return a formatted ASCII table of the given rows.

    :param rows: list of namedtuples of one common type; the field names
        become the column headers. A single row is rendered as
        'field = value' lines instead of a table.
    :return: formatted table string (empty string for no rows)
    """
    result = ""
    if len(rows) > 1:
        headers = rows[0]._fields
        # Column width = widest cell (or header) in that column.
        # Bugfix: the widest cell may be a non-string (e.g. int), so its
        # display width is len(str(...)) - calling len() on it directly
        # raised TypeError for integer columns.
        lens = []
        for i in range(len(rows[0])):
            widest = max([x[i] for x in rows] + [headers[i]],
                         key=lambda x: len(str(x)))
            lens.append(len(str(widest)))
        formats = []
        hformats = []
        for i in range(len(rows[0])):
            if isinstance(rows[0][i], int):
                # Right-align numeric columns
                formats.append("%%%dd" % lens[i])
            else:
                formats.append("%%-%ds" % lens[i])
            hformats.append("%%-%ds" % lens[i])
        pattern = " | ".join(formats)
        hpattern = " | ".join(hformats)
        separator = "-+-".join(['-' * n for n in lens])
        result += hpattern % tuple(headers) + " \n"
        result += separator + "\n"
        for line in rows:
            result += pattern % tuple(t for t in line) + "\n"
    elif len(rows) == 1:
        row = rows[0]
        hwidth = len(max(row._fields, key=lambda x: len(x)))
        for i in range(len(row)):
            result += "%*s = %s" % (hwidth, row._fields[i], row[i]) + "\n"
    return result
Return a formatted table of given rows
entailment
def std_salt(length=16, lowercase=True):
    """Generates a cryptographically sane salt of 'length' (default: 16)
    alphanumeric characters

    :param length: number of characters to generate
    :param lowercase: include lowercase letters in the alphabet
    :return: random salt string
    """
    # random.choice is not suitable for security-sensitive values; use
    # the CSPRNG-backed secrets module instead (local import so the
    # surrounding module needs no new top-level dependency).
    from secrets import choice as secure_choice

    alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    if lowercase is True:
        alphabet += "abcdefghijklmnopqrstuvwxyz"
    return "".join(secure_choice(alphabet) for _ in range(length))
Generates a cryptographically sane salt of 'length' (default: 16) alphanumeric characters
entailment
def _get_translation(self, lang):
    """Add a new translation language to the live gettext translator"""
    if lang in self._translations:
        return self._translations[lang]
    # The fact that `fallback=True` is not the default is a serious design flaw.
    translation = gettext.translation(self._domain, localedir=localedir,
                                      languages=[lang], fallback=True)
    self._translations[lang] = translation
    return translation
Add a new translation language to the live gettext translator
entailment
def handler(*names, **kwargs):
    """Creates an Event Handler

    This decorator can be applied to methods of classes derived from
    :class:`circuits.core.components.BaseComponent`. It marks the method as
    a handler for the events passed as arguments to the ``@handler``
    decorator. The events are specified by their name (or by passing an
    hfosEvent subclass, whose realname() is used).

    The decorated method's arguments must match the arguments passed to the
    :class:`circuits.core.events.Event` on creation. Optionally, the method
    may have an additional first argument named *event*; if declared, the
    event object that caused the handler to be invoked is assigned to it.

    Keyword arguments:

    * ``channel``: handle events on this channel instead of the
      component's own channel.
    * ``priority``: handlers with higher priority are invoked earlier.
    * ``override=True``: replace a handler defined in a base class instead
      of becoming an additional handler for the event.

    **Return value**

    Normally, the results returned by the handlers for an event are simply
    collected in the :class:`circuits.core.events.Event`'s :attr:`value`
    attribute. As a special case, a handler may return a
    :class:`types.GeneratorType`: the dispatcher saves the generator as a
    task and resumes it after pending events have been executed, allowing a
    handler to fire an event and ``yield None`` until its result is
    available. For the simplest scenario, see
    :meth:`circuits.core.manager.Manager.callEvent`, which combines firing
    and waiting.
    """
    def wrapper(f):
        # @handler(False) explicitly marks the method as NOT a handler.
        if names and isinstance(names[0], bool) and not names[0]:
            f.handler = False
            return f
        if len(names) > 0 and inspect.isclass(names[0]) and \
                issubclass(names[0], hfosEvent):
            f.names = (str(names[0].realname()),)
        else:
            f.names = names
        f.handler = True
        f.priority = kwargs.get("priority", 0)
        f.channel = kwargs.get("channel", None)
        f.override = kwargs.get("override", False)
        # Bugfix: inspect.getargspec was deprecated since 3.0 and removed
        # in Python 3.11; getfullargspec is the drop-in replacement (its
        # first element is likewise the positional argument name list).
        args = inspect.getfullargspec(f)[0]
        if args and args[0] == "self":
            del args[0]
        f.event = getattr(f, "event", bool(args and args[0] == "event"))
        return f
    return wrapper
Creates an Event Handler This decorator can be applied to methods of classes derived from :class:`circuits.core.components.BaseComponent`. It marks the method as a handler for the events passed as arguments to the ``@handler`` decorator. The events are specified by their name. The decorated method's arguments must match the arguments passed to the :class:`circuits.core.events.Event` on creation. Optionally, the method may have an additional first argument named *event*. If declared, the event object that caused the handler to be invoked is assigned to it. By default, the handler is invoked by the component's root :class:`~.manager.Manager` for events that are propagated on the channel determined by the BaseComponent's *channel* attribute. This may be overridden by specifying a different channel as a keyword parameter of the decorator (``channel=...``). Keyword argument ``priority`` influences the order in which handlers for a specific event are invoked. The higher the priority, the earlier the handler is executed. If you want to override a handler defined in a base class of your component, you must specify ``override=True``, else your method becomes an additional handler for the event. **Return value** Normally, the results returned by the handlers for an event are simply collected in the :class:`circuits.core.events.Event`'s :attr:`value` attribute. As a special case, a handler may return a :class:`types.GeneratorType`. This signals to the dispatcher that the handler isn't ready to deliver a result yet. Rather, it has interrupted it's execution with a ``yield None`` statement, thus preserving its current execution state. The dispatcher saves the returned generator object as a task. All tasks are reexamined (i.e. their :meth:`next()` method is invoked) when the pending events have been executed. This feature avoids an unnecessarily complicated chaining of event handlers. Imagine a handler A that needs the results from firing an event E in order to complete. 
Then without this feature, the final action of A would be to fire event E, and another handler for an event ``SuccessE`` would be required to complete handler A's operation, now having the result from invoking E available (actually it's even a bit more complicated). Using this "suspend" feature, the handler simply fires event E and then yields ``None`` until e.g. it finds a result in E's :attr:`value` attribute. For the simplest scenario, there even is a utility method :meth:`circuits.core.manager.Manager.callEvent` that combines firing and waiting.
entailment
def log(self, *args, **kwargs):
    """Log a statement from this component"""
    # Identify the calling code object to annotate the log entry with it
    caller = inspect.currentframe().f_back.f_code
    if kwargs.get('exc') is True:
        # On exception logging, point at the raising line and append the
        # extracted traceback to the message arguments
        exc_type, exc_obj, exc_tb = exc_info()
        line_no = exc_tb.tb_lineno
        # print('EXCEPTION DATA:', line_no, exc_type, exc_obj, exc_tb)
        args += (traceback.extract_tb(exc_tb),)
    else:
        line_no = caller.co_firstlineno
    sourceloc = "[%.10s@%s:%i]" % (
        caller.co_name,
        caller.co_filename,
        line_no
    )
    hfoslog(sourceloc=sourceloc, emitter=self.uniquename, *args, **kwargs)
Log a statement from this component
entailment
def register(self, *args):
    """Register a configurable component in the configuration schema store"""
    super(ConfigurableMeta, self).register(*args)
    # Imported here, not at module level - presumably to avoid a circular
    # import with hfos.database; TODO confirm
    from hfos.database import configschemastore
    # self.log('ADDING SCHEMA:')
    # pprint(self.configschema)
    # Publish this component's configuration schema under its name
    configschemastore[self.name] = self.configschema
Register a configurable component in the configuration schema store
entailment
def unregister(self):
    """Removes the unique name from the systems unique name list"""
    # Free the unique name before the base class tears the component down
    self.names.remove(self.uniquename)
    super(ConfigurableMeta, self).unregister()
Removes the unique name from the systems unique name list
entailment
def _read_config(self):
    """Read this component's configuration from the database"""
    try:
        lookup = {'name': self.uniquename}
        self.config = self.componentmodel.find_one(lookup)
    except ServerSelectionTimeoutError:  # pragma: no cover
        self.log("No database access! Check if mongodb is running "
                 "correctly.", lvl=critical)
    if not self.config:
        self.log("No configuration found.", lvl=warn)
    else:
        self.log("Configuration read.", lvl=verbose)
Read this component's configuration from the database
entailment
def _write_config(self):
    """Write this component's configuration back to the database"""
    if self.config:
        self.config.save()
        self.log("Configuration stored.")
    else:
        self.log("Unable to write non existing configuration", lvl=error)
        return
Write this component's configuration back to the database
entailment
def _set_config(self, config=None): """Set this component's initial configuration""" if not config: config = {} try: # pprint(self.configschema) self.config = self.componentmodel(config) # self.log("Config schema:", lvl=critical) # pprint(self.config.__dict__) # pprint(self.config._fields) try: name = self.config.name self.log("Name set to: ", name, lvl=verbose) except (AttributeError, KeyError): # pragma: no cover self.log("Has no name.", lvl=verbose) try: self.config.name = self.uniquename except (AttributeError, KeyError) as e: # pragma: no cover self.log("Cannot set component name for configuration: ", e, type(e), self.name, exc=True, lvl=critical) try: uuid = self.config.uuid self.log("UUID set to: ", uuid, lvl=verbose) except (AttributeError, KeyError): self.log("Has no UUID", lvl=verbose) self.config.uuid = str(uuid4()) try: notes = self.config.notes self.log("Notes set to: ", notes, lvl=verbose) except (AttributeError, KeyError): self.log("Has no notes, trying docstring", lvl=verbose) notes = self.__doc__ if notes is None: notes = "No notes." else: notes = notes.lstrip().rstrip() self.log(notes) self.config.notes = notes try: componentclass = self.config.componentclass self.log("Componentclass set to: ", componentclass, lvl=verbose) except (AttributeError, KeyError): self.log("Has no component class", lvl=verbose) self.config.componentclass = self.name except ValidationError as e: self.log("Not setting invalid component configuration: ", e, type(e), exc=True, lvl=error)
Set this component's initial configuration
entailment
def reload_configuration(self, event): """Event triggered configuration reload""" if event.target == self.uniquename: self.log('Reloading configuration') self._read_config()
Event triggered configuration reload
entailment
def _augment_info(info): """Fill out the template information""" info['description_header'] = "=" * len(info['description']) info['component_name'] = info['plugin_name'].capitalize() info['year'] = time.localtime().tm_year info['license_longtext'] = '' info['keyword_list'] = u"" for keyword in info['keywords'].split(" "): print(keyword) info['keyword_list'] += u"\'" + str(keyword) + u"\', " print(info['keyword_list']) if len(info['keyword_list']) > 0: # strip last comma info['keyword_list'] = info['keyword_list'][:-2] return info
Fill out the template information
entailment
def _construct_module(info, target): """Build a module from templates and user supplied information""" for path in paths: real_path = os.path.abspath(os.path.join(target, path.format(**info))) log("Making directory '%s'" % real_path) os.makedirs(real_path) # pprint(info) for item in templates.values(): source = os.path.join('dev/templates', item[0]) filename = os.path.abspath( os.path.join(target, item[1].format(**info))) log("Creating file from template '%s'" % filename, emitter='MANAGE') write_template_file(source, filename, info)
Build a module from templates and user supplied information
entailment
def _ask_questionnaire(): """Asks questions to fill out a HFOS plugin template""" answers = {} print(info_header) pprint(questions.items()) for question, default in questions.items(): response = _ask(question, default, str(type(default)), show_hint=True) if type(default) == unicode and type(response) != str: response = response.decode('utf-8') answers[question] = response return answers
Asks questions to fill out a HFOS plugin template
entailment
def create_module(clear_target, target): """Creates a new template HFOS plugin module""" if os.path.exists(target): if clear_target: shutil.rmtree(target) else: log("Target exists! Use --clear to delete it first.", emitter='MANAGE') sys.exit(2) done = False info = None while not done: info = _ask_questionnaire() pprint(info) done = _ask('Is the above correct', default='y', data_type='bool') augmented_info = _augment_info(info) log("Constructing module %(plugin_name)s" % info) _construct_module(augmented_info, target)
Creates a new template HFOS plugin module
entailment
def lookup_field(key, lookup_type=None, placeholder=None, html_class="div", select_type="strapselect", mapping="uuid"): """Generates a lookup field for form definitions""" if lookup_type is None: lookup_type = key if placeholder is None: placeholder = "Select a " + lookup_type result = { 'key': key, 'htmlClass': html_class, 'type': select_type, 'placeholder': placeholder, 'options': { "type": lookup_type, "asyncCallback": "$ctrl.getFormData", "map": {'valueProperty': mapping, 'nameProperty': 'name'} } } return result
Generates a lookup field for form definitions
entailment
def fieldset(title, items, options=None): """A field set with a title and sub items""" result = { 'title': title, 'type': 'fieldset', 'items': items } if options is not None: result.update(options) return result
A field set with a title and sub items
entailment
def section(rows, columns, items, label=None): """A section consisting of rows and columns""" # TODO: Integrate label sections = [] column_class = "section-column col-sm-%i" % (12 / columns) for vertical in range(columns): column_items = [] for horizontal in range(rows): try: item = items[horizontal][vertical] column_items.append(item) except IndexError: hfoslog('Field in', label, 'omitted, due to missing row/column:', vertical, horizontal, lvl=warn, emitter='FORMS', tb=True, frame=2) column = { 'type': 'section', 'htmlClass': column_class, 'items': column_items } sections.append(column) result = { 'type': 'section', 'htmlClass': 'row', 'items': sections } return result
A section consisting of rows and columns
entailment
def emptyArray(key, add_label=None): """An array that starts empty""" result = { 'key': key, 'startEmpty': True } if add_label is not None: result['add'] = add_label result['style'] = {'add': 'btn-success'} return result
An array that starts empty
entailment
def tabset(titles, contents): """A tabbed container widget""" tabs = [] for no, title in enumerate(titles): tab = { 'title': title, } content = contents[no] if isinstance(content, list): tab['items'] = content else: tab['items'] = [content] tabs.append(tab) result = { 'type': 'tabs', 'tabs': tabs } return result
A tabbed container widget
entailment
def country_field(key='country'): """Provides a select box for country selection""" country_list = list(countries) title_map = [] for item in country_list: title_map.append({'value': item.alpha_3, 'name': item.name}) widget = { 'key': key, 'type': 'uiselect', 'titleMap': title_map } return widget
Provides a select box for country selection
entailment
def area_field(key='area'): """Provides a select box for country selection""" area_list = list(subdivisions) title_map = [] for item in area_list: title_map.append({'value': item.code, 'name': item.name}) widget = { 'key': key, 'type': 'uiselect', 'titleMap': title_map } return widget
Provides a select box for country selection
entailment
def timed_connectivity_check(self, event): """Tests internet connectivity in regular intervals and updates the nodestate accordingly""" self.status = self._can_connect() self.log('Timed connectivity check:', self.status, lvl=verbose) if self.status: if not self.old_status: self.log('Connectivity gained') self.fireEvent(backend_nodestate_toggle(STATE_UUID_CONNECTIVITY, on=True, force=True)) else: if self.old_status: self.log('Connectivity lost', lvl=warn) self.old_status = False self.fireEvent(backend_nodestate_toggle(STATE_UUID_CONNECTIVITY, off=True, force=True)) self.old_status = self.status
Tests internet connectivity in regular intervals and updates the nodestate accordingly
entailment
def _can_connect(self): """Tries to connect to the configured host:port and returns True if the connection was established""" self.log('Trying to reach configured connectivity check endpoint', lvl=verbose) try: socket.setdefaulttimeout(self.config.timeout) socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((self.config.host, self.config.port)) return True except Exception as ex: self.log(ex, pretty=True, lvl=debug) return False
Tries to connect to the configured host:port and returns True if the connection was established
entailment
def referenceframe(self, event): """Handles navigational reference frame updates. These are necessary to assign geo coordinates to alerts and other misc things. :param event with incoming referenceframe message """ self.log("Got a reference frame update! ", event, lvl=verbose) self.referenceframe = event.data
Handles navigational reference frame updates. These are necessary to assign geo coordinates to alerts and other misc things. :param event with incoming referenceframe message
entailment
def activityrequest(self, event): """ActivityMonitor event handler for incoming events :param event with incoming ActivityMonitor message """ # self.log("Event: '%s'" % event.__dict__) try: action = event.action data = event.data self.log("Activityrequest: ", action, data) except Exception as e: self.log("Error: '%s' %s" % (e, type(e)), lvl=error)
ActivityMonitor event handler for incoming events :param event with incoming ActivityMonitor message
entailment
def modify(ctx, schema, uuid, object_filter, field, value): """Modify field values of objects""" database = ctx.obj['db'] model = database.objectmodels[schema] obj = None if uuid: obj = model.find_one({'uuid': uuid}) elif object_filter: obj = model.find_one(literal_eval(object_filter)) else: log('No object uuid or filter specified.', lvl=error) if obj is None: log('No object found', lvl=error) return log('Object found, modifying', lvl=debug) try: new_value = literal_eval(value) except ValueError: log('Interpreting value as string') new_value = str(value) obj._fields[field] = new_value obj.validate() log('Changed object validated', lvl=debug) obj.save() log('Done')
Modify field values of objects
entailment
def view(ctx, schema, uuid, object_filter): """Show stored objects""" database = ctx.obj['db'] if schema is None: log('No schema given. Read the help', lvl=warn) return model = database.objectmodels[schema] if uuid: obj = model.find({'uuid': uuid}) elif object_filter: obj = model.find(literal_eval(object_filter)) else: obj = model.find() for item in obj: pprint(item._fields)
Show stored objects
entailment
def delete(ctx, schema, uuid, object_filter, yes): """Delete stored objects (CAUTION!)""" database = ctx.obj['db'] if schema is None: log('No schema given. Read the help', lvl=warn) return model = database.objectmodels[schema] if uuid: count = model.count({'uuid': uuid}) obj = model.find({'uuid': uuid}) elif object_filter: count = model.count(literal_eval(object_filter)) obj = model.find(literal_eval(object_filter)) else: count = model.count() obj = model.find() if count == 0: log('No objects to delete found') return if not yes and not _ask("Are you sure you want to delete %i objects" % count, default=False, data_type="bool", show_hint=True): return for item in obj: item.delete() log('Done')
Delete stored objects (CAUTION!)
entailment
def validate(ctx, schema, all_schemata): """Validates all objects or all objects of a given schema.""" database = ctx.obj['db'] if schema is None: if all_schemata is False: log('No schema given. Read the help', lvl=warn) return else: schemata = database.objectmodels.keys() else: schemata = [schema] for schema in schemata: try: things = database.objectmodels[schema] with click.progressbar(things.find(), length=things.count(), label='Validating %15s' % schema) as object_bar: for obj in object_bar: obj.validate() except Exception as e: log('Exception while validating:', schema, e, type(e), '\n\nFix this object and rerun validation!', emitter='MANAGE', lvl=error) log('Done')
Validates all objects or all objects of a given schema.
entailment
def find_field(ctx, search, by_type, obj): """Find fields in registered data models.""" # TODO: Fix this to work recursively on all possible subschemes if search is not None: search = search else: search = _ask("Enter search term") database = ctx.obj['db'] def find(search_schema, search_field, find_result=None, key=""): """Examine a schema to find fields by type or name""" if find_result is None: find_result = [] fields = search_schema['properties'] if not by_type: if search_field in fields: find_result.append(key) # log("Found queried fieldname in ", model) else: for field in fields: try: if "type" in fields[field]: # log(fields[field], field) if fields[field]["type"] == search_field: find_result.append((key, field)) # log("Found field", field, "in", model) except KeyError as e: log("Field access error:", e, type(e), exc=True, lvl=debug) if 'properties' in fields: # log('Sub properties checking:', fields['properties']) find_result.append(find(fields['properties'], search_field, find_result, key=fields['name'])) for field in fields: if 'items' in fields[field]: if 'properties' in fields[field]['items']: # log('Sub items checking:', fields[field]) find_result.append(find(fields[field]['items'], search_field, find_result, key=field)) else: pass # log('Items without proper definition!') return find_result if obj is not None: schema = database.objectmodels[obj]._schema result = find(schema, search, [], key="top") if result: # log(args.object, result) print(obj) pprint(result) else: for model, thing in database.objectmodels.items(): schema = thing._schema result = find(schema, search, [], key="top") if result: print(model) # log(model, result) print(result)
Find fields in registered data models.
entailment
def Distance(lat1, lon1, lat2, lon2): """Get distance between pairs of lat-lon points""" az12, az21, dist = wgs84_geod.inv(lon1, lat1, lon2, lat2) return az21, dist
Get distance between pairs of lat-lon points
entailment
def client_details(self, *args): """Display known details about a given client""" self.log(_('Client details:', lang='de')) client = self._clients[args[0]] self.log('UUID:', client.uuid, 'IP:', client.ip, 'Name:', client.name, 'User:', self._users[client.useruuid], pretty=True)
Display known details about a given client
entailment
def client_list(self, *args): """Display a list of connected clients""" if len(self._clients) == 0: self.log('No clients connected') else: self.log(self._clients, pretty=True)
Display a list of connected clients
entailment
def users_list(self, *args): """Display a list of connected users""" if len(self._users) == 0: self.log('No users connected') else: self.log(self._users, pretty=True)
Display a list of connected users
entailment
def sourcess_list(self, *args): """Display a list of all registered events""" from pprint import pprint sources = {} sources.update(self.authorized_events) sources.update(self.anonymous_events) for source in sources: pprint(source)
Display a list of all registered events
entailment
def events_list(self, *args): """Display a list of all registered events""" def merge(a, b, path=None): "merges b into a" if path is None: path = [] for key in b: if key in a: if isinstance(a[key], dict) and isinstance(b[key], dict): merge(a[key], b[key], path + [str(key)]) elif a[key] == b[key]: pass # same leaf value else: raise Exception('Conflict at %s' % '.'.join(path + [str(key)])) else: a[key] = b[key] return a events = {} sources = merge(self.authorized_events, self.anonymous_events) for source, source_events in sources.items(): events[source] = [] for item in source_events: events[source].append(item) self.log(events, pretty=True)
Display a list of all registered events
entailment
def who(self, *args): """Display a table of connected users and clients""" if len(self._users) == 0: self.log('No users connected') if len(self._clients) == 0: self.log('No clients connected') return Row = namedtuple("Row", ['User', 'Client', 'IP']) rows = [] for user in self._users.values(): for key, client in self._clients.items(): if client.useruuid == user.uuid: row = Row(user.account.name, key, client.ip) rows.append(row) for key, client in self._clients.items(): if client.useruuid is None: row = Row('ANON', key, client.ip) rows.append(row) self.log("\n" + std_table(rows))
Display a table of connected users and clients
entailment
def disconnect(self, sock): """Handles socket disconnections""" self.log("Disconnect ", sock, lvl=debug) try: if sock in self._sockets: self.log("Getting socket", lvl=debug) sockobj = self._sockets[sock] self.log("Getting clientuuid", lvl=debug) clientuuid = sockobj.clientuuid self.log("getting useruuid", lvl=debug) useruuid = self._clients[clientuuid].useruuid self.log("Firing disconnect event", lvl=debug) self.fireEvent(clientdisconnect(clientuuid, self._clients[ clientuuid].useruuid)) self.log("Logging out relevant client", lvl=debug) if useruuid is not None: self.log("Client was logged in", lvl=debug) try: self._logoutclient(useruuid, clientuuid) self.log("Client logged out", useruuid, clientuuid) except Exception as e: self.log("Couldn't clean up logged in user! ", self._users[useruuid], e, type(e), lvl=critical) self.log("Deleting Client (", self._clients.keys, ")", lvl=debug) del self._clients[clientuuid] self.log("Deleting Socket", lvl=debug) del self._sockets[sock] except Exception as e: self.log("Error during disconnect handling: ", e, type(e), lvl=critical)
Handles socket disconnections
entailment
def _logoutclient(self, useruuid, clientuuid): """Log out a client and possibly associated user""" self.log("Cleaning up client of logged in user.", lvl=debug) try: self._users[useruuid].clients.remove(clientuuid) if len(self._users[useruuid].clients) == 0: self.log("Last client of user disconnected.", lvl=verbose) self.fireEvent(userlogout(useruuid, clientuuid)) del self._users[useruuid] self._clients[clientuuid].useruuid = None except Exception as e: self.log("Error during client logout: ", e, type(e), clientuuid, useruuid, lvl=error, exc=True)
Log out a client and possibly associated user
entailment
def connect(self, *args): """Registers new sockets and their clients and allocates uuids""" self.log("Connect ", args, lvl=verbose) try: sock = args[0] ip = args[1] if sock not in self._sockets: self.log("New client connected:", ip, lvl=debug) clientuuid = str(uuid4()) self._sockets[sock] = Socket(ip, clientuuid) # Key uuid is temporary, until signin, will then be replaced # with account uuid self._clients[clientuuid] = Client( sock=sock, ip=ip, clientuuid=clientuuid, ) self.log("Client connected:", clientuuid, lvl=debug) else: self.log("Old IP reconnected!", lvl=warn) # self.fireEvent(write(sock, "Another client is # connecting from your IP!")) # self._sockets[sock] = (ip, uuid.uuid4()) except Exception as e: self.log("Error during connect: ", e, type(e), lvl=critical)
Registers new sockets and their clients and allocates uuids
entailment
def send(self, event): """Sends a packet to an already known user or one of his clients by UUID""" try: jsonpacket = json.dumps(event.packet, cls=ComplexEncoder) if event.sendtype == "user": # TODO: I think, caching a user name <-> uuid table would # make sense instead of looking this up all the time. if event.uuid is None: userobject = objectmodels['user'].find_one({ 'name': event.username }) else: userobject = objectmodels['user'].find_one({ 'uuid': event.uuid }) if userobject is None: self.log("No user by that name known.", lvl=warn) return else: uuid = userobject.uuid self.log("Broadcasting to all of users clients: '%s': '%s" % ( uuid, str(event.packet)[:20]), lvl=network) if uuid not in self._users: self.log("User not connected!", event, lvl=critical) return clients = self._users[uuid].clients for clientuuid in clients: sock = self._clients[clientuuid].sock if not event.raw: self.log("Sending json to client", jsonpacket[:50], lvl=network) self.fireEvent(write(sock, jsonpacket), "wsserver") else: self.log("Sending raw data to client") self.fireEvent(write(sock, event.packet), "wsserver") else: # only to client self.log("Sending to user's client: '%s': '%s'" % ( event.uuid, jsonpacket[:20]), lvl=network) if event.uuid not in self._clients: if not event.fail_quiet: self.log("Unknown client!", event.uuid, lvl=critical) self.log("Clients:", self._clients, lvl=debug) return sock = self._clients[event.uuid].sock if not event.raw: self.fireEvent(write(sock, jsonpacket), "wsserver") else: self.log("Sending raw data to client", lvl=network) self.fireEvent(write(sock, event.packet[:20]), "wsserver") except Exception as e: self.log("Exception during sending: %s (%s)" % (e, type(e)), lvl=critical, exc=True)
Sends a packet to an already known user or one of his clients by UUID
entailment
def broadcast(self, event): """Broadcasts an event either to all users or clients, depending on event flag""" try: if event.broadcasttype == "users": if len(self._users) > 0: self.log("Broadcasting to all users:", event.content, lvl=network) for useruuid in self._users.keys(): self.fireEvent( send(useruuid, event.content, sendtype="user")) # else: # self.log("Not broadcasting, no users connected.", # lvl=debug) elif event.broadcasttype == "clients": if len(self._clients) > 0: self.log("Broadcasting to all clients: ", event.content, lvl=network) for client in self._clients.values(): self.fireEvent(write(client.sock, event.content), "wsserver") # else: # self.log("Not broadcasting, no clients # connected.", # lvl=debug) elif event.broadcasttype == "socks": if len(self._sockets) > 0: self.log("Emergency?! Broadcasting to all sockets: ", event.content) for sock in self._sockets: self.fireEvent(write(sock, event.content), "wsserver") # else: # self.log("Not broadcasting, no sockets # connected.", # lvl=debug) except Exception as e: self.log("Error during broadcast: ", e, type(e), lvl=critical)
Broadcasts an event either to all users or clients, depending on event flag
entailment
def _checkPermissions(self, user, event): """Checks if the user has in any role that allows to fire the event.""" for role in user.account.roles: if role in event.roles: self.log('Access granted', lvl=verbose) return True self.log('Access denied', lvl=verbose) return False
Checks if the user has in any role that allows to fire the event.
entailment
def _handleAuthorizedEvents(self, component, action, data, user, client): """Isolated communication link for authorized events.""" try: if component == "debugger": self.log(component, action, data, user, client, lvl=info) if not user and component in self.authorized_events.keys(): self.log("Unknown client tried to do an authenticated " "operation: %s", component, action, data, user) return event = self.authorized_events[component][action]['event'](user, action, data, client) self.log('Authorized event roles:', event.roles, lvl=verbose) if not self._checkPermissions(user, event): result = { 'component': 'hfos.ui.clientmanager', 'action': 'Permission', 'data': _('You have no role that allows this action.', lang='de') } self.fireEvent(send(event.client.uuid, result)) return self.log("Firing authorized event: ", component, action, str(data)[:100], lvl=debug) # self.log("", (user, action, data, client), lvl=critical) self.fireEvent(event) except Exception as e: self.log("Critical error during authorized event handling:", component, action, e, type(e), lvl=critical, exc=True)
Isolated communication link for authorized events.
entailment
def _handleAnonymousEvents(self, component, action, data, client): """Handler for anonymous (public) events""" try: event = self.anonymous_events[component][action]['event'] self.log("Firing anonymous event: ", component, action, str(data)[:20], lvl=network) # self.log("", (user, action, data, client), lvl=critical) self.fireEvent(event(action, data, client)) except Exception as e: self.log("Critical error during anonymous event handling:", component, action, e, type(e), lvl=critical, exc=True)
Handler for anonymous (public) events
entailment
def _handleAuthenticationEvents(self, requestdata, requestaction, clientuuid, sock): """Handler for authentication events""" # TODO: Move this stuff over to ./auth.py if requestaction in ("login", "autologin"): try: self.log("Login request", lvl=verbose) if requestaction == "autologin": username = password = None requestedclientuuid = requestdata auto = True self.log("Autologin for", requestedclientuuid, lvl=debug) else: username = requestdata['username'] password = requestdata['password'] if 'clientuuid' in requestdata: requestedclientuuid = requestdata['clientuuid'] else: requestedclientuuid = None auto = False self.log("Auth request by", username, lvl=verbose) self.fireEvent(authenticationrequest( username, password, clientuuid, requestedclientuuid, sock, auto, ), "auth") return except Exception as e: self.log("Login failed: ", e, type(e), lvl=warn, exc=True) elif requestaction == "logout": self.log("User logged out, refreshing client.", lvl=network) try: if clientuuid in self._clients: client = self._clients[clientuuid] user_id = client.useruuid if client.useruuid: self.log("Logout client uuid: ", clientuuid) self._logoutclient(client.useruuid, clientuuid) self.fireEvent(clientdisconnect(clientuuid)) else: self.log("Client is not connected!", lvl=warn) except Exception as e: self.log("Error during client logout: ", e, type(e), lvl=error, exc=True) else: self.log("Unsupported auth action requested:", requestaction, lvl=warn)
Handler for authentication events
entailment
def _reset_flood_offenders(self, *args): """Resets the list of flood offenders on event trigger""" offenders = [] # self.log('Resetting flood offenders') for offender, offence_time in self._flooding.items(): if time() - offence_time < 10: self.log('Removed offender from flood list:', offender) offenders.append(offender) for offender in offenders: del self._flooding[offender]
Resets the list of flood offenders on event trigger
entailment
def _check_flood_protection(self, component, action, clientuuid): """Checks if any clients have been flooding the node""" if clientuuid not in self._flood_counter: self._flood_counter[clientuuid] = 0 self._flood_counter[clientuuid] += 1 if self._flood_counter[clientuuid] > 100: packet = { 'component': 'hfos.ui.clientmanager', 'action': 'Flooding', 'data': True } self.fireEvent(send(clientuuid, packet)) self.log('Flooding from', clientuuid) return True
Checks if any clients have been flooding the node
entailment
def read(self, *args): """Handles raw client requests and distributes them to the appropriate components""" self.log("Beginning new transaction: ", args, lvl=network) try: sock, msg = args[0], args[1] user = password = client = clientuuid = useruuid = requestdata = \ requestaction = None # self.log("", msg) clientuuid = self._sockets[sock].clientuuid except Exception as e: self.log("Receiving error: ", e, type(e), lvl=error) if clientuuid in self._flooding: return try: msg = json.loads(msg) self.log("Message from client received: ", msg, lvl=network) except Exception as e: self.log("JSON Decoding failed! %s (%s of %s)" % (msg, e, type(e))) return try: requestcomponent = msg['component'] requestaction = msg['action'] except (KeyError, AttributeError) as e: self.log("Unpacking error: ", msg, e, type(e), lvl=error) return if self._check_flood_protection(requestcomponent, requestaction, clientuuid): self.log('Flood protection triggered') self._flooding[clientuuid] = time() try: # TODO: Do not unpickle or decode anything from unsafe events requestdata = msg['data'] if isinstance(requestdata, (dict, list)) and 'raw' in requestdata: # self.log(requestdata['raw'], lvl=critical) requestdata['raw'] = b64decode(requestdata['raw']) # self.log(requestdata['raw']) except (KeyError, AttributeError) as e: self.log("No payload.", lvl=network) requestdata = None if requestcomponent == "auth": self._handleAuthenticationEvents(requestdata, requestaction, clientuuid, sock) return try: client = self._clients[clientuuid] except KeyError as e: self.log('Could not get client for request!', e, type(e), lvl=warn) return if requestcomponent in self.anonymous_events and requestaction in \ self.anonymous_events[requestcomponent]: self.log('Executing anonymous event:', requestcomponent, requestaction) try: self._handleAnonymousEvents(requestcomponent, requestaction, requestdata, client) except Exception as e: self.log("Anonymous request failed:", e, type(e), lvl=warn, exc=True) return elif 
requestcomponent in self.authorized_events: try: useruuid = client.useruuid self.log("Authenticated operation requested by ", useruuid, client.config, lvl=network) except Exception as e: self.log("No useruuid!", e, type(e), lvl=critical) return self.log('Checking if user is logged in', lvl=verbose) try: user = self._users[useruuid] except KeyError: if not (requestaction == 'ping' and requestcomponent == 'hfos.ui.clientmanager'): self.log("User not logged in.", lvl=warn) return self.log('Handling event:', requestcomponent, requestaction, lvl=verbose) try: self._handleAuthorizedEvents(requestcomponent, requestaction, requestdata, user, client) except Exception as e: self.log("User request failed: ", e, type(e), lvl=warn, exc=True) else: self.log('Invalid event received:', requestcomponent, requestaction, lvl=warn)
Handles raw client requests and distributes them to the appropriate components
entailment
def authentication(self, event): """Links the client to the granted account and profile, then notifies the client""" try: self.log("Authorization has been granted by DB check:", event.username, lvl=debug) account, profile, clientconfig = event.userdata useruuid = event.useruuid originatingclientuuid = event.clientuuid clientuuid = clientconfig.uuid if clientuuid != originatingclientuuid: self.log("Mutating client uuid to request id:", clientuuid, lvl=network) # Assign client to user if useruuid in self._users: signedinuser = self._users[useruuid] else: signedinuser = User(account, profile, useruuid) self._users[account.uuid] = signedinuser if clientuuid in signedinuser.clients: self.log("Client configuration already logged in.", lvl=critical) # TODO: What now?? # Probably senseful would be to add the socket to the # client's other socket # The clients would be identical then - that could cause # problems # which could be remedied by duplicating the configuration else: signedinuser.clients.append(clientuuid) self.log("Active client (", clientuuid, ") registered to " "user", useruuid, lvl=debug) # Update socket.. 
socket = self._sockets[event.sock] socket.clientuuid = clientuuid self._sockets[event.sock] = socket # ..and client lists try: language = clientconfig.language except AttributeError: language = "en" # TODO: Rewrite and simplify this: newclient = Client( sock=event.sock, ip=socket.ip, clientuuid=clientuuid, useruuid=useruuid, name=clientconfig.name, config=clientconfig, language=language ) del (self._clients[originatingclientuuid]) self._clients[clientuuid] = newclient authpacket = {"component": "auth", "action": "login", "data": account.serializablefields()} self.log("Transmitting Authorization to client", authpacket, lvl=network) self.fireEvent( write(event.sock, json.dumps(authpacket)), "wsserver" ) profilepacket = {"component": "profile", "action": "get", "data": profile.serializablefields()} self.log("Transmitting Profile to client", profilepacket, lvl=network) self.fireEvent(write(event.sock, json.dumps(profilepacket)), "wsserver") clientconfigpacket = {"component": "clientconfig", "action": "get", "data": clientconfig.serializablefields()} self.log("Transmitting client configuration to client", clientconfigpacket, lvl=network) self.fireEvent(write(event.sock, json.dumps(clientconfigpacket)), "wsserver") self.fireEvent(userlogin(clientuuid, useruuid, clientconfig, signedinuser)) self.log("User configured: Name", signedinuser.account.name, "Profile", signedinuser.profile.uuid, "Clients", signedinuser.clients, lvl=debug) except Exception as e: self.log("Error (%s, %s) during auth grant: %s" % ( type(e), e, event), lvl=error)
Links the client to the granted account and profile, then notifies the client
entailment
def selectlanguage(self, event): """Store client's selection of a new translation""" self.log('Language selection event:', event.client, pretty=True) if event.data not in all_languages(): self.log('Unavailable language selected:', event.data, lvl=warn) language = None else: language = event.data if language is None: language = 'en' event.client.language = language if event.client.config is not None: event.client.config.language = language event.client.config.save()
Store client's selection of a new translation
entailment
def getlanguages(self, event): """Compile and return a human readable list of registered translations""" self.log('Client requests all languages.', lvl=verbose) result = { 'component': 'hfos.ui.clientmanager', 'action': 'getlanguages', 'data': language_token_to_name(all_languages()) } self.fireEvent(send(event.client.uuid, result))
Compile and return a human readable list of registered translations
entailment
def ping(self, event): """Perform a ping to measure client <-> node latency""" self.log('Client ping received:', event.data, lvl=verbose) response = { 'component': 'hfos.ui.clientmanager', 'action': 'pong', 'data': [event.data, time() * 1000] } self.fire(send(event.client.uuid, response))
Perform a ping to measure client <-> node latency
entailment
def convert(self, lat, lon, source, dest, height=0, datetime=None,
            precision=1e-10, ssheight=50*6371):
    """Converts between geodetic, modified apex, quasi-dipole and MLT.

    Parameters
    ==========
    lat : array_like
        Latitude
    lon : array_like
        Longitude/MLT
    source : {'geo', 'apex', 'qd', 'mlt'}
        Input coordinate system
    dest : {'geo', 'apex', 'qd', 'mlt'}
        Output coordinate system
    height : array_like, optional
        Altitude in km
    datetime : :class:`datetime.datetime`
        Date and time for MLT conversions (required for MLT conversions)
    precision : float, optional
        Precision of output (degrees) when converting to geo. A negative
        value of this argument produces a low-precision calculation of
        geodetic lat/lon based only on their spherical harmonic
        representation. A positive value causes the underlying Fortran
        routine to iterate until feeding the output geo lat/lon into
        geo2qd (APXG2Q) reproduces the input QD lat/lon to within the
        specified precision (all coordinates being converted to geo are
        converted to QD first and passed through APXG2Q).
    ssheight : float, optional
        Altitude in km to use for converting the subsolar point from
        geographic to magnetic coordinates. A high altitude is used
        to ensure the subsolar point is mapped to high latitudes, which
        prevents the South-Atlantic Anomaly (SAA) from influencing
        the MLT.

    Returns
    =======
    lat : ndarray or float
        Converted latitude (if converting to MLT, output latitude is apex)
    lon : ndarray or float
        Converted longitude/MLT
    """
    if datetime is None and ('mlt' in [source, dest]):
        raise ValueError('datetime must be given for MLT calculations')

    lat = helpers.checklat(lat)

    if source == dest:
        return lat, lon
    # from geo
    elif source == 'geo' and dest == 'apex':
        lat, lon = self.geo2apex(lat, lon, height)
    elif source == 'geo' and dest == 'qd':
        lat, lon = self.geo2qd(lat, lon, height)
    elif source == 'geo' and dest == 'mlt':
        lat, lon = self.geo2apex(lat, lon, height)
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from apex
    elif source == 'apex' and dest == 'geo':
        lat, lon, _ = self.apex2geo(lat, lon, height, precision=precision)
    elif source == 'apex' and dest == 'qd':
        lat, lon = self.apex2qd(lat, lon, height=height)
    elif source == 'apex' and dest == 'mlt':
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from qd
    elif source == 'qd' and dest == 'geo':
        lat, lon, _ = self.qd2geo(lat, lon, height, precision=precision)
    elif source == 'qd' and dest == 'apex':
        lat, lon = self.qd2apex(lat, lon, height=height)
    elif source == 'qd' and dest == 'mlt':
        lat, lon = self.qd2apex(lat, lon, height=height)
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from mlt (input latitude assumed apex)
    elif source == 'mlt' and dest == 'geo':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
        lat, lon, _ = self.apex2geo(lat, lon, height, precision=precision)
    elif source == 'mlt' and dest == 'apex':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
    elif source == 'mlt' and dest == 'qd':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
        lat, lon = self.apex2qd(lat, lon, height=height)
    # no other transformations are implemented
    else:
        estr = 'Unknown coordinate transformation: '
        estr += '{} -> {}'.format(source, dest)
        raise NotImplementedError(estr)

    return lat, lon
Converts between geodetic, modified apex, quasi-dipole and MLT. Parameters ========== lat : array_like Latitude lon : array_like Longitude/MLT source : {'geo', 'apex', 'qd', 'mlt'} Input coordinate system dest : {'geo', 'apex', 'qd', 'mlt'} Output coordinate system height : array_like, optional Altitude in km datetime : :class:`datetime.datetime` Date and time for MLT conversions (required for MLT conversions) precision : float, optional Precision of output (degrees) when converting to geo. A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision (all coordinates being converted to geo are converted to QD first and passed through APXG2Q). ssheight : float, optional Altitude in km to use for converting the subsolar point from geographic to magnetic coordinates. A high altitude is used to ensure the subsolar point is mapped to high latitudes, which prevents the South-Atlantic Anomaly (SAA) from influencing the MLT. Returns ======= lat : ndarray or float Converted latitude (if converting to MLT, output latitude is apex) lat : ndarray or float Converted longitude/MLT
entailment
def geo2apex(self, glat, glon, height): """Converts geodetic to modified apex coordinates. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Altitude in km Returns ======= alat : ndarray or float Modified apex latitude alon : ndarray or float Modified apex longitude """ glat = helpers.checklat(glat, name='glat') alat, alon = self._geo2apex(glat, glon, height) if np.any(np.float64(alat) == -9999): warnings.warn('Apex latitude set to -9999 where undefined ' '(apex height may be < reference height)') # if array is returned, dtype is object, so convert to float return np.float64(alat), np.float64(alon)
Converts geodetic to modified apex coordinates. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Altitude in km Returns ======= alat : ndarray or float Modified apex latitude alon : ndarray or float Modified apex longitude
entailment
def apex2geo(self, alat, alon, height, precision=1e-10): """Converts modified apex to geodetic coordinates. Parameters ========== alat : array_like Modified apex latitude alon : array_like Modified apex longitude height : array_like Altitude in km precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= glat : ndarray or float Geodetic latitude glon : ndarray or float Geodetic longitude error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q) """ alat = helpers.checklat(alat, name='alat') qlat, qlon = self.apex2qd(alat, alon, height=height) glat, glon, error = self.qd2geo(qlat, qlon, height, precision=precision) return glat, glon, error
Converts modified apex to geodetic coordinates. Parameters ========== alat : array_like Modified apex latitude alon : array_like Modified apex longitude height : array_like Altitude in km precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= glat : ndarray or float Geodetic latitude glon : ndarray or float Geodetic longitude error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q)
entailment
def geo2qd(self, glat, glon, height): """Converts geodetic to quasi-dipole coordinates. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Altitude in km Returns ======= qlat : ndarray or float Quasi-dipole latitude qlon : ndarray or float Quasi-dipole longitude """ glat = helpers.checklat(glat, name='glat') qlat, qlon = self._geo2qd(glat, glon, height) # if array is returned, dtype is object, so convert to float return np.float64(qlat), np.float64(qlon)
Converts geodetic to quasi-dipole coordinates. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Altitude in km Returns ======= qlat : ndarray or float Quasi-dipole latitude qlon : ndarray or float Quasi-dipole longitude
entailment
def qd2geo(self, qlat, qlon, height, precision=1e-10): """Converts quasi-dipole to geodetic coordinates. Parameters ========== qlat : array_like Quasi-dipole latitude qlon : array_like Quasi-dipole longitude height : array_like Altitude in km precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= glat : ndarray or float Geodetic latitude glon : ndarray or float Geodetic longitude error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q) """ qlat = helpers.checklat(qlat, name='qlat') glat, glon, error = self._qd2geo(qlat, qlon, height, precision) # if array is returned, dtype is object, so convert to float return np.float64(glat), np.float64(glon), np.float64(error)
Converts quasi-dipole to geodetic coordinates. Parameters ========== qlat : array_like Quasi-dipole latitude qlon : array_like Quasi-dipole longitude height : array_like Altitude in km precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= glat : ndarray or float Geodetic latitude glon : ndarray or float Geodetic longitude error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q)
entailment
def _apex2qd_nonvectorized(self, alat, alon, height): """Convert from apex to quasi-dipole (not-vectorised) Parameters ----------- alat : (float) Apex latitude in degrees alon : (float) Apex longitude in degrees height : (float) Height in km Returns --------- qlat : (float) Quasi-dipole latitude in degrees qlon : (float) Quasi-diplole longitude in degrees """ alat = helpers.checklat(alat, name='alat') # convert modified apex to quasi-dipole: qlon = alon # apex height hA = self.get_apex(alat) if hA < height: if np.isclose(hA, height, rtol=0, atol=1e-5): # allow for values that are close hA = height else: estr = 'height {:.3g} is > apex height '.format(np.max(height)) estr += '{:.3g} for alat {:.3g}'.format(hA, alat) raise ApexHeightError(estr) qlat = np.sign(alat) * np.degrees(np.arccos(np.sqrt((self.RE + height) / (self.RE + hA)))) return qlat, qlon
Convert from apex to quasi-dipole (not-vectorised) Parameters ----------- alat : (float) Apex latitude in degrees alon : (float) Apex longitude in degrees height : (float) Height in km Returns --------- qlat : (float) Quasi-dipole latitude in degrees qlon : (float) Quasi-diplole longitude in degrees
entailment
def apex2qd(self, alat, alon, height): """Converts modified apex to quasi-dipole coordinates. Parameters ========== alat : array_like Modified apex latitude alon : array_like Modified apex longitude height : array_like Altitude in km Returns ======= qlat : ndarray or float Quasi-dipole latitude qlon : ndarray or float Quasi-dipole longitude Raises ====== ApexHeightError if `height` > apex height """ qlat, qlon = self._apex2qd(alat, alon, height) # if array is returned, the dtype is object, so convert to float return np.float64(qlat), np.float64(qlon)
Converts modified apex to quasi-dipole coordinates. Parameters ========== alat : array_like Modified apex latitude alon : array_like Modified apex longitude height : array_like Altitude in km Returns ======= qlat : ndarray or float Quasi-dipole latitude qlon : ndarray or float Quasi-dipole longitude Raises ====== ApexHeightError if `height` > apex height
entailment
def qd2apex(self, qlat, qlon, height): """Converts quasi-dipole to modified apex coordinates. Parameters ========== qlat : array_like Quasi-dipole latitude qlon : array_like Quasi-dipole longitude height : array_like Altitude in km Returns ======= alat : ndarray or float Modified apex latitude alon : ndarray or float Modified apex longitude Raises ====== ApexHeightError if apex height < reference height """ alat, alon = self._qd2apex(qlat, qlon, height) # if array is returned, the dtype is object, so convert to float return np.float64(alat), np.float64(alon)
Converts quasi-dipole to modified apex coordinates. Parameters ========== qlat : array_like Quasi-dipole latitude qlon : array_like Quasi-dipole longitude height : array_like Altitude in km Returns ======= alat : ndarray or float Modified apex latitude alon : ndarray or float Modified apex longitude Raises ====== ApexHeightError if apex height < reference height
entailment
def mlon2mlt(self, mlon, datetime, ssheight=50*6371): """Computes the magnetic local time at the specified magnetic longitude and UT. Parameters ========== mlon : array_like Magnetic longitude (apex and quasi-dipole longitude are always equal) datetime : :class:`datetime.datetime` Date and time ssheight : float, optional Altitude in km to use for converting the subsolar point from geographic to magnetic coordinates. A high altitude is used to ensure the subsolar point is mapped to high latitudes, which prevents the South-Atlantic Anomaly (SAA) from influencing the MLT. Returns ======= mlt : ndarray or float Magnetic local time [0, 24) Notes ===== To compute the MLT, we find the apex longitude of the subsolar point at the given time. Then the MLT of the given point will be computed from the separation in magnetic longitude from this point (1 hour = 15 degrees). """ ssglat, ssglon = helpers.subsol(datetime) ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight) # np.float64 will ensure lists are converted to arrays return (180 + np.float64(mlon) - ssalon)/15 % 24
Computes the magnetic local time at the specified magnetic longitude and UT. Parameters ========== mlon : array_like Magnetic longitude (apex and quasi-dipole longitude are always equal) datetime : :class:`datetime.datetime` Date and time ssheight : float, optional Altitude in km to use for converting the subsolar point from geographic to magnetic coordinates. A high altitude is used to ensure the subsolar point is mapped to high latitudes, which prevents the South-Atlantic Anomaly (SAA) from influencing the MLT. Returns ======= mlt : ndarray or float Magnetic local time [0, 24) Notes ===== To compute the MLT, we find the apex longitude of the subsolar point at the given time. Then the MLT of the given point will be computed from the separation in magnetic longitude from this point (1 hour = 15 degrees).
entailment
def mlt2mlon(self, mlt, datetime, ssheight=50*6371): """Computes the magnetic longitude at the specified magnetic local time and UT. Parameters ========== mlt : array_like Magnetic local time datetime : :class:`datetime.datetime` Date and time ssheight : float, optional Altitude in km to use for converting the subsolar point from geographic to magnetic coordinates. A high altitude is used to ensure the subsolar point is mapped to high latitudes, which prevents the South-Atlantic Anomaly (SAA) from influencing the MLT. Returns ======= mlon : ndarray or float Magnetic longitude [0, 360) (apex and quasi-dipole longitude are always equal) Notes ===== To compute the magnetic longitude, we find the apex longitude of the subsolar point at the given time. Then the magnetic longitude of the given point will be computed from the separation in magnetic local time from this point (1 hour = 15 degrees). """ ssglat, ssglon = helpers.subsol(datetime) ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight) # np.float64 will ensure lists are converted to arrays return (15*np.float64(mlt) - 180 + ssalon + 360) % 360
Computes the magnetic longitude at the specified magnetic local time and UT. Parameters ========== mlt : array_like Magnetic local time datetime : :class:`datetime.datetime` Date and time ssheight : float, optional Altitude in km to use for converting the subsolar point from geographic to magnetic coordinates. A high altitude is used to ensure the subsolar point is mapped to high latitudes, which prevents the South-Atlantic Anomaly (SAA) from influencing the MLT. Returns ======= mlon : ndarray or float Magnetic longitude [0, 360) (apex and quasi-dipole longitude are always equal) Notes ===== To compute the magnetic longitude, we find the apex longitude of the subsolar point at the given time. Then the magnetic longitude of the given point will be computed from the separation in magnetic local time from this point (1 hour = 15 degrees).
entailment
def map_to_height(self, glat, glon, height, newheight, conjugate=False, precision=1e-10): """Performs mapping of points along the magnetic field to the closest or conjugate hemisphere. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Source altitude in km newheight : array_like Destination altitude in km conjugate : bool, optional Map to `newheight` in the conjugate hemisphere instead of the closest hemisphere precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= newglat : ndarray or float Geodetic latitude of mapped point newglon : ndarray or float Geodetic longitude of mapped point error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q) Notes ===== The mapping is done by converting glat/glon/height to modified apex lat/lon, and converting back to geographic using newheight (if conjugate, use negative apex latitude when converting back) """ alat, alon = self.geo2apex(glat, glon, height) if conjugate: alat = -alat try: newglat, newglon, error = self.apex2geo(alat, alon, newheight, precision=precision) except ApexHeightError: raise ApexHeightError("newheight is > apex height") return newglat, newglon, error
Performs mapping of points along the magnetic field to the closest or conjugate hemisphere. Parameters ========== glat : array_like Geodetic latitude glon : array_like Geodetic longitude height : array_like Source altitude in km newheight : array_like Destination altitude in km conjugate : bool, optional Map to `newheight` in the conjugate hemisphere instead of the closest hemisphere precision : float, optional Precision of output (degrees). A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision. Returns ======= newglat : ndarray or float Geodetic latitude of mapped point newglon : ndarray or float Geodetic longitude of mapped point error : ndarray or float The angular difference (degrees) between the input QD coordinates and the qlat/qlon produced by feeding the output glat and glon into geo2qd (APXG2Q) Notes ===== The mapping is done by converting glat/glon/height to modified apex lat/lon, and converting back to geographic using newheight (if conjugate, use negative apex latitude when converting back)
entailment
def map_E_to_height(self, alat, alon, height, newheight, E): """Performs mapping of electric field along the magnetic field. It is assumed that the electric field is perpendicular to B. Parameters ========== alat : (N,) array_like or float Modified apex latitude alon : (N,) array_like or float Modified apex longitude height : (N,) array_like or float Source altitude in km newheight : (N,) array_like or float Destination altitude in km E : (3,) or (3, N) array_like Electric field (at `alat`, `alon`, `height`) in geodetic east, north, and up components Returns ======= E : (3, N) or (3,) ndarray The electric field at `newheight` (geodetic east, north, and up components) """ return self._map_EV_to_height(alat, alon, height, newheight, E, 'E')
Performs mapping of electric field along the magnetic field. It is assumed that the electric field is perpendicular to B. Parameters ========== alat : (N,) array_like or float Modified apex latitude alon : (N,) array_like or float Modified apex longitude height : (N,) array_like or float Source altitude in km newheight : (N,) array_like or float Destination altitude in km E : (3,) or (3, N) array_like Electric field (at `alat`, `alon`, `height`) in geodetic east, north, and up components Returns ======= E : (3, N) or (3,) ndarray The electric field at `newheight` (geodetic east, north, and up components)
entailment
def map_V_to_height(self, alat, alon, height, newheight, V): """Performs mapping of electric drift velocity along the magnetic field. It is assumed that the electric field is perpendicular to B. Parameters ========== alat : (N,) array_like or float Modified apex latitude alon : (N,) array_like or float Modified apex longitude height : (N,) array_like or float Source altitude in km newheight : (N,) array_like or float Destination altitude in km V : (3,) or (3, N) array_like Electric drift velocity (at `alat`, `alon`, `height`) in geodetic east, north, and up components Returns ======= V : (3, N) or (3,) ndarray The electric drift velocity at `newheight` (geodetic east, north, and up components) """ return self._map_EV_to_height(alat, alon, height, newheight, V, 'V')
Performs mapping of electric drift velocity along the magnetic field. It is assumed that the electric field is perpendicular to B. Parameters ========== alat : (N,) array_like or float Modified apex latitude alon : (N,) array_like or float Modified apex longitude height : (N,) array_like or float Source altitude in km newheight : (N,) array_like or float Destination altitude in km V : (3,) or (3, N) array_like Electric drift velocity (at `alat`, `alon`, `height`) in geodetic east, north, and up components Returns ======= V : (3, N) or (3,) ndarray The electric drift velocity at `newheight` (geodetic east, north, and up components)
entailment
def basevectors_qd(self, lat, lon, height, coords='geo', precision=1e-10): """Returns quasi-dipole base vectors f1 and f2 at the specified coordinates. The vectors are described by Richmond [1995] [2]_ and Emmert et al. [2010] [3]_. The vector components are geodetic east and north. Parameters ========== lat : (N,) array_like or float Latitude lon : (N,) array_like or float Longitude height : (N,) array_like or float Altitude in km coords : {'geo', 'apex', 'qd'}, optional Input coordinate system precision : float, optional Precision of output (degrees) when converting to geo. A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision (all coordinates being converted to geo are converted to QD first and passed through APXG2Q). Returns ======= f1 : (2, N) or (2,) ndarray f2 : (2, N) or (2,) ndarray References ========== .. [2] Richmond, A. D. (1995), Ionospheric Electrodynamics Using Magnetic Apex Coordinates, Journal of geomagnetism and geoelectricity, 47(2), 191–212, :doi:`10.5636/jgg.47.191`. .. [3] Emmert, J. T., A. D. Richmond, and D. P. Drob (2010), A computationally compact representation of Magnetic-Apex and Quasi-Dipole coordinates with smooth base vectors, J. Geophys. Res., 115(A8), A08322, :doi:`10.1029/2010JA015326`. """ glat, glon = self.convert(lat, lon, coords, 'geo', height=height, precision=precision) f1, f2 = self._basevec(glat, glon, height) # if inputs are not scalar, each vector is an array of arrays, # so reshape to a single array if f1.dtype == object: f1 = np.vstack(f1).T f2 = np.vstack(f2).T return f1, f2
Returns quasi-dipole base vectors f1 and f2 at the specified coordinates. The vectors are described by Richmond [1995] [2]_ and Emmert et al. [2010] [3]_. The vector components are geodetic east and north. Parameters ========== lat : (N,) array_like or float Latitude lon : (N,) array_like or float Longitude height : (N,) array_like or float Altitude in km coords : {'geo', 'apex', 'qd'}, optional Input coordinate system precision : float, optional Precision of output (degrees) when converting to geo. A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision (all coordinates being converted to geo are converted to QD first and passed through APXG2Q). Returns ======= f1 : (2, N) or (2,) ndarray f2 : (2, N) or (2,) ndarray References ========== .. [2] Richmond, A. D. (1995), Ionospheric Electrodynamics Using Magnetic Apex Coordinates, Journal of geomagnetism and geoelectricity, 47(2), 191–212, :doi:`10.5636/jgg.47.191`. .. [3] Emmert, J. T., A. D. Richmond, and D. P. Drob (2010), A computationally compact representation of Magnetic-Apex and Quasi-Dipole coordinates with smooth base vectors, J. Geophys. Res., 115(A8), A08322, :doi:`10.1029/2010JA015326`.
entailment
def basevectors_apex(self, lat, lon, height, coords='geo',
                     precision=1e-10):
    """Returns base vectors in quasi-dipole and apex coordinates.

    The vectors are described by Richmond [1995] [4]_ and
    Emmert et al. [2010] [5]_.  The vector components are geodetic east,
    north, and up (only east and north for `f1` and `f2`).

    Parameters
    ==========
    lat : (N,) array_like or float
        Latitude
    lon : (N,) array_like or float
        Longitude
    height : (N,) array_like or float
        Altitude in km
    coords : {'geo', 'apex', 'qd'}, optional
        Input coordinate system
    precision : float, optional
        Precision of output (degrees) when converting to geo. A negative
        value of this argument produces a low-precision calculation of
        geodetic lat/lon based only on their spherical harmonic
        representation. A positive value causes the underlying Fortran
        routine to iterate until feeding the output geo lat/lon into
        geo2qd (APXG2Q) reproduces the input QD lat/lon to within the
        specified precision (all coordinates being converted to geo are
        converted to QD first and passed through APXG2Q).

    Returns
    =======
    f1, f2 : (2, N) or (2,) ndarray
    f3, g1, g2, g3, d1, d2, d3, e1, e2, e3 : (3, N) or (3,) ndarray

    Note
    ====
    `f3`, `g1`, `g2`, and `g3` are not part of the Fortran code by
    Emmert et al. [2010] [5]_. They are calculated by this Python
    library according to the following equations in
    Richmond [1995] [4]_:

    * `g1`: Eqn. 6.3
    * `g2`: Eqn. 6.4
    * `g3`: Eqn. 6.5
    * `f3`: Eqn. 6.8

    References
    ==========
    .. [4] Richmond, A. D. (1995), Ionospheric Electrodynamics Using
           Magnetic Apex Coordinates, Journal of geomagnetism and
           geoelectricity, 47(2), 191–212, :doi:`10.5636/jgg.47.191`.

    .. [5] Emmert, J. T., A. D. Richmond, and D. P. Drob (2010),
           A computationally compact representation of Magnetic-Apex
           and Quasi-Dipole coordinates with smooth base vectors,
           J. Geophys. Res., 115(A8), A08322,
           :doi:`10.1029/2010JA015326`.
    """
    glat, glon = self.convert(lat, lon, coords, 'geo', height=height,
                              precision=precision)

    # single Fortran call returns qd/apex coordinates and the base
    # vectors at fixed positions in the result tuple
    returnvals = self._geo2apexall(glat, glon, height)
    qlat = np.float64(returnvals[0])
    alat = np.float64(returnvals[2])
    f1, f2 = returnvals[4:6]
    d1, d2, d3 = returnvals[7:10]
    e1, e2, e3 = returnvals[11:14]

    # if inputs are not scalar, each vector is an array of arrays,
    # so reshape to a single array
    if f1.dtype == object:
        f1 = np.vstack(f1).T
        f2 = np.vstack(f2).T
        d1 = np.vstack(d1).T
        d2 = np.vstack(d2).T
        d3 = np.vstack(d3).T
        e1 = np.vstack(e1).T
        e2 = np.vstack(e2).T
        e3 = np.vstack(e3).T

    # make sure arrays are 2D
    f1 = f1.reshape((2, f1.size//2))
    f2 = f2.reshape((2, f2.size//2))
    d1 = d1.reshape((3, d1.size//3))
    d2 = d2.reshape((3, d2.size//3))
    d3 = d3.reshape((3, d3.size//3))
    e1 = e1.reshape((3, e1.size//3))
    e2 = e2.reshape((3, e2.size//3))
    e3 = e3.reshape((3, e3.size//3))

    # compute f3, g1, g2, g3
    # F1/F2 are f1/f2 padded with a zero up-component so cross products
    # are 3D; F is the up-component of F1 x F2
    F1 = np.vstack((f1, np.zeros_like(f1[0])))
    F2 = np.vstack((f2, np.zeros_like(f2[0])))
    F = np.cross(F1.T, F2.T).T[-1]
    cosI = helpers.getcosIm(alat)
    # unit vertical vector, shaped for broadcasting against (3, N)
    k = np.array([0, 0, 1], dtype=np.float64).reshape((3, 1))
    g1 = ((self.RE + np.float64(height))
          / (self.RE + self.refh))**(3/2) * d1 / F
    g2 = -1.0 / (2.0 * F * np.tan(np.radians(qlat))) * \
        (k + ((self.RE + np.float64(height)) / (self.RE + self.refh))
         * d2 / cosI)
    g3 = k*F
    f3 = np.cross(g1.T, g2.T).T

    if np.any(alat == -9999):
        warnings.warn(('Base vectors g, d, e, and f3 set to -9999 where '
                       'apex latitude is undefined (apex height may be < '
                       'reference height)'))
        f3 = np.where(alat == -9999, -9999, f3)
        g1 = np.where(alat == -9999, -9999, g1)
        g2 = np.where(alat == -9999, -9999, g2)
        g3 = np.where(alat == -9999, -9999, g3)
        d1 = np.where(alat == -9999, -9999, d1)
        d2 = np.where(alat == -9999, -9999, d2)
        d3 = np.where(alat == -9999, -9999, d3)
        e1 = np.where(alat == -9999, -9999, e1)
        e2 = np.where(alat == -9999, -9999, e2)
        e3 = np.where(alat == -9999, -9999, e3)

    # drop singleton dimensions so scalar input yields (2,)/(3,) vectors
    return tuple(np.squeeze(x) for x in [f1, f2, f3, g1, g2, g3, d1, d2,
                                         d3, e1, e2, e3])
Returns base vectors in quasi-dipole and apex coordinates. The vectors are described by Richmond [1995] [4]_ and Emmert et al. [2010] [5]_. The vector components are geodetic east, north, and up (only east and north for `f1` and `f2`). Parameters ========== lat, lon : (N,) array_like or float Latitude lat : (N,) array_like or float Longitude height : (N,) array_like or float Altitude in km coords : {'geo', 'apex', 'qd'}, optional Input coordinate system return_all : bool, optional Will also return f3, g1, g2, and g3, and f1 and f2 have 3 components (the last component is zero). Requires `lat`, `lon`, and `height` to be broadcast to 1D (at least one of the parameters must be 1D and the other two parameters must be 1D or 0D). precision : float, optional Precision of output (degrees) when converting to geo. A negative value of this argument produces a low-precision calculation of geodetic lat/lon based only on their spherical harmonic representation. A positive value causes the underlying Fortran routine to iterate until feeding the output geo lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon to within the specified precision (all coordinates being converted to geo are converted to QD first and passed through APXG2Q). Returns ======= f1, f2 : (2, N) or (2,) ndarray f3, g1, g2, g3, d1, d2, d3, e1, e2, e3 : (3, N) or (3,) ndarray Note ==== `f3`, `g1`, `g2`, and `g3` are not part of the Fortran code by Emmert et al. [2010] [5]_. They are calculated by this Python library according to the following equations in Richmond [1995] [4]_: * `g1`: Eqn. 6.3 * `g2`: Eqn. 6.4 * `g3`: Eqn. 6.5 * `f3`: Eqn. 6.8 References ========== .. [4] Richmond, A. D. (1995), Ionospheric Electrodynamics Using Magnetic Apex Coordinates, Journal of geomagnetism and geoelectricity, 47(2), 191–212, :doi:`10.5636/jgg.47.191`. .. [5] Emmert, J. T., A. D. Richmond, and D. P. 
Drob (2010), A computationally compact representation of Magnetic-Apex and Quasi-Dipole coordinates with smooth base vectors, J. Geophys. Res., 115(A8), A08322, :doi:`10.1029/2010JA015326`.
entailment
def get_apex(self, lat, height=None):
    """Compute the height of a field line's apex for a given latitude.

    Parameters
    -----------
    lat : (float)
        Latitude in degrees
    height : (float or NoneType)
        Height above the surface of the earth in km or NoneType to use
        reference height (default=None)

    Returns
    ----------
    apex_height : (float)
        Height of the field line apex in km
    """
    # Validate (and possibly normalise) the latitude before using it.
    checked_lat = helpers.checklat(lat, name='alat')

    # Fall back on the instance's reference height when none is supplied.
    if height is None:
        height = self.refh

    # Invert cos^2(lat) = (R_E + h) / (R_E + h_apex) for the apex height.
    clat_squared = np.cos(np.radians(checked_lat)) ** 2
    return (self.RE + height) / clat_squared - self.RE
Calculate apex height Parameters ----------- lat : (float) Latitude in degrees height : (float or NoneType) Height above the surface of the earth in km or NoneType to use reference height (default=None) Returns ---------- apex_height : (float) Height of the field line apex in km
entailment
def set_epoch(self, year):
    """Updates the epoch for all subsequent conversions.

    Parameters
    ==========
    year : float
        Decimal year
    """
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin ``float`` is the documented drop-in replacement (it was only
    # ever an alias for the builtin), so this keeps the same behaviour on
    # modern NumPy versions.
    fa.loadapxsh(self.datafile, float(year))
    self.year = year
Updates the epoch for all subsequent conversions. Parameters ========== year : float Decimal year
entailment
def basic_parser(patterns, with_name=None):
    """ Basic ordered parser.

    Returns a ``parse(line)`` callable that runs every pattern over the
    line and keeps the result of the matching pattern with the highest
    ``order``. With ``with_name`` set, ``parse`` returns a
    ``(results, pattern_name)`` tuple instead of just the results.
    """
    def parse(line):
        best_results = None
        best_order = 0
        best_name = None
        for pat in patterns:
            matches = pat.findall(line)
            # Skip patterns that produced nothing (or only falsy groups).
            if not (matches and any(matches)):
                continue
            # Strictly-greater comparison: the first pattern at a given
            # order wins over later patterns with the same order.
            if pat.order <= best_order:
                continue
            best_results = matches
            best_order = pat.order
            if with_name:
                best_name = pat.name
        if with_name:
            return best_results, best_name
        return best_results
    return parse
Basic ordered parser.
entailment
def alt_parser(patterns):
    """ This parser is able to handle multiple different patterns
    finding stuff in text-- while removing matches that overlap.
    """
    # Defensive local import: ``reduce`` lives in functools on Python 3,
    # so do not rely on it being bound at module level.
    from functools import reduce
    from reparse.util import remove_lower_overlapping
    get_first = lambda items: [i[0] for i in items]
    get_second = lambda items: [i[1] for i in items]

    def parse(line):
        output = []
        for pattern in patterns:
            results = pattern.scan(line)
            if results and any(results):
                output.append((pattern.order, results))
        # Sort on the order value only. ``sorted(output)`` would compare
        # the result payloads whenever two patterns share an order, which
        # can raise TypeError for uncomparable objects; a key function
        # avoids that, and sort stability keeps tied patterns in their
        # original relative order.
        ordered = sorted(output, key=lambda pair: pair[0])
        return get_first(reduce(remove_lower_overlapping, get_second(ordered), []))
    return parse
This parser is able to handle multiple different patterns finding stuff in text-- while removing matches that overlap.
entailment
def build_tree_parser(patterns):
    """ This parser_type simply outputs an array of
    [(tree, regex)] for use in another language.
    """
    # One (full tree, regex) pair per pattern, in input order.
    return [(pattern.build_full_tree(), pattern.regex) for pattern in patterns]
This parser_type simply outputs an array of [(tree, regex)] for use in another language.
entailment
def parser(parser_type=basic_parser, functions=None, patterns=None, expressions=None,
           patterns_yaml_path=None, expressions_yaml_path=None):
    """ A Reparse parser description.

    Simply provide the functions, patterns, & expressions to build.
    If you are using YAML for expressions + patterns, you can use
    ``expressions_yaml_path`` & ``patterns_yaml_path`` for convenience.

    The default parser_type is the basic ordered parser.
    """
    from reparse.builders import build_all
    from reparse.validators import validate

    def _load_yaml(file_path):
        # safe_load refuses arbitrary object tags, unlike yaml.load.
        import yaml
        with open(file_path) as f:
            return yaml.safe_load(f)

    assert expressions or expressions_yaml_path, "Reparse can't build a parser without expressions"
    assert patterns or patterns_yaml_path, "Reparse can't build a parser without patterns"
    # Message fixed: was the ungrammatical "can't build without a functions".
    assert functions, "Reparse can't build without functions"

    # YAML paths, when given, take precedence over inline definitions.
    if patterns_yaml_path:
        patterns = _load_yaml(patterns_yaml_path)
    if expressions_yaml_path:
        expressions = _load_yaml(expressions_yaml_path)
    validate(patterns, expressions)
    return parser_type(build_all(patterns, expressions, functions))
A Reparse parser description. Simply provide the functions, patterns, & expressions to build. If you are using YAML for expressions + patterns, you can use ``expressions_yaml_path`` & ``patterns_yaml_path`` for convenience. The default parser_type is the basic ordered parser.
entailment