code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def query(cls, project=None, status=None, batch=None, parent=None,
          created_from=None, created_to=None, started_from=None,
          started_to=None, ended_from=None, ended_to=None, offset=None,
          limit=None, order_by=None, order=None, api=None):
    """List tasks matching the given filters.

    Date parameters accept both strings and python date objects.

    :param project: Target project. Optional.
    :param status: Task status.
    :param batch: Only batch tasks.
    :param parent: Parent batch task identifier.
    :param created_from: Tasks created from this date.
    :param created_to: Tasks created until this date.
    :param started_from: Tasks started from this date.
    :param started_to: Tasks started until this date.
    :param ended_from: Tasks that ended from this date.
    :param ended_to: Tasks that ended until this date.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param order_by: Property to order by.
    :param order: Ascending or descending ordering.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api or cls._API
    parent = Transform.to_task(parent) if parent else parent
    project = Transform.to_project(project) if project else project
    # Normalize every provided date to the API date string format.
    to_date = Transform.to_datestring
    created_from = to_date(created_from) if created_from else created_from
    created_to = to_date(created_to) if created_to else created_to
    started_from = to_date(started_from) if started_from else started_from
    started_to = to_date(started_to) if started_to else started_to
    ended_from = to_date(ended_from) if ended_from else ended_from
    ended_to = to_date(ended_to) if ended_to else ended_to
    return super(Task, cls)._query(
        url=cls._URL['query'], project=project, status=status, batch=batch,
        parent=parent, created_from=created_from, created_to=created_to,
        started_from=started_from, started_to=started_to,
        ended_from=ended_from, ended_to=ended_to, offset=offset,
        limit=limit, order_by=order_by, order=order, fields='_all', api=api
    )
1.588046
1.633408
0.972229
def create(cls, name, project, app, revision=None, batch_input=None,
           batch_by=None, inputs=None, description=None, run=False,
           disable_batch=False, interruptible=None,
           execution_settings=None, api=None):
    """Create a task on the server.

    :param name: Task name.
    :param project: Project identifier.
    :param app: CWL app identifier.
    :param revision: CWL app revision.
    :param batch_input: Batch input.
    :param batch_by: Batch criteria.
    :param inputs: Input map.
    :param description: Task description.
    :param run: True if you want to run the task upon creation.
    :param disable_batch: If True disables batching of a batch task.
    :param interruptible: If True an interruptible instance will be used.
    :param execution_settings: Execution settings for the task.
    :param api: Api instance.
    :return: Task object.
    :raises TaskValidationError: if validation fails.
    :raises SbgError: if any exception occurs during the request.
    """
    project = Transform.to_project(project)
    app_id = Transform.to_app(app)
    # Pin the app to an explicit revision when one is available.
    if revision:
        app_id = app_id + "/" + six.text_type(revision)
    elif isinstance(app, App):
        app_id = app_id + "/" + six.text_type(app.revision)

    task_data = {
        'name': name,
        'project': project,
        'app': app_id,
        'description': description,
        'inputs': Task._serialize_inputs(inputs) if inputs else {},
    }
    if batch_input and batch_by:
        task_data['batch_input'] = batch_input
        task_data['batch_by'] = batch_by
    if interruptible is not None:
        task_data['use_interruptible_instances'] = interruptible
    if execution_settings:
        task_data['execution_settings'] = execution_settings

    params = {}
    if disable_batch:
        params['batch'] = False
    if run:
        params['action'] = 'run'

    api = api if api else cls._API
    created_task = api.post(
        cls._URL['query'], data=task_data, params=params
    ).json()
    # A run request may succeed while the task itself holds errors.
    if run and 'errors' in created_task and bool(created_task['errors']):
        raise TaskValidationError(
            'Unable to run task! Task contains errors.',
            task=Task(api=api, **created_task)
        )
    return Task(api=api, **created_task)
2.750554
2.74584
1.001717
def abort(self, inplace=True):
    """Abort the task.

    :param inplace: Apply action on the current object or return a new one.
    :return: Task object.
    """
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id}
    }
    logger.info('Aborting task', extra=log_extra)
    abort_url = self._URL['abort'].format(id=self.id)
    task_data = self._api.post(url=abort_url).json()
    return Task(api=self._api, **task_data)
4.704558
4.469794
1.052522
def run(self, batch=True, interruptible=None, inplace=True):
    """Run the task.

    :param batch: If False batching will be disabled.
    :param interruptible: If True an interruptible instance will be used.
    :param inplace: Apply action on the current object or return a new one.
    :return: Task object.
    """
    params = {}
    if not batch:
        params['batch'] = False
    if interruptible is not None:
        params['use_interruptible_instances'] = interruptible
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'batch': batch}
    }
    logger.info('Running task', extra=log_extra)
    run_url = self._URL['run'].format(id=self.id)
    task_data = self._api.post(url=run_url, params=params).json()
    return Task(api=self._api, **task_data)
3.935845
4.048494
0.972175
def clone(self, run=True):
    """Clone the task.

    :param run: Run the task after cloning.
    :return: Task object.
    """
    params = {'action': 'run'} if run else {}
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'run': run}
    }
    logger.info('Cloning task', extra=log_extra)
    clone_url = self._URL['clone'].format(id=self.id)
    task_data = self._api.post(url=clone_url, params=params).json()
    return Task(api=self._api, **task_data)
4.061579
4.305895
0.94326
def save(self, inplace=True):
    """Save all modifications to the task on the server.

    :param inplace: Apply edits on the current instance or get a new one.
    :return: Task instance, or None when nothing was modified.
    """
    modified_data = self._modified_data()
    if not modified_data:
        return
    # Inputs and execution settings need dedicated serialization; the
    # rest of the modified fields are sent as-is.
    inputs = modified_data.pop('inputs', None)
    execution_settings = modified_data.pop('execution_settings', None)
    task_request_data = dict(modified_data)
    if inputs:
        task_request_data['inputs'] = self._serialize_inputs(inputs)
    if execution_settings:
        task_request_data['execution_settings'] = (
            self._serialize_execution_settings(execution_settings)
        )
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': task_request_data}
    }
    logger.info('Saving task', extra=log_extra)
    data = self._api.patch(
        url=self._URL['get'].format(id=self.id), data=task_request_data
    ).json()
    return Task(api=self._api, **data)
2.880447
2.802582
1.027783
def _serialize_inputs(inputs):
    """Serialize a task input dictionary.

    Lists are serialized recursively and File objects are converted to
    their API representation; everything else is passed through.
    """
    serialized = {}
    for input_id, value in inputs.items():
        if isinstance(value, list):
            serialized[input_id] = Task._serialize_input_list(value)
        elif isinstance(value, File):
            serialized[input_id] = Task._to_api_file_format(value)
        else:
            serialized[input_id] = value
    return serialized
2.586138
2.40657
1.074616
def _serialize_input_list(input_value):
    """Recursively serialize a task input list.

    Nested lists are serialized recursively and File objects converted
    to their API representation.
    """
    serialized = []
    for item in input_value:
        if isinstance(item, list):
            serialized.append(Task._serialize_input_list(item))
        elif isinstance(item, File):
            serialized.append(Task._to_api_file_format(item))
        else:
            serialized.append(item)
    return serialized
2.885013
2.553336
1.1299
def get_execution_details(self):
    """Retrieve execution details for a task.

    :return: ExecutionDetails instance.
    """
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id}
    }
    logger.info('Get execution details', extra=log_extra)
    details_url = self._URL['execution_details'].format(id=self.id)
    data = self._api.get(details_url).json()
    return ExecutionDetails(api=self._api, **data)
3.972198
3.855726
1.030207
def get_batch_children(self):
    """Retrieve batch child tasks for this task if it is a batch task.

    :return: Collection instance.
    :raises SbgError: if the task is not a batch task.
    """
    if self.batch:
        return self.query(parent=self.id, api=self._api)
    raise SbgError("This task is not a batch task.")
12.744866
7.561921
1.685401
def bulk_get(cls, tasks, api=None):
    """Retrieve tasks with specified ids in bulk.

    :param tasks: Tasks to be retrieved.
    :param api: Api instance.
    :return: List of TaskBulkRecord objects.
    """
    api = api or cls._API
    payload = {'task_ids': [Transform.to_task(task) for task in tasks]}
    logger.info('Getting tasks in bulk.')
    response = api.post(url=cls._URL['bulk_get'], data=payload)
    return TaskBulkRecord.parse_records(response=response, api=api)
4.463137
4.340405
1.028277
def wait(self=None, period=10, callback=None, *args, **kwargs):
    """Block until the task reaches a terminal state.

    :param period: Time in seconds between reloads.
    :param callback: Function to call once the task has finished;
        positional and keyword arguments are forwarded to it.
    :return: Return value of the callback, or None if no callback
        was provided.
    """
    terminal_states = (
        TaskStatus.COMPLETED, TaskStatus.FAILED, TaskStatus.ABORTED
    )
    while self.status not in terminal_states:
        self.reload()
        time.sleep(period)
    if callback:
        return callback(*args, **kwargs)
3.408655
3.257383
1.04644
def get_result(self):
    """Perform the call and return the graph data.

    :return: Encoded PNG graph data string suitable for the ``src``
        attribute of an HTML img tag, or None on failure.
    """
    graph = self.call()
    if graph is None:
        return
    # Piwik answers with an error string when the GD extension is missing.
    if graph.startswith('GD extension must be loaded'):
        current_plugin.logger.warning(
            'Piwik server answered on ImageGraph.get: %s', graph)
        return
    return 'data:image/png;base64,{}'.format(b64encode(graph))
14.22175
13.099036
1.08571
def query(cls, project=None, visibility=None, q=None, id=None, offset=None,
          limit=None, api=None):
    """List apps.

    :param project: Source project.
    :param visibility: private|public for private or public apps.
    :param q: List containing search terms.
    :param id: List of app ids; fetch apps with specific ids.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    project = Transform.to_project(project) if project else project
    api = api or cls._API
    return super(App, cls)._query(
        url=cls._URL['query'],
        project=project,
        visibility=visibility,
        q=q,
        id=id,
        offset=offset,
        limit=limit,
        api=api
    )
3.462861
4.228444
0.818945
def get_revision(cls, id, revision, api=None):
    """Get an app revision.

    :param id: App identifier.
    :param revision: App revision.
    :param api: Api instance.
    :return: App object.
    """
    api = api if api else cls._API
    log_extra = {
        'resource': cls.__name__,
        'query': {'id': id, 'revision': revision}
    }
    logger.info('Get revision', extra=log_extra)
    revision_url = cls._URL['get_revision'].format(id=id, revision=revision)
    app = api.get(url=revision_url).json()
    return App(api=api, **app)
4.435038
4.049787
1.095129
def install_app(cls, id, raw, api=None, raw_format=None):
    """Install an app.

    :param id: App identifier.
    :param raw: Raw cwl data.
    :param api: Api instance.
    :param raw_format: Format of the raw app data being sent,
        json by default.
    :return: App object.
    """
    api = api if api else cls._API
    raw_format = raw_format.lower() if raw_format else AppRawFormat.JSON
    log_extra = {
        'resource': cls.__name__,
        'query': {'id': id, 'data': raw}
    }
    logger.info('Installing app', extra=log_extra)
    # Pick the request content type matching the raw app data format.
    if raw_format not in cls._CONTENT_TYPE:
        raise SbgError(
            'Unsupported raw data format: "{}".'.format(raw_format))
    headers = {'Content-Type': cls._CONTENT_TYPE[raw_format]}
    app = api.post(
        url=cls._URL['raw'].format(id=id),
        data=raw,
        headers=headers,
    ).json()
    # Fetch the full app wrapper for the freshly installed revision.
    app_wrapper = api.get(
        url=cls._URL['get'].format(id=app['sbg:id'])).json()
    return App(api=api, **app_wrapper)
3.617589
3.593111
1.006813
def create_revision(cls, id, revision, raw, api=None):
    """Create a new app revision.

    :param id: App identifier.
    :param revision: App revision.
    :param raw: Raw cwl object.
    :param api: Api instance.
    :return: App object.
    """
    api = api if api else cls._API
    log_extra = {
        'resource': cls.__name__,
        'query': {'id': id, 'data': raw}
    }
    logger.info('Creating app revision', extra=log_extra)
    revision_url = cls._URL['create_revision'].format(
        id=id, revision=revision)
    app = api.post(url=revision_url, data=raw).json()
    # Fetch the full app wrapper for the newly created revision.
    app_wrapper = api.get(
        url=cls._URL['get'].format(id=app['sbg:id'])).json()
    return App(api=api, **app_wrapper)
4.775008
4.412489
1.082157
def copy(self, project, name=None, strategy=None):
    """Copy the current app.

    :param project: Destination project.
    :param name: Destination app name.
    :param strategy: App copy strategy; defaults to clone.

    :Copy strategies:
        clone
            Copy all revisions and keep getting updates from the
            original app (default when the key is omitted).
        direct
            Copy only the latest revision and get updates from this
            point on.
        clone_direct
            Copy the app like the direct strategy, but keep all
            revisions.
        transient
            Copy only the latest revision and keep getting updates
            from the original app.

    :return: Copied App object.
    """
    strategy = strategy or AppCopyStrategy.CLONE
    project = Transform.to_project(project)
    payload = {
        'project': project,
        'strategy': strategy
    }
    if name:
        payload['name'] = name
    log_extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': payload}
    }
    logger.info('Copying app', extra=log_extra)
    app = self._api.post(
        url=self._URL['copy'].format(id=self.id), data=payload).json()
    return App(api=self._api, **app)
4.083951
4.655754
0.877184
def sync(self):
    """Sync the parent app changes with the current app instance.

    :return: Synced App object.
    """
    sync_url = self._URL['sync'].format(id=self.id)
    app_data = self._api.post(url=sync_url).json()
    return App(api=self._api, **app_data)
7.57261
6.248961
1.211819
def server(self):
    """The server name of the chatroom.

    Usually the default one unless a custom one is set.
    """
    from indico_chat.plugin import ChatPlugin
    custom = self.custom_server
    return custom or ChatPlugin.settings.get('muc_server')
16.207031
12.892554
1.257085
def find_for_event(cls, event, include_hidden=False, **kwargs):
    """Return a query retrieving the chatrooms for an event.

    :param event: An indico event (with a numeric ID).
    :param include_hidden: Whether hidden chatrooms should be
        included, too.
    :param kwargs: Extra kwargs to pass to ``find()``.
    """
    chatrooms = cls.find(event_id=event.id, **kwargs)
    if include_hidden:
        return chatrooms
    return chatrooms.filter(~cls.hidden)
2.845685
2.873725
0.990243
def delete(self, reason=''):
    """Delete the event chatroom and, if necessary, the chatroom too.

    The underlying chatroom is removed only when no other event still
    references it.

    :param reason: Reason for the deletion.
    :return: True if the associated chatroom was also deleted,
        otherwise False.
    """
    db.session.delete(self)
    db.session.flush()
    chatroom = self.chatroom
    if chatroom.events:
        # Other events still use the chatroom; keep it alive.
        return False
    db.session.delete(chatroom)
    db.session.flush()
    delete_room(chatroom, reason)
    return True
3.378113
3.07721
1.097784
def obj_ref(obj):
    """Return an immutable dict identifying a category/event/contrib/subcontrib."""
    from indico_livesync.models.queue import EntryType
    # Order matters: checked with isinstance, most generic types last.
    dispatch = (
        (Category, EntryType.category, 'category_id'),
        (Event, EntryType.event, 'event_id'),
        (Session, EntryType.session, 'session_id'),
        (Contribution, EntryType.contribution, 'contrib_id'),
        (SubContribution, EntryType.subcontribution, 'subcontrib_id'),
    )
    for model, entry_type, key in dispatch:
        if isinstance(obj, model):
            return ImmutableDict({'type': entry_type, key: obj.id})
    raise ValueError('Unexpected object: {}'.format(obj.__class__.__name__))
2.090045
1.932744
1.081388
def obj_deref(ref):
    """Return the object identified by `ref`."""
    from indico_livesync.models.queue import EntryType
    # Map each entry type to its model class and the id key in the ref.
    dispatch = {
        EntryType.category: (Category, 'category_id'),
        EntryType.event: (Event, 'event_id'),
        EntryType.session: (Session, 'session_id'),
        EntryType.contribution: (Contribution, 'contrib_id'),
        EntryType.subcontribution: (SubContribution, 'subcontrib_id'),
    }
    try:
        model, key = dispatch[ref['type']]
    except KeyError:
        raise ValueError('Unexpected object type: {}'.format(ref['type']))
    return model.get_one(ref[key])
2.021889
2.087681
0.968486
def clean_old_entries():
    """Delete obsolete entries from the queues.

    Removes processed queue entries older than the configured TTL;
    does nothing when no TTL is configured.
    """
    from indico_livesync.plugin import LiveSyncPlugin
    from indico_livesync.models.queue import LiveSyncQueueEntry
    ttl_days = LiveSyncPlugin.settings.get('queue_entry_ttl')
    if not ttl_days:
        return
    threshold = now_utc() - timedelta(days=ttl_days)
    LiveSyncQueueEntry.find(
        LiveSyncQueueEntry.processed,
        LiveSyncQueueEntry.timestamp < threshold
    ).delete(synchronize_session='fetch')
3.313841
3.295496
1.005567
def get_excluded_categories():
    """Get excluded category IDs as a set of ints."""
    from indico_livesync.plugin import LiveSyncPlugin
    excluded = LiveSyncPlugin.settings.get('excluded_categories')
    return {int(entry['id']) for entry in excluded}
6.308538
5.270992
1.196841
def compound_id(obj):
    """Generate a hierarchical compound ID, separated by dots.

    :raises TypeError: for categories and sessions, which have no
        compound ID.
    """
    if isinstance(obj, (Category, Session)):
        raise TypeError('Compound IDs are not supported for this entry type')
    if isinstance(obj, Event):
        return unicode(obj.id)
    if isinstance(obj, Contribution):
        return '{}.{}'.format(obj.event_id, obj.id)
    if isinstance(obj, SubContribution):
        return '{}.{}.{}'.format(
            obj.contribution.event_id, obj.contribution_id, obj.id)
3.225712
3.024769
1.066432
def track_download_request(download_url, download_title):
    """Track a download in Piwik.

    :param download_url: URL of the downloaded resource; must be
        non-empty.
    :param download_title: Title to record for the download; must be
        non-empty.
    :raises ValueError: if either argument is empty.
    """
    from indico_piwik.plugin import PiwikPlugin
    if not download_url:
        raise ValueError("download_url can't be empty")
    if not download_title:
        raise ValueError("download_title can't be empty")
    settings = PiwikPlugin.settings
    request = PiwikRequest(server_url=settings.get('server_api_url'),
                           site_id=settings.get('site_id_events'),
                           api_token=settings.get('server_token'),
                           query_script=PiwikPlugin.track_script)
    action_url = quote(download_url)
    now = datetime.now()
    request.call(idsite=request.site_id,
                 rec=1,
                 action_name=quote(download_title.encode('utf-8')),
                 url=action_url,
                 download=action_url,
                 h=now.hour, m=now.minute, s=now.second)
3.714665
3.565506
1.041834
def backend(self):
    """Return the backend class registered for this agent's backend name."""
    from indico_livesync.plugin import LiveSyncPlugin
    registry = LiveSyncPlugin.instance.backend_classes
    return registry.get(self.backend_name)
6.441344
7.050131
0.913649
def notify_created(room, event, user):
    """Notify about the creation of a chatroom.

    :param room: the chatroom
    :param event: the event
    :param user: the user performing the action
    """
    template = get_plugin_template_module(
        'emails/created.txt', chatroom=room, event=event, user=user)
    _send(event, template)
7.320507
8.37251
0.87435
def notify_attached(room, event, user):
    """Notify about an existing chatroom being attached to an event.

    :param room: the chatroom
    :param event: the event
    :param user: the user performing the action
    """
    template = get_plugin_template_module(
        'emails/attached.txt', chatroom=room, event=event, user=user)
    _send(event, template)
7.615682
8.686439
0.876732
def notify_modified(room, event, user):
    """Notify about the modification of a chatroom.

    :param room: the chatroom
    :param event: the event
    :param user: the user performing the action
    """
    template = get_plugin_template_module(
        'emails/modified.txt', chatroom=room, event=event, user=user)
    _send(event, template)
7.645805
9.001552
0.849387
def notify_deleted(room, event, user, room_deleted):
    """Notify about the deletion of a chatroom.

    :param room: the chatroom
    :param event: the event
    :param user: the user performing the action; `None` if due to
        event deletion
    :param room_deleted: if the room has been deleted from the
        jabber server
    """
    template = get_plugin_template_module(
        'emails/deleted.txt', chatroom=room, event=event, user=user,
        room_deleted=room_deleted)
    _send(event, template)
6.647447
7.226267
0.919901
def query(cls, automation=None, offset=None, limit=None, api=None):
    """List automation members.

    :param automation: Automation id.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    automation_id = Transform.to_automation(automation)
    api = api or cls._API
    query_url = cls._URL['query'].format(automation_id=automation_id)
    return super(AutomationMember, cls)._query(
        url=query_url,
        automation_id=automation_id,
        offset=offset,
        limit=limit,
        api=api,
    )
3.804794
5.455063
0.697479
def get(cls, id, automation, api=None):
    """Fetch an automation member from the server.

    :param id: Automation member username.
    :param automation: Automation id or object.
    :param api: sevenbridges Api instance.
    :return: AutomationMember object.
    """
    username = Transform.to_resource(id)
    automation_id = Transform.to_automation(automation)
    api = api or cls._API
    member_url = cls._URL['get'].format(
        automation_id=automation_id, id=username)
    member = api.get(url=member_url).json()
    return AutomationMember(api=api, **member)
6.451748
6.014775
1.07265
def add(cls, user, permissions, automation, api=None):
    """Add a member to the automation.

    :param user: Member username.
    :param permissions: Permissions dictionary.
    :param automation: Automation object or id.
    :param api: sevenbridges Api instance.
    :return: AutomationMember object.
    """
    username = Transform.to_user(user)
    automation_id = Transform.to_automation(automation)
    api = api or cls._API
    payload = {'username': username}
    # Only attach permissions when a proper dictionary is supplied.
    if isinstance(permissions, dict):
        payload['permissions'] = permissions
    member_data = api.post(
        url=cls._URL['query'].format(automation_id=automation_id),
        data=payload
    ).json()
    return AutomationMember(api=api, **member_data)
4.002806
3.745746
1.068627
def remove(cls, user, automation, api=None):
    """Remove a member from the automation.

    :param user: Member username.
    :param automation: Automation id.
    :param api: sevenbridges Api instance.
    :return: None
    """
    username = Transform.to_user(user)
    automation_id = Transform.to_automation(automation)
    api = api or cls._API
    member_url = cls._URL['get'].format(
        automation_id=automation_id, id=username)
    api.delete(member_url)
5.027622
5.66384
0.88767
def save(self, inplace=True):
    """Save modifications to the API server.

    :raises ResourceNotModified: when nothing was modified.
    """
    modified = self._modified_data()
    if not modified:
        raise ResourceNotModified()
    # Merge the existing permissions with the modified ones so the
    # server receives the complete permission set.
    merged_permissions = self.permissions.copy()
    merged_permissions.update(modified['permissions'])
    self._api.patch(
        url=six.text_type(self.href),
        data={'permissions': merged_permissions},
        append_base=False
    )
5.226873
4.39989
1.187955
def query(cls, name=None, offset=None, limit=None, api=None):
    """List automations.

    :param name: Automation name.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api or cls._API
    return super(Automation, cls)._query(
        url=cls._URL['query'], name=name, offset=offset, limit=limit,
        api=api,
    )
4.165462
4.230502
0.984626
def get_packages(self, offset=None, limit=None, api=None):
    """Return the list of packages that belong to this automation.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: sevenbridges Api instance.
    :return: AutomationPackage collection.
    """
    api = api or self._API
    return AutomationPackage.query(
        automation=self.id, offset=offset, limit=limit, api=api
    )
7.520291
6.55286
1.147635
def get_member(self, username, api=None):
    """Return the specified automation member.

    :param username: Member username.
    :param api: sevenbridges Api instance.
    :return: AutomationMember object.
    """
    member_id = Transform.to_automation_member(username)
    api = api or self._API
    return AutomationMember.get(
        id=member_id, automation=self.id, api=api
    )
11.553232
9.526496
1.212747
def get_members(self, offset=None, limit=None, api=None):
    """Return the list of automation members.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: sevenbridges Api instance.
    :return: AutomationMember collection.
    """
    api = api or self._API
    return AutomationMember.query(
        automation=self.id, offset=offset, limit=limit, api=api
    )
7.212896
6.621345
1.08934
def add_member(self, user, permissions, api=None):
    """Add a member to the automation.

    :param user: Member username.
    :param permissions: Member permissions.
    :param api: sevenbridges Api instance.
    :return: AutomationMember object.
    """
    api = api or self._API
    return AutomationMember.add(
        automation=self.id, user=user, permissions=permissions, api=api
    )
6.982943
5.137343
1.359252
def remove_member(self, user, api=None):
    """Remove a member from the automation.

    :param user: Member username.
    :param api: sevenbridges Api instance.
    :return: None
    """
    api = api or self._API
    AutomationMember.remove(automation=self.id, user=user, api=api)
11.292759
8.184831
1.379718
def get_runs(self, package=None, status=None, name=None, created_by=None,
             created_from=None, created_to=None, order_by=None, order=None,
             offset=None, limit=None, api=None):
    """Query automation runs that belong to this automation.

    :param package: Package id.
    :param status: Run status.
    :param name: Automation run name.
    :param created_by: Username of the member that created the run.
    :param created_from: Date the run was created after.
    :param created_to: Date the run was created before.
    :param order_by: Property by which to order results.
    :param order: Ascending or descending ("asc" or "desc").
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: sevenbridges Api instance.
    :return: AutomationRun collection.
    """
    api = api or self._API
    return AutomationRun.query(
        automation=self.id, package=package, status=status, name=name,
        created_by=created_by, created_from=created_from,
        created_to=created_to, order_by=order_by, order=order,
        offset=offset, limit=limit, api=api
    )
2.266041
2.248427
1.007834
def query(cls, automation=None, package=None, status=None, name=None,
          created_by=None, created_from=None, created_to=None,
          order_by=None, order=None, offset=None, limit=None, api=None):
    """List automation runs.

    :param automation: Automation template.
    :param package: Package.
    :param status: Run status.
    :param name: Automation run name.
    :param created_by: Username of the user that created the run.
    :param created_from: Date the run is created after.
    :param created_to: Date the run is created before.
    :param order_by: Property by which to order results.
    :param order: Ascending or descending ("asc" or "desc").
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    automation = (
        Transform.to_automation(automation) if automation else automation
    )
    package = (
        Transform.to_automation_package(package) if package else package
    )
    api = api or cls._API
    return super(AutomationRun, cls)._query(
        url=cls._URL['query'], name=name, automation=automation,
        package=package, status=status, created_by=created_by,
        created_from=created_from, created_to=created_to,
        order_by=order_by, order=order, offset=offset, limit=limit,
        api=api,
    )
2.085745
2.18117
0.95625
def create(cls, package, inputs=None, settings=None, resume_from=None,
           name=None, secret_settings=None, api=None):
    """Create and start a new run.

    :param package: Automation package id.
    :param inputs: Input dictionary.
    :param settings: Settings override dictionary.
    :param resume_from: Run to resume from.
    :param name: Automation run name.
    :param secret_settings: Dict overriding secret_settings from the
        automation template.
    :param api: sevenbridges Api instance.
    :return: AutomationRun object.
    """
    payload = {'package': Transform.to_automation_package(package)}
    # Only include optional fields the caller actually provided.
    optional_fields = (
        ('inputs', inputs),
        ('settings', settings),
        ('resume_from', resume_from),
        ('name', name),
        ('secret_settings', secret_settings),
    )
    for key, value in optional_fields:
        if value:
            payload[key] = value
    api = api or cls._API
    run_data = api.post(url=cls._URL['query'], data=payload).json()
    return AutomationRun(api=api, **run_data)
2.632653
2.554123
1.030746
def stop(self, api=None):
    """Stop the automation run.

    :param api: sevenbridges Api instance.
    :return: Raw response content of the stop action.
    """
    api = api or self._API
    action_url = self._URL['actions'].format(
        id=self.id, action=AutomationRunActions.STOP)
    return api.post(url=action_url).content
10.612356
8.831418
1.201659
def get_log_file(self, api=None):
    """Retrieve the automation run log file.

    :param api: sevenbridges Api instance.
    :return: File object for the log, or None if no log is available.
    """
    api = api or self._API
    log_file_data = self.execution_details.get('log_file')
    if not log_file_data:
        return None
    return File(api=api, **log_file_data)
4.66415
5.022365
0.928676
def get_state(self, api=None):
    """Retrieve the automation run state.

    :param api: sevenbridges Api instance.
    :return: State file json contents.
    """
    api = api or self._API
    state_url = self._URL['state'].format(id=self.id)
    return api.get(state_url).json()
6.62677
6.894043
0.961231
try: response = api.get(url.format(upload_id=upload, part_number=part)) return response.json()['url'] except Exception: raise SbgError( 'Unable to get upload url for part number {}'.format(part) )
def _get_part_url(api, url, upload, part)
Used by the worker to fetch url for the part that is to be uploaded. :param api: Api instance. :param url: Part url. :param upload: Upload identifier. :param part: Part number. :return: Storage generated URL for the part.
3.672139
3.953469
0.92884
def _report_part(api, url, upload, part, e_tag):
    """Report the completion of a part upload to the API.

    Used by the upload worker.

    :param api: Api instance.
    :param url: Part url template.
    :param upload: Upload identifier.
    :param part: Part number.
    :param e_tag: ETag returned by the storage service.
    :raises SbgError: if reporting fails.
    """
    report = {
        'part_number': part,
        'response': {'headers': {'ETag': e_tag}}
    }
    try:
        api.post(
            url.format(upload_id=upload, part_number=''),
            data=report
        )
    except Exception as e:
        raise SbgError(
            'Unable to report part number {}. Reason: {}'.format(
                part, six.text_type(e)
            )
        )
3.29964
3.730725
0.88445
def _submit_part(session, url, part, timeout):
    """Submit the part data to the storage service URL.

    Used by the upload worker.

    :param session: Storage service session.
    :param url: Part url.
    :param part: Part data in bytes.
    :param timeout: Timeout for the storage session.
    :return: ETag for the submitted part.
    :raises SbgError: if submission fails.
    """
    try:
        response = session.put(url, data=part, timeout=timeout)
        # Storage services quote the ETag header; strip the quotes.
        return response.headers.get('etag').strip('"')
    except Exception as e:
        raise SbgError(
            'Failed to submit the part. Reason: {}'.format(
                six.text_type(e)
            )
        )
3.495492
3.124949
1.118575
def _upload_part(api, session, url, upload, part_number, part, retry_count,
                 timeout):
    """Upload one part to the storage service, with retries.

    Fetches the part URL, submits the data, then reports completion —
    each step retried up to ``retry_count`` times.

    :param api: Api instance.
    :param session: Storage service session.
    :param url: Part url template.
    :param upload: Upload identifier.
    :param part_number: Part number.
    :param part: Part data.
    :param retry_count: Number of times to retry each step.
    :param timeout: Timeout for the storage session.
    """
    with_retry = retry(retry_count)
    part_url = with_retry(_get_part_url)(api, url, upload, part_number)
    e_tag = with_retry(_submit_part)(session, part_url, part, timeout)
    with_retry(_report_part)(api, url, upload, part_number, e_tag)
3.764796
4.678756
0.804657
def submit(self):
    """Submit pending file parts to the upload pool in groups of 4.

    :return: List of futures for the submitted parts.
    """
    futures = []
    # Keep at most four parts in flight at any time.
    while self.submitted < 4 and not self.done():
        part = self.parts.pop(0)
        offset = part['offset']
        limit = part['limit']
        self.fp.seek(offset)
        chunk = self.fp.read(limit - offset)
        future = self.pool.submit(
            _upload_part, self.api, self.session,
            self._URL['upload_part'], self.upload_id, part['part'],
            chunk, self.retry, self.timeout
        )
        futures.append(future)
        self.submitted += 1
        self.total_submitted += 1
    return futures
3.4702
3.183434
1.090081
def get_parts(self):
    """Partition the file into upload parts held in memory.

    Each part is a dict with the 1-based part number and the byte
    ``offset``/``limit`` range to read from the file.

    :return: List of part descriptors.
    """
    parts = []
    start_byte = 0
    for i in range(1, self.total + 1):
        end_byte = start_byte + self.part_size
        # Clamp the final part to the end of the file. The previous
        # comparison against ``file_size - 1`` folded the last byte
        # into the second-to-last part whenever the file size was one
        # byte past a part boundary, leaving an empty trailing part.
        if end_byte > self.file_size:
            end_byte = self.file_size
        parts.append({
            'part': i,
            'offset': start_byte,
            'limit': end_byte
        })
        start_byte = end_byte
    return parts
2.436477
2.220517
1.097257
def _verify_part_number(self):
    """Verify that the total number of parts does not exceed the
    storage service maximum.

    :raises SbgError: if the part count exceeds the maximum; the
        transfer is marked as failed.
    """
    part_count = int(math.ceil(self._file_size / self._part_size))
    if part_count > PartSize.MAXIMUM_TOTAL_PARTS:
        self._status = TransferState.FAILED
        raise SbgError(
            'Total parts = {}. Maximum number of parts is {}'.format(
                part_count, PartSize.MAXIMUM_TOTAL_PARTS)
        )
5.715785
4.765638
1.199374
def _verify_part_size(self):
    """Verify that the part size does not exceed the maximum
    allowed part size.

    :raises SbgError: if the part size is too large; the transfer is
        marked as failed.
    """
    if self._part_size <= PartSize.MAXIMUM_UPLOAD_SIZE:
        return
    self._status = TransferState.FAILED
    raise SbgError('Part size = {}b. Maximum part size is {}b'.format(
        self._part_size, PartSize.MAXIMUM_UPLOAD_SIZE)
    )
5.736195
5.145801
1.114733
def _verify_file_size(self):
    """Verify that the file does not exceed the maximum object size
    allowed for upload.

    :raises SbgError: if the file is too large; the transfer is
        marked as failed.
    """
    if self._file_size <= PartSize.MAXIMUM_OBJECT_SIZE:
        return
    self._status = TransferState.FAILED
    raise SbgError('File size = {}b. Maximum file size is {}b'.format(
        self._file_size, PartSize.MAXIMUM_OBJECT_SIZE)
    )
6.695217
6.01415
1.113244
def _initialize_upload(self):
    """Initialize the multipart upload on the API server.

    Submits the file name, size and part size (plus the destination
    project or parent folder) and stores the returned upload id.

    :raises SbgError: if the initialization request fails.
    """
    payload = {
        'name': self._file_name,
        'part_size': self._part_size,
        'size': self._file_size,
    }
    # Destination is either a project or a parent folder, never both.
    if self._project:
        payload['project'] = self._project
    elif self._parent:
        payload['parent'] = self._parent
    params = {'overwrite': self._overwrite} if self._overwrite else {}
    try:
        response = self._api.post(
            self._URL['upload_init'], data=payload, params=params
        )
        self._upload_id = response.json()['upload_id']
    except SbgError as e:
        self._status = TransferState.FAILED
        raise SbgError(
            'Unable to initialize upload! Failed to get upload id! '
            'Reason: {}'.format(e.message)
        )
2.912112
2.771871
1.050594
def _finalize_upload(self):
    """Complete the multipart upload on the API server and store the
    resulting ``File`` object as the transfer result.

    :raises SbgError: if the completion request fails.
    """
    from sevenbridges.models.file import File
    url = self._URL['upload_complete'].format(upload_id=self._upload_id)
    try:
        data = self._api.post(url).json()
        self._result = File(api=self._api, **data)
        self._status = TransferState.COMPLETED
    except SbgError as e:
        self._status = TransferState.FAILED
        raise SbgError(
            'Failed to complete upload! Reason: {}'.format(e.message)
        )
4.080644
3.870751
1.054225
def _abort_upload(self):
    """Abort the multipart upload on the API server.

    :raises SbgError: if the abort request fails.
    """
    url = self._URL['upload_info'].format(upload_id=self._upload_id)
    try:
        self._api.delete(url)
    except SbgError as e:
        self._status = TransferState.FAILED
        raise SbgError(
            'Failed to abort upload! Reason: {}'.format(e.message)
        )
5.09001
4.805195
1.059272
def add_callback(self, callback=None, errorback=None):
    """Register completion callbacks for this transfer.

    :param callback: called when the upload finishes successfully.
    :param errorback: called when the upload fails.
    """
    self._callback, self._errorback = callback, errorback
4.426033
4.531321
0.976764
def start(self):
    """Start the upload thread.

    :raises SbgError: if the upload is not in the PREPARING state.
    """
    # Guard clause: refuse to start a transfer that already ran or failed.
    if self._status != TransferState.PREPARING:
        raise SbgError(
            'Unable to start. Upload not in PREPARING state.'
        )
    super(Upload, self).start()
9.335533
4.295433
2.173363
def run(self):
    """Thread body performing the whole upload.

    Should not be called directly -- use start() instead.
    """
    self._running.set()
    self._status = TransferState.RUNNING
    self._time_started = time.time()
    # Initializes the upload
    self._initialize_upload()
    # Opens the file for reading in binary mode.
    try:
        with io.open(self._file_path, mode='rb') as fp:
            # Creates a partitioned file
            parted_file = UPartedFile(
                fp, self._file_size, self._part_size, self._upload_id,
                self._retry, self._timeout, self.session, self._api
            )
            # Iterates over parts and submits them for upload.
            for _ in parted_file:
                if self._stop_signal:
                    return
                # Block here while the transfer is paused.
                self._running.wait()
                self._bytes_done += self._part_size
                # If the progress callback is set we need to provide a
                # progress object for it.
                if self._progress_callback:
                    progress = Progress(
                        parted_file.total, parted_file.total_submitted,
                        self._bytes_done, self._file_size, self.duration
                    )
                    self._progress_callback(progress)
    except IOError:
        raise SbgError('Unable to open file {}'.format(self._file_path))
    except Exception as e:
        # If the errorback callback is set call it with status
        self._status = TransferState.FAILED
        if self._errorback:
            self._errorback(self._status)
        else:
            raise SbgError(six.text_type(e))
        # NOTE(review): after invoking the errorback, execution falls
        # through to _finalize_upload() below even though the transfer
        # failed -- confirm whether an early return is intended here.
    # Finalizes the upload.
    self._finalize_upload()
    self._status = TransferState.COMPLETED
    # If the callback is set call it.
    if self._callback:
        self._callback(self._status)
3.806476
3.716989
1.024075
def process_records(records):
    """Convert livesync queue entries into object changes.

    :param records: an iterable containing `LiveSyncQueueEntry` objects
    :return: a dict mapping object references to `SimpleChange` bitsets
    """
    changes = defaultdict(int)
    cascaded_update_records = set()
    cascaded_delete_records = set()
    for record in records:
        if record.change != ChangeType.deleted and record.object is None:
            # Skip entries which are not deletions but have no corresponding objects.
            # Probably they are updates for objects that got deleted afterwards.
            continue
        if record.change == ChangeType.created:
            assert record.type != EntryType.category
            changes[record.object] |= SimpleChange.created
        elif record.change == ChangeType.deleted:
            assert record.type != EntryType.category
            # Deletions cascade down to the event's contents.
            cascaded_delete_records.add(record)
        elif record.change in {ChangeType.moved, ChangeType.protection_changed}:
            # Moves/protection changes may affect whole subtrees.
            cascaded_update_records.add(record)
        elif record.change == ChangeType.data_changed:
            assert record.type != EntryType.category
            changes[record.object] |= SimpleChange.updated
    for obj in _process_cascaded_category_contents(cascaded_update_records):
        changes[obj] |= SimpleChange.updated
    for obj in _process_cascaded_event_contents(cascaded_delete_records):
        changes[obj] |= SimpleChange.deleted
    return changes
3.274689
3.060468
1.069996
def _process_cascaded_category_contents(records):
    """Travel from categories to subcontributions, flattening the whole
    event structure.

    Yields everything that it finds (except for elements whose protection
    has changed but are not inheriting their protection settings from
    anywhere).

    :param records: queue records to process
    """
    category_prot_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.protection_changed}
    category_move_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.moved}
    changed_events = set()
    category_prot_records -= category_move_records  # A move already implies sending the whole record
    # Protection changes are handled differently, as there may not be the need to re-generate the record
    if category_prot_records:
        for categ in Category.find(Category.id.in_(category_prot_records)):
            cte = categ.get_protection_parent_cte()
            # Update only children that inherit
            inheriting_categ_children = (Event.query
                                         .join(cte, db.and_((Event.category_id == cte.c.id),
                                                            (cte.c.protection_parent == categ.id))))
            inheriting_direct_children = Event.find((Event.category_id == categ.id) & Event.is_inheriting)
            changed_events.update(itertools.chain(inheriting_direct_children,
                                                  inheriting_categ_children))
    # Add move operations and explicitly-passed event records
    if category_move_records:
        changed_events.update(Event.find(Event.category_chain_overlaps(category_move_records)))
    for elem in _process_cascaded_event_contents(records, additional_events=changed_events):
        yield elem
5.15581
5.012583
1.028574
def _process_cascaded_event_contents(records, additional_events=None):
    """Flatten a series of records into its most basic elements
    (subcontribution level). Yields results.

    :param records: queue records to process
    :param additional_events: events whose content will be included in
        addition to those found in records
    """
    changed_events = additional_events or set()
    changed_contributions = set()
    changed_subcontributions = set()
    session_records = {rec.session_id for rec in records if rec.type == EntryType.session}
    contribution_records = {rec.contrib_id for rec in records if rec.type == EntryType.contribution}
    subcontribution_records = {rec.subcontrib_id for rec in records if rec.type == EntryType.subcontribution}
    event_records = {rec.event_id for rec in records if rec.type == EntryType.event}
    if event_records:
        changed_events.update(Event.find(Event.id.in_(event_records)))
    for event in changed_events:
        yield event
    # Sessions are added (explicitly changed only, since they don't need to be sent anywhere)
    if session_records:
        changed_contributions.update(Contribution
                                     .find(Contribution.session_id.in_(session_records),
                                           ~Contribution.is_deleted))
    # Contributions are added (implicitly + explicitly changed)
    changed_event_ids = {ev.id for ev in changed_events}
    condition = Contribution.event_id.in_(changed_event_ids) & ~Contribution.is_deleted
    if contribution_records:
        condition = db.or_(condition, Contribution.id.in_(contribution_records))
    contrib_query = Contribution.find(condition).options(joinedload('subcontributions'))
    for contribution in contrib_query:
        yield contribution
        changed_subcontributions.update(contribution.subcontributions)
    # Same for subcontributions
    if subcontribution_records:
        changed_subcontributions.update(SubContribution.find(SubContribution.id.in_(subcontribution_records)))
    for subcontrib in changed_subcontributions:
        yield subcontrib
2.711324
2.717326
0.997791
def create_room(self, vc_room, event):
    """Create a new Vidyo room for an event, given a VC room.

    In order to create the Vidyo room, the function will try to do so with
    all the available identities of the user based on the authenticators
    defined in Vidyo plugin's settings, in that order.

    :param vc_room: VCRoom -- The VC room from which to create the Vidyo room
    :param event: Event -- The event the Vidyo room will be attached to
    """
    client = AdminClient(self.settings)
    owner = retrieve_principal(vc_room.data['owner'])
    # Candidate Vidyo logins for the owner; tried in order until one works.
    login_gen = iter_user_identities(owner)
    login = next(login_gen, None)
    if login is None:
        raise VCRoomError(_("No valid Vidyo account found for this user"), field='owner_user')
    extension_gen = iter_extensions(self.settings.get('indico_room_prefix'), event.id)
    extension = next(extension_gen)
    # Retry loop: advance to the next login/extension whenever the API
    # reports a conflict, until the room is created or no options remain.
    while True:
        room_mode = {
            'isLocked': False,
            'hasPIN': bool(vc_room.data['room_pin']),
            'hasModeratorPIN': bool(vc_room.data['moderation_pin'])
        }
        if room_mode['hasPIN']:
            room_mode['roomPIN'] = vc_room.data['room_pin']
        if room_mode['hasModeratorPIN']:
            room_mode['moderatorPIN'] = vc_room.data['moderation_pin']
        room_obj = client.create_room_object(
            name=vc_room.name,
            RoomType='Public',
            ownerName=login,
            extension=extension,
            groupName=self.settings.get('room_group_name'),
            description=vc_room.data['description'],
            RoomMode=room_mode)
        if room_obj.RoomMode.hasPIN:
            room_obj.RoomMode.roomPIN = vc_room.data['room_pin']
        if room_obj.RoomMode.hasModeratorPIN:
            room_obj.RoomMode.moderatorPIN = vc_room.data['moderation_pin']
        try:
            client.add_room(room_obj)
        except APIException as err:
            err_msg = err.message
            if err_msg.startswith('Room exist for name'):
                raise VCRoomError(_("Room name already in use"), field='name')
            elif err_msg.startswith('Member not found for ownerName'):
                # Try the owner's next identity.
                login = next(login_gen, None)
                if login is None:
                    raise VCRoomError(_("No valid Vidyo account found for this user"), field='owner_user')
            elif err_msg.startswith('Room exist for extension'):
                # Try the next free extension.
                extension = next(extension_gen)
            else:
                raise
        else:
            # get room back, in order to fetch Vidyo-set parameters
            created_room = client.find_room(extension)
            if not created_room:
                raise VCRoomNotFoundError(_("Could not find newly created room in Vidyo"))
            vc_room.data.update({
                'vidyo_id': unicode(created_room.roomID),
                'url': created_room.RoomMode.roomURL,
                'owner_identity': created_room.ownerName
            })
            flag_modified(vc_room, 'data')
            vc_room.vidyo_extension = VidyoExtension(vc_room_id=vc_room.id,
                                                     extension=int(created_room.extension),
                                                     owned_by_user=owner)
            client.set_automute(created_room.roomID, vc_room.data['auto_mute'])
            break
3.232696
3.29625
0.980719
def object(self):
    """Return the object affected by this queue entry, based on its type."""
    # Lazy lookup: only the matching relationship attribute is touched.
    dispatch = (
        (EntryType.category, 'category'),
        (EntryType.event, 'event'),
        (EntryType.session, 'session'),
        (EntryType.contribution, 'contribution'),
        (EntryType.subcontribution, 'subcontribution'),
    )
    for entry_type, attr in dispatch:
        if self.type == entry_type:
            return getattr(self, attr)
    return None
2.372116
2.354744
1.007378
def object_ref(self):
    """Return an immutable reference (type + ids) of the changed object."""
    fields = {
        'type': self.type,
        'category_id': self.category_id,
        'event_id': self.event_id,
        'session_id': self.session_id,
        'contrib_id': self.contrib_id,
        'subcontrib_id': self.subcontrib_id,
    }
    return ImmutableDict(**fields)
4.044979
3.917196
1.032621
def create(cls, changes, ref, excluded_categories=set()):
    """Create a new change in all queues.

    :param changes: the change types, an iterable containing :class:`ChangeType`
    :param ref: the object reference (returned by `obj_ref`) of the changed object
    :param excluded_categories: set of categories (IDs) whose items will not be tracked
    """
    # NOTE(review): the mutable default `set()` is safe here because it is
    # never mutated, but a frozenset would make that explicit.
    ref = dict(ref)
    obj = obj_deref(ref)
    if isinstance(obj, Category):
        # Skip anything inside an excluded category subtree.
        if any(c.id in excluded_categories for c in obj.chain_query):
            return
    else:
        event = obj if isinstance(obj, Event) else obj.event
        # Cache the (request-local) exclusion check per category in `g`.
        if event.category not in g.setdefault('livesync_excluded_categories_checked', {}):
            g.livesync_excluded_categories_checked[event.category] = \
                excluded_categories & set(event.category_chain)
        if g.livesync_excluded_categories_checked[event.category]:
            return
    try:
        agents = g.livesync_agents
    except AttributeError:
        # First use in this request: load and memoize all agents.
        agents = g.livesync_agents = LiveSyncAgent.query.all()
    for change in changes:
        for agent in agents:
            entry = cls(agent=agent, change=change, **ref)
            db.session.add(entry)
    db.session.flush()
3.940568
3.975276
0.991269
def get_file_copy_job(cls, id, api=None):
    """Retrieve a file copy async job.

    :param id: Async job identifier
    :param api: Api instance
    :return: AsyncJob object
    """
    job_id = Transform.to_async_job(id)
    api = api or cls._API
    data = api.get(
        url=cls._URL['get_file_copy_job'].format(id=job_id)
    ).json()
    return AsyncJob(api=api, **data)
4.975346
5.466156
0.910209
def get_result(self, api=None):
    """Get async job results in bulk record format.

    :param api: Api instance
    :return: list of AsyncFileBulkRecord objects (empty if no result yet)
    """
    api = api or self._API
    if not self.result:
        return []
    return AsyncFileBulkRecord.parse_records(result=self.result, api=api)
9.818765
4.83171
2.032151
def list_file_jobs(cls, offset=None, limit=None, api=None):
    """Query (list) file async jobs.

    :param offset: Pagination offset
    :param limit: Pagination limit
    :param api: Api instance
    :return: Collection object
    """
    return super(AsyncJob, cls)._query(
        api=api or cls._API,
        url=cls._URL['list_file_jobs'],
        offset=offset,
        limit=limit,
    )
4.570236
5.455255
0.837768
def decompose_code(code):
    """Decompose a MARC "code" into a (tag, ind1, ind2, subcode) tuple.

    The code is left-justified to six characters; blank indicators are
    normalized to ``'_'`` and a blank subcode becomes ``None``.
    """
    padded = "%-6s" % code
    tag = padded[0:3]
    ind1 = padded[3:4]
    ind2 = padded[4:5]
    subcode = padded[5:6]
    if ind1 == " ":
        ind1 = "_"
    if ind2 == " ":
        ind2 = "_"
    if subcode == " ":
        subcode = None
    return (tag, ind1, ind2, subcode)
3.023743
2.224904
1.359044
def _init_browser(self):
    """Prepare a logged-in mechanize browser against the Invenio server.

    Override this method with the appropriate way to prepare a logged in
    browser for other setups.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt and
    SystemExit) is narrowed to ``except Exception``.
    """
    self.browser = mechanize.Browser()
    self.browser.set_handle_robots(False)
    self.browser.open(self.server_url + "/youraccount/login")
    self.browser.select_form(nr=0)
    try:
        self.browser['nickname'] = self.user
        self.browser['password'] = self.password
    except Exception:
        # Older login forms use different control names.
        self.browser['p_un'] = self.user
        self.browser['p_pw'] = self.password
    # Set login_method to be writable
    self.browser.form.find_control('login_method').readonly = False
    self.browser['login_method'] = self.login_method
    self.browser.submit()
2.836741
2.717677
1.043811
def search(self, read_cache=True, **kwparams):
    """Return records corresponding to the given search query.

    See docstring of invenio.legacy.search_engine.perform_request_search()
    for an overview of available parameters.

    :param read_cache: when True, return previously cached results for an
        identical query instead of querying again.
    :raises InvenioConnectorAuthError: if authentication fails
    """
    parse_results = False
    of = kwparams.get('of', "")
    if of == "":
        # No output format requested: fetch MARCXML and parse it.
        parse_results = True
        of = "xm"
        kwparams['of'] = of
    params = urllib.urlencode(kwparams, doseq=1)
    # Are we running locally? If so, better directly access the
    # search engine directly
    if self.local and of != 't':
        # See if user tries to search any restricted collection
        c = kwparams.get('c', "")
        if c != "":
            if type(c) is list:
                colls = c
            else:
                colls = [c]
            for collection in colls:
                if collection_restricted_p(collection):
                    if self.user:
                        self._check_credentials()
                        continue
                    raise InvenioConnectorAuthError("You are trying to search a restricted collection. Please authenticate yourself.\n")
        kwparams['of'] = 'id'
        results = perform_request_search(**kwparams)
        if of.lower() != 'id':
            results = format_records(results, of)
    else:
        # Remote search over HTTP, with per-query caching.
        if params + str(parse_results) not in self.cached_queries or not read_cache:
            if self.user:
                results = self.browser.open(self.server_url + "/search?" + params)
            else:
                results = urllib2.urlopen(self.server_url + "/search?" + params)
            if 'youraccount/login' in results.geturl():
                # Current user not able to search collection
                raise InvenioConnectorAuthError("You are trying to search a restricted collection. Please authenticate yourself.\n")
        else:
            return self.cached_queries[params + str(parse_results)]
    if parse_results:
        # FIXME: we should not try to parse if results is string
        parsed_records = self._parse_results(results, self.cached_records)
        self.cached_queries[params + str(parse_results)] = parsed_records
        return parsed_records
    else:
        # pylint: disable=E1103
        # The whole point of the following code is to make sure we can
        # handle two types of variable.
        try:
            res = results.read()
        except AttributeError:
            res = results
        # pylint: enable=E1103
        if of == "id":
            try:
                if type(res) is str:
                    # Transform to list
                    res = [int(recid.strip()) for recid in \
                           res.strip("[]").split(",") if recid.strip() != ""]
                    res.reverse()
            except (ValueError, AttributeError):
                res = []
        self.cached_queries[params + str(parse_results)] = res
        return self.cached_queries[params + str(parse_results)]
4.238767
4.0757
1.040009
def search_with_retry(self, sleeptime=3.0, retrycount=3, **params):
    """Perform a search, retrying on server timeouts.

    :param sleeptime: number of seconds to sleep between retries
    :param retrycount: number of times to retry the search
    :param params: search parameters forwarded to search()
    :return: list of records (empty if all attempts timed out)
    """
    attempt = 0
    while attempt < retrycount:
        try:
            return self.search(**params)
        except urllib2.URLError:
            sys.stderr.write("Timeout while searching...Retrying\n")
            time.sleep(sleeptime)
            attempt += 1
    # Every attempt timed out.
    sys.stderr.write("Aborting search after %d attempts.\n" % (retrycount,))
    return []
2.594707
2.628866
0.987006
def get_records_from_basket(self, bskid, group_basket=False, read_cache=True):
    """Return the records from the (public) basket with given bskid.

    Results are cached per basket id unless read_cache is False.
    """
    if bskid in self.cached_baskets and read_cache:
        return self.cached_baskets[bskid]
    if self.user:
        suffix = '&category=G' if group_basket else ''
        results = self.browser.open(
            self.server_url + "/yourbaskets/display?of=xm&bskid=" +
            str(bskid) + suffix)
    else:
        # Anonymous access only works for public baskets.
        results = urllib2.urlopen(
            self.server_url + "/yourbaskets/display_public?of=xm&bskid=" +
            str(bskid))
    parsed_records = self._parse_results(results, self.cached_records)
    self.cached_baskets[bskid] = parsed_records
    return parsed_records
3.096205
2.977399
1.039903
def get_record(self, recid, read_cache=True):
    """Return the record with the given recid.

    :param recid: record id to fetch
    :param read_cache: when True, return a previously fetched record from
        the cache; when False, always perform a fresh search.

    Fixes the original condition (``recid in cache or not read_cache``),
    which raised KeyError for uncached records when read_cache was False
    and returned stale cache entries instead of re-fetching.
    """
    if read_cache and recid in self.cached_records:
        return self.cached_records[recid]
    return self.search(p="recid:" + str(recid))
3.838269
3.625707
1.058627
def upload_marcxml(self, marcxml, mode):
    """Upload a record to the server.

    :param marcxml: the MARCXML to upload.
    :param mode: the upload mode:
        "-i" insert new records
        "-r" replace existing records
        "-c" correct fields of records
        "-a" append fields to records
        "-ir" insert record or replace if it exists
    :raises NameError: if an unknown mode is given.

    Fix: the Python-2-only ``raise NameError, "..."`` statement is
    replaced by the call form, valid in both Python 2 and 3.
    """
    if mode not in ["-i", "-r", "-c", "-a", "-ir"]:
        # NameError kept for backward compatibility with existing callers,
        # although ValueError would be the conventional choice.
        raise NameError("Incorrect mode " + str(mode))
    # Are we running locally? If so, submit directly
    if self.local:
        (code, marcxml_filepath) = tempfile.mkstemp(prefix="upload_%s" %
                                                    time.strftime("%Y%m%d_%H%M%S_", time.localtime()))
        marcxml_file_d = os.fdopen(code, "w")
        marcxml_file_d.write(marcxml)
        marcxml_file_d.close()
        return task_low_level_submission("bibupload", "", mode, marcxml_filepath)
    else:
        params = urllib.urlencode({'file': marcxml, 'mode': mode})
        ## We don't use self.browser as batchuploader is protected by IP
        opener = urllib2.build_opener()
        opener.addheaders = [('User-Agent', CFG_USER_AGENT)]
        return opener.open(self.server_url + "/batchuploader/robotupload", params,)
5.01867
1.104869
def _parse_results(self, results, cached_records):
    """Parse MARCXML results into records using a SAX handler.

    The given "cached_records" list is a pool of already existing parsed
    records, reused so the same record is not kept several times in memory.
    """
    handler = RecordsHandler(cached_records)
    sax_parser = xml.sax.make_parser()
    sax_parser.setContentHandler(handler)
    sax_parser.parse(results)
    return handler.records
3.094779
2.917608
1.060724
def _validate_server_url(self):
    """Validate that self.server_url is well-formed and reachable.

    :raises InvenioConnectorServerError: on bad schema, invalid URL,
        connection failure or an HTTP error status (>= 400).
    """
    try:
        response = requests.head(self.server_url)
        if response.status_code >= 400:
            raise InvenioConnectorServerError(
                "Unexpected status code '%d' accessing URL: %s"
                % (response.status_code, self.server_url))
    except (InvalidSchema, MissingSchema) as err:
        raise InvenioConnectorServerError(
            "Bad schema, expecting http:// or https://:\n %s" % (err,))
    except ConnectionError as err:
        raise InvenioConnectorServerError(
            "Couldn't establish connection to '%s':\n %s"
            % (self.server_url, err))
    except InvalidURL as err:
        raise InvenioConnectorServerError(
            "Invalid URL '%s':\n %s" % (self.server_url, err))
    except RequestException as err:
        raise InvenioConnectorServerError(
            "Unknown error connecting to '%s':\n %s"
            % (self.server_url, err))
2.321724
2.251945
1.030986
hits = {'total': 0, 'unique': 0} day_hits = list(hits[0] for hits in results.values() if hits) for metrics in day_hits: hits['total'] += metrics['nb_hits'] hits['unique'] += metrics['nb_uniq_visitors'] return hits
def _get_cumulative_results(self, results)
Returns a dictionary of {'total': x, 'unique': y} for the date range.
5.106277
4.432662
1.151966
def get_result(self):
    """Perform the call and return the top 10 referrers by visit count."""
    referrers = list(get_json_from_remote_server(self.call))
    # Humanize the visit length before returning.
    for ref in referrers:
        ref['sum_visit_length'] = stringify_seconds(ref['sum_visit_length'])
    referrers.sort(key=itemgetter('nb_visits'), reverse=True)
    return referrers[:10]
5.784444
4.848361
1.193072
def get_result(self):
    """Perform the call and return the average duration as 'hh:mm:ss'."""
    result = get_json_from_remote_server(self.call)
    if result:
        seconds = self._get_average_duration(result)
    else:
        seconds = 0
    return stringify_seconds(seconds)
11.745324
10.050555
1.168624
def get_result(self):
    """Perform the call and return the peak date and its user count."""
    result = get_json_from_remote_server(self.call)
    if not result:
        return {'date': "No Data", 'users': 0}
    # Pick the day with the highest value.
    date, value = max(result.iteritems(), key=itemgetter(1))
    return {'date': date, 'users': value}
5.230553
3.946437
1.325386
def call(self, default_response=None, **query_params):
    """Perform a query to the Piwik server and return the response.

    :param default_response: Return value in case the query fails
    :param query_params: Dictionary with the parameters of the query
    """
    url = self.get_query_url(**query_params)
    return self._perform_call(url, default_response)
4.182089
4.139986
1.01017
def get_query(self, query_params=None):
    """Build the query string for a Piwik API request.

    Adds the site id and, when configured, the auth token to the given
    parameters. List values are joined with commas.

    Fixes two defects of the original implementation: the caller's
    ``query_params`` dict is no longer mutated, and the string is built
    with ``'&'.join`` instead of quadratic concatenation (also replacing
    the Python-2-only ``iteritems``).

    :param query_params: dict of query parameters (not modified).
    :return: query string of the form ``key=value&key=value``.
    """
    params = dict(query_params or {})
    params['idSite'] = self.site_id
    if self.api_token is not None:
        params['token_auth'] = self.api_token
    parts = []
    for key, value in params.items():
        if isinstance(value, list):
            value = ','.join(value)
        parts.append('{}={}'.format(key, value))
    return '&'.join(parts)
2.334028
2.236808
1.043463
def _perform_call(self, query_url, default_response=None, timeout=10):
    """Fetch the raw API response, returning default_response on failure."""
    try:
        response = requests.get(query_url, timeout=timeout)
    except socket.timeout:
        current_plugin.logger.warning("Timeout contacting Piwik server")
        return default_response
    except Exception:
        # Best-effort: any other failure falls back to the default.
        current_plugin.logger.exception("Unable to connect")
        return default_response
    else:
        return response.content
3.856929
3.813706
1.011334
def run(self, records):
    """Upload queued records in batches.

    :param records: an iterable containing queue entries
    """
    name = type(self).__name__
    for batch_no, batch in enumerate(grouper(records, self.BATCH_SIZE, skip_missing=True), 1):
        self.logger.info('%s processing batch %d', name, batch_no)
        try:
            chunks = grouper(process_records(batch).iteritems(),
                             self.BATCH_SIZE, skip_missing=True)
            for chunk_no, chunk in enumerate(chunks, 1):
                self.logger.info('%s uploading chunk #%d (batch %d)', name, chunk_no, batch_no)
                self.upload_records(dict(chunk), from_queue=True)
        except Exception:
            # A failed upload aborts the whole run; unprocessed entries stay queued.
            self.logger.exception('%s could not upload batch', name)
            return
        self.logger.info('%s finished batch %d', name, batch_no)
        self.processed_records(batch)
    self.logger.info('%s finished', name)
3.282432
3.351789
0.979308
def run_initial(self, events):
    """Upload all events in the initial export, in chunked batches.

    :param events: an iterable containing events
    """
    name = type(self).__name__
    for batch_no, batch in enumerate(grouper(events, self.INITIAL_BATCH_SIZE, skip_missing=True), 1):
        self.logger.debug('%s processing initial batch %d', name, batch_no)
        for chunk_no, chunk in enumerate(grouper(batch, self.BATCH_SIZE, skip_missing=True), 1):
            self.logger.info('%s uploading initial chunk #%d (batch %d)', name, chunk_no, batch_no)
            self.upload_records(chunk, from_queue=False)
4.288418
4.242735
1.010767
def processed_records(self, records):
    """Mark queue entries as processed after a successful upload.

    :param records: a list of queue entries
    """
    for rec in records:
        self.logger.debug('Marking as processed: %s', rec)
        rec.processed = True
    db.session.commit()
4.103288
5.145699
0.797421
def cli():
    """Migrate data to S3.

    Use the `copy` subcommand to copy data to S3. This can be done safely
    while Indico is running. At the end it will show you what you need to
    add to your `indico.conf`.

    Once you updated your config with the new storage backends, you can use
    the `apply` subcommand to update your database so files will actually
    be loaded using the new S3 storage backends.

    In case you ever need to switch back to your previous storage, you can
    use `revert` to undo the database changes.

    Fix: grammar in the user-facing warning ("Unless you database" ->
    "Unless your database").
    """
    if config.DB_LOG:
        click.secho('Warning: The database logger is currently enabled (DB_LOG = True).\n'
                    'This will slow down the migration. Unless your database is very small, please disable it.',
                    fg='yellow')
        click.confirm('Continue anyway?', abort=True)
9.754638
10.012914
0.974206
def start(self) -> None:
    """Connect websocket to deCONZ, if configuration is available."""
    if not self.config:
        _LOGGER.error('No deCONZ config available')
        return
    self.websocket = self.ws_client(
        self.loop, self.session, self.host,
        self.config.websocketport, self.async_session_handler)
    self.websocket.start()
7.721155
5.065229
1.524345
async def async_load_parameters(self) -> bool:
    """Load deCONZ state and (re)build the config/group/light/scene/sensor maps.

    NOTE(review): the ``-> bool`` annotation and the original docstring
    ("returns lists of indices") do not match the implementation, which
    returns None -- confirm the intended contract.
    """
    data = await self.async_get_state('')
    _LOGGER.debug(pformat(data))
    config = data.get('config', {})
    groups = data.get('groups', {})
    lights = data.get('lights', {})
    sensors = data.get('sensors', {})
    if not self.config:
        self.config = DeconzConfig(config)
    # Update scene for existing groups
    for group_id, group in groups.items():
        if group_id in self.groups:
            self.groups[group_id].async_add_scenes(
                group.get('scenes'), self.async_put_state)
    # Only add groups/lights/sensors that are not already known.
    self.groups.update({
        group_id: DeconzGroup(group_id, group, self.async_put_state)
        for group_id, group in groups.items()
        if group_id not in self.groups
    })
    self.lights.update({
        light_id: DeconzLight(light_id, light, self.async_put_state)
        for light_id, light in lights.items()
        if light_id not in self.lights
    })
    self.update_group_color(self.lights.keys())
    # Scenes are keyed as '<group id>_<scene id>'.
    self.scenes.update({
        group.id + '_' + scene.id: scene
        for group in self.groups.values()
        for scene in group.scenes.values()
        if group.id + '_' + scene.id not in self.scenes
    })
    self.sensors.update({
        sensor_id: create_sensor(sensor_id, sensor, self.async_put_state)
        for sensor_id, sensor in sensors.items()
        if supported_sensor(sensor) and sensor_id not in self.sensors
    })
2.202425
2.140587
1.028888
async def async_put_state(self, field: str, data: dict) -> dict:
    """Set state of an object in deCONZ.

    :param field: API path of the device, e.g. '/lights/1/state'.
    :param data: JSON-serializable dict of values to alter,
        e.g. ``{'on': True}``. See Dresden Elektroniks REST API
        documentation for details:
        http://dresden-elektronik.github.io/deconz-rest-doc/rest/
    :return: the deCONZ response as a dict.
    """
    url = self.api_url + field
    payload = json.dumps(data)
    return await async_request(self.session.put, url, data=payload)
4.640431
4.580338
1.01312
async def async_get_state(self, field: str) -> dict:
    """Get state of an object in deCONZ.

    :param field: API path, e.g. '/lights'. See Dresden Elektroniks
        REST API documentation for details:
        http://dresden-elektronik.github.io/deconz-rest-doc/rest/
    :return: the deCONZ response as a dict.
    """
    url = self.api_url + field
    return await async_request(self.session.get, url)
7.060427
5.918528
1.192936
def async_session_handler(self, signal: str) -> None:
    """Dispatch signalling from the websocket.

    'data'  -- new data is available; forward it to the event handler.
    'state' -- network state changed; notify the status callback.
    """
    if signal == 'data':
        self.async_event_handler(self.websocket.data)
        return
    if signal == 'state' and self.async_connection_status_callback:
        self.async_connection_status_callback(
            self.websocket.state == 'running')
5.364614
4.242905
1.264373