code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
if form is None: form = xmpp.plugin['xep_0004'].make_form(ftype='submit') form.add_field('FORM_TYPE', value='http://jabber.org/protocol/muc#roomconfig') form.add_field('muc#roomconfig_publicroom', value='1') form.add_field('muc#roomconfig_whois', value='moderators') form.add_field('muc#roomconfig_membersonly', value='0') form.add_field('muc#roomconfig_moderatedroom', value='1') form.add_field('muc#roomconfig_changesubject', value='1') form.add_field('muc#roomconfig_allowinvites', value='1') form.add_field('muc#roomconfig_allowvisitorstatus', value='1') form.add_field('muc#roomconfig_allowvisitornickchange', value='1') form.add_field('muc#roomconfig_enablelogging', value='1') form.add_field('public_list', value='1') form.add_field('members_by_default', value='1') form.add_field('allow_private_messages', value='1') form.add_field('allow_query_users', value='1') form.add_field('muc#roomconfig_persistentroom', value='1') form.add_field('muc#roomconfig_roomname', value=room.name) form.add_field('muc#roomconfig_passwordprotectedroom', value='1' if room.password else '0') if room.description: form.add_field('muc#roomconfig_roomdesc', value=room.description) if room.password: form.add_field('muc#roomconfig_roomsecret', value=room.password) return form
def _set_form_values(xmpp, room, form=None)
Creates/Updates an XMPP room config form
1.818226
1.811575
1.003671
def _execute_xmpp(connected_callback):
    """Connects to the XMPP server and executes custom code

    :param connected_callback: function to execute after connecting
    :return: return value of the callback
    """
    from indico_chat.plugin import ChatPlugin
    check_config()
    jid = ChatPlugin.settings.get('bot_jid')
    password = ChatPlugin.settings.get('bot_password')
    if '@' not in jid:
        jid = '{}@{}'.format(jid, ChatPlugin.settings.get('server'))
    result = [None, None]  # [callback return value, exception]
    app = current_app._get_current_object()  # callback runs in another thread

    def _session_start(event):
        # Closure: `xmpp` is bound below, before the handler can fire.
        try:
            with app.app_context():
                result[0] = connected_callback(xmpp)
        except Exception as e:
            result[1] = e
            if isinstance(e, IqError):
                current_plugin.logger.exception('XMPP callback failed: %s',
                                                e.condition)
            else:
                current_plugin.logger.exception('XMPP callback failed')
        finally:
            xmpp.disconnect(wait=0)

    xmpp = ClientXMPP(jid, password)
    for plugin_name in ('xep_0045', 'xep_0004', 'xep_0030'):
        xmpp.register_plugin(plugin_name)
    xmpp.add_event_handler('session_start', _session_start)
    try:
        xmpp.connect()
    except Exception:
        current_plugin.logger.exception('XMPP connection failed')
        xmpp.disconnect()
        raise
    try:
        xmpp.process(threaded=False)
    finally:
        xmpp.disconnect(wait=0)
    if result[1] is not None:
        raise result[1]
    return result[0]
2.458045
2.507441
0.9803
def retrieve_logs(room, start_date=None, end_date=None):
    """Retrieves chat logs

    :param room: the `Chatroom`
    :param start_date: the earliest date to get logs for
    :param end_date: the latest date to get logs for
    :return: logs in html format
    """
    from indico_chat.plugin import ChatPlugin
    base_url = ChatPlugin.settings.get('log_url')
    if not base_url or room.custom_server:
        return None
    params = {'cr': room.jid}
    for key, value in (('sdate', start_date), ('edate', end_date)):
        if value:
            params[key] = value.strftime('%Y-%m-%d')
    try:
        response = requests.get(base_url, params=params)
    except RequestException:
        current_plugin.logger.exception('Could not retrieve logs for %s',
                                        room.jid)
        return None
    # A JSON response from the log service indicates an error payload.
    if response.headers.get('content-type') == 'application/json':
        current_plugin.logger.warning('Could not retrieve logs for %s: %s',
                                      room.jid, response.json().get('error'))
        return None
    return response.text
2.626923
2.590928
1.013893
def delete_logs(room):
    """Deletes chat logs"""
    from indico_chat.plugin import ChatPlugin
    base_url = ChatPlugin.settings.get('log_url')
    if not base_url or room.custom_server:
        return
    try:
        response = requests.get(posixpath.join(base_url, 'delete'),
                                params={'cr': room.jid}).json()
    except (RequestException, ValueError):
        # ValueError covers a body that is not valid JSON.
        current_plugin.logger.exception('Could not delete logs for %s',
                                        room.jid)
        return
    if not response.get('success'):
        current_plugin.logger.warning('Could not delete logs for %s: %s',
                                      room.jid, response.get('error'))
3.497157
3.581854
0.976354
def save(self, inplace=True):
    """Saves modification to the api server."""
    permissions = self._modified_data()['permissions']
    if not permissions:
        raise ResourceNotModified()
    url = six.text_type(self.href) + self._URL['permissions']
    extra = {'resource': self.__class__.__name__, 'query': permissions}
    logger.info('Modifying permissions', extra=extra)
    self._api.patch(url=url, data=permissions, append_base=False)
6.400939
5.468471
1.170517
def create(cls, name, division, api=None):
    """Create team within a division

    :param name: Team name.
    :param division: Parent division.
    :param api: Api instance.
    :return: Team object.
    """
    division = Transform.to_division(division)
    api = api if api else cls._API
    payload = {'name': name, 'division': division}
    logger.info('Creating team',
                extra={'resource': cls.__name__, 'query': payload})
    created_team = api.post(cls._URL['query'], data=payload).json()
    return Team(api=api, **created_team)
4.467969
4.578265
0.975909
def get_members(self, offset=None, limit=None):
    """Fetch team members for current team.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    logger.info('Get team members',
                extra={'resource': self.__class__.__name__,
                       'query': {'id': self.id}})
    response = self._api.get(
        url=self._URL['members_query'].format(id=self.id),
        params={'offset': offset, 'limit': limit}
    )
    data = response.json()
    return Collection(
        resource=TeamMember,
        href=data['href'],
        total=response.headers['x-total-matching-query'],
        items=[TeamMember(api=self._api, **member)
               for member in data['items']],
        links=[Link(**link) for link in data['links']],
        api=self._api,
    )
3.250196
3.10146
1.047957
def add_member(self, user):
    """Add member to team

    :param user: User object or user's username
    :return: Added user.
    """
    payload = {'id': Transform.to_user(user)}
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': payload},
    }
    logger.info('Adding team member using id', extra=extra)
    response = self._api.post(
        url=self._URL['members_query'].format(id=self.id), data=payload)
    return TeamMember(api=self._api, **response.json())
4.621647
4.558864
1.013772
def query(cls, owner=None, name=None, offset=None, limit=None, api=None):
    """Query (List) projects

    :param owner: Owner username.
    :param name: Project name
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api if api else cls._API
    # An empty owner queries projects across all owners.
    url = cls._URL['query'].format(owner=owner if owner else '')
    query_params = {}
    if name:
        query_params['name'] = name
    return super(Project, cls)._query(
        url=url, offset=offset, limit=limit, fields='_all', api=api,
        **query_params
    )
3.002051
3.129331
0.959327
def create(cls, name, billing_group=None, description=None, tags=None,
           settings=None, api=None):
    """Create a project.

    :param name: Project name.
    :param billing_group: Project billing group.
    :param description: Project description.
    :param tags: Project tags.
    :param settings: Project settings.
    :param api: Api instance.
    :return: Project object.
    """
    api = api if api else cls._API
    if name is None:
        raise SbgError('Project name is required!')
    data = {'name': name}
    if billing_group:
        data['billing_group'] = Transform.to_billing_group(billing_group)
    if description:
        data['description'] = description
    if tags:
        data['tags'] = tags
    if settings:
        data['settings'] = settings
    logger.info('Creating project',
                extra={'resource': cls.__name__, 'query': data})
    project_data = api.post(url=cls._URL['create'], data=data).json()
    return Project(api=api, **project_data)
3.088413
3.198601
0.965551
def add_member_team(self, team, permissions):
    """Add a member (team) to a project.

    :param team: Team object or team identifier.
    :param permissions: Permissions dictionary.
    :return: Member object.
    """
    payload = {'id': Transform.to_team(team), 'type': 'TEAM'}
    if isinstance(permissions, dict):
        payload['permissions'] = permissions
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': payload},
    }
    logger.info('Adding team member using team id', extra=extra)
    response = self._api.post(
        url=self._URL['members_query'].format(id=self.id), data=payload)
    return Member(api=self._api, **response.json())
4.302582
4.247022
1.013082
def add_member_email(self, email, permissions=None):
    """Add a member to the project using member email.

    :param email: Member email.
    :param permissions: Permissions dictionary.
    :return: Member object.
    """
    payload = {'email': email}
    if isinstance(permissions, dict):
        payload['permissions'] = permissions
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': payload},
    }
    logger.info('Adding member using email', extra=extra)
    response = self._api.post(
        url=self._URL['members_query'].format(id=self.id), data=payload)
    return Member(api=self._api, **response.json())
3.54358
3.609453
0.98175
def remove_member(self, user):
    """Remove member from the project.

    :param user: User to be removed.
    """
    username = Transform.to_user(user)
    logger.info('Removing member',
                extra={'resource': self.__class__.__name__,
                       'query': {'id': self.id, 'user': user}})
    self._api.delete(
        url=self._URL['member'].format(id=self.id, username=username))
4.327135
4.566331
0.947618
def get_files(self, offset=None, limit=None):
    """Retrieves files in this project.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    return self._api.files.query(api=self._api, project=self.id,
                                 offset=offset, limit=limit)
4.705006
5.348401
0.879703
def add_files(self, files):
    """Adds files to this project.

    :param files: List of files or a Collection object.
    """
    project_id = self.id
    # `item` instead of `file` to avoid shadowing the builtin.
    for item in files:
        item.copy(project=project_id)
14.797919
10.485651
1.411254
def get_apps(self, offset=None, limit=None):
    """Retrieves apps in this project.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    return self._api.apps.query(api=self._api, project=self.id,
                                offset=offset, limit=limit)
5.387376
6.427078
0.838231
def get_tasks(self, status=None, offset=None, limit=None):
    """Retrieves tasks in this project.

    :param status: Optional task status.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    query_params = {'project': self.id, 'offset': offset, 'limit': limit}
    if status:
        query_params['status'] = status
    return self._api.tasks.query(api=self._api, **query_params)
3.509261
4.048679
0.866767
def get_imports(self, volume=None, state=None, offset=None, limit=None):
    """Fetches imports for this project.

    :param volume: Optional volume identifier.
    :param state: Optional state.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    query_params = {
        'project': self.id,
        'volume': volume,
        'state': state,
        'offset': offset,
        'limit': limit,
    }
    return self._api.imports.query(**query_params)
4.551538
5.564416
0.817972
def get_exports(self, volume=None, state=None, offset=None, limit=None):
    """Fetches exports for this project.

    :param volume: Optional volume identifier.
    :param state: Optional state.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :return: Collection object.
    """
    query_params = {
        'project': self.id,
        'volume': volume,
        'state': state,
        'offset': offset,
        'limit': limit,
    }
    return self._api.exports.query(**query_params)
4.293705
6.432406
0.667511
def create_task(self, name, app, revision=None, batch_input=None,
                batch_by=None, inputs=None, description=None, run=False,
                disable_batch=False, interruptible=True,
                execution_settings=None):
    """Creates a task for this project.

    :param name: Task name.
    :param app: CWL app identifier.
    :param revision: CWL app revision.
    :param batch_input: Batch input.
    :param batch_by: Batch criteria.
    :param inputs: Input map.
    :param description: Task description.
    :param run: True if you want to run a task upon creation.
    :param disable_batch: True if you want to disable batching.
    :param interruptible: True if you want to use interruptible instances.
    :param execution_settings: Execution settings for the task.
    :return: Task object.
    """
    return self._api.tasks.create(
        name=name, project=self, app=app, revision=revision,
        batch_input=batch_input, batch_by=batch_by, inputs=inputs,
        description=description, run=run, disable_batch=disable_batch,
        interruptible=interruptible, execution_settings=execution_settings,
    )
1.690329
1.977909
0.854604
def validate_business(form, field):
    """Validates a PayPal business string.

    It can either be an email address or a paypal business account ID.
    """
    valid_email = is_valid_mail(field.data, multi=False)
    # Business account IDs are exactly 13 alphanumeric characters.
    valid_account_id = re.match(r'^[a-zA-Z0-9]{13}$', field.data)
    if not valid_email and not valid_account_id:
        raise ValidationError(_('Invalid email address / paypal ID'))
5.976839
4.97441
1.201517
def _download_part(path, session, url, retry, timeout, start_byte, end_byte):
    """Downloads a single part.

    :param path: File path.
    :param session: Requests session.
    :param url: Url of the resource.
    :param retry: Number of times to retry on error.
    :param timeout: Session timeout.
    :param start_byte: Start byte of the part.
    :param end_byte: End byte of the part.
    :return: Part describing the downloaded range.
    :raises SbgError: If the file can't be opened or all retries fail.
    """
    try:
        # os.open raises OSError; on Python 2 IOError does NOT cover it,
        # so catch OSError (IOError is merely an alias on Python 3).
        fd = os.open(path, os.O_CREAT | os.O_WRONLY)
    except OSError:
        raise SbgError('Unable to open file %s' % path)
    # Prepare range headers.
    headers = {}
    if end_byte is not None:
        headers['Range'] = 'bytes=%d-%d' % (int(start_byte), int(end_byte))
    cause = None
    # Use a separate loop variable so the `retry` parameter is not
    # shadowed (the old code reported a wrong attempt count on failure).
    for attempt in range(retry):
        try:
            response = session.get(
                url, headers=headers, timeout=timeout, stream=True
            )
            response.raise_for_status()
            part_size = response.headers.get('Content-Length')
            os.lseek(fd, start_byte, os.SEEK_SET)
            for chunk in response.iter_content(32 * PartSize.KB):
                os.write(fd, chunk)
            os.close(fd)
        except requests.RequestException as e:
            # Covers HTTPError too (it is a RequestException subclass);
            # the old duplicate branches did exactly the same thing.
            cause = e
            time.sleep(2 ** attempt)  # exponential backoff
            continue
        else:
            return Part(start=start_byte, size=float(part_size))
    os.close(fd)
    raise SbgError('Failed to download file after {} attempts.'
                   ' Response: {}'.format(retry, six.text_type(cause)))
2.50992
2.51836
0.996649
def submit(self):
    """Partitions the file into chunks and submits them into group of 4
    for download on the api download pool.
    """
    futures = []
    # Keep at most 4 parts in flight at a time.
    while self.submitted < 4 and not self.done():
        part = self.parts.pop(0)
        future = self.pool.submit(
            _download_part, self.file_path, self.session, self.url,
            self.retry, self.timeout, *part)
        futures.append(future)
        self.submitted += 1
        self.total_submitted += 1
    return futures
4.735754
3.945514
1.200288
def get_parts(self):
    """Partitions the file and saves the part information in memory."""
    chunk = PartSize.DOWNLOAD_MINIMUM_PART_SIZE
    parts = []
    start = 0
    for _ in range(self.total):
        end = start + chunk - 1
        parts.append([start, end])
        start = end + 1
    return parts
3.365308
3.005596
1.119681
def pause(self):
    """Pauses the download.

    :raises SbgError: If download is not in RUNNING state.
    """
    if self._status != TransferState.RUNNING:
        raise SbgError('Can not pause. Download not in RUNNING state.')
    self._running.clear()
    self._status = TransferState.PAUSED
7.453615
4.106441
1.815104
def stop(self):
    """Stops the download.

    :raises SbgError: If download is not in PAUSED or RUNNING state.
    """
    if self.status not in (TransferState.PAUSED, TransferState.RUNNING):
        raise SbgError(
            'Can not stop. Download not in PAUSED or RUNNING state.'
        )
    self._stop_signal = True
    self.join()
    self._status = TransferState.STOPPED
    if self._callback:
        return self._callback(self._status)
5.599978
3.620948
1.546551
def resume(self):
    """Resumes the download.

    :raises SbgError: If download is not in PAUSED state.
    """
    # The original condition was inverted (it resumed when NOT paused and
    # raised when paused), and the error message said "Can not pause.".
    # This matches the sibling pause()/start()/stop() state checks.
    if self._status == TransferState.PAUSED:
        self._running.set()
        self._status = TransferState.RUNNING
    else:
        raise SbgError('Can not resume. Download not in PAUSED state.')
10.014559
6.216767
1.610895
def start(self):
    """Starts the download.

    :raises SbgError: If download is not in PREPARING state.
    """
    if self._status != TransferState.PREPARING:
        raise SbgError(
            'Unable to start. Download not in PREPARING state.'
        )
    self._running.set()
    super(Download, self).start()
    self._status = TransferState.RUNNING
    self._time_started = time.time()
6.214895
3.796319
1.637085
def run(self):
    """Runs the thread! Should not be used use start() method instead."""
    self._running.set()
    self._status = TransferState.RUNNING
    self._time_started = time.time()
    parted_file = DPartedFile(self._temp_file, self._session, self.url,
                              self._file_size, self._part_size,
                              self._retry_count, self._timeout,
                              self._api.download_pool)
    try:
        for part in parted_file:
            if self._stop_signal:
                return
            # Blocks while the transfer is paused.
            self._running.wait()
            self._bytes_done += part.size
            if self._progress_callback:
                progress = Progress(
                    parted_file.total, parted_file.total_submitted,
                    self._bytes_done, self._file_size, self.duration)
                self._progress_callback(progress)
    except Exception as exc:
        if self._errorback:
            return self._errorback(exc)
        raise SbgError('Download failed! %s' % str(exc))
    self._status = TransferState.COMPLETED
    try:
        # Move the finished temp file into its final place.
        os.rename(self._temp_file, self._file_path)
    except Exception:
        raise SbgError("Unable to rename the file.")
    if self._callback:
        return self._callback(self._status)
3.888505
3.76356
1.033199
def _get_file_size(self):
    """Fetches file size by reading the Content-Length header for the
    resource.

    :return: File size.
    """
    fetch = retry(self._retry_count)(_get_content_length)
    file_size = int(fetch(self._session, self.url, self._timeout))
    if file_size == 0:
        # Create the (empty) target file up front for zero-length downloads.
        with io.open(self._file_path, 'a', encoding='utf-8'):
            pass
    return file_size
4.147794
4.124803
1.005574
def run(self):
    """Runs the livesync export"""
    if self.uploader is None:  # pragma: no cover
        raise NotImplementedError
    records = self.fetch_records()
    uploader = self.uploader(self)
    LiveSyncPlugin.logger.info('Uploading %d records', len(records))
    uploader.run(records)
    self.update_last_run()
6.67965
5.839582
1.143858
def run_initial_export(self, events):
    """Runs the initial export.

    This process is expected to take a very long time.

    :param events: iterable of all events in this indico instance
    """
    if self.uploader is None:  # pragma: no cover
        raise NotImplementedError
    self.uploader(self).run_initial(events)
6.143109
7.893353
0.778264
def check_config(quiet=False):
    """Checks if all required config options are set

    :param quiet: if True, return the result as a bool, otherwise raise
                  `IndicoError` if any setting is missing
    """
    from indico_chat.plugin import ChatPlugin
    settings = ChatPlugin.settings.get_all()
    required = ('server', 'muc_server', 'bot_jid', 'bot_password')
    configured = all(settings[key] for key in required)
    if not configured and not quiet:
        raise IndicoError(_('Chat plugin is not configured properly'))
    return configured
6.324462
4.807678
1.315492
def is_chat_admin(user):
    """Checks if a user is a chat admin"""
    from indico_chat.plugin import ChatPlugin
    admins_acl = ChatPlugin.settings.acls
    return admins_acl.contains_user('admins', user)
13.120997
15.647593
0.838531
def map_input_output(item, api):
    """Maps item to appropriate sevenbridges object.

    :param item: Input/Output value.
    :param api: Api instance.
    :return: Mapped object.
    """
    if isinstance(item, list):
        # Map every element of a list recursively.
        return [map_input_output(entry, api) for entry in item]
    elif isinstance(item, dict) and 'class' in item:
        if item['class'].lower() == 'file':
            return File(id=item['path'], api=api)
    else:
        return item
2.88673
3.006411
0.960192
def inplace_reload(method):
    """Executes the wrapped function and reloads the object with data
    returned from the server.
    """
    # noinspection PyProtectedMember
    def wrapped(obj, *args, **kwargs):
        # inplace defaults to True when omitted (None) or explicitly True.
        in_place = kwargs.get('inplace') in (True, None)
        api_object = method(obj, *args, **kwargs)
        if in_place and api_object:
            obj._data = api_object._data
            obj._dirty = api_object._dirty
            obj._data.fetched = False
            return obj
        if api_object:
            return api_object
        return obj
    return wrapped
3.489512
3.299822
1.057485
def retry_on_excs(excs, retry_count=3, delay=5):
    """Retry decorator used to retry callables on for specific exceptions.

    :param excs: Exceptions tuple.
    :param retry_count: Retry count.
    :param delay: Delay in seconds between retries.
    :return: Wrapped function object.
    """
    def wrapper(f):
        @functools.wraps(f)
        def deco(*args, **kwargs):
            for _ in range(retry_count):
                try:
                    return f(*args, **kwargs)
                except excs:
                    if logger:
                        # The old message had no '{}' placeholder, so the
                        # function name was silently dropped.
                        logger.warning(
                            '{}: HTTPError caught. Retrying ...'.format(
                                f.__name__),
                            exc_info=True
                        )
                    time.sleep(delay)
            # All retries exhausted: log (guarded, unlike before) and make
            # one final attempt so the exception propagates to the caller.
            if logger:
                logger.error(
                    '{} failed after {} retries'.format(
                        f.__name__, retry_count)
                )
            return f(*args, **kwargs)
        return deco
    return wrapper
2.454461
2.517697
0.974883
def retry(retry_count):
    """Retry decorator used during file upload and download."""
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            # Exponential backoff: 1s, 2s, 4s, ... between attempts.
            for backoff in range(retry_count):
                try:
                    return f(*args, **kwargs)
                except Exception:
                    time.sleep(2 ** backoff)
            raise SbgError('{}: failed to complete: {}'.format(
                threading.current_thread().getName(), f.__name__)
            )
        return wrapper
    return decorator
3.333567
3.170716
1.051361
def check_for_error(func):
    """Executes the wrapped function and inspects the response object
    for specific errors.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            response = func(*args, **kwargs)
            status_code = response.status_code
            if status_code in range(200, 204):
                return response
            if status_code == 204:
                # No content: nothing to return.
                return
            data = response.json()
            error_cls = {
                400: BadRequest,
                401: Unauthorized,
                403: Forbidden,
                404: NotFound,
                405: MethodNotAllowed,
                408: RequestTimeout,
                409: Conflict,
                429: TooManyRequests,
                500: ServerError,
                503: ServiceUnavailable,
            }.get(status_code, SbgError)
            error = error_cls()
            for attr in ('message', 'code', 'status', 'more_info'):
                if attr in data:
                    setattr(error, attr, data[attr])
            raise error
        except requests.RequestException as e:
            raise SbgError(message=six.text_type(e))
        except JSONDecodeError:
            message = (
                'Service might be unavailable. Can also occur by providing '
                'too many query parameters.'
            )
            raise_from(
                ServiceUnavailable(message=six.text_type(message)), None
            )
        except ValueError as e:
            raise SbgError(message=six.text_type(e))
    return wrapper
2.218631
2.175691
1.019737
def all(self):
    """Fetches all available items.

    :return: Collection object.
    """
    current = self._load(self.href)
    while True:
        try:
            for entry in current._items:
                yield entry
            current = current.next_page()
        except PaginationError:
            # No further pages: generator is done.
            return
6.121253
5.868934
1.042992
def next_page(self):
    """Fetches next result set.

    :return: Collection object.
    """
    next_link = next(
        (link for link in self.links if link.rel.lower() == 'next'), None)
    if next_link is None:
        raise PaginationError('No more entries.')
    return self._load(next_link.href)
6.183865
5.195284
1.190284
def next_page(self):
    """Fetches next result set.

    :return: VolumeCollection object.
    """
    candidate = next((link for link in self.links if link.next), None)
    if candidate is None:
        raise PaginationError('No more entries.')
    return self._load(candidate.next)
8.2719
8.301219
0.996468
def get(cls, *args, **kwargs):
    """Create and return a serializable Report object, retrieved from
    cache if possible
    """
    from indico_piwik.plugin import PiwikPlugin
    if not PiwikPlugin.settings.get('cache_enabled'):
        # Caching disabled: always build a fresh report.
        return cls(*args, **kwargs).to_serializable()
    cache = GenericCache('Piwik.Report')
    key = u'{}-{}-{}'.format(cls.__name__, args, kwargs)
    report = cache.get(key)
    if not report:
        report = cls(*args, **kwargs)
        cache.set(key, report, PiwikPlugin.settings.get('cache_ttl'))
    return report.to_serializable()
3.545309
2.941402
1.205312
self.end_date = end_date self.start_date = start_date if self.end_date is None: today = now_utc().date() end_date = self.event.end_dt.date() self.end_date = end_date if end_date < today else today if self.start_date is None: self.start_date = self.end_date - timedelta(days=ReportBase.default_report_interval)
def _init_date_range(self, start_date=None, end_date=None)
Set date range defaults if no dates are passed
2.675753
2.48702
1.075887
def _build_report(self):
    """Build the report by performing queries to Piwik"""
    queries = {
        'visits': PiwikQueryReportEventMetricVisits(**self.params),
        'unique_visits': PiwikQueryReportEventMetricUniqueVisits(**self.params),
        'visit_duration': PiwikQueryReportEventMetricVisitDuration(**self.params),
        'referrers': PiwikQueryReportEventMetricReferrers(**self.params),
        'peak': PiwikQueryReportEventMetricPeakDateAndVisitors(**self.params),
    }
    # dict.items() instead of the Python-2-only dict.iteritems() so this
    # also runs on Python 3; behavior is identical.
    self.metrics = {name: query.get_result()
                    for name, query in queries.items()}
    self._fetch_contribution_info()
4.046474
3.436022
1.177662
def _fetch_contribution_info(self):
    """Build the list of information entries for contributions of the event"""
    self.contributions = {}
    query = (Contribution.query
             .with_parent(self.event)
             .options(joinedload('legacy_mapping'),
                      joinedload('timetable_entry').lazyload('*')))
    for contribution in query:
        if not contribution.start_dt:
            # Unscheduled contributions carry no useful timing info.
            continue
        if contribution.legacy_mapping:
            cid = contribution.legacy_mapping.legacy_contribution_id
        else:
            cid = contribution.id
        key = '{}t{}'.format(contribution.event_id, cid)
        self.contributions[key] = u'{} ({})'.format(
            contribution.title,
            to_unicode(format_time(contribution.start_dt)))
4.715659
4.37767
1.077207
def remove_callback(self, callback):
    """Remove callback previously registered."""
    try:
        self._async_callbacks.remove(callback)
    except ValueError:
        # Callback was never registered; nothing to do.
        pass
3.993418
3.727158
1.071438
def update_attr(self, attr):
    """Update input attr in self.

    Return list of attributes with changed values.
    """
    changed = []
    for key, value in attr.items():
        if value is None:
            # None means "no update" for this attribute.
            continue
        private_key = '_{0}'.format(key)
        if getattr(self, private_key, None) != value:
            changed.append(key)
            self.__setattr__(private_key, value)
            _LOGGER.debug('%s: update %s with %s', self.name, key, value)
    return changed
2.961397
2.892682
1.023755
def send_feedback(cls, type=FeedbackType.IDEA, referrer=None, text=None,
                  api=None):
    """Sends feedback to sevenbridges.

    :param type: FeedbackType either IDEA, PROBLEM or THOUGHT.
    :param text: Feedback text.
    :param referrer: Feedback referrer.
    :param api: Api instance.
    """
    api = api if api else cls._API
    data = {
        'type': type,
        'text': text,
        # Fall back to the client identification string.
        'referrer': referrer if referrer else six.text_type(client_info),
    }
    logger.info('Sending feedback',
                extra={'resource': cls.__name__, 'query': data})
    api.post(url=cls._URL['send_feedback'], data=data)
4.632167
5.179338
0.894355
def bulk_copy_files(cls, files, destination_project, api=None):
    """Bulk copy of files.

    :param files: List containing files to be copied.
    :param destination_project: Destination project.
    :param api: Api instance.
    :return: MultiStatus copy result.
    """
    api = api if api else cls._API
    data = {
        'project': destination_project,
        'file_ids': [Transform.to_file(item) for item in files],
    }
    logger.info('Performing bulk copy',
                extra={'resource': cls.__name__, 'query': data})
    return api.post(url=cls._URL['bulk_copy'], data=data).json()
3.816819
4.138717
0.922223
async def async_set_state(self, data):
    """Set state of light group.

    {
        "on": true,
        "bri": 180,
        "hue": 43680,
        "sat": 255,
        "transitiontime": 10
    }

    Also update local values of group since websockets doesn't.
    """
    await self._async_set_state_callback(self.deconz_id + '/action', data)
    self.async_update({'state': data})
9.861294
10.530828
0.936421
def async_add_scenes(self, scenes, async_set_state_callback):
    """Add scenes belonging to group."""
    # NOTE(review): like the original, this REPLACES self._scenes with only
    # the scenes that were not present before — previously known scenes are
    # dropped. Presumably intentional upstream; verify if scenes disappear.
    fresh = {}
    for scene in scenes:
        if scene['id'] not in self._scenes:
            fresh[scene['id']] = DeconzScene(
                self, scene, async_set_state_callback)
    self._scenes = fresh
3.309418
3.120986
1.060376
def update_color_state(self, light):
    """Sync color state with light."""
    if light.xy:
        x, y = light.xy
    else:
        x, y = None, None
    self.async_update({
        'state': {
            'bri': light.brightness,
            'hue': light.hue,
            'sat': light.sat,
            'ct': light.ct,
            'x': x,
            'y': y,
            'colormode': light.colormode,
        },
    })
3.067622
2.836464
1.081495
async def async_set_state(self, data):
    """Recall scene to group."""
    recall_field = self._deconz_id + '/recall'
    await self._async_set_state_callback(recall_field, data)
14.850649
10.062064
1.475905
def me(cls, api=None):
    """Retrieves current user information.

    :param api: Api instance.
    :return: User object.
    """
    api = api if api else cls._API
    logger.info('Fetching user information',
                extra={'resource': cls.__name__, 'query': {}})
    user_data = api.get(cls._URL['me']).json()
    return User(api=api, **user_data)
5.049497
4.980588
1.013836
def query(cls, file, offset=None, limit=None, api=None):
    """Queries genome markers on a file.

    :param file: Genome file - Usually bam file.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api if api else cls._API
    file_id = Transform.to_file(file)
    return super(Marker, cls)._query(
        url=cls._URL['query'], offset=offset, limit=limit,
        file=file_id, fields='_all', api=api
    )
6.472484
6.49046
0.99723
def create(cls, file, name, position, chromosome, private=True, api=None):
    """Create a marker on a file.

    :param file: File object or identifier.
    :param name: Marker name.
    :param position: Marker position object.
    :param chromosome: Chromosome number.
    :param private: Whether the marker is private or public.
    :param api: Api instance.
    :return: Marker object.
    """
    api = api if api else cls._API
    data = {
        'file': Transform.to_file(file),
        'name': name,
        'position': position,
        'chromosome': chromosome,
        'private': private,
    }
    logger.info('Creating marker',
                extra={'resource': cls.__name__, 'query': data})
    marker_data = api.post(url=cls._URL['query'], data=data).json()
    return Marker(api=api, **marker_data)
3.59206
3.592689
0.999825
def save(self, inplace=True):
    """Saves all modification to the marker on the server.

    :param inplace: Apply edits on the current instance or get a new one.
    :return: Marker instance.
    """
    modified = self._modified_data()
    if not modified:
        raise ResourceNotModified()
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'modified_data': modified},
    }
    logger.info('Saving marker', extra=extra)
    data = self._api.patch(url=self._URL['get'].format(id=self.id),
                           data=modified).json()
    return Marker(api=self._api, **data)
4.231041
3.779077
1.119597
def async_update(self, event):
    """New event for light.

    Check that state is part of event.
    Signal that light has updated state.
    """
    state = event.get('state', {})
    self.update_attr(state)
    super().async_update(event)
7.786742
6.197579
1.256417
def xy(self):
    """CIE xy color space coordinates as array [x, y] of real values (0..1)."""
    if self._xy != (None, None):
        self._x, self._y = self._xy
    if self._x is None or self._y is None:
        return None
    # Values greater than 1 are raw 16-bit integers from the device and are
    # scaled into 0..1. The divisor is 65535 (max unsigned 16-bit value);
    # the original divided by 65555, a typo.
    x = self._x / 65535 if self._x > 1 else self._x
    y = self._y / 65535 if self._y > 1 else self._y
    return (x, y)
2.574765
2.51669
1.023076
def to_project(project):
    """Serializes project to id string

    :param project: object to serialize
    :return: string id
    """
    from sevenbridges.models.project import Project
    if not project:
        raise SbgError('Project is required!')
    if isinstance(project, Project):
        return project.id
    if isinstance(project, six.string_types):
        return project
    raise SbgError('Invalid project parameter!')
3.441681
3.565209
0.965352
def to_task(task):
    """Serializes task to id string

    :param task: object to serialize
    :return: string id
    """
    from sevenbridges.models.task import Task
    if not task:
        raise SbgError('Task is required!')
    if isinstance(task, Task):
        return task.id
    if isinstance(task, six.string_types):
        return task
    raise SbgError('Invalid task parameter!')
3.470181
3.576791
0.970194
def to_app(app):
    """Serializes app to id string

    :param app: object to serialize
    :return: string id
    """
    from sevenbridges.models.app import App
    if not app:
        raise SbgError('App is required!')
    if isinstance(app, App):
        return app.id
    if isinstance(app, six.string_types):
        return app
    raise SbgError('Invalid app parameter!')
3.477153
3.511981
0.990083
def to_file(file_):
    """Serializes file to id string

    :param file_: object to serialize
    :return: string id
    """
    from sevenbridges.models.file import File
    if not file_:
        raise SbgError('File is required!')
    if isinstance(file_, File):
        return file_.id
    if isinstance(file_, six.string_types):
        return file_
    raise SbgError('Invalid file parameter!')
3.37782
3.584264
0.942403
def to_user(user):
    """Serialize a user object or string to its username string.

    :param user: User instance or username string to serialize.
    :return: string username
    """
    from sevenbridges.models.user import User
    if not user:
        raise SbgError('User is required!')
    if isinstance(user, User):
        return user.username
    if isinstance(user, six.string_types):
        return user
    raise SbgError('Invalid user parameter!')
3.47102
3.894842
0.891184
def to_billing_group(billing_group):
    """Serialize a billing group object or string to its id string.

    :param billing_group: BillingGroup instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.billing_group import BillingGroup
    if not billing_group:
        raise SbgError('Billing group is required!')
    if isinstance(billing_group, BillingGroup):
        return billing_group.id
    if isinstance(billing_group, six.string_types):
        return billing_group
    raise SbgError('Invalid billing group parameter!')
2.758933
2.830974
0.974552
def to_volume(volume):
    """Serialize a volume object or string to its id string.

    :param volume: Volume instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.volume import Volume
    if not volume:
        raise SbgError('Volume is required!')
    if isinstance(volume, Volume):
        return volume.id
    if isinstance(volume, six.string_types):
        return volume
    raise SbgError('Invalid volume parameter!')
3.46162
3.608711
0.95924
def to_marker(marker):
    """Serialize a marker object or string to its id string.

    :param marker: Marker instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.marker import Marker
    if not marker:
        raise SbgError('Marker is required!')
    if isinstance(marker, Marker):
        return marker.id
    if isinstance(marker, six.string_types):
        return marker
    raise SbgError('Invalid marker parameter!')
3.44739
3.598922
0.957895
def to_division(division):
    """Serialize a division object or string to its id string.

    :param division: Division instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.division import Division
    if not division:
        raise SbgError('Division is required!')
    if isinstance(division, Division):
        return division.id
    if isinstance(division, six.string_types):
        return division
    raise SbgError('Invalid division parameter!')
3.306193
3.419223
0.966943
def to_team(team):
    """Serialize a team object or string to its id string.

    :param team: Team instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.team import Team
    if not team:
        raise SbgError('Team is required!')
    if isinstance(team, Team):
        return team.id
    if isinstance(team, six.string_types):
        return team
    raise SbgError('Invalid team parameter!')
3.458598
3.792599
0.911933
def to_import(import_):
    """Serialize an import object or string to its id string.

    :param import_: Import instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.storage_import import Import
    if not import_:
        raise SbgError('Import is required!')
    if isinstance(import_, Import):
        return import_.id
    if isinstance(import_, six.string_types):
        return import_
    raise SbgError('Invalid import parameter!')
3.743591
3.741146
1.000654
def to_export(export):
    """Serialize an export object or string to its id string.

    :param export: Export instance or id string to serialize.
    :return: string id
    """
    from sevenbridges.models.storage_export import Export
    if not export:
        raise SbgError('Export is required!')
    if isinstance(export, Export):
        return export.id
    if isinstance(export, six.string_types):
        return export
    raise SbgError('Invalid export parameter!')
3.725162
3.915097
0.951487
def to_location(location):
    """Serialize a location to its string form.

    :param location: Location string to validate and pass through.
    :return: string
    """
    if not location:
        raise SbgError('Location is required!')
    if isinstance(location, six.string_types):
        return location
    raise SbgError('Invalid location parameter!')
5.365901
6.09982
0.879682
def query(cls, visibility=None, api=None):
    """Query (list) datasets.

    :param visibility: If provided as 'public', retrieves public datasets.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api or cls._API
    return super(Dataset, cls)._query(
        url=cls._URL['query'],
        visibility=visibility,
        fields='_all',
        api=api,
    )
6.315618
7.055596
0.895122
def get_owned_by(cls, username, api=None):
    """Query (list) datasets owned by the given user.

    :param username: Owner username.
    :param api: Api instance.
    :return: Collection object.
    """
    api = api or cls._API
    return super(Dataset, cls)._query(
        url=cls._URL['owned_by'].format(username=username),
        fields='_all',
        api=api,
    )
6.504479
5.931934
1.096519
def save(self, inplace=True):
    """Save all modifications to the dataset on the server.

    :param inplace: Apply edits on the current instance or get a new one.
    :return: Dataset instance, or implicitly None when nothing changed.
    """
    # NOTE(review): despite the docstring, ``inplace`` is never read here --
    # a new Dataset instance is always returned. Confirm against callers.
    modified_data = self._modified_data()
    if bool(modified_data):
        dataset_request_data = {}
        # name/description are re-added only when truthy, so falsy values
        # (e.g. empty strings) are dropped from the PATCH payload.
        name = modified_data.pop('name', None)
        description = modified_data.pop('description', None)
        dataset_request_data.update(modified_data)
        if name:
            dataset_request_data['name'] = name
        if description:
            dataset_request_data['description'] = description
        response = self._api.patch(
            url=self._URL['get'].format(id=self.id),
            data=dataset_request_data
        )
        data = response.json()
        dataset = Dataset(api=self._api, **data)
        return dataset
    # When nothing was modified this falls through and returns None.
2.762249
2.73362
1.010473
def get_members(self, api=None):
    """Retrieve dataset members.

    :param api: Api instance.
    :return: Collection object.
    """
    api = api or self._API
    response = api.get(url=self._URL['members'].format(id=self.id))
    data = response.json()
    members = [Member(api=api, **item) for item in data['items']]
    return Collection(
        resource=Member,
        href=data['href'],
        total=response.headers['x-total-matching-query'],
        items=members,
        links=[Link(**link) for link in data['links']],
        api=api,
    )
3.333996
3.279377
1.016655
def add_member(self, username, permissions, api=None):
    """Add a member to the dataset.

    :param username: Member username.
    :param permissions: Permissions dict.
    :param api: Api instance.
    :return: New member instance.
    """
    api = api or self._API
    payload = {'username': username, 'permissions': permissions}
    response = api.post(
        url=self._URL['members'].format(id=self.id), data=payload
    )
    return Member(api=api, **response.json())
2.91456
3.181801
0.91601
def remove_member(self, member, api=None):
    """Remove a member from the dataset.

    :param member: Member instance or username.
    :param api: Api instance.
    :return: None
    """
    api = api or self._API
    api.delete(
        url=self._URL['member'].format(
            id=self.id, username=Transform.to_member(member)
        )
    )
6.696239
6.720527
0.996386
def query(cls, project=None, names=None, metadata=None, origin=None,
          tags=None, offset=None, limit=None, dataset=None, api=None,
          parent=None):
    """Query (list) files; requires exactly one of project, dataset, parent.

    :param project: Project id.
    :param names: Name list.
    :param metadata: Metadata query dict.
    :param origin: Origin query dict.
    :param tags: List of tags to filter on.
    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param dataset: Dataset id.
    :param api: Api instance.
    :param parent: Folder id or File object with type folder.
    :raises SbgError: When zero or more than one container is provided.
    :return: Collection object.
    """
    api = api or cls._API
    query_params = {}
    if project:
        query_params['project'] = Transform.to_project(project)
    if dataset:
        query_params['dataset'] = Transform.to_dataset(dataset)
    if parent:
        query_params['parent'] = Transform.to_file(parent)
    if not (project or dataset or parent):
        raise SbgError('Project, dataset or parent must be provided.')
    if [project, parent, dataset].count(None) < 2:
        raise SbgError(
            'Only one out of project, parent or dataset must be provided.'
        )
    if isinstance(names, list):
        # An empty list means "match files with an empty name". Build a new
        # list instead of appending to the caller's argument (the previous
        # code mutated the list passed in by the caller).
        query_params['name'] = names if names else [""]
    if metadata and isinstance(metadata, dict):
        for key, value in metadata.items():
            query_params['metadata.' + key] = value
    if tags:
        query_params['tag'] = tags
    if origin and isinstance(origin, dict):
        for key, value in origin.items():
            query_params['origin.' + key] = value
    return super(File, cls)._query(
        api=api, url=cls._URL['query'], offset=offset, limit=limit,
        fields='_all', **query_params
    )
2.381611
2.334789
1.020054
def upload(cls, path, project=None, parent=None, file_name=None,
           overwrite=False, retry=5, timeout=10,
           part_size=PartSize.UPLOAD_MINIMUM_PART_SIZE, wait=True,
           api=None):
    """Upload a file using multipart upload.

    If ``wait`` is True this blocks until the upload has completed;
    otherwise the upload handle is returned without being started.

    :param path: File path on local disc.
    :param project: Project identifier.
    :param parent: Parent folder identifier.
    :param file_name: Optional file name.
    :param overwrite: If True, overwrite the file on the server.
    :param retry: Number of retries if an error occurs during upload.
    :param timeout: Timeout for http requests.
    :param part_size: Part size in bytes.
    :param wait: If True, wait for the upload to complete.
    :param api: Api instance.
    :return: Upload handle.
    """
    api = api or cls._API
    extra = {
        'resource': cls.__name__,
        'query': {
            'path': path,
            'project': project,
            'file_name': file_name,
            'overwrite': overwrite,
            'retry': retry,
            'timeout': timeout,
            'part_size': part_size,
            'wait': wait,
        },
    }
    logger.info('Uploading file', extra=extra)
    # Exactly one of project/parent must be given.
    if not project and not parent:
        raise SbgError('A project or parent identifier is required.')
    if project and parent:
        raise SbgError(
            'Project and parent identifiers are mutually exclusive.'
        )
    if project:
        project = Transform.to_project(project)
    if parent:
        parent = Transform.to_file(parent)
    upload = Upload(
        file_path=path, project=project, parent=parent,
        file_name=file_name, overwrite=overwrite, retry_count=retry,
        timeout=timeout, part_size=part_size, api=api
    )
    if wait:
        upload.start()
        upload.wait()
    return upload
2.492919
2.513949
0.991635
def copy(self, project, name=None):
    """Copy the current file to another project.

    :param project: Destination project.
    :param name: Destination file name.
    :return: Copied File object.
    """
    data = {'project': Transform.to_project(project)}
    if name:
        data['name'] = name
    extra = {
        'resource': self.__class__.__name__,
        'query': {'id': self.id, 'data': data},
    }
    logger.info('Copying file', extra=extra)
    response = self._api.post(
        url=self._URL['copy'].format(id=self.id), data=data
    ).json()
    return File(api=self._api, **response)
3.837322
4.224794
0.908286
def download_info(self):
    """Fetch download information containing the file url.

    :return: DownloadInfo object.
    """
    response = self._api.get(
        url=self._URL['download_info'].format(id=self.id)
    )
    return DownloadInfo(api=self._api, **response.json())
5.445722
5.506608
0.988943
def download(self, path, retry=5, timeout=10,
             chunk_size=PartSize.DOWNLOAD_MINIMUM_PART_SIZE, wait=True,
             overwrite=False):
    """Download the file and return a download handle.

    When ``wait`` is False the download does not start until the handle's
    ``.start()`` method is invoked.

    :param path: Full path to the new file.
    :param retry: Number of retries if an error occurs during download.
    :param timeout: Timeout for http requests.
    :param chunk_size: Chunk size in bytes.
    :param wait: If True, wait for the download to complete.
    :param overwrite: If True, silently overwrite an existing local file.
    :raises LocalFileAlreadyExists: When the target exists and
        ``overwrite`` is False.
    :return: Download handle.
    """
    if not overwrite and os.path.exists(path):
        raise LocalFileAlreadyExists(message=path)
    extra = {'resource': self.__class__.__name__, 'query': {
        'id': self.id,
        'path': path,
        'overwrite': overwrite,
        'retry': retry,
        'timeout': timeout,
        'chunk_size': chunk_size,
        'wait': wait,
    }}
    logger.info('Downloading file', extra=extra)
    info = self.download_info()
    download = Download(
        url=info.url, file_path=path, retry_count=retry, timeout=timeout,
        part_size=chunk_size, api=self._api
    )
    if wait:
        download.start()
        download.wait()
    # Always return the handle so the documented contract holds; the
    # previous code implicitly returned None when wait=True.
    return download
3.067655
3.011291
1.018717
def save(self, inplace=True, silent=False):
    """Saves all modifications to the file on the server.

    By default this method raises an error if you are trying to save an
    instance that was not changed; set ``silent=True`` to disable this
    behaviour.

    :param inplace: Apply edits to the current instance or get a new one.
    :param silent: If True, do not raise when the file was not modified.
    :raise ResourceNotModified: When nothing changed and silent is False.
    :return: File instance (the result of :meth:`reload`).
    """
    modified_data = self._modified_data()
    if silent or bool(modified_data):
        # Metadata has its own endpoint. If ``file.metadata = value`` was
        # executed, the instance carries a ``_method`` marker (see reload())
        # meaning metadata must be overwritten (PUT) rather than merged
        # (PATCH).
        if 'metadata' in modified_data:
            if hasattr(self, '_method'):
                self._api.put(
                    url=self._URL['metadata'].format(id=self.id),
                    data=modified_data['metadata']
                )
            else:
                self._api.patch(
                    url=self._URL['metadata'].format(id=self.id),
                    data=modified_data['metadata']
                )
            modified_data.pop('metadata')
        # Tags are replaced wholesale via their own endpoint.
        if 'tags' in modified_data:
            self._api.put(
                url=self._URL['tags'].format(id=self.id),
                data=modified_data['tags']
            )
            modified_data.pop('tags')
        # Change everything else through a regular PATCH on the resource.
        if bool(modified_data):
            self._api.patch(
                url=self._URL['get'].format(id=self.id),
                data=modified_data
            )
    else:
        raise ResourceNotModified()
    return self.reload()
2.651551
2.503744
1.059035
def stream(self, part_size=32 * PartSize.KB):
    """Create an iterator which streams the file content.

    :param part_size: Size of each part in bytes. Default 32KB.
    :return: Iterator over the file content.
    """
    info = self.download_info()
    response = self._api.get(
        url=info.url, stream=True, append_base=False
    )
    for chunk in response.iter_content(part_size):
        yield chunk
4.806348
4.856011
0.989773
def reload(self):
    """Refreshes the file with the data from the server.

    :raise SbgError: When the resource cannot be fetched by either its
        href or its id.
    """
    try:
        # Prefer the resource's own href when it is available/valid.
        data = self._api.get(self.href, append_base=False).json()
        resource = File(api=self._api, **data)
    except Exception:
        try:
            # Fall back to building the URL from the file id.
            data = self._api.get(
                self._URL['get'].format(id=self.id)).json()
            resource = File(api=self._api, **data)
        except Exception:
            raise SbgError('Resource can not be refreshed!')
    # Copy the freshly fetched state into this instance.
    self._data = resource._data
    self._dirty = resource._dirty
    self._old = copy.deepcopy(self._data.data)
    # If file.metadata = value was executed
    # file object will have attribute _method='PUT', which tells us
    # to force overwrite of metadata on the server. This is metadata
    # specific. Once we reload the resource we delete the attribute
    # _method from the instance.
    try:
        delattr(self, '_method')
    except AttributeError:
        pass
6.703001
6.459449
1.037705
def content(self, path=None, overwrite=True, encoding='utf-8'):
    """Download the file and read its content in memory.

    Downloads to ``path`` or to a temporary file when ``path`` is omitted.
    Should not be used on very large files.

    :param path: Path for the file download; a tmp file is used if omitted.
    :param overwrite: Overwrite the file if it exists locally.
    :param encoding: File encoding; UTF-8 by default.
    :return: File content as a string.
    """
    if not path:
        with tempfile.NamedTemporaryFile() as tmpfile:
            self.download(wait=True, path=tmpfile.name, overwrite=overwrite)
            with io.open(tmpfile.name, 'r', encoding=encoding) as fp:
                return fp.read()
    self.download(wait=True, path=path, overwrite=overwrite)
    with io.open(path, 'r', encoding=encoding) as fp:
        return fp.read()
1.954677
2.067256
0.945542
def bulk_get(cls, files, api=None):
    """Retrieve files with the specified ids in bulk.

    :param files: Files (or id strings) to be retrieved.
    :param api: Api instance.
    :return: List of FileBulkRecord objects.
    """
    api = api or cls._API
    payload = {'file_ids': [Transform.to_file(item) for item in files]}
    logger.info('Getting files in bulk.')
    response = api.post(url=cls._URL['bulk_get'], data=payload)
    return FileBulkRecord.parse_records(response=response, api=api)
4.572885
4.602982
0.993461
def bulk_update(cls, files, api=None):
    """Update the details for multiple specified files in one call.

    For each file the server sets a new name, new tags and metadata,
    replacing all existing information and erasing omitted parameters.

    :param files: List of file instances.
    :param api: Api instance.
    :return: List of FileBulkRecord objects.
    """
    if not files:
        raise SbgError('Files are required.')
    api = api or cls._API
    items = [
        {
            'id': file_.id,
            'name': file_.name,
            'tags': file_.tags,
            'metadata': file_.metadata,
        }
        for file_ in files
    ]
    logger.info('Updating files in bulk.')
    response = api.post(url=cls._URL['bulk_update'], data={'items': items})
    return FileBulkRecord.parse_records(response=response, api=api)
3.394619
2.991825
1.134631
def list_files(self, offset=None, limit=None, api=None):
    """List files contained in this folder.

    :param offset: Pagination offset.
    :param limit: Pagination limit.
    :param api: Api instance.
    :raises SbgError: When this file is not a folder.
    :return: Collection of files.
    """
    api = api or self._API
    if not self.is_folder():
        raise SbgError('{name} is not a folder'.format(name=self.name))
    return super(File, self.__class__)._query(
        api=api,
        url=self._URL['list_folder'].format(id=self.id),
        offset=offset,
        limit=limit,
        fields='_all',
    )
4.851232
5.172929
0.937811
def create_folder(cls, name, parent=None, project=None, api=None):
    """Create a new folder in a parent folder or project.

    :param name: Folder name.
    :param parent: Parent folder.
    :param project: Project to create the folder in.
    :param api: Api instance.
    :raises SbgError: Unless exactly one of parent/project is provided.
    :return: New folder instance.
    """
    api = api or cls._API
    if not parent and not project:
        raise SbgError('Parent or project must be provided')
    if parent and project:
        raise SbgError(
            'Providing both "parent" and "project" is not allowed'
        )
    data = {'name': name, 'type': cls.FOLDER_TYPE}
    if parent:
        data['parent'] = Transform.to_file(file_=parent)
    else:
        data['project'] = Transform.to_project(project=project)
    response = api.post(url=cls._URL['create_folder'], data=data).json()
    return cls(api=api, **response)
3.138318
3.510194
0.894058
def copy_to_folder(self, parent, name=None, api=None):
    """Copy this file into a folder.

    :param parent: Folder to copy the file to.
    :param name: New file name.
    :param api: Api instance.
    :raises SbgError: When this file is itself a folder.
    :return: New file instance.
    """
    api = api or self._API
    if self.is_folder():
        raise SbgError('Copying folders is not supported')
    payload = {'parent': Transform.to_file(parent)}
    if name:
        payload['name'] = name
    data = api.post(
        url=self._URL['copy_to_folder'].format(file_id=self.id),
        data=payload
    ).json()
    return File(api=api, **data)
3.954459
4.17556
0.947049
def get_json_from_remote_server(func, **kwargs):
    """Safely wrap a call to the remote Piwik server and decode its JSON.

    Returns an empty dict when the request failed, the payload is not
    valid JSON, or the server reported an error result.
    """
    raw = func(**kwargs)
    # A failed request was already logged in PiwikRequest; nothing to parse.
    if raw is None:
        return {}
    try:
        payload = json.loads(raw)
        if isinstance(payload, dict) and payload.get('result') == 'error':
            current_plugin.logger.error('The Piwik server responded with an error: %s', payload['message'])
            return {}
        return payload
    except Exception:
        current_plugin.logger.exception('Unable to load JSON from source %s', raw)
        return {}
4.907845
4.53556
1.082082
def reduce_json(data):
    """Reduce a JSON object by summing its values as integers.

    :param data: Mapping whose values are numbers or numeric strings.
    :return: Integer sum of all values (0 for an empty mapping).
    """
    # sum() handles the empty mapping (returns 0) and coerces every value;
    # the previous reduce() raised TypeError on an empty dict and returned
    # a lone value unconverted when the dict had a single entry.
    return sum(int(value) for value in data.values())
4.521276
4.819283
0.938164
def stringify_seconds(seconds=0):
    """Convert a number of seconds to a human-readable "HHh MMm SSs" string.

    :param seconds: Time delta expressed in seconds.
    :return: String such as ``"1h 2m 3s"``; non-positive input yields
        ``"0h 0m 0s"``.
    """
    seconds = int(seconds)
    hours, minutes, secs = 0, 0, 0
    if seconds > 0:
        # Integer divmod instead of the previous true division, which
        # produced floats and relied on "%d" truncating them.
        minutes, secs = divmod(seconds, 60)
        hours, minutes = divmod(minutes, 60)
    return "%dh %dm %ds" % (hours, minutes, secs)
2.246439
2.263514
0.992456
def create_sensor(sensor_id, sensor, async_set_state_callback):
    """Instantiate the sensor class matching the reported sensor type.

    Simplifies creating a sensor by not needing to know its type.
    Returns None when the type matches no known class.
    """
    sensor_type = sensor['type']
    dispatch = (
        (CONSUMPTION, Consumption),
        (CARBONMONOXIDE, CarbonMonoxide),
        (DAYLIGHT, Daylight),
        (FIRE, Fire),
        (GENERICFLAG, GenericFlag),
        (GENERICSTATUS, GenericStatus),
        (HUMIDITY, Humidity),
        (LIGHTLEVEL, LightLevel),
        (OPENCLOSE, OpenClose),
        (POWER, Power),
        (PRESENCE, Presence),
        (PRESSURE, Pressure),
        (SWITCH, Switch),
        (TEMPERATURE, Temperature),
        (THERMOSTAT, Thermostat),
        (VIBRATION, Vibration),
        (WATER, Water),
    )
    for matching_types, sensor_class in dispatch:
        if sensor_type in matching_types:
            if sensor_class is Thermostat:
                # Thermostat is the only class that takes the callback.
                return sensor_class(
                    sensor_id, sensor, async_set_state_callback)
            return sensor_class(sensor_id, sensor)
1.502116
1.48457
1.011819
def supported_sensor(sensor):
    """Check if sensor is supported by pydeconz.

    Logs an info message for unsupported types.
    """
    supported_types = DECONZ_BINARY_SENSOR + DECONZ_SENSOR + OTHER_SENSOR
    if sensor['type'] not in supported_types:
        _LOGGER.info(
            'Unsupported sensor type %s (%s)',
            sensor['type'], sensor['name'])
        return False
    return True
5.917969
4.6589
1.27025
def async_update(self, event, reason=None):
    """Process a new event for the sensor.

    Checks whether 'state' or 'config' is part of the event, applies the
    matching attributes, and signals which attributes changed value.

    :param event: Event dict that may carry 'state' and/or 'config'.
    :param reason: Optional dict that is populated with what changed.
    """
    # Create a fresh dict per call: the previous ``reason={}`` default was
    # a mutable default argument, shared and mutated across calls.
    if reason is None:
        reason = {}
    reason['attr'] = []
    for data in ('state', 'config'):
        changed_attr = self.update_attr(event.get(data, {}))
        reason[data] = data in event
        reason['attr'] += changed_attr
    super().async_update(event, reason)
6.275179
5.214762
1.203349
def status(self):
    """Return the daylight phase name for the numeric status code.

    Unknown codes map to the string "unknown".
    """
    # Mapping of deCONZ daylight status codes to phase names; a single
    # dict lookup replaces the previous 15-branch elif chain.
    phases = {
        100: "nadir",
        110: "night_end",
        120: "nautical_dawn",
        130: "dawn",
        140: "sunrise_start",
        150: "sunrise_end",
        160: "golden_hour_1",
        170: "solar_noon",
        180: "golden_hour_2",
        190: "sunset_start",
        200: "sunset_end",
        210: "dusk",
        220: "nautical_dusk",
        230: "night_start",
    }
    return phases.get(self._status, "unknown")
1.521893
1.422475
1.069892