INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Do not warn on external images.
def _warn_node(self, msg, *args, **kwargs): """Do not warn on external images.""" if not msg.startswith('nonlocal image URI found:'): _warn_node_old(self, msg, *args, **kwargs)
Connect receivers to signals.
def connect_receivers():
    """Wire up signal receivers for the access-request lifecycle."""
    request_created.connect(send_email_validation)
    request_confirmed.connect(send_confirmed_notifications)
    request_rejected.connect(send_reject_notification)
    # Order is important: the secret link must exist before the
    # acceptance notification (which references it) is sent.
    request_accepted.connect(create_secret_link)
    request_accepted.connect(send_accept_notification)
Receiver for request-accepted signal.
def create_secret_link(request, message=None, expires_at=None):
    """Receiver for the request-accepted signal: attach a secret link."""
    pid, record = get_record(request.recid)
    if not record:
        raise RecordNotFound(request.recid)
    link_description = render_template(
        "zenodo_accessrequests/link_description.tpl",
        request=request,
        record=record,
        pid=pid,
        expires_at=expires_at,
        message=message,
    )
    request.create_secret_link(
        record["title"],
        description=link_description,
        expires_at=expires_at,
    )
Receiver for request-accepted signal to send email notification.
def send_accept_notification(request, message=None, expires_at=None):
    """Receiver for the request-accepted signal: email the requester."""
    pid, record = get_record(request.recid)
    # Absolute URL of the newly-created secret link for the record page.
    record_link = request.link.get_absolute_url('invenio_records_ui.recid')
    _send_notification(
        request.sender_email,
        _("Access request accepted"),
        "zenodo_accessrequests/emails/accepted.tpl",
        request=request,
        record=record,
        pid=pid,
        record_link=record_link,
        message=message,
        expires_at=expires_at,
    )
Receiver for request-confirmed signal to send email notification.
def send_confirmed_notifications(request):
    """Receiver for the request-confirmed signal: notify owner and sender."""
    pid, record = get_record(request.recid)
    if record is None:
        current_app.logger.error(
            "Cannot retrieve record %s. Emails not sent" % request.recid)
        return

    title = _("Access request: %(record)s", record=record["title"])
    # One notification to the record owner...
    _send_notification(
        request.receiver.email,
        title,
        "zenodo_accessrequests/emails/new_request.tpl",
        request=request,
        record=record,
        pid=pid,
    )
    # ...and a confirmation copy to the requester.
    _send_notification(
        request.sender_email,
        title,
        "zenodo_accessrequests/emails/confirmation.tpl",
        request=request,
        record=record,
        pid=pid,
    )
Receiver for request-created signal to send email notification.
def send_email_validation(request):
    """Receiver for the request-created signal: ask sender to verify email."""
    token = EmailConfirmationSerializer().create_token(
        request.id, dict(email=request.sender_email))
    pid, record = get_record(request.recid)
    expires_in = current_app.config["ACCESSREQUESTS_CONFIRMLINK_EXPIRES_IN"]
    confirm_link = url_for(
        "invenio_records_ui.recid_access_request_email_confirm",
        pid_value=request.recid,
        token=token,
        _external=True,
    )
    _send_notification(
        request.sender_email,
        _("Access request verification"),
        "zenodo_accessrequests/emails/validate_email.tpl",
        request=request,
        record=record,
        pid=pid,
        # Link lifetime expressed in whole days for the email template.
        days=timedelta(seconds=expires_in).days,
        confirm_link=confirm_link,
    )
Receiver for request-rejected signal to send email notification.
def send_reject_notification(request, message=None):
    """Receiver for the request-rejected signal: email the requester."""
    pid, record = get_record(request.recid)
    _send_notification(
        request.sender_email,
        _("Access request rejected"),
        "zenodo_accessrequests/emails/rejected.tpl",
        request=request,
        record=record,
        pid=pid,
        message=message,
    )
Render a template and send as email.
def _send_notification(to, subject, template, **ctx):
    """Render *template* with *ctx* and queue it as an email to *to*."""
    message = Message(
        subject,
        sender=current_app.config.get('SUPPORT_EMAIL'),
        recipients=[to],
    )
    message.body = render_template(template, **ctx)
    # The async task receives the message as a plain dict for serialization.
    send_email.delay(message.__dict__)
Create a new secret link.
def create(cls, title, owner, extra_data, description="", expires_at=None):
    """Create and persist a new secret link.

    ``expires_at`` may be a plain :class:`datetime.date`; it is
    normalized to a datetime at midnight.
    """
    if isinstance(expires_at, date):
        expires_at = datetime.combine(expires_at, datetime.min.time())

    with db.session.begin_nested():
        obj = cls(
            owner=owner,
            title=title,
            description=description,
            expires_at=expires_at,
            token='',
        )
        db.session.add(obj)

    # The token encodes obj.id, so it can only be generated after the
    # first flush has assigned that id.
    with db.session.begin_nested():
        obj.token = SecretLinkFactory.create_token(
            obj.id, extra_data, expires_at=expires_at
        ).decode('utf8')

    link_created.send(obj)
    return obj
Validate a secret link token.
def validate_token(cls, token, expected_data):
    """Validate a secret link token.

    The database is only queried (to check for revocation) once the
    token itself has been validated.
    """
    data = SecretLinkFactory.validate_token(
        token, expected_data=expected_data)
    if not data:
        return False
    link = cls.query.get(data['id'])
    return bool(link and link.is_valid())
Load token data stored in token (ignores expiry date of tokens).
def extra_data(self):
    """Return the data stored in the token (ignoring token expiry).

    Returns ``None`` when no token has been generated yet.
    """
    if not self.token:
        return None
    return SecretLinkFactory.load_token(self.token, force=True)["data"]
Get absolute URL for secret link (using https scheme).
def get_absolute_url(self, endpoint):
    """Get absolute URL for secret link (using https scheme).

    The endpoint is passed to ``url_for`` with ``token`` and
    ``extra_data`` as keyword arguments. E.g.::

        >>> link.extra_data
        dict(recid=1)
        >>> link.get_absolute_url('record.metadata')

    translates into::

        >>> url_for('record.metadata', token="...", recid=1, )
    """
    # extra_data is None when no token exists yet; fall back to an
    # empty dict so the membership test below cannot raise TypeError.
    copy = deepcopy(self.extra_data) or {}
    if 'recid' in copy:
        # Record endpoints expect the id under 'pid_value'.
        copy['pid_value'] = copy.pop('recid')
    return url_for(
        endpoint, token=self.token, _external=True, **copy
    )
Revoke a secret link.
def revoke(self):
    """Revoke a secret link; return False if already revoked."""
    if self.revoked_at is not None:
        return False
    with db.session.begin_nested():
        self.revoked_at = datetime.utcnow()
        link_revoked.send(self)
    return True
Create a new access request.
def create(cls, recid=None, receiver=None, sender_full_name=None,
           sender_email=None, justification=None, sender=None):
    """Create a new access request.

    :param recid: Record id (required).
    :param receiver: User object of receiver (required).
    :param sender_full_name: Full name of sender (required).
    :param sender_email: Email address of sender (required).
    :param justification: Justification message (required).
    :param sender: User object of sender (optional).
    """
    sender_user_id = None if sender is None else sender.id
    assert recid
    assert receiver
    assert sender_full_name
    assert sender_email
    assert justification

    # Requests from already-confirmed users skip email validation.
    if sender and sender.confirmed_at:
        status = RequestStatus.PENDING
    else:
        status = RequestStatus.EMAIL_VALIDATION

    with db.session.begin_nested():
        obj = cls(
            status=status,
            recid=recid,
            receiver_user_id=receiver.id,
            sender_user_id=sender_user_id,
            sender_full_name=sender_full_name,
            sender_email=sender_email,
            justification=justification,
        )
        db.session.add(obj)

    # Fire the signal matching the initial state.
    if obj.status == RequestStatus.EMAIL_VALIDATION:
        request_created.send(obj)
    else:
        request_confirmed.send(obj)

    return obj
Get access request for a specific receiver.
def get_by_receiver(cls, request_id, user):
    """Return the access request addressed to *user*, or None."""
    query = cls.query.filter_by(id=request_id, receiver_user_id=user.id)
    return query.first()
Confirm that the sender's email is valid.
def confirm_email(self):
    """Mark the sender's email address as verified."""
    with db.session.begin_nested():
        # Only legal from the EMAIL_VALIDATION state.
        if self.status != RequestStatus.EMAIL_VALIDATION:
            raise InvalidRequestStateError(RequestStatus.EMAIL_VALIDATION)
        self.status = RequestStatus.PENDING
    request_confirmed.send(self)
Accept request.
def accept(self, message=None, expires_at=None):
    """Accept a pending request and emit the request-accepted signal."""
    with db.session.begin_nested():
        # Only legal from the PENDING state.
        if self.status != RequestStatus.PENDING:
            raise InvalidRequestStateError(RequestStatus.PENDING)
        self.status = RequestStatus.ACCEPTED
    request_accepted.send(self, message=message, expires_at=expires_at)
Reject request.
def reject(self, message=None):
    """Reject a pending request and emit the request-rejected signal."""
    with db.session.begin_nested():
        # Only legal from the PENDING state.
        if self.status != RequestStatus.PENDING:
            raise InvalidRequestStateError(RequestStatus.PENDING)
        self.status = RequestStatus.REJECTED
    request_rejected.send(self, message=message)
Create a secret link from request.
def create_secret_link(self, title, description=None, expires_at=None):
    """Create and attach a secret link for this request's record."""
    extra = dict(recid=self.recid)
    self.link = SecretLink.create(
        title,
        self.receiver,
        extra_data=extra,
        description=description,
        expires_at=expires_at,
    )
    return self.link
Given required properties from a NistBeaconValue compute the SHA512Hash object.
def get_hash(
        cls,
        version: str,
        frequency: int,
        timestamp: int,
        seed_value: str,
        prev_output: str,
        status_code: str,
) -> SHA512Hash:
    """Compute the SHA512Hash over a NistBeaconValue's signed fields.

    :param version: NistBeaconValue.version
    :param frequency: NistBeaconValue.frequency
    :param timestamp: NistBeaconValue.timestamp
    :param seed_value: NistBeaconValue.seed_value
    :param prev_output: NistBeaconValue.previous_output_value
    :param status_code: NistBeaconValue.status_code
    :return: SHA512 Hash for NistBeaconValue signature verification
    """
    # Big-endian fixed-width packing: uint32 frequency, uint64 timestamp,
    # the two 64-byte hex fields as raw bytes, uint32 status code.
    packed = struct.pack(
        '>1I1Q64s64s1I',
        frequency,
        timestamp,
        binascii.a2b_hex(seed_value),
        binascii.a2b_hex(prev_output),
        int(status_code),
    )
    return SHA512.new(version.encode() + packed)
Verify a given NIST message hash and signature for a beacon value.
def verify(
        cls,
        timestamp: int,
        message_hash: SHA512Hash,
        signature: bytes,
) -> bool:
    """Verify a given NIST message hash and signature for a beacon value.

    :param timestamp: The timestamp of the record being verified.
    :param message_hash: The hash that was carried out over the message.
        This is an object belonging to the `Crypto.Hash` module.
    :param signature: The signature that needs to be validated.
    :return: True if verification is correct. False otherwise.
    """
    # Select the verifier active at the record's timestamp; records in
    # the middle window have no usable verifier (presumably a key
    # rotation gap -- see the class's _VERIFIER_* key dates).
    if timestamp < 1496176860:
        verifier = cls._VERIFIER_20130905
    elif timestamp < 1502202360:
        verifier = None
    else:
        verifier = cls._VERIFIER_20170808

    # Without a verifier the record cannot be validated: mark invalid.
    if verifier:
        result = verifier.verify(message_hash, signature)
    else:
        result = False

    # Some crypto backends return 1/0 instead of booleans; normalize.
    if isinstance(result, int):
        result = (result == 1)

    return result
Template filter to check if a record is embargoed.
def is_embargoed(record):
    """Template filter to check if a record is embargoed."""
    embargo_date = record.get('embargo_date')
    return (record.get('access_right') == 'embargoed'
            and embargo_date
            and embargo_date > datetime.utcnow().date())
Create an access request.
def access_request(pid, record, template, **kwargs):
    """Create an access request.

    Renders the access-request form for a restricted record and, on a
    valid POST, creates the AccessRequest and redirects to the record.
    """
    recid = int(pid.pid_value)
    datastore = LocalProxy(
        lambda: current_app.extensions['security'].datastore)

    # Record must be in restricted access mode.
    if record.get('access_right') != 'restricted' or \
            not record.get('access_conditions'):
        abort(404)

    # Record must have an owner and owner must still exist. find_user
    # returns None for deleted users; drop those so a None receiver can
    # never reach AccessRequest.create below.
    owners = record.get('owners', [])
    record_owners = [u for u in (datastore.find_user(id=owner_id)
                                 for owner_id in owners) if u]
    if not record_owners:
        abort(404)

    sender = None
    initialdata = dict()

    # Prepare initial form data
    if current_user.is_authenticated:
        sender = current_user
        initialdata['email'] = current_user.email
        if current_user.profile:
            initialdata['full_name'] = current_user.profile.full_name

    # Normal form validation
    form = AccessRequestForm(formdata=request.form, **initialdata)

    if form.validate_on_submit():
        accreq = AccessRequest.create(
            recid=recid,
            receiver=record_owners[0],
            sender_full_name=form.data['full_name'],
            sender_email=form.data['email'],
            justification=form.data['justification'],
            sender=sender
        )
        db.session.commit()
        if accreq.status == RequestStatus.EMAIL_VALIDATION:
            flash(_(
                "Email confirmation needed: We have sent you an email to "
                "verify your address. Please check the email and follow the "
                "instructions to complete the access request."),
                category='info')
        else:
            flash(_("Access request submitted."), category='info')
        return redirect(url_for('invenio_records_ui.recid', pid_value=recid))

    return render_template(
        template,
        pid=pid,
        record=record,
        form=form,
        owners=record_owners,
    )
Confirm email address.
def confirm(pid, record, template, **kwargs):
    """Confirm email address."""
    recid = int(pid.pid_value)
    token = request.view_args['token']

    # Validate token
    data = EmailConfirmationSerializer.compat_validate_token(token)
    if data is None:
        flash(_("Invalid confirmation link."), category='danger')
        return redirect(url_for("invenio_records_ui.recid", pid_value=recid))

    # Request must exist and must still be awaiting email validation.
    r = AccessRequest.query.get(data['id'])
    if not r or r.status != RequestStatus.EMAIL_VALIDATION:
        abort(404)

    r.confirm_email()
    db.session.commit()
    flash(_("Email validated and access request submitted."),
          category='info')
    return redirect(url_for("invenio_records_ui.recid", pid_value=recid))
Creates a generic endpoint connection that doesn't finish.
def _get_endpoint(self):
    """Build a generic SSH command endpoint that doesn't finish.

    ``/bin/cat`` blocks on stdin, keeping the connection open.
    """
    return SSHCommandClientEndpoint.newConnection(
        reactor,
        b'/bin/cat',
        self.username,
        self.hostname,
        port=self.port,
        keys=self.keys,
        password=self.password,
        knownHosts=self.knownHosts,
    )
Get reverse direction of ordering.
def reverse(self, col):
    """Return *col* with its ordering direction flipped, or None.

    Unknown columns yield None; unselected columns are returned
    unchanged (their first click sorts ascending).
    """
    if col not in self.options:
        return None
    if not self.is_selected(col):
        return col
    return col if not self.asc else '-{0}'.format(col)
Get direction ( ascending/ descending ) of ordering.
def dir(self, col, asc='asc', desc='desc'):
    """Return *asc*/*desc* for the currently-selected column, else None."""
    if col == self._selected and self.asc is not None:
        return asc if self.asc else desc
    return None
Get the column currently being ordered by.
def selected(self):
    """Return the ordering column, '-'-prefixed when descending."""
    if not self._selected:
        return None
    return self._selected if self.asc else "-{0}".format(self._selected)
Get query with correct ordering.
def items(self):
    """Return the query ordered by the selected column, if any."""
    if self.asc is not None and self._selected:
        if self.asc:
            return self.query.order_by(self._selected)
        return self.query.order_by(desc(self._selected))
    return self.query
Open the file referenced in this object and scrape the version.
def get_version(self) -> str:
    """Open the file referenced in this object and scrape the version.

    :return: The version as a string, an empty string if there is no
        match to the magic_line, or any file exception message
        encountered.
    """
    try:
        # Context manager guarantees the handle is closed even if
        # readlines() raises (the original leaked it in that case).
        with open(self.file_path, 'r') as f:
            lines = f.readlines()
    except Exception as e:
        return str(e)

    for line in lines:
        if self.magic_line in line:
            # Version sits between the magic prefix and the trailing
            # strip_end_chars characters (e.g. closing quote + newline).
            start = len(self.magic_line)
            end = len(line) - self.strip_end_chars
            return line[start:end]
    return ''
Set the version for this given file.
def set_version(self, new_version: str):
    """Set the version for this given file.

    Rewrites the text between the magic prefix and the trailing
    strip_end_chars on every matching line.

    :param new_version: The new version string to set.
    """
    try:
        # Context managers close the handles even on read/write errors
        # (the original leaked them in those cases).
        with open(self.file_path, 'r') as f:
            lines = f.readlines()
    except Exception as e:
        print(str(e))
        return

    for idx, line in enumerate(lines):
        if self.magic_line in line:
            start = len(self.magic_line)
            end = len(line) - self.strip_end_chars
            lines[idx] = line[:start] + new_version + line[end:]

    try:
        with open(self.file_path, 'w') as f:
            f.writelines(lines)
    except Exception as e:
        print(str(e))
Load test data fixture.
def records():
    """Load test data fixture."""
    import uuid
    from invenio_records.api import Record
    from invenio_pidstore.models import PersistentIdentifier, PIDStatus

    create_test_user()
    indexer = RecordIndexer()

    # Record 1 - Live record
    with db.session.begin_nested():
        rec_uuid = uuid.uuid4()
        pid1 = PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered',
            'description': 'This is an awesome description',
            'control_number': '1',
            'access_right': 'restricted',
            'access_conditions': 'fuu',
            'owners': [1, 2],
            'recid': 1
        }, id_=rec_uuid)
        indexer.index_by_id(pid1.object_uuid)

    db.session.commit()
    # NOTE(review): fixed delay presumably lets the search index catch
    # up before tests query it -- confirm against test harness.
    sleep(3)
Configure SSH client options
def _init_ssh(self):
    """Configure SSH client options."""
    local_cfg = self.config
    global_cfg = self.tensor.config

    def option(key, default=None):
        # Source-level configuration wins over the global tensor config.
        return local_cfg.get(key, global_cfg.get(key, default))

    self.ssh_host = local_cfg.get('ssh_host', self.hostname)
    self.known_hosts = option('ssh_knownhosts_file')
    self.ssh_keyfile = option('ssh_keyfile')
    self.ssh_key = option('ssh_key')
    # Not sure why you'd bother but maybe you've got a weird policy
    self.ssh_keypass = option('ssh_keypass')
    self.ssh_user = option('ssh_username')
    self.ssh_password = option('ssh_password')
    self.ssh_port = option('ssh_port', 22)

    # Verify config to see if we're good to go
    if not (self.ssh_key or self.ssh_keyfile or self.ssh_password):
        raise Exception("To use SSH you must specify *one* of ssh_key,"
                        " ssh_keyfile or ssh_password for this source"
                        " check or globally")

    if not self.ssh_user:
        raise Exception("ssh_username must be set")

    self.ssh_keydb = []

    # Connection signature: sources with identical host/credentials
    # share one cached SSH client.
    cHash = hashlib.sha1(':'.join((
        self.ssh_host, self.ssh_user, str(self.ssh_port),
        str(self.ssh_password), str(self.ssh_key), str(self.ssh_keyfile)
    )).encode()).hexdigest()

    if cHash in self.tensor.hostConnectorCache:
        # Reuse the cached client; this source does not own the connect.
        self.ssh_client = self.tensor.hostConnectorCache.get(cHash)
        self.ssh_connector = False
    else:
        self.ssh_connector = True
        self.ssh_client = ssh.SSHClient(
            self.ssh_host, self.ssh_user, self.ssh_port,
            password=self.ssh_password, knownhosts=self.known_hosts)

        if self.ssh_keyfile:
            self.ssh_client.addKeyFile(self.ssh_keyfile, self.ssh_keypass)
        if self.ssh_key:
            self.ssh_client.addKeyString(self.ssh_key, self.ssh_keypass)

        self.tensor.hostConnectorCache[cHash] = self.ssh_client
Starts the timer for this source
def startTimer(self):
    """Start the polling timer for this source.

    Only the source that owns the SSH connector opens the connection.
    """
    self.td = self.t.start(self.inter)
    if self.use_ssh and self.ssh_connector:
        self.ssh_client.connect()
Called for every timer tick. Calls self. get which can be a deferred and passes that result back to the queueBack method Returns a deferred
def tick(self):
    """Handle one timer tick.

    Calls self._get (which can be a deferred) and passes the result to
    the queueBack method. Returns a deferred.
    """
    if self.sync:
        # Synchronous sources must not overlap: skip this tick if the
        # previous one is still running.
        if self.running:
            defer.returnValue(None)
        self.running = True

    try:
        event = yield self._get()
        if event:
            self.queueBack(event)
    except Exception as e:
        log.msg("[%s] Unhandled error: %s" % (self.service, e))

    self.running = False
Creates an Event object from the Source configuration
def createEvent(self, state, description, metric, prefix=None,
                hostname=None, aggregation=None, evtime=None):
    """Create an Event object from this source's configuration."""
    # Optional prefix is appended to the configured service name.
    service_name = self.service + "." + prefix if prefix else self.service
    return Event(
        state, service_name, description, metric, self.ttl,
        hostname=hostname or self.hostname,
        aggregation=aggregation,
        evtime=evtime,
        tags=self.tags,
        attributes=self.attributes,
    )
Creates an Event object from the Source configuration
def createLog(self, type, data, evtime=None, hostname=None):
    """Create a log-type Event object from this source's configuration.

    NOTE: the *type* parameter becomes the event's service name; the
    Event's own type keyword is always 'log'.
    """
    return Event(
        None, type, data, 0, self.ttl,
        hostname=hostname or self.hostname,
        evtime=evtime,
        tags=self.tags,
        type='log',
    )
List pending access requests and shared links.
def index():
    """List pending access requests and shared links."""
    query = request.args.get('query', '')
    order = request.args.get('sort', '-created')
    try:
        page = int(request.args.get('page', 1))
        per_page = int(request.args.get('per_page', 20))
    except (TypeError, ValueError):
        abort(404)

    # Delete form
    form = DeleteForm(request.form)
    if form.validate_on_submit():
        link = SecretLink.query_by_owner(current_user).filter_by(
            id=form.link.data).first()
        # first() returns None for a stale or forged link id (or one
        # owned by another user); guard before calling revoke().
        if link is not None and link.revoke():
            flash(_("Shared link revoked."), category='success')
            db.session.commit()

    # Links
    links = SecretLink.query_by_owner(current_user).filter(
        SecretLink.revoked_at.is_(None)
    )

    # Querying
    if query:
        lquery = "%{0}%".format(query)
        links = links.filter(
            SecretLink.title.like(lquery) |
            SecretLink.description.like(lquery)
        )

    # Ordering
    ordering = QueryOrdering(links, ['title', 'created', 'expires_at'], order)
    links = ordering.items()

    # Pending access requests
    requests = AccessRequest.query_by_receiver(current_user).filter_by(
        status=RequestStatus.PENDING).order_by('created')

    return render_template(
        "zenodo_accessrequests/settings/index.html",
        links_pagination=links.paginate(page, per_page=per_page),
        requests=requests,
        query=query,
        order=ordering,
        get_record=get_record,
        form=DeleteForm(),
    )
Accept/reject access request.
def accessrequest(request_id):
    """Accept/reject access request."""
    r = AccessRequest.get_by_receiver(request_id, current_user)
    if not r or r.status != RequestStatus.PENDING:
        abort(404)

    form = ApprovalForm(request.form)

    if form.validate_on_submit():
        if form.accept.data:
            r.accept(message=form.data['message'],
                     expires_at=form.expires_at.data)
            db.session.commit()
            flash(_("Request accepted."))
            return redirect(url_for(".index"))
        elif form.reject.data:
            r.reject(message=form.data['message'])
            db.session.commit()
            flash(_("Request rejected."))
            return redirect(url_for(".index"))

    pid, record = get_record(r.recid)
    return render_template(
        "zenodo_accessrequests/settings/request.html",
        accessrequest=r,
        record=record,
        form=form,
    )
Create a TCP connection to Riemann with automatic reconnection
def createClient(self):
    """Create a TCP connection to Riemann with automatic reconnection."""
    server = self.config.get('server', 'localhost')
    port = self.config.get('port', 5555)
    failover = self.config.get('failover', False)

    self.factory = riemann.RiemannClientFactory(server, failover=failover)

    # With failover enabled, `server` is expected to be a sequence and
    # a random member is chosen as the first host to try.
    initial = random.choice(server) if failover else server

    log.msg('Connecting to Riemann on %s:%s' % (initial, port))

    if self.tls:
        if SSL:
            self.connector = reactor.connectSSL(
                initial, port, self.factory,
                ClientTLSContext(self.key, self.cert))
        else:
            log.msg('[FATAL] SSL support not available!'
                    ' Please install PyOpenSSL. Exiting now')
            reactor.stop()
    else:
        self.connector = reactor.connectTCP(initial, port, self.factory)

    d = defer.Deferred()

    def wait_for_proto():
        # Poll until the factory has a usable protocol instance, then
        # start the flush timer and resolve the deferred.
        if hasattr(self.factory, 'proto') and self.factory.proto:
            self.t.start(self.inter)
            d.callback(None)
        else:
            reactor.callLater(0.01, wait_for_proto)

    wait_for_proto()
    return d
Stop this client.
def stop(self):
    """Stop this client: halt the timer, stop reconnects, disconnect."""
    self.t.stop()
    self.factory.stopTrying()
    self.connector.disconnect()
Clock tick called every self. inter
def tick(self):
    """Clock tick called every self.inter.

    Flushes the queue when backpressure allows; while disconnected,
    optionally expires stale events.
    """
    if self.factory.proto:
        # Check backpressure
        if (self.pressure < 0) or (
                self.factory.proto.pressure <= self.pressure):
            self.emptyQueue()
    elif self.expire:
        # Expire stale events. Rebuild the list instead of the original
        # pop-while-enumerating, which skipped the element immediately
        # after each removed one.
        now = time.time()
        self.events = [e for e in self.events if (now - e.time) <= e.ttl]
Remove all or self. queueDepth events from the queue
def emptyQueue(self):
    """Flush up to self.queueDepth events (or all of them) to Riemann."""
    if not self.events:
        return

    if self.queueDepth and (len(self.events) > self.queueDepth):
        # Take at most queueDepth items off the front of the queue.
        batch = self.events[:self.queueDepth]
        self.events = self.events[self.queueDepth:]
    else:
        batch = self.events
        self.events = []

    if self.allow_nan:
        self.factory.proto.sendEvents(batch)
    else:
        # Drop events without a metric when NaN/None is not allowed.
        self.factory.proto.sendEvents(
            [e for e in batch if e.metric is not None])
Receives a list of events and transmits them to Riemann
def eventsReceived(self, events):
    """Queue a list of events for transmission to Riemann.

    Arguments:
    events -- list of `tensor.objects.Event`
    """
    # Incoming events are dropped once the queue reaches maxsize;
    # a maxsize below 1 means unbounded.
    if (self.maxsize < 1) or (len(self.events) < self.maxsize):
        self.events.extend(events)
Create a UDP connection to Riemann
def createClient(self):
    """Create a UDP connection to Riemann."""
    server = self.config.get('server', '127.0.0.1')
    port = self.config.get('port', 5555)

    def on_resolved(ip):
        # Bind an ephemeral local port for the UDP protocol.
        self.protocol = riemann.RiemannUDP(ip, port)
        self.endpoint = reactor.listenUDP(0, self.protocol)

    d = reactor.resolve(server)
    d.addCallback(on_resolved)
    return d
Sets up HTTP connector and starts queue timer
def createClient(self):
    """Set up the Elasticsearch HTTP client and start the queue timer.

    NOTE(review): the original read 'server'/'port' from config here
    but never used them -- the client is built from self.url (set
    elsewhere). The dead config reads were removed; confirm self.url
    already accounts for those settings.
    """
    self.client = elasticsearch.ElasticSearch(self.url, self.user,
                                              self.password, self.index)
    self.t.start(self.inter)
Clock tick called every self. inter
def tick(self):
    """Clock tick called every self.inter."""
    if not self.events:
        return

    if self.queueDepth and (len(self.events) > self.queueDepth):
        # Flush at most queueDepth events per tick.
        batch = self.events[:self.queueDepth]
        self.events = self.events[self.queueDepth:]
    else:
        batch = self.events
        self.events = []

    try:
        result = yield self.sendEvents(batch)
        if result.get('errors', False):
            # Index errors: log the response and requeue the batch.
            log.msg(repr(result))
            self.events.extend(batch)
    except Exception as e:
        log.msg('Could not connect to elasticsearch ' + str(e))
        self.events.extend(batch)
Adapts an Event object to a Riemann protobuf event Event
def encodeEvent(self, event):
    """Adapt an Event object to a Riemann protobuf Event."""
    pbevent = proto_pb2.Event(
        time=int(event.time),
        state=event.state,
        service=event.service,
        host=event.hostname,
        description=event.description,
        tags=event.tags,
        ttl=event.ttl,
    )

    if event.metric is not None:
        # Integers fill metric_sint64 + metric_f; everything else is
        # coerced to float and fills metric_d + metric_f.
        if isinstance(event.metric, int):
            pbevent.metric_sint64 = event.metric
            pbevent.metric_f = float(event.metric)
        else:
            pbevent.metric_d = float(event.metric)
            pbevent.metric_f = float(event.metric)

    if event.attributes is not None:
        for key, value in event.attributes.items():
            attribute = pbevent.attributes.add()
            attribute.key, attribute.value = key, value

    return pbevent
Encode a list of Tensor events with protobuf
def encodeMessage(self, events):
    """Serialize a list of Tensor events to a protobuf Msg string.

    Only events of type 'riemann' are included.
    """
    riemann_events = [self.encodeEvent(e) for e in events
                      if e._type == 'riemann']
    return proto_pb2.Msg(events=riemann_events).SerializeToString()
Decode a protobuf message into a list of Tensor events
def decodeMessage(self, data):
    """Parse protobuf bytes into a Riemann ``Msg`` object.

    NOTE: the original docstring claimed a list of Tensor events is
    returned, but this actually returns the decoded protobuf message
    itself (callers presumably read its ``events`` field).
    """
    message = proto_pb2.Msg()
    message.ParseFromString(data)
    return message
Send a Tensor Event to Riemann
def sendEvents(self, events):
    """Transmit a batch of Tensor events to Riemann."""
    # Track one unit of backpressure per message sent.
    self.pressure += 1
    self.sendString(self.encodeMessage(events))
Generate preview for URL.
def generate(ctx, url, *args, **kwargs):
    """Generate preview for URL."""
    file_previews = ctx.obj['file_previews']

    # Translate CLI flags into API options, skipping unset ones.
    options = {}
    if kwargs['metadata']:
        options['metadata'] = kwargs['metadata'].split(',')
    if kwargs['width']:
        options.setdefault('size', {})['width'] = kwargs['width']
    if kwargs['height']:
        options.setdefault('size', {})['height'] = kwargs['height']
    if kwargs['format']:
        options['format'] = kwargs['format']

    click.echo(file_previews.generate(url, **options))
Retrieve preview results for ID.
def retrieve(ctx, preview_id, *args, **kwargs):
    """Retrieve preview results for ID."""
    # Docstring typo "Retreive" fixed -- it doubles as the CLI help text.
    file_previews = ctx.obj['file_previews']
    results = file_previews.retrieve(preview_id)
    click.echo(results)
If the client application has a refresh token, it can use it to request a new access token. To ask for a new access token, the client application should send a POST request to https://login.instance_name/services/oauth2/token with the following query parameters: grant_type: Value must be refresh_token for this flow. refresh_token: The refresh token the client application already received. client_id: Consumer key from the remote access application definition. client_secret: Consumer secret from the remote access application definition. format: Expected return format. This parameter is optional. The default is json. Values are: * urlencoded * json * xml e.g. $ curl -i --form grant_type=refresh_token --form refresh_token=<refresh_token> --form client_id=<client_id> --form client_secret=<client_secret> --form format=json https://na1.salesforce.com/services/oauth2/token
def _refresh_access_token(self): """ If the client application has a refresh token, it can use it to send a request for a new access token. To ask for a new access token, the client application should send a POST request to https://login.instance_name/services/oauth2/token with the following query parameters: grant_type: Value must be refresh_token for this flow. refresh_token: The refresh token the client application already received. client_id: Consumer key from the remote access application definition. client_secret: Consumer secret from the remote access application definition. format: Expected return format. This parameter is optional. The default is json. Values are: * urlencoded * json * xml e.g. $ curl -i --form grant_type=refresh_token \ --form refresh_token=<refresh_token> \ --form client_id=<client_id> \ --form client_secret=<client_secret> \ --form format=json \ https://na1.salesforce.com/services/oauth2/token """ resource = "https://na1.salesforce.com/services/oauth2/token" fields = dict(grant_type="refresh_token", refresh_token=self.refresh_token, client_id=self.auth.client_id, client_secret=self.auth.client_secret, format="json") status, data = self._handle_response("POST", resource, fields=fields, refresh_access_token=False) if "access_token" in data: # Update access token self.access_token = data["access_token"] # Notify others via callback if callable(self.access_token_refreshed_callback): self.access_token_refreshed_callback(self.access_token) # Return True, indicating access_token refresehed return True # Return False, indicating access_token not refreshed return False
Send message dicts through r_q and throw explicit errors for pickle problems
def r_q_send(self, msg_dict):
    """Send message dicts through r_q, and throw explicit errors for
    pickle problems.

    ``msg_dict`` is test-pickled before being queued; if everything
    pickles it is put on ``self.r_q`` unchanged.  Otherwise a detailed,
    colorized report of the offending keys (and, for object values,
    their offending attributes) is written to stderr.

    :param msg_dict: message dictionary destined for the result queue
    """
    # Check whether msg_dict can be pickled...
    no_pickle_keys = self.invalid_dict_pickle_keys(msg_dict)
    if not no_pickle_keys:
        self.r_q.put(msg_dict)
    else:
        ## Explicit pickle error handling
        hash_func = md5()
        # BUGFIX: md5.update() requires bytes; passing str(msg_dict)
        # raised TypeError on Python 3 before the report was written.
        hash_func.update(str(msg_dict).encode("utf-8"))
        dict_hash = str(hash_func.hexdigest())[-7:]  # Last 7 digits of hash
        linesep = os.linesep
        sys.stderr.write(
            "{0} {1}r_q_send({2}) Can't pickle this dict:{3} '''{7}{4} {5}{7}{6}''' {7}".format(
                datetime.now(),
                Style.BRIGHT,
                dict_hash,
                Style.RESET_ALL,
                Fore.MAGENTA,
                msg_dict,
                Style.RESET_ALL,
                linesep,
            )
        )
        ## Verbose list of the offending key(s) / object attrs
        ## Send all output to stderr...
        err_frag1 = (
            Style.BRIGHT
            + " r_q_send({0}) Offending dict keys:".format(dict_hash)
            + Style.RESET_ALL
        )
        err_frag2 = Fore.YELLOW + " {0}".format(no_pickle_keys) + Style.RESET_ALL
        err_frag3 = "{0}".format(linesep)
        sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
        for key in sorted(no_pickle_keys):
            sys.stderr.write(
                " msg_dict['{0}']: {1}'{2}'{3}{4}".format(
                    key,
                    Fore.MAGENTA,
                    repr(msg_dict.get(key)),
                    Style.RESET_ALL,
                    linesep,
                )
            )
            if isinstance(msg_dict.get(key), object):
                # Drill into the object to find which attributes fail.
                thisobj = msg_dict.get(key)
                no_pickle_attrs = self.invalid_obj_pickle_attrs(thisobj)
                err_frag1 = (
                    Style.BRIGHT
                    + " r_q_send({0}) Offending attrs:".format(dict_hash)
                    + Style.RESET_ALL
                )
                err_frag2 = (
                    Fore.YELLOW + " {0}".format(no_pickle_attrs) + Style.RESET_ALL
                )
                err_frag3 = "{0}".format(linesep)
                sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
                for attr in no_pickle_attrs:
                    sys.stderr.write(
                        " msg_dict['{0}'].{1}: {2}'{3}'{4}{5}".format(
                            key,
                            attr,
                            Fore.RED,
                            repr(getattr(thisobj, attr)),
                            Style.RESET_ALL,
                            linesep,
                        )
                    )
        # Also list the keys that pickled cleanly, for contrast.
        sys.stderr.write(
            " {0}r_q_send({1}) keys (no problems):{2}{3}".format(
                Style.BRIGHT, dict_hash, Style.RESET_ALL, linesep
            )
        )
        for key in sorted(set(msg_dict.keys()).difference(no_pickle_keys)):
            sys.stderr.write(
                " msg_dict['{0}']: {1}{2}{3}{4}".format(
                    key,
                    Fore.GREEN,
                    repr(msg_dict.get(key)),
                    Style.RESET_ALL,
                    linesep,
                )
            )
Return a list of keys that can't be pickled. Return [] if there are no pickling problems with the values associated with the keys; return the list of offending keys if there are problems.
def invalid_dict_pickle_keys(self, msg_dict):
    """Return a list of keys whose entries cannot be pickled.

    Each key/value pair of ``msg_dict`` is test-pickled; keys whose key
    or value raises during pickling are collected.  An empty list means
    the whole dict is safe to put on a multiprocessing queue.

    :param msg_dict: dictionary destined for a multiprocessing queue
    :return: list of offending keys ([] if everything pickles cleanly)
    """
    no_pickle_keys = list()
    for key, val in msg_dict.items():
        try:
            pickle.dumps(key)
            pickle.dumps(val)
        # BUGFIX: the old clause referenced pickle.UnpickleableError,
        # which only exists in Python 2's cPickle; on Python 3 merely
        # evaluating it raised AttributeError while handling the real
        # error.  PicklingError covers it on both versions (Python 2's
        # UnpickleableError is a PicklingError subclass).
        except (TypeError, pickle.PicklingError):
            no_pickle_keys.append(key)  # This key has an unpicklable value
    return no_pickle_keys
Loop through messages and execute tasks
def message_loop(self, t_q, r_q):
    """Loop through messages and execute tasks.

    Worker main loop: block on the task queue ``t_q``, run each received
    task, and report progress back to the controller via
    :meth:`r_q_send` (``__ACK__`` on receipt, ``__FINISHED__`` on
    completion, ``__ERROR__`` with a serialized traceback on failure).
    The loop exits when a message with ``state == "__DIE__"`` arrives.

    :param t_q: queue of task messages from the controller
    :param r_q: result queue (written via ``self.r_q_send``)
    """
    t_msg = {}
    while t_msg.get("state", "") != "__DIE__":
        try:
            t_msg = t_q.get(True, self.cycle_sleep)  # Poll blocking
            self.task = t_msg.get("task", "")  # __DIE__ has no task
            if self.task != "":
                self.task.task_start = time.time()  # Start the timer
                # Send ACK to the controller who requested work on this task
                self.r_q_send(
                    {"w_id": self.w_id, "task": self.task, "state": "__ACK__"}
                )
                # Update the sleep time with latest recommendations
                self.cycle_sleep = self.task.worker_loop_delay
                # Assign the result of task.run() to task.result
                self.task.result = self.task.run()
                self.task.task_stop = time.time()  # Seconds since epoch
                self.r_q_send(
                    {"w_id": self.w_id, "task": self.task, "state": "__FINISHED__"}
                )  # Ack work finished
                self.task = None
        except Empty:
            # No work arrived within cycle_sleep; just poll again.
            pass
        except Full:
            # Result queue is full; back off briefly before retrying.
            time.sleep(0.1)
        ## Disable extraneous error handling...
        except:
            # NOTE(review): deliberate bare except — any task failure is
            # serialized and reported to the controller instead of
            # killing this worker process.
            if self.task is not None:
                self.task.task_stop = time.time()  # Seconds since epoch
            # Handle all other errors here...
            tb_str = "".join(tb.format_exception(*(sys.exc_info())))
            self.r_q_send(
                {
                    "w_id": self.w_id,
                    "task": self.task,
                    "error": tb_str,
                    "state": "__ERROR__",
                }
            )
    return
Return True if it's time to log.
def log_time(self):
    """Return True when the logging interval has elapsed in hot-loop mode.

    Logging is only performed while ``hot_loop`` is set; otherwise this
    always returns False.
    """
    return bool(self.hot_loop) and self.time_delta >= self.log_interval
Build a log message and reset the stats
def log_message(self):
    """Build a stats log message and reset the stats.

    :return: a three-line summary string (overall task rate, task run
        times, and time spent in queue), each as min/avg/max where
        applicable.  The internal counters are reset as a side effect.
    """
    # Snapshot the (clock-derived) interval once so every rate below is
    # computed against the same value; the old code read it twice and
    # also deep-copied a plain number for no benefit.
    time_delta = self.time_delta
    # ROBUSTNESS: if the logging interval elapsed with no completed
    # tasks, min()/division below would raise — report an idle interval
    # instead of crashing the supervisor loop.
    if not self.exec_times or not self.queue_times:
        self.reset()
        return """Ran 0 tasks, 0.0 tasks/s; {0} workers 0.0% busy""".format(
            self.worker_count
        )
    total_work_time = self.worker_count * time_delta
    time_worked = sum(self.exec_times)
    pct_busy = time_worked / total_work_time * 100.0
    min_task_time = min(self.exec_times)
    avg_task_time = sum(self.exec_times) / len(self.exec_times)
    max_task_time = max(self.exec_times)
    min_queue_time = min(self.queue_times)
    avg_queue_time = sum(self.queue_times) / len(self.queue_times)
    max_queue_time = max(self.queue_times)
    total_tasks = len(self.exec_times)
    avg_task_rate = total_tasks / time_delta
    self.reset()
    task_msg = """Ran {0} tasks, {1} tasks/s; {2} workers {3}% busy""".format(
        total_tasks, round(avg_task_rate, 1), self.worker_count, round(pct_busy, 1)
    )
    task_mam = """    Task run times: {0}/{1}/{2} (min/avg/max)""".format(
        round(min_task_time, 3), round(avg_task_time, 3), round(max_task_time, 3)
    )
    queue_mam = """    Time in queue: {0}/{1}/{2} (min/avg/max)""".format(
        round(min_queue_time, 6), round(avg_queue_time, 6), round(max_queue_time, 6)
    )
    return """{0}\n{1}\n{2}""".format(task_msg, task_mam, queue_mam)
If not in a hot_loop, call supervise() to start the tasks.
def supervise(self):
    """If not in a hot_loop, call supervise() to start the tasks.

    Controller main loop: spawn workers, queue the pending tasks (in
    non-hot-loop mode), and then consume worker state messages
    (``__ACK__`` / ``__FINISHED__`` / ``__ERROR__``) from ``self.r_q``
    until all work is done.  In hot-loop mode, tasks are continuously
    pulled from the controller and results pushed back to it.

    :return: set of finished task objects (non-hot-loop mode)
    """
    self.retval = set([])
    stats = TaskMgrStats(
        worker_count=self.worker_count,
        log_interval=self.log_interval,
        hot_loop=self.hot_loop,
    )

    hot_loop = self.hot_loop
    if self.log_level >= 2:
        logmsg = "TaskMgr.supervise() started {0} workers".format(self.worker_count)
        self.log.info(logmsg)
    self.workers = self.spawn_workers()

    ## Add work
    self.num_tasks = 0
    if not hot_loop:
        if self.log_level >= 2:
            logmsg = "TaskMgr.supervise() received {0} tasks".format(
                len(self.work_todo)
            )
            self.log.info(logmsg)
        for task in self.work_todo:
            self.num_tasks += 1
            if self.log_level >= 2:
                logmsg = "TaskMgr.supervise() queued task: {0}".format(task)
                self.log.info(logmsg)
            self.queue_task(task)

    finished = False
    while not finished:
        try:
            if hot_loop:
                # Calculate the adaptive loop delay
                delay = self.calc_wait_time(stats.exec_times)
                self.queue_tasks_from_controller(delay=delay)  # queue tasks
                time.sleep(delay)

            r_msg = self.r_q.get_nowait()  # __ACK__ or __FINISHED__
            task = r_msg.get("task")
            w_id = r_msg.get("w_id")
            state = r_msg.get("state", "")

            if state == "__ACK__":
                # Worker accepted the task; record the assignment.
                self.worker_assignments[w_id] = task
                self.work_todo.remove(task)
                if self.log_level >= 3:
                    self.log.debug("r_msg: {0}".format(r_msg))
                if self.log_level >= 3:
                    self.log.debug("w_id={0} received task={1}".format(w_id, task))

            elif state == "__FINISHED__":
                now = time.time()
                task_exec_time = task.task_stop - task.task_start
                task_queue_time = now - task.queue_time - task_exec_time
                stats.exec_times.append(task_exec_time)
                stats.queue_times.append(task_queue_time)
                if self.log_level >= 1:
                    self.log.debug(
                        "TaskMgr.work_todo: {0} tasks left".format(
                            len(self.work_todo)
                        )
                    )
                if self.log_level >= 3:
                    self.log.debug("TaskMgr.work_todo: {0}".format(self.work_todo))
                    self.log.debug("r_msg: {0}".format(r_msg))
                if not hot_loop:
                    self.retval.add(task)  # Add result to retval
                    self.worker_assignments.pop(w_id)  # Delete the key
                    finished = self.is_finished()
                else:
                    # Hot loop: hand the result back to the controller.
                    self.controller.to_q.put(task)  # Send to the controller
                    self.worker_assignments.pop(w_id)  # Delete the key

            elif state == "__ERROR__":
                now = time.time()
                task_exec_time = task.task_stop - task.task_start
                task_queue_time = now - task.queue_time - task_exec_time
                stats.exec_times.append(task_exec_time)
                stats.queue_times.append(task_queue_time)
                if self.log_level >= 1:
                    self.log.error("r_msg: {0}".format(r_msg))
                    self.log.error("".join(r_msg.get("error")))
                    self.log.debug(
                        "TaskMgr.work_todo: {0} tasks left".format(
                            len(self.work_todo)
                        )
                    )
                if self.log_level >= 3:
                    self.log.debug("TaskMgr.work_todo: {0}".format(self.work_todo))
                if not hot_loop:
                    if not self.resubmit_on_error:
                        # If task is in work_todo, delete it
                        for tt in self.work_todo:
                            if tt == task:
                                self.work_todo.remove(task)  # Remove task...
                        try:
                            # Delete the worker assignment...
                            self.worker_assignments.pop(w_id)
                        except:
                            pass
                        self.retval.add(task)  # Add result to retval
                    self.respawn_dead_workers()

        except Empty:
            state = "__EMPTY__"
        except Exception as e:
            # BUGFIX: the old code did `raise e(tb_str)`, which *calls*
            # the exception instance and itself raises a confusing
            # TypeError.  Log the formatted traceback and re-raise the
            # original exception instead.
            tb_str = "".join(tb.format_exception(*(sys.exc_info())))
            self.log.error(tb_str)
            raise

        if stats.log_time:
            if self.log_level >= 0:
                self.log.info(stats.log_message)

        # Adaptive loop delay
        delay = self.calc_wait_time(stats.exec_times)
        time.sleep(delay)
        self.respawn_dead_workers()
        finished = self.is_finished()

    if not hot_loop:
        self.kill_workers()
        for w_id, p in self.workers.items():
            p.join()
        ## Log a final stats summary...
        if self.log_level > 0:
            self.log.info(stats.log_message)

    return self.retval
Respawn workers/tasks upon crash.
def respawn_dead_workers(self):
    """Respawn workers / tasks upon crash.

    Scan the worker pool; for each dead process, optionally requeue the
    task it was assigned (when ``resubmit_on_error`` or ``hot_loop`` is
    set) and start a replacement worker process under the same ``w_id``.
    """
    for w_id, p in self.workers.items():
        if not p.is_alive():
            # Queue the task for another worker, if required...
            if self.log_level >= 2:
                self.log.info("Worker w_id {0} died".format(w_id))
            task = self.worker_assignments.get(w_id, {})
            if self.log_level >= 2 and task != {}:
                self.log.info(
                    "Dead worker w_id {0} was assigned task - {1}".format(
                        w_id, task
                    )
                )
            error_suffix = ""
            if task != {}:
                del self.worker_assignments[w_id]
                if self.resubmit_on_error or self.hot_loop:
                    self.work_todo.append(task)
                    self.queue_task(task)
                    if self.log_level >= 2:
                        self.log.info("Resubmitting task - {0}".format(task))
                # BUGFIX: this format string used index {1} with a single
                # argument, which raised IndexError whenever a dead
                # worker had an assigned task.
                error_suffix = " with task={0}".format(task)
            if self.log_level >= 1:
                self.log.debug(
                    "TaskMgr.work_todo: {0} tasks left".format(len(self.work_todo))
                )
            if self.log_level >= 2:
                self.log.info(
                    "Respawning worker - w_id={0}{1}".format(w_id, error_suffix)
                )
            # Replacing the value under an existing key is safe while
            # iterating (the dict size does not change).
            self.workers[w_id] = Process(
                target=Worker,
                args=(w_id, self.t_q, self.r_q, self.worker_cycle_sleep),
            )
            self.workers[w_id].daemon = True
            self.workers[w_id].start()
Return a synchronous version of the Monoprice interface. :param port_url: serial port, e.g. /dev/ttyUSB0. :return: synchronous implementation of the Monoprice interface.
def get_monoprice(port_url):
    """
    Return synchronous version of Monoprice interface
    :param port_url: serial port, i.e. '/dev/ttyUSB0'
    :return: synchronous implementation of Monoprice interface
    """
    # One re-entrant lock shared by all public methods so only a single
    # request/response transaction is on the wire at a time (RLock lets
    # restore_zone() call the other @synchronized setters).
    lock = RLock()

    def synchronized(func):
        # Decorator: serialize calls to `func` behind the shared lock.
        @wraps(func)
        def wrapper(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return wrapper

    class MonopriceSync(Monoprice):
        def __init__(self, port_url):
            # 9600 8N1 with read/write timeouts; opened explicitly after
            # configuration via serial_for_url(do_not_open=True).
            self._port = serial.serial_for_url(port_url, do_not_open=True)
            self._port.baudrate = 9600
            self._port.stopbits = serial.STOPBITS_ONE
            self._port.bytesize = serial.EIGHTBITS
            self._port.parity = serial.PARITY_NONE
            self._port.timeout = TIMEOUT
            self._port.write_timeout = TIMEOUT
            self._port.open()

        def _process_request(self, request: bytes, skip=0):
            """
            Write one request to the amp and read back its reply.

            :param request: request that is sent to the monoprice
            :param skip: number of bytes to skip for end of transmission decoding
            :return: ascii string returned by monoprice
            :raises serial.SerialTimeoutException: if no byte arrives
                within the port timeout
            """
            _LOGGER.debug('Sending "%s"', request)
            # clear any stale bytes from a previous transaction
            self._port.reset_output_buffer()
            self._port.reset_input_buffer()
            # send
            self._port.write(request)
            self._port.flush()
            # receive one byte at a time until the EOL marker arrives
            result = bytearray()
            while True:
                c = self._port.read(1)
                if not c:
                    # read(1) returned nothing within TIMEOUT
                    raise serial.SerialTimeoutException(
                        'Connection timed out! Last received bytes {}'
                        .format([hex(a) for a in result]))
                result += c
                # Stop once we are past the echoed command (`skip` bytes)
                # and the end-of-line marker has been received.
                if len(result) > skip and result[-LEN_EOL:] == EOL:
                    break
            ret = bytes(result)
            _LOGGER.debug('Received "%s"', ret)
            return ret.decode('ascii')

        @synchronized
        def zone_status(self, zone: int):
            # Ignore first 6 bytes as they will contain 3 byte command and 3 bytes of EOL
            return ZoneStatus.from_string(
                self._process_request(_format_zone_status_request(zone), skip=6))

        @synchronized
        def set_power(self, zone: int, power: bool):
            self._process_request(_format_set_power(zone, power))

        @synchronized
        def set_mute(self, zone: int, mute: bool):
            self._process_request(_format_set_mute(zone, mute))

        @synchronized
        def set_volume(self, zone: int, volume: int):
            self._process_request(_format_set_volume(zone, volume))

        @synchronized
        def set_treble(self, zone: int, treble: int):
            self._process_request(_format_set_treble(zone, treble))

        @synchronized
        def set_bass(self, zone: int, bass: int):
            self._process_request(_format_set_bass(zone, bass))

        @synchronized
        def set_balance(self, zone: int, balance: int):
            self._process_request(_format_set_balance(zone, balance))

        @synchronized
        def set_source(self, zone: int, source: int):
            self._process_request(_format_set_source(zone, source))

        @synchronized
        def restore_zone(self, status: ZoneStatus):
            # Replay every mutable attribute from a previously captured
            # ZoneStatus snapshot.
            self.set_power(status.zone, status.power)
            self.set_mute(status.zone, status.mute)
            self.set_volume(status.zone, status.volume)
            self.set_treble(status.zone, status.treble)
            self.set_bass(status.zone, status.bass)
            self.set_balance(status.zone, status.balance)
            self.set_source(status.zone, status.source)

    return MonopriceSync(port_url)
Return an asynchronous version of the Monoprice interface. :param port_url: serial port, e.g. /dev/ttyUSB0. :return: asynchronous implementation of the Monoprice interface.
def get_async_monoprice(port_url, loop):
    """
    Return asynchronous version of Monoprice interface
    :param port_url: serial port, i.e. '/dev/ttyUSB0'
    :param loop: asyncio event loop the serial protocol runs on
    :return: asynchronous implementation of Monoprice interface
    """
    # Serializes the public coroutines; the protocol below additionally
    # serializes raw wire transactions with its own lock.
    lock = asyncio.Lock()

    def locked_coro(coro):
        # Decorator: run the wrapped coroutine while holding `lock`.
        @asyncio.coroutine
        @wraps(coro)
        def wrapper(*args, **kwargs):
            with (yield from lock):
                return (yield from coro(*args, **kwargs))
        return wrapper

    class MonopriceAsync(Monoprice):
        def __init__(self, monoprice_protocol):
            self._protocol = monoprice_protocol

        @locked_coro
        @asyncio.coroutine
        def zone_status(self, zone: int):
            # Ignore first 6 bytes as they will contain 3 byte command and 3 bytes of EOL
            string = yield from self._protocol.send(
                _format_zone_status_request(zone), skip=6)
            return ZoneStatus.from_string(string)

        @locked_coro
        @asyncio.coroutine
        def set_power(self, zone: int, power: bool):
            yield from self._protocol.send(_format_set_power(zone, power))

        @locked_coro
        @asyncio.coroutine
        def set_mute(self, zone: int, mute: bool):
            yield from self._protocol.send(_format_set_mute(zone, mute))

        @locked_coro
        @asyncio.coroutine
        def set_volume(self, zone: int, volume: int):
            yield from self._protocol.send(_format_set_volume(zone, volume))

        @locked_coro
        @asyncio.coroutine
        def set_treble(self, zone: int, treble: int):
            yield from self._protocol.send(_format_set_treble(zone, treble))

        @locked_coro
        @asyncio.coroutine
        def set_bass(self, zone: int, bass: int):
            yield from self._protocol.send(_format_set_bass(zone, bass))

        @locked_coro
        @asyncio.coroutine
        def set_balance(self, zone: int, balance: int):
            yield from self._protocol.send(_format_set_balance(zone, balance))

        @locked_coro
        @asyncio.coroutine
        def set_source(self, zone: int, source: int):
            yield from self._protocol.send(_format_set_source(zone, source))

        @locked_coro
        @asyncio.coroutine
        def restore_zone(self, status: ZoneStatus):
            # Replay every attribute of a captured ZoneStatus snapshot,
            # one wire command at a time.
            yield from self._protocol.send(_format_set_power(status.zone, status.power))
            yield from self._protocol.send(_format_set_mute(status.zone, status.mute))
            yield from self._protocol.send(_format_set_volume(status.zone, status.volume))
            yield from self._protocol.send(_format_set_treble(status.zone, status.treble))
            yield from self._protocol.send(_format_set_bass(status.zone, status.bass))
            yield from self._protocol.send(_format_set_balance(status.zone, status.balance))
            yield from self._protocol.send(_format_set_source(status.zone, status.source))

    class MonopriceProtocol(asyncio.Protocol):
        def __init__(self, loop):
            super().__init__()
            self._loop = loop
            self._lock = asyncio.Lock()
            self._transport = None
            # Set once the serial transport is connected; send() waits on it.
            self._connected = asyncio.Event(loop=loop)
            # Raw bytes received from the amp, consumed by send().
            self.q = asyncio.Queue(loop=loop)

        def connection_made(self, transport):
            self._transport = transport
            self._connected.set()
            _LOGGER.debug('port opened %s', self._transport)

        def data_received(self, data):
            # Called from the event loop; enqueue bytes for send().
            asyncio.ensure_future(self.q.put(data), loop=self._loop)

        @asyncio.coroutine
        def send(self, request: bytes, skip=0):
            yield from self._connected.wait()
            result = bytearray()
            # Only one transaction at a time
            with (yield from self._lock):
                self._transport.serial.reset_output_buffer()
                self._transport.serial.reset_input_buffer()
                # Drain stale bytes left over from a previous
                # (possibly timed-out) transaction.
                while not self.q.empty():
                    self.q.get_nowait()
                self._transport.write(request)
                try:
                    while True:
                        # Accumulate chunks until we are past the echoed
                        # command (`skip` bytes) and see the EOL marker.
                        result += yield from asyncio.wait_for(
                            self.q.get(), TIMEOUT, loop=self._loop)
                        if len(result) > skip and result[-LEN_EOL:] == EOL:
                            ret = bytes(result)
                            _LOGGER.debug('Received "%s"', ret)
                            return ret.decode('ascii')
                except asyncio.TimeoutError:
                    _LOGGER.error(
                        "Timeout during receiving response for command '%s', received='%s'",
                        request, result)
                    raise

    _, protocol = yield from create_serial_connection(
        loop, functools.partial(MonopriceProtocol, loop), port_url, baudrate=9600)
    return MonopriceAsync(protocol)
Compatibility layer for old :class:`SASLInterface` implementations.
def from_reply(cls, state):
    """Compatibility layer for old :class:`SASLInterface` implementations.

    Accepts either a :class:`SASLState` member (``FAILURE``, ``SUCCESS``
    or ``CHALLENGE``), which is returned unchanged, or one of the legacy
    strings ``"failure"``, ``"success"`` and ``"challenge"``, which is
    converted to the corresponding :class:`SASLState` member.

    :raises RuntimeError: for any other value.
    """
    if state in (SASLState.FAILURE, SASLState.SUCCESS, SASLState.CHALLENGE):
        return state
    legacy_names = ("failure", "success", "challenge")
    if state in legacy_names:
        return SASLState(state)
    raise RuntimeError("invalid SASL state", state)
Initiate the SASL handshake and advertise the use of the given mechanism. If payload is not :data:`None`, it will be base64 encoded and sent as the initial client response along with the ``<auth/>`` element.
def initiate(self, mechanism, payload=None):
    """Start the SASL handshake, advertising the use of `mechanism`.

    If `payload` is not :data:`None`, it is base64 encoded and sent as
    the initial client response together with the ``<auth />`` element.

    Return the next state of the state machine as tuple (see
    :class:`SASLStateMachine` for details).
    """
    if self._state != SASLState.INITIAL:
        raise RuntimeError("initiate has already been called")

    try:
        reply, reply_payload = yield from self.interface.initiate(
            mechanism, payload=payload)
    except SASLFailure:
        # A rejected initiation terminates the state machine.
        self._state = SASLState.FAILURE
        raise

    new_state = SASLState.from_reply(reply)
    self._state = new_state
    return new_state, reply_payload
Send a response to the previously received challenge with the given payload. The payload is encoded using base64 and transmitted to the server.
def response(self, payload):
    """
    Send a response to the previously received challenge, with the given
    `payload`. The payload is encoded using base64 and transmitted to
    the server.

    Return the next state of the state machine as tuple (see
    :class:`SASLStateMachine` for details).
    """
    if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE:
        # We previously turned an early (SUCCESS, payload) reply into a
        # simulated final challenge; the mechanism must now answer it
        # with an empty response.
        if payload != b"":
            # XXX: either our mechanism is buggy or the server
            # sent SASLState.SUCCESS before all challenge-response
            # messages defined by the mechanism were sent
            self._state = SASLState.FAILURE
            raise SASLFailure(
                None,
                "protocol violation: mechanism did not"
                " respond with an empty response to a"
                " challenge with final data – this suggests"
                " a protocol-violating early success from the server."
            )
        self._state = SASLState.SUCCESS
        return SASLState.SUCCESS, None

    if self._state != SASLState.CHALLENGE:
        raise RuntimeError(
            "no challenge has been made or negotiation failed")

    try:
        next_state, payload = yield from self.interface.respond(payload)
    except SASLFailure:
        self._state = SASLState.FAILURE
        raise

    next_state = SASLState.from_reply(next_state)

    # unfold the (SASLState.SUCCESS, payload) to a sequence of
    # (SASLState.CHALLENGE, payload), (SASLState.SUCCESS, None) for the SASLMethod
    # to allow uniform treatment of both cases
    if next_state == SASLState.SUCCESS and payload is not None:
        self._state = SASLState.SUCCESS_SIMULATE_CHALLENGE
        return SASLState.CHALLENGE, payload

    self._state = next_state
    return next_state, payload
Abort an initiated SASL authentication process. The expected result state is failure.
def abort(self):
    """Abort an initiated SASL authentication process.

    The expected result state is ``failure``; the machine is moved there
    regardless of how the interface's abort call turns out.
    """
    if self._state == SASLState.INITIAL:
        raise RuntimeError("SASL authentication hasn't started yet")
    if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE:
        raise RuntimeError("SASL message exchange already over")

    try:
        result = yield from self.interface.abort()
        return result
    finally:
        self._state = SASLState.FAILURE
Perform the stringprep mapping step of SASLprep. Operates in-place on a list of unicode characters provided in chars.
def _saslprep_do_mapping(chars): """ Perform the stringprep mapping step of SASLprep. Operates in-place on a list of unicode characters provided in `chars`. """ i = 0 while i < len(chars): c = chars[i] if stringprep.in_table_c12(c): chars[i] = "\u0020" elif stringprep.in_table_b1(c): del chars[i] continue i += 1
Implement the ``trace`` profile specified in :rfc:`4505`.
def trace(string):
    """Implement the ``trace`` profile specified in :rfc:`4505`.

    The profile performs no mapping or normalisation; it only rejects
    prohibited output and enforces the bidi rules, returning the input
    unchanged when it passes.
    """
    prohibited_tables = (
        stringprep.in_table_c21,
        stringprep.in_table_c22,
        stringprep.in_table_c3,
        stringprep.in_table_c4,
        stringprep.in_table_c5,
        stringprep.in_table_c6,
        stringprep.in_table_c8,
        stringprep.in_table_c9,
    )
    check_prohibited_output(string, prohibited_tables)
    check_bidi(string)
    return string
Calculate the byte-wise exclusive or of two :class:`bytes` objects of the same length.
def xor_bytes(a, b):
    """
    Calculate the byte-wise exclusive or of two :class:`bytes` objects
    of the same length.

    :param a: first operand
    :param b: second operand; must have the same length as `a`
    :raises ValueError: if the operands differ in length
    :return: :class:`bytes` of the same length as the inputs
    """
    # Explicit check instead of `assert`: assertions are stripped under
    # `python -O`, which would make map()/zip() silently truncate to the
    # shorter operand.
    if len(a) != len(b):
        raise ValueError("xor_bytes arguments must have equal length")
    return bytes(map(operator.xor, a, b))
Template tag that renders the footer information based on the authenticated user's permissions.
def admin_footer(parser, token):
    """
    Template tag that renders the footer information based on the
    authenticated user's permissions.

    Accepts no arguments; raises ``TemplateSyntaxError`` if any are given.
    """
    # split_contents() doesn't know how to split quoted strings.
    bits = token.split_contents()
    if len(bits) > 1:
        contents = token.contents.split()
        raise base.TemplateSyntaxError(
            '{} tag does not accept any argument(s): {}'.format(
                contents[0],
                ', '.join(contents[1:])
            ))
    return AdminFooterNode()
Builds the parameters needed to present the user with a datatrans payment form.
def build_payment_parameters(amount: Money, client_ref: str) -> PaymentParameters:
    """
    Build the parameters needed to present the user with a datatrans
    payment form.

    :param amount: The amount and currency we want the user to pay
    :param client_ref: A unique reference for this payment
    :return: The parameters needed to display the datatrans form
    """
    amount_value, currency = money_to_amount_and_currency(amount)
    parameters = PaymentParameters(
        merchant_id=web_merchant_id,
        amount=amount_value,
        currency=currency,
        refno=client_ref,
        sign=sign_web(web_merchant_id, amount_value, currency, client_ref),
        use_alias=False,
    )
    logger.info('build-payment-parameters', parameters=parameters)
    return parameters
Builds the parameters needed to present the user with a datatrans form to register a credit card. Contrary to a payment form, datatrans will not show an amount.
def build_register_credit_card_parameters(client_ref: str) -> PaymentParameters:
    """
    Build the parameters needed to present the user with a datatrans
    form to register a credit card.

    Contrary to a payment form, datatrans will not show an amount.

    :param client_ref: A unique reference for this alias capture.
    :return: The parameters needed to display the datatrans form
    """
    amount = 0
    # Datatrans requires a currency even for a zero-amount alias
    # registration, so this arbitrary one is used.
    currency = 'CHF'
    parameters = PaymentParameters(
        merchant_id=web_merchant_id,
        amount=amount,
        currency=currency,
        refno=client_ref,
        sign=sign_web(web_merchant_id, amount, currency, client_ref),
        use_alias=True,
    )
    logger.info('building-payment-parameters', parameters=parameters)
    return parameters
Charges money using datatrans given a previously registered credit card alias.
def pay_with_alias(amount: Money, alias_registration_id: str, client_ref: str) -> Payment:
    """
    Charges money using datatrans, given a previously registered credit card alias.

    :param amount: The amount and currency we want to charge
    :param alias_registration_id: The alias registration to use
    :param client_ref: A unique reference for this charge
    :return: a Payment (either successful or not)
    :raises ValueError: if `amount` is not strictly positive
    """
    if amount.amount <= 0:
        raise ValueError('Pay with alias takes a strictly positive amount')
    alias_registration = AliasRegistration.objects.get(pk=alias_registration_id)
    logger.info('paying-with-alias', amount=amount, client_ref=client_ref,
                alias_registration=alias_registration)
    request_xml = build_pay_with_alias_request_xml(
        amount, client_ref, alias_registration)
    logger.info('sending-pay-with-alias-request', url=datatrans_authorize_url,
                data=request_xml)
    response = requests.post(
        url=datatrans_authorize_url,
        headers={'Content-Type': 'application/xml'},
        data=request_xml)
    logger.info('processing-pay-with-alias-response', response=response.content)
    charge_response = parse_pay_with_alias_response_xml(response.content)
    # Persist the outcome and notify listeners before returning it.
    charge_response.save()
    charge_response.send_signal()
    return charge_response
Both alias registrations and payments are received here. We can differentiate them by looking at the use-alias user parameter (and verifying the amount is 0).
def parse_notification_xml(xml: str) -> Union[AliasRegistration, Payment]:
    """Parse a datatrans webhook notification.

    Both alias registrations and payments are received here; they are
    differentiated by the ``useAlias`` user parameter (alias
    registrations carry an amount of 0).

    :param xml: raw notification XML body
    :return: an unsaved :class:`AliasRegistration` or :class:`Payment`
    :raises ValueError: if the success signature (sign2) does not match
    """
    body = fromstring(xml).find('body')
    transaction = body.find('transaction')
    _user_parameters = transaction.find('userParameters')

    def get_named_parameter(name):
        # Look up a <parameter name="..."> element under userParameters.
        return _user_parameters.find("parameter[@name='" + name + "']")

    def success():
        return transaction.get('status') == 'success'

    def parse_success():
        # From the spec: sign2 is only returned in the success case
        computed_signature = sign_web(body.get('merchantId'),
                                      transaction.find('amount').text,
                                      transaction.find('currency').text,
                                      transaction.find('uppTransactionId').text)
        sign2 = get_named_parameter('sign2').text
        if computed_signature != sign2:
            raise ValueError('sign2 did not match computed signature')
        success = transaction.find('success')
        d = dict(
            response_code=success.find('responseCode').text,
            response_message=success.find('responseMessage').text,
            authorization_code=success.find('authorizationCode').text,
            acquirer_authorization_code=success.find('acqAuthorizationCode').text,
        )
        # Drop absent (None) fields so model defaults apply.
        return {k: v for k, v in d.items() if v is not None}

    def parse_error():
        error = transaction.find('error')
        d = dict(
            error_code=error.find('errorCode').text,
            error_message=error.find('errorMessage').text,
            error_detail=error.find('errorDetail').text)
        acquirer_error_code = get_named_parameter('acqErrorCode')
        if acquirer_error_code is not None:
            d['acquirer_error_code'] = acquirer_error_code.text
        return {k: v for k, v in d.items() if v is not None}

    def parse_common_attributes():
        # Fields shared by alias registrations, payments and charges.
        d = dict(
            transaction_id=transaction.find('uppTransactionId').text,
            merchant_id=body.get('merchantId'),
            client_ref=transaction.get('refno'),
            amount=parse_money(transaction))
        payment_method = transaction.find('pmethod')
        if payment_method is not None:
            d['payment_method'] = payment_method.text
        request_type = transaction.find('reqtype')
        if request_type is not None:
            d['request_type'] = request_type.text
        credit_card_country = get_named_parameter('returnCustomerCountry')
        if credit_card_country is not None:
            d['credit_card_country'] = credit_card_country.text
        expiry_month = get_named_parameter('expm')
        if expiry_month is not None:
            d['expiry_month'] = int(expiry_month.text)
        expiry_year = get_named_parameter('expy')
        if expiry_year is not None:
            d['expiry_year'] = int(expiry_year.text)
        return d

    # End of inner helper functions, we're back inside parse_notification_xml
    use_alias_parameter = get_named_parameter('useAlias')
    if use_alias_parameter is not None and use_alias_parameter.text == 'true':
        # It's an alias registration
        d = dict(parse_common_attributes())
        masked_card_number = get_named_parameter('maskedCC')
        if masked_card_number is not None:
            d['masked_card_number'] = masked_card_number.text
        card_alias = get_named_parameter('aliasCC')
        if card_alias is not None:
            d['card_alias'] = card_alias.text
        if success():
            d['success'] = True
            d.update(parse_success())
        else:
            d['success'] = False
            d.update(parse_error())
        return AliasRegistration(**d)
    else:
        # It's a payment or a charge
        if success():
            d = dict(success=True)
            cardno = get_named_parameter('cardno')
            if cardno is not None:
                d['masked_card_number'] = cardno.text
            d.update(parse_common_attributes())
            d.update(parse_success())
            return Payment(**d)
        else:
            d = dict(success=False)
            d.update(parse_common_attributes())
            d.update(parse_error())
            return Payment(**d)
Return the short application version. For example: 1.0.0.
def short_version(version=None):
    """
    Return the short application version string, e.g. `1.0.0`.

    :param version: optional version tuple; defaults to `__version__`.
    :rtype: str
    """
    parts = version or __version__
    return '.'.join(str(part) for part in parts[:3])
Return the full version number, including rc, beta, etc. tags.
def get_version(version=None):
    """
    Return the full version number, including rc, beta etc. tags.

    For example: `2.0.0a1`

    :param version: optional version tuple; defaults to `__version__`.
    :rtype: str
    """
    v = version or __version__
    base = short_version(v)
    # A 4th element carries the pre-release tag (e.g. 'a1', 'rc2').
    if len(v) == 4:
        return '{0}{1}'.format(base, v[3])
    return base
Refunds (partially or completely) a previously authorized and settled payment. :param amount: the amount and currency to refund; must be positive, in the same currency as the original payment, and must not exceed the amount of the original payment. :param payment_id: the id of the payment to refund. :return: a Refund (either successful or not).
def refund(amount: Money, payment_id: str) -> Refund:
    """
    Refunds (partially or completely) a previously authorized and settled payment.

    :param amount: The amount and currency we want to refund. Must be positive,
        in the same currency as the original payment, and not exceed the
        amount of the original payment.
    :param payment_id: The id of the payment to refund.
    :return: a Refund (either successful or not).
    :raises ValueError: if the amount or referenced payment is not refundable
    """
    # Validate the refund request against the original payment first.
    if amount.amount <= 0:
        raise ValueError('Refund takes a strictly positive amount')
    payment = Payment.objects.get(pk=payment_id)
    if not payment.success:
        raise ValueError('Only successful payments can be refunded')
    if payment.amount.currency != amount.currency:
        raise ValueError('Refund currency must be identical to original payment currency')
    if amount.amount > payment.amount.amount:
        raise ValueError('Refund amount exceeds original payment amount')
    logger.info('refunding-payment', amount=str(amount),
                payment=dict(amount=str(payment.amount),
                             transaction_id=payment.transaction_id,
                             masked_card_number=payment.masked_card_number))
    # Derive a traceable reference for the refund from the payment's ref.
    client_ref = '{}-r'.format(payment.client_ref)
    request_xml = build_refund_request_xml(
        amount=amount,
        original_transaction_id=payment.transaction_id,
        client_ref=client_ref,
        merchant_id=payment.merchant_id)
    logger.info('sending-refund-request', url=datatrans_processor_url,
                data=request_xml)
    response = requests.post(
        url=datatrans_processor_url,
        headers={'Content-Type': 'application/xml'},
        data=request_xml)
    logger.info('processing-refund-response', response=response.content)
    refund_response = parse_refund_response_xml(response.content)
    # Persist the outcome and notify listeners before returning it.
    refund_response.save()
    refund_response.send_signal()
    return refund_response
Construct widget.
def _construct(self):
    '''Construct widget.

    Build the header (location combo box and 'up' button), the central
    splitter (bookmarks list and filesystem table backed by a sorted
    filesystem model) and the footer (cancel and accept buttons).

    '''
    self.setLayout(QtGui.QVBoxLayout())

    # Header: current location plus an 'up one level' button.
    self._headerLayout = QtGui.QHBoxLayout()

    self._locationWidget = QtGui.QComboBox()
    self._headerLayout.addWidget(self._locationWidget, stretch=1)

    self._upButton = QtGui.QToolButton()
    self._upButton.setIcon(QtGui.QIcon(':riffle/icon/up'))
    self._headerLayout.addWidget(self._upButton)

    self.layout().addLayout(self._headerLayout)

    # Content area: bookmarks beside the filesystem listing.
    self._contentSplitter = QtGui.QSplitter()

    self._bookmarksWidget = QtGui.QListView()
    self._contentSplitter.addWidget(self._bookmarksWidget)

    self._filesystemWidget = QtGui.QTableView()
    # Select whole rows, one at a time.
    self._filesystemWidget.setSelectionBehavior(
        self._filesystemWidget.SelectRows
    )
    self._filesystemWidget.setSelectionMode(
        self._filesystemWidget.SingleSelection
    )
    self._filesystemWidget.verticalHeader().hide()

    self._contentSplitter.addWidget(self._filesystemWidget)

    # Present the filesystem model through a sorting proxy.
    proxy = riffle.model.FilesystemSortProxy(self)
    model = riffle.model.Filesystem(
        path=self._root, parent=self, iconFactory=self._iconFactory
    )
    proxy.setSourceModel(model)
    proxy.setDynamicSortFilter(True)

    self._filesystemWidget.setModel(proxy)
    self._filesystemWidget.setSortingEnabled(True)

    # Give the filesystem listing the extra space when resizing.
    self._contentSplitter.setStretchFactor(1, 1)
    self.layout().addWidget(self._contentSplitter)

    # Footer: action buttons pushed to the right.
    self._footerLayout = QtGui.QHBoxLayout()
    self._footerLayout.addStretch(1)

    self._cancelButton = QtGui.QPushButton('Cancel')
    self._footerLayout.addWidget(self._cancelButton)

    self._acceptButton = QtGui.QPushButton('Choose')
    self._footerLayout.addWidget(self._acceptButton)

    self.layout().addLayout(self._footerLayout)
Perform post - construction operations.
def _postConstruction(self):
    '''Perform post-construction operations.

    Apply default widget state, connect signals and set the initial
    location to the configured root.

    '''
    self.setWindowTitle('Filesystem Browser')
    # Default to sorting by the first (name) column.
    self._filesystemWidget.sortByColumn(0, QtCore.Qt.AscendingOrder)

    # TODO: Remove once bookmarks widget implemented.
    self._bookmarksWidget.hide()

    # Accept stays disabled until a valid selection is made
    # (see _onSelectItem).
    self._acceptButton.setDefault(True)
    self._acceptButton.setDisabled(True)

    self._acceptButton.clicked.connect(self.accept)
    self._cancelButton.clicked.connect(self.reject)

    self._configureShortcuts()
    self.setLocation(self._root)

    # Size columns to contents, but stretch the first column to fill.
    self._filesystemWidget.horizontalHeader().setResizeMode(
        QtGui.QHeaderView.ResizeToContents
    )
    self._filesystemWidget.horizontalHeader().setResizeMode(
        0, QtGui.QHeaderView.Stretch
    )

    self._upButton.clicked.connect(self._onNavigateUpButtonClicked)
    self._locationWidget.currentIndexChanged.connect(
        self._onNavigate
    )
    self._filesystemWidget.activated.connect(self._onActivateItem)

    selectionModel = self._filesystemWidget.selectionModel()
    selectionModel.currentRowChanged.connect(self._onSelectItem)
Add keyboard shortcuts to navigate the filesystem.
def _configureShortcuts(self):
    '''Register keyboard shortcuts for navigating the filesystem.'''
    # Backspace navigates up one directory, mirroring the up button.
    shortcut = QtGui.QShortcut(QtGui.QKeySequence('Backspace'), self)
    shortcut.setAutoRepeat(False)
    shortcut.activated.connect(self._onNavigateUpButtonClicked)
    self._upShortcut = shortcut
Handle activation of item in listing.
def _onActivateItem(self, index):
    '''Handle activation of an item in the listing.

    Activating a container item navigates into it; activating a file
    is handled through selection and the accept button instead.

    '''
    item = self._filesystemWidget.model().item(index)
    if isinstance(item, riffle.model.File):
        return

    # Navigating resets any pending acceptance.
    self._acceptButton.setDisabled(True)
    self.setLocation(item.path, interactive=True)
Handle selection of item in listing.
def _onSelectItem(self, selection, previousSelection): '''Handle selection of item in listing.''' self._acceptButton.setEnabled(True) del self._selected[:] item = self._filesystemWidget.model().item(selection) self._selected.append(item.path)
Handle selection of path segment.
def _onNavigate(self, index): '''Handle selection of path segment.''' if index > 0: self.setLocation( self._locationWidget.itemData(index), interactive=True )
Return list of valid *path* segments.
def _segmentPath(self, path):
    '''Return list of valid *path* segments.

    Segments are full path prefixes ordered deepest first, ending with
    the model root path.

    '''
    parts = []
    model = self._filesystemWidget.model()

    # Separate root path from remainder.
    remainder = path
    while True:
        if remainder == model.root.path:
            # Reached the model root; stop collecting.
            break

        if remainder:
            parts.append(remainder)

        head, tail = os.path.split(remainder)
        if head == remainder:
            # Cannot split further (a filesystem root such as '/' or
            # 'C:/'); avoid an infinite loop.
            break

        remainder = head

    # The root path is always the final segment, even when it is the
    # empty string (virtual computer root).
    parts.append(model.root.path)

    return parts
Set current location to *path*.
def setLocation(self, path, interactive=False):
    '''Set current location to *path*.

    *path* must be the same as root or under the root.

    .. note::

        Comparisons are case-sensitive. If you set the root as 'D:/'
        then location can be set as 'D:/folder' *not* 'd:/folder'.

    If *interactive* is True, any error is presented to the user in a
    warning dialog instead of propagating; otherwise exceptions bubble
    up as normal.

    '''
    try:
        self._setLocation(path)
    except Exception as error:
        if not interactive:
            raise

        # Interactive mode: surface the problem to the user.
        dialog = QtGui.QMessageBox(
            QtGui.QMessageBox.Warning,
            'Location is not available',
            '{0} is not accessible.'.format(path),
            QtGui.QMessageBox.Ok,
            self
        )
        dialog.setDetailedText(str(error))
        dialog.exec_()
Set current location to *path*.
def _setLocation(self, path):
    '''Set current location to *path*.

    *path* must be the same as root or under the root.

    .. note::

        Comparisons are case-sensitive. If you set the root as 'D:/'
        then location can be set as 'D:/folder' *not* 'd:/folder'.

    :raises ValueError: If *path* is not the root or under it.

    '''
    model = self._filesystemWidget.model()
    if not path.startswith(model.root.path):
        raise ValueError('Location must be root or under root.')

    # Ensure children for each segment in path are loaded.
    segments = self._segmentPath(path)
    for segment in reversed(segments):
        # Fetch from the root downwards so each segment's item exists
        # before its own children are requested.
        pathIndex = model.pathIndex(segment)
        model.fetchMore(pathIndex)

    self._filesystemWidget.setRootIndex(model.pathIndex(path))

    self._locationWidget.clear()

    # Add history entry for each segment (deepest first).
    for segment in segments:
        index = model.pathIndex(segment)
        if not index.isValid():
            # Root item.
            icon = model.iconFactory.icon(
                riffle.icon_factory.IconType.Computer
            )
            self._locationWidget.addItem(
                icon,
                model.root.path or model.root.name,
                model.root.path
            )
        else:
            icon = model.icon(index)
            self._locationWidget.addItem(icon, segment, segment)

    # Upward navigation only makes sense when not already at the root.
    if self._locationWidget.count() > 1:
        self._upButton.setEnabled(True)
        self._upShortcut.setEnabled(True)
    else:
        self._upButton.setEnabled(False)
        self._upShortcut.setEnabled(False)
Finalize options to be used.
def finalize_options(self):
    '''Resolve final values for resource source and target paths.'''
    source = os.path.join(RESOURCE_PATH, 'resource.qrc')
    self.resource_source_path = source
    self.resource_target_path = RESOURCE_TARGET_PATH
Run build.
def run(self):
    '''Compile the Qt resource file into a Python module via pyside-rcc.

    Skipped entirely on Read The Docs, where PySide is unavailable.

    :raises SystemExit: With a non-zero status when compilation fails,
        so build tooling and CI register the failure.

    '''
    if ON_READ_THE_DOCS:
        # PySide not available.
        return

    try:
        pyside_rcc_command = 'pyside-rcc'

        # On Windows, pyside-rcc is not automatically available on the
        # PATH so try to find it manually.
        if sys.platform == 'win32':
            import PySide
            pyside_rcc_command = os.path.join(
                os.path.dirname(PySide.__file__),
                'pyside-rcc.exe'
            )

        subprocess.check_call([
            pyside_rcc_command,
            '-o',
            self.resource_target_path,
            self.resource_source_path
        ])
    except (subprocess.CalledProcessError, OSError):
        print(
            'Error compiling resource.py using pyside-rcc. Possibly '
            'pyside-rcc could not be found. You might need to manually add '
            'it to your PATH.'
        )
        # Exit with a non-zero status: a bare SystemExit() exits with
        # status 0, which would make a failed build look successful.
        raise SystemExit(1)
Run clean.
def run(self):
    '''Remove generated resource files, then run the standard clean.'''
    relative_resource_path = os.path.relpath(
        RESOURCE_TARGET_PATH, ROOT_PATH
    )

    # Remove both the generated module and its compiled bytecode,
    # warning about whichever is absent.
    for target in (relative_resource_path, relative_resource_path + 'c'):
        if os.path.exists(target):
            os.remove(target)
        else:
            distutils.log.warn(
                '\'{0}\' does not exist -- can\'t clean it'
                .format(target)
            )

    CleanCommand.run(self)
Return appropriate :py:class:`Item` instance for *path*.
def ItemFactory(path):
    '''Return appropriate :py:class:`Item` instance for *path*.

    If *path* is null then return Computer root.

    '''
    if not path:
        return Computer()

    # Check order matters: a mount point is also a directory, so file
    # and mount checks must come before the directory check.
    for predicate, itemClass in (
        (os.path.isfile, File),
        (os.path.ismount, Mount),
        (os.path.isdir, Directory)
    ):
        if predicate(path):
            return itemClass(path)

    raise ValueError('Could not determine correct type for path: {0}'
                     .format(path))
Add *item* as child of this item.
def addChild(self, item):
    '''Add *item* as child of this item.

    If *item* already has a different parent it is detached from that
    parent first. Adding an item that is already a child of this item
    is a no-op, so ``children`` never accumulates duplicate entries.

    '''
    if item.parent is self:
        # Already parented here; appending again would create a
        # duplicate entry in self.children.
        return

    if item.parent:
        item.parent.removeChild(item)

    self.children.append(item)
    item.parent = self
Fetch and return new children.
def fetchChildren(self):
    '''Fetch and return any newly available children.

    Returns an empty list once fetching is exhausted (i.e. when
    :py:meth:`canFetchMore` is False).

    .. note::

        It is the caller's responsibility to add each fetched child to
        this parent if desired using :py:meth:`Item.addChild`.

    '''
    if not self.canFetchMore():
        return []

    fetched = self._fetchChildren()
    self._fetched = True
    return fetched
Reload children.
def refetch(self):
    '''Discard current children and mark this item fetchable again.'''
    # Iterate a copy since removeChild mutates self.children.
    for existingChild in list(self.children):
        self.removeChild(existingChild)

    # Allow fetchChildren to repopulate on the next request.
    self._fetched = False
Fetch and return new child items.
def _fetchChildren(self):
    '''Return a Mount item for each drive available on the system.'''
    return [
        Mount(os.path.normpath(drive.canonicalFilePath()))
        for drive in QDir.drives()
    ]
Fetch and return new child items.
def _fetchChildren(self):
    '''Fetch and return new child items for this directory.

    Frame sequences are grouped into collection items; entries whose
    type cannot be determined are silently skipped.

    '''
    # List paths under this directory.
    entries = [
        os.path.normpath(os.path.join(self.path, name))
        for name in os.listdir(self.path)
    ]

    # Handle collections; 'standalone' holds non-sequence paths.
    collections, standalone = clique.assemble(
        entries, [clique.PATTERNS['frames']]
    )

    children = []
    for entry in standalone:
        try:
            children.append(ItemFactory(entry))
        except ValueError:
            # Unrecognised path type; skip it.
            pass

    children.extend(Collection(collection) for collection in collections)

    return children
Fetch and return new child items.
def _fetchChildren(self):
    '''Fetch and return a child item for each member of the collection.

    Members whose type cannot be determined are silently skipped.

    '''
    children = []
    for memberPath in self._collection:
        try:
            children.append(ItemFactory(memberPath))
        except ValueError:
            # Unrecognised path type; skip it.
            pass

    return children
Return number of children *parent* index has.
def rowCount(self, parent):
    '''Return number of children *parent* index has.'''
    # Only the first column carries child rows in this model.
    if parent.column() > 0:
        return 0

    item = parent.internalPointer() if parent.isValid() else self.root
    return len(item.children)
Return index for *row* and *column* under *parent*.
def index(self, row, column, parent):
    '''Return index for *row* and *column* under *parent*.'''
    if not self.hasIndex(row, column, parent):
        return QModelIndex()

    parentItem = parent.internalPointer() if parent.isValid() else self.root

    try:
        childItem = parentItem.children[row]
    except IndexError:
        # Row outside the fetched children; return an invalid index.
        return QModelIndex()

    return self.createIndex(row, column, childItem)
Return index of item with *path*.
def pathIndex(self, path):
    '''Return index of item with *path*.

    Return an invalid index when *path* is the root path itself, falls
    outside the root, or cannot be matched against fetched children.

    '''
    if path == self.root.path:
        # The root is represented by an invalid (top-level) index.
        return QModelIndex()

    if not path.startswith(self.root.path):
        # Outside of this model's tree.
        return QModelIndex()

    # Split *path* into name components relative to the root.
    parts = []
    while True:
        if path == self.root.path:
            break

        head, tail = os.path.split(path)
        if head == path:
            # Reached a filesystem root (e.g. '/' or a drive); keep it
            # as a component and stop to avoid an infinite loop.
            if path:
                parts.append(path)
            break

        parts.append(tail)
        path = head

    parts.reverse()
    if parts:
        # Walk down from the root, matching each component by name
        # against already-fetched children.
        item = self.root
        count = 0
        for count, part in enumerate(parts):
            matched = False
            for child in item.children:
                if child.name == part:
                    item = child
                    matched = True
                    break

            if not matched:
                # Component not found amongst fetched children.
                break

        if count + 1 == len(parts):
            # Every component matched; *item* is the target.
            return self.createIndex(item.row, 0, item)

    return QModelIndex()