| signature (string, 8-3.44k chars) | body (string, 0-1.41M chars) | docstring (string, 1-122k chars) | id (string, 5-17 chars) |
|---|---|---|---|
@classmethod<EOL><INDENT>def generate_mont_priv(cls):<DEDENT>
|
return bytes(cls._generate_mont_priv())<EOL>
|
Return a Montgomery private key to be used with XEdDSA.
:returns: The private key as a bytes-like object with length MONT_PRIV_KEY_SIZE.
|
f11665:c0:m1
|
@staticmethod<EOL><INDENT>def _generate_mont_priv():<DEDENT>
|
raise NotImplementedError<EOL>
|
Return a Montgomery private key to be used with XEdDSA.
:returns: The private key as a bytearray with length MONT_PRIV_KEY_SIZE.
|
f11665:c0:m2
|
@classmethod<EOL><INDENT>def mont_pub_from_mont_priv(cls, mont_priv):<DEDENT>
|
if not isinstance(mont_priv, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(mont_priv) != cls.MONT_PRIV_KEY_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return bytes(cls._mont_pub_from_mont_priv(bytearray(mont_priv)))<EOL>
|
Restore the Montgomery public key from a Montgomery private key.
:param mont_priv: A bytes-like object encoding the private key with length
MONT_PRIV_KEY_SIZE.
:returns: A bytes-like object encoding the public key with length
MONT_PUB_KEY_SIZE.
|
f11665:c0:m3
|
@staticmethod<EOL><INDENT>def _mont_pub_from_mont_priv(mont_priv):<DEDENT>
|
raise NotImplementedError<EOL>
|
Restore the Montgomery public key from a Montgomery private key.
:param mont_priv: A bytearray encoding the private key with length
MONT_PRIV_KEY_SIZE.
:returns: A bytearray encoding the public key with length MONT_PUB_KEY_SIZE.
|
f11665:c0:m4
|
@classmethod<EOL><INDENT>def mont_priv_to_ed_pair(cls, mont_priv):<DEDENT>
|
if not isinstance(mont_priv, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(mont_priv) != cls.MONT_PRIV_KEY_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>ed_priv, ed_pub = cls._mont_priv_to_ed_pair(bytearray(mont_priv))<EOL>return bytes(ed_priv), bytes(ed_pub)<EOL>
|
Derive a Twisted Edwards key pair from the given Montgomery private key.
:param mont_priv: A bytes-like object encoding the private key with length
MONT_PRIV_KEY_SIZE.
:returns: A tuple of bytes-like objects encoding the private key with length
ED_PRIV_KEY_SIZE and the public key with length ED_PUB_KEY_SIZE.
|
f11665:c0:m5
|
@staticmethod<EOL><INDENT>def _mont_priv_to_ed_pair(mont_priv):<DEDENT>
|
raise NotImplementedError<EOL>
|
Derive a Twisted Edwards key pair from the given Montgomery private key.
:param mont_priv: A bytearray encoding the private key with length
MONT_PRIV_KEY_SIZE.
:returns: A tuple of bytearrays encoding the private key with length
ED_PRIV_KEY_SIZE and the public key with length ED_PUB_KEY_SIZE.
|
f11665:c0:m6
|
@classmethod<EOL><INDENT>def mont_pub_to_ed_pub(cls, mont_pub):<DEDENT>
|
if not isinstance(mont_pub, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(mont_pub) != cls.MONT_PUB_KEY_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return bytes(cls._mont_pub_to_ed_pub(bytearray(mont_pub)))<EOL>
|
Derive a Twisted Edwards public key from the given Montgomery public key.
:param mont_pub: A bytes-like object encoding the public key with length
MONT_PUB_KEY_SIZE.
:returns: A bytes-like object encoding the public key with length ED_PUB_KEY_SIZE.
|
f11665:c0:m7
|
@staticmethod<EOL><INDENT>def _mont_pub_to_ed_pub(mont_pub):<DEDENT>
|
raise NotImplementedError<EOL>
|
Derive a Twisted Edwards public key from the given Montgomery public key.
:param mont_pub: A bytearray encoding the public key with length
MONT_PUB_KEY_SIZE.
:returns: A bytearray encoding the public key with length ED_PUB_KEY_SIZE.
|
f11665:c0:m8
|
def sign(self, data, nonce = None):
|
cls = self.__class__<EOL>if not self.__mont_priv:<EOL><INDENT>raise MissingKeyException(<EOL>"<STR_LIT>"<EOL>)<EOL><DEDENT>if not isinstance(data, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if nonce == None:<EOL><INDENT>nonce = os.urandom(<NUM_LIT:64>)<EOL><DEDENT>if not isinstance(nonce, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(nonce) != <NUM_LIT:64>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>ed_priv, ed_pub = cls._mont_priv_to_ed_pair(bytearray(self.__mont_priv))<EOL>return bytes(cls._sign(<EOL>bytearray(data),<EOL>bytearray(nonce),<EOL>ed_priv,<EOL>ed_pub<EOL>))<EOL>
|
Sign data using the Montgomery private key stored by this XEdDSA instance.
:param data: A bytes-like object containing the data to sign.
:param nonce: A bytes-like object with length 64 or None. If the nonce
parameter is None, a new nonce is generated and used.
:returns: A bytes-like object encoding the signature with length SIGNATURE_SIZE.
:raises MissingKeyException: If the Montgomery private key is not available.
|
f11665:c0:m9
|
@staticmethod<EOL><INDENT>def _sign(data, nonce, ed_priv, ed_pub):<DEDENT>
|
raise NotImplementedError<EOL>
|
Sign data using the given Twisted Edwards key pair.
:param data: A bytearray containing the data to sign.
:param nonce: A bytearray with length 64.
:param ed_priv: A bytearray encoding the private key with length ED_PRIV_KEY_SIZE.
:param ed_pub: A bytearray encoding the public key with length ED_PUB_KEY_SIZE.
:returns: A bytearray encoding the signature with length SIGNATURE_SIZE.
|
f11665:c0:m10
|
def verify(self, data, signature):
|
cls = self.__class__<EOL>if not isinstance(data, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if not isinstance(signature, bytes):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(signature) != cls.SIGNATURE_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return cls._verify(<EOL>bytearray(data),<EOL>bytearray(signature),<EOL>cls._mont_pub_to_ed_pub(bytearray(self.__mont_pub))<EOL>)<EOL>
|
Verify signed data using the Montgomery public key stored by this XEdDSA instance.
:param data: A bytes-like object containing the data that was signed.
:param signature: A bytes-like object encoding the signature with length
SIGNATURE_SIZE.
:returns: A boolean indicating whether the signature was valid or not.
|
f11665:c0:m11
|
@staticmethod<EOL><INDENT>def _verify(data, signature, ed_pub):<DEDENT>
|
raise NotImplementedError<EOL>
|
Verify signed data using the given Twisted Edwards public key.
:param data: A bytearray containing the data that was signed.
:param signature: A bytearray encoding the signature with length SIGNATURE_SIZE.
:param ed_pub: A bytearray encoding the public key with length ED_PUB_KEY_SIZE.
:returns: A boolean indicating whether the signature was valid or not.
|
f11665:c0:m12
|
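
The rows above describe an abstract XEdDSA interface: class methods such as ``generate_mont_priv`` and ``mont_pub_from_mont_priv``, plus instance-level ``sign``/``verify`` that delegate to underscore-prefixed hooks implemented by concrete backends. A minimal sketch of exercising that documented surface is shown below; the constructor keywords of the concrete subclass are assumptions, not taken from the rows.

```python
def roundtrip_signature(xeddsa_cls, data):
    """Hedged sketch: only generate_mont_priv, mont_pub_from_mont_priv, sign and
    verify are documented above; the constructor keywords are assumptions."""
    # Generate a Montgomery private key (bytes of length MONT_PRIV_KEY_SIZE).
    mont_priv = xeddsa_cls.generate_mont_priv()
    mont_pub = xeddsa_cls.mont_pub_from_mont_priv(mont_priv)

    # The rows only show that instances hold the Montgomery keys internally.
    signer = xeddsa_cls(mont_priv=mont_priv, mont_pub=mont_pub)

    signature = signer.sign(data)          # a fresh 64-byte nonce is generated when omitted
    return signer.verify(data, signature)  # True if the signature verifies
```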
def call(func, max_attempts=None, exceptions=Exception, wait=<NUM_LIT:0.0>,<EOL>cleanup_hook=None, pre_retry_hook=None):
|
<EOL>_assert_callable(func, allow_none=False)<EOL>_assert_callable(cleanup_hook, allow_none=True)<EOL>_assert_callable(pre_retry_hook, allow_none=True)<EOL>if not (max_attempts is None or max_attempts >= <NUM_LIT:1>):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>wait_func = wait if type(wait) not in [int, float] else lambda _: wait<EOL>_assert_callable(wait_func, allow_none=False)<EOL>def log_failed_attempt(attempt, error):<EOL><INDENT>if max_attempts is None:<EOL><INDENT>nr_display = '<STR_LIT>'.format(attempt)<EOL><DEDENT>else:<EOL><INDENT>nr_display = '<STR_LIT>'.format(attempt, max_attempts)<EOL><DEDENT>logger.debug('<STR_LIT>'<EOL>.format(nr=nr_display, func=func, msg=error))<EOL><DEDENT>for attempt, f in enumerate(_repeat(func, max_attempts), start=<NUM_LIT:1>):<EOL><INDENT>try:<EOL><INDENT>return f()<EOL><DEDENT>except exceptions as e:<EOL><INDENT>log_failed_attempt(attempt=attempt, error=e)<EOL>if cleanup_hook is not None:<EOL><INDENT>cleanup_hook()<EOL><DEDENT>if attempt == max_attempts:<EOL><INDENT>raise<EOL><DEDENT>if wait:<EOL><INDENT>waittime = wait_func(attempt)<EOL>time.sleep(waittime)<EOL><DEDENT>if pre_retry_hook is not None:<EOL><INDENT>pre_retry_hook()<EOL><DEDENT><DEDENT><DEDENT>
|
:param func (callable):
The function to retry. No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
:param max_attempts:
Any integer number to limit the maximum number of attempts.
Set to None for unlimited retries.
:param exceptions:
A tuple of exceptions that should result in a retry. Catches
everything derived from 'Exception' by default.
:param wait:
This can be an integer / float to specify the wait time in seconds
before the next attempt. You can also pass a function which accepts
a single argument 'attempt'.
:param cleanup_hook:
Can be set to a callable and will be called after an exception is
raised from calling `func`.
No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
:param pre_retry_hook:
Can be set to any callable that will be called before `func`
is retried.
No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
If `wait` is set, the wait time elapses before `pre_retry_hook` is
called.
Exceptions that are raised when calling this hook are not caught.
:returns:
The result of calling the given `func`.
:raises:
Any exception which is
- not in the given `exceptions`
- raised in `pre_retry_hook` or in `cleanup_hook`
- raised in the last attempt at calling `func`
|
f11670:m2
|
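
The docstring above describes retry-with-hooks semantics: call ``func`` up to ``max_attempts`` times, catch ``exceptions``, run ``cleanup_hook`` after a failure, optionally sleep (fixed or per-attempt wait), then run ``pre_retry_hook`` before the next attempt. A minimal self-contained sketch of those semantics (my own names, not the library code):

```python
import time

def retry_call(func, max_attempts=None, exceptions=Exception, wait=0.0,
               cleanup_hook=None, pre_retry_hook=None):
    """Minimal sketch of the documented retry semantics (not the library code)."""
    wait_func = wait if callable(wait) else (lambda _attempt: wait)
    attempt = 0
    while True:
        attempt += 1
        try:
            return func()
        except exceptions:
            if cleanup_hook is not None:
                cleanup_hook()
            if max_attempts is not None and attempt >= max_attempts:
                raise  # last attempt: re-raise the original exception
            delay = wait_func(attempt)
            if delay:
                time.sleep(delay)
            if pre_retry_hook is not None:
                pre_retry_hook()

# Example: up to 5 attempts with a linearly growing back-off.
# retry_call(lambda: flaky_fetch(), max_attempts=5, wait=lambda attempt: 0.5 * attempt)
```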
def switch_schema(task, kwargs, **kw):
|
<EOL>from .compat import get_public_schema_name, get_tenant_model<EOL>old_schema = (connection.schema_name, connection.include_public_schema)<EOL>setattr(task, '<STR_LIT>', old_schema)<EOL>schema = (<EOL>get_schema_name_from_task(task, kwargs) or<EOL>get_public_schema_name()<EOL>)<EOL>if connection.schema_name == schema:<EOL><INDENT>return<EOL><DEDENT>if connection.schema_name != get_public_schema_name():<EOL><INDENT>connection.set_schema_to_public()<EOL><DEDENT>if schema == get_public_schema_name():<EOL><INDENT>return<EOL><DEDENT>tenant = get_tenant_model().objects.get(schema_name=schema)<EOL>connection.set_tenant(tenant, include_public=True)<EOL>
|
Switches the schema of the task before it is run.
|
f11676:m1
|
def restore_schema(task, **kwargs):
|
from .compat import get_public_schema_name<EOL>schema_name = get_public_schema_name()<EOL>include_public = True<EOL>if hasattr(task, '<STR_LIT>'):<EOL><INDENT>schema_name, include_public = task._old_schema<EOL><DEDENT>if connection.schema_name == schema_name:<EOL><INDENT>return<EOL><DEDENT>connection.set_schema(schema_name, include_public=include_public)<EOL>
|
Switches the schema back to the one from before running the task.
|
f11676:m2
|
def make_request(access_token, client_func, endpoint, urlargs=None, data=None,<EOL>is_json=True, code=None, headers=None,<EOL>follow_redirects=False):
|
urlargs = urlargs or {}<EOL>urlargs['<STR_LIT>'] = access_token<EOL>if headers is None:<EOL><INDENT>headers = [('<STR_LIT>', '<STR_LIT:application/json>')] if is_json else []<EOL><DEDENT>if data is not None:<EOL><INDENT>request_args = dict(<EOL>data=json.dumps(data) if is_json else data,<EOL>headers=headers,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>request_args = {}<EOL><DEDENT>url = url_for(endpoint, **urlargs)<EOL>response = client_func(<EOL>url,<EOL>follow_redirects=follow_redirects,<EOL>**request_args<EOL>)<EOL>if code is not None:<EOL><INDENT>assert code == response.status_code<EOL><DEDENT>return response<EOL>
|
Make a request to the API endpoint.
Ensures requests look like they arrive on CFG_SITE_SECURE_URL.
The header "Content-Type: application/json" is added if the parameter
is_json is True.
:param endpoint: Endpoint passed to url_for.
:param urlargs: Keyword args passed to url_for
:param data: Request body, either as a dictionary if ``is_json`` is
True, or as a string if ``is_json`` is False
:param headers: List of headers for the request
:param code: Assert response status code
:param follow_redirects: Whether to follow redirects.
|
f11690:m0
|
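
A hedged usage sketch of the test helper documented above: the endpoint name, status codes and token value are placeholders, and the calls assume a Flask test client plus an active application context (``url_for`` is used internally), with ``make_request`` in scope.

```python
def exercise_api(client, access_token):
    # GET with an asserted status code; access_token is injected into the URL args.
    make_request(access_token, client.get, 'myblueprint.myendpoint', code=200)

    # POST with a JSON body: "Content-Type: application/json" is added because
    # is_json defaults to True.
    return make_request(
        access_token, client.post, 'myblueprint.myendpoint',
        data={'hello': 'world'}, code=201,
    )
```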
def add_link_header(response, links):
|
if links is not None:<EOL><INDENT>response.headers.extend({<EOL>'<STR_LIT>': '<STR_LIT:U+002CU+0020>'.join([<EOL>'<STR_LIT>'.format(l, r) for r, l in links.items()])<EOL>})<EOL><DEDENT>
|
Add a Link HTTP header to a REST response.
:param response: REST response instance.
:param links: Dictionary of links.
|
f11697:m0
|
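
The format string used by ``add_link_header`` above is elided, so the sketch below assumes the conventional RFC 8288 form ``<url>; rel="relation"`` and shows how a ``{relation: url}`` mapping (like the ``{'self': url_for(...)}`` dict passed by the ``make_response`` row that follows) becomes a Link header value.

```python
def format_link_header(links):
    """Build a Link header value from a {relation: url} mapping (assumed format)."""
    return ', '.join('<{url}>; rel="{rel}"'.format(url=url, rel=rel)
                     for rel, url in links.items())

# format_link_header({'self': 'https://example.org/hooks/1/events/2'})
# -> '<https://example.org/hooks/1/events/2>; rel="self"'
```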
def make_response(event):
|
code, message = event.status<EOL>response = jsonify(**event.response)<EOL>response.headers['<STR_LIT>'] = event.receiver_id<EOL>response.headers['<STR_LIT>'] = event.id<EOL>if message:<EOL><INDENT>response.headers['<STR_LIT>'] = message<EOL><DEDENT>add_link_header(response, {'<STR_LIT>': url_for(<EOL>'<STR_LIT>', receiver_id=event.receiver_id, event_id=event.id,<EOL>_external=True<EOL>)})<EOL>return response, code<EOL>
|
Make a response from webhook event.
|
f11697:m1
|
def error_handler(f):
|
@wraps(f)<EOL>def inner(*args, **kwargs):<EOL><INDENT>try:<EOL><INDENT>return f(*args, **kwargs)<EOL><DEDENT>except ReceiverDoesNotExist:<EOL><INDENT>return jsonify(<EOL>status=<NUM_LIT>,<EOL>description='<STR_LIT>'<EOL>), <NUM_LIT><EOL><DEDENT>except InvalidPayload as e:<EOL><INDENT>return jsonify(<EOL>status=<NUM_LIT>,<EOL>description='<STR_LIT>'<EOL>'<STR_LIT>' % e.args[<NUM_LIT:0>]<EOL>), <NUM_LIT><EOL><DEDENT>except WebhooksError:<EOL><INDENT>return jsonify(<EOL>status=<NUM_LIT>,<EOL>description='<STR_LIT>'<EOL>), <NUM_LIT><EOL><DEDENT><DEDENT>return inner<EOL>
|
Return a JSON payload and an appropriate status code on exception.
|
f11697:m2
|
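
The ``error_handler`` row above wraps a view and translates domain exceptions into a JSON body plus an HTTP status code. A framework-agnostic sketch of that decorator pattern; the exception classes here are local stand-ins for the package's elided ones, and the returned dicts stand in for ``jsonify`` responses.

```python
import functools

# Stand-ins for the package's exceptions (the real classes are elided above).
class ReceiverDoesNotExist(Exception):
    pass

class WebhooksError(Exception):
    pass

def json_error_handler(f):
    """Sketch of the decorator pattern above: run the wrapped view and translate
    known exceptions into a (JSON-serializable payload, HTTP status) pair."""
    @functools.wraps(f)
    def inner(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ReceiverDoesNotExist:
            return {'status': 404, 'description': 'Receiver does not exist.'}, 404
        except WebhooksError as e:
            return {'status': 500, 'description': str(e)}, 500
    return inner
```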
@require_api_auth()<EOL><INDENT>@require_oauth_scopes('<STR_LIT>')<EOL>@error_handler<EOL>def post(self, receiver_id=None):<DEDENT>
|
try:<EOL><INDENT>user_id = request.oauth.access_token.user_id<EOL><DEDENT>except AttributeError:<EOL><INDENT>user_id = current_user.get_id()<EOL><DEDENT>event = Event.create(<EOL>receiver_id=receiver_id,<EOL>user_id=user_id<EOL>)<EOL>db.session.add(event)<EOL>db.session.commit()<EOL>event.process()<EOL>db.session.commit()<EOL>return make_response(event)<EOL>
|
Handle POST request.
|
f11697:c0:m0
|
def options(self, receiver_id=None):
|
abort(<NUM_LIT>)<EOL>
|
Handle OPTIONS request.
|
f11697:c0:m1
|
@staticmethod<EOL><INDENT>def _get_event(receiver_id, event_id):<DEDENT>
|
event = Event.query.filter_by(<EOL>receiver_id=receiver_id, id=event_id<EOL>).first_or_404()<EOL>try:<EOL><INDENT>user_id = request.oauth.access_token.user_id<EOL><DEDENT>except AttributeError:<EOL><INDENT>user_id = current_user.get_id()<EOL><DEDENT>if event.user_id != int(user_id):<EOL><INDENT>abort(<NUM_LIT>)<EOL><DEDENT>return event<EOL>
|
Find event and check access rights.
|
f11697:c1:m0
|
@require_api_auth()<EOL><INDENT>@require_oauth_scopes('<STR_LIT>')<EOL>@error_handler<EOL>def get(self, receiver_id=None, event_id=None):<DEDENT>
|
event = self._get_event(receiver_id, event_id)<EOL>return make_response(event)<EOL>
|
Handle GET request.
|
f11697:c1:m1
|
@require_api_auth()<EOL><INDENT>@require_oauth_scopes('<STR_LIT>')<EOL>@error_handler<EOL>def delete(self, receiver_id=None, event_id=None):<DEDENT>
|
event = self._get_event(receiver_id, event_id)<EOL>event.delete()<EOL>db.session.commit()<EOL>return make_response(event)<EOL>
|
Handle DELETE request.
|
f11697:c1:m2
|
def __init__(self, app, entry_point_group=None):
|
self.app = app<EOL>self.receivers = {}<EOL>if entry_point_group:<EOL><INDENT>self.load_entry_point_group(entry_point_group)<EOL><DEDENT>
|
Initialize state.
|
f11698:c0:m0
|
def register(self, receiver_id, receiver):
|
assert receiver_id not in self.receivers<EOL>self.receivers[receiver_id] = receiver(receiver_id)<EOL>
|
Register a receiver.
|
f11698:c0:m1
|
def unregister(self, receiver_id):
|
del self.receivers[receiver_id]<EOL>
|
Unregister a receiver by its id.
|
f11698:c0:m2
|
def load_entry_point_group(self, entry_point_group):
|
for ep in pkg_resources.iter_entry_points(group=entry_point_group):<EOL><INDENT>self.register(ep.name, ep.load())<EOL><DEDENT>
|
Load actions from an entry point group.
|
f11698:c0:m3
|
def __init__(self, app=None, **kwargs):
|
if app:<EOL><INDENT>self.init_app(app, **kwargs)<EOL><DEDENT>
|
Extension initialization.
|
f11698:c1:m0
|
def init_app(self, app, entry_point_group='<STR_LIT>'):
|
self.init_config(app)<EOL>state = _WebhooksState(app, entry_point_group=entry_point_group)<EOL>self._state = app.extensions['<STR_LIT>'] = state<EOL>
|
Flask application initialization.
|
f11698:c1:m1
|
def init_config(self, app):
|
app.config.setdefault(<EOL>'<STR_LIT>',<EOL>app.config.get('<STR_LIT>',<EOL>'<STR_LIT>'))<EOL>for k in dir(config):<EOL><INDENT>if k.startswith('<STR_LIT>'):<EOL><INDENT>app.config.setdefault(k, getattr(config, k))<EOL><DEDENT><DEDENT>
|
Initialize configuration.
|
f11698:c1:m2
|
def delete_cached_json_for(request):
|
if _FLASK_CURRENT_VERSION < _FLASK_VERSION_WITH_BUG:<EOL><INDENT>if hasattr(request, '<STR_LIT>'):<EOL><INDENT>delattr(request, '<STR_LIT>')<EOL><DEDENT><DEDENT>
|
Delete `_cached_json` attribute for the given request.
Bug workaround to delete `_cached_json` attribute when using Flask < 0.12.
More details: https://github.com/pallets/flask/issues/2087
Note that starting from Flask 1.0, the private `_cached_json` attribute
has been changed in the Flask package, and this code will fail.
|
f11700:m0
|
def get_hmac(message):
|
key = current_app.config['<STR_LIT>']<EOL>hmac_value = hmac.new(<EOL>key.encode('<STR_LIT:utf-8>') if hasattr(key, '<STR_LIT>') else key,<EOL>message.encode('<STR_LIT:utf-8>') if hasattr(message, '<STR_LIT>') else message,<EOL>sha1<EOL>).hexdigest()<EOL>return hmac_value<EOL>
|
Calculate HMAC value of message using ``WEBHOOKS_SECRET_KEY``.
:param message: String to calculate HMAC for.
|
f11701:m0
|
def check_x_hub_signature(signature, message):
|
hmac_value = get_hmac(message)<EOL>if hmac_value == signature or(signature.find('<STR_LIT:=>') > -<NUM_LIT:1> and<EOL>hmac_value == signature[signature.find('<STR_LIT:=>') + <NUM_LIT:1>:]):<EOL><INDENT>return True<EOL><DEDENT>return False<EOL>
|
Check X-Hub-Signature used by GitHub to sign requests.
:param signature: HMAC signature extracted from request.
:param message: Request message.
|
f11701:m1
|
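
The two rows above compute an HMAC-SHA1 hex digest of the request body and accept either a bare digest or the GitHub-style ``sha1=<hex>`` form. A stdlib-only sketch of the same check; ``hmac.compare_digest`` is a hardening choice of this sketch (the row above uses a plain ``==`` comparison).

```python
import hmac
from hashlib import sha1

def compute_hmac(secret, message):
    """Hex HMAC-SHA1 of message, mirroring the get_hmac row above (stdlib only)."""
    key = secret.encode('utf-8') if isinstance(secret, str) else secret
    msg = message.encode('utf-8') if isinstance(message, str) else message
    return hmac.new(key, msg, sha1).hexdigest()

def check_signature(secret, signature, message):
    """Accept either a bare hex digest or the GitHub-style 'sha1=<hex>' form."""
    expected = compute_hmac(secret, message)
    if '=' in signature:
        signature = signature.split('=', 1)[1]
    return hmac.compare_digest(expected, signature)

# check_signature('my-secret', 'sha1=' + compute_hmac('my-secret', b'{"x":1}'), b'{"x":1}')
```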
def upgrade():
|
pass<EOL>
|
Upgrade database.
|
f11702:m0
|
def downgrade():
|
pass<EOL>
|
Downgrade database.
|
f11702:m1
|
def upgrade():
|
def json_column(name, **kwargs):<EOL><INDENT>"""<STR_LIT>"""<EOL>return sa.Column(<EOL>name,<EOL>sqlalchemy_utils.types.JSONType().with_variant(<EOL>postgresql.JSON(none_as_null=True), '<STR_LIT>',<EOL>),<EOL>**kwargs<EOL>)<EOL><DEDENT>op.create_table(<EOL>'<STR_LIT>',<EOL>sa.Column('<STR_LIT>', sa.DateTime(), nullable=False),<EOL>sa.Column('<STR_LIT>', sa.DateTime(), nullable=False),<EOL>sa.Column(<EOL>'<STR_LIT:id>', sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),<EOL>sa.Column('<STR_LIT>', sa.String(length=<NUM_LIT:255>), nullable=False),<EOL>sa.Column('<STR_LIT>', sa.Integer(), nullable=True),<EOL>json_column('<STR_LIT>', nullable=True),<EOL>json_column('<STR_LIT>', nullable=True),<EOL>json_column('<STR_LIT>', nullable=True),<EOL>json_column('<STR_LIT>', nullable=True),<EOL>sa.Column('<STR_LIT>', sa.Integer(), nullable=True),<EOL>sa.ForeignKeyConstraint(['<STR_LIT>'], [u'<STR_LIT>'], ),<EOL>sa.PrimaryKeyConstraint('<STR_LIT:id>')<EOL>)<EOL>op.create_index(<EOL>op.f('<STR_LIT>'),<EOL>'<STR_LIT>',<EOL>['<STR_LIT>'],<EOL>unique=False<EOL>)<EOL>
|
Upgrade database.
|
f11703:m0
|
def downgrade():
|
op.drop_index(<EOL>op.f('<STR_LIT>'),<EOL>table_name='<STR_LIT>'<EOL>)<EOL>op.drop_table('<STR_LIT>')<EOL>
|
Downgrade database.
|
f11703:m1
|
@shared_task(bind=True, ignore_results=True)<EOL>def process_event(self, event_id):
|
with db.session.begin_nested():<EOL><INDENT>event = Event.query.get(event_id)<EOL>event._celery_task = self <EOL>event.receiver.run(event) <EOL>flag_modified(event, '<STR_LIT>')<EOL>flag_modified(event, '<STR_LIT>')<EOL>db.session.add(event)<EOL><DEDENT>db.session.commit()<EOL>
|
Process event in Celery.
|
f11705:m0
|
def _json_column(**kwargs):
|
return db.Column(<EOL>JSONType().with_variant(<EOL>postgresql.JSON(none_as_null=True),<EOL>'<STR_LIT>',<EOL>),<EOL>nullable=True,<EOL>**kwargs<EOL>)<EOL>
|
Return JSON column.
|
f11705:m1
|
def __init__(self, receiver_id):
|
self.receiver_id = receiver_id<EOL>
|
Initialize a receiver identifier.
|
f11705:c0:m0
|
def __call__(self, event):
|
return self.run(event)<EOL>
|
Proxy to ``self.run`` method.
|
f11705:c0:m1
|
def run(self, event):
|
raise NotImplementedError()<EOL>
|
Implement method accepting the ``Event`` instance.
|
f11705:c0:m2
|
def status(self, event):
|
pass<EOL>
|
Return a tuple with current processing status code and message.
Return ``None`` if the backend does not support states.
|
f11705:c0:m3
|
def delete(self, event):
|
assert self.receiver_id == event.receiver_id<EOL>event.response = {'<STR_LIT:status>': <NUM_LIT>, '<STR_LIT:message>': '<STR_LIT>'}<EOL>event.response_code = <NUM_LIT><EOL>
|
Mark event as deleted.
|
f11705:c0:m4
|
def get_hook_url(self, access_token):
|
<EOL>if (current_app.debug or current_app.testing) and current_app.config.get('<STR_LIT>', None):<EOL><INDENT>url_pattern = current_app.config[<EOL>'<STR_LIT>'].get(self.receiver_id, None)<EOL>if url_pattern:<EOL><INDENT>return url_pattern % dict(token=access_token)<EOL><DEDENT><DEDENT>return url_for(<EOL>'<STR_LIT>',<EOL>receiver_id=self.receiver_id,<EOL>access_token=access_token,<EOL>_external=True<EOL>)<EOL>
|
Get URL for webhook.
In debug and testing mode the hook URL can be overwritten using the
``WEBHOOKS_DEBUG_RECEIVER_URLS`` configuration variable to allow
testing webhooks via services such as Ultrahook.
.. code-block:: python
WEBHOOKS_DEBUG_RECEIVER_URLS = dict(
github='http://github.userid.ultrahook.com',
)
|
f11705:c0:m5
|
def check_signature(self):
|
if not self.signature:<EOL><INDENT>return True<EOL><DEDENT>signature_value = request.headers.get(self.signature, None)<EOL>if signature_value:<EOL><INDENT>validator = '<STR_LIT>' + re.sub(r'<STR_LIT>', '<STR_LIT:_>', self.signature).lower()<EOL>check_signature = getattr(signatures, validator)<EOL>if check_signature(signature_value, request.data):<EOL><INDENT>return True<EOL><DEDENT><DEDENT>return False<EOL>
|
Check signature of signed request.
|
f11705:c0:m6
|
def extract_payload(self):
|
if not self.check_signature():<EOL><INDENT>raise InvalidSignature('<STR_LIT>')<EOL><DEDENT>if request.is_json:<EOL><INDENT>delete_cached_json_for(request)<EOL>return request.get_json(silent=False, cache=False)<EOL><DEDENT>elif request.content_type == '<STR_LIT>':<EOL><INDENT>return dict(request.form)<EOL><DEDENT>raise InvalidPayload(request.content_type)<EOL>
|
Extract payload from request.
|
f11705:c0:m7
|
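
The receiver rows above define the contract for a webhook backend: subclasses implement ``run(event)`` and may set ``event.response`` and ``event.response_code`` after reading ``event.payload``. A hedged sketch of a trivial receiver; ``Receiver`` here is a local stand-in for the package's base class, whose import path is elided above.

```python
class Receiver(object):
    """Local stand-in for the package's Receiver base class (import path elided above)."""
    def __init__(self, receiver_id):
        self.receiver_id = receiver_id

class EchoReceiver(Receiver):
    """Echo the incoming payload back as the event response."""
    def run(self, event):
        event.response = {'status': 200, 'message': 'ok', 'echo': event.payload}
        event.response_code = 200
```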
def __call__(self, event):
|
process_event.apply_async(task_id=str(event.id), args=[str(event.id)])<EOL>
|
Fire a celery task.
|
f11705:c1:m0
|
def status(self, event):
|
result = AsyncResult(str(event.id))<EOL>return (<EOL>self.CELERY_STATES_TO_HTTP.get(result.state),<EOL>result.info.get('<STR_LIT:message>')<EOL>if result.state in self.CELERY_RESULT_INFO_FOR and result.info<EOL>else event.response.get('<STR_LIT:message>')<EOL>)<EOL>
|
Return a tuple with current processing status code and message.
|
f11705:c1:m1
|
def delete(self, event):
|
super(CeleryReceiver, self).delete(event)<EOL>AsyncResult(event.id).revoke(terminate=True)<EOL>
|
Abort running task if it exists.
|
f11705:c1:m2
|
@validates('<STR_LIT>')<EOL><INDENT>def validate_receiver(self, key, value):<DEDENT>
|
if value not in current_webhooks.receivers:<EOL><INDENT>raise ReceiverDoesNotExist(self.receiver_id)<EOL><DEDENT>return value<EOL>
|
Validate receiver identifier.
|
f11705:c2:m0
|
@classmethod<EOL><INDENT>def create(cls, receiver_id, user_id=None):<DEDENT>
|
event = cls(id=uuid.uuid4(), receiver_id=receiver_id, user_id=user_id)<EOL>event.payload = event.receiver.extract_payload()<EOL>return event<EOL>
|
Create an event instance.
|
f11705:c2:m1
|
@property<EOL><INDENT>def receiver(self):<DEDENT>
|
try:<EOL><INDENT>return current_webhooks.receivers[self.receiver_id]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise ReceiverDoesNotExist(self.receiver_id)<EOL><DEDENT>
|
Return registered receiver.
|
f11705:c2:m2
|
@receiver.setter<EOL><INDENT>def receiver(self, value):<DEDENT>
|
assert isinstance(value, Receiver)<EOL>self.receiver_id = value.receiver_id<EOL>
|
Set receiver instance.
|
f11705:c2:m3
|
def process(self):
|
try:<EOL><INDENT>self.receiver(self)<EOL><DEDENT>except Exception as e:<EOL><INDENT>current_app.logger.exception('<STR_LIT>')<EOL>self.response_code = <NUM_LIT><EOL>self.response = dict(status=<NUM_LIT>, message=str(e))<EOL><DEDENT>return self<EOL>
|
Process current event.
|
f11705:c2:m4
|
@property<EOL><INDENT>def status(self):<DEDENT>
|
status = self.receiver.status(self)<EOL>return status if status else (<EOL>self.response_code, self.response.get('<STR_LIT:message>')<EOL>)<EOL>
|
Return a tuple with current processing status code and message.
|
f11705:c2:m5
|
def delete(self):
|
self.receiver.delete(self)<EOL>
|
Make receiver delete this event.
|
f11705:c2:m6
|
@abc.abstractmethod<EOL><INDENT>def create(self, parent_importable):<DEDENT>
|
Create the importable object.
|
f11712:c0:m1
|
|
def main():
|
application = HaasApplication(sys.argv)<EOL>return application.run()<EOL>
|
Execute haas.
Parameters
----------
argv : list
The script's full argument list including the script itself.
|
f11727:m0
|
def print_errors(self):
|
self.stream.writeln()<EOL>self.print_error_list('<STR_LIT>', self.errors)<EOL>self.print_error_list('<STR_LIT>', self.failures)<EOL>
|
Print all errors and failures to the console.
|
f11728:c1:m8
|
def print_error_list(self, error_kind, errors):
|
for result in errors:<EOL><INDENT>self.stream.writeln(self.separator1)<EOL>self.stream.writeln(<EOL>'<STR_LIT>' % (error_kind, self.get_test_description(<EOL>result.test)))<EOL>self.stream.writeln(self.separator2)<EOL>self.stream.writeln(result.exception)<EOL><DEDENT>
|
Print the list of errors or failures.
Parameters
----------
error_kind : str
``'ERROR'`` or ``'FAIL'``
errors : list
List of :class:`~haas.result.TestResult`
|
f11728:c1:m9
|
@abstractclassmethod<EOL><INDENT>def from_args(cls, args, arg_prefix, loader):<DEDENT>
|
Construct the discoverer from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
arg_prefix : str
The prefix used for arguments belonging solely to this plugin.
loader : haas.loader.Loader
The test loader used to construct TestCase and TestSuite instances.
|
f11729:c0:m0
|
|
@abstractclassmethod<EOL><INDENT>def add_parser_arguments(cls, parser, option_prefix, dest_prefix):<DEDENT>
|
Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
|
f11729:c0:m1
|
|
@abstractmethod<EOL><INDENT>def discover(self, start, top_level_directory=None, pattern=None):<DEDENT>
|
Do test case discovery.
This is the top-level entry-point for test discovery.
If the ``start`` argument is a directory, then ``haas`` will
discover all tests in the package contained in that directory.
If the ``start`` argument is not a directory, it is assumed to
be a package or module name and tests in the package or module
are loaded.
FIXME: This needs a better description.
Parameters
----------
start : str
The directory, package, module, class or test to load.
top_level_directory : str
The path to the top-level directory of the project. This is
the parent directory of the project's top-level Python
package.
pattern : str
The glob pattern to match the filenames of modules to search
for tests.
|
f11729:c0:m2
|
|
@classmethod<EOL><INDENT>def from_args(cls, args, arg_prefix):<DEDENT>
|
initializer_spec = args.process_init<EOL>if initializer_spec is None:<EOL><INDENT>initializer = None<EOL><DEDENT>else:<EOL><INDENT>module_name, initializer_name = initializer_spec.rsplit('<STR_LIT:.>', <NUM_LIT:1>)<EOL>init_module = get_module_by_name(module_name)<EOL>initializer = getattr(init_module, initializer_name)<EOL><DEDENT>return cls(process_count=args.processes, initializer=initializer,<EOL>maxtasksperchild=args.process_max_tasks)<EOL>
|
Create a :class:`~.ParallelTestRunner` from command-line arguments.
|
f11733:c1:m1
|
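
The ``from_args`` row above resolves a dotted ``module.attribute`` initializer spec via ``rsplit`` plus ``getattr``. A stdlib sketch of that resolution step, with ``importlib`` standing in for the elided ``get_module_by_name`` helper:

```python
import importlib

def resolve_dotted_name(spec):
    """Resolve 'package.module.attribute' to the attribute object, mirroring the
    rsplit + getattr logic in the row above."""
    module_name, attr_name = spec.rsplit('.', 1)
    module = importlib.import_module(module_name)
    return getattr(module, attr_name)

# import os.path
# resolve_dotted_name('os.path.join') is os.path.join  -> True
```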
@abstractclassmethod<EOL><INDENT>def from_args(cls, args, arg_prefix):<DEDENT>
|
Construct the runner from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
arg_prefix : str
The prefix used for arguments belonging solely to this plugin.
|
f11736:c0:m0
|
|
@abstractclassmethod<EOL><INDENT>def add_parser_arguments(self, parser, option_prefix, dest_prefix):<DEDENT>
|
Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
|
f11736:c0:m1
|
|
@abstractclassmethod<EOL><INDENT>def add_parser_arguments(cls, parser, name, option_prefix, dest_prefix):<DEDENT>
|
Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
name : str
The name of the plugin.
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
|
f11737:c0:m1
|
|
@abstractmethod<EOL><INDENT>def __call__(self, result):<DEDENT>
|
Handle the completed test result ``result``.
|
f11737:c0:m6
|
|
def find_top_level_directory(start_directory):
|
top_level = start_directory<EOL>while os.path.isfile(os.path.join(top_level, '<STR_LIT>')):<EOL><INDENT>top_level = os.path.dirname(top_level)<EOL>if top_level == os.path.dirname(top_level):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT><DEDENT>return os.path.abspath(top_level)<EOL>
|
Finds the top-level directory of a project given a start directory
inside the project.
Parameters
----------
start_directory : str
The directory in which test discovery will start.
|
f11738:m7
|
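
The row above climbs parent directories while each still contains a package marker file; the marker literal is elided, so the sketch below assumes it is ``__init__.py`` and raises once the filesystem root is reached without leaving a package.

```python
import os

def find_top_level(start_directory, marker='__init__.py'):
    """Sketch of the walk above: climb out of the package until the current
    directory no longer contains the marker file (assumed to be __init__.py)."""
    top_level = os.path.abspath(start_directory)
    while os.path.isfile(os.path.join(top_level, marker)):
        parent = os.path.dirname(top_level)
        if parent == top_level:  # reached the filesystem root without leaving a package
            raise ValueError('No top-level directory found')
        top_level = parent
    return top_level
```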
@classmethod<EOL><INDENT>def from_args(cls, args, arg_prefix, loader):<DEDENT>
|
return cls(loader)<EOL>
|
Construct the discoverer from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
arg_prefix : str
The prefix used for arguments belonging solely to this plugin.
loader : haas.loader.Loader
The test loader used to construct TestCase and TestSuite instances.
|
f11738:c0:m1
|
@classmethod<EOL><INDENT>def add_parser_arguments(cls, parser, option_prefix, dest_prefix):<DEDENT>
|
Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
|
f11738:c0:m2
|
|
def discover_single_case(self, module, case_attributes):
|
<EOL>case = module<EOL>loader = self._loader<EOL>for index, component in enumerate(case_attributes):<EOL><INDENT>case = getattr(case, component, None)<EOL>if case is None:<EOL><INDENT>return loader.create_suite()<EOL><DEDENT>elif loader.is_test_case(case):<EOL><INDENT>rest = case_attributes[index + <NUM_LIT:1>:]<EOL>if len(rest) > <NUM_LIT:1>:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>elif len(rest) == <NUM_LIT:1>:<EOL><INDENT>return loader.create_suite(<EOL>[loader.load_test(case, *rest)])<EOL><DEDENT>return loader.load_case(case)<EOL><DEDENT><DEDENT>return loader.create_suite()<EOL>
|
Find and load a single TestCase or TestCase method from a module.
Parameters
----------
module : module
The imported Python module containing the TestCase to be
loaded.
case_attributes : list
A list (length 1 or 2) of str. The first component must be
the name of a TestCase subclass. The second component must
be the name of a method in the TestCase.
|
f11738:c0:m5
|
def discover_by_file(self, start_filepath, top_level_directory=None):
|
start_filepath = os.path.abspath(start_filepath)<EOL>start_directory = os.path.dirname(start_filepath)<EOL>if top_level_directory is None:<EOL><INDENT>top_level_directory = find_top_level_directory(<EOL>start_directory)<EOL><DEDENT>logger.debug('<STR_LIT>'<EOL>'<STR_LIT>', start_filepath,<EOL>top_level_directory)<EOL>assert_start_importable(top_level_directory, start_directory)<EOL>if top_level_directory not in sys.path:<EOL><INDENT>sys.path.insert(<NUM_LIT:0>, top_level_directory)<EOL><DEDENT>tests = self._load_from_file(<EOL>start_filepath, top_level_directory)<EOL>return self._loader.create_suite(list(tests))<EOL>
|
Run test discovery on a single file.
Parameters
----------
start_filepath : str
The module file in which to start test discovery.
top_level_directory : str
The path to the top-level directory of the project. This is
the parent directory of the project's top-level Python
package.
|
f11738:c0:m7
|
def create_argument_parser():
|
parser = argparse.ArgumentParser(prog='<STR_LIT>')<EOL>parser.add_argument('<STR_LIT>', action='<STR_LIT:version>',<EOL>version='<STR_LIT>'.format(haas.__version__))<EOL>verbosity = parser.add_mutually_exclusive_group()<EOL>verbosity.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT>', default=<NUM_LIT:1>,<EOL>dest='<STR_LIT>', const=<NUM_LIT:2>, help='<STR_LIT>')<EOL>verbosity.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT>', const=<NUM_LIT:0>,<EOL>dest='<STR_LIT>', help='<STR_LIT>')<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT:store_true>', default=False,<EOL>help='<STR_LIT>')<EOL>parser.add_argument('<STR_LIT:-c>', '<STR_LIT>', dest='<STR_LIT>',<EOL>action='<STR_LIT:store_true>', default=False,<EOL>help=('<STR_LIT>'<EOL>'<STR_LIT>'))<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT:store_true>', default=False,<EOL>help='<STR_LIT>')<EOL>parser.add_argument(<EOL>'<STR_LIT:start>', nargs='<STR_LIT:*>', default=[os.getcwd()],<EOL>help=('<STR_LIT>'<EOL>'<STR_LIT>'))<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', default='<STR_LIT>',<EOL>help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', default=None,<EOL>help=('<STR_LIT>'<EOL>'<STR_LIT>'))<EOL>_add_log_level_option(parser)<EOL>return parser<EOL>
|
Creates the argument parser for haas.
|
f11745:m0
|
def run(self, plugin_manager=None):
|
if plugin_manager is None:<EOL><INDENT>plugin_manager = PluginManager()<EOL><DEDENT>plugin_manager.add_plugin_arguments(self.parser)<EOL>args = self.parser.parse_args(self.argv[<NUM_LIT:1>:])<EOL>environment_plugins = plugin_manager.get_enabled_hook_plugins(<EOL>plugin_manager.ENVIRONMENT_HOOK, args)<EOL>runner = plugin_manager.get_driver(<EOL>plugin_manager.TEST_RUNNER, args)<EOL>with PluginContext(environment_plugins):<EOL><INDENT>loader = Loader()<EOL>discoverer = plugin_manager.get_driver(<EOL>plugin_manager.TEST_DISCOVERY, args, loader=loader)<EOL>suites = [<EOL>discoverer.discover(<EOL>start=start,<EOL>top_level_directory=args.top_level_directory,<EOL>pattern=args.pattern,<EOL>)<EOL>for start in args.start<EOL>]<EOL>if len(suites) == <NUM_LIT:1>:<EOL><INDENT>suite = suites[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>suite = loader.create_suite(suites)<EOL><DEDENT>test_count = suite.countTestCases()<EOL>result_handlers = plugin_manager.get_enabled_hook_plugins(<EOL>plugin_manager.RESULT_HANDLERS, args, test_count=test_count)<EOL>result_collector = ResultCollector(<EOL>buffer=args.buffer, failfast=args.failfast)<EOL>for result_handler in result_handlers:<EOL><INDENT>result_collector.add_result_handler(result_handler)<EOL><DEDENT>result = runner.run(result_collector, suite)<EOL>return not result.wasSuccessful()<EOL><DEDENT>
|
Run the haas test runner.
This will load and configure the selected plugins, set up the
environment and begin test discovery, loading and running.
Parameters
----------
plugin_manager : haas.plugin_manager.PluginManager
[Optional] Override the use of the default plugin manager.
|
f11745:c0:m1
|
def load_module(self, module):
|
cases = self.get_test_cases_from_module(module)<EOL>suites = [self.load_case(case) for case in cases]<EOL>return self.create_suite(suites)<EOL>
|
Create and return a test suite containing all cases loaded from the
provided module.
Parameters
----------
module : module
A module object containing ``TestCases``
|
f11746:c0:m7
|
def __call__(self, *args, **kwds):
|
return self.run(*args, **kwds)<EOL>
|
Run all tests in the suite.
Parameters
----------
result : unittest.result.TestResult
|
f11748:c1:m4
|
def run(self, result, _state=None):
|
if _state is None:<EOL><INDENT>state = _TestSuiteState(result)<EOL><DEDENT>else:<EOL><INDENT>state = _state<EOL><DEDENT>kwargs = {}<EOL>for test in self:<EOL><INDENT>if result.shouldStop:<EOL><INDENT>break<EOL><DEDENT>if state.setup(test):<EOL><INDENT>if isinstance(test, TestSuite):<EOL><INDENT>kwargs = {'<STR_LIT>': state}<EOL><DEDENT>logger.debug('<STR_LIT>', test)<EOL>test(result, **kwargs)<EOL><DEDENT><DEDENT>if _state is None:<EOL><INDENT>state.teardown()<EOL><DEDENT>return result<EOL>
|
Run all tests in the suite.
Parameters
----------
result : unittest.result.TestResult
|
f11748:c1:m5
|
def countTestCases(self):
|
return sum(test.countTestCases() for test in self)<EOL>
|
Return the total number of tests contained in this suite.
|
f11748:c1:m6
|
def _format_exception(err, is_failure, stdout=None, stderr=None):
|
exctype, value, tb = err<EOL>while tb and _is_relevant_tb_level(tb):<EOL><INDENT>tb = tb.tb_next<EOL><DEDENT>if is_failure:<EOL><INDENT>length = _count_relevant_tb_levels(tb)<EOL>msgLines = traceback.format_exception(exctype, value, tb, length)<EOL><DEDENT>else:<EOL><INDENT>msgLines = traceback.format_exception(exctype, value, tb)<EOL><DEDENT>encoding = locale.getpreferredencoding()<EOL>msgLines = [_decode(line, encoding) for line in msgLines]<EOL>if stdout:<EOL><INDENT>if not stdout.endswith('<STR_LIT:\n>'):<EOL><INDENT>stdout += '<STR_LIT:\n>'<EOL><DEDENT>msgLines.append(STDOUT_LINE % stdout)<EOL><DEDENT>if stderr:<EOL><INDENT>if not stderr.endswith('<STR_LIT:\n>'):<EOL><INDENT>stderr += '<STR_LIT:\n>'<EOL><DEDENT>msgLines.append(STDERR_LINE % stderr)<EOL><DEDENT>return '<STR_LIT>'.join(msgLines)<EOL>
|
Converts a sys.exc_info()-style tuple of values into a string.
|
f11749:m3
|
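
``_format_exception`` above consumes a ``sys.exc_info()``-style ``(exctype, value, tb)`` tuple and renders it with ``traceback.format_exception``. A minimal stdlib illustration of producing and formatting such a tuple:

```python
import sys
import traceback

def capture_exception_text():
    """Format the active exception (and its traceback) into a single string,
    using the same (exctype, value, tb) tuple shape consumed above."""
    try:
        1 / 0
    except ZeroDivisionError:
        exctype, value, tb = sys.exc_info()
        return ''.join(traceback.format_exception(exctype, value, tb))

# print(capture_exception_text())
# -> "Traceback (most recent call last): ... ZeroDivisionError: division by zero"
```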
def to_dict(self):
|
return {<EOL>'<STR_LIT>': self.test_class,<EOL>'<STR_LIT>': self.test_method_name,<EOL>'<STR_LIT:status>': self.status,<EOL>'<STR_LIT>': self.exception,<EOL>'<STR_LIT:message>': self.message,<EOL>'<STR_LIT>': self.completed_time,<EOL>}<EOL>
|
Serialize the ``TestResult`` to a dictionary.
|
f11749:c2:m6
|
@classmethod<EOL><INDENT>def from_dict(cls, data):<DEDENT>
|
return cls(**data)<EOL>
|
Create a ``TestResult`` from a dictionary created by
:meth:`~.TestResult.to_dict`
|
f11749:c2:m7
|
def _setup_stdout(self):
|
if self.buffer:<EOL><INDENT>if self._stderr_buffer is None:<EOL><INDENT>self._stderr_buffer = StringIO()<EOL>self._stdout_buffer = StringIO()<EOL><DEDENT>sys.stdout = self._stdout_buffer<EOL>sys.stderr = self._stderr_buffer<EOL><DEDENT>
|
Hook stdout and stderr if buffering is enabled.
|
f11749:c3:m3
|
def _restore_stdout(self):
|
if self.buffer:<EOL><INDENT>if self._mirror_output:<EOL><INDENT>output = sys.stdout.getvalue()<EOL>error = sys.stderr.getvalue()<EOL>if output:<EOL><INDENT>if not output.endswith('<STR_LIT:\n>'):<EOL><INDENT>output += '<STR_LIT:\n>'<EOL><DEDENT>self._original_stdout.write(STDOUT_LINE % output)<EOL><DEDENT>if error:<EOL><INDENT>if not error.endswith('<STR_LIT:\n>'):<EOL><INDENT>error += '<STR_LIT:\n>'<EOL><DEDENT>self._original_stderr.write(STDERR_LINE % error)<EOL><DEDENT><DEDENT>sys.stdout = self._original_stdout<EOL>sys.stderr = self._original_stderr<EOL>self._stdout_buffer.seek(<NUM_LIT:0>)<EOL>self._stdout_buffer.truncate()<EOL>self._stderr_buffer.seek(<NUM_LIT:0>)<EOL>self._stderr_buffer.truncate()<EOL><DEDENT>
|
Unhook stdout and stderr if buffering is enabled.
|
f11749:c3:m4
|
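
The ``_setup_stdout`` / ``_restore_stdout`` rows above buffer test output by swapping ``sys.stdout`` and ``sys.stderr`` for ``StringIO`` objects and restoring the originals afterwards. A self-contained context-manager sketch of the same pattern (my own class, not the result collector's API):

```python
import sys
from io import StringIO

class OutputBuffer(object):
    """Sketch of the buffering pattern above: swap sys.stdout/stderr for StringIO
    objects, then restore the originals and keep what was written."""
    def __enter__(self):
        self._orig_out, self._orig_err = sys.stdout, sys.stderr
        self.stdout, self.stderr = StringIO(), StringIO()
        sys.stdout, sys.stderr = self.stdout, self.stderr
        return self

    def __exit__(self, exc_type, exc_value, tb):
        sys.stdout, sys.stderr = self._orig_out, self._orig_err
        return False  # do not swallow exceptions

# with OutputBuffer() as buf:
#     print('hidden during the test')
# captured = buf.stdout.getvalue()
```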
def add_result_handler(self, handler):
|
self._result_handlers.append(handler)<EOL>if self._sorted_handlers:<EOL><INDENT>self._sorted_handlers = None<EOL><DEDENT>
|
Register a new result handler.
|
f11749:c3:m6
|
def startTestRun(self):
|
for handler in self._handlers:<EOL><INDENT>handler.start_test_run()<EOL><DEDENT>
|
Indicate that the test run is starting.
|
f11749:c3:m9
|
def stopTestRun(self):
|
for handler in self._handlers:<EOL><INDENT>handler.stop_test_run()<EOL><DEDENT>
|
Indicate that the test run has completed.
|
f11749:c3:m10
|
def add_result(self, result):
|
for handler in self._handlers:<EOL><INDENT>handler(result)<EOL><DEDENT>if self._successful and result.status not in _successful_results:<EOL><INDENT>self._successful = False<EOL><DEDENT>
|
Add an already-constructed :class:`~.TestResult` to this
:class:`~.ResultCollector`.
This may be used when collecting results created by other
ResultCollectors (e.g. in subprocesses).
|
f11749:c3:m11
|
def wasSuccessful(self):
|
return self._successful<EOL>
|
Return ``True`` if the run was successful.
|
f11749:c3:m19
|
def stop(self):
|
self.shouldStop = True<EOL>
|
Set the ``shouldStop`` flag, used by the test cases to determine if
they should terminate early.
|
f11749:c3:m20
|
def add_plugin_arguments(self, parser):
|
for manager in self.hook_managers.values():<EOL><INDENT>if len(list(manager)) == <NUM_LIT:0>:<EOL><INDENT>continue<EOL><DEDENT>manager.map(self._add_hook_extension_arguments, parser)<EOL><DEDENT>for namespace, manager in self.driver_managers.items():<EOL><INDENT>choices = list(sorted(manager.names()))<EOL>if len(choices) == <NUM_LIT:0>:<EOL><INDENT>continue<EOL><DEDENT>option, dest = self._namespace_to_option(namespace)<EOL>parser.add_argument(<EOL>option, help=self._help[namespace], dest=dest,<EOL>choices=choices, default='<STR_LIT:default>')<EOL>option_prefix = '<STR_LIT>'.format(option)<EOL>dest_prefix = '<STR_LIT>'.format(dest)<EOL>manager.map(self._add_driver_extension_arguments,<EOL>parser, option_prefix, dest_prefix)<EOL><DEDENT>
|
Add plugin arguments to argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The main haas ArgumentParser.
|
f11752:c0:m7
|
def get_enabled_hook_plugins(self, hook, args, **kwargs):
|
manager = self.hook_managers[hook]<EOL>if len(list(manager)) == <NUM_LIT:0>:<EOL><INDENT>return []<EOL><DEDENT>return [<EOL>plugin for plugin in manager.map(<EOL>self._create_hook_plugin, args, **kwargs)<EOL>if plugin is not None<EOL>]<EOL>
|
Get enabled plugins for specified hook name.
|
f11752:c0:m8
|
def get_driver(self, namespace, parsed_args, **kwargs):
|
option, dest = self._namespace_to_option(namespace)<EOL>dest_prefix = '<STR_LIT>'.format(dest)<EOL>driver_name = getattr(parsed_args, dest, '<STR_LIT:default>')<EOL>driver_extension = self.driver_managers[namespace][driver_name]<EOL>return driver_extension.plugin.from_args(<EOL>parsed_args, dest_prefix, **kwargs)<EOL>
|
Get the mutually-exclusive plugin for the plugin namespace.
|
f11752:c0:m9
|
def fix_base(fix_environ):
|
def _is_android():<EOL><INDENT>import os<EOL>vm_path = os.sep+"<STR_LIT>"+os.sep+"<STR_LIT>"+os.sep+"<STR_LIT>"<EOL>if os.path.exists(vm_path) or os.path.exists(os.sep+"<STR_LIT>"+vm_path):<EOL><INDENT>return True<EOL><DEDENT>try:<EOL><INDENT>import android<EOL>del android <EOL>return True<EOL><DEDENT>except ImportError:<EOL><INDENT>pass<EOL><DEDENT>return False<EOL><DEDENT>def _fix_android_environ():<EOL><INDENT>import os<EOL>if "<STR_LIT>" not in os.environ:<EOL><INDENT>os.environ["<STR_LIT>"] = "<STR_LIT>"<EOL><DEDENT>lib_path = os.pathsep+"<STR_LIT>"+os.pathsep+"<STR_LIT>"<EOL>if sys.python_bits == <NUM_LIT:64>:<EOL><INDENT>lib_path = os.pathsep+"<STR_LIT>"+os.pathsep+"<STR_LIT>" + lib_path<EOL><DEDENT>os.environ["<STR_LIT>"] += lib_path<EOL><DEDENT>if sys.platform.startswith("<STR_LIT>") and sys.platform != "<STR_LIT>":<EOL><INDENT>if _is_android():<EOL><INDENT>sys.platform = "<STR_LIT>"<EOL><DEDENT>elif "<STR_LIT:->" not in sys.platform:<EOL><INDENT>sys.platform = "<STR_LIT>"<EOL><DEDENT><DEDENT>sys.platform_codename = sys.platform<EOL>if sys.platform_codename == "<STR_LIT:win32>":<EOL><INDENT>sys.platform_codename = "<STR_LIT>"<EOL><DEDENT>elif sys.platform_codename == "<STR_LIT>":<EOL><INDENT>sys.platform_codename = "<STR_LIT>"<EOL><DEDENT>if '<STR_LIT>' in sys.__dict__:<EOL><INDENT>if sys.maxsize > <NUM_LIT:2>**<NUM_LIT:32>:<EOL><INDENT>sys.python_bits = <NUM_LIT:64><EOL><DEDENT>else:<EOL><INDENT>sys.python_bits = <NUM_LIT:32><EOL><DEDENT><DEDENT>else:<EOL><INDENT>import struct<EOL>sys.python_bits = <NUM_LIT:8> * struct.calcsize("<STR_LIT:P>")<EOL>if sys.python_bits == <NUM_LIT:32>:<EOL><INDENT>sys.maxsize = <NUM_LIT><EOL><DEDENT>else:<EOL><INDENT>sys.maxsize = int("<STR_LIT>")<EOL><DEDENT><DEDENT>if fix_environ and sys.platform == "<STR_LIT>":<EOL><INDENT>_fix_android_environ()<EOL><DEDENT>
|
Activate the base compatibility.
|
f11756:m2
|
def fix_builtins(override_debug=False):
|
override_dict = {}<EOL>orig_print = None<EOL>used_print = None<EOL>if(__builtins__.__class__ is dict):<EOL><INDENT>builtins_dict = __builtins__<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>import builtins<EOL><DEDENT>except ImportError:<EOL><INDENT>import __builtin__ as builtins<EOL><DEDENT>builtins_dict = builtins.__dict__<EOL><DEDENT>def _deprecated(*args, **kwargs):<EOL><INDENT>"""<STR_LIT>"""<EOL>import traceback<EOL>raise DeprecationWarning("<STR_LIT>" +<EOL>traceback.extract_stack(None, <NUM_LIT:2>)[<NUM_LIT:0>][<NUM_LIT:3>])<EOL><DEDENT>def _print_wrapper(*args, **kwargs):<EOL><INDENT>flush = kwargs.get("<STR_LIT>", False)<EOL>if "<STR_LIT>" in kwargs:<EOL><INDENT>del kwargs["<STR_LIT>"]<EOL><DEDENT>orig_print(*args, **kwargs)<EOL>if flush:<EOL><INDENT>kwargs.get("<STR_LIT:file>", sys.stdout).flush()<EOL><DEDENT><DEDENT>def _print_full(*args, **kwargs):<EOL><INDENT>opt = {"<STR_LIT>": "<STR_LIT:U+0020>", "<STR_LIT:end>": "<STR_LIT:\n>", "<STR_LIT:file>": sys.stdout, "<STR_LIT>": False}<EOL>for key in kwargs:<EOL><INDENT>if(key in opt):<EOL><INDENT>opt[key] = kwargs[key]<EOL><DEDENT>else:<EOL><INDENT>raise TypeError("<STR_LIT:'>"+key+"<STR_LIT>"<EOL>"<STR_LIT>")<EOL><DEDENT><DEDENT>opt["<STR_LIT:file>"].write(opt["<STR_LIT>"].join(str(val) for val in args)+opt["<STR_LIT:end>"])<EOL>if opt["<STR_LIT>"]:<EOL><INDENT>opt["<STR_LIT:file>"].flush()<EOL><DEDENT><DEDENT>def _sorted(my_list):<EOL><INDENT>my_list = list(my_list)<EOL>my_list.sort()<EOL>return my_list<EOL><DEDENT>def _format(value, format_spec):<EOL><INDENT>return value.__format__(format_spec)<EOL><DEDENT>if builtins_dict.get(__name__, False):<EOL><INDENT>raise RuntimeError(__name__+"<STR_LIT>")<EOL><DEDENT>if builtins_dict.get("<STR_LIT>") is None:<EOL><INDENT>override_dict["<STR_LIT>"] = Exception<EOL><DEDENT>if builtins_dict.get("<STR_LIT>") is None:<EOL><INDENT>if builtins_dict.get("<STR_LIT>") is None:<EOL><INDENT>import types<EOL>override_dict["<STR_LIT>"] = types.StringType<EOL><DEDENT>else:<EOL><INDENT>override_dict["<STR_LIT>"] = (str, bytes) <EOL><DEDENT><DEDENT>if getattr(int, "<STR_LIT>", None) is None:<EOL><INDENT>import types<EOL>override_dict["<STR_LIT>"] = types.IntType<EOL><DEDENT>else:<EOL><INDENT>override_dict["<STR_LIT>"] = int <EOL><DEDENT>if '<STR_LIT>' not in str.__dict__:<EOL><INDENT>override_dict["<STR_LIT:str>"] = _Internal.ExtStr<EOL><DEDENT>if builtins_dict.get("<STR_LIT>") is not None:<EOL><INDENT>override_dict["<STR_LIT:input>"] = builtins_dict.get("<STR_LIT>")<EOL><DEDENT>override_dict["<STR_LIT>"] = _deprecated<EOL>if sys.version_info >= (<NUM_LIT:3>, <NUM_LIT:3>):<EOL><INDENT>used_print = builtins_dict.get("<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>orig_print = builtins_dict.get("<STR_LIT>")<EOL>if orig_print is not None:<EOL><INDENT>used_print = _print_wrapper<EOL><DEDENT>else:<EOL><INDENT>used_print = _print_full<EOL><DEDENT>override_dict["<STR_LIT>"] = used_print<EOL><DEDENT>override_dict["<STR_LIT>"] = used_print<EOL>if builtins_dict.get("<STR_LIT>") is None:<EOL><INDENT>override_dict["<STR_LIT>"] = _sorted<EOL><DEDENT>if builtins_dict.get("<STR_LIT>") is None:<EOL><INDENT>override_dict["<STR_LIT>"] = _format<EOL><DEDENT>override_dict[__name__] = True<EOL>builtins_dict.update(override_dict)<EOL>del override_dict<EOL>
|
Activate the builtins compatibility.
|
f11756:m3
|
def fix_subprocess(override_debug=False, override_exception=False):
|
import subprocess<EOL>if subprocess.__dict__.get("<STR_LIT>") is None:<EOL><INDENT>subprocess.SubprocessError = _Internal.SubprocessError<EOL><DEDENT>if _InternalReferences.UsedCalledProcessError is None:<EOL><INDENT>if "<STR_LIT>" in subprocess.__dict__:<EOL><INDENT>_subprocess_called_process_error(True, subprocess)<EOL><DEDENT>else:<EOL><INDENT>_subprocess_called_process_error(False, subprocess)<EOL>subprocess.CalledProcessError = _InternalReferences.UsedCalledProcessError<EOL><DEDENT><DEDENT>def _check_output(*args, **kwargs):<EOL><INDENT>if "<STR_LIT>" in kwargs:<EOL><INDENT>raise ValueError("<STR_LIT>"<EOL>"<STR_LIT>")<EOL><DEDENT>process = subprocess.Popen(stdout=subprocess.PIPE, *args, **kwargs)<EOL>stdout_data, __ = process.communicate()<EOL>ret_code = process.poll()<EOL>if ret_code is None:<EOL><INDENT>raise RuntimeWarning("<STR_LIT>")<EOL><DEDENT>if ret_code:<EOL><INDENT>cmd = kwargs.get("<STR_LIT:args>")<EOL>if cmd is None:<EOL><INDENT>cmd = args[<NUM_LIT:0>]<EOL><DEDENT>raise _InternalReferences.UsedCalledProcessError(returncode=ret_code, cmd=cmd, output=stdout_data)<EOL><DEDENT>return stdout_data<EOL><DEDENT>try:<EOL><INDENT>subprocess.check_output<EOL><DEDENT>except AttributeError:<EOL><INDENT>subprocess.check_output = _check_output<EOL><DEDENT>
|
Activate the subprocess compatibility.
|
f11756:m4
|
def fix_all(override_debug=False, override_all=False):
|
fix_base(True)<EOL>fix_builtins(override_debug)<EOL>fix_subprocess(override_debug, override_all)<EOL>return True<EOL>
|
Activate the full compatibility.
|
f11756:m5
|
def validate(self, value, param_name, exc=None, logger=None):
|
if exc is not None:<EOL><INDENT>self.exc = exc<EOL><DEDENT>if logger is not None:<EOL><INDENT>self.logger = logger<EOL><DEDENT>if self.type is not None and not type(value) == self.type: <EOL><INDENT>self.error(<EOL>f'<STR_LIT>'<EOL>)<EOL><DEDENT>if self.instance is not None and not isinstance(value, self.instance):<EOL><INDENT>self.error(<EOL>f'<STR_LIT>'<EOL>f'<STR_LIT>'<EOL>)<EOL><DEDENT>if self.min is not None and value < self.min:<EOL><INDENT>self.error(f'<STR_LIT>')<EOL><DEDENT>if self.max is not None and value > self.max:<EOL><INDENT>self.error(f'<STR_LIT>')<EOL><DEDENT>if self.regex is not None and not re_full_match(self.regex, value):<EOL><INDENT>self.error(f'<STR_LIT>')<EOL><DEDENT>if self.in_list is not None and value not in self.in_list:<EOL><INDENT>self.error(f'<STR_LIT>')<EOL><DEDENT>if self.path_exists and not exists(value):<EOL><INDENT>self.error(f'<STR_LIT>')<EOL><DEDENT>return True<EOL>
|
:param value: value to validate
:param param_name: name of the value (for logging purpose)
:param exc: exception to raise (default is "ValidatorError")
:param logger: logger to use (default will be "Validator.logger")
|
f11775:c0:m1
|
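
The ``validate`` row above chains several optional checks (exact type, isinstance, min/max bounds, full regex match, membership, path existence). A standalone sketch of the same chain as a plain function; the keyword names are mine, not the class attributes, and it raises ``ValueError`` instead of a configurable exception.

```python
import re
from os.path import exists

def validate_value(value, *, expected_type=None, instance=None, minimum=None,
                   maximum=None, regex=None, in_list=None, path_exists=False):
    """Standalone sketch of the checks performed above; raises ValueError on the
    first failed check and returns True otherwise."""
    if expected_type is not None and type(value) is not expected_type:
        raise ValueError('wrong type: %r' % type(value))
    if instance is not None and not isinstance(value, instance):
        raise ValueError('wrong instance: %r' % type(value))
    if minimum is not None and value < minimum:
        raise ValueError('value below minimum %r' % minimum)
    if maximum is not None and value > maximum:
        raise ValueError('value above maximum %r' % maximum)
    if regex is not None and not re.fullmatch(regex, value):
        raise ValueError('value does not match %r' % regex)
    if in_list is not None and value not in in_list:
        raise ValueError('value not in %r' % (in_list,))
    if path_exists and not exists(value):
        raise ValueError('path does not exist: %r' % value)
    return True

# validate_value(5, minimum=1, maximum=10)                   -> True
# validate_value('abc', regex=r'[a-z]+', in_list=['abc'])    -> True
```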