column            dtype           lengths / values
partition         stringclasses   3 values
func_name         stringlengths   1 – 134
docstring         stringlengths   1 – 46.9k
path              stringlengths   4 – 223
original_string   stringlengths   75 – 104k
code              stringlengths   75 – 104k
docstring_tokens  listlengths     1 – 1.97k
repo              stringlengths   7 – 55
language          stringclasses   1 value
url               stringlengths   87 – 315
code_tokens       listlengths     19 – 28.4k
sha               stringlengths   40 – 40
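The columns above describe a CodeSearchNet-style corpus: each record pairs a Python function (code, mirrored in original_string, plus tokenised code_tokens) with its natural-language description (docstring, docstring_tokens) and provenance fields (repo, path, url, sha, partition). The sketch below shows one way such records could be consumed; it assumes a hypothetical local JSON Lines export named corpus.jsonl and is illustrative only, not part of the dataset itself.

import json

# Minimal sketch, assuming the records are stored one per line as JSON objects
# with the columns listed above; "corpus.jsonl" is a hypothetical file name.
with open("corpus.jsonl") as handle:
    for line in handle:
        record = json.loads(line)
        # Natural-language description paired with the function source.
        summary = record["docstring"]
        source = record["code"]            # mirrors original_string
        # Provenance: repository, file path, split, GitHub permalink, commit sha.
        print(record["repo"], record["path"], record["partition"])
        print(record["url"], record["sha"])
        print(summary)
        print(source)
        break  # inspect a single record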
test
PasswordChangeSerializer.update
Check the old password is valid and set the new password.
user_management/api/serializers.py
def update(self, instance, validated_data):
    """Check the old password is valid and set the new password."""
    if not instance.check_password(validated_data['old_password']):
        msg = _('Invalid password.')
        raise serializers.ValidationError({'old_password': msg})

    instance.set_password(validated_data['new_password'])
    instance.save()
    return instance
[ "Check", "the", "old", "password", "is", "valid", "and", "set", "the", "new", "password", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/serializers.py#L96-L104
[ "def", "update", "(", "self", ",", "instance", ",", "validated_data", ")", ":", "if", "not", "instance", ".", "check_password", "(", "validated_data", "[", "'old_password'", "]", ")", ":", "msg", "=", "_", "(", "'Invalid password.'", ")", "raise", "serializers", ".", "ValidationError", "(", "{", "'old_password'", ":", "msg", "}", ")", "instance", ".", "set_password", "(", "validated_data", "[", "'new_password'", "]", ")", "instance", ".", "save", "(", ")", "return", "instance" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
PasswordResetSerializer.update
Set the new password for the user.
user_management/api/serializers.py
def update(self, instance, validated_data):
    """Set the new password for the user."""
    instance.set_password(validated_data['new_password'])
    instance.save()
    return instance
[ "Set", "the", "new", "password", "for", "the", "user", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/serializers.py#L133-L137
[ "def", "update", "(", "self", ",", "instance", ",", "validated_data", ")", ":", "instance", ".", "set_password", "(", "validated_data", "[", "'new_password'", "]", ")", "instance", ".", "save", "(", ")", "return", "instance" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
ResendConfirmationEmailSerializer.validate_email
Validate if email exists and requires a verification. `validate_email` will set a `user` attribute on the instance allowing the view to send an email confirmation.
user_management/api/serializers.py
def validate_email(self, email):
    """
    Validate if email exists and requires a verification.

    `validate_email` will set a `user` attribute on the instance
    allowing the view to send an email confirmation.
    """
    try:
        self.user = User.objects.get_by_natural_key(email)
    except User.DoesNotExist:
        msg = _('A user with this email address does not exist.')
        raise serializers.ValidationError(msg)

    if self.user.email_verified:
        msg = _('User email address is already verified.')
        raise serializers.ValidationError(msg)
    return email
[ "Validate", "if", "email", "exists", "and", "requires", "a", "verification", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/serializers.py#L159-L175
[ "def", "validate_email", "(", "self", ",", "email", ")", ":", "try", ":", "self", ".", "user", "=", "User", ".", "objects", ".", "get_by_natural_key", "(", "email", ")", "except", "User", ".", "DoesNotExist", ":", "msg", "=", "_", "(", "'A user with this email address does not exist.'", ")", "raise", "serializers", ".", "ValidationError", "(", "msg", ")", "if", "self", ".", "user", ".", "email_verified", ":", "msg", "=", "_", "(", "'User email address is already verified.'", ")", "raise", "serializers", ".", "ValidationError", "(", "msg", ")", "return", "email" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
GetAuthToken.post
Create auth token. Differs from DRF that it always creates new token but not re-using them.
user_management/api/views.py
def post(self, request):
    """Create auth token. Differs from DRF that it always creates new token
    but not re-using them."""
    serializer = self.serializer_class(data=request.data)
    if serializer.is_valid():
        user = serializer.validated_data['user']
        signals.user_logged_in.send(type(self), user=user, request=request)
        token = self.model.objects.create(user=user)
        token.update_expiry()
        return response.Response({'token': token.key})

    return response.Response(
        serializer.errors, status=status.HTTP_400_BAD_REQUEST)
[ "Create", "auth", "token", ".", "Differs", "from", "DRF", "that", "it", "always", "creates", "new", "token", "but", "not", "re", "-", "using", "them", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/views.py#L35-L47
[ "def", "post", "(", "self", ",", "request", ")", ":", "serializer", "=", "self", ".", "serializer_class", "(", "data", "=", "request", ".", "data", ")", "if", "serializer", ".", "is_valid", "(", ")", ":", "user", "=", "serializer", ".", "validated_data", "[", "'user'", "]", "signals", ".", "user_logged_in", ".", "send", "(", "type", "(", "self", ")", ",", "user", "=", "user", ",", "request", "=", "request", ")", "token", "=", "self", ".", "model", ".", "objects", ".", "create", "(", "user", "=", "user", ")", "token", ".", "update_expiry", "(", ")", "return", "response", ".", "Response", "(", "{", "'token'", ":", "token", ".", "key", "}", ")", "return", "response", ".", "Response", "(", "serializer", ".", "errors", ",", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
GetAuthToken.delete
Delete auth token when `delete` request was issued.
user_management/api/views.py
def delete(self, request, *args, **kwargs):
    """Delete auth token when `delete` request was issued."""
    # Logic repeated from DRF because one cannot easily reuse it
    auth = get_authorization_header(request).split()

    if not auth or auth[0].lower() != b'token':
        return response.Response(status=status.HTTP_400_BAD_REQUEST)

    if len(auth) == 1:
        msg = 'Invalid token header. No credentials provided.'
        return response.Response(msg, status=status.HTTP_400_BAD_REQUEST)
    elif len(auth) > 2:
        msg = 'Invalid token header. Token string should not contain spaces.'
        return response.Response(msg, status=status.HTTP_400_BAD_REQUEST)

    try:
        token = self.model.objects.get(key=auth[1])
    except self.model.DoesNotExist:
        pass
    else:
        token.delete()
        signals.user_logged_out.send(
            type(self),
            user=token.user,
            request=request,
        )
    return response.Response(status=status.HTTP_204_NO_CONTENT)
[ "Delete", "auth", "token", "when", "delete", "request", "was", "issued", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/views.py#L49-L75
[ "def", "delete", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Logic repeated from DRF because one cannot easily reuse it", "auth", "=", "get_authorization_header", "(", "request", ")", ".", "split", "(", ")", "if", "not", "auth", "or", "auth", "[", "0", "]", ".", "lower", "(", ")", "!=", "b'token'", ":", "return", "response", ".", "Response", "(", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ")", "if", "len", "(", "auth", ")", "==", "1", ":", "msg", "=", "'Invalid token header. No credentials provided.'", "return", "response", ".", "Response", "(", "msg", ",", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ")", "elif", "len", "(", "auth", ")", ">", "2", ":", "msg", "=", "'Invalid token header. Token string should not contain spaces.'", "return", "response", ".", "Response", "(", "msg", ",", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ")", "try", ":", "token", "=", "self", ".", "model", ".", "objects", ".", "get", "(", "key", "=", "auth", "[", "1", "]", ")", "except", "self", ".", "model", ".", "DoesNotExist", ":", "pass", "else", ":", "token", ".", "delete", "(", ")", "signals", ".", "user_logged_out", ".", "send", "(", "type", "(", "self", ")", ",", "user", "=", "token", ".", "user", ",", "request", "=", "request", ",", ")", "return", "response", ".", "Response", "(", "status", "=", "status", ".", "HTTP_204_NO_CONTENT", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
ResendConfirmationEmail.initial
Disallow users other than the user whose email is being reset.
user_management/api/views.py
def initial(self, request, *args, **kwargs):
    """Disallow users other than the user whose email is being reset."""
    email = request.data.get('email')
    if request.user.is_authenticated() and email != request.user.email:
        raise PermissionDenied()

    return super(ResendConfirmationEmail, self).initial(
        request,
        *args,
        **kwargs
    )
[ "Disallow", "users", "other", "than", "the", "user", "whose", "email", "is", "being", "reset", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/views.py#L277-L287
[ "def", "initial", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "email", "=", "request", ".", "data", ".", "get", "(", "'email'", ")", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "email", "!=", "request", ".", "user", ".", "email", ":", "raise", "PermissionDenied", "(", ")", "return", "super", "(", "ResendConfirmationEmail", ",", "self", ")", ".", "initial", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
ResendConfirmationEmail.post
Validate `email` and send a request to confirm it.
user_management/api/views.py
def post(self, request, *args, **kwargs):
    """Validate `email` and send a request to confirm it."""
    serializer = self.serializer_class(data=request.data)

    if not serializer.is_valid():
        return response.Response(
            serializer.errors,
            status=status.HTTP_400_BAD_REQUEST,
        )

    serializer.user.send_validation_email()
    msg = _('Email confirmation sent.')
    return response.Response(msg, status=status.HTTP_204_NO_CONTENT)
[ "Validate", "email", "and", "send", "a", "request", "to", "confirm", "it", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/views.py#L289-L301
[ "def", "post", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "serializer", "=", "self", ".", "serializer_class", "(", "data", "=", "request", ".", "data", ")", "if", "not", "serializer", ".", "is_valid", "(", ")", ":", "return", "response", ".", "Response", "(", "serializer", ".", "errors", ",", "status", "=", "status", ".", "HTTP_400_BAD_REQUEST", ",", ")", "serializer", ".", "user", ".", "send_validation_email", "(", ")", "msg", "=", "_", "(", "'Email confirmation sent.'", ")", "return", "response", ".", "Response", "(", "msg", ",", "status", "=", "status", ".", "HTTP_204_NO_CONTENT", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
UserCreationForm.clean_email
Since User.email is unique, this check is redundant, but it sets a nicer error message than the ORM. See #13147.
user_management/models/admin_forms.py
def clean_email(self):
    """
    Since User.email is unique, this check is redundant,
    but it sets a nicer error message than the ORM. See #13147.
    """
    email = self.cleaned_data['email']
    try:
        User._default_manager.get(email__iexact=email)
    except User.DoesNotExist:
        return email.lower()
    raise forms.ValidationError(self.error_messages['duplicate_email'])
[ "Since", "User", ".", "email", "is", "unique", "this", "check", "is", "redundant", "but", "it", "sets", "a", "nicer", "error", "message", "than", "the", "ORM", ".", "See", "#13147", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/models/admin_forms.py#L33-L43
[ "def", "clean_email", "(", "self", ")", ":", "email", "=", "self", ".", "cleaned_data", "[", "'email'", "]", "try", ":", "User", ".", "_default_manager", ".", "get", "(", "email__iexact", "=", "email", ")", "except", "User", ".", "DoesNotExist", ":", "return", "email", ".", "lower", "(", ")", "raise", "forms", ".", "ValidationError", "(", "self", ".", "error_messages", "[", "'duplicate_email'", "]", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
AuthToken.update_expiry
Update token's expiration datetime on every auth action.
user_management/api/models.py
def update_expiry(self, commit=True):
    """Update token's expiration datetime on every auth action."""
    self.expires = update_expiry(self.created)
    if commit:
        self.save()
[ "Update", "token", "s", "expiration", "datetime", "on", "every", "auth", "action", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/models.py#L68-L72
[ "def", "update_expiry", "(", "self", ",", "commit", "=", "True", ")", ":", "self", ".", "expires", "=", "update_expiry", "(", "self", ".", "created", ")", "if", "commit", ":", "self", ".", "save", "(", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
password_reset_email_context
Email context to reset a user password.
user_management/utils/notifications.py
def password_reset_email_context(notification):
    """Email context to reset a user password."""
    return {
        'protocol': 'https',
        'uid': notification.user.generate_uid(),
        'token': notification.user.generate_token(),
        'site': notification.site,
    }
[ "Email", "context", "to", "reset", "a", "user", "password", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/utils/notifications.py#L7-L14
[ "def", "password_reset_email_context", "(", "notification", ")", ":", "return", "{", "'protocol'", ":", "'https'", ",", "'uid'", ":", "notification", ".", "user", ".", "generate_uid", "(", ")", ",", "'token'", ":", "notification", ".", "user", ".", "generate_token", "(", ")", ",", "'site'", ":", "notification", ".", "site", ",", "}" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
email_handler
Send a notification by email.
user_management/utils/notifications.py
def email_handler(notification, email_context):
    """Send a notification by email."""
    incuna_mail.send(
        to=notification.user.email,
        subject=notification.email_subject,
        template_name=notification.text_email_template,
        html_template_name=notification.html_email_template,
        context=email_context(notification),
        headers=getattr(notification, 'headers', {}),
    )
[ "Send", "a", "notification", "by", "email", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/utils/notifications.py#L26-L35
[ "def", "email_handler", "(", "notification", ",", "email_context", ")", ":", "incuna_mail", ".", "send", "(", "to", "=", "notification", ".", "user", ".", "email", ",", "subject", "=", "notification", ".", "email_subject", ",", "template_name", "=", "notification", ".", "text_email_template", ",", "html_template_name", "=", "notification", ".", "html_email_template", ",", "context", "=", "email_context", "(", "notification", ")", ",", "headers", "=", "getattr", "(", "notification", ",", "'headers'", ",", "{", "}", ")", ",", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
password_reset_email_handler
Password reset email handler.
user_management/utils/notifications.py
def password_reset_email_handler(notification):
    """Password reset email handler."""
    base_subject = _('{domain} password reset').format(domain=notification.site.domain)
    subject = getattr(settings, 'DUM_PASSWORD_RESET_SUBJECT', base_subject)
    notification.email_subject = subject
    email_handler(notification, password_reset_email_context)
[ "Password", "reset", "email", "handler", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/utils/notifications.py#L38-L43
[ "def", "password_reset_email_handler", "(", "notification", ")", ":", "base_subject", "=", "_", "(", "'{domain} password reset'", ")", ".", "format", "(", "domain", "=", "notification", ".", "site", ".", "domain", ")", "subject", "=", "getattr", "(", "settings", ",", "'DUM_PASSWORD_RESET_SUBJECT'", ",", "base_subject", ")", "notification", ".", "email_subject", "=", "subject", "email_handler", "(", "notification", ",", "password_reset_email_context", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
validation_email_handler
Validation email handler.
user_management/utils/notifications.py
def validation_email_handler(notification):
    """Validation email handler."""
    base_subject = _('{domain} account validate').format(domain=notification.site.domain)
    subject = getattr(settings, 'DUM_VALIDATE_EMAIL_SUBJECT', base_subject)
    notification.email_subject = subject
    email_handler(notification, validation_email_context)
[ "Validation", "email", "handler", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/utils/notifications.py#L46-L51
[ "def", "validation_email_handler", "(", "notification", ")", ":", "base_subject", "=", "_", "(", "'{domain} account validate'", ")", ".", "format", "(", "domain", "=", "notification", ".", "site", ".", "domain", ")", "subject", "=", "getattr", "(", "settings", ",", "'DUM_VALIDATE_EMAIL_SUBJECT'", ",", "base_subject", ")", "notification", ".", "email_subject", "=", "subject", "email_handler", "(", "notification", ",", "validation_email_context", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
FormTokenAuthentication.authenticate
Authenticate a user from a token form field Errors thrown here will be swallowed by django-rest-framework, and it expects us to return None if authentication fails.
user_management/api/authentication.py
def authenticate(self, request):
    """
    Authenticate a user from a token form field

    Errors thrown here will be swallowed by django-rest-framework,
    and it expects us to return None if authentication fails.
    """
    try:
        key = request.data['token']
    except KeyError:
        return

    try:
        token = AuthToken.objects.get(key=key)
    except AuthToken.DoesNotExist:
        return

    return (token.user, token)
[ "Authenticate", "a", "user", "from", "a", "token", "form", "field" ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/authentication.py#L10-L27
[ "def", "authenticate", "(", "self", ",", "request", ")", ":", "try", ":", "key", "=", "request", ".", "data", "[", "'token'", "]", "except", "KeyError", ":", "return", "try", ":", "token", "=", "AuthToken", ".", "objects", ".", "get", "(", "key", "=", "key", ")", "except", "AuthToken", ".", "DoesNotExist", ":", "return", "return", "(", "token", ".", "user", ",", "token", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
TokenAuthentication.authenticate_credentials
Custom authentication to check if auth token has expired.
user_management/api/authentication.py
def authenticate_credentials(self, key):
    """Custom authentication to check if auth token has expired."""
    user, token = super(TokenAuthentication, self).authenticate_credentials(key)

    if token.expires < timezone.now():
        msg = _('Token has expired.')
        raise exceptions.AuthenticationFailed(msg)

    # Update the token's expiration date
    token.update_expiry()

    return (user, token)
[ "Custom", "authentication", "to", "check", "if", "auth", "token", "has", "expired", "." ]
incuna/django-user-management
python
https://github.com/incuna/django-user-management/blob/6784e33191d4eff624d2cf2df9ca01db4f23c9c6/user_management/api/authentication.py#L33-L44
[ "def", "authenticate_credentials", "(", "self", ",", "key", ")", ":", "user", ",", "token", "=", "super", "(", "TokenAuthentication", ",", "self", ")", ".", "authenticate_credentials", "(", "key", ")", "if", "token", ".", "expires", "<", "timezone", ".", "now", "(", ")", ":", "msg", "=", "_", "(", "'Token has expired.'", ")", "raise", "exceptions", ".", "AuthenticationFailed", "(", "msg", ")", "# Update the token's expiration date", "token", ".", "update_expiry", "(", ")", "return", "(", "user", ",", "token", ")" ]
6784e33191d4eff624d2cf2df9ca01db4f23c9c6
test
notebook_show
Displays bokeh output inside a notebook.
parambokeh/__init__.py
def notebook_show(obj, doc, comm):
    """
    Displays bokeh output inside a notebook.
    """
    target = obj.ref['id']
    load_mime = 'application/vnd.holoviews_load.v0+json'
    exec_mime = 'application/vnd.holoviews_exec.v0+json'

    # Publish plot HTML
    bokeh_script, bokeh_div, _ = bokeh.embed.notebook.notebook_content(obj, comm.id)
    publish_display_data(data={'text/html': encode_utf8(bokeh_div)})

    # Publish comm manager
    JS = '\n'.join([PYVIZ_PROXY, JupyterCommManager.js_manager])
    publish_display_data(data={load_mime: JS, 'application/javascript': JS})

    # Publish bokeh plot JS
    msg_handler = bokeh_msg_handler.format(plot_id=target)
    comm_js = comm.js_template.format(plot_id=target, comm_id=comm.id, msg_handler=msg_handler)
    bokeh_js = '\n'.join([comm_js, bokeh_script])

    # Note: extension should be altered so text/html is not required
    publish_display_data(data={exec_mime: '', 'text/html': '',
                               'application/javascript': bokeh_js},
                         metadata={exec_mime: {'id': target}})
[ "Displays", "bokeh", "output", "inside", "a", "notebook", "." ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/__init__.py#L53-L77
[ "def", "notebook_show", "(", "obj", ",", "doc", ",", "comm", ")", ":", "target", "=", "obj", ".", "ref", "[", "'id'", "]", "load_mime", "=", "'application/vnd.holoviews_load.v0+json'", "exec_mime", "=", "'application/vnd.holoviews_exec.v0+json'", "# Publish plot HTML", "bokeh_script", ",", "bokeh_div", ",", "_", "=", "bokeh", ".", "embed", ".", "notebook", ".", "notebook_content", "(", "obj", ",", "comm", ".", "id", ")", "publish_display_data", "(", "data", "=", "{", "'text/html'", ":", "encode_utf8", "(", "bokeh_div", ")", "}", ")", "# Publish comm manager", "JS", "=", "'\\n'", ".", "join", "(", "[", "PYVIZ_PROXY", ",", "JupyterCommManager", ".", "js_manager", "]", ")", "publish_display_data", "(", "data", "=", "{", "load_mime", ":", "JS", ",", "'application/javascript'", ":", "JS", "}", ")", "# Publish bokeh plot JS", "msg_handler", "=", "bokeh_msg_handler", ".", "format", "(", "plot_id", "=", "target", ")", "comm_js", "=", "comm", ".", "js_template", ".", "format", "(", "plot_id", "=", "target", ",", "comm_id", "=", "comm", ".", "id", ",", "msg_handler", "=", "msg_handler", ")", "bokeh_js", "=", "'\\n'", ".", "join", "(", "[", "comm_js", ",", "bokeh_script", "]", ")", "# Note: extension should be altered so text/html is not required", "publish_display_data", "(", "data", "=", "{", "exec_mime", ":", "''", ",", "'text/html'", ":", "''", ",", "'application/javascript'", ":", "bokeh_js", "}", ",", "metadata", "=", "{", "exec_mime", ":", "{", "'id'", ":", "target", "}", "}", ")" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
process_hv_plots
Temporary fix to patch HoloViews plot comms
parambokeh/__init__.py
def process_hv_plots(widgets, plots):
    """
    Temporary fix to patch HoloViews plot comms
    """
    bokeh_plots = []
    for plot in plots:
        if hasattr(plot, '_update_callbacks'):
            for subplot in plot.traverse(lambda x: x):
                subplot.comm = widgets.server_comm
                for cb in subplot.callbacks:
                    for c in cb.callbacks:
                        c.code = c.code.replace(plot.id, widgets.plot_id)
            plot = plot.state
        bokeh_plots.append(plot)
    return bokeh_plots
[ "Temporary", "fix", "to", "patch", "HoloViews", "plot", "comms" ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/__init__.py#L80-L94
[ "def", "process_hv_plots", "(", "widgets", ",", "plots", ")", ":", "bokeh_plots", "=", "[", "]", "for", "plot", "in", "plots", ":", "if", "hasattr", "(", "plot", ",", "'_update_callbacks'", ")", ":", "for", "subplot", "in", "plot", ".", "traverse", "(", "lambda", "x", ":", "x", ")", ":", "subplot", ".", "comm", "=", "widgets", ".", "server_comm", "for", "cb", "in", "subplot", ".", "callbacks", ":", "for", "c", "in", "cb", ".", "callbacks", ":", "c", ".", "code", "=", "c", ".", "code", ".", "replace", "(", "plot", ".", "id", ",", "widgets", ".", "plot_id", ")", "plot", "=", "plot", ".", "state", "bokeh_plots", ".", "append", "(", "plot", ")", "return", "bokeh_plots" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
Widgets._get_customjs
Returns a CustomJS callback that can be attached to send the widget state across the notebook comms.
parambokeh/__init__.py
def _get_customjs(self, change, p_name):
    """
    Returns a CustomJS callback that can be attached to send the
    widget state across the notebook comms.
    """
    data_template = "data = {{p_name: '{p_name}', value: cb_obj['{change}']}};"
    fetch_data = data_template.format(change=change, p_name=p_name)
    self_callback = JS_CALLBACK.format(comm_id=self.comm.id,
                                       timeout=self.timeout,
                                       debounce=self.debounce,
                                       plot_id=self.plot_id)
    js_callback = CustomJS(code='\n'.join([fetch_data, self_callback]))
    return js_callback
[ "Returns", "a", "CustomJS", "callback", "that", "can", "be", "attached", "to", "send", "the", "widget", "state", "across", "the", "notebook", "comms", "." ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/__init__.py#L442-L455
[ "def", "_get_customjs", "(", "self", ",", "change", ",", "p_name", ")", ":", "data_template", "=", "\"data = {{p_name: '{p_name}', value: cb_obj['{change}']}};\"", "fetch_data", "=", "data_template", ".", "format", "(", "change", "=", "change", ",", "p_name", "=", "p_name", ")", "self_callback", "=", "JS_CALLBACK", ".", "format", "(", "comm_id", "=", "self", ".", "comm", ".", "id", ",", "timeout", "=", "self", ".", "timeout", ",", "debounce", "=", "self", ".", "debounce", ",", "plot_id", "=", "self", ".", "plot_id", ")", "js_callback", "=", "CustomJS", "(", "code", "=", "'\\n'", ".", "join", "(", "[", "fetch_data", ",", "self_callback", "]", ")", ")", "return", "js_callback" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
Widgets.widget
Get widget for param_name
parambokeh/__init__.py
def widget(self, param_name):
    """Get widget for param_name"""
    if param_name not in self._widgets:
        self._widgets[param_name] = self._make_widget(param_name)
    return self._widgets[param_name]
[ "Get", "widget", "for", "param_name" ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/__init__.py#L458-L462
[ "def", "widget", "(", "self", ",", "param_name", ")", ":", "if", "param_name", "not", "in", "self", ".", "_widgets", ":", "self", ".", "_widgets", "[", "param_name", "]", "=", "self", ".", "_make_widget", "(", "param_name", ")", "return", "self", ".", "_widgets", "[", "param_name", "]" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
Widgets.widgets
Return name,widget boxes for all parameters (i.e., a property sheet)
parambokeh/__init__.py
def widgets(self):
    """Return name,widget boxes for all parameters (i.e., a property sheet)"""
    params = self.parameterized.params().items()
    key_fn = lambda x: x[1].precedence if x[1].precedence is not None else self.p.default_precedence
    sorted_precedence = sorted(params, key=key_fn)
    outputs = [k for k, p in sorted_precedence if isinstance(p, _View)]
    filtered = [(k,p) for (k,p) in sorted_precedence
                if ((p.precedence is None) or (p.precedence >= self.p.display_threshold))
                and k not in outputs]
    groups = itertools.groupby(filtered, key=key_fn)
    sorted_groups = [sorted(grp) for (k,grp) in groups]
    ordered_params = [el[0] for group in sorted_groups for el in group]

    # Format name specially
    ordered_params.pop(ordered_params.index('name'))
    widgets = [Div(text='<b>{0}</b>'.format(self.parameterized.name))]

    def format_name(pname):
        p = self.parameterized.params(pname)
        # omit name for buttons, which already show the name on the button
        name = "" if issubclass(type(p),param.Action) else pname
        return Div(text=name)

    if self.p.show_labels:
        widgets += [self.widget(pname) for pname in ordered_params]
    else:
        widgets += [self.widget(pname) for pname in ordered_params]

    if self.p.button and not (self.p.callback is None and self.p.next_n==0):
        display_button = Button(label=self.p.button_text)

        def click_cb():
            # Execute and clear changes since last button press
            try:
                self.execute(self._changed)
            except Exception as e:
                self._changed.clear()
                raise e
            self._changed.clear()

        display_button.on_click(click_cb)
        widgets.append(display_button)

    outputs = [self.widget(pname) for pname in outputs]
    return widgets, outputs
[ "Return", "name", "widget", "boxes", "for", "all", "parameters", "(", "i", ".", "e", ".", "a", "property", "sheet", ")" ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/__init__.py#L472-L515
[ "def", "widgets", "(", "self", ")", ":", "params", "=", "self", ".", "parameterized", ".", "params", "(", ")", ".", "items", "(", ")", "key_fn", "=", "lambda", "x", ":", "x", "[", "1", "]", ".", "precedence", "if", "x", "[", "1", "]", ".", "precedence", "is", "not", "None", "else", "self", ".", "p", ".", "default_precedence", "sorted_precedence", "=", "sorted", "(", "params", ",", "key", "=", "key_fn", ")", "outputs", "=", "[", "k", "for", "k", ",", "p", "in", "sorted_precedence", "if", "isinstance", "(", "p", ",", "_View", ")", "]", "filtered", "=", "[", "(", "k", ",", "p", ")", "for", "(", "k", ",", "p", ")", "in", "sorted_precedence", "if", "(", "(", "p", ".", "precedence", "is", "None", ")", "or", "(", "p", ".", "precedence", ">=", "self", ".", "p", ".", "display_threshold", ")", ")", "and", "k", "not", "in", "outputs", "]", "groups", "=", "itertools", ".", "groupby", "(", "filtered", ",", "key", "=", "key_fn", ")", "sorted_groups", "=", "[", "sorted", "(", "grp", ")", "for", "(", "k", ",", "grp", ")", "in", "groups", "]", "ordered_params", "=", "[", "el", "[", "0", "]", "for", "group", "in", "sorted_groups", "for", "el", "in", "group", "]", "# Format name specially", "ordered_params", ".", "pop", "(", "ordered_params", ".", "index", "(", "'name'", ")", ")", "widgets", "=", "[", "Div", "(", "text", "=", "'<b>{0}</b>'", ".", "format", "(", "self", ".", "parameterized", ".", "name", ")", ")", "]", "def", "format_name", "(", "pname", ")", ":", "p", "=", "self", ".", "parameterized", ".", "params", "(", "pname", ")", "# omit name for buttons, which already show the name on the button", "name", "=", "\"\"", "if", "issubclass", "(", "type", "(", "p", ")", ",", "param", ".", "Action", ")", "else", "pname", "return", "Div", "(", "text", "=", "name", ")", "if", "self", ".", "p", ".", "show_labels", ":", "widgets", "+=", "[", "self", ".", "widget", "(", "pname", ")", "for", "pname", "in", "ordered_params", "]", "else", ":", "widgets", "+=", "[", "self", ".", "widget", "(", "pname", ")", "for", "pname", "in", "ordered_params", "]", "if", "self", ".", "p", ".", "button", "and", "not", "(", "self", ".", "p", ".", "callback", "is", "None", "and", "self", ".", "p", ".", "next_n", "==", "0", ")", ":", "display_button", "=", "Button", "(", "label", "=", "self", ".", "p", ".", "button_text", ")", "def", "click_cb", "(", ")", ":", "# Execute and clear changes since last button press", "try", ":", "self", ".", "execute", "(", "self", ".", "_changed", ")", "except", "Exception", "as", "e", ":", "self", ".", "_changed", ".", "clear", "(", ")", "raise", "e", "self", ".", "_changed", ".", "clear", "(", ")", "display_button", ".", "on_click", "(", "click_cb", ")", "widgets", ".", "append", "(", "display_button", ")", "outputs", "=", "[", "self", ".", "widget", "(", "pname", ")", "for", "pname", "in", "outputs", "]", "return", "widgets", ",", "outputs" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
render_function
The default Renderer function which handles HoloViews objects.
parambokeh/view.py
def render_function(obj, view):
    """
    The default Renderer function which handles HoloViews objects.
    """
    try:
        import holoviews as hv
    except:
        hv = None

    if hv and isinstance(obj, hv.core.Dimensioned):
        renderer = hv.renderer('bokeh')
        if not view._notebook:
            renderer = renderer.instance(mode='server')
        plot = renderer.get_plot(obj, doc=view._document)
        if view._notebook:
            plot.comm = view._comm
        plot.document = view._document
        return plot.state
    return obj
[ "The", "default", "Renderer", "function", "which", "handles", "HoloViews", "objects", "." ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/view.py#L3-L21
[ "def", "render_function", "(", "obj", ",", "view", ")", ":", "try", ":", "import", "holoviews", "as", "hv", "except", ":", "hv", "=", "None", "if", "hv", "and", "isinstance", "(", "obj", ",", "hv", ".", "core", ".", "Dimensioned", ")", ":", "renderer", "=", "hv", ".", "renderer", "(", "'bokeh'", ")", "if", "not", "view", ".", "_notebook", ":", "renderer", "=", "renderer", ".", "instance", "(", "mode", "=", "'server'", ")", "plot", "=", "renderer", ".", "get_plot", "(", "obj", ",", "doc", "=", "view", ".", "_document", ")", "if", "view", ".", "_notebook", ":", "plot", ".", "comm", "=", "view", ".", "_comm", "plot", ".", "document", "=", "view", ".", "_document", "return", "plot", ".", "state", "return", "obj" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
TextWidget
Forces a parameter value to be text
parambokeh/widgets.py
def TextWidget(*args, **kw):
    """Forces a parameter value to be text"""
    kw['value'] = str(kw['value'])
    kw.pop('options', None)
    return TextInput(*args,**kw)
[ "Forces", "a", "parameter", "value", "to", "be", "text" ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/widgets.py#L16-L20
[ "def", "TextWidget", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "kw", "[", "'value'", "]", "=", "str", "(", "kw", "[", "'value'", "]", ")", "kw", ".", "pop", "(", "'options'", ",", "None", ")", "return", "TextInput", "(", "*", "args", ",", "*", "*", "kw", ")" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
named_objs
Given a list of objects, returns a dictionary mapping from string name for the object to the object itself.
parambokeh/util.py
def named_objs(objlist):
    """
    Given a list of objects, returns a dictionary mapping from
    string name for the object to the object itself.
    """
    objs = []
    for k, obj in objlist:
        if hasattr(k, '__name__'):
            k = k.__name__
        else:
            k = as_unicode(k)
        objs.append((k, obj))
    return objs
[ "Given", "a", "list", "of", "objects", "returns", "a", "dictionary", "mapping", "from", "string", "name", "for", "the", "object", "to", "the", "object", "itself", "." ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/util.py#L19-L31
[ "def", "named_objs", "(", "objlist", ")", ":", "objs", "=", "[", "]", "for", "k", ",", "obj", "in", "objlist", ":", "if", "hasattr", "(", "k", ",", "'__name__'", ")", ":", "k", "=", "k", ".", "__name__", "else", ":", "k", "=", "as_unicode", "(", "k", ")", "objs", ".", "append", "(", "(", "k", ",", "obj", ")", ")", "return", "objs" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
get_method_owner
Returns the instance owning the supplied instancemethod or the class owning the supplied classmethod.
parambokeh/util.py
def get_method_owner(meth):
    """
    Returns the instance owning the supplied instancemethod or
    the class owning the supplied classmethod.
    """
    if inspect.ismethod(meth):
        if sys.version_info < (3,0):
            return meth.im_class if meth.im_self is None else meth.im_self
        else:
            return meth.__self__
[ "Returns", "the", "instance", "owning", "the", "supplied", "instancemethod", "or", "the", "class", "owning", "the", "supplied", "classmethod", "." ]
ioam/parambokeh
python
https://github.com/ioam/parambokeh/blob/fb9744f216273c7b24e65d037b1d621c08d7fde6/parambokeh/util.py#L34-L43
[ "def", "get_method_owner", "(", "meth", ")", ":", "if", "inspect", ".", "ismethod", "(", "meth", ")", ":", "if", "sys", ".", "version_info", "<", "(", "3", ",", "0", ")", ":", "return", "meth", ".", "im_class", "if", "meth", ".", "im_self", "is", "None", "else", "meth", ".", "im_self", "else", ":", "return", "meth", ".", "__self__" ]
fb9744f216273c7b24e65d037b1d621c08d7fde6
test
AsyncHttpConnection._assign_auth_values
Take the http_auth value and split it into the attributes that carry the http auth username and password :param str|tuple http_auth: The http auth value
tornado_elasticsearch.py
def _assign_auth_values(self, http_auth):
    """Take the http_auth value and split it into the attributes that
    carry the http auth username and password

    :param str|tuple http_auth: The http auth value

    """
    if not http_auth:
        pass
    elif isinstance(http_auth, (tuple, list)):
        self._auth_user, self._auth_password = http_auth
    elif isinstance(http_auth, str):
        self._auth_user, self._auth_password = http_auth.split(':')
    else:
        raise ValueError('HTTP Auth Credentials should be str or '
                         'tuple, not %s' % type(http_auth))
[ "Take", "the", "http_auth", "value", "and", "split", "it", "into", "the", "attributes", "that", "carry", "the", "http", "auth", "username", "and", "password" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L105-L120
[ "def", "_assign_auth_values", "(", "self", ",", "http_auth", ")", ":", "if", "not", "http_auth", ":", "pass", "elif", "isinstance", "(", "http_auth", ",", "(", "tuple", ",", "list", ")", ")", ":", "self", ".", "_auth_user", ",", "self", ".", "_auth_password", "=", "http_auth", "elif", "isinstance", "(", "http_auth", ",", "str", ")", ":", "self", ".", "_auth_user", ",", "self", ".", "_auth_password", "=", "http_auth", ".", "split", "(", "':'", ")", "else", ":", "raise", "ValueError", "(", "'HTTP Auth Credentials should be str or '", "'tuple, not %s'", "%", "type", "(", "http_auth", ")", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
test
AsyncElasticsearch.ping
Returns True if the cluster is up, False otherwise.
tornado_elasticsearch.py
def ping(self, params=None):
    """ Returns True if the cluster is up, False otherwise. """
    try:
        self.transport.perform_request('HEAD', '/', params=params)
    except TransportError:
        raise gen.Return(False)
    raise gen.Return(True)
[ "Returns", "True", "if", "the", "cluster", "is", "up", "False", "otherwise", "." ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L248-L254
[ "def", "ping", "(", "self", ",", "params", "=", "None", ")", ":", "try", ":", "self", ".", "transport", ".", "perform_request", "(", "'HEAD'", ",", "'/'", ",", "params", "=", "params", ")", "except", "TransportError", ":", "raise", "gen", ".", "Return", "(", "False", ")", "raise", "gen", ".", "Return", "(", "True", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
test
AsyncElasticsearch.info
Get the basic info from the current cluster. :rtype: dict
tornado_elasticsearch.py
def info(self, params=None):
    """Get the basic info from the current cluster.

    :rtype: dict

    """
    _, data = yield self.transport.perform_request('GET', '/',
                                                   params=params)
    raise gen.Return(data)
[ "Get", "the", "basic", "info", "from", "the", "current", "cluster", "." ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L258-L266
[ "def", "info", "(", "self", ",", "params", "=", "None", ")", ":", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'GET'", ",", "'/'", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
test
AsyncElasticsearch.health
Coroutine. Queries cluster Health API. Returns a 2-tuple, where first element is request status, and second element is a dictionary with response data. :param params: dictionary of query parameters, will be handed over to the underlying :class:`~torando_elasticsearch.AsyncHTTPConnection` class for serialization
tornado_elasticsearch.py
def health(self, params=None):
    """Coroutine. Queries cluster Health API.

    Returns a 2-tuple, where first element is request status,
    and second element is a dictionary with response data.

    :param params: dictionary of query parameters, will be handed over to
        the underlying :class:`~torando_elasticsearch.AsyncHTTPConnection`
        class for serialization

    """
    status, data = yield self.transport.perform_request(
        "GET", "/_cluster/health", params=params)
    raise gen.Return((status, data))
[ "Coroutine", ".", "Queries", "cluster", "Health", "API", "." ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L269-L282
[ "def", "health", "(", "self", ",", "params", "=", "None", ")", ":", "status", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "\"GET\"", ",", "\"/_cluster/health\"", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "(", "status", ",", "data", ")", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
test
AsyncElasticsearch.create
Adds a typed JSON document in a specific index, making it searchable. Behind the scenes this method calls index(..., op_type='create') `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg id: Document ID :arg body: The document :arg consistency: Explicit write consistency setting for the operation :arg id: Specific document ID (when the POST method is used) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type
tornado_elasticsearch.py
def create(self, index, doc_type, body, id=None, params=None):
    """
    Adds a typed JSON document in a specific index, making it searchable.
    Behind the scenes this method calls index(..., op_type='create')
    `<http://elasticsearch.org/guide/reference/api/index_/>`_

    :arg index: The name of the index
    :arg doc_type: The type of the document
    :arg id: Document ID
    :arg body: The document
    :arg consistency: Explicit write consistency setting for the operation
    :arg id: Specific document ID (when the POST method is used)
    :arg parent: ID of the parent document
    :arg percolate: Percolator queries to execute while indexing the doc
    :arg refresh: Refresh the index after performing the operation
    :arg replication: Specific replication type (default: sync)
    :arg routing: Specific routing value
    :arg timeout: Explicit operation timeout
    :arg timestamp: Explicit timestamp for the document
    :arg ttl: Expiration time for the document
    :arg version: Explicit version number for concurrency control
    :arg version_type: Specific version type
    """
    result = yield self.index(index, doc_type, body, id=id, params=params,
                              op_type='create')
    raise gen.Return(result)
[ "Adds", "a", "typed", "JSON", "document", "in", "a", "specific", "index", "making", "it", "searchable", ".", "Behind", "the", "scenes", "this", "method", "calls", "index", "(", "...", "op_type", "=", "create", ")", "<http", ":", "//", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "index_", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L288-L313
[ "def", "create", "(", "self", ",", "index", ",", "doc_type", ",", "body", ",", "id", "=", "None", ",", "params", "=", "None", ")", ":", "result", "=", "yield", "self", ".", "index", "(", "index", ",", "doc_type", ",", "body", ",", "id", "=", "id", ",", "params", "=", "params", ",", "op_type", "=", "'create'", ")", "raise", "gen", ".", "Return", "(", "result", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
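A minimal usage sketch for the create() call recorded above. It assumes the tornado_elasticsearch package is installed and that AsyncElasticsearch() with no arguments reaches a local Elasticsearch node (the constructor is not part of this record); the index, type, id and document values are hypothetical. Note that the recorded body forwards op_type='create' to index(), which relies on parameter handling not visible in this record (e.g. a query-params decorator in the full source).

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def create_example():
    es = AsyncElasticsearch()  # assumed default connection, e.g. localhost:9200
    # create() wraps index(..., op_type='create'), so it should fail if a
    # document with this id already exists in blog/post
    result = yield es.create('blog', 'post', {'title': 'hello world'}, id='1')
    raise gen.Return(result)

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(create_example))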
test
AsyncElasticsearch.index
Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type
tornado_elasticsearch.py
def index(self, index, doc_type, body, id=None, params=None): """ Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type """ _, data = yield self.transport.perform_request( 'PUT' if id else 'POST', _make_path(index, doc_type, id), params=params, body=body) raise gen.Return(data)
def index(self, index, doc_type, body, id=None, params=None): """ Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type """ _, data = yield self.transport.perform_request( 'PUT' if id else 'POST', _make_path(index, doc_type, id), params=params, body=body) raise gen.Return(data)
[ "Adds", "or", "updates", "a", "typed", "JSON", "document", "in", "a", "specific", "index", "making", "it", "searchable", ".", "<http", ":", "//", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "index_", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L319-L345
[ "def", "index", "(", "self", ",", "index", ",", "doc_type", ",", "body", ",", "id", "=", "None", ",", "params", "=", "None", ")", ":", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'PUT'", "if", "id", "else", "'POST'", ",", "_make_path", "(", "index", ",", "doc_type", ",", "id", ")", ",", "params", "=", "params", ",", "body", "=", "body", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
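A companion sketch for index(): as the recorded body shows, a PUT is issued when an explicit id is passed and a POST (auto-generated id) otherwise. Connection settings, index/type names and documents are assumed as in the previous sketch.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def index_example():
    es = AsyncElasticsearch()  # assumed default connection
    # explicit id -> PUT /blog/post/1 (creates or overwrites)
    updated = yield es.index('blog', 'post', {'title': 'hello'}, id='1')
    # no id -> POST /blog/post with an auto-generated id
    created = yield es.index('blog', 'post', {'title': 'another post'})
    raise gen.Return((updated, created))

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(index_example))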
test
AsyncElasticsearch.exists
Returns a boolean indicating whether or not given document exists in Elasticsearch. `<http://elasticsearch.org/guide/reference/api/get/>`_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document (uses `_all` by default to fetch the first document matching the ID across all types) :arg parent: The ID of the parent document :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value
tornado_elasticsearch.py
def exists(self, index, id, doc_type='_all', params=None): """ Returns a boolean indicating whether or not given document exists in Elasticsearch. `<http://elasticsearch.org/guide/reference/api/get/>`_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document (uses `_all` by default to fetch the first document matching the ID across all types) :arg parent: The ID of the parent document :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value """ try: self.transport.perform_request( 'HEAD', _make_path(index, doc_type, id), params=params) except exceptions.NotFoundError: return gen.Return(False) raise gen.Return(True)
def exists(self, index, id, doc_type='_all', params=None): """ Returns a boolean indicating whether or not given document exists in Elasticsearch. `<http://elasticsearch.org/guide/reference/api/get/>`_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document (uses `_all` by default to fetch the first document matching the ID across all types) :arg parent: The ID of the parent document :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value """ try: self.transport.perform_request( 'HEAD', _make_path(index, doc_type, id), params=params) except exceptions.NotFoundError: return gen.Return(False) raise gen.Return(True)
[ "Returns", "a", "boolean", "indicating", "whether", "or", "not", "given", "document", "exists", "in", "Elasticsearch", ".", "<http", ":", "//", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "get", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L349-L372
[ "def", "exists", "(", "self", ",", "index", ",", "id", ",", "doc_type", "=", "'_all'", ",", "params", "=", "None", ")", ":", "try", ":", "self", ".", "transport", ".", "perform_request", "(", "'HEAD'", ",", "_make_path", "(", "index", ",", "doc_type", ",", "id", ")", ",", "params", "=", "params", ")", "except", "exceptions", ".", "NotFoundError", ":", "return", "gen", ".", "Return", "(", "False", ")", "raise", "gen", ".", "Return", "(", "True", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
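A sketch of how exists() is meant to be called. Note that the body recorded above neither yields the HEAD request nor raises on the not-found branch (it has return gen.Return(False) instead of raise), so against this sha the missing-document case may not resolve to False the way the docstring implies; the call shape itself is as below, with hypothetical names.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def exists_example():
    es = AsyncElasticsearch()  # assumed default connection
    found = yield es.exists('blog', '1', doc_type='post')
    raise gen.Return(found)

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(exists_example))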
test
AsyncElasticsearch.get_alias
Retrieve a specified alias. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'all', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false)
tornado_elasticsearch.py
def get_alias(self, index=None, name=None, params=None): """ Retrieve a specified alias. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'all', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ _, result = yield self.transport.perform_request( 'GET', _make_path(index, '_alias', name), params=params) raise gen.Return(result)
def get_alias(self, index=None, name=None, params=None): """ Retrieve a specified alias. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'all', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ _, result = yield self.transport.perform_request( 'GET', _make_path(index, '_alias', name), params=params) raise gen.Return(result)
[ "Retrieve", "a", "specified", "alias", ".", "<http", ":", "//", "www", ".", "elastic", ".", "co", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "current", "/", "indices", "-", "aliases", ".", "html", ">", "_", ":", "arg", "index", ":", "A", "comma", "-", "separated", "list", "of", "index", "names", "to", "filter", "aliases", ":", "arg", "name", ":", "A", "comma", "-", "separated", "list", "of", "alias", "names", "to", "return", ":", "arg", "allow_no_indices", ":", "Whether", "to", "ignore", "if", "a", "wildcard", "indices", "expression", "resolves", "into", "no", "concrete", "indices", ".", "(", "This", "includes", "_all", "string", "or", "when", "no", "indices", "have", "been", "specified", ")", ":", "arg", "expand_wildcards", ":", "Whether", "to", "expand", "wildcard", "expression", "to", "concrete", "indices", "that", "are", "open", "closed", "or", "both", ".", "default", "all", "valid", "choices", "are", ":", "open", "closed", "none", "all", ":", "arg", "ignore_unavailable", ":", "Whether", "specified", "concrete", "indices", "should", "be", "ignored", "when", "unavailable", "(", "missing", "or", "closed", ")", ":", "arg", "local", ":", "Return", "local", "information", "do", "not", "retrieve", "the", "state", "from", "master", "node", "(", "default", ":", "false", ")" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L409-L428
[ "def", "get_alias", "(", "self", ",", "index", "=", "None", ",", "name", "=", "None", ",", "params", "=", "None", ")", ":", "_", ",", "result", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'GET'", ",", "_make_path", "(", "index", ",", "'_alias'", ",", "name", ")", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "result", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
test
AsyncElasticsearch.search
Execute a search query and get back search hits that match the query. `<http://www.elasticsearch.org/guide/reference/api/search/>`_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_exclude: A list of fields to exclude from the returned _source field :arg _source_include: A list of fields to extract and return from the _source field :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) (default: OR) :arg df: The field to use as default where no field prefix is given in the query string :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg fields: A comma-separated list of fields to return as part of a hit :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg indices_boost: Comma-separated list of index boosts :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg lowercase_expanded_terms: Specify whether query terms should be lowercased :arg from_: Starting offset (default: 0) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of <field>:<direction> pairs :arg source: The URL-encoded request definition using the Query DSL (instead of using request body) :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode (default: missing) :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg timeout: Explicit operation timeout :arg version: Specify whether to return document version as part of a hit
tornado_elasticsearch.py
def search(self, index=None, doc_type=None, body=None, params=None): """ Execute a search query and get back search hits that match the query. `<http://www.elasticsearch.org/guide/reference/api/search/>`_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_exclude: A list of fields to exclude from the returned _source field :arg _source_include: A list of fields to extract and return from the _source field :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) (default: OR) :arg df: The field to use as default where no field prefix is given in the query string :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg fields: A comma-separated list of fields to return as part of a hit :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg indices_boost: Comma-separated list of index boosts :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg lowercase_expanded_terms: Specify whether query terms should be lowercased :arg from_: Starting offset (default: 0) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of <field>:<direction> pairs :arg source: The URL-encoded request definition using the Query DSL (instead of using request body) :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode (default: missing) :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg timeout: Explicit operation timeout :arg version: Specify whether to return document version as part of a hit """ # from is a reserved word so it cannot be used, use from_ instead if 'from_' in params: params['from'] = params.pop('from_') if doc_type and not index: index = '_all' _, data = yield self.transport.perform_request('GET', _make_path(index, doc_type, '_search'), params=params, body=body) raise gen.Return(data)
def search(self, index=None, doc_type=None, body=None, params=None): """ Execute a search query and get back search hits that match the query. `<http://www.elasticsearch.org/guide/reference/api/search/>`_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_exclude: A list of fields to exclude from the returned _source field :arg _source_include: A list of fields to extract and return from the _source field :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) (default: OR) :arg df: The field to use as default where no field prefix is given in the query string :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg fields: A comma-separated list of fields to return as part of a hit :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg indices_boost: Comma-separated list of index boosts :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg lowercase_expanded_terms: Specify whether query terms should be lowercased :arg from_: Starting offset (default: 0) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of <field>:<direction> pairs :arg source: The URL-encoded request definition using the Query DSL (instead of using request body) :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode (default: missing) :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg timeout: Explicit operation timeout :arg version: Specify whether to return document version as part of a hit """ # from is a reserved word so it cannot be used, use from_ instead if 'from_' in params: params['from'] = params.pop('from_') if doc_type and not index: index = '_all' _, data = yield self.transport.perform_request('GET', _make_path(index, doc_type, '_search'), params=params, body=body) raise gen.Return(data)
[ "Execute", "a", "search", "query", "and", "get", "back", "search", "hits", "that", "match", "the", "query", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "search", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L541-L609
[ "def", "search", "(", "self", ",", "index", "=", "None", ",", "doc_type", "=", "None", ",", "body", "=", "None", ",", "params", "=", "None", ")", ":", "# from is a reserved word so it cannot be used, use from_ instead", "if", "'from_'", "in", "params", ":", "params", "[", "'from'", "]", "=", "params", ".", "pop", "(", "'from_'", ")", "if", "doc_type", "and", "not", "index", ":", "index", "=", "'_all'", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'GET'", ",", "_make_path", "(", "index", ",", "doc_type", ",", "'_search'", ")", ",", "params", "=", "params", ",", "body", "=", "body", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
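A hedged search() sketch. In the body recorded above, params is probed with 'from_' in params before any None check, so an explicit dict is passed here to stay on the safe side (a decorator that normalises params may exist in the full source but is not part of this record); the index, type and query are hypothetical, and the hits structure is the standard Elasticsearch response layout.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def search_example():
    es = AsyncElasticsearch()  # assumed default connection
    body = {'query': {'match': {'title': 'hello'}}}
    # 'from_' is translated to the reserved word 'from' inside search()
    results = yield es.search(index='blog', doc_type='post', body=body,
                              params={'from_': 0, 'size': 10})
    raise gen.Return(results['hits']['hits'])

if __name__ == '__main__':
    for hit in ioloop.IOLoop.current().run_sync(search_example):
        print(hit['_id'], hit.get('_source'))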
test
AsyncElasticsearch.scroll
Scroll a search request created by specifying the scroll parameter. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search
tornado_elasticsearch.py
def scroll(self, scroll_id, scroll, params=None): """ Scroll a search request created by specifying the scroll parameter. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search """ body = { "scroll": scroll, "scroll_id": scroll_id } if params: if "scroll" in params.keys(): params.pop("scroll") if "scroll_id" in params.keys(): params.pop("scroll_id") _, data = yield self.transport.perform_request('POST', _make_path('_search', 'scroll'), body=body, params=params) raise gen.Return(data)
def scroll(self, scroll_id, scroll, params=None): """ Scroll a search request created by specifying the scroll parameter. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search """ body = { "scroll": scroll, "scroll_id": scroll_id } if params: if "scroll" in params.keys(): params.pop("scroll") if "scroll_id" in params.keys(): params.pop("scroll_id") _, data = yield self.transport.perform_request('POST', _make_path('_search', 'scroll'), body=body, params=params) raise gen.Return(data)
[ "Scroll", "a", "search", "request", "created", "by", "specifying", "the", "scroll", "parameter", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "search", "/", "scroll", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L661-L686
[ "def", "scroll", "(", "self", ",", "scroll_id", ",", "scroll", ",", "params", "=", "None", ")", ":", "body", "=", "{", "\"scroll\"", ":", "scroll", ",", "\"scroll_id\"", ":", "scroll_id", "}", "if", "params", ":", "if", "\"scroll\"", "in", "params", ".", "keys", "(", ")", ":", "params", ".", "pop", "(", "\"scroll\"", ")", "if", "\"scroll_id\"", "in", "params", ".", "keys", "(", ")", ":", "params", ".", "pop", "(", "\"scroll_id\"", ")", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'POST'", ",", "_make_path", "(", "'_search'", ",", "'scroll'", ")", ",", "body", "=", "body", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
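A sketch pairing scroll() with an initial scrolled search. The _scroll_id key is the standard Elasticsearch response field for scrolled searches and is assumed here; the '1m' scroll window and index name are illustrative.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def scroll_example():
    es = AsyncElasticsearch()  # assumed default connection
    first = yield es.search(index='blog', body={'query': {'match_all': {}}},
                            params={'scroll': '1m', 'size': 100})
    scroll_id = first['_scroll_id']
    # scroll() builds the request body from scroll_id and the scroll window
    next_page = yield es.scroll(scroll_id, '1m')
    raise gen.Return(len(next_page['hits']['hits']))

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(scroll_example))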
test
AsyncElasticsearch.clear_scroll
Clear the scroll request created by specifying the scroll parameter to search. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID or a list of scroll IDs
tornado_elasticsearch.py
def clear_scroll(self, scroll_id, params=None): """ Clear the scroll request created by specifying the scroll parameter to search. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID or a list of scroll IDs """ if not isinstance(scroll_id, list): scroll_id = [scroll_id] body = { "scroll_id": scroll_id } if params and "scroll_id" in params.keys(): params.pop("scroll_id") _, data = yield self.transport.perform_request('DELETE', _make_path('_search', 'scroll'), body=body, params=params) raise gen.Return(data)
def clear_scroll(self, scroll_id, params=None): """ Clear the scroll request created by specifying the scroll parameter to search. `<http://www.elasticsearch.org/guide/reference/api/search/scroll/>`_ :arg scroll_id: The scroll ID or a list of scroll IDs """ if not isinstance(scroll_id, list): scroll_id = [scroll_id] body = { "scroll_id": scroll_id } if params and "scroll_id" in params.keys(): params.pop("scroll_id") _, data = yield self.transport.perform_request('DELETE', _make_path('_search', 'scroll'), body=body, params=params) raise gen.Return(data)
[ "Clear", "the", "scroll", "request", "created", "by", "specifying", "the", "scroll", "parameter", "to", "search", ".", "<http", ":", "//", "www", ".", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "search", "/", "scroll", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L690-L713
[ "def", "clear_scroll", "(", "self", ",", "scroll_id", ",", "params", "=", "None", ")", ":", "if", "not", "isinstance", "(", "scroll_id", ",", "list", ")", ":", "scroll_id", "=", "[", "scroll_id", "]", "body", "=", "{", "\"scroll_id\"", ":", "scroll_id", "}", "if", "params", "and", "\"scroll_id\"", "in", "params", ".", "keys", "(", ")", ":", "params", ".", "pop", "(", "\"scroll_id\"", ")", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'DELETE'", ",", "_make_path", "(", "'_search'", ",", "'scroll'", ")", ",", "body", "=", "body", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
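clear_scroll() accepts either a single scroll ID or a list, as the isinstance check in the recorded body shows. This sketch releases a scroll such as the one opened in the previous example; the scroll_id value is hypothetical.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def clear_scroll_example(scroll_id):
    es = AsyncElasticsearch()  # assumed default connection
    # a bare string is wrapped into a one-element list before the DELETE
    result = yield es.clear_scroll(scroll_id)
    raise gen.Return(result)

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(
        lambda: clear_scroll_example('c2Nhbjsx-example-id')))  # hypothetical scroll id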
test
AsyncElasticsearch.get_mapping
Retrieve mapping definition of index or index/type. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html>`_ :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false)
tornado_elasticsearch.py
def get_mapping(self, index=None, doc_type=None, params=None): """ Retrieve mapping definition of index or index/type. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html>`_ :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ _, data = yield self.transport.perform_request('GET', _make_path(index, '_mapping', doc_type), params=params) raise gen.Return(data)
def get_mapping(self, index=None, doc_type=None, params=None): """ Retrieve mapping definition of index or index/type. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html>`_ :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ _, data = yield self.transport.perform_request('GET', _make_path(index, '_mapping', doc_type), params=params) raise gen.Return(data)
[ "Retrieve", "mapping", "definition", "of", "index", "or", "index", "/", "type", ".", "<http", ":", "//", "www", ".", "elastic", ".", "co", "/", "guide", "/", "en", "/", "elasticsearch", "/", "reference", "/", "current", "/", "indices", "-", "get", "-", "mapping", ".", "html", ">", "_", ":", "arg", "index", ":", "A", "comma", "-", "separated", "list", "of", "index", "names", ":", "arg", "doc_type", ":", "A", "comma", "-", "separated", "list", "of", "document", "types", ":", "arg", "allow_no_indices", ":", "Whether", "to", "ignore", "if", "a", "wildcard", "indices", "expression", "resolves", "into", "no", "concrete", "indices", ".", "(", "This", "includes", "_all", "string", "or", "when", "no", "indices", "have", "been", "specified", ")", ":", "arg", "expand_wildcards", ":", "Whether", "to", "expand", "wildcard", "expression", "to", "concrete", "indices", "that", "are", "open", "closed", "or", "both", ".", "default", "open", "valid", "choices", "are", ":", "open", "closed", "none", "all", ":", "arg", "ignore_unavailable", ":", "Whether", "specified", "concrete", "indices", "should", "be", "ignored", "when", "unavailable", "(", "missing", "or", "closed", ")", ":", "arg", "local", ":", "Return", "local", "information", "do", "not", "retrieve", "the", "state", "from", "master", "node", "(", "default", ":", "false", ")" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L847-L869
[ "def", "get_mapping", "(", "self", ",", "index", "=", "None", ",", "doc_type", "=", "None", ",", "params", "=", "None", ")", ":", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'GET'", ",", "_make_path", "(", "index", ",", "'_mapping'", ",", "doc_type", ")", ",", "params", "=", "params", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
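A short sketch for get_mapping(); both arguments are optional, so the call can target a single index/type pair or every index on the cluster. Names are hypothetical and the connection assumption is as before.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def mapping_example():
    es = AsyncElasticsearch()  # assumed default connection
    everything = yield es.get_mapping()              # GET /_mapping
    one_type = yield es.get_mapping('blog', 'post')  # GET /blog/_mapping/post
    raise gen.Return((everything, one_type))

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(mapping_example))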
test
AsyncElasticsearch.suggest
The suggest feature suggests similar looking terms based on a provided text by using a suggester. `<http://elasticsearch.org/guide/reference/api/search/suggest/>`_ :arg index: A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices :arg body: The request definition :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value :arg source: The URL-encoded request definition (instead of using request body)
tornado_elasticsearch.py
def suggest(self, index=None, body=None, params=None): """ The suggest feature suggests similar looking terms based on a provided text by using a suggester. `<http://elasticsearch.org/guide/reference/api/search/suggest/>`_ :arg index: A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices :arg body: The request definition :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value :arg source: The URL-encoded request definition (instead of using request body) """ _, data = yield self.transport.perform_request('POST', _make_path(index, '_suggest'), params=params, body=body) raise gen.Return(data)
def suggest(self, index=None, body=None, params=None): """ The suggest feature suggests similar looking terms based on a provided text by using a suggester. `<http://elasticsearch.org/guide/reference/api/search/suggest/>`_ :arg index: A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices :arg body: The request definition :arg ignore_indices: When performed on multiple indices, allows to ignore `missing` ones (default: none) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value :arg source: The URL-encoded request definition (instead of using request body) """ _, data = yield self.transport.perform_request('POST', _make_path(index, '_suggest'), params=params, body=body) raise gen.Return(data)
[ "The", "suggest", "feature", "suggests", "similar", "looking", "terms", "based", "on", "a", "provided", "text", "by", "using", "a", "suggester", ".", "<http", ":", "//", "elasticsearch", ".", "org", "/", "guide", "/", "reference", "/", "api", "/", "search", "/", "suggest", "/", ">", "_" ]
gmr/tornado-elasticsearch
python
https://github.com/gmr/tornado-elasticsearch/blob/fafe0de680277ce6faceb7449ded0b33822438d0/tornado_elasticsearch.py#L873-L895
[ "def", "suggest", "(", "self", ",", "index", "=", "None", ",", "body", "=", "None", ",", "params", "=", "None", ")", ":", "_", ",", "data", "=", "yield", "self", ".", "transport", ".", "perform_request", "(", "'POST'", ",", "_make_path", "(", "index", ",", "'_suggest'", ")", ",", "params", "=", "params", ",", "body", "=", "body", ")", "raise", "gen", ".", "Return", "(", "data", ")" ]
fafe0de680277ce6faceb7449ded0b33822438d0
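A sketch for suggest() using a term suggester body; the field and text are hypothetical, and the body layout follows the usual Elasticsearch _suggest request format rather than anything shown in this record.

from tornado import gen, ioloop
from tornado_elasticsearch import AsyncElasticsearch

@gen.coroutine
def suggest_example():
    es = AsyncElasticsearch()  # assumed default connection
    body = {
        'title-suggest': {
            'text': 'helo wrld',
            'term': {'field': 'title'},
        }
    }
    result = yield es.suggest(index='blog', body=body)
    raise gen.Return(result)

if __name__ == '__main__':
    print(ioloop.IOLoop.current().run_sync(suggest_example))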
test
SynoFormatHelper.bytes_to_readable
Converts bytes to a human readable format
SynologyDSM/SynologyDSM.py
def bytes_to_readable(num): """Converts bytes to a human readable format""" if num < 512: return "0 Kb" elif num < 1024: return "1 Kb" for unit in ['', 'Kb', 'Mb', 'Gb', 'Tb', 'Pb', 'Eb', 'Zb']: if abs(num) < 1024.0: return "%3.1f%s" % (num, unit) num /= 1024.0 return "%.1f%s" % (num, 'Yb')
def bytes_to_readable(num): """Converts bytes to a human readable format""" if num < 512: return "0 Kb" elif num < 1024: return "1 Kb" for unit in ['', 'Kb', 'Mb', 'Gb', 'Tb', 'Pb', 'Eb', 'Zb']: if abs(num) < 1024.0: return "%3.1f%s" % (num, unit) num /= 1024.0 return "%.1f%s" % (num, 'Yb')
[ "Converts", "bytes", "to", "a", "human", "readable", "format" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L12-L23
[ "def", "bytes_to_readable", "(", "num", ")", ":", "if", "num", "<", "512", ":", "return", "\"0 Kb\"", "elif", "num", "<", "1024", ":", "return", "\"1 Kb\"", "for", "unit", "in", "[", "''", ",", "'Kb'", ",", "'Mb'", ",", "'Gb'", ",", "'Tb'", ",", "'Pb'", ",", "'Eb'", ",", "'Zb'", "]", ":", "if", "abs", "(", "num", ")", "<", "1024.0", ":", "return", "\"%3.1f%s\"", "%", "(", "num", ",", "unit", ")", "num", "/=", "1024.0", "return", "\"%.1f%s\"", "%", "(", "num", ",", "'Yb'", ")" ]
a5446a052fc91a38f7589803dc7a654180db2566
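A quick check of the conversion behaviour recorded above. The import path is inferred from the recorded file path (SynologyDSM/SynologyDSM.py), and the helper is assumed to be callable directly on the class, as the other entries from this file do.

from SynologyDSM.SynologyDSM import SynoFormatHelper  # path inferred from this record

# expected values follow directly from the branches shown above
print(SynoFormatHelper.bytes_to_readable(100))            # "0 Kb"  (below 512 bytes)
print(SynoFormatHelper.bytes_to_readable(800))            # "1 Kb"  (512-1023 bytes)
print(SynoFormatHelper.bytes_to_readable(4 * 1024 ** 2))  # "4.0Mb"
print(SynoFormatHelper.bytes_to_readable(8 * 1024 ** 3))  # "8.0Gb"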
test
SynoUtilization.cpu_total_load
Total CPU load for Synology DSM
SynologyDSM/SynologyDSM.py
def cpu_total_load(self): """Total CPU load for Synology DSM""" system_load = self.cpu_system_load user_load = self.cpu_user_load other_load = self.cpu_other_load if system_load is not None and \ user_load is not None and \ other_load is not None: return system_load + user_load + other_load
def cpu_total_load(self): """Total CPU load for Synology DSM""" system_load = self.cpu_system_load user_load = self.cpu_user_load other_load = self.cpu_other_load if system_load is not None and \ user_load is not None and \ other_load is not None: return system_load + user_load + other_load
[ "Total", "CPU", "load", "for", "Synology", "DSM" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L77-L86
[ "def", "cpu_total_load", "(", "self", ")", ":", "system_load", "=", "self", ".", "cpu_system_load", "user_load", "=", "self", ".", "cpu_user_load", "other_load", "=", "self", ".", "cpu_other_load", "if", "system_load", "is", "not", "None", "and", "user_load", "is", "not", "None", "and", "other_load", "is", "not", "None", ":", "return", "system_load", "+", "user_load", "+", "other_load" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoUtilization.memory_size
Total Memory Size of Synology DSM
SynologyDSM/SynologyDSM.py
def memory_size(self, human_readable=True): """Total Memory Size of Synology DSM""" if self._data is not None: # Memory is actually returned in KB's so multiply before converting return_data = int(self._data["memory"]["memory_size"]) * 1024 if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
def memory_size(self, human_readable=True): """Total Memory Size of Synology DSM""" if self._data is not None: # Memory is actually returned in KB's so multiply before converting return_data = int(self._data["memory"]["memory_size"]) * 1024 if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
[ "Total", "Memory", "Size", "of", "Synology", "DSM" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L112-L121
[ "def", "memory_size", "(", "self", ",", "human_readable", "=", "True", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "# Memory is actually returned in KB's so multiply before converting\r", "return_data", "=", "int", "(", "self", ".", "_data", "[", "\"memory\"", "]", "[", "\"memory_size\"", "]", ")", "*", "1024", "if", "human_readable", ":", "return", "SynoFormatHelper", ".", "bytes_to_readable", "(", "return_data", ")", "else", ":", "return", "return_data" ]
a5446a052fc91a38f7589803dc7a654180db2566
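A minimal sketch of the arithmetic inside memory_size(): as the inline comment above notes, the DSM utilization payload reports memory_size in kilobytes, so the property multiplies by 1024 before formatting. The sample figure is invented and the import assumption is the same as in the previous sketch.

from SynologyDSM.SynologyDSM import SynoFormatHelper  # path inferred from this record

memory_size_kb = 8388608            # hypothetical value from the "memory" payload (8 GB)
memory_size_bytes = memory_size_kb * 1024
print(memory_size_bytes)                                      # 8589934592
print(SynoFormatHelper.bytes_to_readable(memory_size_bytes))  # "8.0Gb"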
test
SynoUtilization._get_network
Function to get specific network (eth0, total, etc)
SynologyDSM/SynologyDSM.py
def _get_network(self, network_id): """Function to get specific network (eth0, total, etc)""" if self._data is not None: for network in self._data["network"]: if network["device"] == network_id: return network
def _get_network(self, network_id): """Function to get specific network (eth0, total, etc)""" if self._data is not None: for network in self._data["network"]: if network["device"] == network_id: return network
[ "Function", "to", "get", "specific", "network", "(", "eth0", "total", "etc", ")" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L178-L183
[ "def", "_get_network", "(", "self", ",", "network_id", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "for", "network", "in", "self", ".", "_data", "[", "\"network\"", "]", ":", "if", "network", "[", "\"device\"", "]", "==", "network_id", ":", "return", "network" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoUtilization.network_up
Total upload speed being used
SynologyDSM/SynologyDSM.py
def network_up(self, human_readable=True): """Total upload speed being used""" network = self._get_network("total") if network is not None: return_data = int(network["tx"]) if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
def network_up(self, human_readable=True): """Total upload speed being used""" network = self._get_network("total") if network is not None: return_data = int(network["tx"]) if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
[ "Total", "upload", "speed", "being", "used" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L185-L194
[ "def", "network_up", "(", "self", ",", "human_readable", "=", "True", ")", ":", "network", "=", "self", ".", "_get_network", "(", "\"total\"", ")", "if", "network", "is", "not", "None", ":", "return_data", "=", "int", "(", "network", "[", "\"tx\"", "]", ")", "if", "human_readable", ":", "return", "SynoFormatHelper", ".", "bytes_to_readable", "(", "return_data", ")", "else", ":", "return", "return_data" ]
a5446a052fc91a38f7589803dc7a654180db2566
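A standalone sketch of the lookup that network_up() performs via _get_network(), using a hand-written payload shaped like the "network" list those methods iterate over; real payloads come from the DSM utilization API, and the device entries and numbers here are invented.

# hypothetical utilization payload, shaped as the recorded methods expect
data = {
    "network": [
        {"device": "total", "tx": 1536000, "rx": 4096000},
        {"device": "eth0", "tx": 1536000, "rx": 4096000},
    ]
}

def get_network(data, network_id):
    # mirrors SynoUtilization._get_network(): first entry matching the device id
    for network in data["network"]:
        if network["device"] == network_id:
            return network

total = get_network(data, "total")
print(int(total["tx"]))  # upload speed in bytes/s, before bytes_to_readable() formatting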
test
SynoStorage.volumes
Returns all available volumes
SynologyDSM/SynologyDSM.py
def volumes(self): """Returns all available volumes""" if self._data is not None: volumes = [] for volume in self._data["volumes"]: volumes.append(volume["id"]) return volumes
def volumes(self): """Returns all available volumes""" if self._data is not None: volumes = [] for volume in self._data["volumes"]: volumes.append(volume["id"]) return volumes
[ "Returns", "all", "available", "volumes" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L220-L226
[ "def", "volumes", "(", "self", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "volumes", "=", "[", "]", "for", "volume", "in", "self", ".", "_data", "[", "\"volumes\"", "]", ":", "volumes", ".", "append", "(", "volume", "[", "\"id\"", "]", ")", "return", "volumes" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage._get_volume
Returns a specific volume
SynologyDSM/SynologyDSM.py
def _get_volume(self, volume_id): """Returns a specific volume""" if self._data is not None: for volume in self._data["volumes"]: if volume["id"] == volume_id: return volume
def _get_volume(self, volume_id): """Returns a specific volume""" if self._data is not None: for volume in self._data["volumes"]: if volume["id"] == volume_id: return volume
[ "Returns", "a", "specific", "volume" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L228-L233
[ "def", "_get_volume", "(", "self", ",", "volume_id", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "for", "volume", "in", "self", ".", "_data", "[", "\"volumes\"", "]", ":", "if", "volume", "[", "\"id\"", "]", "==", "volume_id", ":", "return", "volume" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage.volume_size_total
Total size of volume
SynologyDSM/SynologyDSM.py
def volume_size_total(self, volume, human_readable=True): """Total size of volume""" volume = self._get_volume(volume) if volume is not None: return_data = int(volume["size"]["total"]) if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
def volume_size_total(self, volume, human_readable=True): """Total size of volume""" volume = self._get_volume(volume) if volume is not None: return_data = int(volume["size"]["total"]) if human_readable: return SynoFormatHelper.bytes_to_readable( return_data) else: return return_data
[ "Total", "size", "of", "volume" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L247-L256
[ "def", "volume_size_total", "(", "self", ",", "volume", ",", "human_readable", "=", "True", ")", ":", "volume", "=", "self", ".", "_get_volume", "(", "volume", ")", "if", "volume", "is", "not", "None", ":", "return_data", "=", "int", "(", "volume", "[", "\"size\"", "]", "[", "\"total\"", "]", ")", "if", "human_readable", ":", "return", "SynoFormatHelper", ".", "bytes_to_readable", "(", "return_data", ")", "else", ":", "return", "return_data" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage.volume_percentage_used
Total used size in percentage for volume
SynologyDSM/SynologyDSM.py
def volume_percentage_used(self, volume): """Total used size in percentage for volume""" volume = self._get_volume(volume) if volume is not None: total = int(volume["size"]["total"]) used = int(volume["size"]["used"]) if used is not None and used > 0 and \ total is not None and total > 0: return round((float(used) / float(total)) * 100.0, 1)
def volume_percentage_used(self, volume): """Total used size in percentage for volume""" volume = self._get_volume(volume) if volume is not None: total = int(volume["size"]["total"]) used = int(volume["size"]["used"]) if used is not None and used > 0 and \ total is not None and total > 0: return round((float(used) / float(total)) * 100.0, 1)
[ "Total", "used", "size", "in", "percentage", "for", "volume" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L269-L278
[ "def", "volume_percentage_used", "(", "self", ",", "volume", ")", ":", "volume", "=", "self", ".", "_get_volume", "(", "volume", ")", "if", "volume", "is", "not", "None", ":", "total", "=", "int", "(", "volume", "[", "\"size\"", "]", "[", "\"total\"", "]", ")", "used", "=", "int", "(", "volume", "[", "\"size\"", "]", "[", "\"used\"", "]", ")", "if", "used", "is", "not", "None", "and", "used", ">", "0", "and", "total", "is", "not", "None", "and", "total", ">", "0", ":", "return", "round", "(", "(", "float", "(", "used", ")", "/", "float", "(", "total", ")", ")", "*", "100.0", ",", "1", ")" ]
a5446a052fc91a38f7589803dc7a654180db2566
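A standalone sketch of the percentage computation above, with an invented size payload shaped like the "volumes" entries the class iterates over.

# hypothetical volume entry, shaped like the recorded methods expect
volume = {"id": "volume_1",
          "size": {"total": "3995922128896", "used": "2516582400000"}}

total = int(volume["size"]["total"])
used = int(volume["size"]["used"])
if used > 0 and total > 0:
    # same rounding as volume_percentage_used(): one decimal place
    print(round((float(used) / float(total)) * 100.0, 1))  # -> 63.0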
test
SynoStorage.volume_disk_temp_avg
Average temperature of all disks making up the volume
SynologyDSM/SynologyDSM.py
def volume_disk_temp_avg(self, volume): """Average temperature of all disks making up the volume""" volume = self._get_volume(volume) if volume is not None: vol_disks = volume["disks"] if vol_disks is not None: total_temp = 0 total_disks = 0 for vol_disk in vol_disks: disk_temp = self.disk_temp(vol_disk) if disk_temp is not None: total_disks += 1 total_temp += disk_temp if total_temp > 0 and total_disks > 0: return round(total_temp / total_disks, 0)
def volume_disk_temp_avg(self, volume): """Average temperature of all disks making up the volume""" volume = self._get_volume(volume) if volume is not None: vol_disks = volume["disks"] if vol_disks is not None: total_temp = 0 total_disks = 0 for vol_disk in vol_disks: disk_temp = self.disk_temp(vol_disk) if disk_temp is not None: total_disks += 1 total_temp += disk_temp if total_temp > 0 and total_disks > 0: return round(total_temp / total_disks, 0)
[ "Average", "temperature", "of", "all", "disks", "making", "up", "the", "volume" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L280-L296
[ "def", "volume_disk_temp_avg", "(", "self", ",", "volume", ")", ":", "volume", "=", "self", ".", "_get_volume", "(", "volume", ")", "if", "volume", "is", "not", "None", ":", "vol_disks", "=", "volume", "[", "\"disks\"", "]", "if", "vol_disks", "is", "not", "None", ":", "total_temp", "=", "0", "total_disks", "=", "0", "for", "vol_disk", "in", "vol_disks", ":", "disk_temp", "=", "self", ".", "disk_temp", "(", "vol_disk", ")", "if", "disk_temp", "is", "not", "None", ":", "total_disks", "+=", "1", "total_temp", "+=", "disk_temp", "if", "total_temp", ">", "0", "and", "total_disks", ">", "0", ":", "return", "round", "(", "total_temp", "/", "total_disks", ",", "0", ")" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage.volume_disk_temp_max
Maximum temperature of all disks making up the volume
SynologyDSM/SynologyDSM.py
def volume_disk_temp_max(self, volume): """Maximum temperature of all disks making up the volume""" volume = self._get_volume(volume) if volume is not None: vol_disks = volume["disks"] if vol_disks is not None: max_temp = 0 for vol_disk in vol_disks: disk_temp = self.disk_temp(vol_disk) if disk_temp is not None and disk_temp > max_temp: max_temp = disk_temp return max_temp
def volume_disk_temp_max(self, volume): """Maximum temperature of all disks making up the volume""" volume = self._get_volume(volume) if volume is not None: vol_disks = volume["disks"] if vol_disks is not None: max_temp = 0 for vol_disk in vol_disks: disk_temp = self.disk_temp(vol_disk) if disk_temp is not None and disk_temp > max_temp: max_temp = disk_temp return max_temp
[ "Maximum", "temperature", "of", "all", "disks", "making", "up", "the", "volume" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L298-L311
[ "def", "volume_disk_temp_max", "(", "self", ",", "volume", ")", ":", "volume", "=", "self", ".", "_get_volume", "(", "volume", ")", "if", "volume", "is", "not", "None", ":", "vol_disks", "=", "volume", "[", "\"disks\"", "]", "if", "vol_disks", "is", "not", "None", ":", "max_temp", "=", "0", "for", "vol_disk", "in", "vol_disks", ":", "disk_temp", "=", "self", ".", "disk_temp", "(", "vol_disk", ")", "if", "disk_temp", "is", "not", "None", "and", "disk_temp", ">", "max_temp", ":", "max_temp", "=", "disk_temp", "return", "max_temp" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage.disks
Returns all available (internal) disks
SynologyDSM/SynologyDSM.py
def disks(self): """Returns all available (internal) disks""" if self._data is not None: disks = [] for disk in self._data["disks"]: disks.append(disk["id"]) return disks
def disks(self): """Returns all available (internal) disks""" if self._data is not None: disks = [] for disk in self._data["disks"]: disks.append(disk["id"]) return disks
[ "Returns", "all", "available", "(", "internal", ")", "disks" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L314-L320
[ "def", "disks", "(", "self", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "disks", "=", "[", "]", "for", "disk", "in", "self", ".", "_data", "[", "\"disks\"", "]", ":", "disks", ".", "append", "(", "disk", "[", "\"id\"", "]", ")", "return", "disks" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynoStorage._get_disk
Returns a specific disk
SynologyDSM/SynologyDSM.py
def _get_disk(self, disk_id): """Returns a specific disk""" if self._data is not None: for disk in self._data["disks"]: if disk["id"] == disk_id: return disk
def _get_disk(self, disk_id): """Returns a specific disk""" if self._data is not None: for disk in self._data["disks"]: if disk["id"] == disk_id: return disk
[ "Returns", "a", "specific", "disk" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L322-L327
[ "def", "_get_disk", "(", "self", ",", "disk_id", ")", ":", "if", "self", ".", "_data", "is", "not", "None", ":", "for", "disk", "in", "self", ".", "_data", "[", "\"disks\"", "]", ":", "if", "disk", "[", "\"id\"", "]", "==", "disk_id", ":", "return", "disk" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynologyDSM._login
Build and execute login request
SynologyDSM/SynologyDSM.py
def _login(self): """Build and execute login request""" api_path = "%s/auth.cgi?api=SYNO.API.Auth&version=2" % ( self.base_url, ) login_path = "method=login&%s" % (self._encode_credentials()) url = "%s&%s&session=Core&format=cookie" % ( api_path, login_path) result = self._execute_get_url(url, False) # Parse Result if valid if result is not None: self.access_token = result["data"]["sid"] self._debuglog("Authentication Succesfull, token: " + str(self.access_token)) return True else: self._debuglog("Authentication Failed") return False
def _login(self): """Build and execute login request""" api_path = "%s/auth.cgi?api=SYNO.API.Auth&version=2" % ( self.base_url, ) login_path = "method=login&%s" % (self._encode_credentials()) url = "%s&%s&session=Core&format=cookie" % ( api_path, login_path) result = self._execute_get_url(url, False) # Parse Result if valid if result is not None: self.access_token = result["data"]["sid"] self._debuglog("Authentication Succesfull, token: " + str(self.access_token)) return True else: self._debuglog("Authentication Failed") return False
[ "Build", "and", "execute", "login", "request" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L417-L438
[ "def", "_login", "(", "self", ")", ":", "api_path", "=", "\"%s/auth.cgi?api=SYNO.API.Auth&version=2\"", "%", "(", "self", ".", "base_url", ",", ")", "login_path", "=", "\"method=login&%s\"", "%", "(", "self", ".", "_encode_credentials", "(", ")", ")", "url", "=", "\"%s&%s&session=Core&format=cookie\"", "%", "(", "api_path", ",", "login_path", ")", "result", "=", "self", ".", "_execute_get_url", "(", "url", ",", "False", ")", "# Parse Result if valid\r", "if", "result", "is", "not", "None", ":", "self", ".", "access_token", "=", "result", "[", "\"data\"", "]", "[", "\"sid\"", "]", "self", ".", "_debuglog", "(", "\"Authentication Succesfull, token: \"", "+", "str", "(", "self", ".", "access_token", ")", ")", "return", "True", "else", ":", "self", ".", "_debuglog", "(", "\"Authentication Failed\"", ")", "return", "False" ]
a5446a052fc91a38f7589803dc7a654180db2566
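A sketch of the login URL that _login() assembles. The base_url value is invented, and _encode_credentials() is referenced above but not included in these records, so its output is stood in for by a plain query-string fragment; the real call then reads data.sid out of the JSON response and stores it as access_token.

# hypothetical values; the credential encoding helper is not part of this record
base_url = "https://192.168.1.10:5001/webapi"
encoded_credentials = "account=admin&passwd=secret"

api_path = "%s/auth.cgi?api=SYNO.API.Auth&version=2" % (base_url,)
login_path = "method=login&%s" % (encoded_credentials,)
url = "%s&%s&session=Core&format=cookie" % (api_path, login_path)
print(url)
# https://192.168.1.10:5001/webapi/auth.cgi?api=SYNO.API.Auth&version=2&method=login&account=admin&passwd=secret&session=Core&format=cookie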
test
SynologyDSM._get_url
Function to handle sessions for a GET request
SynologyDSM/SynologyDSM.py
def _get_url(self, url, retry_on_error=True): """Function to handle sessions for a GET request""" # Check if we failed to request the url or need to login if self.access_token is None or \ self._session is None or \ self._session_error: # Clear Access Token en reset session error self.access_token = None self._session_error = False # First Reset the session if self._session is not None: self._session = None self._debuglog("Creating New Session") self._session = requests.Session() # disable SSL certificate verification if self._use_https: self._session.verify = False # We Created a new Session so login if self._login() is False: self._session_error = True self._debuglog("Login Failed, unable to process request") return # Now request the data response = self._execute_get_url(url) if (self._session_error or response is None) and retry_on_error: self._debuglog("Error occured, retrying...") self._get_url(url, False) return response
def _get_url(self, url, retry_on_error=True): """Function to handle sessions for a GET request""" # Check if we failed to request the url or need to login if self.access_token is None or \ self._session is None or \ self._session_error: # Clear Access Token en reset session error self.access_token = None self._session_error = False # First Reset the session if self._session is not None: self._session = None self._debuglog("Creating New Session") self._session = requests.Session() # disable SSL certificate verification if self._use_https: self._session.verify = False # We Created a new Session so login if self._login() is False: self._session_error = True self._debuglog("Login Failed, unable to process request") return # Now request the data response = self._execute_get_url(url) if (self._session_error or response is None) and retry_on_error: self._debuglog("Error occured, retrying...") self._get_url(url, False) return response
[ "Function", "to", "handle", "sessions", "for", "a", "GET", "request" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L440-L473
[ "def", "_get_url", "(", "self", ",", "url", ",", "retry_on_error", "=", "True", ")", ":", "# Check if we failed to request the url or need to login\r", "if", "self", ".", "access_token", "is", "None", "or", "self", ".", "_session", "is", "None", "or", "self", ".", "_session_error", ":", "# Clear Access Token en reset session error\r", "self", ".", "access_token", "=", "None", "self", ".", "_session_error", "=", "False", "# First Reset the session\r", "if", "self", ".", "_session", "is", "not", "None", ":", "self", ".", "_session", "=", "None", "self", ".", "_debuglog", "(", "\"Creating New Session\"", ")", "self", ".", "_session", "=", "requests", ".", "Session", "(", ")", "# disable SSL certificate verification\r", "if", "self", ".", "_use_https", ":", "self", ".", "_session", ".", "verify", "=", "False", "# We Created a new Session so login\r", "if", "self", ".", "_login", "(", ")", "is", "False", ":", "self", ".", "_session_error", "=", "True", "self", ".", "_debuglog", "(", "\"Login Failed, unable to process request\"", ")", "return", "# Now request the data\r", "response", "=", "self", ".", "_execute_get_url", "(", "url", ")", "if", "(", "self", ".", "_session_error", "or", "response", "is", "None", ")", "and", "retry_on_error", ":", "self", ".", "_debuglog", "(", "\"Error occured, retrying...\"", ")", "self", ".", "_get_url", "(", "url", ",", "False", ")", "return", "response" ]
a5446a052fc91a38f7589803dc7a654180db2566
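_get_url lazily (re)creates the requests session, logs in, and retries the call once when the session has gone stale. A minimal standalone sketch of that pattern, with a hypothetical client class and a plain GET standing in for the real Synology login:

import requests

class LazySessionClient(object):
    """Illustrative only: build the session on first use and retry once."""

    def __init__(self, login_url, verify_ssl=True):
        self.login_url = login_url
        self.verify_ssl = verify_ssl
        self._session = None
        self._session_error = False

    def _ensure_session(self):
        # Recreate the session whenever it is missing or flagged as broken.
        if self._session is None or self._session_error:
            self._session = requests.Session()
            self._session.verify = self.verify_ssl
            self._session_error = False
            self._session.get(self.login_url)   # stands in for the real login call

    def get(self, url, retry_on_error=True):
        self._ensure_session()
        response = self._session.get(url)
        if response.status_code != 200:
            self._session_error = True
            if retry_on_error:
                return self.get(url, retry_on_error=False)  # exactly one retry
        return response

Note that the original method drops the result of its retry (self._get_url(url, False) is not returned), so a failed-then-successful retry still yields the first response; the sketch returns the retried response instead.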
test
SynologyDSM._execute_get_url
Function to execute and handle a GET request
SynologyDSM/SynologyDSM.py
def _execute_get_url(self, request_url, append_sid=True): """Function to execute and handle a GET request""" # Prepare Request self._debuglog("Requesting URL: '" + request_url + "'") if append_sid: self._debuglog("Appending access_token (SID: " + self.access_token + ") to url") request_url = "%s&_sid=%s" % ( request_url, self.access_token) # Execute Request try: resp = self._session.get(request_url) self._debuglog("Request executed: " + str(resp.status_code)) if resp.status_code == 200: # We got a response json_data = json.loads(resp.text) if json_data["success"]: self._debuglog("Succesfull returning data") self._debuglog(str(json_data)) return json_data else: if json_data["error"]["code"] in {105, 106, 107, 119}: self._debuglog("Session error: " + str(json_data["error"]["code"])) self._session_error = True else: self._debuglog("Failed: " + resp.text) else: # We got a 404 or 401 return None #pylint: disable=bare-except except: return None
def _execute_get_url(self, request_url, append_sid=True): """Function to execute and handle a GET request""" # Prepare Request self._debuglog("Requesting URL: '" + request_url + "'") if append_sid: self._debuglog("Appending access_token (SID: " + self.access_token + ") to url") request_url = "%s&_sid=%s" % ( request_url, self.access_token) # Execute Request try: resp = self._session.get(request_url) self._debuglog("Request executed: " + str(resp.status_code)) if resp.status_code == 200: # We got a response json_data = json.loads(resp.text) if json_data["success"]: self._debuglog("Succesfull returning data") self._debuglog(str(json_data)) return json_data else: if json_data["error"]["code"] in {105, 106, 107, 119}: self._debuglog("Session error: " + str(json_data["error"]["code"])) self._session_error = True else: self._debuglog("Failed: " + resp.text) else: # We got a 404 or 401 return None #pylint: disable=bare-except except: return None
[ "Function", "to", "execute", "and", "handle", "a", "GET", "request" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L475-L509
[ "def", "_execute_get_url", "(", "self", ",", "request_url", ",", "append_sid", "=", "True", ")", ":", "# Prepare Request\r", "self", ".", "_debuglog", "(", "\"Requesting URL: '\"", "+", "request_url", "+", "\"'\"", ")", "if", "append_sid", ":", "self", ".", "_debuglog", "(", "\"Appending access_token (SID: \"", "+", "self", ".", "access_token", "+", "\") to url\"", ")", "request_url", "=", "\"%s&_sid=%s\"", "%", "(", "request_url", ",", "self", ".", "access_token", ")", "# Execute Request\r", "try", ":", "resp", "=", "self", ".", "_session", ".", "get", "(", "request_url", ")", "self", ".", "_debuglog", "(", "\"Request executed: \"", "+", "str", "(", "resp", ".", "status_code", ")", ")", "if", "resp", ".", "status_code", "==", "200", ":", "# We got a response\r", "json_data", "=", "json", ".", "loads", "(", "resp", ".", "text", ")", "if", "json_data", "[", "\"success\"", "]", ":", "self", ".", "_debuglog", "(", "\"Succesfull returning data\"", ")", "self", ".", "_debuglog", "(", "str", "(", "json_data", ")", ")", "return", "json_data", "else", ":", "if", "json_data", "[", "\"error\"", "]", "[", "\"code\"", "]", "in", "{", "105", ",", "106", ",", "107", ",", "119", "}", ":", "self", ".", "_debuglog", "(", "\"Session error: \"", "+", "str", "(", "json_data", "[", "\"error\"", "]", "[", "\"code\"", "]", ")", ")", "self", ".", "_session_error", "=", "True", "else", ":", "self", ".", "_debuglog", "(", "\"Failed: \"", "+", "resp", ".", "text", ")", "else", ":", "# We got a 404 or 401\r", "return", "None", "#pylint: disable=bare-except\r", "except", ":", "return", "None" ]
a5446a052fc91a38f7589803dc7a654180db2566
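_execute_get_url relies on the DSM JSON envelope: every response carries a success flag and, on failure, an error.code, with 105/106/107/119 meaning the session must be re-established. A small self-contained sketch of that envelope handling:

import json

SESSION_ERROR_CODES = {105, 106, 107, 119}   # same codes the client checks

def parse_dsm_envelope(raw_text):
    """Return (payload, needs_relogin) for a DSM-style JSON response."""
    data = json.loads(raw_text)
    if data.get("success"):
        return data, False
    code = data.get("error", {}).get("code")
    return None, code in SESSION_ERROR_CODES

# An expired-session response triggers the re-login path:
payload, relogin = parse_dsm_envelope('{"success": false, "error": {"code": 106}}')
assert payload is None and relogin is True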
test
SynologyDSM.update
Updates the various instanced modules
SynologyDSM/SynologyDSM.py
def update(self): """Updates the various instanced modules""" if self._utilisation is not None: api = "SYNO.Core.System.Utilization" url = "%s/entry.cgi?api=%s&version=1&method=get&_sid=%s" % ( self.base_url, api, self.access_token) self._utilisation.update(self._get_url(url)) if self._storage is not None: api = "SYNO.Storage.CGI.Storage" url = "%s/entry.cgi?api=%s&version=1&method=load_info&_sid=%s" % ( self.base_url, api, self.access_token) self._storage.update(self._get_url(url))
def update(self): """Updates the various instanced modules""" if self._utilisation is not None: api = "SYNO.Core.System.Utilization" url = "%s/entry.cgi?api=%s&version=1&method=get&_sid=%s" % ( self.base_url, api, self.access_token) self._utilisation.update(self._get_url(url)) if self._storage is not None: api = "SYNO.Storage.CGI.Storage" url = "%s/entry.cgi?api=%s&version=1&method=load_info&_sid=%s" % ( self.base_url, api, self.access_token) self._storage.update(self._get_url(url))
[ "Updates", "the", "various", "instanced", "modules" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L512-L527
[ "def", "update", "(", "self", ")", ":", "if", "self", ".", "_utilisation", "is", "not", "None", ":", "api", "=", "\"SYNO.Core.System.Utilization\"", "url", "=", "\"%s/entry.cgi?api=%s&version=1&method=get&_sid=%s\"", "%", "(", "self", ".", "base_url", ",", "api", ",", "self", ".", "access_token", ")", "self", ".", "_utilisation", ".", "update", "(", "self", ".", "_get_url", "(", "url", ")", ")", "if", "self", ".", "_storage", "is", "not", "None", ":", "api", "=", "\"SYNO.Storage.CGI.Storage\"", "url", "=", "\"%s/entry.cgi?api=%s&version=1&method=load_info&_sid=%s\"", "%", "(", "self", ".", "base_url", ",", "api", ",", "self", ".", "access_token", ")", "self", ".", "_storage", ".", "update", "(", "self", ".", "_get_url", "(", "url", ")", ")" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynologyDSM.utilisation
Getter for various Utilisation variables
SynologyDSM/SynologyDSM.py
def utilisation(self): """Getter for various Utilisation variables""" if self._utilisation is None: api = "SYNO.Core.System.Utilization" url = "%s/entry.cgi?api=%s&version=1&method=get" % ( self.base_url, api) self._utilisation = SynoUtilization(self._get_url(url)) return self._utilisation
def utilisation(self): """Getter for various Utilisation variables""" if self._utilisation is None: api = "SYNO.Core.System.Utilization" url = "%s/entry.cgi?api=%s&version=1&method=get" % ( self.base_url, api) self._utilisation = SynoUtilization(self._get_url(url)) return self._utilisation
[ "Getter", "for", "various", "Utilisation", "variables" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L530-L538
[ "def", "utilisation", "(", "self", ")", ":", "if", "self", ".", "_utilisation", "is", "None", ":", "api", "=", "\"SYNO.Core.System.Utilization\"", "url", "=", "\"%s/entry.cgi?api=%s&version=1&method=get\"", "%", "(", "self", ".", "base_url", ",", "api", ")", "self", ".", "_utilisation", "=", "SynoUtilization", "(", "self", ".", "_get_url", "(", "url", ")", ")", "return", "self", ".", "_utilisation" ]
a5446a052fc91a38f7589803dc7a654180db2566
test
SynologyDSM.storage
Getter for various Storage variables
SynologyDSM/SynologyDSM.py
def storage(self): """Getter for various Storage variables""" if self._storage is None: api = "SYNO.Storage.CGI.Storage" url = "%s/entry.cgi?api=%s&version=1&method=load_info" % ( self.base_url, api) self._storage = SynoStorage(self._get_url(url)) return self._storage
def storage(self): """Getter for various Storage variables""" if self._storage is None: api = "SYNO.Storage.CGI.Storage" url = "%s/entry.cgi?api=%s&version=1&method=load_info" % ( self.base_url, api) self._storage = SynoStorage(self._get_url(url)) return self._storage
[ "Getter", "for", "various", "Storage", "variables" ]
StaticCube/python-synology
python
https://github.com/StaticCube/python-synology/blob/a5446a052fc91a38f7589803dc7a654180db2566/SynologyDSM/SynologyDSM.py#L541-L549
[ "def", "storage", "(", "self", ")", ":", "if", "self", ".", "_storage", "is", "None", ":", "api", "=", "\"SYNO.Storage.CGI.Storage\"", "url", "=", "\"%s/entry.cgi?api=%s&version=1&method=load_info\"", "%", "(", "self", ".", "base_url", ",", "api", ")", "self", ".", "_storage", "=", "SynoStorage", "(", "self", ".", "_get_url", "(", "url", ")", ")", "return", "self", ".", "_storage" ]
a5446a052fc91a38f7589803dc7a654180db2566
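utilisation and storage follow the same cache-on-first-access idiom: fetch once, keep the wrapper on the instance. A generic sketch of that idiom with functools.cached_property; the class and method bodies here are illustrative, not part of the library:

from functools import cached_property   # Python 3.8+

class NasClient:
    def _get_json(self, api):
        # Placeholder for the real HTTP call.
        return {"api": api, "data": {}}

    @cached_property
    def utilisation(self):
        # Fetched on first access, then cached on the instance.
        return self._get_json("SYNO.Core.System.Utilization")

    @cached_property
    def storage(self):
        return self._get_json("SYNO.Storage.CGI.Storage")

client = NasClient()
assert client.utilisation is client.utilisation   # second read hits the cache

The explicit "is None" checks in the original keep the wrapper objects alive so that update() can push fresh data into them; with cached_property the cache would instead be invalidated with "del client.utilisation".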
test
Context.for_request
Creates the context for a specific request.
sentry_hipchat_ac/models.py
def for_request(request, body=None): """Creates the context for a specific request.""" tenant, jwt_data = Tenant.objects.for_request(request, body) webhook_sender_id = jwt_data.get('sub') sender_data = None if body and 'item' in body: if 'sender' in body['item']: sender_data = body['item']['sender'] elif 'message' in body['item'] and 'from' in body['item']['message']: sender_data = body['item']['message']['from'] if sender_data is None: if webhook_sender_id is None: raise BadTenantError('Cannot identify sender in tenant') sender_data = {'id': webhook_sender_id} return Context( tenant=tenant, sender=HipchatUser( id=sender_data.get('id'), name=sender_data.get('name'), mention_name=sender_data.get('mention_name'), ), signed_request=request.GET.get('signed_request'), context=jwt_data.get('context') or {}, )
def for_request(request, body=None): """Creates the context for a specific request.""" tenant, jwt_data = Tenant.objects.for_request(request, body) webhook_sender_id = jwt_data.get('sub') sender_data = None if body and 'item' in body: if 'sender' in body['item']: sender_data = body['item']['sender'] elif 'message' in body['item'] and 'from' in body['item']['message']: sender_data = body['item']['message']['from'] if sender_data is None: if webhook_sender_id is None: raise BadTenantError('Cannot identify sender in tenant') sender_data = {'id': webhook_sender_id} return Context( tenant=tenant, sender=HipchatUser( id=sender_data.get('id'), name=sender_data.get('name'), mention_name=sender_data.get('mention_name'), ), signed_request=request.GET.get('signed_request'), context=jwt_data.get('context') or {}, )
[ "Creates", "the", "context", "for", "a", "specific", "request", "." ]
getsentry/sentry-hipchat-ac
python
https://github.com/getsentry/sentry-hipchat-ac/blob/9063666f1e06cf352fed0530a8a437e45badc917/sentry_hipchat_ac/models.py#L245-L271
[ "def", "for_request", "(", "request", ",", "body", "=", "None", ")", ":", "tenant", ",", "jwt_data", "=", "Tenant", ".", "objects", ".", "for_request", "(", "request", ",", "body", ")", "webhook_sender_id", "=", "jwt_data", ".", "get", "(", "'sub'", ")", "sender_data", "=", "None", "if", "body", "and", "'item'", "in", "body", ":", "if", "'sender'", "in", "body", "[", "'item'", "]", ":", "sender_data", "=", "body", "[", "'item'", "]", "[", "'sender'", "]", "elif", "'message'", "in", "body", "[", "'item'", "]", "and", "'from'", "in", "body", "[", "'item'", "]", "[", "'message'", "]", ":", "sender_data", "=", "body", "[", "'item'", "]", "[", "'message'", "]", "[", "'from'", "]", "if", "sender_data", "is", "None", ":", "if", "webhook_sender_id", "is", "None", ":", "raise", "BadTenantError", "(", "'Cannot identify sender in tenant'", ")", "sender_data", "=", "{", "'id'", ":", "webhook_sender_id", "}", "return", "Context", "(", "tenant", "=", "tenant", ",", "sender", "=", "HipchatUser", "(", "id", "=", "sender_data", ".", "get", "(", "'id'", ")", ",", "name", "=", "sender_data", ".", "get", "(", "'name'", ")", ",", "mention_name", "=", "sender_data", ".", "get", "(", "'mention_name'", ")", ",", ")", ",", "signed_request", "=", "request", ".", "GET", ".", "get", "(", "'signed_request'", ")", ",", "context", "=", "jwt_data", ".", "get", "(", "'context'", ")", "or", "{", "}", ",", ")" ]
9063666f1e06cf352fed0530a8a437e45badc917
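for_request resolves the sender with a fallback chain: item.sender, then item.message.from, then the JWT sub claim, and only then an error. A compact standalone sketch of that chain (the function name is illustrative):

def resolve_sender(body, webhook_sender_id):
    """Return a sender dict using the same precedence as Context.for_request."""
    item = (body or {}).get('item', {})
    if 'sender' in item:
        return item['sender']
    message = item.get('message', {})
    if 'from' in message:
        return message['from']
    if webhook_sender_id is None:
        raise ValueError('Cannot identify sender in tenant')
    return {'id': webhook_sender_id}

assert resolve_sender({'item': {'message': {'from': {'id': 7}}}}, None) == {'id': 7}
assert resolve_sender(None, 'user-42') == {'id': 'user-42'}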
test
Context.tenant_token
The cached token of the current tenant.
sentry_hipchat_ac/models.py
def tenant_token(self): """The cached token of the current tenant.""" rv = getattr(self, '_tenant_token', None) if rv is None: rv = self._tenant_token = self.tenant.get_token() return rv
def tenant_token(self): """The cached token of the current tenant.""" rv = getattr(self, '_tenant_token', None) if rv is None: rv = self._tenant_token = self.tenant.get_token() return rv
[ "The", "cached", "token", "of", "the", "current", "tenant", "." ]
getsentry/sentry-hipchat-ac
python
https://github.com/getsentry/sentry-hipchat-ac/blob/9063666f1e06cf352fed0530a8a437e45badc917/sentry_hipchat_ac/models.py#L283-L288
[ "def", "tenant_token", "(", "self", ")", ":", "rv", "=", "getattr", "(", "self", ",", "'_tenant_token'", ",", "None", ")", "if", "rv", "is", "None", ":", "rv", "=", "self", ".", "_tenant_token", "=", "self", ".", "tenant", ".", "get_token", "(", ")", "return", "rv" ]
9063666f1e06cf352fed0530a8a437e45badc917
test
WidgetWrapperMixin.build_attrs
Helper function for building an attribute dictionary.
django_addanother/widgets.py
def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs
def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs
[ "Helper", "function", "for", "building", "an", "attribute", "dictionary", "." ]
jonashaag/django-addanother
python
https://github.com/jonashaag/django-addanother/blob/83dc0c8cc7665cc481dd58da0b9a746972264046/django_addanother/widgets.py#L28-L31
[ "def", "build_attrs", "(", "self", ",", "extra_attrs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "attrs", "=", "self", ".", "widget", ".", "build_attrs", "(", "extra_attrs", "=", "None", ",", "*", "*", "kwargs", ")", "return", "self", ".", "attrs" ]
83dc0c8cc7665cc481dd58da0b9a746972264046
test
with_apps
Class decorator that makes sure the passed apps are present in INSTALLED_APPS.
override_settings/__init__.py
def with_apps(*apps): """ Class decorator that makes sure the passed apps are present in INSTALLED_APPS. """ apps_set = set(settings.INSTALLED_APPS) apps_set.update(apps) return override_settings(INSTALLED_APPS=list(apps_set))
def with_apps(*apps): """ Class decorator that makes sure the passed apps are present in INSTALLED_APPS. """ apps_set = set(settings.INSTALLED_APPS) apps_set.update(apps) return override_settings(INSTALLED_APPS=list(apps_set))
[ "Class", "decorator", "that", "makes", "sure", "the", "passed", "apps", "are", "present", "in", "INSTALLED_APPS", "." ]
edavis/django-override-settings
python
https://github.com/edavis/django-override-settings/blob/016a2ba44cf7132d3aeefbfeddaf201217b1d4b6/override_settings/__init__.py#L66-L73
[ "def", "with_apps", "(", "*", "apps", ")", ":", "apps_set", "=", "set", "(", "settings", ".", "INSTALLED_APPS", ")", "apps_set", ".", "update", "(", "apps", ")", "return", "override_settings", "(", "INSTALLED_APPS", "=", "list", "(", "apps_set", ")", ")" ]
016a2ba44cf7132d3aeefbfeddaf201217b1d4b6
test
without_apps
Class decorator that makes sure the passed apps are not present in INSTALLED_APPS.
override_settings/__init__.py
def without_apps(*apps): """ Class decorator that makes sure the passed apps are not present in INSTALLED_APPS. """ apps_list = [a for a in settings.INSTALLED_APPS if a not in apps] return override_settings(INSTALLED_APPS=apps_list)
def without_apps(*apps): """ Class decorator that makes sure the passed apps are not present in INSTALLED_APPS. """ apps_list = [a for a in settings.INSTALLED_APPS if a not in apps] return override_settings(INSTALLED_APPS=apps_list)
[ "Class", "decorator", "that", "makes", "sure", "the", "passed", "apps", "are", "not", "present", "in", "INSTALLED_APPS", "." ]
edavis/django-override-settings
python
https://github.com/edavis/django-override-settings/blob/016a2ba44cf7132d3aeefbfeddaf201217b1d4b6/override_settings/__init__.py#L75-L81
[ "def", "without_apps", "(", "*", "apps", ")", ":", "apps_list", "=", "[", "a", "for", "a", "in", "settings", ".", "INSTALLED_APPS", "if", "a", "not", "in", "apps", "]", "return", "override_settings", "(", "INSTALLED_APPS", "=", "apps_list", ")" ]
016a2ba44cf7132d3aeefbfeddaf201217b1d4b6
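with_apps and without_apps are class decorators aimed at test cases. A hypothetical usage sketch (the app labels are placeholders and this assumes a configured Django project, since the decorators rebuild INSTALLED_APPS around the existing settings):

from django.conf import settings
from django.test import TestCase
from override_settings import with_apps, without_apps

@with_apps('django.contrib.humanize')
class HumanizeEnabledTest(TestCase):
    def test_app_is_installed(self):
        self.assertIn('django.contrib.humanize', settings.INSTALLED_APPS)

@without_apps('debug_toolbar')
class NoToolbarTest(TestCase):
    def test_app_is_absent(self):
        self.assertNotIn('debug_toolbar', settings.INSTALLED_APPS)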
test
override_settings.get_global_settings
Return a dictionary of all global_settings values.
override_settings/__init__.py
def get_global_settings(self): """ Return a dictionary of all global_settings values. """ return dict((key, getattr(global_settings, key)) for key in dir(global_settings) if key.isupper())
def get_global_settings(self): """ Return a dictionary of all global_settings values. """ return dict((key, getattr(global_settings, key)) for key in dir(global_settings) if key.isupper())
[ "Return", "a", "dictionary", "of", "all", "global_settings", "values", "." ]
edavis/django-override-settings
python
https://github.com/edavis/django-override-settings/blob/016a2ba44cf7132d3aeefbfeddaf201217b1d4b6/override_settings/__init__.py#L14-L19
[ "def", "get_global_settings", "(", "self", ")", ":", "return", "dict", "(", "(", "key", ",", "getattr", "(", "global_settings", ",", "key", ")", ")", "for", "key", "in", "dir", "(", "global_settings", ")", "if", "key", ".", "isupper", "(", ")", ")" ]
016a2ba44cf7132d3aeefbfeddaf201217b1d4b6
test
OAuth2UtilRequestHandler.do_GET
Handle the retrieval of the code
OAuth2Util/OAuth2Util.py
def do_GET(self): """ Handle the retrieval of the code """ parsed_url = urlparse(self.path) if parsed_url[2] == "/" + SERVER_REDIRECT_PATH: # 2 = Path parsed_query = parse_qs(parsed_url[4]) # 4 = Query if "code" not in parsed_query: self.send_response(200) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write("No code found, try again!".encode("utf-8")) return self.server.response_code = parsed_query["code"][0] self.send_response(200) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write( "Thank you for using OAuth2Util. The authorization was successful, " "you can now close this window.".encode("utf-8")) elif parsed_url[2] == "/" + SERVER_LINK_PATH: # 2 = Path self.send_response(200) self.send_header("Content-Type", "text/html") self.end_headers() self.wfile.write("<html><body>Hey there!<br/>Click <a href=\"{0}\">here</a> to claim your prize.</body></html>" .format(self.server.authorize_url).encode("utf-8")) else: self.send_response(404) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write("404 not found".encode("utf-8"))
def do_GET(self): """ Handle the retrieval of the code """ parsed_url = urlparse(self.path) if parsed_url[2] == "/" + SERVER_REDIRECT_PATH: # 2 = Path parsed_query = parse_qs(parsed_url[4]) # 4 = Query if "code" not in parsed_query: self.send_response(200) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write("No code found, try again!".encode("utf-8")) return self.server.response_code = parsed_query["code"][0] self.send_response(200) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write( "Thank you for using OAuth2Util. The authorization was successful, " "you can now close this window.".encode("utf-8")) elif parsed_url[2] == "/" + SERVER_LINK_PATH: # 2 = Path self.send_response(200) self.send_header("Content-Type", "text/html") self.end_headers() self.wfile.write("<html><body>Hey there!<br/>Click <a href=\"{0}\">here</a> to claim your prize.</body></html>" .format(self.server.authorize_url).encode("utf-8")) else: self.send_response(404) self.send_header("Content-Type", "text/plain") self.end_headers() self.wfile.write("404 not found".encode("utf-8"))
[ "Handle", "the", "retrieval", "of", "the", "code" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L58-L95
[ "def", "do_GET", "(", "self", ")", ":", "parsed_url", "=", "urlparse", "(", "self", ".", "path", ")", "if", "parsed_url", "[", "2", "]", "==", "\"/\"", "+", "SERVER_REDIRECT_PATH", ":", "# 2 = Path", "parsed_query", "=", "parse_qs", "(", "parsed_url", "[", "4", "]", ")", "# 4 = Query", "if", "\"code\"", "not", "in", "parsed_query", ":", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "\"Content-Type\"", ",", "\"text/plain\"", ")", "self", ".", "end_headers", "(", ")", "self", ".", "wfile", ".", "write", "(", "\"No code found, try again!\"", ".", "encode", "(", "\"utf-8\"", ")", ")", "return", "self", ".", "server", ".", "response_code", "=", "parsed_query", "[", "\"code\"", "]", "[", "0", "]", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "\"Content-Type\"", ",", "\"text/plain\"", ")", "self", ".", "end_headers", "(", ")", "self", ".", "wfile", ".", "write", "(", "\"Thank you for using OAuth2Util. The authorization was successful, \"", "\"you can now close this window.\"", ".", "encode", "(", "\"utf-8\"", ")", ")", "elif", "parsed_url", "[", "2", "]", "==", "\"/\"", "+", "SERVER_LINK_PATH", ":", "# 2 = Path", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "\"Content-Type\"", ",", "\"text/html\"", ")", "self", ".", "end_headers", "(", ")", "self", ".", "wfile", ".", "write", "(", "\"<html><body>Hey there!<br/>Click <a href=\\\"{0}\\\">here</a> to claim your prize.</body></html>\"", ".", "format", "(", "self", ".", "server", ".", "authorize_url", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", "else", ":", "self", ".", "send_response", "(", "404", ")", "self", ".", "send_header", "(", "\"Content-Type\"", ",", "\"text/plain\"", ")", "self", ".", "end_headers", "(", ")", "self", ".", "wfile", ".", "write", "(", "\"404 not found\"", ".", "encode", "(", "\"utf-8\"", ")", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
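The handler parses the redirect URL with urlparse and parse_qs and takes the first value of the code parameter. A standalone illustration (the callback path below is a placeholder, since SERVER_REDIRECT_PATH is defined elsewhere in the module):

from urllib.parse import urlparse, parse_qs   # Python 2 kept these in urlparse

path = "/authorize_callback?state=UsingOAuth2Util&code=abc123"
parsed = urlparse(path)
query = parse_qs(parsed.query)            # {'state': ['UsingOAuth2Util'], 'code': ['abc123']}
code = query["code"][0] if "code" in query else None
assert code == "abc123"

The original indexes the parse result positionally (parsed_url[2] is the path, parsed_url[4] the query); attribute access as above is equivalent and a little more readable.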
test
OAuth2Util._set_app_info
Set the app info (id & secret) read from the config file on the Reddit object
OAuth2Util/OAuth2Util.py
def _set_app_info(self): """ Set the app info (id & secret) read from the config file on the Reddit object """ redirect_url = "http://{0}:{1}/{2}".format(SERVER_URL, SERVER_PORT, SERVER_REDIRECT_PATH) self.r.set_oauth_app_info(self._get_value(CONFIGKEY_APP_KEY), self._get_value(CONFIGKEY_APP_SECRET), redirect_url)
def _set_app_info(self): """ Set the app info (id & secret) read from the config file on the Reddit object """ redirect_url = "http://{0}:{1}/{2}".format(SERVER_URL, SERVER_PORT, SERVER_REDIRECT_PATH) self.r.set_oauth_app_info(self._get_value(CONFIGKEY_APP_KEY), self._get_value(CONFIGKEY_APP_SECRET), redirect_url)
[ "Set", "the", "app", "info", "(", "id", "&", "secret", ")", "read", "from", "the", "config", "file", "on", "the", "Reddit", "object" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L167-L175
[ "def", "_set_app_info", "(", "self", ")", ":", "redirect_url", "=", "\"http://{0}:{1}/{2}\"", ".", "format", "(", "SERVER_URL", ",", "SERVER_PORT", ",", "SERVER_REDIRECT_PATH", ")", "self", ".", "r", ".", "set_oauth_app_info", "(", "self", ".", "_get_value", "(", "CONFIGKEY_APP_KEY", ")", ",", "self", ".", "_get_value", "(", "CONFIGKEY_APP_SECRET", ")", ",", "redirect_url", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util._get_value
Helper method to get a value from the config
OAuth2Util/OAuth2Util.py
def _get_value(self, key, func=None, split_val=None, as_boolean=False, exception_default=None): """ Helper method to get a value from the config """ try: if as_boolean: return self.config.getboolean(key[0], key[1]) value = self.config.get(key[0], key[1]) if split_val is not None: value = value.split(split_val) if func is not None: return func(value) return value except (KeyError, configparser.NoSectionError, configparser.NoOptionError) as e: if exception_default is not None: return exception_default raise KeyError(e)
def _get_value(self, key, func=None, split_val=None, as_boolean=False, exception_default=None): """ Helper method to get a value from the config """ try: if as_boolean: return self.config.getboolean(key[0], key[1]) value = self.config.get(key[0], key[1]) if split_val is not None: value = value.split(split_val) if func is not None: return func(value) return value except (KeyError, configparser.NoSectionError, configparser.NoOptionError) as e: if exception_default is not None: return exception_default raise KeyError(e)
[ "Helper", "method", "to", "get", "a", "value", "from", "the", "config" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L177-L194
[ "def", "_get_value", "(", "self", ",", "key", ",", "func", "=", "None", ",", "split_val", "=", "None", ",", "as_boolean", "=", "False", ",", "exception_default", "=", "None", ")", ":", "try", ":", "if", "as_boolean", ":", "return", "self", ".", "config", ".", "getboolean", "(", "key", "[", "0", "]", ",", "key", "[", "1", "]", ")", "value", "=", "self", ".", "config", ".", "get", "(", "key", "[", "0", "]", ",", "key", "[", "1", "]", ")", "if", "split_val", "is", "not", "None", ":", "value", "=", "value", ".", "split", "(", "split_val", ")", "if", "func", "is", "not", "None", ":", "return", "func", "(", "value", ")", "return", "value", "except", "(", "KeyError", ",", "configparser", ".", "NoSectionError", ",", "configparser", ".", "NoOptionError", ")", "as", "e", ":", "if", "exception_default", "is", "not", "None", ":", "return", "exception_default", "raise", "KeyError", "(", "e", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
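_get_value treats a key as a (section, option) pair and layers optional splitting, conversion, boolean parsing and a fallback on top of configparser. A self-contained sketch of the same pattern against an in-memory config (section and option names are examples only):

import configparser

config = configparser.ConfigParser()
config.read_string("""
[app]
scope = identity,read,submit
refreshable = true
""")

def get_value(key, func=None, split_val=None, as_boolean=False, default=None):
    # key is a (section, option) pair, mirroring the CONFIGKEY_* constants.
    try:
        if as_boolean:
            return config.getboolean(key[0], key[1])
        value = config.get(key[0], key[1])
        if split_val is not None:
            value = value.split(split_val)
        return func(value) if func is not None else value
    except (configparser.NoSectionError, configparser.NoOptionError):
        if default is not None:
            return default
        raise

assert get_value(("app", "scope"), set, split_val=",") == {"identity", "read", "submit"}
assert get_value(("app", "refreshable"), as_boolean=True) is True
assert get_value(("app", "missing"), default=0) == 0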
test
OAuth2Util._change_value
Change the value of the given key in the given file to the given value
OAuth2Util/OAuth2Util.py
def _change_value(self, key, value): """ Change the value of the given key in the given file to the given value """ if not self.config.has_section(key[0]): self.config.add_section(key[0]) self.config.set(key[0], key[1], str(value)) with open(self.configfile, "w") as f: self.config.write(f)
def _change_value(self, key, value): """ Change the value of the given key in the given file to the given value """ if not self.config.has_section(key[0]): self.config.add_section(key[0]) self.config.set(key[0], key[1], str(value)) with open(self.configfile, "w") as f: self.config.write(f)
[ "Change", "the", "value", "of", "the", "given", "key", "in", "the", "given", "file", "to", "the", "given", "value" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L196-L206
[ "def", "_change_value", "(", "self", ",", "key", ",", "value", ")", ":", "if", "not", "self", ".", "config", ".", "has_section", "(", "key", "[", "0", "]", ")", ":", "self", ".", "config", ".", "add_section", "(", "key", "[", "0", "]", ")", "self", ".", "config", ".", "set", "(", "key", "[", "0", "]", ",", "key", "[", "1", "]", ",", "str", "(", "value", ")", ")", "with", "open", "(", "self", ".", "configfile", ",", "\"w\"", ")", "as", "f", ":", "self", ".", "config", ".", "write", "(", "f", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util._migrate_config
Migrates the old config file format to the new one
OAuth2Util/OAuth2Util.py
def _migrate_config(self, oldname=DEFAULT_CONFIG, newname=DEFAULT_CONFIG): """ Migrates the old config file format to the new one """ self._log("Your OAuth2Util config file is in an old format and needs " "to be changed. I tried as best as I could to migrate it.", logging.WARNING) with open(oldname, "r") as old: with open(newname, "w") as new: new.write("[app]\n") new.write(old.read())
def _migrate_config(self, oldname=DEFAULT_CONFIG, newname=DEFAULT_CONFIG): """ Migrates the old config file format to the new one """ self._log("Your OAuth2Util config file is in an old format and needs " "to be changed. I tried as best as I could to migrate it.", logging.WARNING) with open(oldname, "r") as old: with open(newname, "w") as new: new.write("[app]\n") new.write(old.read())
[ "Migrates", "the", "old", "config", "file", "format", "to", "the", "new", "one" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L208-L218
[ "def", "_migrate_config", "(", "self", ",", "oldname", "=", "DEFAULT_CONFIG", ",", "newname", "=", "DEFAULT_CONFIG", ")", ":", "self", ".", "_log", "(", "\"Your OAuth2Util config file is in an old format and needs \"", "\"to be changed. I tried as best as I could to migrate it.\"", ",", "logging", ".", "WARNING", ")", "with", "open", "(", "oldname", ",", "\"r\"", ")", "as", "old", ":", "with", "open", "(", "newname", ",", "\"w\"", ")", "as", "new", ":", "new", ".", "write", "(", "\"[app]\\n\"", ")", "new", ".", "write", "(", "old", ".", "read", "(", ")", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util._start_webserver
Start the webserver that will receive the code
OAuth2Util/OAuth2Util.py
def _start_webserver(self, authorize_url=None): """ Start the webserver that will receive the code """ server_address = (SERVER_URL, SERVER_PORT) self.server = HTTPServer(server_address, OAuth2UtilRequestHandler) self.server.response_code = None self.server.authorize_url = authorize_url t = Thread(target=self.server.serve_forever) t.daemon = True t.start()
def _start_webserver(self, authorize_url=None): """ Start the webserver that will receive the code """ server_address = (SERVER_URL, SERVER_PORT) self.server = HTTPServer(server_address, OAuth2UtilRequestHandler) self.server.response_code = None self.server.authorize_url = authorize_url t = Thread(target=self.server.serve_forever) t.daemon = True t.start()
[ "Start", "the", "webserver", "that", "will", "receive", "the", "code" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L222-L232
[ "def", "_start_webserver", "(", "self", ",", "authorize_url", "=", "None", ")", ":", "server_address", "=", "(", "SERVER_URL", ",", "SERVER_PORT", ")", "self", ".", "server", "=", "HTTPServer", "(", "server_address", ",", "OAuth2UtilRequestHandler", ")", "self", ".", "server", ".", "response_code", "=", "None", "self", ".", "server", ".", "authorize_url", "=", "authorize_url", "t", "=", "Thread", "(", "target", "=", "self", ".", "server", ".", "serve_forever", ")", "t", ".", "daemon", "=", "True", "t", ".", "start", "(", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
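_start_webserver runs the standard-library HTTPServer on a daemon thread so the OAuth flow can continue in the main thread. A minimal self-contained sketch of that pattern, with a trivial handler standing in for OAuth2UtilRequestHandler:

from http.server import BaseHTTPRequestHandler, HTTPServer
from threading import Thread

class PingHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-Type", "text/plain")
        self.end_headers()
        self.wfile.write(b"ok")

server = HTTPServer(("127.0.0.1", 0), PingHandler)   # port 0 picks a free port
thread = Thread(target=server.serve_forever)
thread.daemon = True          # do not keep the process alive at exit
thread.start()
# ... the main thread can now poll for state that the handler sets on the server ...
server.shutdown()             # unblocks serve_forever() on the worker thread
thread.join()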
test
OAuth2Util._wait_for_response
Wait until the user accepted or rejected the request
OAuth2Util/OAuth2Util.py
def _wait_for_response(self): """ Wait until the user accepted or rejected the request """ while not self.server.response_code: time.sleep(2) time.sleep(5) self.server.shutdown()
def _wait_for_response(self): """ Wait until the user accepted or rejected the request """ while not self.server.response_code: time.sleep(2) time.sleep(5) self.server.shutdown()
[ "Wait", "until", "the", "user", "accepted", "or", "rejected", "the", "request" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L234-L241
[ "def", "_wait_for_response", "(", "self", ")", ":", "while", "not", "self", ".", "server", ".", "response_code", ":", "time", ".", "sleep", "(", "2", ")", "time", ".", "sleep", "(", "5", ")", "self", ".", "server", ".", "shutdown", "(", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util._get_new_access_information
Request new access information from reddit using the built in webserver
OAuth2Util/OAuth2Util.py
def _get_new_access_information(self): """ Request new access information from reddit using the built in webserver """ if not self.r.has_oauth_app_info: self._log('Cannot obtain authorize url from PRAW. Please check your configuration.', logging.ERROR) raise AttributeError('Reddit Session invalid, please check your designated config file.') url = self.r.get_authorize_url('UsingOAuth2Util', self._get_value(CONFIGKEY_SCOPE, set, split_val=','), self._get_value(CONFIGKEY_REFRESHABLE, as_boolean=True)) self._start_webserver(url) if not self._get_value(CONFIGKEY_SERVER_MODE, as_boolean=True): webbrowser.open(url) else: print("Webserver is waiting for you :D. Please open {0}:{1}/{2} " "in your browser" .format(SERVER_URL, SERVER_PORT, SERVER_LINK_PATH)) self._wait_for_response() try: access_information = self.r.get_access_information( self.server.response_code) except praw.errors.OAuthException: self._log("Can not authenticate, maybe the app infos (e.g. secret) are wrong.", logging.ERROR) raise self._change_value(CONFIGKEY_TOKEN, access_information["access_token"]) self._change_value(CONFIGKEY_REFRESH_TOKEN, access_information["refresh_token"]) self._change_value(CONFIGKEY_VALID_UNTIL, time.time() + TOKEN_VALID_DURATION)
def _get_new_access_information(self): """ Request new access information from reddit using the built in webserver """ if not self.r.has_oauth_app_info: self._log('Cannot obtain authorize url from PRAW. Please check your configuration.', logging.ERROR) raise AttributeError('Reddit Session invalid, please check your designated config file.') url = self.r.get_authorize_url('UsingOAuth2Util', self._get_value(CONFIGKEY_SCOPE, set, split_val=','), self._get_value(CONFIGKEY_REFRESHABLE, as_boolean=True)) self._start_webserver(url) if not self._get_value(CONFIGKEY_SERVER_MODE, as_boolean=True): webbrowser.open(url) else: print("Webserver is waiting for you :D. Please open {0}:{1}/{2} " "in your browser" .format(SERVER_URL, SERVER_PORT, SERVER_LINK_PATH)) self._wait_for_response() try: access_information = self.r.get_access_information( self.server.response_code) except praw.errors.OAuthException: self._log("Can not authenticate, maybe the app infos (e.g. secret) are wrong.", logging.ERROR) raise self._change_value(CONFIGKEY_TOKEN, access_information["access_token"]) self._change_value(CONFIGKEY_REFRESH_TOKEN, access_information["refresh_token"]) self._change_value(CONFIGKEY_VALID_UNTIL, time.time() + TOKEN_VALID_DURATION)
[ "Request", "new", "access", "information", "from", "reddit", "using", "the", "built", "in", "webserver" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L243-L272
[ "def", "_get_new_access_information", "(", "self", ")", ":", "if", "not", "self", ".", "r", ".", "has_oauth_app_info", ":", "self", ".", "_log", "(", "'Cannot obtain authorize url from PRAW. Please check your configuration.'", ",", "logging", ".", "ERROR", ")", "raise", "AttributeError", "(", "'Reddit Session invalid, please check your designated config file.'", ")", "url", "=", "self", ".", "r", ".", "get_authorize_url", "(", "'UsingOAuth2Util'", ",", "self", ".", "_get_value", "(", "CONFIGKEY_SCOPE", ",", "set", ",", "split_val", "=", "','", ")", ",", "self", ".", "_get_value", "(", "CONFIGKEY_REFRESHABLE", ",", "as_boolean", "=", "True", ")", ")", "self", ".", "_start_webserver", "(", "url", ")", "if", "not", "self", ".", "_get_value", "(", "CONFIGKEY_SERVER_MODE", ",", "as_boolean", "=", "True", ")", ":", "webbrowser", ".", "open", "(", "url", ")", "else", ":", "print", "(", "\"Webserver is waiting for you :D. Please open {0}:{1}/{2} \"", "\"in your browser\"", ".", "format", "(", "SERVER_URL", ",", "SERVER_PORT", ",", "SERVER_LINK_PATH", ")", ")", "self", ".", "_wait_for_response", "(", ")", "try", ":", "access_information", "=", "self", ".", "r", ".", "get_access_information", "(", "self", ".", "server", ".", "response_code", ")", "except", "praw", ".", "errors", ".", "OAuthException", ":", "self", ".", "_log", "(", "\"Can not authenticate, maybe the app infos (e.g. secret) are wrong.\"", ",", "logging", ".", "ERROR", ")", "raise", "self", ".", "_change_value", "(", "CONFIGKEY_TOKEN", ",", "access_information", "[", "\"access_token\"", "]", ")", "self", ".", "_change_value", "(", "CONFIGKEY_REFRESH_TOKEN", ",", "access_information", "[", "\"refresh_token\"", "]", ")", "self", ".", "_change_value", "(", "CONFIGKEY_VALID_UNTIL", ",", "time", ".", "time", "(", ")", "+", "TOKEN_VALID_DURATION", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util._check_token_present
Check whether the tokens are set and request new ones if not
OAuth2Util/OAuth2Util.py
def _check_token_present(self): """ Check whether the tokens are set and request new ones if not """ try: self._get_value(CONFIGKEY_TOKEN) self._get_value(CONFIGKEY_REFRESH_TOKEN) self._get_value(CONFIGKEY_REFRESHABLE) except KeyError: self._log("Request new Token (CTP)") self._get_new_access_information()
def _check_token_present(self): """ Check whether the tokens are set and request new ones if not """ try: self._get_value(CONFIGKEY_TOKEN) self._get_value(CONFIGKEY_REFRESH_TOKEN) self._get_value(CONFIGKEY_REFRESHABLE) except KeyError: self._log("Request new Token (CTP)") self._get_new_access_information()
[ "Check", "whether", "the", "tokens", "are", "set", "and", "request", "new", "ones", "if", "not" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L274-L284
[ "def", "_check_token_present", "(", "self", ")", ":", "try", ":", "self", ".", "_get_value", "(", "CONFIGKEY_TOKEN", ")", "self", ".", "_get_value", "(", "CONFIGKEY_REFRESH_TOKEN", ")", "self", ".", "_get_value", "(", "CONFIGKEY_REFRESHABLE", ")", "except", "KeyError", ":", "self", ".", "_log", "(", "\"Request new Token (CTP)\"", ")", "self", ".", "_get_new_access_information", "(", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util.set_access_credentials
Set the token on the Reddit Object again
OAuth2Util/OAuth2Util.py
def set_access_credentials(self, _retry=0): """ Set the token on the Reddit Object again """ if _retry >= 5: raise ConnectionAbortedError('Reddit is not accessible right now, cannot refresh OAuth2 tokens.') self._check_token_present() try: self.r.set_access_credentials(self._get_value(CONFIGKEY_SCOPE, set, split_val=","), self._get_value(CONFIGKEY_TOKEN), self._get_value(CONFIGKEY_REFRESH_TOKEN)) except (praw.errors.OAuthInvalidToken, praw.errors.HTTPException) as e: # todo check e status code # self._log('Retrying in 5s.') # time.sleep(5) # self.set_access_credentials(_retry=_retry + 1) self._log("Request new Token (SAC)") self._get_new_access_information()
def set_access_credentials(self, _retry=0): """ Set the token on the Reddit Object again """ if _retry >= 5: raise ConnectionAbortedError('Reddit is not accessible right now, cannot refresh OAuth2 tokens.') self._check_token_present() try: self.r.set_access_credentials(self._get_value(CONFIGKEY_SCOPE, set, split_val=","), self._get_value(CONFIGKEY_TOKEN), self._get_value(CONFIGKEY_REFRESH_TOKEN)) except (praw.errors.OAuthInvalidToken, praw.errors.HTTPException) as e: # todo check e status code # self._log('Retrying in 5s.') # time.sleep(5) # self.set_access_credentials(_retry=_retry + 1) self._log("Request new Token (SAC)") self._get_new_access_information()
[ "Set", "the", "token", "on", "the", "Reddit", "Object", "again" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L296-L316
[ "def", "set_access_credentials", "(", "self", ",", "_retry", "=", "0", ")", ":", "if", "_retry", ">=", "5", ":", "raise", "ConnectionAbortedError", "(", "'Reddit is not accessible right now, cannot refresh OAuth2 tokens.'", ")", "self", ".", "_check_token_present", "(", ")", "try", ":", "self", ".", "r", ".", "set_access_credentials", "(", "self", ".", "_get_value", "(", "CONFIGKEY_SCOPE", ",", "set", ",", "split_val", "=", "\",\"", ")", ",", "self", ".", "_get_value", "(", "CONFIGKEY_TOKEN", ")", ",", "self", ".", "_get_value", "(", "CONFIGKEY_REFRESH_TOKEN", ")", ")", "except", "(", "praw", ".", "errors", ".", "OAuthInvalidToken", ",", "praw", ".", "errors", ".", "HTTPException", ")", "as", "e", ":", "# todo check e status code", "# self._log('Retrying in 5s.')", "# time.sleep(5)", "# self.set_access_credentials(_retry=_retry + 1)", "self", ".", "_log", "(", "\"Request new Token (SAC)\"", ")", "self", ".", "_get_new_access_information", "(", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
test
OAuth2Util.refresh
Check if the token is still valid and requests a new if it is not valid anymore Call this method before a call to praw if there might have passed more than one hour force: if true, a new token will be retrieved no matter what
OAuth2Util/OAuth2Util.py
def refresh(self, force=False, _retry=0): """ Check if the token is still valid and requests a new if it is not valid anymore Call this method before a call to praw if there might have passed more than one hour force: if true, a new token will be retrieved no matter what """ if _retry >= 5: raise ConnectionAbortedError('Reddit is not accessible right now, cannot refresh OAuth2 tokens.') self._check_token_present() # We check whether another instance already refreshed the token if time.time() > self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self.config.read(self.configfile) if time.time() < self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self._log("Found new token") self.set_access_credentials() if force or time.time() > self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self._log("Refresh Token") try: new_token = self.r.refresh_access_information(self._get_value(CONFIGKEY_REFRESH_TOKEN)) self._change_value(CONFIGKEY_TOKEN, new_token["access_token"]) self._change_value(CONFIGKEY_VALID_UNTIL, time.time() + TOKEN_VALID_DURATION) self.set_access_credentials() except (praw.errors.OAuthInvalidToken, praw.errors.HTTPException) as e: # todo check e status code # self._log('Retrying in 5s.') # time.sleep(5) # self.refresh(_retry=_retry + 1) self._log("Request new Token (REF)") self._get_new_access_information()
def refresh(self, force=False, _retry=0): """ Check if the token is still valid and requests a new if it is not valid anymore Call this method before a call to praw if there might have passed more than one hour force: if true, a new token will be retrieved no matter what """ if _retry >= 5: raise ConnectionAbortedError('Reddit is not accessible right now, cannot refresh OAuth2 tokens.') self._check_token_present() # We check whether another instance already refreshed the token if time.time() > self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self.config.read(self.configfile) if time.time() < self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self._log("Found new token") self.set_access_credentials() if force or time.time() > self._get_value(CONFIGKEY_VALID_UNTIL, float, exception_default=0) - REFRESH_MARGIN: self._log("Refresh Token") try: new_token = self.r.refresh_access_information(self._get_value(CONFIGKEY_REFRESH_TOKEN)) self._change_value(CONFIGKEY_TOKEN, new_token["access_token"]) self._change_value(CONFIGKEY_VALID_UNTIL, time.time() + TOKEN_VALID_DURATION) self.set_access_credentials() except (praw.errors.OAuthInvalidToken, praw.errors.HTTPException) as e: # todo check e status code # self._log('Retrying in 5s.') # time.sleep(5) # self.refresh(_retry=_retry + 1) self._log("Request new Token (REF)") self._get_new_access_information()
[ "Check", "if", "the", "token", "is", "still", "valid", "and", "requests", "a", "new", "if", "it", "is", "not", "valid", "anymore" ]
SmBe19/praw-OAuth2Util
python
https://github.com/SmBe19/praw-OAuth2Util/blob/ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe/OAuth2Util/OAuth2Util.py#L320-L356
[ "def", "refresh", "(", "self", ",", "force", "=", "False", ",", "_retry", "=", "0", ")", ":", "if", "_retry", ">=", "5", ":", "raise", "ConnectionAbortedError", "(", "'Reddit is not accessible right now, cannot refresh OAuth2 tokens.'", ")", "self", ".", "_check_token_present", "(", ")", "# We check whether another instance already refreshed the token", "if", "time", ".", "time", "(", ")", ">", "self", ".", "_get_value", "(", "CONFIGKEY_VALID_UNTIL", ",", "float", ",", "exception_default", "=", "0", ")", "-", "REFRESH_MARGIN", ":", "self", ".", "config", ".", "read", "(", "self", ".", "configfile", ")", "if", "time", ".", "time", "(", ")", "<", "self", ".", "_get_value", "(", "CONFIGKEY_VALID_UNTIL", ",", "float", ",", "exception_default", "=", "0", ")", "-", "REFRESH_MARGIN", ":", "self", ".", "_log", "(", "\"Found new token\"", ")", "self", ".", "set_access_credentials", "(", ")", "if", "force", "or", "time", ".", "time", "(", ")", ">", "self", ".", "_get_value", "(", "CONFIGKEY_VALID_UNTIL", ",", "float", ",", "exception_default", "=", "0", ")", "-", "REFRESH_MARGIN", ":", "self", ".", "_log", "(", "\"Refresh Token\"", ")", "try", ":", "new_token", "=", "self", ".", "r", ".", "refresh_access_information", "(", "self", ".", "_get_value", "(", "CONFIGKEY_REFRESH_TOKEN", ")", ")", "self", ".", "_change_value", "(", "CONFIGKEY_TOKEN", ",", "new_token", "[", "\"access_token\"", "]", ")", "self", ".", "_change_value", "(", "CONFIGKEY_VALID_UNTIL", ",", "time", ".", "time", "(", ")", "+", "TOKEN_VALID_DURATION", ")", "self", ".", "set_access_credentials", "(", ")", "except", "(", "praw", ".", "errors", ".", "OAuthInvalidToken", ",", "praw", ".", "errors", ".", "HTTPException", ")", "as", "e", ":", "# todo check e status code", "# self._log('Retrying in 5s.')", "# time.sleep(5)", "# self.refresh(_retry=_retry + 1)", "self", ".", "_log", "(", "\"Request new Token (REF)\"", ")", "self", ".", "_get_new_access_information", "(", ")" ]
ca0a2d4d7eefcc681aac92c9cd4b83cd9ea6c5fe
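The expiry test in refresh reduces to comparing the clock against the stored valid-until timestamp minus a safety margin; re-reading the config file first lets several processes share one token file without each of them refreshing it. A tiny sketch of the check itself (the constant values are assumptions for illustration):

import time

TOKEN_VALID_DURATION = 3600   # seconds, assumed value
REFRESH_MARGIN = 60           # refresh slightly before real expiry, assumed value

def needs_refresh(valid_until, force=False):
    """True when the cached token should be renewed."""
    return force or time.time() > valid_until - REFRESH_MARGIN

valid_until = time.time() + TOKEN_VALID_DURATION
assert needs_refresh(valid_until) is False           # still fresh
assert needs_refresh(valid_until, force=True) is True
assert needs_refresh(time.time() - 1) is True        # already expired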
test
create_manifest_table
Create DynamoDB table for run manifests Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name
snowplow_analytics_sdk/run_manifests.py
def create_manifest_table(dynamodb_client, table_name): """Create DynamoDB table for run manifests Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name """ try: dynamodb_client.create_table( AttributeDefinitions=[ { 'AttributeName': DYNAMODB_RUNID_ATTRIBUTE, 'AttributeType': 'S' }, ], TableName=table_name, KeySchema=[ { 'AttributeName': DYNAMODB_RUNID_ATTRIBUTE, 'KeyType': 'HASH' }, ], ProvisionedThroughput={ 'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5 } ) dynamodb_client.get_waiter('table_exists').wait(TableName=table_name) except ClientError as e: # Table already exists if e.response['Error']['Code'] == 'ResourceInUseException': pass else: raise e
def create_manifest_table(dynamodb_client, table_name): """Create DynamoDB table for run manifests Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name """ try: dynamodb_client.create_table( AttributeDefinitions=[ { 'AttributeName': DYNAMODB_RUNID_ATTRIBUTE, 'AttributeType': 'S' }, ], TableName=table_name, KeySchema=[ { 'AttributeName': DYNAMODB_RUNID_ATTRIBUTE, 'KeyType': 'HASH' }, ], ProvisionedThroughput={ 'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5 } ) dynamodb_client.get_waiter('table_exists').wait(TableName=table_name) except ClientError as e: # Table already exists if e.response['Error']['Code'] == 'ResourceInUseException': pass else: raise e
[ "Create", "DynamoDB", "table", "for", "run", "manifests" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L38-L71
[ "def", "create_manifest_table", "(", "dynamodb_client", ",", "table_name", ")", ":", "try", ":", "dynamodb_client", ".", "create_table", "(", "AttributeDefinitions", "=", "[", "{", "'AttributeName'", ":", "DYNAMODB_RUNID_ATTRIBUTE", ",", "'AttributeType'", ":", "'S'", "}", ",", "]", ",", "TableName", "=", "table_name", ",", "KeySchema", "=", "[", "{", "'AttributeName'", ":", "DYNAMODB_RUNID_ATTRIBUTE", ",", "'KeyType'", ":", "'HASH'", "}", ",", "]", ",", "ProvisionedThroughput", "=", "{", "'ReadCapacityUnits'", ":", "5", ",", "'WriteCapacityUnits'", ":", "5", "}", ")", "dynamodb_client", ".", "get_waiter", "(", "'table_exists'", ")", ".", "wait", "(", "TableName", "=", "table_name", ")", "except", "ClientError", "as", "e", ":", "# Table already exists", "if", "e", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "==", "'ResourceInUseException'", ":", "pass", "else", ":", "raise", "e" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
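A hypothetical usage sketch for create_manifest_table with a boto3 client (the region and table name are placeholders; the import path follows the module path shown above). Because ResourceInUseException is swallowed, the call is safe to repeat:

import boto3
from snowplow_analytics_sdk.run_manifests import create_manifest_table

dynamodb = boto3.client('dynamodb', region_name='us-east-1')   # placeholder region
create_manifest_table(dynamodb, 'snowplow-run-manifests')      # placeholder table name
create_manifest_table(dynamodb, 'snowplow-run-manifests')      # idempotent second call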
test
list_runids
Return list of all run ids inside S3 folder. It does not respect S3 pagination (`MaxKeys`) and returns **all** keys from bucket and won't list any prefixes with object archived to AWS Glacier Arguments: s3_client - boto3 S3 client (not service) full_path - full valid S3 path to events (such as enriched-archive) example: s3://acme-events-bucket/main-pipeline/enriched-archive
snowplow_analytics_sdk/run_manifests.py
def list_runids(s3_client, full_path): """Return list of all run ids inside S3 folder. It does not respect S3 pagination (`MaxKeys`) and returns **all** keys from bucket and won't list any prefixes with object archived to AWS Glacier Arguments: s3_client - boto3 S3 client (not service) full_path - full valid S3 path to events (such as enriched-archive) example: s3://acme-events-bucket/main-pipeline/enriched-archive """ listing_finished = False # last response was not truncated run_ids_buffer = [] last_continuation_token = None (bucket, prefix) = split_full_path(full_path) while not listing_finished: options = clean_dict({ 'Bucket': bucket, 'Prefix': prefix, 'Delimiter': '/', 'ContinuationToken': last_continuation_token }) response = s3_client.list_objects_v2(**options) keys = [extract_run_id(key['Prefix']) for key in response.get('CommonPrefixes', [])] run_ids_buffer.extend([key for key in keys if key is not None]) last_continuation_token = response.get('NextContinuationToken', None) if not response['IsTruncated']: listing_finished = True non_archived_run_ids = [run_id for run_id in run_ids_buffer if not is_glacier(s3_client, bucket, run_id)] return non_archived_run_ids
def list_runids(s3_client, full_path): """Return list of all run ids inside S3 folder. It does not respect S3 pagination (`MaxKeys`) and returns **all** keys from bucket and won't list any prefixes with object archived to AWS Glacier Arguments: s3_client - boto3 S3 client (not service) full_path - full valid S3 path to events (such as enriched-archive) example: s3://acme-events-bucket/main-pipeline/enriched-archive """ listing_finished = False # last response was not truncated run_ids_buffer = [] last_continuation_token = None (bucket, prefix) = split_full_path(full_path) while not listing_finished: options = clean_dict({ 'Bucket': bucket, 'Prefix': prefix, 'Delimiter': '/', 'ContinuationToken': last_continuation_token }) response = s3_client.list_objects_v2(**options) keys = [extract_run_id(key['Prefix']) for key in response.get('CommonPrefixes', [])] run_ids_buffer.extend([key for key in keys if key is not None]) last_continuation_token = response.get('NextContinuationToken', None) if not response['IsTruncated']: listing_finished = True non_archived_run_ids = [run_id for run_id in run_ids_buffer if not is_glacier(s3_client, bucket, run_id)] return non_archived_run_ids
[ "Return", "list", "of", "all", "run", "ids", "inside", "S3", "folder", ".", "It", "does", "not", "respect", "S3", "pagination", "(", "MaxKeys", ")", "and", "returns", "**", "all", "**", "keys", "from", "bucket", "and", "won", "t", "list", "any", "prefixes", "with", "object", "archived", "to", "AWS", "Glacier" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L74-L109
[ "def", "list_runids", "(", "s3_client", ",", "full_path", ")", ":", "listing_finished", "=", "False", "# last response was not truncated", "run_ids_buffer", "=", "[", "]", "last_continuation_token", "=", "None", "(", "bucket", ",", "prefix", ")", "=", "split_full_path", "(", "full_path", ")", "while", "not", "listing_finished", ":", "options", "=", "clean_dict", "(", "{", "'Bucket'", ":", "bucket", ",", "'Prefix'", ":", "prefix", ",", "'Delimiter'", ":", "'/'", ",", "'ContinuationToken'", ":", "last_continuation_token", "}", ")", "response", "=", "s3_client", ".", "list_objects_v2", "(", "*", "*", "options", ")", "keys", "=", "[", "extract_run_id", "(", "key", "[", "'Prefix'", "]", ")", "for", "key", "in", "response", ".", "get", "(", "'CommonPrefixes'", ",", "[", "]", ")", "]", "run_ids_buffer", ".", "extend", "(", "[", "key", "for", "key", "in", "keys", "if", "key", "is", "not", "None", "]", ")", "last_continuation_token", "=", "response", ".", "get", "(", "'NextContinuationToken'", ",", "None", ")", "if", "not", "response", "[", "'IsTruncated'", "]", ":", "listing_finished", "=", "True", "non_archived_run_ids", "=", "[", "run_id", "for", "run_id", "in", "run_ids_buffer", "if", "not", "is_glacier", "(", "s3_client", ",", "bucket", ",", "run_id", ")", "]", "return", "non_archived_run_ids" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
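The loop in list_runids is the standard list_objects_v2 continuation-token pattern with a '/' delimiter, so S3 returns run folders as CommonPrefixes. A generic, self-contained sketch of that pagination (function and bucket names are illustrative):

def list_common_prefixes(s3_client, bucket, prefix):
    """Collect every sub-prefix under prefix, following continuation tokens."""
    prefixes = []
    token = None
    while True:
        kwargs = {'Bucket': bucket, 'Prefix': prefix, 'Delimiter': '/'}
        if token is not None:
            kwargs['ContinuationToken'] = token
        response = s3_client.list_objects_v2(**kwargs)
        prefixes.extend(p['Prefix'] for p in response.get('CommonPrefixes', []))
        if not response['IsTruncated']:
            return prefixes
        token = response['NextContinuationToken']

# Usage (names are placeholders):
# import boto3
# s3 = boto3.client('s3')
# runs = list_common_prefixes(s3, 'acme-events-bucket', 'main-pipeline/enriched-archive/')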
test
split_full_path
Return pair of bucket without protocol and path Arguments: path - valid S3 path, such as s3://somebucket/events >>> split_full_path('s3://mybucket/path-to-events') ('mybucket', 'path-to-events/') >>> split_full_path('s3://mybucket') ('mybucket', None) >>> split_full_path('s3n://snowplow-bucket/some/prefix/') ('snowplow-bucket', 'some/prefix/')
snowplow_analytics_sdk/run_manifests.py
def split_full_path(path): """Return pair of bucket without protocol and path Arguments: path - valid S3 path, such as s3://somebucket/events >>> split_full_path('s3://mybucket/path-to-events') ('mybucket', 'path-to-events/') >>> split_full_path('s3://mybucket') ('mybucket', None) >>> split_full_path('s3n://snowplow-bucket/some/prefix/') ('snowplow-bucket', 'some/prefix/') """ if path.startswith('s3://'): path = path[5:] elif path.startswith('s3n://'): path = path[6:] elif path.startswith('s3a://'): path = path[6:] else: raise ValueError("S3 path should start with s3://, s3n:// or " "s3a:// prefix") parts = path.split('/') bucket = parts[0] path = '/'.join(parts[1:]) return bucket, normalize_prefix(path)
def split_full_path(path): """Return pair of bucket without protocol and path Arguments: path - valid S3 path, such as s3://somebucket/events >>> split_full_path('s3://mybucket/path-to-events') ('mybucket', 'path-to-events/') >>> split_full_path('s3://mybucket') ('mybucket', None) >>> split_full_path('s3n://snowplow-bucket/some/prefix/') ('snowplow-bucket', 'some/prefix/') """ if path.startswith('s3://'): path = path[5:] elif path.startswith('s3n://'): path = path[6:] elif path.startswith('s3a://'): path = path[6:] else: raise ValueError("S3 path should start with s3://, s3n:// or " "s3a:// prefix") parts = path.split('/') bucket = parts[0] path = '/'.join(parts[1:]) return bucket, normalize_prefix(path)
[ "Return", "pair", "of", "bucket", "without", "protocol", "and", "path" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L112-L137
[ "def", "split_full_path", "(", "path", ")", ":", "if", "path", ".", "startswith", "(", "'s3://'", ")", ":", "path", "=", "path", "[", "5", ":", "]", "elif", "path", ".", "startswith", "(", "'s3n://'", ")", ":", "path", "=", "path", "[", "6", ":", "]", "elif", "path", ".", "startswith", "(", "'s3a://'", ")", ":", "path", "=", "path", "[", "6", ":", "]", "else", ":", "raise", "ValueError", "(", "\"S3 path should start with s3://, s3n:// or \"", "\"s3a:// prefix\"", ")", "parts", "=", "path", ".", "split", "(", "'/'", ")", "bucket", "=", "parts", "[", "0", "]", "path", "=", "'/'", ".", "join", "(", "parts", "[", "1", ":", "]", ")", "return", "bucket", ",", "normalize_prefix", "(", "path", ")" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
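split_full_path delegates trailing-slash handling to normalize_prefix, which is not included in this record. A hypothetical reconstruction consistent with the doctests above (an empty prefix becomes None, anything else ends in exactly one slash); the SDK's actual implementation may differ.

def normalize_prefix(path):
    # Assumed behaviour, inferred from split_full_path's doctests.
    if not path:
        return None
    return path if path.endswith('/') else path + '/'

# split_full_path('s3://mybucket/path-to-events')  -> ('mybucket', 'path-to-events/')
# split_full_path('s3://mybucket')                 -> ('mybucket', None)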
test
is_glacier
Check if prefix is archived in Glacier, by checking storage class of first object inside that prefix Arguments: s3_client - boto3 S3 client (not service) bucket - valid extracted bucket (without protocol and prefix) example: sowplow-events-data prefix - valid S3 prefix (usually, run_id) example: snowplow-archive/enriched/archive/
snowplow_analytics_sdk/run_manifests.py
def is_glacier(s3_client, bucket, prefix): """Check if prefix is archived in Glacier, by checking storage class of first object inside that prefix Arguments: s3_client - boto3 S3 client (not service) bucket - valid extracted bucket (without protocol and prefix) example: sowplow-events-data prefix - valid S3 prefix (usually, run_id) example: snowplow-archive/enriched/archive/ """ response = s3_client.list_objects_v2(Bucket=bucket, Prefix=prefix, MaxKeys=3) # 3 to not fetch _SUCCESS for key in response['Contents']: if key.get('StorageClass', 'STANDARD') == 'GLACIER': return True return False
def is_glacier(s3_client, bucket, prefix): """Check if prefix is archived in Glacier, by checking storage class of first object inside that prefix Arguments: s3_client - boto3 S3 client (not service) bucket - valid extracted bucket (without protocol and prefix) example: sowplow-events-data prefix - valid S3 prefix (usually, run_id) example: snowplow-archive/enriched/archive/ """ response = s3_client.list_objects_v2(Bucket=bucket, Prefix=prefix, MaxKeys=3) # 3 to not fetch _SUCCESS for key in response['Contents']: if key.get('StorageClass', 'STANDARD') == 'GLACIER': return True return False
[ "Check", "if", "prefix", "is", "archived", "in", "Glacier", "by", "checking", "storage", "class", "of", "first", "object", "inside", "that", "prefix" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L140-L158
[ "def", "is_glacier", "(", "s3_client", ",", "bucket", ",", "prefix", ")", ":", "response", "=", "s3_client", ".", "list_objects_v2", "(", "Bucket", "=", "bucket", ",", "Prefix", "=", "prefix", ",", "MaxKeys", "=", "3", ")", "# 3 to not fetch _SUCCESS", "for", "key", "in", "response", "[", "'Contents'", "]", ":", "if", "key", ".", "get", "(", "'StorageClass'", ",", "'STANDARD'", ")", "==", "'GLACIER'", ":", "return", "True", "return", "False" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
extract_run_id
Extract date part from run id Arguments: key - full key name, such as shredded-archive/run=2012-12-11-01-31-33/ (trailing slash is required) >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33/') 'shredded-archive/run=2012-12-11-01-11-33/' >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33') >>> extract_run_id('shredded-archive/run=2012-13-11-01-11-33/')
snowplow_analytics_sdk/run_manifests.py
def extract_run_id(key): """Extract date part from run id Arguments: key - full key name, such as shredded-archive/run=2012-12-11-01-31-33/ (trailing slash is required) >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33/') 'shredded-archive/run=2012-12-11-01-11-33/' >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33') >>> extract_run_id('shredded-archive/run=2012-13-11-01-11-33/') """ filename = key.split('/')[-2] # -1 element is empty string run_id = filename.lstrip('run=') try: datetime.strptime(run_id, '%Y-%m-%d-%H-%M-%S') return key except ValueError: return None
def extract_run_id(key): """Extract date part from run id Arguments: key - full key name, such as shredded-archive/run=2012-12-11-01-31-33/ (trailing slash is required) >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33/') 'shredded-archive/run=2012-12-11-01-11-33/' >>> extract_run_id('shredded-archive/run=2012-12-11-01-11-33') >>> extract_run_id('shredded-archive/run=2012-13-11-01-11-33/') """ filename = key.split('/')[-2] # -1 element is empty string run_id = filename.lstrip('run=') try: datetime.strptime(run_id, '%Y-%m-%d-%H-%M-%S') return key except ValueError: return None
[ "Extract", "date", "part", "from", "run", "id" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L161-L179
[ "def", "extract_run_id", "(", "key", ")", ":", "filename", "=", "key", ".", "split", "(", "'/'", ")", "[", "-", "2", "]", "# -1 element is empty string", "run_id", "=", "filename", ".", "lstrip", "(", "'run='", ")", "try", ":", "datetime", ".", "strptime", "(", "run_id", ",", "'%Y-%m-%d-%H-%M-%S'", ")", "return", "key", "except", "ValueError", ":", "return", "None" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
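One subtlety in extract_run_id worth spelling out: str.lstrip('run=') strips any leading characters from the set {'r', 'u', 'n', '='}, not the literal prefix 'run='. It behaves as intended here only because the remaining run id always starts with a digit. A plain-Python illustration:

'run=2012-12-11-01-11-33'.lstrip('run=')   # -> '2012-12-11-01-11-33' (stops at the first digit)
'nurse-duty'.lstrip('run=')                # -> 'se-duty' ('n', 'u', 'r' are all in the strip set)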
test
clean_dict
Remove all keys with Nones as values >>> clean_dict({'key': None}) {} >>> clean_dict({'empty_s': ''}) {'empty_s': ''}
snowplow_analytics_sdk/run_manifests.py
def clean_dict(dict): """Remove all keys with Nones as values >>> clean_dict({'key': None}) {} >>> clean_dict({'empty_s': ''}) {'empty_s': ''} """ if sys.version_info[0] < 3: return {k: v for k, v in dict.iteritems() if v is not None} else: return {k: v for k, v in dict.items() if v is not None}
def clean_dict(dict): """Remove all keys with Nones as values >>> clean_dict({'key': None}) {} >>> clean_dict({'empty_s': ''}) {'empty_s': ''} """ if sys.version_info[0] < 3: return {k: v for k, v in dict.iteritems() if v is not None} else: return {k: v for k, v in dict.items() if v is not None}
[ "Remove", "all", "keys", "with", "Nones", "as", "values" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L198-L209
[ "def", "clean_dict", "(", "dict", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", "<", "3", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "dict", ".", "iteritems", "(", ")", "if", "v", "is", "not", "None", "}", "else", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "dict", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
add_to_manifest
Add run_id into DynamoDB manifest table Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store
snowplow_analytics_sdk/run_manifests.py
def add_to_manifest(dynamodb_client, table_name, run_id): """Add run_id into DynamoDB manifest table Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store """ dynamodb_client.put_item( TableName=table_name, Item={ DYNAMODB_RUNID_ATTRIBUTE: { 'S': run_id } } )
def add_to_manifest(dynamodb_client, table_name, run_id): """Add run_id into DynamoDB manifest table Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store """ dynamodb_client.put_item( TableName=table_name, Item={ DYNAMODB_RUNID_ATTRIBUTE: { 'S': run_id } } )
[ "Add", "run_id", "into", "DynamoDB", "manifest", "table" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L212-L227
[ "def", "add_to_manifest", "(", "dynamodb_client", ",", "table_name", ",", "run_id", ")", ":", "dynamodb_client", ".", "put_item", "(", "TableName", "=", "table_name", ",", "Item", "=", "{", "DYNAMODB_RUNID_ATTRIBUTE", ":", "{", "'S'", ":", "run_id", "}", "}", ")" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
is_in_manifest
Check if run_id is stored in DynamoDB table. Return True if run_id is stored or False otherwise. Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store
snowplow_analytics_sdk/run_manifests.py
def is_in_manifest(dynamodb_client, table_name, run_id): """Check if run_id is stored in DynamoDB table. Return True if run_id is stored or False otherwise. Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store """ response = dynamodb_client.get_item( TableName=table_name, Key={ DYNAMODB_RUNID_ATTRIBUTE: { 'S': run_id } } ) return response.get('Item') is not None
def is_in_manifest(dynamodb_client, table_name, run_id): """Check if run_id is stored in DynamoDB table. Return True if run_id is stored or False otherwise. Arguments: dynamodb_client - boto3 DynamoDB client (not service) table_name - string representing existing table name run_id - string representing run_id to store """ response = dynamodb_client.get_item( TableName=table_name, Key={ DYNAMODB_RUNID_ATTRIBUTE: { 'S': run_id } } ) return response.get('Item') is not None
[ "Check", "if", "run_id", "is", "stored", "in", "DynamoDB", "table", ".", "Return", "True", "if", "run_id", "is", "stored", "or", "False", "otherwise", "." ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/run_manifests.py#L230-L247
[ "def", "is_in_manifest", "(", "dynamodb_client", ",", "table_name", ",", "run_id", ")", ":", "response", "=", "dynamodb_client", ".", "get_item", "(", "TableName", "=", "table_name", ",", "Key", "=", "{", "DYNAMODB_RUNID_ATTRIBUTE", ":", "{", "'S'", ":", "run_id", "}", "}", ")", "return", "response", ".", "get", "(", "'Item'", ")", "is", "not", "None" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
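Taken together, list_runids, is_in_manifest and add_to_manifest support a process-each-run-exactly-once loop. A sketch of that assumed pattern; the table name, archive path and process() callback are placeholders.

import boto3
from snowplow_analytics_sdk.run_manifests import (
    list_runids, is_in_manifest, add_to_manifest)

s3 = boto3.client('s3')
dynamodb = boto3.client('dynamodb')
table = 'snowplow-run-manifests'            # placeholder DynamoDB table name

for run_id in list_runids(s3, 's3://acme-snowplow-archive/enriched/archive/'):
    if not is_in_manifest(dynamodb, table, run_id):
        process(run_id)                     # placeholder for application-specific work
        add_to_manifest(dynamodb, table, run_id)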
test
extract_schema
Extracts Schema information from Iglu URI >>> extract_schema("iglu:com.acme-corporation_underscore/event_name-dash/jsonschema/1-10-1")['vendor'] 'com.acme-corporation_underscore'
snowplow_analytics_sdk/json_shredder.py
def extract_schema(uri): """ Extracts Schema information from Iglu URI >>> extract_schema("iglu:com.acme-corporation_underscore/event_name-dash/jsonschema/1-10-1")['vendor'] 'com.acme-corporation_underscore' """ match = re.match(SCHEMA_URI_REGEX, uri) if match: return { 'vendor': match.group(1), 'name': match.group(2), 'format': match.group(3), 'version': match.group(4) } else: raise SnowplowEventTransformationException([ "Schema {} does not conform to regular expression {}".format(uri, SCHEMA_URI) ])
def extract_schema(uri): """ Extracts Schema information from Iglu URI >>> extract_schema("iglu:com.acme-corporation_underscore/event_name-dash/jsonschema/1-10-1")['vendor'] 'com.acme-corporation_underscore' """ match = re.match(SCHEMA_URI_REGEX, uri) if match: return { 'vendor': match.group(1), 'name': match.group(2), 'format': match.group(3), 'version': match.group(4) } else: raise SnowplowEventTransformationException([ "Schema {} does not conform to regular expression {}".format(uri, SCHEMA_URI) ])
[ "Extracts", "Schema", "information", "from", "Iglu", "URI" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/json_shredder.py#L37-L56
[ "def", "extract_schema", "(", "uri", ")", ":", "match", "=", "re", ".", "match", "(", "SCHEMA_URI_REGEX", ",", "uri", ")", "if", "match", ":", "return", "{", "'vendor'", ":", "match", ".", "group", "(", "1", ")", ",", "'name'", ":", "match", ".", "group", "(", "2", ")", ",", "'format'", ":", "match", ".", "group", "(", "3", ")", ",", "'version'", ":", "match", ".", "group", "(", "4", ")", "}", "else", ":", "raise", "SnowplowEventTransformationException", "(", "[", "\"Schema {} does not conform to regular expression {}\"", ".", "format", "(", "uri", ",", "SCHEMA_URI", ")", "]", ")" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
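extract_schema relies on SCHEMA_URI and SCHEMA_URI_REGEX, module-level constants not included in this record. A hypothetical reconstruction that is consistent with the doctest above and with the four capture groups the function reads; the SDK's exact pattern may differ.

import re

# Assumed shape: vendor / name / format / SchemaVer (MODEL-REVISION-ADDITION).
SCHEMA_URI = r'^iglu:([a-zA-Z0-9_.-]+)/([a-zA-Z0-9_-]+)/([a-zA-Z0-9_-]+)/([0-9]+-[0-9]+-[0-9]+)$'
SCHEMA_URI_REGEX = re.compile(SCHEMA_URI)

# extract_schema('iglu:com.acme-corporation_underscore/event_name-dash/jsonschema/1-10-1')
# -> {'vendor': 'com.acme-corporation_underscore', 'name': 'event_name-dash',
#     'format': 'jsonschema', 'version': '1-10-1'}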
test
fix_schema
Create an Elasticsearch field name from a schema string
snowplow_analytics_sdk/json_shredder.py
def fix_schema(prefix, schema): """ Create an Elasticsearch field name from a schema string """ schema_dict = extract_schema(schema) snake_case_organization = schema_dict['vendor'].replace('.', '_').lower() snake_case_name = re.sub('([^A-Z_])([A-Z])', '\g<1>_\g<2>', schema_dict['name']).lower() model = schema_dict['version'].split('-')[0] return "{}_{}_{}_{}".format(prefix, snake_case_organization, snake_case_name, model)
def fix_schema(prefix, schema): """ Create an Elasticsearch field name from a schema string """ schema_dict = extract_schema(schema) snake_case_organization = schema_dict['vendor'].replace('.', '_').lower() snake_case_name = re.sub('([^A-Z_])([A-Z])', '\g<1>_\g<2>', schema_dict['name']).lower() model = schema_dict['version'].split('-')[0] return "{}_{}_{}_{}".format(prefix, snake_case_organization, snake_case_name, model)
[ "Create", "an", "Elasticsearch", "field", "name", "from", "a", "schema", "string" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/json_shredder.py#L59-L67
[ "def", "fix_schema", "(", "prefix", ",", "schema", ")", ":", "schema_dict", "=", "extract_schema", "(", "schema", ")", "snake_case_organization", "=", "schema_dict", "[", "'vendor'", "]", ".", "replace", "(", "'.'", ",", "'_'", ")", ".", "lower", "(", ")", "snake_case_name", "=", "re", ".", "sub", "(", "'([^A-Z_])([A-Z])'", ",", "'\\g<1>_\\g<2>'", ",", "schema_dict", "[", "'name'", "]", ")", ".", "lower", "(", ")", "model", "=", "schema_dict", "[", "'version'", "]", ".", "split", "(", "'-'", ")", "[", "0", "]", "return", "\"{}_{}_{}_{}\"", ".", "format", "(", "prefix", ",", "snake_case_organization", ",", "snake_case_name", ",", "model", ")" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
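Two worked examples of fix_schema's snake-casing, following the code above. The Iglu URIs are illustrative, and the second example assumes the schema-URI regex accepts CamelCase names.

fix_schema('contexts', 'iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-1')
# -> 'contexts_com_snowplowanalytics_snowplow_link_click_1'

# CamelCase names are split at each lower/upper boundary before lower-casing:
# 'LinkClick' -> 'Link_Click' -> 'link_click'
fix_schema('unstruct_event', 'iglu:com.acme/LinkClick/jsonschema/2-0-0')
# -> 'unstruct_event_com_acme_link_click_2'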
test
parse_contexts
Convert a contexts JSON to an Elasticsearch-compatible list of key-value pairs For example, the JSON { "data": [ { "data": { "unique": true }, "schema": "iglu:com.acme/unduplicated/jsonschema/1-0-0" }, { "data": { "value": 1 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" }, { "data": { "value": 2 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" } ], "schema": "iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-0" } would become [ ("context_com_acme_duplicated_1", [{"value": 1}, {"value": 2}]), ("context_com_acme_unduplicated_1", [{"unique": true}]) ]
snowplow_analytics_sdk/json_shredder.py
def parse_contexts(contexts): """ Convert a contexts JSON to an Elasticsearch-compatible list of key-value pairs For example, the JSON { "data": [ { "data": { "unique": true }, "schema": "iglu:com.acme/unduplicated/jsonschema/1-0-0" }, { "data": { "value": 1 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" }, { "data": { "value": 2 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" } ], "schema": "iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-0" } would become [ ("context_com_acme_duplicated_1", [{"value": 1}, {"value": 2}]), ("context_com_acme_unduplicated_1", [{"unique": true}]) ] """ my_json = json.loads(contexts) data = my_json['data'] distinct_contexts = {} for context in data: schema = fix_schema("contexts", context['schema']) inner_data = context['data'] if schema not in distinct_contexts: distinct_contexts[schema] = [inner_data] else: distinct_contexts[schema].append(inner_data) output = [] for key in distinct_contexts: output.append((key, distinct_contexts[key])) return output
def parse_contexts(contexts): """ Convert a contexts JSON to an Elasticsearch-compatible list of key-value pairs For example, the JSON { "data": [ { "data": { "unique": true }, "schema": "iglu:com.acme/unduplicated/jsonschema/1-0-0" }, { "data": { "value": 1 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" }, { "data": { "value": 2 }, "schema": "iglu:com.acme/duplicated/jsonschema/1-0-0" } ], "schema": "iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-0" } would become [ ("context_com_acme_duplicated_1", [{"value": 1}, {"value": 2}]), ("context_com_acme_unduplicated_1", [{"unique": true}]) ] """ my_json = json.loads(contexts) data = my_json['data'] distinct_contexts = {} for context in data: schema = fix_schema("contexts", context['schema']) inner_data = context['data'] if schema not in distinct_contexts: distinct_contexts[schema] = [inner_data] else: distinct_contexts[schema].append(inner_data) output = [] for key in distinct_contexts: output.append((key, distinct_contexts[key])) return output
[ "Convert", "a", "contexts", "JSON", "to", "an", "Elasticsearch", "-", "compatible", "list", "of", "key", "-", "value", "pairs", "For", "example", "the", "JSON" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/json_shredder.py#L70-L119
[ "def", "parse_contexts", "(", "contexts", ")", ":", "my_json", "=", "json", ".", "loads", "(", "contexts", ")", "data", "=", "my_json", "[", "'data'", "]", "distinct_contexts", "=", "{", "}", "for", "context", "in", "data", ":", "schema", "=", "fix_schema", "(", "\"contexts\"", ",", "context", "[", "'schema'", "]", ")", "inner_data", "=", "context", "[", "'data'", "]", "if", "schema", "not", "in", "distinct_contexts", ":", "distinct_contexts", "[", "schema", "]", "=", "[", "inner_data", "]", "else", ":", "distinct_contexts", "[", "schema", "]", ".", "append", "(", "inner_data", ")", "output", "=", "[", "]", "for", "key", "in", "distinct_contexts", ":", "output", ".", "append", "(", "(", "key", ",", "distinct_contexts", "[", "key", "]", ")", ")", "return", "output" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
parse_unstruct
Convert an unstructured event JSON to a list containing one Elasticsearch-compatible key-value pair For example, the JSON { "data": { "data": { "key": "value" }, "schema": "iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-1" }, "schema": "iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0" } would become [ ( "unstruct_com_snowplowanalytics_snowplow_link_click_1", { "key": "value" } ) ]
snowplow_analytics_sdk/json_shredder.py
def parse_unstruct(unstruct): """ Convert an unstructured event JSON to a list containing one Elasticsearch-compatible key-value pair For example, the JSON { "data": { "data": { "key": "value" }, "schema": "iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-1" }, "schema": "iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0" } would become [ ( "unstruct_com_snowplowanalytics_snowplow_link_click_1", { "key": "value" } ) ] """ my_json = json.loads(unstruct) data = my_json['data'] schema = data['schema'] if 'data' in data: inner_data = data['data'] else: raise SnowplowEventTransformationException(["Could not extract inner data field from unstructured event"]) fixed_schema = fix_schema("unstruct_event", schema) return [(fixed_schema, inner_data)]
def parse_unstruct(unstruct): """ Convert an unstructured event JSON to a list containing one Elasticsearch-compatible key-value pair For example, the JSON { "data": { "data": { "key": "value" }, "schema": "iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-1" }, "schema": "iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0" } would become [ ( "unstruct_com_snowplowanalytics_snowplow_link_click_1", { "key": "value" } ) ] """ my_json = json.loads(unstruct) data = my_json['data'] schema = data['schema'] if 'data' in data: inner_data = data['data'] else: raise SnowplowEventTransformationException(["Could not extract inner data field from unstructured event"]) fixed_schema = fix_schema("unstruct_event", schema) return [(fixed_schema, inner_data)]
[ "Convert", "an", "unstructured", "event", "JSON", "to", "a", "list", "containing", "one", "Elasticsearch", "-", "compatible", "key", "-", "value", "pair", "For", "example", "the", "JSON" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/json_shredder.py#L122-L155
[ "def", "parse_unstruct", "(", "unstruct", ")", ":", "my_json", "=", "json", ".", "loads", "(", "unstruct", ")", "data", "=", "my_json", "[", "'data'", "]", "schema", "=", "data", "[", "'schema'", "]", "if", "'data'", "in", "data", ":", "inner_data", "=", "data", "[", "'data'", "]", "else", ":", "raise", "SnowplowEventTransformationException", "(", "[", "\"Could not extract inner data field from unstructured event\"", "]", ")", "fixed_schema", "=", "fix_schema", "(", "\"unstruct_event\"", ",", "schema", ")", "return", "[", "(", "fixed_schema", ",", "inner_data", ")", "]" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
transform
Convert a Snowplow enriched event TSV into a JSON
snowplow_analytics_sdk/event_transformer.py
def transform(line, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event TSV into a JSON """ return jsonify_good_event(line.split('\t'), known_fields, add_geolocation_data)
def transform(line, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event TSV into a JSON """ return jsonify_good_event(line.split('\t'), known_fields, add_geolocation_data)
[ "Convert", "a", "Snowplow", "enriched", "event", "TSV", "into", "a", "JSON" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/event_transformer.py#L192-L196
[ "def", "transform", "(", "line", ",", "known_fields", "=", "ENRICHED_EVENT_FIELD_TYPES", ",", "add_geolocation_data", "=", "True", ")", ":", "return", "jsonify_good_event", "(", "line", ".", "split", "(", "'\\t'", ")", ",", "known_fields", ",", "add_geolocation_data", ")" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
test
jsonify_good_event
Convert a Snowplow enriched event in the form of an array of fields into a JSON
snowplow_analytics_sdk/event_transformer.py
def jsonify_good_event(event, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event in the form of an array of fields into a JSON """ if len(event) != len(known_fields): raise SnowplowEventTransformationException( ["Expected {} fields, received {} fields.".format(len(known_fields), len(event))] ) else: output = {} errors = [] if add_geolocation_data and event[LATITUDE_INDEX] != '' and event[LONGITUDE_INDEX] != '': output['geo_location'] = event[LATITUDE_INDEX] + ',' + event[LONGITUDE_INDEX] for i in range(len(event)): key = known_fields[i][0] if event[i] != '': try: kvpairs = known_fields[i][1](key, event[i]) for kvpair in kvpairs: output[kvpair[0]] = kvpair[1] except SnowplowEventTransformationException as sete: errors += sete.error_messages except Exception as e: errors += ["Unexpected exception parsing field with key {} and value {}: {}".format( known_fields[i][0], event[i], repr(e) )] if errors: raise SnowplowEventTransformationException(errors) else: return output
def jsonify_good_event(event, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event in the form of an array of fields into a JSON """ if len(event) != len(known_fields): raise SnowplowEventTransformationException( ["Expected {} fields, received {} fields.".format(len(known_fields), len(event))] ) else: output = {} errors = [] if add_geolocation_data and event[LATITUDE_INDEX] != '' and event[LONGITUDE_INDEX] != '': output['geo_location'] = event[LATITUDE_INDEX] + ',' + event[LONGITUDE_INDEX] for i in range(len(event)): key = known_fields[i][0] if event[i] != '': try: kvpairs = known_fields[i][1](key, event[i]) for kvpair in kvpairs: output[kvpair[0]] = kvpair[1] except SnowplowEventTransformationException as sete: errors += sete.error_messages except Exception as e: errors += ["Unexpected exception parsing field with key {} and value {}: {}".format( known_fields[i][0], event[i], repr(e) )] if errors: raise SnowplowEventTransformationException(errors) else: return output
[ "Convert", "a", "Snowplow", "enriched", "event", "in", "the", "form", "of", "an", "array", "of", "fields", "into", "a", "JSON" ]
snowplow/snowplow-python-analytics-sdk
python
https://github.com/snowplow/snowplow-python-analytics-sdk/blob/0ddca91e3f6d8bed88627fa557790aa4868bdace/snowplow_analytics_sdk/event_transformer.py#L199-L230
[ "def", "jsonify_good_event", "(", "event", ",", "known_fields", "=", "ENRICHED_EVENT_FIELD_TYPES", ",", "add_geolocation_data", "=", "True", ")", ":", "if", "len", "(", "event", ")", "!=", "len", "(", "known_fields", ")", ":", "raise", "SnowplowEventTransformationException", "(", "[", "\"Expected {} fields, received {} fields.\"", ".", "format", "(", "len", "(", "known_fields", ")", ",", "len", "(", "event", ")", ")", "]", ")", "else", ":", "output", "=", "{", "}", "errors", "=", "[", "]", "if", "add_geolocation_data", "and", "event", "[", "LATITUDE_INDEX", "]", "!=", "''", "and", "event", "[", "LONGITUDE_INDEX", "]", "!=", "''", ":", "output", "[", "'geo_location'", "]", "=", "event", "[", "LATITUDE_INDEX", "]", "+", "','", "+", "event", "[", "LONGITUDE_INDEX", "]", "for", "i", "in", "range", "(", "len", "(", "event", ")", ")", ":", "key", "=", "known_fields", "[", "i", "]", "[", "0", "]", "if", "event", "[", "i", "]", "!=", "''", ":", "try", ":", "kvpairs", "=", "known_fields", "[", "i", "]", "[", "1", "]", "(", "key", ",", "event", "[", "i", "]", ")", "for", "kvpair", "in", "kvpairs", ":", "output", "[", "kvpair", "[", "0", "]", "]", "=", "kvpair", "[", "1", "]", "except", "SnowplowEventTransformationException", "as", "sete", ":", "errors", "+=", "sete", ".", "error_messages", "except", "Exception", "as", "e", ":", "errors", "+=", "[", "\"Unexpected exception parsing field with key {} and value {}: {}\"", ".", "format", "(", "known_fields", "[", "i", "]", "[", "0", "]", ",", "event", "[", "i", "]", ",", "repr", "(", "e", ")", ")", "]", "if", "errors", ":", "raise", "SnowplowEventTransformationException", "(", "errors", ")", "else", ":", "return", "output" ]
0ddca91e3f6d8bed88627fa557790aa4868bdace
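Each entry of known_fields is a (field name, converter) pair; a converter receives the field name plus the raw TSV value and returns a list of key/value pairs. A minimal sketch with a hypothetical two-field schema (the real ENRICHED_EVENT_FIELD_TYPES covers the full enriched-event layout); geolocation is disabled so the short event list is never indexed at LATITUDE_INDEX.

def convert_string(key, value):        # local helper for this sketch only
    return [(key, value)]

def convert_int(key, value):           # local helper for this sketch only
    return [(key, int(value))]

tiny_fields = [('app_id', convert_string), ('txn_id', convert_int)]

jsonify_good_event(['web', '42'], known_fields=tiny_fields, add_geolocation_data=False)
# -> {'app_id': 'web', 'txn_id': 42}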
test
ViewPanel._get_view_data
Extract the used view from the TemplateResponse context (ContextMixin)
debugtools/panels/view.py
def _get_view_data(self, context_data): """ Extract the used view from the TemplateResponse context (ContextMixin) """ view = context_data.get('view') if not isinstance(view, View): view = None # Denote interesting objects in the template context template_context = [] for key, obj in context_data.items(): if isinstance(obj, (BaseForm, BaseFormSet, Model)): template_context.append((key, _format_path(obj.__class__))) return { 'model': _get_view_model(view), 'form': _get_form_class(view), 'template_context': template_context, }
def _get_view_data(self, context_data): """ Extract the used view from the TemplateResponse context (ContextMixin) """ view = context_data.get('view') if not isinstance(view, View): view = None # Denote interesting objects in the template context template_context = [] for key, obj in context_data.items(): if isinstance(obj, (BaseForm, BaseFormSet, Model)): template_context.append((key, _format_path(obj.__class__))) return { 'model': _get_view_model(view), 'form': _get_form_class(view), 'template_context': template_context, }
[ "Extract", "the", "used", "view", "from", "the", "TemplateResponse", "context", "(", "ContextMixin", ")" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/panels/view.py#L47-L65
[ "def", "_get_view_data", "(", "self", ",", "context_data", ")", ":", "view", "=", "context_data", ".", "get", "(", "'view'", ")", "if", "not", "isinstance", "(", "view", ",", "View", ")", ":", "view", "=", "None", "# Denote interesting objects in the template context", "template_context", "=", "[", "]", "for", "key", ",", "obj", "in", "context_data", ".", "items", "(", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "BaseForm", ",", "BaseFormSet", ",", "Model", ")", ")", ":", "template_context", ".", "append", "(", "(", "key", ",", "_format_path", "(", "obj", ".", "__class__", ")", ")", ")", "return", "{", "'model'", ":", "_get_view_model", "(", "view", ")", ",", "'form'", ":", "_get_form_class", "(", "view", ")", ",", "'template_context'", ":", "template_context", ",", "}" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
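_get_view_model and _get_form_class are helpers from the same module that are not part of this record. A hypothetical reconstruction of what they plausibly do for Django class-based views (read view.model / view.queryset and view.form_class); the real implementations may handle more cases.

def _get_view_model(view):
    # Assumed behaviour: resolve the model of a SingleObjectMixin/MultipleObjectMixin-style view.
    if view is None:
        return None
    model = getattr(view, 'model', None)
    if model is None and getattr(view, 'queryset', None) is not None:
        model = view.queryset.model
    return _format_path(model) if model is not None else None


def _get_form_class(view):
    # Assumed behaviour: report the form class of a ModelFormMixin-style view.
    if view is None:
        return None
    form_class = getattr(view, 'form_class', None)
    return _format_path(form_class) if form_class is not None else None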
test
get_used_template
Get the template used in a TemplateResponse. This returns a tuple of "active choice, all choices"
debugtools/utils/xview.py
def get_used_template(response): """ Get the template used in a TemplateResponse. This returns a tuple of "active choice, all choices" """ if not hasattr(response, 'template_name'): return None, None template = response.template_name if template is None: return None, None if isinstance(template, (list, tuple)): # See which template name was really used. if len(template) == 1: return template[0], None else: used_name = _get_used_template_name(template) return used_name, template elif isinstance(template, six.string_types): # Single string return template, None else: # Template object. filename = _get_template_filename(template) template_name = '<template object from {0}>'.format(filename) if filename else '<template object>' return template_name, None
def get_used_template(response): """ Get the template used in a TemplateResponse. This returns a tuple of "active choice, all choices" """ if not hasattr(response, 'template_name'): return None, None template = response.template_name if template is None: return None, None if isinstance(template, (list, tuple)): # See which template name was really used. if len(template) == 1: return template[0], None else: used_name = _get_used_template_name(template) return used_name, template elif isinstance(template, six.string_types): # Single string return template, None else: # Template object. filename = _get_template_filename(template) template_name = '<template object from {0}>'.format(filename) if filename else '<template object>' return template_name, None
[ "Get", "the", "template", "used", "in", "a", "TemplateResponse", ".", "This", "returns", "a", "tuple", "of", "active", "choice", "all", "choices" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/utils/xview.py#L29-L55
[ "def", "get_used_template", "(", "response", ")", ":", "if", "not", "hasattr", "(", "response", ",", "'template_name'", ")", ":", "return", "None", ",", "None", "template", "=", "response", ".", "template_name", "if", "template", "is", "None", ":", "return", "None", ",", "None", "if", "isinstance", "(", "template", ",", "(", "list", ",", "tuple", ")", ")", ":", "# See which template name was really used.", "if", "len", "(", "template", ")", "==", "1", ":", "return", "template", "[", "0", "]", ",", "None", "else", ":", "used_name", "=", "_get_used_template_name", "(", "template", ")", "return", "used_name", ",", "template", "elif", "isinstance", "(", "template", ",", "six", ".", "string_types", ")", ":", "# Single string", "return", "template", ",", "None", "else", ":", "# Template object.", "filename", "=", "_get_template_filename", "(", "template", ")", "template_name", "=", "'<template object from {0}>'", ".", "format", "(", "filename", ")", "if", "filename", "else", "'<template object>'", "return", "template_name", ",", "None" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
test
PrintNode.print_context
Print the entire template context
debugtools/templatetags/debugtools_tags.py
def print_context(self, context): """ Print the entire template context """ text = [CONTEXT_TITLE] for i, context_scope in enumerate(context): dump1 = linebreaksbr(pformat_django_context_html(context_scope)) dump2 = pformat_dict_summary_html(context_scope) # Collapse long objects by default (e.g. request, LANGUAGES and sql_queries) if len(context_scope) <= 3 and dump1.count('<br />') > 20: (dump1, dump2) = (dump2, dump1) text.append(CONTEXT_BLOCK.format( style=PRE_STYLE, num=i, dump1=dump1, dump2=dump2 )) return u''.join(text)
def print_context(self, context): """ Print the entire template context """ text = [CONTEXT_TITLE] for i, context_scope in enumerate(context): dump1 = linebreaksbr(pformat_django_context_html(context_scope)) dump2 = pformat_dict_summary_html(context_scope) # Collapse long objects by default (e.g. request, LANGUAGES and sql_queries) if len(context_scope) <= 3 and dump1.count('<br />') > 20: (dump1, dump2) = (dump2, dump1) text.append(CONTEXT_BLOCK.format( style=PRE_STYLE, num=i, dump1=dump1, dump2=dump2 )) return u''.join(text)
[ "Print", "the", "entire", "template", "context" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/templatetags/debugtools_tags.py#L64-L83
[ "def", "print_context", "(", "self", ",", "context", ")", ":", "text", "=", "[", "CONTEXT_TITLE", "]", "for", "i", ",", "context_scope", "in", "enumerate", "(", "context", ")", ":", "dump1", "=", "linebreaksbr", "(", "pformat_django_context_html", "(", "context_scope", ")", ")", "dump2", "=", "pformat_dict_summary_html", "(", "context_scope", ")", "# Collapse long objects by default (e.g. request, LANGUAGES and sql_queries)", "if", "len", "(", "context_scope", ")", "<=", "3", "and", "dump1", ".", "count", "(", "'<br />'", ")", ">", "20", ":", "(", "dump1", ",", "dump2", ")", "=", "(", "dump2", ",", "dump1", ")", "text", ".", "append", "(", "CONTEXT_BLOCK", ".", "format", "(", "style", "=", "PRE_STYLE", ",", "num", "=", "i", ",", "dump1", "=", "dump1", ",", "dump2", "=", "dump2", ")", ")", "return", "u''", ".", "join", "(", "text", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
test
PrintNode.print_variables
Print a set of variables
debugtools/templatetags/debugtools_tags.py
def print_variables(self, context): """ Print a set of variables """ text = [] for name, expr in self.variables: # Some extended resolving, to handle unknown variables data = '' try: if isinstance(expr.var, Variable): data = expr.var.resolve(context) else: data = expr.resolve(context) # could return TEMPLATE_STRING_IF_INVALID except VariableDoesNotExist as e: # Failed to resolve, display exception inline keys = [] for scope in context: keys += scope.keys() keys = sorted(set(keys)) # Remove duplicates, e.g. csrf_token return ERROR_TYPE_BLOCK.format(style=PRE_ALERT_STYLE, error=escape(u"Variable '{0}' not found! Available context variables are:\n\n{1}".format(expr, u', '.join(keys)))) else: # Regular format textdata = linebreaksbr(pformat_django_context_html(data)) # At top level, prefix class name if it's a longer result if isinstance(data, SHORT_NAME_TYPES): text.append(BASIC_TYPE_BLOCK.format(style=PRE_STYLE, name=name, value=textdata)) else: text.append(OBJECT_TYPE_BLOCK.format(style=PRE_STYLE, name=name, type=data.__class__.__name__, value=textdata)) return u''.join(text)
def print_variables(self, context): """ Print a set of variables """ text = [] for name, expr in self.variables: # Some extended resolving, to handle unknown variables data = '' try: if isinstance(expr.var, Variable): data = expr.var.resolve(context) else: data = expr.resolve(context) # could return TEMPLATE_STRING_IF_INVALID except VariableDoesNotExist as e: # Failed to resolve, display exception inline keys = [] for scope in context: keys += scope.keys() keys = sorted(set(keys)) # Remove duplicates, e.g. csrf_token return ERROR_TYPE_BLOCK.format(style=PRE_ALERT_STYLE, error=escape(u"Variable '{0}' not found! Available context variables are:\n\n{1}".format(expr, u', '.join(keys)))) else: # Regular format textdata = linebreaksbr(pformat_django_context_html(data)) # At top level, prefix class name if it's a longer result if isinstance(data, SHORT_NAME_TYPES): text.append(BASIC_TYPE_BLOCK.format(style=PRE_STYLE, name=name, value=textdata)) else: text.append(OBJECT_TYPE_BLOCK.format(style=PRE_STYLE, name=name, type=data.__class__.__name__, value=textdata)) return u''.join(text)
[ "Print", "a", "set", "of", "variables" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/templatetags/debugtools_tags.py#L85-L114
[ "def", "print_variables", "(", "self", ",", "context", ")", ":", "text", "=", "[", "]", "for", "name", ",", "expr", "in", "self", ".", "variables", ":", "# Some extended resolving, to handle unknown variables", "data", "=", "''", "try", ":", "if", "isinstance", "(", "expr", ".", "var", ",", "Variable", ")", ":", "data", "=", "expr", ".", "var", ".", "resolve", "(", "context", ")", "else", ":", "data", "=", "expr", ".", "resolve", "(", "context", ")", "# could return TEMPLATE_STRING_IF_INVALID", "except", "VariableDoesNotExist", "as", "e", ":", "# Failed to resolve, display exception inline", "keys", "=", "[", "]", "for", "scope", "in", "context", ":", "keys", "+=", "scope", ".", "keys", "(", ")", "keys", "=", "sorted", "(", "set", "(", "keys", ")", ")", "# Remove duplicates, e.g. csrf_token", "return", "ERROR_TYPE_BLOCK", ".", "format", "(", "style", "=", "PRE_ALERT_STYLE", ",", "error", "=", "escape", "(", "u\"Variable '{0}' not found! Available context variables are:\\n\\n{1}\"", ".", "format", "(", "expr", ",", "u', '", ".", "join", "(", "keys", ")", ")", ")", ")", "else", ":", "# Regular format", "textdata", "=", "linebreaksbr", "(", "pformat_django_context_html", "(", "data", ")", ")", "# At top level, prefix class name if it's a longer result", "if", "isinstance", "(", "data", ",", "SHORT_NAME_TYPES", ")", ":", "text", ".", "append", "(", "BASIC_TYPE_BLOCK", ".", "format", "(", "style", "=", "PRE_STYLE", ",", "name", "=", "name", ",", "value", "=", "textdata", ")", ")", "else", ":", "text", ".", "append", "(", "OBJECT_TYPE_BLOCK", ".", "format", "(", "style", "=", "PRE_STYLE", ",", "name", "=", "name", ",", "type", "=", "data", ".", "__class__", ".", "__name__", ",", "value", "=", "textdata", ")", ")", "return", "u''", ".", "join", "(", "text", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
test
pformat_sql_html
Highlight common SQL words in a string.
debugtools/formatter.py
def pformat_sql_html(sql): """ Highlight common SQL words in a string. """ sql = escape(sql) sql = RE_SQL_NL.sub(u'<br>\n\\1', sql) sql = RE_SQL.sub(u'<strong>\\1</strong>', sql) return sql
def pformat_sql_html(sql): """ Highlight common SQL words in a string. """ sql = escape(sql) sql = RE_SQL_NL.sub(u'<br>\n\\1', sql) sql = RE_SQL.sub(u'<strong>\\1</strong>', sql) return sql
[ "Highlight", "common", "SQL", "words", "in", "a", "string", "." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L52-L59
[ "def", "pformat_sql_html", "(", "sql", ")", ":", "sql", "=", "escape", "(", "sql", ")", "sql", "=", "RE_SQL_NL", ".", "sub", "(", "u'<br>\\n\\\\1'", ",", "sql", ")", "sql", "=", "RE_SQL", ".", "sub", "(", "u'<strong>\\\\1</strong>'", ",", "sql", ")", "return", "sql" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
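RE_SQL and RE_SQL_NL are module-level patterns not included in this record. A hypothetical reconstruction showing the shape they must have, given the substitutions above (a single capture group, since both replacement strings refer to group 1):

import re

# Assumed patterns, not the exact ones from debugtools/formatter.py.
RE_SQL_NL = re.compile(r'\b(FROM|WHERE|GROUP BY|HAVING|ORDER BY|LIMIT)\b')        # break the line before these
RE_SQL = re.compile(r'\b(SELECT|FROM|WHERE|GROUP BY|HAVING|ORDER BY|LIMIT|'
                    r'INNER JOIN|LEFT OUTER JOIN|ON|AND|OR|ASC|DESC)\b')          # render these in bold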
test
pformat_django_context_html
Dump a variable to a HTML string with sensible output for template context fields. It filters out all fields which are not usable in a template context.
debugtools/formatter.py
def pformat_django_context_html(object): """ Dump a variable to a HTML string with sensible output for template context fields. It filters out all fields which are not usable in a template context. """ if isinstance(object, QuerySet): text = '' lineno = 0 for item in object.all()[:21]: lineno += 1 if lineno >= 21: text += u' (remaining items truncated...)' break text += u' {0}\n'.format(escape(repr(item))) return text elif isinstance(object, Manager): return mark_safe(u' (use <kbd>.all</kbd> to read it)') elif isinstance(object, six.string_types): return escape(repr(object)) elif isinstance(object, Promise): # lazy() object return escape(_format_lazy(object)) elif isinstance(object, dict): # This can also be a ContextDict return _format_dict(object) elif isinstance(object, list): return _format_list(object) elif hasattr(object, '__dict__'): return _format_object(object) else: # Use regular pprint as fallback. text = DebugPrettyPrinter(width=200).pformat(object) return _style_text(text)
def pformat_django_context_html(object): """ Dump a variable to a HTML string with sensible output for template context fields. It filters out all fields which are not usable in a template context. """ if isinstance(object, QuerySet): text = '' lineno = 0 for item in object.all()[:21]: lineno += 1 if lineno >= 21: text += u' (remaining items truncated...)' break text += u' {0}\n'.format(escape(repr(item))) return text elif isinstance(object, Manager): return mark_safe(u' (use <kbd>.all</kbd> to read it)') elif isinstance(object, six.string_types): return escape(repr(object)) elif isinstance(object, Promise): # lazy() object return escape(_format_lazy(object)) elif isinstance(object, dict): # This can also be a ContextDict return _format_dict(object) elif isinstance(object, list): return _format_list(object) elif hasattr(object, '__dict__'): return _format_object(object) else: # Use regular pprint as fallback. text = DebugPrettyPrinter(width=200).pformat(object) return _style_text(text)
[ "Dump", "a", "variable", "to", "a", "HTML", "string", "with", "sensible", "output", "for", "template", "context", "fields", ".", "It", "filters", "out", "all", "fields", "which", "are", "not", "usable", "in", "a", "template", "context", "." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L62-L94
[ "def", "pformat_django_context_html", "(", "object", ")", ":", "if", "isinstance", "(", "object", ",", "QuerySet", ")", ":", "text", "=", "''", "lineno", "=", "0", "for", "item", "in", "object", ".", "all", "(", ")", "[", ":", "21", "]", ":", "lineno", "+=", "1", "if", "lineno", ">=", "21", ":", "text", "+=", "u' (remaining items truncated...)'", "break", "text", "+=", "u' {0}\\n'", ".", "format", "(", "escape", "(", "repr", "(", "item", ")", ")", ")", "return", "text", "elif", "isinstance", "(", "object", ",", "Manager", ")", ":", "return", "mark_safe", "(", "u' (use <kbd>.all</kbd> to read it)'", ")", "elif", "isinstance", "(", "object", ",", "six", ".", "string_types", ")", ":", "return", "escape", "(", "repr", "(", "object", ")", ")", "elif", "isinstance", "(", "object", ",", "Promise", ")", ":", "# lazy() object", "return", "escape", "(", "_format_lazy", "(", "object", ")", ")", "elif", "isinstance", "(", "object", ",", "dict", ")", ":", "# This can also be a ContextDict", "return", "_format_dict", "(", "object", ")", "elif", "isinstance", "(", "object", ",", "list", ")", ":", "return", "_format_list", "(", "object", ")", "elif", "hasattr", "(", "object", ",", "'__dict__'", ")", ":", "return", "_format_object", "(", "object", ")", "else", ":", "# Use regular pprint as fallback.", "text", "=", "DebugPrettyPrinter", "(", "width", "=", "200", ")", ".", "pformat", "(", "object", ")", "return", "_style_text", "(", "text", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
test
pformat_dict_summary_html
Briefly print the dictionary keys.
debugtools/formatter.py
def pformat_dict_summary_html(dict): """ Briefly print the dictionary keys. """ if not dict: return ' {}' html = [] for key, value in sorted(six.iteritems(dict)): if not isinstance(value, DICT_EXPANDED_TYPES): value = '...' html.append(_format_dict_item(key, value)) return mark_safe(u'<br/>'.join(html))
def pformat_dict_summary_html(dict): """ Briefly print the dictionary keys. """ if not dict: return ' {}' html = [] for key, value in sorted(six.iteritems(dict)): if not isinstance(value, DICT_EXPANDED_TYPES): value = '...' html.append(_format_dict_item(key, value)) return mark_safe(u'<br/>'.join(html))
[ "Briefly", "print", "the", "dictionary", "keys", "." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L97-L111
[ "def", "pformat_dict_summary_html", "(", "dict", ")", ":", "if", "not", "dict", ":", "return", "' {}'", "html", "=", "[", "]", "for", "key", ",", "value", "in", "sorted", "(", "six", ".", "iteritems", "(", "dict", ")", ")", ":", "if", "not", "isinstance", "(", "value", ",", "DICT_EXPANDED_TYPES", ")", ":", "value", "=", "'...'", "html", ".", "append", "(", "_format_dict_item", "(", "key", ",", "value", ")", ")", "return", "mark_safe", "(", "u'<br/>'", ".", "join", "(", "html", ")", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
test
_style_text
Apply some HTML highlighting to the contents. This can't be done in the
debugtools/formatter.py
def _style_text(text): """ Apply some HTML highlighting to the contents. This can't be done in the """ # Escape text and apply some formatting. # To have really good highlighting, pprint would have to be re-implemented. text = escape(text) text = text.replace(' &lt;iterator object&gt;', " <small>&lt;<var>this object can be used in a 'for' loop</var>&gt;</small>") text = text.replace(' &lt;dynamic item&gt;', ' <small>&lt;<var>this object may have extra field names</var>&gt;</small>') text = text.replace(' &lt;dynamic attribute&gt;', ' <small>&lt;<var>this object may have extra field names</var>&gt;</small>') text = RE_PROXY.sub('\g<1><small>&lt;<var>proxy object</var>&gt;</small>', text) text = RE_FUNCTION.sub('\g<1><small>&lt;<var>object method</var>&gt;</small>', text) text = RE_GENERATOR.sub("\g<1><small>&lt;<var>generator, use 'for' to traverse it</var>&gt;</small>", text) text = RE_OBJECT_ADDRESS.sub('\g<1><small>&lt;<var>\g<2> object</var>&gt;</small>', text) text = RE_MANAGER.sub('\g<1><small>&lt;<var>manager, use <kbd>.all</kbd> to traverse it</var>&gt;</small>', text) text = RE_CLASS_REPR.sub('\g<1><small>&lt;<var>\g<2> class</var>&gt;</small>', text) # Since Django's WSGIRequest does a pprint like format for it's __repr__, make that styling consistent text = RE_REQUEST_FIELDNAME.sub('\g<1>:\n <strong style="color: #222;">\g<2></strong>: ', text) text = RE_REQUEST_CLEANUP1.sub('\g<1>', text) text = RE_REQUEST_CLEANUP2.sub(')', text) return mark_safe(text)
def _style_text(text): """ Apply some HTML highlighting to the contents. This can't be done in the """ # Escape text and apply some formatting. # To have really good highlighting, pprint would have to be re-implemented. text = escape(text) text = text.replace(' &lt;iterator object&gt;', " <small>&lt;<var>this object can be used in a 'for' loop</var>&gt;</small>") text = text.replace(' &lt;dynamic item&gt;', ' <small>&lt;<var>this object may have extra field names</var>&gt;</small>') text = text.replace(' &lt;dynamic attribute&gt;', ' <small>&lt;<var>this object may have extra field names</var>&gt;</small>') text = RE_PROXY.sub('\g<1><small>&lt;<var>proxy object</var>&gt;</small>', text) text = RE_FUNCTION.sub('\g<1><small>&lt;<var>object method</var>&gt;</small>', text) text = RE_GENERATOR.sub("\g<1><small>&lt;<var>generator, use 'for' to traverse it</var>&gt;</small>", text) text = RE_OBJECT_ADDRESS.sub('\g<1><small>&lt;<var>\g<2> object</var>&gt;</small>', text) text = RE_MANAGER.sub('\g<1><small>&lt;<var>manager, use <kbd>.all</kbd> to traverse it</var>&gt;</small>', text) text = RE_CLASS_REPR.sub('\g<1><small>&lt;<var>\g<2> class</var>&gt;</small>', text) # Since Django's WSGIRequest does a pprint like format for it's __repr__, make that styling consistent text = RE_REQUEST_FIELDNAME.sub('\g<1>:\n <strong style="color: #222;">\g<2></strong>: ', text) text = RE_REQUEST_CLEANUP1.sub('\g<1>', text) text = RE_REQUEST_CLEANUP2.sub(')', text) return mark_safe(text)
[ "Apply", "some", "HTML", "highlighting", "to", "the", "contents", ".", "This", "can", "t", "be", "done", "in", "the" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L129-L152
[ "def", "_style_text", "(", "text", ")", ":", "# Escape text and apply some formatting.", "# To have really good highlighting, pprint would have to be re-implemented.", "text", "=", "escape", "(", "text", ")", "text", "=", "text", ".", "replace", "(", "' &lt;iterator object&gt;'", ",", "\" <small>&lt;<var>this object can be used in a 'for' loop</var>&gt;</small>\"", ")", "text", "=", "text", ".", "replace", "(", "' &lt;dynamic item&gt;'", ",", "' <small>&lt;<var>this object may have extra field names</var>&gt;</small>'", ")", "text", "=", "text", ".", "replace", "(", "' &lt;dynamic attribute&gt;'", ",", "' <small>&lt;<var>this object may have extra field names</var>&gt;</small>'", ")", "text", "=", "RE_PROXY", ".", "sub", "(", "'\\g<1><small>&lt;<var>proxy object</var>&gt;</small>'", ",", "text", ")", "text", "=", "RE_FUNCTION", ".", "sub", "(", "'\\g<1><small>&lt;<var>object method</var>&gt;</small>'", ",", "text", ")", "text", "=", "RE_GENERATOR", ".", "sub", "(", "\"\\g<1><small>&lt;<var>generator, use 'for' to traverse it</var>&gt;</small>\"", ",", "text", ")", "text", "=", "RE_OBJECT_ADDRESS", ".", "sub", "(", "'\\g<1><small>&lt;<var>\\g<2> object</var>&gt;</small>'", ",", "text", ")", "text", "=", "RE_MANAGER", ".", "sub", "(", "'\\g<1><small>&lt;<var>manager, use <kbd>.all</kbd> to traverse it</var>&gt;</small>'", ",", "text", ")", "text", "=", "RE_CLASS_REPR", ".", "sub", "(", "'\\g<1><small>&lt;<var>\\g<2> class</var>&gt;</small>'", ",", "text", ")", "# Since Django's WSGIRequest does a pprint like format for it's __repr__, make that styling consistent", "text", "=", "RE_REQUEST_FIELDNAME", ".", "sub", "(", "'\\g<1>:\\n <strong style=\"color: #222;\">\\g<2></strong>: '", ",", "text", ")", "text", "=", "RE_REQUEST_CLEANUP1", ".", "sub", "(", "'\\g<1>'", ",", "text", ")", "text", "=", "RE_REQUEST_CLEANUP2", ".", "sub", "(", "')'", ",", "text", ")", "return", "mark_safe", "(", "text", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
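The replacement templates above write \g<1> inside ordinary (non-raw) string literals, which CPython 3.6+ reports as an invalid escape sequence (a DeprecationWarning, later a SyntaxWarning). The behaviour is unchanged, but the same substitution can be written with raw strings, for example:

text = RE_OBJECT_ADDRESS.sub(r'\g<1><small>&lt;<var>\g<2> object</var>&gt;</small>', text)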
test
_format_object
# Instead of just printing <SomeType at 0xfoobar>, expand the fields.
debugtools/formatter.py
def _format_object(object): """ # Instead of just printing <SomeType at 0xfoobar>, expand the fields. """ attrs = iter(object.__dict__.items()) if object.__class__: # Add class members too. attrs = chain(attrs, iter(object.__class__.__dict__.items())) # Remove private and protected variables # Filter needless exception classes which are added to each model. # Filter unremoved form.Meta (unline model.Meta) which makes no sense either is_model = isinstance(object, Model) is_form = isinstance(object, BaseForm) attrs = dict( (k, v) for k, v in attrs if not k.startswith('_') and not getattr(v, 'alters_data', False) and not (is_model and k in ('DoesNotExist', 'MultipleObjectsReturned')) and not (is_form and k in ('Meta',)) ) # Add members which are not found in __dict__. # This includes values such as auto_id, c, errors in a form. for member in dir(object): try: if member.startswith('_') or not hasattr(object, member): continue except HANDLED_EXCEPTIONS as e: attrs[member] = _format_exception(e) continue value = getattr(object, member) if callable(value) or member in attrs or getattr(value, 'alters_data', False): continue attrs[member] = value # Format property objects for name, value in list(attrs.items()): # not iteritems(), so can delete. if isinstance(value, property): attrs[name] = _try_call(lambda: getattr(object, name)) elif isinstance(value, types.FunctionType): if PY3: spec = inspect.getfullargspec(value) else: spec = inspect.getargspec(value) if len(spec.args) == 1 or len(spec.args) == len(spec.defaults or ()) + 1: if _is_unsafe_name(name): # The delete and save methods should have an alters_data = True set. # however, when delete or save methods are overridden, this is often missed. attrs[name] = LiteralStr('<Skipped for safety reasons (could alter the database)>') else: # should be simple method(self) signature to be callable in the template # function may have args (e.g. BoundField.as_textarea) as long as they have defaults. attrs[name] = _try_call(lambda: value(object)) else: del attrs[name] elif hasattr(value, '__get__'): # fetched the descriptor, e.g. django.db.models.fields.related.ForeignRelatedObjectsDescriptor attrs[name] = value = _try_call(lambda: getattr(object, name), return_exceptions=True) if isinstance(value, Manager): attrs[name] = LiteralStr('<{0} manager>'.format(value.__class__.__name__)) elif isinstance(value, AttributeError): del attrs[name] # e.g. Manager isn't accessible via Model instances. elif isinstance(value, HANDLED_EXCEPTIONS): attrs[name] = _format_exception(value) # Include representations which are relevant in template context. if getattr(object, '__str__', None) is not object.__str__: attrs['__str__'] = _try_call(lambda: smart_str(object)) elif getattr(object, '__unicode__', None) is not object.__unicode__: attrs['__unicode__'] = _try_call(lambda: smart_str(object)) if hasattr(object, '__getattr__'): attrs['__getattr__'] = LiteralStr('<dynamic attribute>') if hasattr(object, '__getitem__'): attrs['__getitem__'] = LiteralStr('<dynamic item>') if hasattr(object, '__iter__'): attrs['__iter__'] = LiteralStr('<iterator object>') if hasattr(object, '__len__'): attrs['__len__'] = len(object) # Add known __getattr__ members which are useful for template designers. if isinstance(object, BaseForm): for field_name in list(object.fields.keys()): attrs[field_name] = object[field_name] del attrs['__getitem__'] return _format_dict(attrs)
def _format_object(object): """ # Instead of just printing <SomeType at 0xfoobar>, expand the fields. """ attrs = iter(object.__dict__.items()) if object.__class__: # Add class members too. attrs = chain(attrs, iter(object.__class__.__dict__.items())) # Remove private and protected variables # Filter needless exception classes which are added to each model. # Filter unremoved form.Meta (unline model.Meta) which makes no sense either is_model = isinstance(object, Model) is_form = isinstance(object, BaseForm) attrs = dict( (k, v) for k, v in attrs if not k.startswith('_') and not getattr(v, 'alters_data', False) and not (is_model and k in ('DoesNotExist', 'MultipleObjectsReturned')) and not (is_form and k in ('Meta',)) ) # Add members which are not found in __dict__. # This includes values such as auto_id, c, errors in a form. for member in dir(object): try: if member.startswith('_') or not hasattr(object, member): continue except HANDLED_EXCEPTIONS as e: attrs[member] = _format_exception(e) continue value = getattr(object, member) if callable(value) or member in attrs or getattr(value, 'alters_data', False): continue attrs[member] = value # Format property objects for name, value in list(attrs.items()): # not iteritems(), so can delete. if isinstance(value, property): attrs[name] = _try_call(lambda: getattr(object, name)) elif isinstance(value, types.FunctionType): if PY3: spec = inspect.getfullargspec(value) else: spec = inspect.getargspec(value) if len(spec.args) == 1 or len(spec.args) == len(spec.defaults or ()) + 1: if _is_unsafe_name(name): # The delete and save methods should have an alters_data = True set. # however, when delete or save methods are overridden, this is often missed. attrs[name] = LiteralStr('<Skipped for safety reasons (could alter the database)>') else: # should be simple method(self) signature to be callable in the template # function may have args (e.g. BoundField.as_textarea) as long as they have defaults. attrs[name] = _try_call(lambda: value(object)) else: del attrs[name] elif hasattr(value, '__get__'): # fetched the descriptor, e.g. django.db.models.fields.related.ForeignRelatedObjectsDescriptor attrs[name] = value = _try_call(lambda: getattr(object, name), return_exceptions=True) if isinstance(value, Manager): attrs[name] = LiteralStr('<{0} manager>'.format(value.__class__.__name__)) elif isinstance(value, AttributeError): del attrs[name] # e.g. Manager isn't accessible via Model instances. elif isinstance(value, HANDLED_EXCEPTIONS): attrs[name] = _format_exception(value) # Include representations which are relevant in template context. if getattr(object, '__str__', None) is not object.__str__: attrs['__str__'] = _try_call(lambda: smart_str(object)) elif getattr(object, '__unicode__', None) is not object.__unicode__: attrs['__unicode__'] = _try_call(lambda: smart_str(object)) if hasattr(object, '__getattr__'): attrs['__getattr__'] = LiteralStr('<dynamic attribute>') if hasattr(object, '__getitem__'): attrs['__getitem__'] = LiteralStr('<dynamic item>') if hasattr(object, '__iter__'): attrs['__iter__'] = LiteralStr('<iterator object>') if hasattr(object, '__len__'): attrs['__len__'] = len(object) # Add known __getattr__ members which are useful for template designers. if isinstance(object, BaseForm): for field_name in list(object.fields.keys()): attrs[field_name] = object[field_name] del attrs['__getitem__'] return _format_dict(attrs)
[ "#", "Instead", "of", "just", "printing", "<SomeType", "at", "0xfoobar", ">", "expand", "the", "fields", "." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L155-L246
[ "def", "_format_object", "(", "object", ")", ":", "attrs", "=", "iter", "(", "object", ".", "__dict__", ".", "items", "(", ")", ")", "if", "object", ".", "__class__", ":", "# Add class members too.", "attrs", "=", "chain", "(", "attrs", ",", "iter", "(", "object", ".", "__class__", ".", "__dict__", ".", "items", "(", ")", ")", ")", "# Remove private and protected variables", "# Filter needless exception classes which are added to each model.", "# Filter unremoved form.Meta (unline model.Meta) which makes no sense either", "is_model", "=", "isinstance", "(", "object", ",", "Model", ")", "is_form", "=", "isinstance", "(", "object", ",", "BaseForm", ")", "attrs", "=", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "attrs", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "and", "not", "getattr", "(", "v", ",", "'alters_data'", ",", "False", ")", "and", "not", "(", "is_model", "and", "k", "in", "(", "'DoesNotExist'", ",", "'MultipleObjectsReturned'", ")", ")", "and", "not", "(", "is_form", "and", "k", "in", "(", "'Meta'", ",", ")", ")", ")", "# Add members which are not found in __dict__.", "# This includes values such as auto_id, c, errors in a form.", "for", "member", "in", "dir", "(", "object", ")", ":", "try", ":", "if", "member", ".", "startswith", "(", "'_'", ")", "or", "not", "hasattr", "(", "object", ",", "member", ")", ":", "continue", "except", "HANDLED_EXCEPTIONS", "as", "e", ":", "attrs", "[", "member", "]", "=", "_format_exception", "(", "e", ")", "continue", "value", "=", "getattr", "(", "object", ",", "member", ")", "if", "callable", "(", "value", ")", "or", "member", "in", "attrs", "or", "getattr", "(", "value", ",", "'alters_data'", ",", "False", ")", ":", "continue", "attrs", "[", "member", "]", "=", "value", "# Format property objects", "for", "name", ",", "value", "in", "list", "(", "attrs", ".", "items", "(", ")", ")", ":", "# not iteritems(), so can delete.", "if", "isinstance", "(", "value", ",", "property", ")", ":", "attrs", "[", "name", "]", "=", "_try_call", "(", "lambda", ":", "getattr", "(", "object", ",", "name", ")", ")", "elif", "isinstance", "(", "value", ",", "types", ".", "FunctionType", ")", ":", "if", "PY3", ":", "spec", "=", "inspect", ".", "getfullargspec", "(", "value", ")", "else", ":", "spec", "=", "inspect", ".", "getargspec", "(", "value", ")", "if", "len", "(", "spec", ".", "args", ")", "==", "1", "or", "len", "(", "spec", ".", "args", ")", "==", "len", "(", "spec", ".", "defaults", "or", "(", ")", ")", "+", "1", ":", "if", "_is_unsafe_name", "(", "name", ")", ":", "# The delete and save methods should have an alters_data = True set.", "# however, when delete or save methods are overridden, this is often missed.", "attrs", "[", "name", "]", "=", "LiteralStr", "(", "'<Skipped for safety reasons (could alter the database)>'", ")", "else", ":", "# should be simple method(self) signature to be callable in the template", "# function may have args (e.g. BoundField.as_textarea) as long as they have defaults.", "attrs", "[", "name", "]", "=", "_try_call", "(", "lambda", ":", "value", "(", "object", ")", ")", "else", ":", "del", "attrs", "[", "name", "]", "elif", "hasattr", "(", "value", ",", "'__get__'", ")", ":", "# fetched the descriptor, e.g. 
django.db.models.fields.related.ForeignRelatedObjectsDescriptor", "attrs", "[", "name", "]", "=", "value", "=", "_try_call", "(", "lambda", ":", "getattr", "(", "object", ",", "name", ")", ",", "return_exceptions", "=", "True", ")", "if", "isinstance", "(", "value", ",", "Manager", ")", ":", "attrs", "[", "name", "]", "=", "LiteralStr", "(", "'<{0} manager>'", ".", "format", "(", "value", ".", "__class__", ".", "__name__", ")", ")", "elif", "isinstance", "(", "value", ",", "AttributeError", ")", ":", "del", "attrs", "[", "name", "]", "# e.g. Manager isn't accessible via Model instances.", "elif", "isinstance", "(", "value", ",", "HANDLED_EXCEPTIONS", ")", ":", "attrs", "[", "name", "]", "=", "_format_exception", "(", "value", ")", "# Include representations which are relevant in template context.", "if", "getattr", "(", "object", ",", "'__str__'", ",", "None", ")", "is", "not", "object", ".", "__str__", ":", "attrs", "[", "'__str__'", "]", "=", "_try_call", "(", "lambda", ":", "smart_str", "(", "object", ")", ")", "elif", "getattr", "(", "object", ",", "'__unicode__'", ",", "None", ")", "is", "not", "object", ".", "__unicode__", ":", "attrs", "[", "'__unicode__'", "]", "=", "_try_call", "(", "lambda", ":", "smart_str", "(", "object", ")", ")", "if", "hasattr", "(", "object", ",", "'__getattr__'", ")", ":", "attrs", "[", "'__getattr__'", "]", "=", "LiteralStr", "(", "'<dynamic attribute>'", ")", "if", "hasattr", "(", "object", ",", "'__getitem__'", ")", ":", "attrs", "[", "'__getitem__'", "]", "=", "LiteralStr", "(", "'<dynamic item>'", ")", "if", "hasattr", "(", "object", ",", "'__iter__'", ")", ":", "attrs", "[", "'__iter__'", "]", "=", "LiteralStr", "(", "'<iterator object>'", ")", "if", "hasattr", "(", "object", ",", "'__len__'", ")", ":", "attrs", "[", "'__len__'", "]", "=", "len", "(", "object", ")", "# Add known __getattr__ members which are useful for template designers.", "if", "isinstance", "(", "object", ",", "BaseForm", ")", ":", "for", "field_name", "in", "list", "(", "object", ".", "fields", ".", "keys", "(", ")", ")", ":", "attrs", "[", "field_name", "]", "=", "object", "[", "field_name", "]", "del", "attrs", "[", "'__getitem__'", "]", "return", "_format_dict", "(", "attrs", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
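Usage sketch for _format_object above: a stripped-down, stand-alone version of the same "expand the fields instead of printing <SomeType at 0xfoobar>" idea. It is not the debugtools implementation — the Django-specific handling (Model, BaseForm, Manager, alters_data and the safety checks) is omitted, and the helper name expand_fields is invented for illustration.

from pprint import pformat

def expand_fields(obj):
    """Collect an object's public, non-callable attributes for debug display."""
    attrs = {}
    for name in dir(obj):
        if name.startswith('_'):
            continue  # skip private/protected members, as _format_object does
        try:
            value = getattr(obj, name)
        except Exception as e:
            # mirror the idea of storing a formatted exception instead of crashing
            attrs[name] = '<caught {0}: {1}>'.format(type(e).__name__, e)
            continue
        if callable(value):
            continue  # plain data only; the real code treats methods separately
        attrs[name] = value
    return pformat(attrs)

class Example(object):
    answer = 42
    def __init__(self):
        self.name = 'demo'

print(expand_fields(Example()))  # prints {'answer': 42, 'name': 'demo'}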
test
_format_lazy
Expand a _("TEST") call to something meaningful.
debugtools/formatter.py
def _format_lazy(value):
    """
    Expand a _("TEST") call to something meaningful.
    """
    args = value._proxy____args
    kw = value._proxy____kw
    if not kw and len(args) == 1 and isinstance(args[0], six.string_types):
        # Found one of the Xgettext_lazy() calls.
        return LiteralStr(u'ugettext_lazy({0})'.format(repr(value._proxy____args[0])))

    # Prints <django.functional.utils.__proxy__ object at ..>
    return value
def _format_lazy(value):
    """
    Expand a _("TEST") call to something meaningful.
    """
    args = value._proxy____args
    kw = value._proxy____kw
    if not kw and len(args) == 1 and isinstance(args[0], six.string_types):
        # Found one of the Xgettext_lazy() calls.
        return LiteralStr(u'ugettext_lazy({0})'.format(repr(value._proxy____args[0])))

    # Prints <django.functional.utils.__proxy__ object at ..>
    return value
[ "Expand", "a", "_", "(", "TEST", ")", "call", "to", "something", "meaningful", "." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L296-L307
[ "def", "_format_lazy", "(", "value", ")", ":", "args", "=", "value", ".", "_proxy____args", "kw", "=", "value", ".", "_proxy____kw", "if", "not", "kw", "and", "len", "(", "args", ")", "==", "1", "and", "isinstance", "(", "args", "[", "0", "]", ",", "six", ".", "string_types", ")", ":", "# Found one of the Xgettext_lazy() calls.", "return", "LiteralStr", "(", "u'ugettext_lazy({0})'", ".", "format", "(", "repr", "(", "value", ".", "_proxy____args", "[", "0", "]", ")", ")", ")", "# Prints <django.functional.utils.__proxy__ object at ..>", "return", "value" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
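Usage sketch for _format_lazy above: FakeLazyProxy is a hand-rolled stand-in, not Django's real lazy __proxy__ class; it only mimics the two name-mangled attributes (_proxy____args / _proxy____kw) that _format_lazy reads, so the unwrapping idea can be shown without a configured Django project.

class FakeLazyProxy(object):
    """Stand-in for the lazy translation proxy (illustration only)."""
    def __init__(self, *args, **kw):
        # same name-mangled attribute names that _format_lazy inspects
        self._proxy____args = args
        self._proxy____kw = kw

def format_lazy_label(value):
    args, kw = value._proxy____args, value._proxy____kw
    if not kw and len(args) == 1 and isinstance(args[0], str):
        return 'ugettext_lazy({0!r})'.format(args[0])
    return repr(value)

msg = FakeLazyProxy("Dashboard")
print(repr(msg))               # <__main__.FakeLazyProxy object at 0x...>  (unhelpful)
print(format_lazy_label(msg))  # ugettext_lazy('Dashboard')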
test
_try_call
Call a method, but :param func: :type func: :param extra_exceptions: :type extra_exceptions: :return: :rtype:
debugtools/formatter.py
def _try_call(func, extra_exceptions=(), return_exceptions=False):
    """
    Call a method, but
    :param func:
    :type func:
    :param extra_exceptions:
    :type extra_exceptions:
    :return:
    :rtype:
    """
    try:
        return func()
    except HANDLED_EXCEPTIONS as e:
        if return_exceptions:
            return e
        else:
            return _format_exception(e)
    except extra_exceptions as e:
        if return_exceptions:
            return e
        else:
            return _format_exception(e)
def _try_call(func, extra_exceptions=(), return_exceptions=False):
    """
    Call a method, but
    :param func:
    :type func:
    :param extra_exceptions:
    :type extra_exceptions:
    :return:
    :rtype:
    """
    try:
        return func()
    except HANDLED_EXCEPTIONS as e:
        if return_exceptions:
            return e
        else:
            return _format_exception(e)
    except extra_exceptions as e:
        if return_exceptions:
            return e
        else:
            return _format_exception(e)
[ "Call", "a", "method", "but", ":", "param", "func", ":", ":", "type", "func", ":", ":", "param", "extra_exceptions", ":", ":", "type", "extra_exceptions", ":", ":", "return", ":", ":", "rtype", ":" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L326-L347
[ "def", "_try_call", "(", "func", ",", "extra_exceptions", "=", "(", ")", ",", "return_exceptions", "=", "False", ")", ":", "try", ":", "return", "func", "(", ")", "except", "HANDLED_EXCEPTIONS", "as", "e", ":", "if", "return_exceptions", ":", "return", "e", "else", ":", "return", "_format_exception", "(", "e", ")", "except", "extra_exceptions", "as", "e", ":", "if", "return_exceptions", ":", "return", "e", "else", ":", "return", "_format_exception", "(", "e", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
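Usage sketch for _try_call above: a stand-alone version of the same "call it, but never let the debug output blow up" pattern. DEMO_HANDLED_EXCEPTIONS is an assumed stand-in for the module's HANDLED_EXCEPTIONS constant, and the placeholder string stands in for _format_exception().

DEMO_HANDLED_EXCEPTIONS = (AttributeError, TypeError, ValueError)  # assumed stand-in

def try_call(func, extra_exceptions=(), return_exceptions=False):
    try:
        return func()
    except DEMO_HANDLED_EXCEPTIONS + tuple(extra_exceptions) as e:
        if return_exceptions:
            return e  # caller wants the exception object itself
        return '<caught {0}: {1}>'.format(type(e).__name__, e)

print(try_call(lambda: int('42')))    # 42
print(try_call(lambda: int('oops')))  # <caught ValueError: invalid literal ...>
print(try_call(lambda: 1 / 0, extra_exceptions=(ZeroDivisionError,)))
# <caught ZeroDivisionError: division by zero>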
test
DebugPrettyPrinter.format
Format an item in the result. Could be a dictionary key, value, etc..
debugtools/formatter.py
def format(self, object, context, maxlevels, level):
        """
        Format an item in the result. Could be a dictionary key, value, etc..
        """
        try:
            return PrettyPrinter.format(self, object, context, maxlevels, level)
        except HANDLED_EXCEPTIONS as e:
            return _format_exception(e), True, False
def format(self, object, context, maxlevels, level):
        """
        Format an item in the result. Could be a dictionary key, value, etc..
        """
        try:
            return PrettyPrinter.format(self, object, context, maxlevels, level)
        except HANDLED_EXCEPTIONS as e:
            return _format_exception(e), True, False
[ "Format", "an", "item", "in", "the", "result", ".", "Could", "be", "a", "dictionary", "key", "value", "etc", ".." ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L374-L382
[ "def", "format", "(", "self", ",", "object", ",", "context", ",", "maxlevels", ",", "level", ")", ":", "try", ":", "return", "PrettyPrinter", ".", "format", "(", "self", ",", "object", ",", "context", ",", "maxlevels", ",", "level", ")", "except", "HANDLED_EXCEPTIONS", "as", "e", ":", "return", "_format_exception", "(", "e", ")", ",", "True", ",", "False" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
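Usage sketch for the format() override above: subclassing the standard-library PrettyPrinter and catching exceptions inside format() keeps pformat() usable even when an object's __repr__ raises. The broad except Exception and the placeholder text are illustrative simplifications of HANDLED_EXCEPTIONS and _format_exception().

from pprint import PrettyPrinter

class SafePrettyPrinter(PrettyPrinter):
    def format(self, object, context, maxlevels, level):
        # PrettyPrinter.format returns a (text, readable, recursive) tuple
        try:
            return PrettyPrinter.format(self, object, context, maxlevels, level)
        except Exception as e:
            return '<caught {0}: {1}>'.format(type(e).__name__, e), True, False

class Broken(object):
    def __repr__(self):
        raise RuntimeError("repr() exploded")

printer = SafePrettyPrinter()
print(printer.pformat(Broken()))   # <caught RuntimeError: repr() exploded>
print(printer.pformat({'ok': 1}))  # {'ok': 1}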
test
DebugPrettyPrinter._format
Recursive part of the formatting
debugtools/formatter.py
def _format(self, object, stream, indent, allowance, context, level):
        """
        Recursive part of the formatting
        """
        try:
            PrettyPrinter._format(self, object, stream, indent, allowance, context, level)
        except Exception as e:
            stream.write(_format_exception(e))
def _format(self, object, stream, indent, allowance, context, level):
        """
        Recursive part of the formatting
        """
        try:
            PrettyPrinter._format(self, object, stream, indent, allowance, context, level)
        except Exception as e:
            stream.write(_format_exception(e))
[ "Recursive", "part", "of", "the", "formatting" ]
edoburu/django-debugtools
python
https://github.com/edoburu/django-debugtools/blob/5c609c00fa9954330cd135fc62a1e18b8e7fea8a/debugtools/formatter.py#L384-L391
[ "def", "_format", "(", "self", ",", "object", ",", "stream", ",", "indent", ",", "allowance", ",", "context", ",", "level", ")", ":", "try", ":", "PrettyPrinter", ".", "_format", "(", "self", ",", "object", ",", "stream", ",", "indent", ",", "allowance", ",", "context", ",", "level", ")", "except", "Exception", "as", "e", ":", "stream", ".", "write", "(", "_format_exception", "(", "e", ")", ")" ]
5c609c00fa9954330cd135fc62a1e18b8e7fea8a
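Usage sketch combining both overrides of this class: format() guards the repr() calls, while the _format() override acts as a second safety net around pprint's recursive writer. Note that _format() is a private pprint method — the signature used here matches CPython 3.x but is not a public contract — and describe() is an illustrative stand-in for _format_exception().

from pprint import PrettyPrinter

def describe(e):
    return '<caught {0}: {1}>'.format(type(e).__name__, e)

class SafePrettyPrinter(PrettyPrinter):
    def format(self, object, context, maxlevels, level):
        try:
            return PrettyPrinter.format(self, object, context, maxlevels, level)
        except Exception as e:
            return describe(e), True, False

    def _format(self, object, stream, indent, allowance, context, level):
        # private hook: if anything deeper in pprint raises, write a note inline
        try:
            PrettyPrinter._format(self, object, stream, indent, allowance, context, level)
        except Exception as e:
            stream.write(describe(e))

class Broken(object):
    def __repr__(self):
        raise RuntimeError("boom")

# The failing repr() is replaced by a '<caught ...>' note instead of raising.
print(SafePrettyPrinter(width=30).pformat({'items': [Broken(), 1, 2]}))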