_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def change_password_view(self):
    """Prompt for the old and new password, then update the user's password."""
    form = self.ChangePasswordFormClass(request.form)

    # Handle a valid form submission
    if request.method == 'POST' and form.validate():
        # Persist the hashed new password on the current user
        current_user.password = self.hash_password(form.new_password.data)
        self.db_manager.save_object(current_user)
        self.db_manager.commit()

        # Notify the user by email when configured to do so
        if self.USER_ENABLE_EMAIL and self.USER_SEND_PASSWORD_CHANGED_EMAIL:
            self.email_manager.send_password_changed_email(current_user)

        # Emit the changed-password signal for interested listeners
        signals.user_changed_password.send(current_app._get_current_object(), user=current_user)

        # Inform the user and redirect to the 'next' URL
        flash(_('Your password has been changed successfully.'), 'success')
        safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_CHANGE_PASSWORD_ENDPOINT)
        return redirect(safe_next_url)

    # GET or invalid POST: render the change-password form
    self.prepare_domain_translations()
    return render_template(self.USER_CHANGE_PASSWORD_TEMPLATE, form=form)
def change_username_view(self):
    """Prompt for a new username plus the old password, then change the username."""
    form = self.ChangeUsernameFormClass(request.form)

    # Handle a valid form submission
    if request.method == 'POST' and form.validate():
        # Persist the new username
        new_username = form.new_username.data
        current_user.username = new_username
        self.db_manager.save_object(current_user)
        self.db_manager.commit()

        # Notify the user by email (the email manager checks its own config flags)
        self.email_manager.send_username_changed_email(current_user)

        # Emit the changed-username signal for interested listeners
        signals.user_changed_username.send(current_app._get_current_object(), user=current_user)

        # Inform the user and redirect to the 'next' URL
        flash(_("Your username has been changed to '%(username)s'.", username=new_username), 'success')
        safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_CHANGE_USERNAME_ENDPOINT)
        return redirect(safe_next_url)

    # GET or invalid POST: render the change-username form
    self.prepare_domain_translations()
    return render_template(self.USER_CHANGE_USERNAME_TEMPLATE, form=form)
def confirm_email_view(self, token):
    """Verify an email-confirmation token and mark the user's email as confirmed."""
    # Decode and verify the token
    data_items = self.token_manager.verify_token(
        token,
        self.USER_CONFIRM_EMAIL_EXPIRATION)

    # Look up the User and UserEmail encoded in the token
    user = None
    user_email = None
    if data_items:
        user, user_email = self.db_manager.get_user_and_user_email_by_id(data_items[0])
    if not user or not user_email:
        flash(_('Invalid confirmation token.'), 'error')
        return redirect(url_for('user.login'))

    # Record the confirmation timestamp
    user_email.email_confirmed_at = datetime.utcnow()
    self.db_manager.save_user_and_user_email(user, user_email)
    self.db_manager.commit()

    # Emit the confirmed-email signal for interested listeners
    signals.user_confirmed_email.send(current_app._get_current_object(), user=user)

    flash(_('Your email has been confirmed.'), 'success')

    # Either log the user in right away or send them to the login page
    safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_CONFIRM_ENDPOINT)
    if self.USER_AUTO_LOGIN_AFTER_CONFIRM:
        return self._do_login_user(user, safe_next_url)  # auto-login
    return redirect(url_for('user.login') + '?next=' + quote(safe_next_url))
def email_action_view(self, id, action):
    """Perform 'delete', 'make-primary' or 'confirm' on the UserEmail with this id."""
    # Users may only operate on their own UserEmail records
    user_email = self.db_manager.get_user_email_by_id(id=id)
    if not user_email or user_email.user_id != current_user.id:
        return self.unauthorized_view()

    if action == 'delete':
        # The primary UserEmail cannot be deleted
        if user_email.is_primary:
            return self.unauthorized_view()
        self.db_manager.delete_object(user_email)
        self.db_manager.commit()
    elif action == 'make-primary':
        # Demote whichever email(s) were primary before
        for other_user_email in self.db_manager.find_user_emails(current_user):
            if other_user_email.is_primary:
                other_user_email.is_primary = False
                self.db_manager.save_object(other_user_email)
        # Promote the selected email
        user_email.is_primary = True
        self.db_manager.save_object(user_email)
        self.db_manager.commit()
    elif action == 'confirm':
        # Re-send the confirmation email for this address
        self._send_confirm_email_email(user_email.user, user_email)
    else:
        # Unknown action
        return self.unauthorized_view()

    return redirect(url_for('user.manage_emails'))
def forgot_password_view(self):
    """Prompt for an email address and send a reset-password email."""
    form = self.ForgotPasswordFormClass(request.form)

    # Handle a valid form submission
    if request.method == 'POST' and form.validate():
        email = form.email.data
        user, user_email = self.db_manager.get_user_and_user_email_by_email(email)

        if user and user_email:
            # Send the reset email and notify listeners
            self.email_manager.send_reset_password_email(user, user_email)
            signals.user_forgot_password.send(current_app._get_current_object(), user=user)

        # The same message is flashed whether or not the address is registered
        flash(_(
            "A reset password email has been sent to '%(email)s'. Open that email and follow the instructions to reset your password.",
            email=email), 'success')

        # Redirect to the login page
        return redirect(self._endpoint_url(self.USER_AFTER_FORGOT_PASSWORD_ENDPOINT))

    # GET or invalid POST: render the forgot-password form
    self.prepare_domain_translations()
    return render_template(self.USER_FORGOT_PASSWORD_TEMPLATE, form=form)
def invite_user_view(self):
    """Let the current user send a registration invitation by email."""
    invite_user_form = self.InviteUserFormClass(request.form)

    if request.method == 'POST' and invite_user_form.validate():
        email = invite_user_form.email.data

        # Refuse to invite an address that already has an account
        user, user_email = self.db_manager.get_user_and_user_email_by_email(email)
        if user:
            flash("User with that email has already registered", "error")
            return redirect(url_for('user.invite_user'))

        # Record the invitation
        user_invitation = self.db_manager.add_user_invitation(
            email=email,
            invited_by_user_id=current_user.id)
        self.db_manager.commit()

        # Send the invitation email; discard the invitation record if sending fails
        try:
            self.email_manager.send_invite_user_email(current_user, user_invitation)
        except Exception:
            self.db_manager.delete_object(user_invitation)
            self.db_manager.commit()
            raise

        # Emit the sent-invitation signal for interested listeners
        signals.user_sent_invitation.send(
            current_app._get_current_object(),
            user_invitation=user_invitation,
            form=invite_user_form)

        # Inform the user and redirect to the 'next' URL
        flash(_('Invitation has been sent.'), 'success')
        safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_INVITE_ENDPOINT)
        return redirect(safe_next_url)

    # GET or invalid POST: render the invite form
    self.prepare_domain_translations()
    return render_template(self.USER_INVITE_USER_TEMPLATE, form=invite_user_form)
def login_view(self):
    """Prepare and process the login form."""
    safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_LOGIN_ENDPOINT)
    safe_reg_next = self._get_safe_next_url('reg_next', self.USER_AFTER_REGISTER_ENDPOINT)

    # Users that are already signed in are redirected immediately
    if self.call_or_get(current_user.is_authenticated) and self.USER_AUTO_LOGIN_AT_LOGIN:
        return redirect(safe_next_url)

    # Initialize forms
    login_form = self.LoginFormClass(request.form)  # for login.html
    register_form = self.RegisterFormClass()        # for login_or_register.html
    if request.method != 'POST':
        login_form.next.data = register_form.next.data = safe_next_url
        login_form.reg_next.data = register_form.reg_next.data = safe_reg_next

    # Handle a valid form submission
    if request.method == 'POST' and login_form.validate():
        user = None
        user_email = None
        if self.USER_ENABLE_USERNAME:
            # Look up by username first; fall back to email (same form field) if enabled
            user = self.db_manager.find_user_by_username(login_form.username.data)
            if not user and self.USER_ENABLE_EMAIL:
                user, user_email = self.db_manager.get_user_and_user_email_by_email(
                    login_form.username.data)
        else:
            # Username support disabled: look up by the email field
            user, user_email = self.db_manager.get_user_and_user_email_by_email(
                login_form.email.data)

        if user:
            # Sign the user in and honor the form's 'next' URL
            safe_next_url = self.make_safe_url(login_form.next.data)
            return self._do_login_user(user, safe_next_url, login_form.remember_me.data)

    # GET or invalid POST: render the login form
    self.prepare_domain_translations()
    template_filename = (self.USER_LOGIN_AUTH0_TEMPLATE
                         if self.USER_ENABLE_AUTH0
                         else self.USER_LOGIN_TEMPLATE)
    return render_template(template_filename,
                           form=login_form,
                           login_form=login_form,
                           register_form=register_form)
def logout_view(self):
    """Process the logout link: sign the user out and redirect."""
    # Notify listeners before the session is torn down
    signals.user_logged_out.send(current_app._get_current_object(), user=current_user)

    # Let Flask-Login clear the session
    logout_user()

    flash(_('You have signed out successfully.'), 'success')

    # Redirect to the 'next' URL or the configured after-logout endpoint
    safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_LOGOUT_ENDPOINT)
    return redirect(safe_next_url)
def register_view(self):
    """Display the registration form and create a new User on valid submission."""
    safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_LOGIN_ENDPOINT)
    safe_reg_next_url = self._get_safe_next_url('reg_next', self.USER_AFTER_REGISTER_ENDPOINT)

    # Initialize forms
    login_form = self.LoginFormClass()                    # for login_or_register.html
    register_form = self.RegisterFormClass(request.form)  # for register.html

    # An invite token determines whether the registeree was invited
    invite_token = request.values.get("token")

    # Invite-only registration is refused without a token
    if self.USER_REQUIRE_INVITATION and not invite_token:
        flash("Registration is invite only", "error")
        return redirect(url_for('user.login'))

    # Resolve the token into a UserInvitation record, if any
    user_invitation = None
    if invite_token and self.db_manager.UserInvitationClass:
        data_items = self.token_manager.verify_token(invite_token, self.USER_INVITE_EXPIRATION)
        if data_items:
            user_invitation = self.db_manager.get_user_invitation_by_id(data_items[0])
        if not user_invitation:
            flash("Invalid invitation token", "error")
            return redirect(url_for('user.login'))
        register_form.invite_token.data = invite_token

    if request.method != 'POST':
        login_form.next.data = register_form.next.data = safe_next_url
        login_form.reg_next.data = register_form.reg_next.data = safe_reg_next_url
        if user_invitation:
            # Pre-fill the email field from the invitation
            register_form.email.data = user_invitation.email

    # Handle a valid form submission
    if request.method == 'POST' and register_form.validate():
        # Create a User and its primary UserEmail from the form fields
        user = self.db_manager.add_user()
        register_form.populate_obj(user)
        user_email = self.db_manager.add_user_email(user=user, is_primary=True)
        register_form.populate_obj(user_email)

        # Store a password hash instead of the plaintext password
        user.password = self.hash_password(user.password)

        # Email confirmation depends on the USER_ENABLE_CONFIRM_EMAIL setting,
        # but invitees registering with their invitation email may skip it.
        request_email_confirmation = self.USER_ENABLE_CONFIRM_EMAIL
        if user_invitation and user_invitation.email.lower() == register_form.email.data.lower():
            user_email.email_confirmed_at = datetime.utcnow()
            request_email_confirmation = False

        self.db_manager.save_user_and_user_email(user, user_email)
        self.db_manager.commit()

        # Send the 'confirm email' or 'registered' email;
        # roll back the new User if sending fails.
        if self.USER_SEND_REGISTERED_EMAIL:
            try:
                self._send_registered_email(user, user_email, request_email_confirmation)
            except Exception:
                self.db_manager.delete_object(user)
                self.db_manager.commit()
                raise

        # Emit the user_registered signal for interested listeners
        signals.user_registered.send(current_app._get_current_object(),
                                     user=user,
                                     user_invitation=user_invitation)

        # When email confirmation was requested, redirect without logging in
        if self.USER_ENABLE_CONFIRM_EMAIL and request_email_confirmation:
            return redirect(self.make_safe_url(register_form.reg_next.data))

        # Otherwise auto-login or redirect to the login page
        if 'reg_next' in request.args:
            safe_reg_next_url = self.make_safe_url(register_form.reg_next.data)
        else:
            safe_reg_next_url = self._endpoint_url(self.USER_AFTER_CONFIRM_ENDPOINT)
        if self.USER_AUTO_LOGIN_AFTER_REGISTER:
            return self._do_login_user(user, safe_reg_next_url)  # auto-login
        return redirect(url_for('user.login') + '?next=' + quote(safe_reg_next_url))

    # GET or invalid POST: render the registration form
    self.prepare_domain_translations()
    return render_template(self.USER_REGISTER_TEMPLATE,
                           form=register_form,
                           login_form=login_form,
                           register_form=register_form)
def resend_email_confirmation_view(self):
    """Prompt for an email address and re-send the confirmation email."""
    form = self.ResendEmailConfirmationFormClass(request.form)

    # Handle a valid form submission
    if request.method == 'POST' and form.validate():
        # Re-send only when the address belongs to a known user
        user, user_email = self.db_manager.get_user_and_user_email_by_email(form.email.data)
        if user:
            self._send_confirm_email_email(user, user_email)
        # Redirect to the login page
        return redirect(self._endpoint_url(self.USER_AFTER_RESEND_EMAIL_CONFIRMATION_ENDPOINT))

    # GET or invalid POST: render the form
    self.prepare_domain_translations()
    return render_template(self.USER_RESEND_CONFIRM_EMAIL_TEMPLATE, form=form)
def reset_password_view(self, token):
    """Verify a reset-password token, prompt for a new password, and set it."""
    # A logged-in user visiting a reset link is first signed out
    if self.call_or_get(current_user.is_authenticated):
        logout_user()

    # Verify the token and load the user it refers to
    data_items = self.token_manager.verify_token(
        token,
        self.USER_RESET_PASSWORD_EXPIRATION)
    user = None
    if data_items:
        user = self.db_manager.get_user_by_id(data_items[0])

        # Receiving this token demonstrates control of the mailbox,
        # so mark the primary email as confirmed.
        user_or_user_email_object = self.db_manager.get_primary_user_email_object(user)
        user_or_user_email_object.email_confirmed_at = datetime.utcnow()
        self.db_manager.save_object(user_or_user_email_object)
        self.db_manager.commit()

    if not user:
        flash(_('Your reset password token is invalid.'), 'error')
        return redirect(self._endpoint_url('user.login'))

    form = self.ResetPasswordFormClass(request.form)

    # Handle a valid form submission
    if request.method == 'POST' and form.validate():
        # Store the new password hash
        user.password = self.hash_password(form.new_password.data)
        self.db_manager.save_object(user)
        self.db_manager.commit()

        # Notify the user by email when configured to do so
        if self.USER_ENABLE_EMAIL and self.USER_SEND_PASSWORD_CHANGED_EMAIL:
            self.email_manager.send_password_changed_email(user)

        # Emit the reset-password signal for interested listeners
        signals.user_reset_password.send(current_app._get_current_object(), user=user)

        flash(_("Your password has been reset successfully."), 'success')

        # Auto-login or redirect to the login page
        safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_RESET_PASSWORD_ENDPOINT)
        if self.USER_AUTO_LOGIN_AFTER_RESET_PASSWORD:
            return self._do_login_user(user, safe_next_url)  # auto-login
        return redirect(url_for('user.login') + '?next=' + quote(safe_next_url))

    # GET or invalid POST: render the reset-password form
    self.prepare_domain_translations()
    return render_template(self.USER_RESET_PASSWORD_TEMPLATE, form=form)
def unauthenticated_view(self):
    """Flash a 'must sign in' message and redirect to USER_UNAUTHENTICATED_ENDPOINT."""
    url = request.url
    flash(_("You must be signed in to access '%(url)s'.", url=url), 'error')
    # Carry the original URL along so the user can be returned after signing in
    safe_next_url = self.make_safe_url(url)
    return redirect(
        self._endpoint_url(self.USER_UNAUTHENTICATED_ENDPOINT) + '?next=' + quote(safe_next_url))
def unauthorized_view(self):
    """Flash a 'no permission' message and redirect to USER_UNAUTHORIZED_ENDPOINT."""
    url = request.script_root + request.path
    flash(_("You do not have permission to access '%(url)s'.", url=url), 'error')
    return redirect(self._endpoint_url(self.USER_UNAUTHORIZED_ENDPOINT))
def _is_logged_in_with_confirmed_email(user_manager):
    """Return True when the user is logged in and (unless exempted) has a
    confirmed email address; return False otherwise.
    """
    # The user must be logged in
    if not user_manager.call_or_get(current_user.is_authenticated):
        return False
    # A view decorated with @allow_unconfirmed_email bypasses the confirmation check
    if getattr(g, '_flask_user_allow_unconfirmed_email', False):
        return True
    # Otherwise the user needs at least one confirmed email address
    return user_manager.db_manager.user_has_confirmed_email(current_user)
def login_required(view_function):
    """Decorator that requires the current user to be logged in.

    Example::

        @route('/member_page')
        @login_required
        def member_page():  # User must be logged in
            ...

    If USER_ENABLE_EMAIL and USER_ENABLE_CONFIRM_EMAIL are both True,
    this decorator also requires a confirmed email address.

    Calls unauthenticated_view() when the user is not logged in or has not
    confirmed their email address; calls the decorated view otherwise.
    """
    @wraps(view_function)  # Preserve the wrapped view's name/docs for debuggers
    def decorator(*args, **kwargs):
        user_manager = current_app.user_manager
        # Require a logged-in user with a confirmed email address
        if not _is_logged_in_with_confirmed_email(user_manager):
            # Redirect to the unauthenticated page
            return user_manager.unauthenticated_view()
        # All checks passed: call the view
        return view_function(*args, **kwargs)
    return decorator
def allow_unconfirmed_email(view_function):
    """Decorator that requires login but tolerates an unconfirmed email address.

    Works in tandem with the ``USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL=True``
    setting.

    .. caution::

        Use ``USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL=True`` and
        ``@allow_unconfirmed_email`` with caution, as they relax security
        requirements. Make sure that decorated views **never call other views
        directly**; always use ``redirect()`` to ensure proper view protection.

    Example::

        @route('/show_promotion')
        @allow_unconfirmed_email
        def show_promotion():   # Logged in, with or without
            ...                 # confirmed email address

    It can also precede the ``@roles_required`` and ``@roles_accepted`` view
    decorators::

        @route('/show_promotion')
        @allow_unconfirmed_email
        @roles_required('Visitor')
        def show_promotion():
            ...

    Calls unauthorized_view() when the user is not logged in;
    calls the decorated view otherwise.
    """
    @wraps(view_function)  # Preserve the wrapped view's name/docs for debuggers
    def decorator(*args, **kwargs):
        # Flag on the request-global context that an unconfirmed email is OK here
        g._flask_user_allow_unconfirmed_email = True
        try:
            user_manager = current_app.user_manager
            # Require a logged-in user (the confirmation check is now bypassed)
            if not _is_logged_in_with_confirmed_email(user_manager):
                # Redirect to the unauthenticated page
                return user_manager.unauthenticated_view()
            # All checks passed: call the view
            return view_function(*args, **kwargs)
        finally:
            # Always clear the flag, whether or not an exception occurred.
            # NOTE(review): this resets to False rather than restoring a prior
            # value — presumably nesting this decorator is not expected; confirm.
            g._flask_user_allow_unconfirmed_email = False
    return decorator
def verify_token(self, token, expiration_in_seconds=None):
    """Verify the token's signature and expiration, then decrypt it.

    Returns None when the token is expired or invalid;
    returns a list of strings and integers on success.

    Implemented as::

        concatenated_str = self.decrypt_string(token, expiration_in_seconds)
        data_items = self.decode_data_items(concatenated_str)
        return data_items

    Example:

    ::

        # Verify that a User with ``user_id`` has a password that ends in
        # ``password_ends_with``.
        token_is_valid = False
        data_items = token_manager.verify_token(token, expiration_in_seconds)
        if data_items:
            user_id = data_items[0]
            password_ends_with = data_items[1]
            user = user_manager.db_manager.get_user_by_id(user_id)
            token_is_valid = user and user.password[-8:] == password_ends_with
    """
    # Imported lazily so the module loads without the cryptography package
    from cryptography.fernet import InvalidToken
    try:
        concatenated_str = self.decrypt_string(token, expiration_in_seconds)
        return self.decode_data_items(concatenated_str)
    except InvalidToken:
        # Tampered or expired token
        return None
def encode_data_items(self, *args):
    """Encode a mix of integers and strings into one concatenated string.

    - strings are encoded as-is
    - integers are encoded as base-64 strings with a ``'~'`` prefix
    - items are joined with a ``'|'`` separator

    Example:
        ``encode_data_items('abc', 123, 'xyz')`` returns ``'abc|~B7|xyz'``
    """
    encoded = []
    for item in args:
        if isinstance(item, str):
            # Strings pass through untouched
            encoded.append(item)
        elif isinstance(item, int):
            # Integers become '~' + base-64
            encoded.append(self.INTEGER_PREFIX + self.encode_int(item))
        else:
            # Anything else is stringified
            encoded.append(str(item))
    return self.SEPARATOR.join(encoded)
def decode_data_items(self, concatenated_str):
    """Decode a concatenated string back into a list of integers and strings.

    Example:
        ``decode_data_items('abc|~B7|xyz')`` returns ``['abc', 123, 'xyz']``
    """
    data_items = []
    for part in concatenated_str.split(self.SEPARATOR):
        if len(part) >= 1 and part[0] == self.INTEGER_PREFIX:
            # '~base-64' parts decode back into integers
            data_items.append(self.decode_int(part[1:]))
        else:
            # Plain strings are kept as-is
            data_items.append(part)
    return data_items
def encode_int(self, n):
    """Encode an integer into a short base-``self.BASE`` string.

    Example:
        ``encode_int(123)`` returns ``'B7'``.
    """
    digits = []
    # Repeatedly peel off the least-significant digit
    while True:
        n, remainder = divmod(n, self.BASE)
        digits.append(self.ALPHABET[remainder])
        if n == 0:
            break
    # Digits were collected least-significant first
    digits.reverse()
    return ''.join(digits)
def decode_int(self, str):
    """Decode a short base-``self.BASE`` string back into an integer.

    Example:
        ``decode_int('B7')`` returns ``123``.
    """
    # Horner's scheme over the digit characters
    value = 0
    for ch in str:
        value = value * self.BASE + self.ALPHABET_REVERSE[ch]
    return value
def send_confirm_email_email(self, user, user_email):
    """Send the 'email confirmation' email."""
    user_manager = self.user_manager
    # Respect configuration: email support and email confirmation must be enabled
    if not (user_manager.USER_ENABLE_EMAIL and user_manager.USER_ENABLE_CONFIRM_EMAIL):
        return

    # Address the email to the specific UserEmail, falling back to user.email
    recipient = user_email.email if user_email else user.email

    # Build a confirm-email link from a token over the UserEmail/User ID
    object_id = user_email.id if user_email else user.id
    token = user_manager.generate_token(object_id)
    confirm_email_link = url_for('user.confirm_email', token=token, _external=True)

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        recipient,
        user,
        user_manager.USER_CONFIRM_EMAIL_TEMPLATE,
        confirm_email_link=confirm_email_link,
    )
def send_password_changed_email(self, user):
    """Send the 'your password has changed' notification email."""
    user_manager = self.user_manager
    # Respect configuration flags
    if not (user_manager.USER_ENABLE_EMAIL and user_manager.USER_SEND_PASSWORD_CHANGED_EMAIL):
        return

    # Notifications go to the user's primary email address
    primary_email_object = user_manager.db_manager.get_primary_user_email_object(user)

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        primary_email_object.email,
        user,
        user_manager.USER_PASSWORD_CHANGED_EMAIL_TEMPLATE,
    )
def send_reset_password_email(self, user, user_email):
    """Send the 'reset password' email."""
    user_manager = self.user_manager
    # Email support must be enabled; forgot-password support is mandatory here
    if not user_manager.USER_ENABLE_EMAIL:
        return
    assert user_manager.USER_ENABLE_FORGOT_PASSWORD

    # Address the email to the specific UserEmail, falling back to user.email
    recipient = user_email.email if user_email else user.email

    # Build a reset-password link from a token over the user ID
    token = user_manager.generate_token(user.id)
    reset_password_link = url_for('user.reset_password', token=token, _external=True)

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        recipient,
        user,
        user_manager.USER_RESET_PASSWORD_EMAIL_TEMPLATE,
        reset_password_link=reset_password_link,
    )
def send_invite_user_email(self, user, user_invitation):
    """Send the 'you have been invited' email to the invitee."""
    user_manager = self.user_manager
    # Respect configuration flags
    if not (user_manager.USER_ENABLE_EMAIL and user_manager.USER_ENABLE_INVITE_USER):
        return

    # 'user' is the inviter; 'user_invitation' identifies the invitee
    invited_by_user = user
    recipient = user_invitation.email

    # Templates expect a user object, so build a transient one for the invitee
    user = user_manager.db_manager.UserClass(email=recipient)

    # Build an accept-invitation link from a token over the invitation ID
    token = user_manager.generate_token(user_invitation.id)
    accept_invitation_link = url_for('user.register', token=token, _external=True)

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        recipient,
        user,
        user_manager.USER_INVITE_USER_EMAIL_TEMPLATE,
        accept_invitation_link=accept_invitation_link,
        invited_by_user=invited_by_user,
    )
def send_registered_email(self, user, user_email, request_email_confirmation):
    """Send the 'you have registered' notification email."""
    user_manager = self.user_manager
    # Respect configuration flags
    if not (user_manager.USER_ENABLE_EMAIL and user_manager.USER_SEND_REGISTERED_EMAIL):
        return

    # Address the email to the specific UserEmail, falling back to user.email
    recipient = user_email.email if user_email else user.email

    # Include a confirmation link only when confirmation was requested
    if request_email_confirmation:
        object_id = user_email.id if user_email else user.id
        token = user_manager.generate_token(object_id)
        confirm_email_link = url_for('user.confirm_email', token=token, _external=True)
    else:
        confirm_email_link = None

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        recipient,
        user,
        user_manager.USER_REGISTERED_EMAIL_TEMPLATE,
        confirm_email_link=confirm_email_link,
    )
def send_username_changed_email(self, user):
    """Send the 'your username has changed' notification email."""
    user_manager = self.user_manager
    # Respect configuration flags
    if not (user_manager.USER_ENABLE_EMAIL and user_manager.USER_SEND_USERNAME_CHANGED_EMAIL):
        return

    # Notifications go to the user's primary email address
    primary_email_object = user_manager.db_manager.get_primary_user_email_object(user)

    # Render from templates and send via the configured EmailAdapter
    self._render_and_send_email(
        primary_email_object.email,
        user,
        user_manager.USER_USERNAME_CHANGED_EMAIL_TEMPLATE,
    )
def get_id(self):
    """Return a secure token encoding this user's ID, for the Flask-Login cookie.

    The last 8 characters of the password hash are folded into the token so
    it is invalidated whenever the password changes. The token is encrypted,
    timestamped and signed, and works in tandem with
    UserMixin.get_user_by_token().
    """
    user_manager = current_app.user_manager
    # Auth0 accounts have no local password hash to bind into the token
    password_ends_with = '' if user_manager.USER_ENABLE_AUTH0 else self.password[-8:]
    return user_manager.generate_token(
        self.id,             # User ID
        password_ends_with,  # Last 8 characters of the password hash
    )
def has_roles(self, *requirements):
    """Return True if the user satisfies all of the specified role requirements.

    Each requirement is either a role name or a tuple of role names:

    - a role name is satisfied when the user has that role
    - a tuple of role names is satisfied when the user has ANY of them

    All requirements must be satisfied. For example::

        has_roles('a', ('b', 'c'), 'd')

    translates to: user has role 'a' AND (role 'b' OR role 'c') AND role 'd'.
    """
    # Resolve the user's role objects into a list of role names
    user_manager = current_app.user_manager
    role_names = user_manager.db_manager.get_user_roles(self)

    for requirement in requirements:
        if isinstance(requirement, (list, tuple)):
            # OR-requirement: at least one of the listed roles must be present
            if not any(name in role_names for name in requirement):
                return False
        else:
            # AND-requirement: this single role must be present
            if requirement not in role_names:
                return False

    # Every requirement was satisfied
    return True
def email_is_available(self, new_email):
    """Check whether ``new_email`` is available.

    Returns True when no User owns ``new_email``; False otherwise.
    (NOTE(review): the upstream docstring also claimed 'or belongs to the
    current user', but the lookup below does not consult the current user.)
    """
    user, user_email = self.db_manager.get_user_and_user_email_by_email(new_email)
    # Identity check instead of '== None' (PEP 8); also sidesteps any
    # overloaded __eq__ on model instances.
    return user is None
q33130 | UserManager__Utils.make_safe_url | train | def make_safe_url(self, url):
"""Makes a URL safe by removing optional hostname and port.
Example:
| ``make_safe_url('https://hostname:80/path1/path2?q1=v1&q2=v2#fragment')``
| returns ``'/path1/path2?q1=v1&q2=v2#fragment'``
Override this method if you need to allow a list of safe hostnames.
"""
# Split the URL into scheme, netloc, path, query and fragment
parts = list(urlsplit(url))
# Clear scheme and netloc and rebuild URL
parts[0] = '' # Empty scheme
parts[1] = '' # Empty netloc (hostname:port)
safe_url = urlunsplit(parts)
return safe_url | python | {
"resource": ""
} |
q33131 | UserManager.password_validator | train | def password_validator(self, form, field):
"""Ensure that passwords have at least 6 characters with one lowercase letter, one uppercase letter and one number.
Override this method to customize the password validator.
"""
# Convert string to list of characters
password = list(field.data)
password_length = len(password)
# Count lowercase, uppercase and numbers
lowers = uppers = digits = 0
for ch in password:
if ch.islower(): lowers += 1
if ch.isupper(): uppers += 1
if ch.isdigit(): digits += 1
# Password must have one lowercase letter, one uppercase letter and one digit
is_valid = password_length >= 6 and lowers and uppers and digits
if not is_valid:
raise ValidationError(
_('Password must have at least 6 characters with one lowercase letter, one uppercase letter and one number')) | python | {
"resource": ""
} |
q33132 | UserManager.username_validator | train | def username_validator(self, form, field):
"""Ensure that Usernames contains at least 3 alphanumeric characters.
Override this method to customize the username validator.
"""
username = field.data
if len(username) < 3:
raise ValidationError(
_('Username must be at least 3 characters long'))
valid_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._'
chars = list(username)
for char in chars:
if char not in valid_chars:
raise ValidationError(
_("Username may only contain letters, numbers, '-', '.' and '_'")) | python | {
"resource": ""
} |
q33133 | UserManager._check_settings | train | def _check_settings(self, app):
"""Verify required settings. Produce a helpful error messages for incorrect settings."""
# Check for invalid settings
# --------------------------
# Check self.UserInvitationClass and USER_ENABLE_INVITE_USER
if self.USER_ENABLE_INVITE_USER and not self.UserInvitationClass:
raise ConfigError(
'UserInvitationClass is missing while USER_ENABLE_INVITE_USER is True.' \
' Specify UserInvitationClass with UserManager(app, db, User, UserInvitationClass=...' \
' or set USER_ENABLE_INVITE_USER=False.')
# Check for deprecated settings
# -----------------------------
# Check for deprecated USER_ENABLE_CONFIRM_EMAIL
setting = app.config.get('USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL', None)
if setting is not None:
print(
'Deprecation warning: USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL'\
' will be deprecated.' \
' It has been replaced by USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL.'\
' Please change this as soon as possible.')
self.USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL = setting
# Check for deprecated USER_ENABLE_RETYPE_PASSWORD
setting = app.config.get('USER_ENABLE_RETYPE_PASSWORD', None)
if setting is not None:
print(
'Deprecation warning: USER_ENABLE_RETYPE_PASSWORD'\
' will be deprecated.' \
' It has been replaced with USER_REQUIRE_RETYPE_PASSWORD.'\
' Please change this as soon as possible.')
self.USER_REQUIRE_RETYPE_PASSWORD = setting
# Check for deprecated USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST
setting = app.config.get('USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST', None)
if setting is not None:
print(
'Deprecation warning: USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST' \
' will be deprecated.' \
' It has been replaced with USER_SHOW_USERNAME_DOES_NOT_EXIST'
' and USER_SHOW_EMAIL_DOES_NOT_EXIST.'
' Please change this as soon as possible.')
self.USER_SHOW_USERNAME_DOES_NOT_EXIST = setting
self.USER_SHOW_EMAIL_DOES_NOT_EXIST = setting
# Check for deprecated USER_PASSWORD_HASH
setting = app.config.get('USER_PASSWORD_HASH', None)
if setting is not None:
print(
"Deprecation warning: USER_PASSWORD_HASH=<string>"\
" will be deprecated."\
" It has been replaced with USER_PASSLIB_CRYPTCONTEXT_SCHEMES=<list>."
" Please change USER_PASSWORD_HASH='something' to"\
" USER_PASSLIB_CRYPTCONTEXT_SCHEMES=['something'] as soon as possible.")
self.USER_PASSLIB_CRYPTCONTEXT_SCHEMES = [setting]
# Check that USER_EMAIL_SENDER_EMAIL is set when USER_ENABLE_EMAIL is True
if not self.USER_EMAIL_SENDER_EMAIL and self.USER_ENABLE_EMAIL:
raise ConfigError(
'USER_EMAIL_SENDER_EMAIL is missing while USER_ENABLE_EMAIL is True.'\
' specify USER_EMAIL_SENDER_EMAIL (and USER_EMAIL_SENDER_NAME) or set USER_ENABLE_EMAIL to False.')
# Disable settings that rely on a feature setting that's not enabled
# ------------------------------------------------------------------
# USER_ENABLE_REGISTER=True must have USER_ENABLE_USERNAME=True or USER_ENABLE_EMAIL=True.
if not self.USER_ENABLE_USERNAME and not self.USER_ENABLE_EMAIL:
self.USER_ENABLE_REGISTER = False
# Settings that depend on USER_ENABLE_EMAIL
if not self.USER_ENABLE_EMAIL:
self.USER_ENABLE_CONFIRM_EMAIL = False
self.USER_ENABLE_MULTIPLE_EMAILS = False
self.USER_ENABLE_FORGOT_PASSWORD = False
self.USER_SEND_PASSWORD_CHANGED_EMAIL = False
self.USER_SEND_REGISTERED_EMAIL = False
self.USER_SEND_USERNAME_CHANGED_EMAIL = False
self.USER_REQUIRE_INVITATION = False
# Settings that depend on USER_ENABLE_USERNAME
if not self.USER_ENABLE_USERNAME:
self.USER_ENABLE_CHANGE_USERNAME = False | python | {
"resource": ""
} |
q33134 | MongoDbAdapter.drop_all_tables | train | def drop_all_tables(self):
"""Drop all document collections of the database.
.. warning:: ALL DATA WILL BE LOST. Use only for automated testing.
"""
# Retrieve database name from application config
app = self.db.app
mongo_settings = app.config['MONGODB_SETTINGS']
database_name = mongo_settings['db']
# Flask-MongoEngine is built on MongoEngine, which is built on PyMongo.
# To drop database collections, we need to access the PyMongo Database object,
# which is stored in the PyMongo MongoClient object,
# which is stored in app.extensions['mongoengine'][self]['conn']
py_mongo_mongo_client = app.extensions['mongoengine'][self.db]['conn']
py_mongo_database = py_mongo_mongo_client[database_name]
# Use the PyMongo Database object
for collection_name in py_mongo_database.collection_names():
py_mongo_database.drop_collection(collection_name) | python | {
"resource": ""
} |
q33135 | DynamoDbAdapter.add_object | train | def add_object(self, object):
"""Add object to db session. Only for session-centric object-database mappers."""
if object.id is None:
object.get_id()
self.db.engine.save(object) | python | {
"resource": ""
} |
q33136 | DynamoDbAdapter.delete_object | train | def delete_object(self, object):
""" Delete object specified by ``object``. """
#pdb.set_trace()
self.db.engine.delete_key(object)#, userid='abc123', id='1')
print('dynamo.delete_object(%s)' % object) | python | {
"resource": ""
} |
q33137 | unique_username_validator | train | def unique_username_validator(form, field):
""" Ensure that Username is unique. This validator may NOT be customized."""
user_manager = current_app.user_manager
if not user_manager.db_manager.username_is_available(field.data):
raise ValidationError(_('This Username is already in use. Please try another one.')) | python | {
"resource": ""
} |
q33138 | unique_email_validator | train | def unique_email_validator(form, field):
""" Username must be unique. This validator may NOT be customized."""
user_manager = current_app.user_manager
if not user_manager.email_is_available(field.data):
raise ValidationError(_('This Email is already in use. Please try another one.')) | python | {
"resource": ""
} |
q33139 | SMTPEmailAdapter.send_email_message | train | def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
""" Send email message via Flask-Mail.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
text_message: The message body in plain text.
"""
# Construct sender from sender_name and sender_email
sender = '"%s" <%s>' % (sender_name, sender_email) if sender_name else sender_email
# Send email via SMTP except when we're testing
if not current_app.testing: # pragma: no cover
try:
# Prepare email message
from flask_mail import Message
message = Message(
subject,
sender=sender,
recipients=[recipient],
html=html_message,
body=text_message)
# Send email message
self.mail.send(message)
# Print helpful error messages on exceptions
except (socket.gaierror, socket.error) as e:
raise EmailError('SMTP Connection error: Check your MAIL_SERVER and MAIL_PORT settings.')
except smtplib.SMTPAuthenticationError:
raise EmailError('SMTP Authentication error: Check your MAIL_USERNAME and MAIL_PASSWORD settings.') | python | {
"resource": ""
} |
q33140 | PasswordManager.verify_password | train | def verify_password(self, password, password_hash):
"""Verify plaintext ``password`` against ``hashed password``.
Args:
password(str): Plaintext password that the user types in.
password_hash(str): Password hash generated by a previous call to ``hash_password()``.
Returns:
| True when ``password`` matches ``password_hash``.
| False otherwise.
Example:
::
if verify_password('mypassword', user.password):
login_user(user)
"""
# Print deprecation warning if called with (password, user) instead of (password, user.password)
if isinstance(password_hash, self.user_manager.db_manager.UserClass):
print(
'Deprecation warning: verify_password(password, user) has been changed'\
' to: verify_password(password, password_hash). The user param will be deprecated.'\
' Please change your call with verify_password(password, user) into'\
' a call with verify_password(password, user.password)'
' as soon as possible.')
password_hash = password_hash.password # effectively user.password
# Use passlib's CryptContext to verify a password
return self.password_crypt_context.verify(password, password_hash) | python | {
"resource": ""
} |
q33141 | SendgridEmailAdapter.send_email_message | train | def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
""" Send email message via sendgrid-python.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
text_message: The message body in plain text.
"""
if not current_app.testing: # pragma: no cover
try:
# Prepare Sendgrid helper objects
from sendgrid.helpers.mail import Email, Content, Substitution, Mail
from_email = Email(sender_email, sender_name)
to_email = Email(recipient)
text_content = Content('text/plain', text_message)
html_content = Content('text/html', html_message)
# Prepare Sendgrid Mail object
# Note: RFC 1341: text must be first, followed by html
mail = Mail(from_email, subject, to_email, text_content)
mail.add_content(html_content)
# Send mail via the Sendgrid API
response = self.sg.client.mail.send.post(request_body=mail.get())
print(response.status_code)
print(response.body)
print(response.headers)
except ImportError:
raise ConfigError(SENDGRID_IMPORT_ERROR_MESSAGE)
except Exception as e:
print(e)
print(e.body)
raise | python | {
"resource": ""
} |
q33142 | PynamoDbAdapter.create_all_tables | train | def create_all_tables(self):
"""Create database tables for all known database data-models."""
for klass in self.__get_classes():
if not klass.exists():
klass.create_table(read_capacity_units=1, write_capacity_units=1, wait=True) | python | {
"resource": ""
} |
q33143 | BasePlot.draw | train | def draw(self):
"""
Draws the Plot to screen.
If there is a continuous datatype for the nodes, it will be reflected
in self.sm being constructed (in `compute_node_colors`). It will then
automatically add in a colorbar to the plot and scale the plot axes
accordingly.
"""
self.draw_nodes()
self.draw_edges()
# note that self.groups only exists on condition
# that group_label_position was given!
if hasattr(self, "groups") and self.groups:
self.draw_group_labels()
logging.debug("DRAW: {0}".format(self.sm))
if self.sm:
self.figure.subplots_adjust(right=0.8)
cax = self.figure.add_axes([0.85, 0.2, 0.05, 0.6])
self.figure.colorbar(self.sm, cax=cax)
self.ax.relim()
self.ax.autoscale_view()
self.ax.set_aspect("equal") | python | {
"resource": ""
} |
q33144 | BasePlot.compute_node_colors | train | def compute_node_colors(self):
"""Compute the node colors. Also computes the colorbar."""
data = [self.graph.node[n][self.node_color] for n in self.nodes]
if self.group_order == "alphabetically":
data_reduced = sorted(list(set(data)))
elif self.group_order == "default":
data_reduced = list(unique_everseen(data))
dtype = infer_data_type(data)
n_grps = num_discrete_groups(data)
if dtype == "categorical" or dtype == "ordinal":
if n_grps <= 8:
cmap = get_cmap(
cmaps["Accent_{0}".format(n_grps)].mpl_colormap
)
else:
cmap = n_group_colorpallet(n_grps)
elif dtype == "continuous" and not is_data_diverging(data):
cmap = get_cmap(cmaps["continuous"].mpl_colormap)
elif dtype == "continuous" and is_data_diverging(data):
cmap = get_cmap(cmaps["diverging"].mpl_colormap)
for d in data:
idx = data_reduced.index(d) / n_grps
self.node_colors.append(cmap(idx))
# Add colorbar if required.ListedColormap
logging.debug("length of data_reduced: {0}".format(len(data_reduced)))
logging.debug("dtype: {0}".format(dtype))
if len(data_reduced) > 1 and dtype == "continuous":
self.sm = plt.cm.ScalarMappable(
cmap=cmap,
norm=plt.Normalize(
vmin=min(data_reduced),
vmax=max(data_reduced), # noqa # noqa
),
)
self.sm._A = [] | python | {
"resource": ""
} |
q33145 | BasePlot.compute_group_colors | train | def compute_group_colors(self):
"""Computes the group colors according to node colors"""
seen = set()
self.group_label_color = [
x for x in self.node_colors if not (x in seen or seen.add(x))
] | python | {
"resource": ""
} |
q33146 | BasePlot.compute_edge_colors | train | def compute_edge_colors(self):
"""Compute the edge colors."""
data = [self.graph.edges[n][self.edge_color] for n in self.edges]
data_reduced = sorted(list(set(data)))
dtype = infer_data_type(data)
n_grps = num_discrete_groups(data)
if dtype == "categorical" or dtype == "ordinal":
if n_grps <= 8:
cmap = get_cmap(
cmaps["Accent_{0}".format(n_grps)].mpl_colormap
)
else:
cmap = n_group_colorpallet(n_grps)
elif dtype == "continuous" and not is_data_diverging(data):
cmap = get_cmap(cmaps["weights"])
for d in data:
idx = data_reduced.index(d) / n_grps
self.edge_colors.append(cmap(idx))
# Add colorbar if required.
logging.debug("length of data_reduced: {0}".format(len(data_reduced)))
logging.debug("dtype: {0}".format(dtype))
if len(data_reduced) > 1 and dtype == "continuous":
self.sm = plt.cm.ScalarMappable(
cmap=cmap,
norm=plt.Normalize(
vmin=min(data_reduced),
vmax=max(data_reduced), # noqa # noqa
),
)
self.sm._A = [] | python | {
"resource": ""
} |
q33147 | BasePlot.compute_node_sizes | train | def compute_node_sizes(self):
"""Compute the node sizes."""
if type(self.node_size) is str:
nodes = self.graph.nodes
self.node_sizes = [nodes[n][self.node_size] for n in self.nodes]
else:
self.node_sizes = self.node_size | python | {
"resource": ""
} |
q33148 | BasePlot.compute_edge_widths | train | def compute_edge_widths(self):
"""Compute the edge widths."""
if type(self.edge_width) is str:
edges = self.graph.edges
self.edge_widths = [edges[n][self.edge_width] for n in self.edges]
else:
self.edge_widths = self.edge_width | python | {
"resource": ""
} |
q33149 | BasePlot.group_and_sort_nodes | train | def group_and_sort_nodes(self):
"""
Groups and then sorts the nodes according to the criteria passed into
the Plot constructor.
"""
if self.node_grouping and not self.node_order:
if self.group_order == "alphabetically":
self.nodes = [
n
for n, d in sorted(
self.graph.nodes(data=True),
key=lambda x: x[1][self.node_grouping],
)
]
elif self.group_order == "default":
grp = [
d[self.node_grouping]
for _, d in self.graph.nodes(data=True)
]
grp_name = list(unique_everseen(grp))
nodes = []
for key in grp_name:
nodes.extend(
[
n
for n, d in self.graph.nodes(data=True)
if key in d.values()
]
)
self.nodes = nodes
elif self.node_order and not self.node_grouping:
self.nodes = [
n
for n, _ in sorted(
self.graph.nodes(data=True),
key=lambda x: x[1][self.node_order],
)
]
elif self.node_grouping and self.node_order:
if self.group_order == "alphabetically":
self.nodes = [
n
for n, d in sorted(
self.graph.nodes(data=True),
key=lambda x: (
x[1][self.node_grouping],
x[1][self.node_order],
),
)
]
elif self.group_order == "default":
grp = [
d[self.node_grouping]
for _, d in self.graph.nodes(data=True)
]
grp_name = list(unique_everseen(grp))
nodes = []
for key in grp_name:
nodes.extend(
[
n
for n, d in sorted(
self.graph.nodes(data=True),
key=lambda x: x[1][self.node_order],
)
if key in d.values()
]
)
self.nodes = nodes | python | {
"resource": ""
} |
q33150 | CircosPlot.compute_group_label_positions | train | def compute_group_label_positions(self):
"""
Computes the x,y positions of the group labels.
"""
assert self.group_label_position in ["beginning", "middle", "end"]
data = [self.graph.node[n][self.node_grouping] for n in self.nodes]
node_length = len(data)
groups = items_in_groups(data)
edge_of_plot = self.plot_radius + self.nodeprops["radius"]
# The 1.02 serves as padding
radius = 1.02 * edge_of_plot + self.group_label_offset
xs = []
ys = []
has = []
vas = []
node_idcs = np.cumsum(list(groups.values()))
node_idcs = np.insert(node_idcs, 0, 0)
if self.group_label_position == "beginning":
for idx in node_idcs[:-1]:
x, y = get_cartesian(
r=radius, theta=group_theta(node_length, idx)
)
ha, va = text_alignment(x, y)
xs.append(x)
ys.append(y)
has.append(ha)
vas.append(va)
elif self.group_label_position == "middle":
node_idcs = node_idcs.reshape(len(node_idcs), 1)
node_idcs = np.concatenate((node_idcs[:-1], node_idcs[1:]), axis=1)
for idx in node_idcs:
theta1 = group_theta(node_length, idx[0])
theta2 = group_theta(node_length, idx[1] - 1)
x, y = get_cartesian(r=radius, theta=(theta1 + theta2) / 2)
ha, va = text_alignment(x, y)
xs.append(x)
ys.append(y)
has.append(ha)
vas.append(va)
elif self.group_label_position == "end":
for idx in node_idcs[1::]:
x, y = get_cartesian(
r=radius, theta=group_theta(node_length, idx - 1)
)
ha, va = text_alignment(x, y)
xs.append(x)
ys.append(y)
has.append(ha)
vas.append(va)
self.group_label_coords = {"x": xs, "y": ys}
self.group_label_aligns = {"has": has, "vas": vas}
self.groups = groups.keys() | python | {
"resource": ""
} |
q33151 | CircosPlot.compute_node_positions | train | def compute_node_positions(self):
"""
Uses the get_cartesian function to compute the positions of each node
in the Circos plot.
"""
xs = []
ys = []
node_r = self.nodeprops["radius"]
radius = circos_radius(n_nodes=len(self.graph.nodes()), node_r=node_r)
self.plot_radius = radius
self.nodeprops["linewidth"] = radius * 0.01
for node in self.nodes:
x, y = get_cartesian(r=radius, theta=node_theta(self.nodes, node))
xs.append(x)
ys.append(y)
self.node_coords = {"x": xs, "y": ys} | python | {
"resource": ""
} |
q33152 | CircosPlot.compute_node_label_positions | train | def compute_node_label_positions(self):
"""
Uses the get_cartesian function to compute the positions of each node
label in the Circos plot.
This method is always called after the compute_node_positions
method, so that the plot_radius is pre-computed.
This will also add a new attribute, `node_label_rotation` to the object
which contains the rotation angles for each of the nodes. Together with
the node coordinates this can be used to add additional annotations
with rotated text.
"""
self.init_node_label_meta()
for node in self.nodes:
# Define radius 'radius' and circumference 'theta'
theta = node_theta(self.nodes, node)
# multiplication factor 1.02 moved below
radius = self.plot_radius + self.nodeprops["radius"]
# Coordinates of text inside nodes
if self.node_label_layout == "numbers":
radius_adjustment = 1.0 - (1.0 / radius)
else:
radius_adjustment = 1.02
x, y = get_cartesian(r=radius * radius_adjustment, theta=theta)
# ----- For numbered nodes -----
# Node label x-axis coordinate
tx, _ = get_cartesian(r=radius, theta=theta)
# Create the quasi-circular positioning on the x axis
tx *= 1 - np.log(np.cos(theta) * self.nonzero_sign(np.cos(theta)))
# Move each node a little further away from the circos
tx += self.nonzero_sign(x)
# Node label y-axis coordinate numerator
numerator = radius * (
theta % (self.nonzero_sign(y) * self.nonzero_sign(x) * np.pi)
)
# Node label y-axis coordinate denominator
denominator = self.nonzero_sign(x) * np.pi
# Node label y-axis coordinate
ty = 2 * (numerator / denominator)
# ----- For rotated nodes -----
# Computes the text rotation
theta_deg = to_degrees(theta)
if theta_deg >= -90 and theta_deg < 90: # right side
rot = theta_deg
else: # left side
rot = theta_deg - 180
# Store values
self.store_node_label_meta(x, y, tx, ty, rot) | python | {
"resource": ""
} |
q33153 | CircosPlot.store_node_label_meta | train | def store_node_label_meta(self, x, y, tx, ty, rot):
"""
This function stored coordinates-related metadate for a node
This function should not be called by the user
:param x: x location of node label or number
:type x: np.float64
:param y: y location of node label or number
:type y: np.float64
:param tx: text location x of node label (numbers)
:type tx: np.float64
:param ty: text location y of node label (numbers)
:type ty: np.float64
:param rot: rotation angle of the text (rotation)
:type rot: float
"""
# Store computed values
self.node_label_coords["x"].append(x)
self.node_label_coords["y"].append(y)
self.node_label_coords["tx"].append(tx)
self.node_label_coords["ty"].append(ty)
# Computes the text alignment for x
if x == 0:
self.node_label_aligns["has"].append("center")
elif x > 0:
self.node_label_aligns["has"].append("left")
else:
self.node_label_aligns["has"].append("right")
# Computes the text alignment for y
if self.node_label_layout == "rotate" or y == 0:
self.node_label_aligns["vas"].append("center")
elif y > 0:
self.node_label_aligns["vas"].append("bottom")
else:
self.node_label_aligns["vas"].append("top")
self.node_label_rotation.append(rot) | python | {
"resource": ""
} |
q33154 | CircosPlot.draw_nodes | train | def draw_nodes(self):
"""
Renders nodes to the figure.
"""
node_r = self.nodeprops["radius"]
lw = self.nodeprops["linewidth"]
for i, node in enumerate(self.nodes):
x = self.node_coords["x"][i]
y = self.node_coords["y"][i]
color = self.node_colors[i]
node_patch = patches.Circle(
(x, y), node_r, lw=lw, color=color, zorder=2
)
self.ax.add_patch(node_patch)
if self.node_labels:
label_x = self.node_label_coords["x"][i]
label_y = self.node_label_coords["y"][i]
label_tx = self.node_label_coords["tx"][i]
label_ty = self.node_label_coords["ty"][i]
label_ha = self.node_label_aligns["has"][i]
label_va = self.node_label_aligns["vas"][i]
# ----- Node label rotation layout -----
if self.node_label_layout == "rotation":
rot = self.node_label_rotation[i]
self.ax.text(
s=node,
x=label_x,
y=label_y,
ha=label_ha,
va=label_va,
rotation=rot,
rotation_mode="anchor",
color=self.node_label_color[i],
fontsize=self.fontsize,
family=self.fontfamily,
)
# ----- Node label numbering layout -----
elif self.node_label_layout == "numbers":
# Draw descriptions for labels
desc = "%s - %s" % ((i, node) if (x > 0) else (node, i))
self.ax.text(
s=desc,
x=label_tx,
y=label_ty,
ha=label_ha,
va=label_va,
color=self.node_label_color[i],
fontsize=self.fontsize,
family=self.fontfamily,
)
# Add numbers to nodes
self.ax.text(
s=i, x=label_x, y=label_y, ha="center", va="center"
)
# Standard node label layout
else:
# Draw node text straight from the nodes
self.ax.text(
s=node,
x=label_x,
y=label_y,
ha=label_ha,
va=label_va,
color=self.node_label_color[i],
fontsize=self.fontsize,
family=self.fontfamily,
) | python | {
"resource": ""
} |
q33155 | CircosPlot.draw_group_labels | train | def draw_group_labels(self):
"""
Renders group labels to the figure.
"""
for i, label in enumerate(self.groups):
label_x = self.group_label_coords["x"][i]
label_y = self.group_label_coords["y"][i]
label_ha = self.group_label_aligns["has"][i]
label_va = self.group_label_aligns["vas"][i]
color = self.group_label_color[i]
self.ax.text(
s=label,
x=label_x,
y=label_y,
ha=label_ha,
va=label_va,
color=color,
fontsize=self.fontsize,
family=self.fontfamily,
) | python | {
"resource": ""
} |
q33156 | MatrixPlot.draw | train | def draw(self):
"""
Draws the plot to screen.
Note to self: Do NOT call super(MatrixPlot, self).draw(); the
underlying logic for drawing here is completely different from other
plots, and as such necessitates a different implementation.
"""
matrix = nx.to_numpy_matrix(self.graph, nodelist=self.nodes)
self.ax.matshow(matrix, cmap=self.cmap) | python | {
"resource": ""
} |
q33157 | ArcPlot.compute_node_positions | train | def compute_node_positions(self):
"""
Computes nodes positions.
Arranges nodes in a line starting at (x,y) = (0,0). Node radius is
assumed to be equal to 0.5 units. Nodes are placed at integer
locations.
"""
xs = [0] * len(self.nodes)
ys = [0] * len(self.nodes)
for i, _ in enumerate(self.nodes[1:], start=1):
prev_r = self.node_sizes[i - 1] / 2
curr_r = self.node_sizes[i] / 2
xs[i] = xs[i - 1] + prev_r + curr_r
self.node_coords = {"x": xs, "y": ys} | python | {
"resource": ""
} |
q33158 | ArcPlot.draw_nodes | train | def draw_nodes(self):
"""
Draw nodes to screen.
"""
node_r = self.node_sizes
for i, node in enumerate(self.nodes):
x = self.node_coords["x"][i]
y = self.node_coords["y"][i]
color = self.node_colors[i]
node_patch = patches.Ellipse(
(x, y), node_r[i], node_r[i], lw=0, color=color, zorder=2
)
self.ax.add_patch(node_patch) | python | {
"resource": ""
} |
q33159 | GeoPlot.compute_node_positions | train | def compute_node_positions(self):
"""
Extracts the node positions based on the specified longitude and
latitude keyword arguments.
"""
xs = []
ys = []
self.locs = dict()
for node in self.nodes:
x = self.graph.node[node][self.node_lon]
y = self.graph.node[node][self.node_lat]
xs.append(x)
ys.append(y)
self.locs[node] = (x, y)
self.node_coords = {"x": xs, "y": ys} | python | {
"resource": ""
} |
q33160 | GeoPlot.draw_nodes | train | def draw_nodes(self):
"""
Draws nodes to the screen.
GeoPlot is the first plot kind to support an Altair backend in addition
to the usual matplotlib backend.
"""
if self.backend == "matplotlib":
node_r = 0.005 # temporarily hardcoded.
for i, node in enumerate(self.nodes):
x = self.node_coords["x"][i]
y = self.node_coords["y"][i]
color = self.node_colors[i]
node_patch = patches.Ellipse(
(x, y), node_r, node_r, lw=0, color=color, zorder=2
)
self.ax.add_patch(node_patch)
elif self.backend == "altair":
self.node_chart = (
alt.Chart(self.node_df)
.mark_point()
.encode(
alt.X(f"{self.node_lon}:Q", scale=alt.Scale(zero=False)),
alt.Y(f"{self.node_lat}:Q", scale=alt.Scale(zero=False)),
)
) | python | {
"resource": ""
} |
q33161 | GeoPlot.draw_edges | train | def draw_edges(self):
"""
Draws edges to screen.
"""
if self.backend == "matplotlib":
for i, (n1, n2) in enumerate(self.edges):
x1, y1 = self.locs[n1]
x2, y2 = self.locs[n2]
color = self.edge_colors[i]
line = Line2D(
xdata=[x1, x2],
ydata=[y1, y2],
color=color,
zorder=0,
alpha=0.3,
)
self.ax.add_line(line)
elif self.backend == "altair":
marker_attrs = dict()
marker_attrs["color"] = "black" # MAGICNUMBER
marker_attrs["strokeWidth"] = 1 # MAGICNUMBER
self.edge_chart = (
alt.Chart(self.edge_df)
.mark_line(**marker_attrs)
.encode(
alt.X(f"{self.node_lon}:Q"),
alt.Y(f"{self.node_lat}:Q"),
detail="edge",
)
) | python | {
"resource": ""
} |
q33162 | update_travis_deploy_password | train | def update_travis_deploy_password(encrypted_password):
"""Update the deploy section of the .travis.yml file
to use the given encrypted password.
"""
config = load_yaml_config(TRAVIS_CONFIG_FILE)
config["deploy"]["password"] = dict(secure=encrypted_password)
save_yaml_config(TRAVIS_CONFIG_FILE, config)
line = (
"# This file was autogenerated and will overwrite"
" each time you run travis_pypi_setup.py\n"
)
prepend_line(TRAVIS_CONFIG_FILE, line) | python | {
"resource": ""
} |
q33163 | graph_from_dataframe | train | def graph_from_dataframe(
dataframe,
threshold_by_percent_unique=0.1,
threshold_by_count_unique=None,
node_id_columns=[],
node_property_columns=[],
edge_property_columns=[],
node_type_key="type",
edge_type_key="type",
collapse_edges=True,
edge_agg_key="weight",
):
"""
Build an undirected graph from a pandas dataframe.
This function attempts to infer which cells should become nodes
based on either:
a. what percentage of the column are unique values (defaults to 10%)
b. an explicit count of unique values (i.e. any column with 7 unique
values or less)
c. an explicit list of column keys (i.e.
['employee_id', 'location_code'])
Column headers are preserved as node and edge 'types'. By default, this is
stored using the key 'type' which is used by some graph import processes
but can be reconfigured.
This function uses a MultiGraph structure during the build phase so that it
is possible to make multiple connections between nodes. By default, at the
end of the build phase, the MultiGraph is converted to a Graph and the
count of edges between each node-pair is written as a 'weight' property.
:param pandas.DataFrame dataframe: A pandas dataframe containing the data
to be converted into a graph.
:param float threshold_by_percent_unique: A percent value used to determine
whether a column should be used to generate nodes based on its
cardinality (i.e. in a dataframe with 100 rows, treat any column with
10 or less unique values as a node)
:param int threshold_by_count_unique: A numeric value used to determine
whether a column should be used to generate nodes based on its
cardinality (i.e. if 7 is supplied, treat any column with 7 or less
unique values as a node) - supplying a value will take priority over
percent_unique
:param list node_id_columns: A list of column headers to use for generating
nodes. Suppyling any value will take precedence over
threshold_by_percent_unique or threshold_by_count_unique. Note: this
can cause the size of the graph to expand significantly since every
unique value in a column will become a node.
:param list node_property_columns: A list of column headers to use for
generating properties of nodes. These can include the same column
headers used for the node id.
:param list edge_property_columns: A list of column headers to use for
generating properties of edges.
:param str node_type_key: A string that sets the key will be used to
preserve the column name as node property (this is useful for importing
networkx graphs to databases that distinguish between node 'types' or
for visually encoding those types in plots).
:param str edge_type_key: A string that sets the key will be used to keep
track of edge relationships an 'types' (this is useful for importing
networkx graphs to databases that distinguish between edge'types' or
for visually encoding those types in plots). Edge type values are
automatically set to <node_a_id>_<node_b_id>.
:param bool collapse_edges: Graphs are instantiated as a 'MultiGraph'
(allow multiple edges between nodes) and then collapsed into a 'Graph'
which only has a single edge between any two nodes. Information is
preserved by aggregating the count of those edges as a 'weight' value.
Set this value to False to return the MultiGraph. Note: this can cause
the size of the graph to expand significantly since each row can
potentially have n! edges where n is the number of columns in the
dataframe.
:param str edge_agg_key: A string that sets the key the edge count will be
assigned to when edges are aggregated.
:returns: A networkx Graph (or MultiGraph if collapse_edges is set to
False).
"""
assert isinstance(
dataframe, pd.DataFrame
), "{} is not a pandas DataFrame".format(dataframe)
M = MultiGraph()
# if explicit specification of node_id_columns is provided, use those
if len(node_id_columns) > 0:
node_columns = node_id_columns
else:
# otherwise, compute with thresholds based on the dataframe
if threshold_by_count_unique:
node_columns = sorted(
[
col
for col in dataframe.columns
if dataframe[col].nunique() <= threshold_by_count_unique
]
)
else:
node_columns = sorted(
[
col
for col in dataframe.columns
if dataframe[col].nunique() / dataframe.shape[0]
<= threshold_by_percent_unique # NOQA to preserve meaningful variable names
]
)
# use the unique values for each node column as node types
for node_type in node_columns:
M.add_nodes_from(
[
(node, {node_type_key: node_type})
for node in dataframe[node_type].unique()
]
)
# iterate over the rows and generate an edge for each pair of node columns
for i, row in dataframe.iterrows():
# assemble the edge properties as a dictionary
edge_properties = {k: row[k] for k in edge_property_columns}
# iterate over the node_ids in each node_column of the dataframe row
node_buffer = []
for node_type in node_columns:
node_id = row[node_type]
# get a reference to the node and assign any specified properties
node = M.nodes[node_id]
for k in node_property_columns:
# if values are not identical, append with a pipe delimiter
if k not in node:
node[k] = row[k]
elif isinstance(node[k], str) and str(row[k]) not in node[k]:
node[k] += "|{}".format(str(row[k]))
elif str(row[k]) not in str(node[k]):
node[k] = str(node[k]) + "|{}".format(str(row[k]))
# build edges using precomputed edge properties
for other_node_id, other_node_type in node_buffer:
# sort node_type so undirected edges all share the same type
ordered_name = "_".join(sorted([node_type, other_node_type]))
edge_properties[edge_type_key] = ordered_name
M.add_edge(node_id, other_node_id, **edge_properties)
# store the node from this column in the buffer for future edges
node_buffer.append((node_id, node_type))
if collapse_edges:
# convert the MultiGraph to a Graph
G = Graph(M)
k = edge_agg_key
# preserve the edge count as a sum of the weight values
for u, v, data in M.edges(data=True):
w = data[k] if k in data else 1.0
edge = G[u][v]
edge[k] = (w + edge[k]) if k in edge else w
return G
return M | python | {
"resource": ""
} |
q33164 | is_data_homogenous | train | def is_data_homogenous(data_container):
"""
Checks that all of the data in the container are of the same Python data
type. This function is called in every other function below, and as such
need not necessarily be called.
:param data_container: A generic container of data points.
:type data_container: `iterable`
"""
data_types = set([type(i) for i in data_container])
return len(data_types) == 1 | python | {
"resource": ""
} |
q33165 | infer_data_type | train | def infer_data_type(data_container):
"""
For a given container of data, infer the type of data as one of
continuous, categorical, or ordinal.
For now, it is a one-to-one mapping as such:
- str: categorical
- int: ordinal
- float: continuous
There may be better ways that are not currently implemented below. For
example, with a list of numbers, we can check whether the number of unique
entries is less than or equal to 12, but has over 10000+ entries. This
would be a good candidate for floats being categorical.
:param data_container: A generic container of data points.
:type data_container: `iterable`
"""
# Defensive programming checks.
# 0. Ensure that we are dealing with lists or tuples, and nothing else.
assert isinstance(data_container, list) or isinstance(
data_container, tuple
), "data_container should be a list or tuple."
# 1. Don't want to deal with only single values.
assert (
len(set(data_container)) > 1
), "There should be more than one value in the data container."
# 2. Don't want to deal with mixed data.
assert is_data_homogenous(
data_container
), "Data are not of a homogenous type!"
# Once we check that the data type of the container is homogenous, we only
# need to check the first element in the data container for its type.
datum = data_container[0]
# Return statements below
# treat binomial data as categorical
# TODO: make tests for this.
if len(set(data_container)) == 2:
return "categorical"
elif isinstance(datum, str):
return "categorical"
elif isinstance(datum, int):
return "ordinal"
elif isinstance(datum, float):
return "continuous"
else:
raise ValueError("Not possible to tell what the data type is.") | python | {
"resource": ""
} |
q33166 | is_data_diverging | train | def is_data_diverging(data_container):
"""
We want to use this to check whether the data are diverging or not.
This is a simple check, can be made much more sophisticated.
:param data_container: A generic container of data points.
:type data_container: `iterable`
"""
assert infer_data_type(data_container) in [
"ordinal",
"continuous",
], "Data type should be ordinal or continuous"
# Check whether the data contains negative and positive values.
has_negative = False
has_positive = False
for i in data_container:
if i < 0:
has_negative = True
elif i > 0:
has_positive = True
if has_negative and has_positive:
return True
else:
return False | python | {
"resource": ""
} |
q33167 | to_pandas_nodes | train | def to_pandas_nodes(G): # noqa: N803
"""
Convert nodes in the graph into a pandas DataFrame.
"""
data = []
for n, meta in G.nodes(data=True):
d = dict()
d["node"] = n
d.update(meta)
data.append(d)
return pd.DataFrame(data) | python | {
"resource": ""
} |
q33168 | to_pandas_edges | train | def to_pandas_edges(G, x_kw, y_kw, **kwargs): # noqa: N803
"""
Convert Graph edges to pandas DataFrame that's readable to Altair.
"""
# Get all attributes in nodes
attributes = ["source", "target", "x", "y", "edge", "pair"]
for e in G.edges():
attributes += list(G.edges[e].keys())
attributes = list(set(attributes))
# Build a dataframe for all edges and their attributes
df = pd.DataFrame(index=range(G.size() * 2), columns=attributes)
# Add node data to dataframe.
for i, (n1, n2, d) in enumerate(G.edges(data=True)):
idx = i * 2
x = G.node[n1][x_kw]
y = G.node[n1][y_kw]
data1 = dict(
edge=i, source=n1, target=n2, pair=(n1, n2), x=x, y=y, **d
)
data2 = dict(
edge=i, source=n1, target=n2, pair=(n1, n2), x=x, y=y, **d
)
df.loc[idx] = data1
df.loc[idx + 1] = data2
return df | python | {
"resource": ""
} |
q33169 | node_theta | train | def node_theta(nodelist, node):
"""
Maps node to Angle.
:param nodelist: Nodelist from the graph.
:type nodelist: list.
:param node: The node of interest. Must be in the nodelist.
:returns: theta -- the angle of the node in radians.
"""
assert len(nodelist) > 0, "nodelist must be a list of items."
assert node in nodelist, "node must be inside nodelist."
i = nodelist.index(node)
theta = -np.pi + i * 2 * np.pi / len(nodelist)
return theta | python | {
"resource": ""
} |
q33170 | group_theta | train | def group_theta(node_length, node_idx):
"""
Returns an angle corresponding to a node of interest.
Intended to be used for placing node group labels at the correct spot.
:param float node_length: total number of nodes in the graph.
:param int node_idx: the index of the node of interest.
:returns: theta -- the angle of the node of interest in radians.
"""
theta = -np.pi + node_idx * 2 * np.pi / node_length
return theta | python | {
"resource": ""
} |
q33171 | text_alignment | train | def text_alignment(x, y):
"""
Align text labels based on the x- and y-axis coordinate values.
This function is used for computing the appropriate alignment of the text
label.
For example, if the text is on the "right" side of the plot, we want it to
be left-aligned. If the text is on the "top" side of the plot, we want it
to be bottom-aligned.
:param x, y: (`int` or `float`) x- and y-axis coordinate respectively.
:returns: A 2-tuple of strings, the horizontal and vertical alignments
respectively.
"""
if x == 0:
ha = "center"
elif x > 0:
ha = "left"
else:
ha = "right"
if y == 0:
va = "center"
elif y > 0:
va = "bottom"
else:
va = "top"
return ha, va | python | {
"resource": ""
} |
q33172 | circos_radius | train | def circos_radius(n_nodes, node_r):
"""
Automatically computes the origin-to-node centre radius of the Circos plot
using the triangle equality sine rule.
a / sin(A) = b / sin(B) = c / sin(C)
:param n_nodes: the number of nodes in the plot.
:type n_nodes: int
:param node_r: the radius of each node.
:type node_r: float
:returns: Origin-to-node centre radius.
"""
A = 2 * np.pi / n_nodes # noqa
B = (np.pi - A) / 2 # noqa
a = 2 * node_r
return a * np.sin(B) / np.sin(A) | python | {
"resource": ""
} |
q33173 | to_polar | train | def to_polar(x, y, theta_units="radians"):
"""
Converts cartesian x, y to polar r, theta.
"""
assert theta_units in [
"radians",
"degrees",
], "kwarg theta_units must specified in radians or degrees"
theta = atan2(y, x)
r = sqrt(x ** 2 + y ** 2)
if theta_units == "degrees":
theta = to_degrees(theta)
return r, theta | python | {
"resource": ""
} |
q33174 | download_track | train | def download_track(track, album_name=u'', keep_previews=False, folders=False, filenames=[], custom_path=''):
"""
Given a track, force scrape it.
"""
hard_track_url = get_hard_track_url(track['id'])
# We have no info on this track whatsoever.
if not 'title' in track:
return None
if not keep_previews:
if (track.get('duration', 0) < track.get('full_duration', 0)):
puts_safe(colored.yellow("Skipping preview track") + colored.white(": " + track['title']))
return None
# May not have a "full name"
name = track['user'].get('full_name', '')
if name == '':
name = track['user']['username']
filename = sanitize_filename(name + ' - ' + track['title'] + '.mp3')
if folders:
name_path = join(custom_path, name)
if not exists(name_path):
mkdir(name_path)
filename = join(name_path, filename)
else:
filename = join(custom_path, filename)
if exists(filename):
puts_safe(colored.yellow("Track already downloaded: ") + colored.white(track['title']))
return None
# Skip already downloaded track.
if filename in filenames:
return None
if hard_track_url:
puts_safe(colored.green("Scraping") + colored.white(": " + track['title']))
else:
# Region coded?
puts_safe(colored.yellow("Unable to download") + colored.white(": " + track['title']))
return None
filename = download_file(hard_track_url, filename)
tagged = tag_file(filename,
artist=name,
title=track['title'],
year=track['created_at'][:4],
genre=track['genre'],
album=album_name,
artwork_url=track['artwork_url'])
if not tagged:
wav_filename = filename[:-3] + 'wav'
os.rename(filename, wav_filename)
filename = wav_filename
return filename | python | {
"resource": ""
} |
q33175 | get_soundcloud_data | train | def get_soundcloud_data(url):
"""
Scrapes a SoundCloud page for a track's important information.
Returns:
dict: of audio data
"""
data = {}
request = requests.get(url)
title_tag = request.text.split('<title>')[1].split('</title')[0]
data['title'] = title_tag.split(' by ')[0].strip()
data['artist'] = title_tag.split(' by ')[1].split('|')[0].strip()
# XXX Do more..
return data | python | {
"resource": ""
} |
q33176 | get_hard_track_url | train | def get_hard_track_url(item_id):
"""
Hard-scrapes a track.
"""
streams_url = "https://api.soundcloud.com/i1/tracks/%s/streams/?client_id=%s&app_version=%s" % (
item_id, AGGRESSIVE_CLIENT_ID, APP_VERSION)
response = requests.get(streams_url)
json_response = response.json()
if response.status_code == 200:
hard_track_url = json_response['http_mp3_128_url']
return hard_track_url
else:
return None | python | {
"resource": ""
} |
q33177 | process_bandcamp | train | def process_bandcamp(vargs):
"""
Main BandCamp path.
"""
artist_url = vargs['artist_url']
if 'bandcamp.com' in artist_url or ('://' in artist_url and vargs['bandcamp']):
bc_url = artist_url
else:
bc_url = 'https://' + artist_url + '.bandcamp.com/music'
filenames = scrape_bandcamp_url(bc_url, num_tracks=vargs['num_tracks'], folders=vargs['folders'], custom_path=vargs['path'])
# check if we have lists inside a list, which indicates the
# scraping has gone recursive, so we must format the output
# ( reference: http://stackoverflow.com/a/5251706 )
if any(isinstance(elem, list) for elem in filenames):
# first, remove any empty sublists inside our outter list
# ( reference: http://stackoverflow.com/a/19875634 )
filenames = [sub for sub in filenames if sub]
# now, make sure we "flatten" the list
# ( reference: http://stackoverflow.com/a/11264751 )
filenames = [val for sub in filenames for val in sub]
if vargs['open']:
open_files(filenames)
return | python | {
"resource": ""
} |
q33178 | scrape_bandcamp_url | train | def scrape_bandcamp_url(url, num_tracks=sys.maxsize, folders=False, custom_path=''):
"""
Pull out artist and track info from a Bandcamp URL.
Returns:
list: filenames to open
"""
filenames = []
album_data = get_bandcamp_metadata(url)
# If it's a list, we're dealing with a list of Album URLs,
# so we call the scrape_bandcamp_url() method for each one
if type(album_data) is list:
for album_url in album_data:
filenames.append(scrape_bandcamp_url(album_url, num_tracks, folders, custom_path))
return filenames
artist = album_data["artist"]
album_name = album_data["album_name"]
if folders:
if album_name:
directory = artist + " - " + album_name
else:
directory = artist
directory = sanitize_filename(directory)
directory = join(custom_path, directory)
if not exists(directory):
mkdir(directory)
for i, track in enumerate(album_data["trackinfo"]):
if i > num_tracks - 1:
continue
try:
track_name = track["title"]
if track["track_num"]:
track_number = str(track["track_num"]).zfill(2)
else:
track_number = None
if track_number and folders:
track_filename = '%s - %s.mp3' % (track_number, track_name)
else:
track_filename = '%s.mp3' % (track_name)
track_filename = sanitize_filename(track_filename)
if folders:
path = join(directory, track_filename)
else:
path = join(custom_path, sanitize_filename(artist) + ' - ' + track_filename)
if exists(path):
puts_safe(colored.yellow("Track already downloaded: ") + colored.white(track_name))
continue
if not track['file']:
puts_safe(colored.yellow("Track unavailble for scraping: ") + colored.white(track_name))
continue
puts_safe(colored.green("Downloading") + colored.white(": " + track_name))
path = download_file(track['file']['mp3-128'], path)
album_year = album_data['album_release_date']
if album_year:
album_year = datetime.strptime(album_year, "%d %b %Y %H:%M:%S GMT").year
tag_file(path,
artist,
track_name,
album=album_name,
year=album_year,
genre=album_data['genre'],
artwork_url=album_data['artFullsizeUrl'],
track_number=track_number,
url=album_data['url'])
filenames.append(path)
except Exception as e:
puts_safe(colored.red("Problem downloading ") + colored.white(track_name))
print(e)
return filenames | python | {
"resource": ""
} |
q33179 | process_mixcloud | train | def process_mixcloud(vargs):
"""
Main MixCloud path.
"""
artist_url = vargs['artist_url']
if 'mixcloud.com' in artist_url:
mc_url = artist_url
else:
mc_url = 'https://mixcloud.com/' + artist_url
filenames = scrape_mixcloud_url(mc_url, num_tracks=vargs['num_tracks'], folders=vargs['folders'], custom_path=vargs['path'])
if vargs['open']:
open_files(filenames)
return | python | {
"resource": ""
} |
q33180 | process_audiomack | train | def process_audiomack(vargs):
"""
Main Audiomack path.
"""
artist_url = vargs['artist_url']
if 'audiomack.com' in artist_url:
mc_url = artist_url
else:
mc_url = 'https://audiomack.com/' + artist_url
filenames = scrape_audiomack_url(mc_url, num_tracks=vargs['num_tracks'], folders=vargs['folders'], custom_path=vargs['path'])
if vargs['open']:
open_files(filenames)
return | python | {
"resource": ""
} |
q33181 | process_hive | train | def process_hive(vargs):
"""
Main Hive.co path.
"""
artist_url = vargs['artist_url']
if 'hive.co' in artist_url:
mc_url = artist_url
else:
mc_url = 'https://www.hive.co/downloads/download/' + artist_url
filenames = scrape_hive_url(mc_url, num_tracks=vargs['num_tracks'], folders=vargs['folders'], custom_path=vargs['path'])
if vargs['open']:
open_files(filenames)
return | python | {
"resource": ""
} |
q33182 | scrape_hive_url | train | def scrape_hive_url(mc_url, num_tracks=sys.maxsize, folders=False, custom_path=''):
"""
Scrape a Hive.co download page.
Returns:
list: filenames to open
"""
try:
data = get_hive_data(mc_url)
except Exception as e:
puts_safe(colored.red("Problem downloading ") + mc_url)
print(e)
filenames = []
# track_artist = sanitize_filename(data['artist'])
# track_title = sanitize_filename(data['title'])
# track_filename = track_artist + ' - ' + track_title + '.mp3'
# if folders:
# track_artist_path = join(custom_path, track_artist)
# if not exists(track_artist_path):
# mkdir(track_artist_path)
# track_filename = join(track_artist_path, track_filename)
# if exists(track_filename):
# puts_safe(colored.yellow("Skipping") + colored.white(': ' + data['title'] + " - it already exists!"))
# return []
# puts_safe(colored.green("Downloading") + colored.white(': ' + data['artist'] + " - " + data['title']))
# download_file(data['mp3_url'], track_filename)
# tag_file(track_filename,
# artist=data['artist'],
# title=data['title'],
# year=data['year'],
# genre=None,
# artwork_url=data['artwork_url'])
# filenames.append(track_filename)
return filenames | python | {
"resource": ""
} |
q33183 | process_musicbed | train | def process_musicbed(vargs):
"""
Main MusicBed path.
"""
# let's validate given MusicBed url
validated = False
if vargs['artist_url'].startswith( 'https://www.musicbed.com/' ):
splitted = vargs['artist_url'][len('https://www.musicbed.com/'):].split( '/' )
if len( splitted ) == 3:
if ( splitted[0] == 'artists' or splitted[0] == 'albums' or splitted[0] == 'songs' ) and splitted[2].isdigit():
validated = True
if not validated:
puts( colored.red( 'process_musicbed: you provided incorrect MusicBed url. Aborting.' ) )
puts( colored.white( 'Please make sure that url is either artist-url, album-url or song-url.' ) )
puts( colored.white( 'Example of correct artist-url: https://www.musicbed.com/artists/lights-motion/5188' ) )
puts( colored.white( 'Example of correct album-url: https://www.musicbed.com/albums/be-still/2828' ) )
puts( colored.white( 'Example of correct song-url: https://www.musicbed.com/songs/be-still/24540' ) )
return
filenames = scrape_musicbed_url(vargs['artist_url'], vargs['login'], vargs['password'], num_tracks=vargs['num_tracks'], folders=vargs['folders'], custom_path=vargs['path'])
if vargs['open']:
open_files(filenames) | python | {
"resource": ""
} |
q33184 | download_file | train | def download_file(url, path, session=None, params=None):
"""
Download an individual file.
"""
if url[0:2] == '//':
url = 'https://' + url[2:]
# Use a temporary file so that we don't import incomplete files.
tmp_path = path + '.tmp'
if session and params:
r = session.get( url, params=params, stream=True )
elif session and not params:
r = session.get( url, stream=True )
else:
r = requests.get(url, stream=True)
with open(tmp_path, 'wb') as f:
total_length = int(r.headers.get('content-length', 0))
for chunk in progress.bar(r.iter_content(chunk_size=1024), expected_size=(total_length / 1024) + 1):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
os.rename(tmp_path, path)
return path | python | {
"resource": ""
} |
q33185 | tag_file | train | def tag_file(filename, artist, title, year=None, genre=None, artwork_url=None, album=None, track_number=None, url=None):
"""
Attempt to put ID3 tags on a file.
Args:
artist (str):
title (str):
year (int):
genre (str):
artwork_url (str):
album (str):
track_number (str):
filename (str):
url (str):
"""
try:
audio = EasyMP3(filename)
audio.tags = None
audio["artist"] = artist
audio["title"] = title
if year:
audio["date"] = str(year)
if album:
audio["album"] = album
if track_number:
audio["tracknumber"] = track_number
if genre:
audio["genre"] = genre
if url: # saves the tag as WOAR
audio["website"] = url
audio.save()
if artwork_url:
artwork_url = artwork_url.replace('https', 'http')
mime = 'image/jpeg'
if '.jpg' in artwork_url:
mime = 'image/jpeg'
if '.png' in artwork_url:
mime = 'image/png'
if '-large' in artwork_url:
new_artwork_url = artwork_url.replace('-large', '-t500x500')
try:
image_data = requests.get(new_artwork_url).content
except Exception as e:
# No very large image available.
image_data = requests.get(artwork_url).content
else:
image_data = requests.get(artwork_url).content
audio = MP3(filename, ID3=OldID3)
audio.tags.add(
APIC(
encoding=3, # 3 is for utf-8
mime=mime,
type=3, # 3 is for the cover image
desc='Cover',
data=image_data
)
)
audio.save()
# because there is software that doesn't seem to use WOAR we save url tag again as WXXX
if url:
audio = MP3(filename, ID3=OldID3)
audio.tags.add( WXXX( encoding=3, url=url ) )
audio.save()
return True
except Exception as e:
puts(colored.red("Problem tagging file: ") + colored.white("Is this file a WAV?"))
return False | python | {
"resource": ""
} |
q33186 | open_files | train | def open_files(filenames):
"""
Call the system 'open' command on a file.
"""
command = ['open'] + filenames
process = Popen(command, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate() | python | {
"resource": ""
} |
q33187 | sanitize_filename | train | def sanitize_filename(filename):
"""
Make sure filenames are valid paths.
Returns:
str:
"""
sanitized_filename = re.sub(r'[/\\:*?"<>|]', '-', filename)
sanitized_filename = sanitized_filename.replace('&', 'and')
sanitized_filename = sanitized_filename.replace('"', '')
sanitized_filename = sanitized_filename.replace("'", '')
sanitized_filename = sanitized_filename.replace("/", '')
sanitized_filename = sanitized_filename.replace("\\", '')
# Annoying.
if sanitized_filename[0] == '.':
sanitized_filename = u'dot' + sanitized_filename[1:]
return sanitized_filename | python | {
"resource": ""
} |
q33188 | Crc.update | train | def update(self, byte_arr):
"""Read bytes and update the CRC computed."""
if byte_arr:
self.value = self.calculate(byte_arr, self.value) | python | {
"resource": ""
} |
q33189 | Crc.calculate | train | def calculate(cls, byte_arr, crc=0):
"""Compute CRC for input bytes."""
for byte in byte_iter(byte_arr):
# Taken verbatim from FIT SDK docs
tmp = cls.CRC_TABLE[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ cls.CRC_TABLE[byte & 0xF]
tmp = cls.CRC_TABLE[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ cls.CRC_TABLE[(byte >> 4) & 0xF]
return crc | python | {
"resource": ""
} |
q33190 | FitFileDataProcessor._scrub_method_name | train | def _scrub_method_name(self, method_name):
"""Scrubs a method name, returning result from local cache if available.
This method wraps fitparse.utils.scrub_method_name and memoizes results,
as scrubbing a method name is expensive.
Args:
method_name: Method name to scrub.
Returns:
Scrubbed method name.
"""
if method_name not in self._scrubbed_method_names:
self._scrubbed_method_names[method_name] = (
scrub_method_name(method_name))
return self._scrubbed_method_names[method_name] | python | {
"resource": ""
} |
q33191 | notify | train | def notify(title,
message,
prio='ALERT',
facility='LOCAL5',
fmt='[{title}] {message}',
retcode=None):
"""
Uses the ``syslog`` core Python module, which is not available on Windows
platforms.
Optional parameters:
* ``prio`` - Syslog prority level. Default is ``ALERT``. Possible
values are:
* EMERG
* ALERT
* CRIT
* ERR
* WARNING
* NOTICE
* INFO
* DEBUG
* ``facility`` - Syslog facility. Default is ``LOCAL5``. Possible
values are:
* KERN
* USER
* MAIL
* DAEMON
* AUTH
* LPR
* NEWS
* UUCP
* CRON
* SYSLOG
* LOCAL0
* LOCAL1
* LOCAL2
* LOCAL3
* LOCAL4
* LOCAL5
* LOCAL6
* LOCAL7
* ``fmt`` - Format of the message to be sent to the system logger. The
title and the message are specified using the following placeholders:
* ``{title}``
* ``{message}``
Default is ``[{title}] {message}``.
"""
prio_map = {
'EMERG': syslog.LOG_EMERG,
'ALERT': syslog.LOG_ALERT,
'CRIT': syslog.LOG_CRIT,
'ERR': syslog.LOG_ERR,
'WARNING': syslog.LOG_WARNING,
'NOTICE': syslog.LOG_NOTICE,
'INFO': syslog.LOG_INFO,
'DEBUG': syslog.LOG_DEBUG,
}
facility_map = {
'KERN': syslog.LOG_KERN,
'USER': syslog.LOG_USER,
'MAIL': syslog.LOG_MAIL,
'DAEMON': syslog.LOG_DAEMON,
'AUTH': syslog.LOG_AUTH,
'LPR': syslog.LOG_LPR,
'NEWS': syslog.LOG_NEWS,
'UUCP': syslog.LOG_UUCP,
'CRON': syslog.LOG_CRON,
'SYSLOG': syslog.LOG_SYSLOG,
'LOCAL0': syslog.LOG_LOCAL0,
'LOCAL1': syslog.LOG_LOCAL1,
'LOCAL2': syslog.LOG_LOCAL2,
'LOCAL3': syslog.LOG_LOCAL3,
'LOCAL4': syslog.LOG_LOCAL4,
'LOCAL5': syslog.LOG_LOCAL5,
'LOCAL6': syslog.LOG_LOCAL6,
'LOCAL7': syslog.LOG_LOCAL7,
}
if prio not in prio_map:
raise ValueError('invalid syslog priority')
elif facility not in facility_map:
raise ValueError('invalid syslog facility')
msg = fmt.format(title=title, message=message)
for line in msg.splitlines():
syslog.syslog(facility_map[facility] | prio_map[prio], line) | python | {
"resource": ""
} |
q33192 | notify | train | def notify(title, message, **kwargs):
"""
This backend automatically selects the correct desktop notification backend
for your operating system.
"""
for os in ['linux', 'win32', 'darwin']:
if platform.startswith(os):
module = import_module('ntfy.backends.{}'.format(os))
try:
module.notify(title=title, message=message, **kwargs)
except Exception as e:
raise DefaultNotifierError(e, module)
break | python | {
"resource": ""
} |
q33193 | notify | train | def notify(title, message, retcode=None):
"""Sends message over Telegram using telegram-send, title is ignored."""
if not path.exists(config_file):
if not path.exists(config_dir):
makedirs(config_dir)
print("Follow the instructions to configure the Telegram backend.\n")
configure(config_file)
send(messages=[message], conf=config_file) | python | {
"resource": ""
} |
q33194 | minute_change | train | def minute_change(device):
'''When we reach a minute change, animate it.'''
hours = datetime.now().strftime('%H')
minutes = datetime.now().strftime('%M')
def helper(current_y):
with canvas(device) as draw:
text(draw, (0, 1), hours, fill="white", font=proportional(CP437_FONT))
text(draw, (15, 1), ":", fill="white", font=proportional(TINY_FONT))
text(draw, (17, current_y), minutes, fill="white", font=proportional(CP437_FONT))
time.sleep(0.1)
for current_y in range(1, 9):
helper(current_y)
minutes = datetime.now().strftime('%M')
for current_y in range(9, 1, -1):
helper(current_y) | python | {
"resource": ""
} |
q33195 | clock | train | def clock(seg, seconds):
"""
Display current time on device.
"""
interval = 0.5
for i in range(int(seconds / interval)):
now = datetime.now()
seg.text = now.strftime("%H-%M-%S")
# calculate blinking dot
if i % 2 == 0:
seg.text = now.strftime("%H-%M-%S")
else:
seg.text = now.strftime("%H %M %S")
time.sleep(interval) | python | {
"resource": ""
} |
q33196 | ws2812.hide | train | def hide(self):
"""
Simulates switching the display mode OFF; this is achieved by setting
the contrast level to zero.
"""
if self._prev_contrast is None:
self._prev_contrast = self._contrast
self.contrast(0x00) | python | {
"resource": ""
} |
q33197 | ws2812.cleanup | train | def cleanup(self):
"""
Attempt to reset the device & switching it off prior to exiting the
python process.
"""
self.hide()
self.clear()
if self._leds is not None:
self._ws.ws2811_fini(self._leds)
self._ws.delete_ws2811_t(self._leds)
self._leds = None
self._channel = None | python | {
"resource": ""
} |
q33198 | rotate_image_180 | train | def rotate_image_180():
''' Rotate the image '''
# Create the media service
mycam = ONVIFCamera('192.168.0.112', 80, 'admin', '12345')
media_service = mycam.create_media_service()
profiles = media_service.GetProfiles()
# Use the first profile and Profiles have at least one
token = profiles[0]._token
# Get all video source configurations
configurations_list = media_service.GetVideoSourceConfigurations()
# Use the first profile and Profiles have at least one
video_source_configuration = configurations_list[0]
# Enable rotate
video_source_configuration.Extension[0].Rotate[0].Mode[0] = 'OFF'
# Create request type instance
request = media_service.create_type('SetVideoSourceConfiguration')
request.Configuration = video_source_configuration
# ForcePersistence is obsolete and should always be assumed to be True
request.ForcePersistence = True
# Set the video source configuration
media_service.SetVideoSourceConfiguration(request) | python | {
"resource": ""
} |
q33199 | ONVIFService.set_wsse | train | def set_wsse(self, user=None, passwd=None):
''' Basic ws-security auth '''
if user:
self.user = user
if passwd:
self.passwd = passwd
security = Security()
if self.encrypt:
token = UsernameDigestTokenDtDiff(self.user, self.passwd, dt_diff=self.dt_diff)
else:
token = UsernameToken(self.user, self.passwd)
token.setnonce()
token.setcreated()
security.tokens.append(token)
self.ws_client.set_options(wsse=security) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.