id
stringlengths
28
33
content
stringlengths
14
265k
max_stars_repo_path
stringlengths
49
55
crossvul-python_data_bad_1224_1
404: Not Found
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_1
crossvul-python_data_bad_3760_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Identity service.""" import uuid import urllib import urlparse from keystone import config from keystone import exception from keystone import policy from keystone import token from keystone.common import logging from keystone.common import manager from keystone.common import wsgi CONF = config.CONF LOG = logging.getLogger(__name__) class Manager(manager.Manager): """Default pivot point for the Identity backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.identity.driver) class Driver(object): """Interface description for an Identity driver.""" def authenticate(self, user_id=None, tenant_id=None, password=None): """Authenticate a given user, tenant and password. Returns: (user, tenant, metadata). """ raise exception.NotImplemented() def get_tenant(self, tenant_id): """Get a tenant by id. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_tenant_by_name(self, tenant_name): """Get a tenant by name. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_user(self, user_id): """Get a user by id. Returns: user_ref or None. """ raise exception.NotImplemented() def get_user_by_name(self, user_name): """Get a user by name. Returns: user_ref or None. 
""" raise exception.NotImplemented() def get_role(self, role_id): """Get a role by id. Returns: role_ref or None. """ raise exception.NotImplemented() def list_users(self): """List all users in the system. NOTE(termie): I'd prefer if this listed only the users for a given tenant. Returns: a list of user_refs or an empty list. """ raise exception.NotImplemented() def list_roles(self): """List all roles in the system. Returns: a list of role_refs or an empty list. """ raise exception.NotImplemented() # NOTE(termie): seven calls below should probably be exposed by the api # more clearly when the api redesign happens def add_user_to_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def remove_user_from_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def get_all_tenants(self): raise exception.NotImplemented() def get_tenants_for_user(self, user_id): """Get the tenants associated with a given user. Returns: a list of tenant ids. """ raise exception.NotImplemented() def get_roles_for_user_and_tenant(self, user_id, tenant_id): """Get the roles associated with a user within given tenant. Returns: a list of role ids. 
""" raise exception.NotImplemented() def add_role_to_user_and_tenant(self, user_id, tenant_id, role_id): """Add a role to a user within given tenant.""" raise exception.NotImplemented() def remove_role_from_user_and_tenant(self, user_id, tenant_id, role_id): """Remove a role from a user within given tenant.""" raise exception.NotImplemented() # user crud def create_user(self, user_id, user): raise exception.NotImplemented() def update_user(self, user_id, user): raise exception.NotImplemented() def delete_user(self, user_id): raise exception.NotImplemented() # tenant crud def create_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def update_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def delete_tenant(self, tenant_id, tenant): raise exception.NotImplemented() # metadata crud def get_metadata(self, user_id, tenant_id): raise exception.NotImplemented() def create_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def update_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def delete_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() # role crud def create_role(self, role_id, role): raise exception.NotImplemented() def update_role(self, role_id, role): raise exception.NotImplemented() def delete_role(self, role_id): raise exception.NotImplemented() class PublicRouter(wsgi.ComposableRouter): def add_routes(self, mapper): tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_tenants_for_token', conditions=dict(methods=['GET'])) class AdminRouter(wsgi.ComposableRouter): def add_routes(self, mapper): # Tenant Operations tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_all_tenants', conditions=dict(method=['GET'])) mapper.connect('/tenants/{tenant_id}', controller=tenant_controller, action='get_tenant', conditions=dict(method=['GET'])) # 
User Operations user_controller = UserController() mapper.connect('/users/{user_id}', controller=user_controller, action='get_user', conditions=dict(method=['GET'])) # Role Operations roles_controller = RoleController() mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles', controller=roles_controller, action='get_user_roles', conditions=dict(method=['GET'])) mapper.connect('/users/{user_id}/roles', controller=roles_controller, action='get_user_roles', conditions=dict(method=['GET'])) class TenantController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(TenantController, self).__init__() def get_all_tenants(self, context, **kw): """Gets a list of all tenants for an admin user.""" self.assert_admin(context) tenant_refs = self.identity_api.get_tenants(context) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenants_for_token(self, context, **kw): """Get valid tenants for token based on token used to authenticate. Pulls the token from the context, validates it and gets the valid tenants for the user in the token. Doesn't care about token scopedness. 
""" try: token_ref = self.token_api.get_token(context=context, token_id=context['token_id']) except exception.NotFound: raise exception.Unauthorized() user_ref = token_ref['user'] tenant_ids = self.identity_api.get_tenants_for_user( context, user_ref['id']) tenant_refs = [] for tenant_id in tenant_ids: tenant_refs.append(self.identity_api.get_tenant( context=context, tenant_id=tenant_id)) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenant(self, context, tenant_id): # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) tenant = self.identity_api.get_tenant(context, tenant_id) if tenant is None: raise exception.TenantNotFound(tenant_id=tenant_id) return {'tenant': tenant} # CRUD Extension def create_tenant(self, context, tenant): tenant_ref = self._normalize_dict(tenant) if not 'name' in tenant_ref or not tenant_ref['name']: msg = 'Name field is required and cannot be empty' raise exception.ValidationError(message=msg) self.assert_admin(context) tenant_id = (tenant_ref.get('id') and tenant_ref.get('id') or uuid.uuid4().hex) tenant_ref['id'] = tenant_id tenant = self.identity_api.create_tenant( context, tenant_id, tenant_ref) return {'tenant': tenant} def update_tenant(self, context, tenant_id, tenant): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) tenant_ref = self.identity_api.update_tenant( context, tenant_id, tenant) return {'tenant': tenant_ref} def delete_tenant(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) self.identity_api.delete_tenant(context, tenant_id) def get_tenant_users(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, 
tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) user_refs = self.identity_api.get_tenant_users(context, tenant_id) return {'users': user_refs} def _format_tenant_list(self, tenant_refs, **kwargs): marker = kwargs.get('marker') page_idx = 0 if marker is not None: for (marker_idx, tenant) in enumerate(tenant_refs): if tenant['id'] == marker: # we start pagination after the marker page_idx = marker_idx + 1 break else: msg = 'Marker could not be found' raise exception.ValidationError(message=msg) limit = kwargs.get('limit') if limit is not None: try: limit = int(limit) if limit < 0: raise AssertionError() except (ValueError, AssertionError): msg = 'Invalid limit value' raise exception.ValidationError(message=msg) tenant_refs = tenant_refs[page_idx:limit] for x in tenant_refs: if 'enabled' not in x: x['enabled'] = True o = {'tenants': tenant_refs, 'tenants_links': []} return o class UserController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(UserController, self).__init__() def get_user(self, context, user_id): self.assert_admin(context) user_ref = self.identity_api.get_user(context, user_id) if not user_ref: raise exception.UserNotFound(user_id=user_id) return {'user': user_ref} def get_users(self, context): # NOTE(termie): i can't imagine that this really wants all the data # about every single user in the system... 
self.assert_admin(context) user_refs = self.identity_api.list_users(context) return {'users': user_refs} # CRUD extension def create_user(self, context, user): user = self._normalize_dict(user) self.assert_admin(context) if not 'name' in user or not user['name']: msg = 'Name field is required and cannot be empty' raise exception.ValidationError(message=msg) tenant_id = user.get('tenantId', None) if (tenant_id is not None and self.identity_api.get_tenant(context, tenant_id) is None): raise exception.TenantNotFound(tenant_id=tenant_id) user_id = uuid.uuid4().hex user_ref = user.copy() user_ref['id'] = user_id new_user_ref = self.identity_api.create_user( context, user_id, user_ref) if tenant_id: self.identity_api.add_user_to_tenant(context, tenant_id, user_id) return {'user': new_user_ref} def update_user(self, context, user_id, user): # NOTE(termie): this is really more of a patch than a put self.assert_admin(context) if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) user_ref = self.identity_api.update_user(context, user_id, user) # If the password was changed or the user was disabled we clear tokens if user.get('password') or not user.get('enabled', True): try: for token_id in self.token_api.list_tokens(context, user_id): self.token_api.delete_token(context, token_id) except exception.NotImplemented: # The users status has been changed but tokens remain valid for # backends that can't list tokens for users LOG.warning('User %s status has changed, but existing tokens ' 'remain valid' % user_id) return {'user': user_ref} def delete_user(self, context, user_id): self.assert_admin(context) if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) self.identity_api.delete_user(context, user_id) def set_user_enabled(self, context, user_id, user): return self.update_user(context, user_id, user) def set_user_password(self, context, user_id, user): return 
self.update_user(context, user_id, user) def update_user_tenant(self, context, user_id, user): """Update the default tenant.""" # ensure that we're a member of that tenant tenant_id = user.get('tenantId') self.identity_api.add_user_to_tenant(context, tenant_id, user_id) return self.update_user(context, user_id, user) class RoleController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.token_api = token.Manager() self.policy_api = policy.Manager() super(RoleController, self).__init__() # COMPAT(essex-3) def get_user_roles(self, context, user_id, tenant_id=None): """Get the roles for a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. """ if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant ID required') user = self.identity_api.get_user(context, user_id) if user is None: raise exception.UserNotFound(user_id=user_id) tenant = self.identity_api.get_tenant(context, tenant_id) if tenant is None: raise exception.TenantNotFound(tenant_id=tenant_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) return {'roles': [self.identity_api.get_role(context, x) for x in roles]} # CRUD extension def get_role(self, context, role_id): self.assert_admin(context) role_ref = self.identity_api.get_role(context, role_id) if not role_ref: raise exception.RoleNotFound(role_id=role_id) return {'role': role_ref} def create_role(self, context, role): role = self._normalize_dict(role) self.assert_admin(context) if not 'name' in role or not role['name']: msg = 'Name field is required and cannot be empty' raise exception.ValidationError(message=msg) role_id = uuid.uuid4().hex role['id'] = role_id role_ref = self.identity_api.create_role(context, role_id, role) return {'role': role_ref} def delete_role(self, context, role_id): self.assert_admin(context) self.get_role(context, role_id) 
self.identity_api.delete_role(context, role_id) def get_roles(self, context): self.assert_admin(context) roles = self.identity_api.list_roles(context) # TODO(termie): probably inefficient at some point return {'roles': roles} def add_role_to_user(self, context, user_id, role_id, tenant_id=None): """Add a role to a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. """ self.assert_admin(context) if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant_id required') if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) if self.identity_api.get_role(context, role_id) is None: raise exception.RoleNotFound(role_id=role_id) # This still has the weird legacy semantics that adding a role to # a user also adds them to a tenant self.identity_api.add_user_to_tenant(context, tenant_id, user_id) self.identity_api.add_role_to_user_and_tenant( context, user_id, tenant_id, role_id) role_ref = self.identity_api.get_role(context, role_id) return {'role': role_ref} def remove_role_from_user(self, context, user_id, role_id, tenant_id=None): """Remove a role from a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. 
""" self.assert_admin(context) if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant_id required') if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) if self.identity_api.get_role(context, role_id) is None: raise exception.RoleNotFound(role_id=role_id) # This still has the weird legacy semantics that adding a role to # a user also adds them to a tenant, so we must follow up on that self.identity_api.remove_role_from_user_and_tenant( context, user_id, tenant_id, role_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) if not roles: self.identity_api.remove_user_from_tenant( context, tenant_id, user_id) return # COMPAT(diablo): CRUD extension def get_role_refs(self, context, user_id): """Ultimate hack to get around having to make role_refs first-class. This will basically iterate over the various roles the user has in all tenants the user is a member of and create fake role_refs where the id encodes the user-tenant-role information so we can look up the appropriate data when we need to delete them. """ self.assert_admin(context) # Ensure user exists by getting it first. self.identity_api.get_user(context, user_id) tenant_ids = self.identity_api.get_tenants_for_user(context, user_id) o = [] for tenant_id in tenant_ids: role_ids = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) for role_id in role_ids: ref = {'roleId': role_id, 'tenantId': tenant_id, 'userId': user_id} ref['id'] = urllib.urlencode(ref) o.append(ref) return {'roles': o} # COMPAT(diablo): CRUD extension def create_role_ref(self, context, user_id, role): """This is actually used for adding a user to a tenant. In the legacy data model adding a user to a tenant required setting a role. 
""" self.assert_admin(context) # TODO(termie): for now we're ignoring the actual role tenant_id = role.get('tenantId') role_id = role.get('roleId') self.identity_api.add_user_to_tenant(context, tenant_id, user_id) self.identity_api.add_role_to_user_and_tenant( context, user_id, tenant_id, role_id) role_ref = self.identity_api.get_role(context, role_id) return {'role': role_ref} # COMPAT(diablo): CRUD extension def delete_role_ref(self, context, user_id, role_ref_id): """This is actually used for deleting a user from a tenant. In the legacy data model removing a user from a tenant required deleting a role. To emulate this, we encode the tenant and role in the role_ref_id, and if this happens to be the last role for the user-tenant pair, we remove the user from the tenant. """ self.assert_admin(context) # TODO(termie): for now we're ignoring the actual role role_ref_ref = urlparse.parse_qs(role_ref_id) tenant_id = role_ref_ref.get('tenantId')[0] role_id = role_ref_ref.get('roleId')[0] self.identity_api.remove_role_from_user_and_tenant( context, user_id, tenant_id, role_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) if not roles: self.identity_api.remove_user_from_tenant( context, tenant_id, user_id)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3760_0
crossvul-python_data_bad_650_1
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ Core protocol implementation """ from __future__ import print_function import os import socket import sys import threading import time import weakref from hashlib import md5, sha1, sha256, sha512 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import algorithms, Cipher, modes import paramiko from paramiko import util from paramiko.auth_handler import AuthHandler from paramiko.ssh_gss import GSSAuth from paramiko.channel import Channel from paramiko.common import ( xffffffff, cMSG_CHANNEL_OPEN, cMSG_IGNORE, cMSG_GLOBAL_REQUEST, DEBUG, MSG_KEXINIT, MSG_IGNORE, MSG_DISCONNECT, MSG_DEBUG, ERROR, WARNING, cMSG_UNIMPLEMENTED, INFO, cMSG_KEXINIT, cMSG_NEWKEYS, MSG_NEWKEYS, cMSG_REQUEST_SUCCESS, cMSG_REQUEST_FAILURE, CONNECTION_FAILED_CODE, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_SUCCEEDED, cMSG_CHANNEL_OPEN_FAILURE, cMSG_CHANNEL_OPEN_SUCCESS, MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, MSG_CHANNEL_OPEN, MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_REQUEST, 
MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MIN_WINDOW_SIZE, MIN_PACKET_SIZE, MAX_WINDOW_SIZE, DEFAULT_WINDOW_SIZE, DEFAULT_MAX_PACKET_SIZE, ) from paramiko.compress import ZlibCompressor, ZlibDecompressor from paramiko.dsskey import DSSKey from paramiko.kex_gex import KexGex, KexGexSHA256 from paramiko.kex_group1 import KexGroup1 from paramiko.kex_group14 import KexGroup14 from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14 from paramiko.message import Message from paramiko.packet import Packetizer, NeedRekeyException from paramiko.primes import ModulusPack from paramiko.py3compat import string_types, long, byte_ord, b, input, PY2 from paramiko.rsakey import RSAKey from paramiko.ecdsakey import ECDSAKey from paramiko.server import ServerInterface from paramiko.sftp_client import SFTPClient from paramiko.ssh_exception import ( SSHException, BadAuthenticationType, ChannelException, ProxyCommandFailure, ) from paramiko.util import retry_on_signal, ClosingContextManager, clamp_value # for thread cleanup _active_threads = [] def _join_lingering_threads(): for thr in _active_threads: thr.stop_thread() import atexit atexit.register(_join_lingering_threads) class Transport(threading.Thread, ClosingContextManager): """ An SSH Transport attaches to a stream (usually a socket), negotiates an encrypted session, authenticates, and then creates stream tunnels, called `channels <.Channel>`, across the session. Multiple channels can be multiplexed across a single session (and often are, in the case of port forwardings). Instances of this class may be used as context managers. """ _ENCRYPT = object() _DECRYPT = object() _PROTO_ID = '2.0' _CLIENT_ID = 'paramiko_%s' % paramiko.__version__ # These tuples of algorithm identifiers are in preference order; do not # reorder without reason! 
_preferred_ciphers = ( 'aes128-ctr', 'aes192-ctr', 'aes256-ctr', 'aes128-cbc', 'aes192-cbc', 'aes256-cbc', 'blowfish-cbc', '3des-cbc', ) _preferred_macs = ( 'hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1', 'hmac-md5', 'hmac-sha1-96', 'hmac-md5-96', ) _preferred_keys = ( 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-rsa', 'ssh-dss', ) _preferred_kex = ( 'diffie-hellman-group1-sha1', 'diffie-hellman-group14-sha1', 'diffie-hellman-group-exchange-sha1', 'diffie-hellman-group-exchange-sha256', ) _preferred_gsskex = ( 'gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==', 'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==', 'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==', ) _preferred_compression = ('none',) _cipher_info = { 'aes128-ctr': { 'class': algorithms.AES, 'mode': modes.CTR, 'block-size': 16, 'key-size': 16 }, 'aes192-ctr': { 'class': algorithms.AES, 'mode': modes.CTR, 'block-size': 16, 'key-size': 24 }, 'aes256-ctr': { 'class': algorithms.AES, 'mode': modes.CTR, 'block-size': 16, 'key-size': 32 }, 'blowfish-cbc': { 'class': algorithms.Blowfish, 'mode': modes.CBC, 'block-size': 8, 'key-size': 16 }, 'aes128-cbc': { 'class': algorithms.AES, 'mode': modes.CBC, 'block-size': 16, 'key-size': 16 }, 'aes192-cbc': { 'class': algorithms.AES, 'mode': modes.CBC, 'block-size': 16, 'key-size': 24 }, 'aes256-cbc': { 'class': algorithms.AES, 'mode': modes.CBC, 'block-size': 16, 'key-size': 32 }, '3des-cbc': { 'class': algorithms.TripleDES, 'mode': modes.CBC, 'block-size': 8, 'key-size': 24 }, } _mac_info = { 'hmac-sha1': {'class': sha1, 'size': 20}, 'hmac-sha1-96': {'class': sha1, 'size': 12}, 'hmac-sha2-256': {'class': sha256, 'size': 32}, 'hmac-sha2-512': {'class': sha512, 'size': 64}, 'hmac-md5': {'class': md5, 'size': 16}, 'hmac-md5-96': {'class': md5, 'size': 12}, } _key_info = { 'ssh-rsa': RSAKey, 'ssh-dss': DSSKey, 'ecdsa-sha2-nistp256': ECDSAKey, 'ecdsa-sha2-nistp384': ECDSAKey, 'ecdsa-sha2-nistp521': ECDSAKey, } _kex_info = { 'diffie-hellman-group1-sha1': KexGroup1, 
'diffie-hellman-group14-sha1': KexGroup14, 'diffie-hellman-group-exchange-sha1': KexGex, 'diffie-hellman-group-exchange-sha256': KexGexSHA256, 'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup1, 'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup14, 'gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGex } _compression_info = { # zlib@openssh.com is just zlib, but only turned on after a successful # authentication. openssh servers may only offer this type because # they've had troubles with security holes in zlib in the past. 'zlib@openssh.com': (ZlibCompressor, ZlibDecompressor), 'zlib': (ZlibCompressor, ZlibDecompressor), 'none': (None, None), } _modulus_pack = None _active_check_timeout = 0.1 def __init__(self, sock, default_window_size=DEFAULT_WINDOW_SIZE, default_max_packet_size=DEFAULT_MAX_PACKET_SIZE, gss_kex=False, gss_deleg_creds=True): """ Create a new SSH session over an existing socket, or socket-like object. This only creates the `.Transport` object; it doesn't begin the SSH session yet. Use `connect` or `start_client` to begin a client session, or `start_server` to begin a server session. If the object is not actually a socket, it must have the following methods: - ``send(str)``: Writes from 1 to ``len(str)`` bytes, and returns an int representing the number of bytes written. Returns 0 or raises ``EOFError`` if the stream has been closed. - ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a string. Returns 0 or raises ``EOFError`` if the stream has been closed. - ``close()``: Closes the socket. - ``settimeout(n)``: Sets a (float) timeout on I/O operations. For ease of use, you may also pass in an address (as a tuple) or a host string as the ``sock`` argument. (A host string is a hostname with an optional port (separated by ``":"``) which will be converted into a tuple of ``(hostname, port)``.) A socket will be connected to this address and used for communication. Exceptions from the ``socket`` call may be thrown in this case. .. 
note:: Modifying the the window and packet sizes might have adverse effects on your channels created from this transport. The default values are the same as in the OpenSSH code base and have been battle tested. :param socket sock: a socket or socket-like object to create the session over. :param int default_window_size: sets the default window size on the transport. (defaults to 2097152) :param int default_max_packet_size: sets the default max packet size on the transport. (defaults to 32768) .. versionchanged:: 1.15 Added the ``default_window_size`` and ``default_max_packet_size`` arguments. """ self.active = False if isinstance(sock, string_types): # convert "host:port" into (host, port) hl = sock.split(':', 1) if len(hl) == 1: sock = (hl[0], 22) else: sock = (hl[0], int(hl[1])) if type(sock) is tuple: # connect to the given (host, port) hostname, port = sock reason = 'No suitable address family' addrinfos = socket.getaddrinfo( hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM ) for family, socktype, proto, canonname, sockaddr in addrinfos: if socktype == socket.SOCK_STREAM: af = family # addr = sockaddr sock = socket.socket(af, socket.SOCK_STREAM) try: retry_on_signal(lambda: sock.connect((hostname, port))) except socket.error as e: reason = str(e) else: break else: raise SSHException( 'Unable to connect to %s: %s' % (hostname, reason)) # okay, normal socket-ish flow here... threading.Thread.__init__(self) self.setDaemon(True) self.sock = sock # we set the timeout so we can check self.active periodically to # see if we should bail. socket.timeout exception is never propagated. 
self.sock.settimeout(self._active_check_timeout) # negotiated crypto parameters self.packetizer = Packetizer(sock) self.local_version = 'SSH-' + self._PROTO_ID + '-' + self._CLIENT_ID self.remote_version = '' self.local_cipher = self.remote_cipher = '' self.local_kex_init = self.remote_kex_init = None self.local_mac = self.remote_mac = None self.local_compression = self.remote_compression = None self.session_id = None self.host_key_type = None self.host_key = None # GSS-API / SSPI Key Exchange self.use_gss_kex = gss_kex # This will be set to True if GSS-API Key Exchange was performed self.gss_kex_used = False self.kexgss_ctxt = None self.gss_host = None if self.use_gss_kex: self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds) self._preferred_kex = self._preferred_gsskex + self._preferred_kex # state used during negotiation self.kex_engine = None self.H = None self.K = None self.initial_kex_done = False self.in_kex = False self.authenticated = False self._expected_packet = tuple() # synchronization (always higher level than write_lock) self.lock = threading.Lock() # tracking open channels self._channels = ChannelMap() self.channel_events = {} # (id -> Event) self.channels_seen = {} # (id -> True) self._channel_counter = 0 self.default_max_packet_size = default_max_packet_size self.default_window_size = default_window_size self._forward_agent_handler = None self._x11_handler = None self._tcp_handler = None self.saved_exception = None self.clear_to_send = threading.Event() self.clear_to_send_lock = threading.Lock() self.clear_to_send_timeout = 30.0 self.log_name = 'paramiko.transport' self.logger = util.get_logger(self.log_name) self.packetizer.set_log(self.logger) self.auth_handler = None # response Message from an arbitrary global request self.global_response = None # user-defined event callbacks self.completion_event = None # how long (seconds) to wait for the SSH banner self.banner_timeout = 15 # how long (seconds) to wait for the handshake to finish after 
SSH # banner sent. self.handshake_timeout = 15 # server mode: self.server_mode = False self.server_object = None self.server_key_dict = {} self.server_accepts = [] self.server_accept_cv = threading.Condition(self.lock) self.subsystem_table = {} def __repr__(self): """ Returns a string representation of this object, for debugging. """ out = '<paramiko.Transport at %s' % hex(long(id(self)) & xffffffff) if not self.active: out += ' (unconnected)' else: if self.local_cipher != '': out += ' (cipher %s, %d bits)' % ( self.local_cipher, self._cipher_info[self.local_cipher]['key-size'] * 8 ) if self.is_authenticated(): out += ' (active; %d open channel(s))' % len(self._channels) elif self.initial_kex_done: out += ' (connected; awaiting auth)' else: out += ' (connecting)' out += '>' return out def atfork(self): """ Terminate this Transport without closing the session. On posix systems, if a Transport is open during process forking, both parent and child will share the underlying socket, but only one process can use the connection (without corrupting the session). Use this method to clean up a Transport object without disrupting the other process. .. versionadded:: 1.5.3 """ self.sock.close() self.close() def get_security_options(self): """ Return a `.SecurityOptions` object which can be used to tweak the encryption algorithms this transport will permit (for encryption, digest/hash operations, public keys, and key exchanges) and the order of preference for them. """ return SecurityOptions(self) def set_gss_host(self, gss_host): """ Setter for C{gss_host} if GSS-API Key Exchange is performed. :param str gss_host: The targets name in the kerberos database Default: The name of the host to connect to """ # We need the FQDN to get this working with SSPI self.gss_host = socket.getfqdn(gss_host) def start_client(self, event=None): """ Negotiate a new SSH2 session as a client. This is the first step after creating a new `.Transport`. 
A separate thread is created for protocol negotiation. If an event is passed in, this method returns immediately. When negotiation is done (successful or not), the given ``Event`` will be triggered. On failure, `is_active` will return ``False``. (Since 1.4) If ``event`` is ``None``, this method will not return until negotation is done. On success, the method returns normally. Otherwise an SSHException is raised. After a successful negotiation, you will usually want to authenticate, calling `auth_password <Transport.auth_password>` or `auth_publickey <Transport.auth_publickey>`. .. note:: `connect` is a simpler method for connecting as a client. .. note:: After calling this method (or `start_server` or `connect`), you should no longer directly read from or write to the original socket object. :param .threading.Event event: an event to trigger when negotiation is complete (optional) :raises: `.SSHException` -- if negotiation fails (and no ``event`` was passed in) """ self.active = True if event is not None: # async, return immediately and let the app poll for completion self.completion_event = event self.start() return # synchronous, wait for a result self.completion_event = event = threading.Event() self.start() while True: event.wait(0.1) if not self.active: e = self.get_exception() if e is not None: raise e raise SSHException('Negotiation failed.') if event.is_set(): break def start_server(self, event=None, server=None): """ Negotiate a new SSH2 session as a server. This is the first step after creating a new `.Transport` and setting up your server host key(s). A separate thread is created for protocol negotiation. If an event is passed in, this method returns immediately. When negotiation is done (successful or not), the given ``Event`` will be triggered. On failure, `is_active` will return ``False``. (Since 1.4) If ``event`` is ``None``, this method will not return until negotiation is done. On success, the method returns normally. 
Otherwise an SSHException is raised. After a successful negotiation, the client will need to authenticate. Override the methods `get_allowed_auths <.ServerInterface.get_allowed_auths>`, `check_auth_none <.ServerInterface.check_auth_none>`, `check_auth_password <.ServerInterface.check_auth_password>`, and `check_auth_publickey <.ServerInterface.check_auth_publickey>` in the given ``server`` object to control the authentication process. After a successful authentication, the client should request to open a channel. Override `check_channel_request <.ServerInterface.check_channel_request>` in the given ``server`` object to allow channels to be opened. .. note:: After calling this method (or `start_client` or `connect`), you should no longer directly read from or write to the original socket object. :param .threading.Event event: an event to trigger when negotiation is complete. :param .ServerInterface server: an object used to perform authentication and create `channels <.Channel>` :raises: `.SSHException` -- if negotiation fails (and no ``event`` was passed in) """ if server is None: server = ServerInterface() self.server_mode = True self.server_object = server self.active = True if event is not None: # async, return immediately and let the app poll for completion self.completion_event = event self.start() return # synchronous, wait for a result self.completion_event = event = threading.Event() self.start() while True: event.wait(0.1) if not self.active: e = self.get_exception() if e is not None: raise e raise SSHException('Negotiation failed.') if event.is_set(): break def add_server_key(self, key): """ Add a host key to the list of keys used for server mode. When behaving as a server, the host key is used to sign certain packets during the SSH2 negotiation, so that the client can trust that we are who we say we are. Because this is used for signing, the key must contain private key info, not just the public half. Only one key of each type (RSA or DSS) is kept. 
:param .PKey key: the host key to add, usually an `.RSAKey` or `.DSSKey`. """ self.server_key_dict[key.get_name()] = key def get_server_key(self): """ Return the active host key, in server mode. After negotiating with the client, this method will return the negotiated host key. If only one type of host key was set with `add_server_key`, that's the only key that will ever be returned. But in cases where you have set more than one type of host key (for example, an RSA key and a DSS key), the key type will be negotiated by the client, and this method will return the key of the type agreed on. If the host key has not been negotiated yet, ``None`` is returned. In client mode, the behavior is undefined. :return: host key (`.PKey`) of the type negotiated by the client, or ``None``. """ try: return self.server_key_dict[self.host_key_type] except KeyError: pass return None @staticmethod def load_server_moduli(filename=None): """ (optional) Load a file of prime moduli for use in doing group-exchange key negotiation in server mode. It's a rather obscure option and can be safely ignored. In server mode, the remote client may request "group-exchange" key negotiation, which asks the server to send a random prime number that fits certain criteria. These primes are pretty difficult to compute, so they can't be generated on demand. But many systems contain a file of suitable primes (usually named something like ``/etc/ssh/moduli``). If you call `load_server_moduli` and it returns ``True``, then this file of primes has been loaded and we will support "group-exchange" in server mode. Otherwise server mode will just claim that it doesn't support that method of key negotiation. :param str filename: optional path to the moduli file, if you happen to know that it's not in a standard location. :return: True if a moduli file was successfully loaded; False otherwise. .. note:: This has no effect when used in client mode. 
""" Transport._modulus_pack = ModulusPack() # places to look for the openssh "moduli" file file_list = ['/etc/ssh/moduli', '/usr/local/etc/moduli'] if filename is not None: file_list.insert(0, filename) for fn in file_list: try: Transport._modulus_pack.read_file(fn) return True except IOError: pass # none succeeded Transport._modulus_pack = None return False def close(self): """ Close this session, and any open channels that are tied to it. """ if not self.active: return self.stop_thread() for chan in list(self._channels.values()): chan._unlink() self.sock.close() def get_remote_server_key(self): """ Return the host key of the server (in client mode). .. note:: Previously this call returned a tuple of ``(key type, key string)``. You can get the same effect by calling `.PKey.get_name` for the key type, and ``str(key)`` for the key string. :raises: `.SSHException` -- if no session is currently active. :return: public key (`.PKey`) of the remote server """ if (not self.active) or (not self.initial_kex_done): raise SSHException('No existing session') return self.host_key def is_active(self): """ Return true if this session is active (open). :return: True if the session is still active (open); False if the session is closed """ return self.active def open_session( self, window_size=None, max_packet_size=None, timeout=None, ): """ Request a new channel to the server, of type ``"session"``. This is just an alias for calling `open_channel` with an argument of ``"session"``. .. note:: Modifying the the window and packet sizes might have adverse effects on the session created. The default values are the same as in the OpenSSH code base and have been battle tested. :param int window_size: optional window size for this session. :param int max_packet_size: optional max packet size for this session. :return: a new `.Channel` :raises: `.SSHException` -- if the request is rejected or the session ends prematurely .. 
versionchanged:: 1.13.4/1.14.3/1.15.3 Added the ``timeout`` argument. .. versionchanged:: 1.15 Added the ``window_size`` and ``max_packet_size`` arguments. """ return self.open_channel('session', window_size=window_size, max_packet_size=max_packet_size, timeout=timeout) def open_x11_channel(self, src_addr=None): """ Request a new channel to the client, of type ``"x11"``. This is just an alias for ``open_channel('x11', src_addr=src_addr)``. :param tuple src_addr: the source address (``(str, int)``) of the x11 server (port is the x11 port, ie. 6010) :return: a new `.Channel` :raises: `.SSHException` -- if the request is rejected or the session ends prematurely """ return self.open_channel('x11', src_addr=src_addr) def open_forward_agent_channel(self): """ Request a new channel to the client, of type ``"auth-agent@openssh.com"``. This is just an alias for ``open_channel('auth-agent@openssh.com')``. :return: a new `.Channel` :raises: `.SSHException` -- if the request is rejected or the session ends prematurely """ return self.open_channel('auth-agent@openssh.com') def open_forwarded_tcpip_channel(self, src_addr, dest_addr): """ Request a new channel back to the client, of type ``forwarded-tcpip``. This is used after a client has requested port forwarding, for sending incoming connections back to the client. :param src_addr: originator's address :param dest_addr: local (server) connected address """ return self.open_channel('forwarded-tcpip', dest_addr, src_addr) def open_channel(self, kind, dest_addr=None, src_addr=None, window_size=None, max_packet_size=None, timeout=None): """ Request a new channel to the server. `Channels <.Channel>` are socket-like objects used for the actual transfer of data across the session. You may only request a channel after negotiating encryption (using `connect` or `start_client`) and authenticating. .. note:: Modifying the the window and packet sizes might have adverse effects on the channel created. 
The default values are the same as in the OpenSSH code base and have been battle tested. :param str kind: the kind of channel requested (usually ``"session"``, ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``) :param tuple dest_addr: the destination address (address + port tuple) of this port forwarding, if ``kind`` is ``"forwarded-tcpip"`` or ``"direct-tcpip"`` (ignored for other channel types) :param src_addr: the source address of this port forwarding, if ``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"`` :param int window_size: optional window size for this session. :param int max_packet_size: optional max packet size for this session. :param float timeout: optional timeout opening a channel, default 3600s (1h) :return: a new `.Channel` on success :raises: `.SSHException` -- if the request is rejected, the session ends prematurely or there is a timeout openning a channel .. versionchanged:: 1.15 Added the ``window_size`` and ``max_packet_size`` arguments. """ if not self.active: raise SSHException('SSH session not active') timeout = 3600 if timeout is None else timeout self.lock.acquire() try: window_size = self._sanitize_window_size(window_size) max_packet_size = self._sanitize_packet_size(max_packet_size) chanid = self._next_channel() m = Message() m.add_byte(cMSG_CHANNEL_OPEN) m.add_string(kind) m.add_int(chanid) m.add_int(window_size) m.add_int(max_packet_size) if (kind == 'forwarded-tcpip') or (kind == 'direct-tcpip'): m.add_string(dest_addr[0]) m.add_int(dest_addr[1]) m.add_string(src_addr[0]) m.add_int(src_addr[1]) elif kind == 'x11': m.add_string(src_addr[0]) m.add_int(src_addr[1]) chan = Channel(chanid) self._channels.put(chanid, chan) self.channel_events[chanid] = event = threading.Event() self.channels_seen[chanid] = True chan._set_transport(self) chan._set_window(window_size, max_packet_size) finally: self.lock.release() self._send_user_message(m) start_ts = time.time() while True: event.wait(0.1) if not self.active: e = 
self.get_exception() if e is None: e = SSHException('Unable to open channel.') raise e if event.is_set(): break elif start_ts + timeout < time.time(): raise SSHException('Timeout opening channel.') chan = self._channels.get(chanid) if chan is not None: return chan e = self.get_exception() if e is None: e = SSHException('Unable to open channel.') raise e def request_port_forward(self, address, port, handler=None): """ Ask the server to forward TCP connections from a listening port on the server, across this SSH session. If a handler is given, that handler is called from a different thread whenever a forwarded connection arrives. The handler parameters are:: handler( channel, (origin_addr, origin_port), (server_addr, server_port), ) where ``server_addr`` and ``server_port`` are the address and port that the server was listening on. If no handler is set, the default behavior is to send new incoming forwarded connections into the accept queue, to be picked up via `accept`. :param str address: the address to bind when forwarding :param int port: the port to forward, or 0 to ask the server to allocate any port :param callable handler: optional handler for incoming forwarded connections, of the form ``func(Channel, (str, int), (str, int))``. 
:return: the port number (`int`) allocated by the server :raises: `.SSHException` -- if the server refused the TCP forward request """ if not self.active: raise SSHException('SSH session not active') port = int(port) response = self.global_request( 'tcpip-forward', (address, port), wait=True ) if response is None: raise SSHException('TCP forwarding request denied') if port == 0: port = response.get_int() if handler is None: def default_handler(channel, src_addr, dest_addr_port): # src_addr, src_port = src_addr_port # dest_addr, dest_port = dest_addr_port self._queue_incoming_channel(channel) handler = default_handler self._tcp_handler = handler return port def cancel_port_forward(self, address, port): """ Ask the server to cancel a previous port-forwarding request. No more connections to the given address & port will be forwarded across this ssh connection. :param str address: the address to stop forwarding :param int port: the port to stop forwarding """ if not self.active: return self._tcp_handler = None self.global_request('cancel-tcpip-forward', (address, port), wait=True) def open_sftp_client(self): """ Create an SFTP client channel from an open transport. On success, an SFTP session will be opened with the remote host, and a new `.SFTPClient` object will be returned. :return: a new `.SFTPClient` referring to an sftp session (channel) across this transport """ return SFTPClient.from_transport(self) def send_ignore(self, byte_count=None): """ Send a junk packet across the encrypted link. This is sometimes used to add "noise" to a connection to confuse would-be attackers. It can also be used as a keep-alive for long lived connections traversing firewalls. :param int byte_count: the number of random bytes to send in the payload of the ignored packet -- defaults to a random number from 10 to 41. 
""" m = Message() m.add_byte(cMSG_IGNORE) if byte_count is None: byte_count = (byte_ord(os.urandom(1)) % 32) + 10 m.add_bytes(os.urandom(byte_count)) self._send_user_message(m) def renegotiate_keys(self): """ Force this session to switch to new keys. Normally this is done automatically after the session hits a certain number of packets or bytes sent or received, but this method gives you the option of forcing new keys whenever you want. Negotiating new keys causes a pause in traffic both ways as the two sides swap keys and do computations. This method returns when the session has switched to new keys. :raises: `.SSHException` -- if the key renegotiation failed (which causes the session to end) """ self.completion_event = threading.Event() self._send_kex_init() while True: self.completion_event.wait(0.1) if not self.active: e = self.get_exception() if e is not None: raise e raise SSHException('Negotiation failed.') if self.completion_event.is_set(): break return def set_keepalive(self, interval): """ Turn on/off keepalive packets (default is off). If this is set, after ``interval`` seconds without sending any data over the connection, a "keepalive" packet will be sent (and ignored by the remote host). This can be useful to keep connections alive over a NAT, for example. :param int interval: seconds to wait before sending a keepalive packet (or 0 to disable keepalives). """ def _request(x=weakref.proxy(self)): return x.global_request('keepalive@lag.net', wait=False) self.packetizer.set_keepalive(interval, _request) def global_request(self, kind, data=None, wait=True): """ Make a global request to the remote host. These are normally extensions to the SSH2 protocol. :param str kind: name of the request. :param tuple data: an optional tuple containing additional data to attach to the request. :param bool wait: ``True`` if this method should not return until a response is received; ``False`` otherwise. 
:return: a `.Message` containing possible additional data if the request was successful (or an empty `.Message` if ``wait`` was ``False``); ``None`` if the request was denied. """ if wait: self.completion_event = threading.Event() m = Message() m.add_byte(cMSG_GLOBAL_REQUEST) m.add_string(kind) m.add_boolean(wait) if data is not None: m.add(*data) self._log(DEBUG, 'Sending global request "%s"' % kind) self._send_user_message(m) if not wait: return None while True: self.completion_event.wait(0.1) if not self.active: return None if self.completion_event.is_set(): break return self.global_response def accept(self, timeout=None): """ Return the next channel opened by the client over this transport, in server mode. If no channel is opened before the given timeout, ``None`` is returned. :param int timeout: seconds to wait for a channel, or ``None`` to wait forever :return: a new `.Channel` opened by the client """ self.lock.acquire() try: if len(self.server_accepts) > 0: chan = self.server_accepts.pop(0) else: self.server_accept_cv.wait(timeout) if len(self.server_accepts) > 0: chan = self.server_accepts.pop(0) else: # timeout chan = None finally: self.lock.release() return chan def connect( self, hostkey=None, username='', password=None, pkey=None, gss_host=None, gss_auth=False, gss_kex=False, gss_deleg_creds=True, ): """ Negotiate an SSH2 session, and optionally verify the server's host key and authenticate using a password or private key. This is a shortcut for `start_client`, `get_remote_server_key`, and `Transport.auth_password` or `Transport.auth_publickey`. Use those methods if you want more control. You can use this method immediately after creating a Transport to negotiate encryption with a server. If it fails, an exception will be thrown. On success, the method will return cleanly, and an encrypted session exists. You may immediately call `open_channel` or `open_session` to get a `.Channel` object, which is used for data transfer. .. 
note:: If you fail to supply a password or private key, this method may succeed, but a subsequent `open_channel` or `open_session` call may fail because you haven't authenticated yet. :param .PKey hostkey: the host key expected from the server, or ``None`` if you don't want to do host key verification. :param str username: the username to authenticate as. :param str password: a password to use for authentication, if you want to use password authentication; otherwise ``None``. :param .PKey pkey: a private key to use for authentication, if you want to use private key authentication; otherwise ``None``. :param str gss_host: The target's name in the kerberos database. Default: hostname :param bool gss_auth: ``True`` if you want to use GSS-API authentication. :param bool gss_kex: Perform GSS-API Key Exchange and user authentication. :param bool gss_deleg_creds: Whether to delegate GSS-API client credentials. :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key supplied by the server is incorrect, or authentication fails. """ if hostkey is not None: self._preferred_keys = [hostkey.get_name()] self.start_client() # check host key if we were given one # If GSS-API Key Exchange was performed, we are not required to check # the host key. if (hostkey is not None) and not gss_kex: key = self.get_remote_server_key() if ( key.get_name() != hostkey.get_name() or key.asbytes() != hostkey.asbytes() ): self._log(DEBUG, 'Bad host key from server') self._log(DEBUG, 'Expected: %s: %s' % ( hostkey.get_name(), repr(hostkey.asbytes())) ) self._log(DEBUG, 'Got : %s: %s' % ( key.get_name(), repr(key.asbytes())) ) raise SSHException('Bad host key from server') self._log(DEBUG, 'Host key verified (%s)' % hostkey.get_name()) if (pkey is not None) or (password is not None) or gss_auth or gss_kex: if gss_auth: self._log(DEBUG, 'Attempting GSS-API auth... 
(gssapi-with-mic)') # noqa self.auth_gssapi_with_mic(username, gss_host, gss_deleg_creds) elif gss_kex: self._log(DEBUG, 'Attempting GSS-API auth... (gssapi-keyex)') self.auth_gssapi_keyex(username) elif pkey is not None: self._log(DEBUG, 'Attempting public-key auth...') self.auth_publickey(username, pkey) else: self._log(DEBUG, 'Attempting password auth...') self.auth_password(username, password) return def get_exception(self): """ Return any exception that happened during the last server request. This can be used to fetch more specific error information after using calls like `start_client`. The exception (if any) is cleared after this call. :return: an exception, or ``None`` if there is no stored exception. .. versionadded:: 1.1 """ self.lock.acquire() try: e = self.saved_exception self.saved_exception = None return e finally: self.lock.release() def set_subsystem_handler(self, name, handler, *larg, **kwarg): """ Set the handler class for a subsystem in server mode. If a request for this subsystem is made on an open ssh channel later, this handler will be constructed and called -- see `.SubsystemHandler` for more detailed documentation. Any extra parameters (including keyword arguments) are saved and passed to the `.SubsystemHandler` constructor later. :param str name: name of the subsystem. :param handler: subclass of `.SubsystemHandler` that handles this subsystem. """ try: self.lock.acquire() self.subsystem_table[name] = (handler, larg, kwarg) finally: self.lock.release() def is_authenticated(self): """ Return true if this session is active and authenticated. :return: True if the session is still open and has been authenticated successfully; False if authentication failed and/or the session is closed. """ return ( self.active and self.auth_handler is not None and self.auth_handler.is_authenticated() ) def get_username(self): """ Return the username this connection is authenticated for. 
If the session is not authenticated (or authentication failed), this method returns ``None``. :return: username that was authenticated (a `str`), or ``None``. """ if not self.active or (self.auth_handler is None): return None return self.auth_handler.get_username() def get_banner(self): """ Return the banner supplied by the server upon connect. If no banner is supplied, this method returns ``None``. :returns: server supplied banner (`str`), or ``None``. .. versionadded:: 1.13 """ if not self.active or (self.auth_handler is None): return None return self.auth_handler.banner def auth_none(self, username): """ Try to authenticate to the server using no authentication at all. This will almost always fail. It may be useful for determining the list of authentication types supported by the server, by catching the `.BadAuthenticationType` exception raised. :param str username: the username to authenticate as :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if "none" authentication isn't allowed by the server for this user :raises: `.SSHException` -- if the authentication failed due to a network error .. versionadded:: 1.5 """ if (not self.active) or (not self.initial_kex_done): raise SSHException('No existing session') my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_none(username, my_event) return self.auth_handler.wait_for_response(my_event) def auth_password(self, username, password, event=None, fallback=True): """ Authenticate to the server using a password. The username and password are sent over an encrypted link. If an ``event`` is passed in, this method will return immediately, and the event will be triggered once authentication succeeds or fails. On success, `is_authenticated` will return ``True``. On failure, you may use `get_exception` to get more detailed error information. 
Since 1.1, if no event is passed, this method will block until the authentication succeeds or fails. On failure, an exception is raised. Otherwise, the method simply returns. Since 1.5, if no event is passed and ``fallback`` is ``True`` (the default), if the server doesn't support plain password authentication but does support so-called "keyboard-interactive" mode, an attempt will be made to authenticate using this interactive mode. If it fails, the normal exception will be thrown as if the attempt had never been made. This is useful for some recent Gentoo and Debian distributions, which turn off plain password authentication in a misguided belief that interactive authentication is "more secure". (It's not.) If the server requires multi-step authentication (which is very rare), this method will return a list of auth types permissible for the next step. Otherwise, in the normal case, an empty list is returned. :param str username: the username to authenticate as :param basestring password: the password to authenticate with :param .threading.Event event: an event to trigger when the authentication attempt is complete (whether it was successful or not) :param bool fallback: ``True`` if an attempt at an automated "interactive" password auth should be made if the server doesn't support normal password auth :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if password authentication isn't allowed by the server for this user (and no event was passed in) :raises: `.AuthenticationException` -- if the authentication failed (and no event was passed in) :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to send the password unless we're on a secure # link raise SSHException('No existing session') if event is None: my_event = threading.Event() else: my_event = event self.auth_handler = AuthHandler(self) 
self.auth_handler.auth_password(username, password, my_event) if event is not None: # caller wants to wait for event themselves return [] try: return self.auth_handler.wait_for_response(my_event) except BadAuthenticationType as e: # if password auth isn't allowed, but keyboard-interactive *is*, # try to fudge it if not fallback or ('keyboard-interactive' not in e.allowed_types): raise try: def handler(title, instructions, fields): if len(fields) > 1: raise SSHException('Fallback authentication failed.') if len(fields) == 0: # for some reason, at least on os x, a 2nd request will # be made with zero fields requested. maybe it's just # to try to fake out automated scripting of the exact # type we're doing here. *shrug* :) return [] return [password] return self.auth_interactive(username, handler) except SSHException: # attempt failed; just raise the original exception raise e def auth_publickey(self, username, key, event=None): """ Authenticate to the server using a private key. The key is used to sign data from the server, so it must include the private part. If an ``event`` is passed in, this method will return immediately, and the event will be triggered once authentication succeeds or fails. On success, `is_authenticated` will return ``True``. On failure, you may use `get_exception` to get more detailed error information. Since 1.1, if no event is passed, this method will block until the authentication succeeds or fails. On failure, an exception is raised. Otherwise, the method simply returns. If the server requires multi-step authentication (which is very rare), this method will return a list of auth types permissible for the next step. Otherwise, in the normal case, an empty list is returned. 
:param str username: the username to authenticate as :param .PKey key: the private key to authenticate with :param .threading.Event event: an event to trigger when the authentication attempt is complete (whether it was successful or not) :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if public-key authentication isn't allowed by the server for this user (and no event was passed in) :raises: `.AuthenticationException` -- if the authentication failed (and no event was passed in) :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link raise SSHException('No existing session') if event is None: my_event = threading.Event() else: my_event = event self.auth_handler = AuthHandler(self) self.auth_handler.auth_publickey(username, key, my_event) if event is not None: # caller wants to wait for event themselves return [] return self.auth_handler.wait_for_response(my_event) def auth_interactive(self, username, handler, submethods=''): """ Authenticate to the server interactively. A handler is used to answer arbitrary questions from the server. On many servers, this is just a dumb wrapper around PAM. This method will block until the authentication succeeds or fails, peroidically calling the handler asynchronously to get answers to authentication questions. The handler may be called more than once if the server continues to ask questions. The handler is expected to be a callable that will handle calls of the form: ``handler(title, instructions, prompt_list)``. The ``title`` is meant to be a dialog-window title, and the ``instructions`` are user instructions (both are strings). ``prompt_list`` will be a list of prompts, each prompt being a tuple of ``(str, bool)``. The string is the prompt and the boolean indicates whether the user text should be echoed. 
A sample call would thus be:
``handler('title', 'instructions', [('Password:', False)])``.

The handler should return a list or tuple of answers to the server's
questions.

If the server requires multi-step authentication (which is very rare),
this method will return a list of auth types permissible for the next
step.  Otherwise, in the normal case, an empty list is returned.

:param str username: the username to authenticate as
:param callable handler: a handler for responding to server questions
:param str submethods: a string list of desired submethods (optional)
:return:
    `list` of auth types permissible for the next stage of
    authentication (normally empty).

:raises: `.BadAuthenticationType` -- if public-key authentication isn't
    allowed by the server for this user
:raises: `.AuthenticationException` -- if the authentication failed
:raises: `.SSHException` -- if there was a network error

.. versionadded:: 1.5
"""
if (not self.active) or (not self.initial_kex_done):
    # we should never try to authenticate unless we're on a secure link
    raise SSHException('No existing session')
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_interactive(
    username, handler, my_event, submethods
)
return self.auth_handler.wait_for_response(my_event)


def auth_interactive_dumb(self, username, handler=None, submethods=''):
    """
    Authenticate to the server interactively but dumber.

    Just print the prompt and/or instructions to stdout and send back
    the response.  This is good for situations where partial auth is
    achieved by key and then the user has to enter a 2fac token.

    :param str username: the username to authenticate as
    :param callable handler:
        a handler for responding to server questions; if ``None``, a
        default handler that prompts on stdout and reads from stdin is
        used
    :param str submethods: a string list of desired submethods (optional)
    """
    if not handler:
        def handler(title, instructions, prompt_list):
            answers = []
            if title:
                print(title.strip())
            if instructions:
                print(instructions.strip())
            for prompt, show_input in prompt_list:
                print(prompt.strip(), end=' ')
                answers.append(input())
            return answers
    return self.auth_interactive(username, handler, submethods)


def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds):
    """
    Authenticate to the Server using GSS-API / SSPI.

    :param str username: The username to authenticate as
    :param str gss_host: The target host
    :param bool gss_deleg_creds: Delegate credentials or not
    :return: list of auth types permissible for the next stage of
             authentication (normally empty)
    :rtype: list
    :raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't
        allowed by the server (and no event was passed in)
    :raises: `.AuthenticationException` -- if the authentication failed
        (and no event was passed in)
    :raises: `.SSHException` -- if there was a network error
    """
    if (not self.active) or (not self.initial_kex_done):
        # we should never try to authenticate unless we're on a secure link
        raise SSHException('No existing session')
    my_event = threading.Event()
    self.auth_handler = AuthHandler(self)
    self.auth_handler.auth_gssapi_with_mic(
        username, gss_host, gss_deleg_creds, my_event
    )
    return self.auth_handler.wait_for_response(my_event)


def auth_gssapi_keyex(self, username):
    """
    Authenticate to the server with GSS-API/SSPI if GSS-API kex is in
    use.

    :param str username: The username to authenticate as.
    :returns:
        a `list` of auth types permissible for the next stage of
        authentication (normally empty)
    :raises: `.BadAuthenticationType` --
        if GSS-API Key Exchange was not performed (and no event was
        passed in)
    :raises: `.AuthenticationException` --
        if the authentication failed (and no event was passed in)
    :raises: `.SSHException` -- if there was a network error
    """
    if (not self.active) or (not self.initial_kex_done):
        # we should never try to authenticate unless we're on a secure link
        raise SSHException('No existing session')
    my_event = threading.Event()
    self.auth_handler = AuthHandler(self)
    self.auth_handler.auth_gssapi_keyex(username, my_event)
    return self.auth_handler.wait_for_response(my_event)


def set_log_channel(self, name):
    """
    Set the channel for this transport's logging.  The default is
    ``"paramiko.transport"`` but it can be set to anything you want.
    (See the `.logging` module for more info.)  SSH Channels will log
    to a sub-channel of the one specified.

    :param str name: new channel name for logging

    .. versionadded:: 1.1
    """
    self.log_name = name
    self.logger = util.get_logger(name)
    self.packetizer.set_log(self.logger)


def get_log_channel(self):
    """
    Return the channel name used for this transport's logging.

    :return: channel name as a `str`

    .. versionadded:: 1.2
    """
    return self.log_name


def set_hexdump(self, hexdump):
    """
    Turn on/off logging a hex dump of protocol traffic at DEBUG level
    in the logs.  Normally you would want this off (which is the
    default), but if you are debugging something, it may be useful.

    :param bool hexdump:
        ``True`` to log protocol traffic (in hex) to the log; ``False``
        otherwise.
    """
    self.packetizer.set_hexdump(hexdump)


def get_hexdump(self):
    """
    Return ``True`` if the transport is currently logging hex dumps of
    protocol traffic.

    :return: ``True`` if hex dumps are being logged, else ``False``.

    .. versionadded:: 1.4
    """
    return self.packetizer.get_hexdump()


def use_compression(self, compress=True):
    """
    Turn on/off compression.  This will only have an effect before
    starting the transport (ie before calling `connect`, etc).  By
    default, compression is off since it negatively affects interactive
    sessions.

    :param bool compress:
        ``True`` to ask the remote client/server to compress traffic;
        ``False`` to refuse compression

    .. versionadded:: 1.5.2
    """
    if compress:
        self._preferred_compression = ('zlib@openssh.com', 'zlib', 'none')
    else:
        self._preferred_compression = ('none',)


def getpeername(self):
    """
    Return the address of the remote side of this Transport, if
    possible.

    This is effectively a wrapper around ``getpeername`` on the
    underlying socket.  If the socket-like object has no ``getpeername``
    method, then ``("unknown", 0)`` is returned.

    :return:
        the address of the remote host, if known, as a ``(str, int)``
        tuple.
    """
    gp = getattr(self.sock, 'getpeername', None)
    if gp is None:
        return 'unknown', 0
    return gp()


def stop_thread(self):
    # Shut down the transport worker thread and wait for it to exit.
    self.active = False
    self.packetizer.close()
    if PY2:
        # Original join logic; #520 doesn't appear commonly present under
        # Python 2.
        while self.is_alive() and self is not threading.current_thread():
            self.join(10)
    else:
        # Keep trying to join() our main thread, quickly, until:
        # * We join()ed successfully (self.is_alive() == False)
        # * Or it looks like we've hit issue #520 (socket.recv hitting
        #   some race condition preventing it from timing out correctly),
        #   wherein our socket and packetizer are both closed (but where
        #   we'd otherwise be sitting forever on that recv()).
        while (
            self.is_alive() and
            self is not threading.current_thread() and
            not self.sock._closed and
            not self.packetizer.closed
        ):
            self.join(0.1)


# internals...
def _log(self, level, msg, *args):
    """Log helper: accepts either one message (with % args) or a list of
    pre-formatted lines."""
    if issubclass(type(msg), list):
        for m in msg:
            self.logger.log(level, m)
    else:
        self.logger.log(level, msg, *args)


def _get_modulus_pack(self):
    """used by KexGex to find primes for group exchange"""
    return self._modulus_pack


def _next_channel(self):
    """you are holding the lock"""
    chanid = self._channel_counter
    # scan for the next free 24-bit channel id
    while self._channels.get(chanid) is not None:
        self._channel_counter = (self._channel_counter + 1) & 0xffffff
        chanid = self._channel_counter
    self._channel_counter = (self._channel_counter + 1) & 0xffffff
    return chanid


def _unlink_channel(self, chanid):
    """used by a Channel to remove itself from the active channel list"""
    self._channels.delete(chanid)


def _send_message(self, data):
    self.packetizer.send_message(data)


def _send_user_message(self, data):
    """
    send a message, but block if we're in key negotiation.  this is used
    for user-initiated requests.
    """
    start = time.time()
    while True:
        self.clear_to_send.wait(0.1)
        if not self.active:
            self._log(DEBUG, 'Dropping user packet because connection is dead.')  # noqa
            return
        self.clear_to_send_lock.acquire()
        if self.clear_to_send.is_set():
            break
        self.clear_to_send_lock.release()
        if time.time() > start + self.clear_to_send_timeout:
            raise SSHException('Key-exchange timed out waiting for key negotiation')  # noqa
    try:
        self._send_message(data)
    finally:
        self.clear_to_send_lock.release()


def _set_K_H(self, k, h):
    """
    Used by a kex obj to set the K (root key) and H (exchange hash).
    """
    self.K = k
    self.H = h
    if self.session_id is None:
        self.session_id = h


def _expect_packet(self, *ptypes):
    """
    Used by a kex obj to register the next packet type it expects to see.
    """
    self._expected_packet = tuple(ptypes)


def _verify_key(self, host_key, sig):
    """Verify the server's signature over the exchange hash with its
    advertised host key; stores the key on success."""
    key = self._key_info[self.host_key_type](Message(host_key))
    if key is None:
        raise SSHException('Unknown host key type')
    if not key.verify_ssh_sig(self.H, Message(sig)):
        raise SSHException('Signature verification (%s) failed.'
                           % self.host_key_type)  # noqa
    self.host_key = key


def _compute_key(self, id, nbytes):
    """id is 'A' - 'F' for the various keys used by ssh"""
    m = Message()
    m.add_mpint(self.K)
    m.add_bytes(self.H)
    m.add_byte(b(id))
    m.add_bytes(self.session_id)
    # Fallback to SHA1 for kex engines that fail to specify a hash
    # algorithm, or for e.g. transport tests that don't run kexinit.
    hash_algo = getattr(self.kex_engine, 'hash_algo', None)
    hash_select_msg = "kex engine %s specified hash_algo %r" % (
        self.kex_engine.__class__.__name__, hash_algo
    )
    if hash_algo is None:
        hash_algo = sha1
        hash_select_msg += ", falling back to sha1"
    if not hasattr(self, '_logged_hash_selection'):
        self._log(DEBUG, hash_select_msg)
        setattr(self, '_logged_hash_selection', True)
    # keep hashing until we have at least nbytes of key material
    out = sofar = hash_algo(m.asbytes()).digest()
    while len(out) < nbytes:
        m = Message()
        m.add_mpint(self.K)
        m.add_bytes(self.H)
        m.add_bytes(sofar)
        digest = hash_algo(m.asbytes()).digest()
        out += digest
        sofar += digest
    return out[:nbytes]


def _get_cipher(self, name, key, iv, operation):
    """Build an encryptor or decryptor context for the named cipher."""
    if name not in self._cipher_info:
        raise SSHException('Unknown client cipher ' + name)
    else:
        cipher = Cipher(
            self._cipher_info[name]['class'](key),
            self._cipher_info[name]['mode'](iv),
            backend=default_backend(),
        )
        if operation is self._ENCRYPT:
            return cipher.encryptor()
        else:
            return cipher.decryptor()


def _set_forward_agent_handler(self, handler):
    if handler is None:
        def default_handler(channel):
            self._queue_incoming_channel(channel)
        self._forward_agent_handler = default_handler
    else:
        self._forward_agent_handler = handler


def _set_x11_handler(self, handler):
    # only called if a channel has turned on x11 forwarding
    if handler is None:
        # by default, use the same mechanism as accept()
        def default_handler(channel, src_addr_port):
            self._queue_incoming_channel(channel)
        self._x11_handler = default_handler
    else:
        self._x11_handler = handler


def _queue_incoming_channel(self, channel):
    self.lock.acquire()
    try:
        self.server_accepts.append(channel)
        self.server_accept_cv.notify()
    finally:
        self.lock.release()


def _sanitize_window_size(self, window_size):
    if window_size is None:
        window_size = self.default_window_size
    return clamp_value(MIN_WINDOW_SIZE, window_size, MAX_WINDOW_SIZE)


def _sanitize_packet_size(self, max_packet_size):
    if max_packet_size is None:
        max_packet_size = self.default_max_packet_size
    # NOTE(review): upper bound uses MAX_WINDOW_SIZE rather than a
    # packet-size constant -- preserved as-is; confirm before "fixing".
    return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE)


def run(self):
    # (use the exposed "run" method, because if we specify a thread target
    # of a private method, threading.Thread will keep a reference to it
    # indefinitely, creating a GC cycle and not letting Transport ever be
    # GC'd. it's a bug in Thread.)

    # Hold reference to 'sys' so we can test sys.modules to detect
    # interpreter shutdown.
    self.sys = sys

    # active=True occurs before the thread is launched, to avoid a race
    _active_threads.append(self)
    # BUGFIX: the mask was written as the bare name ``xffffffff``
    # (a NameError at runtime); it must be the hex literal 0xffffffff.
    tid = hex(long(id(self)) & 0xffffffff)
    if self.server_mode:
        self._log(DEBUG, 'starting thread (server mode): %s' % tid)
    else:
        self._log(DEBUG, 'starting thread (client mode): %s' % tid)
    try:
        try:
            self.packetizer.write_all(b(self.local_version + '\r\n'))
            self._log(DEBUG, 'Local version/idstring: %s' % self.local_version)  # noqa
            self._check_banner()
            # The above is actually very much part of the handshake, but
            # sometimes the banner can be read but the machine is not
            # responding, for example when the remote ssh daemon is loaded
            # in to memory but we can not read from the disk/spawn a new
            # shell.
            # Make sure we can specify a timeout for the initial handshake.
            # Re-use the banner timeout for now.
            self.packetizer.start_handshake(self.handshake_timeout)
            self._send_kex_init()
            self._expect_packet(MSG_KEXINIT)

            while self.active:
                if self.packetizer.need_rekey() and not self.in_kex:
                    self._send_kex_init()
                try:
                    ptype, m = self.packetizer.read_message()
                except NeedRekeyException:
                    continue
                if ptype == MSG_IGNORE:
                    continue
                elif ptype == MSG_DISCONNECT:
                    self._parse_disconnect(m)
                    self.active = False
                    self.packetizer.close()
                    break
                elif ptype == MSG_DEBUG:
                    self._parse_debug(m)
                    continue
                if len(self._expected_packet) > 0:
                    if ptype not in self._expected_packet:
                        raise SSHException('Expecting packet from %r, got %d' % (self._expected_packet, ptype))  # noqa
                    self._expected_packet = tuple()
                    # 30-41 is the range reserved for kex-method-specific
                    # messages
                    if (ptype >= 30) and (ptype <= 41):
                        self.kex_engine.parse_next(ptype, m)
                        continue

                if ptype in self._handler_table:
                    self._handler_table[ptype](self, m)
                elif ptype in self._channel_handler_table:
                    chanid = m.get_int()
                    chan = self._channels.get(chanid)
                    if chan is not None:
                        self._channel_handler_table[ptype](chan, m)
                    elif chanid in self.channels_seen:
                        self._log(DEBUG, 'Ignoring message for dead channel %d' % chanid)  # noqa
                    else:
                        self._log(ERROR, 'Channel request for unknown channel %d' % chanid)  # noqa
                        self.active = False
                        self.packetizer.close()
                elif (
                    self.auth_handler is not None and
                    ptype in self.auth_handler._handler_table
                ):
                    handler = self.auth_handler._handler_table[ptype]
                    handler(self.auth_handler, m)
                    if len(self._expected_packet) > 0:
                        continue
                else:
                    self._log(WARNING, 'Oops, unhandled type %d' % ptype)
                    msg = Message()
                    msg.add_byte(cMSG_UNIMPLEMENTED)
                    msg.add_int(m.seqno)
                    self._send_message(msg)
                self.packetizer.complete_handshake()
        except SSHException as e:
            self._log(ERROR, 'Exception: ' + str(e))
            self._log(ERROR, util.tb_strings())
            self.saved_exception = e
        except EOFError as e:
            self._log(DEBUG, 'EOF in transport thread')
            self.saved_exception = e
        except socket.error as e:
            if type(e.args) is tuple:
                if e.args:
                    emsg = '%s (%d)' % (e.args[1], e.args[0])
                else:
                    # empty tuple, e.g. socket.timeout
                    emsg = str(e) or repr(e)
            else:
                emsg = e.args
            self._log(ERROR, 'Socket exception: ' + emsg)
            self.saved_exception = e
        except Exception as e:
            self._log(ERROR, 'Unknown exception: ' + str(e))
            self._log(ERROR, util.tb_strings())
            self.saved_exception = e
        _active_threads.remove(self)
        for chan in list(self._channels.values()):
            chan._unlink()
        if self.active:
            self.active = False
            self.packetizer.close()
            if self.completion_event is not None:
                self.completion_event.set()
            if self.auth_handler is not None:
                self.auth_handler.abort()
            for event in self.channel_events.values():
                event.set()
            try:
                self.lock.acquire()
                self.server_accept_cv.notify()
            finally:
                self.lock.release()
        self.sock.close()
    except:
        # Don't raise spurious 'NoneType has no attribute X' errors when we
        # wake up during interpreter shutdown. Or rather -- raise
        # everything *if* sys.modules (used as a convenient sentinel)
        # appears to still exist.
        if self.sys.modules is not None:
            raise


def _log_agreement(self, which, local, remote):
    # Log useful, non-duplicative line re: an agreed-upon algorithm.
    # Old code implied algorithms could be asymmetrical (different for
    # inbound vs outbound) so we preserve that possibility.
    msg = "{0} agreed: ".format(which)
    if local == remote:
        msg += local
    else:
        msg += "local={0}, remote={1}".format(local, remote)
    self._log(DEBUG, msg)


# protocol stages


def _negotiate_keys(self, m):
    # throws SSHException on anything unusual
    self.clear_to_send_lock.acquire()
    try:
        self.clear_to_send.clear()
    finally:
        self.clear_to_send_lock.release()
    if self.local_kex_init is None:
        # remote side wants to renegotiate
        self._send_kex_init()
    self._parse_kex_init(m)
    self.kex_engine.start_kex()


def _check_banner(self):
    """Read and validate the remote SSH identification banner; raises
    SSHException on an unreadable or incompatible banner."""
    # this is slow, but we only have to do it once
    for i in range(100):
        # give them 15 seconds for the first line, then just 2 seconds
        # each additional line.  (some sites have very high latency.)
        if i == 0:
            timeout = self.banner_timeout
        else:
            timeout = 2
        try:
            buf = self.packetizer.readline(timeout)
        except ProxyCommandFailure:
            raise
        except Exception as e:
            raise SSHException(
                'Error reading SSH protocol banner' + str(e)
            )
        if buf[:4] == 'SSH-':
            break
        self._log(DEBUG, 'Banner: ' + buf)
    if buf[:4] != 'SSH-':
        raise SSHException('Indecipherable protocol version "' + buf + '"')
    # save this server version string for later
    self.remote_version = buf
    self._log(DEBUG, 'Remote version/idstring: %s' % buf)
    # pull off any attached comment
    # NOTE: comment used to be stored in a variable and then...never used.
    # since 2003. ca 877cd974b8182d26fa76d566072917ea67b64e67
    i = buf.find(' ')
    if i >= 0:
        buf = buf[:i]
    # parse out version string and make sure it matches
    segs = buf.split('-', 2)
    if len(segs) < 3:
        raise SSHException('Invalid SSH banner')
    version = segs[1]
    client = segs[2]
    if version != '1.99' and version != '2.0':
        msg = 'Incompatible version ({0} instead of 2.0)'
        raise SSHException(msg.format(version))
    msg = 'Connected (version {0}, client {1})'.format(version, client)
    self._log(INFO, msg)


def _send_kex_init(self):
    """
    announce to the other side that we'd like to negotiate keys, and what
    kind of key negotiation we support.
    """
    self.clear_to_send_lock.acquire()
    try:
        self.clear_to_send.clear()
    finally:
        self.clear_to_send_lock.release()
    self.in_kex = True
    if self.server_mode:
        mp_required_prefix = 'diffie-hellman-group-exchange-sha'
        kex_mp = [
            k for k in self._preferred_kex
            if k.startswith(mp_required_prefix)
        ]
        if (self._modulus_pack is None) and (len(kex_mp) > 0):
            # can't do group-exchange if we don't have a pack of potential
            # primes
            pkex = [
                k for k in self.get_security_options().kex
                if not k.startswith(mp_required_prefix)
            ]
            self.get_security_options().kex = pkex
        available_server_keys = list(filter(
            list(self.server_key_dict.keys()).__contains__,
            self._preferred_keys
        ))
    else:
        available_server_keys = self._preferred_keys

    m = Message()
    m.add_byte(cMSG_KEXINIT)
    m.add_bytes(os.urandom(16))
    m.add_list(self._preferred_kex)
    m.add_list(available_server_keys)
    m.add_list(self._preferred_ciphers)
    m.add_list(self._preferred_ciphers)
    m.add_list(self._preferred_macs)
    m.add_list(self._preferred_macs)
    m.add_list(self._preferred_compression)
    m.add_list(self._preferred_compression)
    m.add_string(bytes())
    m.add_string(bytes())
    m.add_boolean(False)
    m.add_int(0)
    # save a copy for later (needed to compute a hash)
    self.local_kex_init = m.asbytes()
    self._send_message(m)


def _parse_kex_init(self, m):
    """Parse the peer's KEXINIT and negotiate kex, host key, cipher,
    MAC, and compression algorithms for both directions."""
    m.get_bytes(16)  # cookie, discarded
    kex_algo_list = m.get_list()
    server_key_algo_list = m.get_list()
    client_encrypt_algo_list = m.get_list()
    server_encrypt_algo_list = m.get_list()
    client_mac_algo_list = m.get_list()
    server_mac_algo_list = m.get_list()
    client_compress_algo_list = m.get_list()
    server_compress_algo_list = m.get_list()
    client_lang_list = m.get_list()
    server_lang_list = m.get_list()
    kex_follows = m.get_boolean()
    m.get_int()  # unused
    self._log(DEBUG,
              'kex algos:' + str(kex_algo_list) +
              ' server key:' + str(server_key_algo_list) +
              ' client encrypt:' + str(client_encrypt_algo_list) +
              ' server encrypt:' + str(server_encrypt_algo_list) +
              ' client mac:' + str(client_mac_algo_list) +
              ' server mac:' + str(server_mac_algo_list) +
              ' client compress:' + str(client_compress_algo_list) +
              ' server compress:' + str(server_compress_algo_list) +
              ' client lang:' + str(client_lang_list) +
              ' server lang:' + str(server_lang_list) +
              ' kex follows?' + str(kex_follows))

    # as a server, we pick the first item in the client's list that we
    # support.
    # as a client, we pick the first item in our list that the server
    # supports.
    if self.server_mode:
        agreed_kex = list(filter(
            self._preferred_kex.__contains__,
            kex_algo_list
        ))
    else:
        agreed_kex = list(filter(
            kex_algo_list.__contains__,
            self._preferred_kex
        ))
    if len(agreed_kex) == 0:
        raise SSHException('Incompatible ssh peer (no acceptable kex algorithm)')  # noqa
    self.kex_engine = self._kex_info[agreed_kex[0]](self)
    self._log(DEBUG, "Kex agreed: %s" % agreed_kex[0])

    if self.server_mode:
        available_server_keys = list(filter(
            list(self.server_key_dict.keys()).__contains__,
            self._preferred_keys
        ))
        agreed_keys = list(filter(
            available_server_keys.__contains__, server_key_algo_list
        ))
    else:
        agreed_keys = list(filter(
            server_key_algo_list.__contains__, self._preferred_keys
        ))
    if len(agreed_keys) == 0:
        raise SSHException('Incompatible ssh peer (no acceptable host key)')  # noqa
    self.host_key_type = agreed_keys[0]
    if self.server_mode and (self.get_server_key() is None):
        raise SSHException('Incompatible ssh peer (can\'t match requested host key type)')  # noqa

    if self.server_mode:
        agreed_local_ciphers = list(filter(
            self._preferred_ciphers.__contains__,
            server_encrypt_algo_list
        ))
        agreed_remote_ciphers = list(filter(
            self._preferred_ciphers.__contains__,
            client_encrypt_algo_list
        ))
    else:
        agreed_local_ciphers = list(filter(
            client_encrypt_algo_list.__contains__,
            self._preferred_ciphers
        ))
        agreed_remote_ciphers = list(filter(
            server_encrypt_algo_list.__contains__,
            self._preferred_ciphers
        ))
    if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0:
        raise SSHException('Incompatible ssh server (no acceptable ciphers)')  # noqa
    self.local_cipher = agreed_local_ciphers[0]
    self.remote_cipher = agreed_remote_ciphers[0]
    self._log_agreement(
        'Cipher', local=self.local_cipher, remote=self.remote_cipher
    )

    if self.server_mode:
        agreed_remote_macs = list(filter(
            self._preferred_macs.__contains__, client_mac_algo_list
        ))
        agreed_local_macs = list(filter(
            self._preferred_macs.__contains__, server_mac_algo_list
        ))
    else:
        agreed_local_macs = list(filter(
            client_mac_algo_list.__contains__, self._preferred_macs
        ))
        agreed_remote_macs = list(filter(
            server_mac_algo_list.__contains__, self._preferred_macs
        ))
    if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0):
        raise SSHException('Incompatible ssh server (no acceptable macs)')
    self.local_mac = agreed_local_macs[0]
    self.remote_mac = agreed_remote_macs[0]
    self._log_agreement(
        'MAC', local=self.local_mac, remote=self.remote_mac
    )

    if self.server_mode:
        agreed_remote_compression = list(filter(
            self._preferred_compression.__contains__,
            client_compress_algo_list
        ))
        agreed_local_compression = list(filter(
            self._preferred_compression.__contains__,
            server_compress_algo_list
        ))
    else:
        agreed_local_compression = list(filter(
            client_compress_algo_list.__contains__,
            self._preferred_compression
        ))
        agreed_remote_compression = list(filter(
            server_compress_algo_list.__contains__,
            self._preferred_compression
        ))
    if (
        len(agreed_local_compression) == 0 or
        len(agreed_remote_compression) == 0
    ):
        msg = 'Incompatible ssh server (no acceptable compression) {0!r} {1!r} {2!r}'  # noqa
        raise SSHException(msg.format(
            agreed_local_compression, agreed_remote_compression,
            self._preferred_compression,
        ))
    self.local_compression = agreed_local_compression[0]
    self.remote_compression = agreed_remote_compression[0]
    self._log_agreement(
        'Compression',
        local=self.local_compression,
        remote=self.remote_compression
    )

    # save for computing hash later...
    # now wait!  openssh has a bug (and others might too) where there are
    # actually some extra bytes (one NUL byte in openssh's case) added to
    # the end of the packet but not parsed.  turns out we need to throw
    # away those bytes because they aren't part of the hash.
    self.remote_kex_init = cMSG_KEXINIT + m.get_so_far()


def _activate_inbound(self):
    """switch on newly negotiated encryption parameters for
    inbound traffic"""
    block_size = self._cipher_info[self.remote_cipher]['block-size']
    if self.server_mode:
        IV_in = self._compute_key('A', block_size)
        key_in = self._compute_key(
            'C', self._cipher_info[self.remote_cipher]['key-size']
        )
    else:
        IV_in = self._compute_key('B', block_size)
        key_in = self._compute_key(
            'D', self._cipher_info[self.remote_cipher]['key-size']
        )
    engine = self._get_cipher(
        self.remote_cipher, key_in, IV_in, self._DECRYPT
    )
    mac_size = self._mac_info[self.remote_mac]['size']
    mac_engine = self._mac_info[self.remote_mac]['class']
    # initial mac keys are done in the hash's natural size (not the
    # potentially truncated transmission size)
    if self.server_mode:
        mac_key = self._compute_key('E', mac_engine().digest_size)
    else:
        mac_key = self._compute_key('F', mac_engine().digest_size)
    self.packetizer.set_inbound_cipher(
        engine, block_size, mac_engine, mac_size, mac_key
    )
    compress_in = self._compression_info[self.remote_compression][1]
    if (
        compress_in is not None and
        (
            self.remote_compression != 'zlib@openssh.com' or
            self.authenticated
        )
    ):
        self._log(DEBUG, 'Switching on inbound compression ...')
        self.packetizer.set_inbound_compressor(compress_in())


def _activate_outbound(self):
    """switch on newly negotiated encryption parameters for
    outbound traffic"""
    m = Message()
    m.add_byte(cMSG_NEWKEYS)
    self._send_message(m)
    block_size = self._cipher_info[self.local_cipher]['block-size']
    if self.server_mode:
        IV_out = self._compute_key('B', block_size)
        key_out = self._compute_key(
            'D', self._cipher_info[self.local_cipher]['key-size'])
    else:
        IV_out = self._compute_key('A', block_size)
        key_out = self._compute_key(
            'C', self._cipher_info[self.local_cipher]['key-size'])
    engine = self._get_cipher(
        self.local_cipher, key_out, IV_out, self._ENCRYPT)
    mac_size = self._mac_info[self.local_mac]['size']
    mac_engine = self._mac_info[self.local_mac]['class']
    # initial mac keys are done in the hash's natural size (not the
    # potentially truncated transmission size)
    if self.server_mode:
        mac_key = self._compute_key('F', mac_engine().digest_size)
    else:
        mac_key = self._compute_key('E', mac_engine().digest_size)
    sdctr = self.local_cipher.endswith('-ctr')
    self.packetizer.set_outbound_cipher(
        engine, block_size, mac_engine, mac_size, mac_key, sdctr)
    compress_out = self._compression_info[self.local_compression][0]
    if (
        compress_out is not None and
        (
            self.local_compression != 'zlib@openssh.com' or
            self.authenticated
        )
    ):
        self._log(DEBUG, 'Switching on outbound compression ...')
        self.packetizer.set_outbound_compressor(compress_out())
    if not self.packetizer.need_rekey():
        self.in_kex = False
    # we always expect to receive NEWKEYS now
    self._expect_packet(MSG_NEWKEYS)


def _auth_trigger(self):
    self.authenticated = True
    # delayed initiation of compression
    if self.local_compression == 'zlib@openssh.com':
        compress_out = self._compression_info[self.local_compression][0]
        self._log(DEBUG, 'Switching on outbound compression ...')
        self.packetizer.set_outbound_compressor(compress_out())
    if self.remote_compression == 'zlib@openssh.com':
        compress_in = self._compression_info[self.remote_compression][1]
        self._log(DEBUG, 'Switching on inbound compression ...')
        self.packetizer.set_inbound_compressor(compress_in())


def _parse_newkeys(self, m):
    self._log(DEBUG, 'Switch to new keys ...')
    self._activate_inbound()
    # can also free a bunch of stuff here
    self.local_kex_init = self.remote_kex_init = None
    self.K = None
    self.kex_engine = None
    if self.server_mode and (self.auth_handler is None):
        # create auth handler for server mode
        self.auth_handler = AuthHandler(self)
    if not self.initial_kex_done:
        # this was the first key exchange
        self.initial_kex_done = True
    # send an event?
    if self.completion_event is not None:
        self.completion_event.set()
    # it's now okay to send data again (if this was a re-key)
    if not self.packetizer.need_rekey():
        self.in_kex = False
    self.clear_to_send_lock.acquire()
    try:
        self.clear_to_send.set()
    finally:
        self.clear_to_send_lock.release()
    return


def _parse_disconnect(self, m):
    code = m.get_int()
    desc = m.get_text()
    self._log(INFO, 'Disconnect (code %d): %s' % (code, desc))


def _parse_global_request(self, m):
    kind = m.get_text()
    self._log(DEBUG, 'Received global request "%s"' % kind)
    want_reply = m.get_boolean()
    if not self.server_mode:
        self._log(
            DEBUG,
            'Rejecting "%s" global request from server.' % kind
        )
        ok = False
    elif kind == 'tcpip-forward':
        address = m.get_text()
        port = m.get_int()
        ok = self.server_object.check_port_forward_request(address, port)
        if ok:
            ok = (ok,)
    elif kind == 'cancel-tcpip-forward':
        address = m.get_text()
        port = m.get_int()
        self.server_object.cancel_port_forward_request(address, port)
        ok = True
    else:
        ok = self.server_object.check_global_request(kind, m)
    extra = ()
    if type(ok) is tuple:
        extra = ok
        ok = True
    if want_reply:
        msg = Message()
        if ok:
            msg.add_byte(cMSG_REQUEST_SUCCESS)
            msg.add(*extra)
        else:
            msg.add_byte(cMSG_REQUEST_FAILURE)
        self._send_message(msg)


def _parse_request_success(self, m):
    self._log(DEBUG, 'Global request successful.')
    self.global_response = m
    if self.completion_event is not None:
        self.completion_event.set()


def _parse_request_failure(self, m):
    self._log(DEBUG, 'Global request denied.')
    self.global_response = None
    if self.completion_event is not None:
        self.completion_event.set()


def _parse_channel_open_success(self, m):
    chanid = m.get_int()
    server_chanid = m.get_int()
    server_window_size = m.get_int()
    server_max_packet_size = m.get_int()
    chan = self._channels.get(chanid)
    if chan is None:
        self._log(WARNING, 'Success for unrequested channel!  [??]')
        return
    self.lock.acquire()
    try:
        chan._set_remote_channel(
            server_chanid, server_window_size, server_max_packet_size)
        self._log(DEBUG, 'Secsh channel %d opened.' % chanid)
        if chanid in self.channel_events:
            self.channel_events[chanid].set()
            del self.channel_events[chanid]
    finally:
        self.lock.release()
    return


def _parse_channel_open_failure(self, m):
    chanid = m.get_int()
    reason = m.get_int()
    reason_str = m.get_text()
    m.get_text()  # ignored language
    reason_text = CONNECTION_FAILED_CODE.get(reason, '(unknown code)')
    self._log(
        ERROR,
        'Secsh channel %d open FAILED: %s: %s' % (
            chanid, reason_str, reason_text)
    )
    self.lock.acquire()
    try:
        self.saved_exception = ChannelException(reason, reason_text)
        if chanid in self.channel_events:
            self._channels.delete(chanid)
            if chanid in self.channel_events:
                self.channel_events[chanid].set()
                del self.channel_events[chanid]
    finally:
        self.lock.release()
    return


def _parse_channel_open(self, m):
    """Handle an incoming channel-open request (agent, x11, forwarded
    tcpip, or -- in server mode -- arbitrary kinds)."""
    kind = m.get_text()
    chanid = m.get_int()
    initial_window_size = m.get_int()
    max_packet_size = m.get_int()
    reject = False
    if (
        kind == 'auth-agent@openssh.com' and
        self._forward_agent_handler is not None
    ):
        self._log(DEBUG, 'Incoming forward agent connection')
        self.lock.acquire()
        try:
            my_chanid = self._next_channel()
        finally:
            self.lock.release()
    elif (kind == 'x11') and (self._x11_handler is not None):
        origin_addr = m.get_text()
        origin_port = m.get_int()
        self._log(
            DEBUG,
            'Incoming x11 connection from %s:%d' % (
                origin_addr, origin_port)
        )
        self.lock.acquire()
        try:
            my_chanid = self._next_channel()
        finally:
            self.lock.release()
    elif (kind == 'forwarded-tcpip') and (self._tcp_handler is not None):
        server_addr = m.get_text()
        server_port = m.get_int()
        origin_addr = m.get_text()
        origin_port = m.get_int()
        self._log(
            DEBUG,
            'Incoming tcp forwarded connection from %s:%d' % (
                origin_addr, origin_port)
        )
        self.lock.acquire()
        try:
            my_chanid = self._next_channel()
        finally:
            self.lock.release()
    elif not self.server_mode:
        self._log(
            DEBUG,
            'Rejecting "%s" channel request from server.' % kind)
        reject = True
        reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
    else:
        self.lock.acquire()
        try:
            my_chanid = self._next_channel()
        finally:
            self.lock.release()
        if kind == 'direct-tcpip':
            # handle direct-tcpip requests coming from the client
            dest_addr = m.get_text()
            dest_port = m.get_int()
            origin_addr = m.get_text()
            origin_port = m.get_int()
            reason = self.server_object.check_channel_direct_tcpip_request(
                my_chanid,
                (origin_addr, origin_port),
                (dest_addr, dest_port)
            )
        else:
            reason = self.server_object.check_channel_request(
                kind, my_chanid)
        if reason != OPEN_SUCCEEDED:
            self._log(
                DEBUG,
                'Rejecting "%s" channel request from client.' % kind)
            reject = True
    if reject:
        msg = Message()
        msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE)
        msg.add_int(chanid)
        msg.add_int(reason)
        msg.add_string('')
        msg.add_string('en')
        self._send_message(msg)
        return

    chan = Channel(my_chanid)
    self.lock.acquire()
    try:
        self._channels.put(my_chanid, chan)
        self.channels_seen[my_chanid] = True
        chan._set_transport(self)
        chan._set_window(
            self.default_window_size, self.default_max_packet_size)
        chan._set_remote_channel(
            chanid, initial_window_size, max_packet_size)
    finally:
        self.lock.release()
    m = Message()
    m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS)
    m.add_int(chanid)
    m.add_int(my_chanid)
    m.add_int(self.default_window_size)
    m.add_int(self.default_max_packet_size)
    self._send_message(m)
    self._log(DEBUG, 'Secsh channel %d (%s) opened.', my_chanid, kind)
    if kind == 'auth-agent@openssh.com':
        self._forward_agent_handler(chan)
    elif kind == 'x11':
        self._x11_handler(chan, (origin_addr, origin_port))
    elif kind == 'forwarded-tcpip':
        chan.origin_addr = (origin_addr, origin_port)
        self._tcp_handler(
            chan,
            (origin_addr, origin_port),
            (server_addr, server_port)
        )
    else:
        self._queue_incoming_channel(chan)


def _parse_debug(self, m):
    m.get_boolean()  # always_display
    msg = m.get_string()
    m.get_string()  # language
    self._log(DEBUG, 'Debug msg: {0}'.format(util.safe_string(msg)))


def _get_subsystem_handler(self, name):
    try:
        self.lock.acquire()
        if name not in self.subsystem_table:
            return None, [], {}
        return self.subsystem_table[name]
    finally:
        self.lock.release()


_handler_table = {
    MSG_NEWKEYS: _parse_newkeys,
    MSG_GLOBAL_REQUEST: _parse_global_request,
    MSG_REQUEST_SUCCESS: _parse_request_success,
    MSG_REQUEST_FAILURE: _parse_request_failure,
    MSG_CHANNEL_OPEN_SUCCESS: _parse_channel_open_success,
    MSG_CHANNEL_OPEN_FAILURE: _parse_channel_open_failure,
    MSG_CHANNEL_OPEN: _parse_channel_open,
    MSG_KEXINIT: _negotiate_keys,
}

_channel_handler_table = {
    MSG_CHANNEL_SUCCESS: Channel._request_success,
    MSG_CHANNEL_FAILURE: Channel._request_failed,
    MSG_CHANNEL_DATA: Channel._feed,
    MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended,
    MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust,
    MSG_CHANNEL_REQUEST: Channel._handle_request,
    MSG_CHANNEL_EOF: Channel._handle_eof,
    MSG_CHANNEL_CLOSE: Channel._handle_close,
}


class SecurityOptions (object):
    """
    Simple object containing the security preferences of an ssh transport.
    These are tuples of acceptable ciphers, digests, key types, and key
    exchange algorithms, listed in order of preference.

    Changing the contents and/or order of these fields affects the
    underlying `.Transport` (but only if you change them before starting
    the session).  If you try to add an algorithm that paramiko doesn't
    recognize, ``ValueError`` will be raised.  If you try to assign
    something besides a tuple to one of the fields, ``TypeError`` will
    be raised.
    """

    __slots__ = '_transport'

    def __init__(self, transport):
        self._transport = transport

    def __repr__(self):
        """
        Returns a string representation of this object, for debugging.
        """
        return '<paramiko.SecurityOptions for %s>' % repr(self._transport)

    def _set(self, name, orig, x):
        # validate the new preference tuple against the transport's
        # info table (``orig``) before installing it as ``name``
        if type(x) is list:
            x = tuple(x)
        if type(x) is not tuple:
            raise TypeError('expected tuple or list')
        possible = list(getattr(self._transport, orig).keys())
        forbidden = [n for n in x if n not in possible]
        if len(forbidden) > 0:
            raise ValueError('unknown cipher')
        setattr(self._transport, name, x)

    @property
    def ciphers(self):
        """Symmetric encryption ciphers"""
        return self._transport._preferred_ciphers

    @ciphers.setter
    def ciphers(self, x):
        self._set('_preferred_ciphers', '_cipher_info', x)

    @property
    def digests(self):
        """Digest (one-way hash) algorithms"""
        return self._transport._preferred_macs

    @digests.setter
    def digests(self, x):
        self._set('_preferred_macs', '_mac_info', x)

    @property
    def key_types(self):
        """Public-key algorithms"""
        return self._transport._preferred_keys

    @key_types.setter
    def key_types(self, x):
        self._set('_preferred_keys', '_key_info', x)

    @property
    def kex(self):
        """Key exchange algorithms"""
        return self._transport._preferred_kex

    @kex.setter
    def kex(self, x):
        self._set('_preferred_kex', '_kex_info', x)

    @property
    def compression(self):
        """Compression algorithms"""
        return self._transport._preferred_compression

    @compression.setter
    def compression(self, x):
        self._set('_preferred_compression', '_compression_info', x)


class ChannelMap (object):
    # Thread-safe map of channel id -> Channel.  Values are held weakly,
    # so a garbage-collected Channel drops out of the map automatically.

    def __init__(self):
        # (id -> Channel)
        self._map = weakref.WeakValueDictionary()
        self._lock = threading.Lock()

    def put(self, chanid, chan):
        self._lock.acquire()
        try:
            self._map[chanid] = chan
        finally:
            self._lock.release()

    def get(self, chanid):
        self._lock.acquire()
        try:
            return self._map.get(chanid, None)
        finally:
            self._lock.release()

    def delete(self, chanid):
        self._lock.acquire()
        try:
            try:
                del self._map[chanid]
            except KeyError:
                pass
        finally:
            self._lock.release()

    def values(self):
        self._lock.acquire()
        try:
            return list(self._map.values())
        finally:
            self._lock.release()

    def __len__(self):
        self._lock.acquire()
        try:
            return len(self._map)
        finally:
            self._lock.release()
./CrossVul/dataset_final_sorted/CWE-287/py/bad_650_1
crossvul-python_data_good_650_0
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ Common constants and global variables. """ import logging from paramiko.py3compat import byte_chr, PY2, bytes_types, text_type, long MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG, \ MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT = range(1, 7) MSG_KEXINIT, MSG_NEWKEYS = range(20, 22) MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE, MSG_USERAUTH_SUCCESS, \ MSG_USERAUTH_BANNER = range(50, 54) MSG_USERAUTH_PK_OK = 60 MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE = range(60, 62) MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN = range(60, 62) MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, MSG_USERAUTH_GSSAPI_ERROR,\ MSG_USERAUTH_GSSAPI_ERRTOK, MSG_USERAUTH_GSSAPI_MIC = range(63, 67) HIGHEST_USERAUTH_MESSAGE_ID = 79 MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE = range(80, 83) MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, \ MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, \ MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST, \ MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE = range(90, 101) cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT) cMSG_IGNORE = byte_chr(MSG_IGNORE) cMSG_UNIMPLEMENTED = 
byte_chr(MSG_UNIMPLEMENTED) cMSG_DEBUG = byte_chr(MSG_DEBUG) cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST) cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT) cMSG_KEXINIT = byte_chr(MSG_KEXINIT) cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS) cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST) cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE) cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS) cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER) cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK) cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST) cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE) cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE) cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN) cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = \ byte_chr(MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE) cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR) cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK) cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC) cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST) cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS) cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAILURE) cMSG_CHANNEL_OPEN = byte_chr(MSG_CHANNEL_OPEN) cMSG_CHANNEL_OPEN_SUCCESS = byte_chr(MSG_CHANNEL_OPEN_SUCCESS) cMSG_CHANNEL_OPEN_FAILURE = byte_chr(MSG_CHANNEL_OPEN_FAILURE) cMSG_CHANNEL_WINDOW_ADJUST = byte_chr(MSG_CHANNEL_WINDOW_ADJUST) cMSG_CHANNEL_DATA = byte_chr(MSG_CHANNEL_DATA) cMSG_CHANNEL_EXTENDED_DATA = byte_chr(MSG_CHANNEL_EXTENDED_DATA) cMSG_CHANNEL_EOF = byte_chr(MSG_CHANNEL_EOF) cMSG_CHANNEL_CLOSE = byte_chr(MSG_CHANNEL_CLOSE) cMSG_CHANNEL_REQUEST = byte_chr(MSG_CHANNEL_REQUEST) cMSG_CHANNEL_SUCCESS = byte_chr(MSG_CHANNEL_SUCCESS) cMSG_CHANNEL_FAILURE = byte_chr(MSG_CHANNEL_FAILURE) # for debugging: MSG_NAMES = { MSG_DISCONNECT: 'disconnect', MSG_IGNORE: 'ignore', MSG_UNIMPLEMENTED: 'unimplemented', MSG_DEBUG: 'debug', MSG_SERVICE_REQUEST: 'service-request', 
MSG_SERVICE_ACCEPT: 'service-accept', MSG_KEXINIT: 'kexinit', MSG_NEWKEYS: 'newkeys', 30: 'kex30', 31: 'kex31', 32: 'kex32', 33: 'kex33', 34: 'kex34', 40: 'kex40', 41: 'kex41', MSG_USERAUTH_REQUEST: 'userauth-request', MSG_USERAUTH_FAILURE: 'userauth-failure', MSG_USERAUTH_SUCCESS: 'userauth-success', MSG_USERAUTH_BANNER: 'userauth--banner', MSG_USERAUTH_PK_OK: 'userauth-60(pk-ok/info-request)', MSG_USERAUTH_INFO_RESPONSE: 'userauth-info-response', MSG_GLOBAL_REQUEST: 'global-request', MSG_REQUEST_SUCCESS: 'request-success', MSG_REQUEST_FAILURE: 'request-failure', MSG_CHANNEL_OPEN: 'channel-open', MSG_CHANNEL_OPEN_SUCCESS: 'channel-open-success', MSG_CHANNEL_OPEN_FAILURE: 'channel-open-failure', MSG_CHANNEL_WINDOW_ADJUST: 'channel-window-adjust', MSG_CHANNEL_DATA: 'channel-data', MSG_CHANNEL_EXTENDED_DATA: 'channel-extended-data', MSG_CHANNEL_EOF: 'channel-eof', MSG_CHANNEL_CLOSE: 'channel-close', MSG_CHANNEL_REQUEST: 'channel-request', MSG_CHANNEL_SUCCESS: 'channel-success', MSG_CHANNEL_FAILURE: 'channel-failure', MSG_USERAUTH_GSSAPI_RESPONSE: 'userauth-gssapi-response', MSG_USERAUTH_GSSAPI_TOKEN: 'userauth-gssapi-token', MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: 'userauth-gssapi-exchange-complete', MSG_USERAUTH_GSSAPI_ERROR: 'userauth-gssapi-error', MSG_USERAUTH_GSSAPI_ERRTOK: 'userauth-gssapi-error-token', MSG_USERAUTH_GSSAPI_MIC: 'userauth-gssapi-mic' } # authentication request return codes: AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3) # channel request failed reasons: (OPEN_SUCCEEDED, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_FAILED_CONNECT_FAILED, OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, OPEN_FAILED_RESOURCE_SHORTAGE) = range(0, 5) CONNECTION_FAILED_CODE = { 1: 'Administratively prohibited', 2: 'Connect failed', 3: 'Unknown channel type', 4: 'Resource shortage' } DISCONNECT_SERVICE_NOT_AVAILABLE, DISCONNECT_AUTH_CANCELLED_BY_USER, \ DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE = 7, 13, 14 zero_byte = byte_chr(0) one_byte = byte_chr(1) four_byte = 
byte_chr(4) max_byte = byte_chr(0xff) cr_byte = byte_chr(13) linefeed_byte = byte_chr(10) crlf = cr_byte + linefeed_byte if PY2: cr_byte_value = cr_byte linefeed_byte_value = linefeed_byte else: cr_byte_value = 13 linefeed_byte_value = 10 def asbytes(s): """Coerce to bytes if possible or return unchanged.""" if isinstance(s, bytes_types): return s if isinstance(s, text_type): # Accept text and encode as utf-8 for compatibility only. return s.encode("utf-8") asbytes = getattr(s, "asbytes", None) if asbytes is not None: return asbytes() # May be an object that implements the buffer api, let callers handle. return s xffffffff = long(0xffffffff) x80000000 = long(0x80000000) o666 = 438 o660 = 432 o644 = 420 o600 = 384 o777 = 511 o700 = 448 o70 = 56 DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING ERROR = logging.ERROR CRITICAL = logging.CRITICAL # Common IO/select/etc sleep period, in seconds io_sleep = 0.01 DEFAULT_WINDOW_SIZE = 64 * 2 ** 15 DEFAULT_MAX_PACKET_SIZE = 2 ** 15 # lower bound on the max packet size we'll accept from the remote host # Minimum packet size is 32768 bytes according to # http://www.ietf.org/rfc/rfc4254.txt MIN_WINDOW_SIZE = 2 ** 15 # However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly # legal to accept a size much smaller, as OpenSSH client does as size 16384. MIN_PACKET_SIZE = 2 ** 12 # Max windows size according to http://www.ietf.org/rfc/rfc4254.txt MAX_WINDOW_SIZE = 2 ** 32 - 1
./CrossVul/dataset_final_sorted/CWE-287/py/good_650_0
crossvul-python_data_good_649_1
# Copyright (c) 2013-2017 by Ron Frederick <ronf@timeheart.net>. # All rights reserved. # # This program and the accompanying materials are made available under # the terms of the Eclipse Public License v1.0 which accompanies this # distribution and is available at: # # http://www.eclipse.org/legal/epl-v10.html # # Contributors: # Ron Frederick - initial implementation, API, and documentation """AsyncSSH version information""" __author__ = 'Ron Frederick' __author_email__ = 'ronf@timeheart.net' __url__ = 'http://asyncssh.timeheart.net' __version__ = '1.12.1'
./CrossVul/dataset_final_sorted/CWE-287/py/good_649_1
crossvul-python_data_bad_3758_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # Copyright 2012 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Catalog service.""" import uuid from keystone import config from keystone import exception from keystone import identity from keystone import policy from keystone import token from keystone.common import manager from keystone.common import wsgi CONF = config.CONF class Manager(manager.Manager): """Default pivot point for the Catalog backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.catalog.driver) class Driver(object): """Interface description for an Catalog driver.""" def list_services(self): """List all service ids in catalog. Returns: list of service_ids or an empty list. """ raise exception.NotImplemented() def get_service(self, service_id): """Get service by id. Returns: service_ref dict or None. """ raise exception.NotImplemented() def delete_service(self, service_id): raise exception.NotImplemented() def create_service(self, service_id, service_ref): raise exception.NotImplemented() def create_endpoint(self, endpoint_id, endpoint_ref): raise exception.NotImplemented() def delete_endpoint(self, endpoint_id): raise exception.NotImplemented() def get_endpoint(self, endpoint_id): """Get endpoint by id. Returns: endpoint_ref dict or None. 
""" raise exception.NotImplemented() def list_endpoints(self): """List all endpoint ids in catalog. Returns: list of endpoint_ids or an empty list. """ raise exception.NotImplemented() def get_catalog(self, user_id, tenant_id, metadata=None): """Retreive and format the current service catalog. Returns: A nested dict representing the service catalog or an empty dict. Example: { 'RegionOne': {'compute': { 'adminURL': u'http://host:8774/v1.1/tenantid', 'internalURL': u'http://host:8774/v1.1/tenant_id', 'name': 'Compute Service', 'publicURL': u'http://host:8774/v1.1/tenantid'}, 'ec2': { 'adminURL': 'http://host:8773/services/Admin', 'internalURL': 'http://host:8773/services/Cloud', 'name': 'EC2 Service', 'publicURL': 'http://host:8773/services/Cloud'}} """ raise exception.NotImplemented() class ServiceController(wsgi.Application): def __init__(self): self.catalog_api = Manager() super(ServiceController, self).__init__() # CRUD extensions # NOTE(termie): this OS-KSADM stuff is not very consistent def get_services(self, context): service_list = self.catalog_api.list_services(context) service_refs = [self.catalog_api.get_service(context, x) for x in service_list] return {'OS-KSADM:services': service_refs} def get_service(self, context, service_id): service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) return {'OS-KSADM:service': service_ref} def delete_service(self, context, service_id): service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) self.catalog_api.delete_service(context, service_id) def create_service(self, context, OS_KSADM_service): service_id = uuid.uuid4().hex service_ref = OS_KSADM_service.copy() service_ref['id'] = service_id new_service_ref = self.catalog_api.create_service( context, service_id, service_ref) return {'OS-KSADM:service': new_service_ref} class 
EndpointController(wsgi.Application): def __init__(self): self.catalog_api = Manager() self.identity_api = identity.Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(EndpointController, self).__init__() def get_endpoints(self, context): self.assert_admin(context) endpoint_list = self.catalog_api.list_endpoints(context) endpoint_refs = [self.catalog_api.get_endpoint(context, e) for e in endpoint_list] return {'endpoints': endpoint_refs} def create_endpoint(self, context, endpoint): self.assert_admin(context) endpoint_id = uuid.uuid4().hex endpoint_ref = endpoint.copy() endpoint_ref['id'] = endpoint_id service_id = endpoint_ref['service_id'] if not self.catalog_api.get_service(context, service_id): raise exception.ServiceNotFound(service_id=service_id) new_endpoint_ref = self.catalog_api.create_endpoint( context, endpoint_id, endpoint_ref) return {'endpoint': new_endpoint_ref} def delete_endpoint(self, context, endpoint_id): self.assert_admin(context) self.catalog_api.delete_endpoint(context, endpoint_id)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3758_0
crossvul-python_data_bad_3762_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid import routes from keystone import catalog from keystone import exception from keystone import identity from keystone import policy from keystone import token from keystone.common import logging from keystone.common import utils from keystone.common import wsgi LOG = logging.getLogger(__name__) class AdminRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('admin') mapper.connect('/', controller=version_controller, action='get_version') # Token Operations auth_controller = TokenController() mapper.connect('/tokens', controller=auth_controller, action='authenticate', conditions=dict(method=['POST'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='validate_token', conditions=dict(method=['GET'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='validate_token_head', conditions=dict(method=['HEAD'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='delete_token', conditions=dict(method=['DELETE'])) mapper.connect('/tokens/{token_id}/endpoints', controller=auth_controller, action='endpoints', conditions=dict(method=['GET'])) # Miscellaneous Operations extensions_controller = AdminExtensionsController() mapper.connect('/extensions', controller=extensions_controller, action='get_extensions_info', 
conditions=dict(method=['GET'])) mapper.connect('/extensions/{extension_alias}', controller=extensions_controller, action='get_extension_info', conditions=dict(method=['GET'])) identity_router = identity.AdminRouter() routers = [identity_router] super(AdminRouter, self).__init__(mapper, routers) class PublicRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('public') mapper.connect('/', controller=version_controller, action='get_version') # Token Operations auth_controller = TokenController() mapper.connect('/tokens', controller=auth_controller, action='authenticate', conditions=dict(method=['POST'])) # Miscellaneous extensions_controller = PublicExtensionsController() mapper.connect('/extensions', controller=extensions_controller, action='get_extensions_info', conditions=dict(method=['GET'])) mapper.connect('/extensions/{extension_alias}', controller=extensions_controller, action='get_extension_info', conditions=dict(method=['GET'])) identity_router = identity.PublicRouter() routers = [identity_router] super(PublicRouter, self).__init__(mapper, routers) class PublicVersionRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('public') mapper.connect('/', controller=version_controller, action='get_versions') routers = [] super(PublicVersionRouter, self).__init__(mapper, routers) class AdminVersionRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('admin') mapper.connect('/', controller=version_controller, action='get_versions') routers = [] super(AdminVersionRouter, self).__init__(mapper, routers) class VersionController(wsgi.Application): def __init__(self, version_type): self.catalog_api = catalog.Manager() self.url_key = "%sURL" % version_type super(VersionController, self).__init__() def _get_identity_url(self, context): catalog_ref = self.catalog_api.get_catalog( 
context=context, user_id=None, tenant_id=None) for region, region_ref in catalog_ref.iteritems(): for service, service_ref in region_ref.iteritems(): if service == 'identity': return service_ref[self.url_key] raise exception.NotImplemented() def _get_versions_list(self, context): """The list of versions is dependent on the context.""" identity_url = self._get_identity_url(context) if not identity_url.endswith('/'): identity_url = identity_url + '/' versions = {} versions['v2.0'] = { "id": "v2.0", "status": "beta", "updated": "2011-11-19T00:00:00Z", "links": [ { "rel": "self", "href": identity_url, }, { "rel": "describedby", "type": "text/html", "href": "http://docs.openstack.org/api/openstack-" "identity-service/2.0/content/" }, { "rel": "describedby", "type": "application/pdf", "href": "http://docs.openstack.org/api/openstack-" "identity-service/2.0/identity-dev-guide-" "2.0.pdf" } ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.identity-v2.0" "+json" }, { "base": "application/xml", "type": "application/vnd.openstack.identity-v2.0" "+xml" } ] } return versions def get_versions(self, context): versions = self._get_versions_list(context) return wsgi.render_response(status=(300, 'Multiple Choices'), body={ "versions": { "values": versions.values() } }) def get_version(self, context): versions = self._get_versions_list(context) return wsgi.render_response(body={ "version": versions['v2.0'] }) class NoopController(wsgi.Application): def __init__(self): super(NoopController, self).__init__() def noop(self, context): return {} class TokenController(wsgi.Application): def __init__(self): self.catalog_api = catalog.Manager() self.identity_api = identity.Manager() self.token_api = token.Manager() self.policy_api = policy.Manager() super(TokenController, self).__init__() def authenticate(self, context, auth=None): """Authenticate credentials and return a token. 
Accept auth as a dict that looks like:: { "auth":{ "passwordCredentials":{ "username":"test_user", "password":"mypass" }, "tenantName":"customer-x" } } In this case, tenant is optional, if not provided the token will be considered "unscoped" and can later be used to get a scoped token. Alternatively, this call accepts auth with only a token and tenant that will return a token that is scoped to that tenant. """ token_id = uuid.uuid4().hex if 'passwordCredentials' in auth: username = auth['passwordCredentials'].get('username', '') password = auth['passwordCredentials'].get('password', '') tenant_name = auth.get('tenantName', None) user_id = auth['passwordCredentials'].get('userId', None) if username: user_ref = self.identity_api.get_user_by_name( context=context, user_name=username) if user_ref: user_id = user_ref['id'] # more compat tenant_id = auth.get('tenantId', None) if tenant_name: tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) if tenant_ref: tenant_id = tenant_ref['id'] try: auth_info = self.identity_api.authenticate(context=context, user_id=user_id, password=password, tenant_id=tenant_id) (user_ref, tenant_ref, metadata_ref) = auth_info # If the user is disabled don't allow them to authenticate if not user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_id) raise exception.Unauthorized() except AssertionError as e: raise exception.Unauthorized(e.message) token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref)) if tenant_ref: catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id'], metadata=metadata_ref) else: catalog_ref = {} elif 'token' in auth: token = auth['token'].get('id', None) tenant_name = auth.get('tenantName') # more compat if tenant_name: tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) tenant_id = 
tenant_ref['id'] else: tenant_id = auth.get('tenantId', None) try: old_token_ref = self.token_api.get_token(context=context, token_id=token) except exception.NotFound: raise exception.Unauthorized() user_ref = old_token_ref['user'] # If the user is disabled don't allow them to authenticate current_user_ref = self.identity_api.get_user( context=context, user_id=user_ref['id']) if not current_user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_ref['id']) raise exception.Unauthorized() tenants = self.identity_api.get_tenants_for_user(context, user_ref['id']) if tenant_id: assert tenant_id in tenants tenant_ref = self.identity_api.get_tenant(context=context, tenant_id=tenant_id) if tenant_ref: metadata_ref = self.identity_api.get_metadata( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id']) catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id'], metadata=metadata_ref) else: metadata_ref = {} catalog_ref = {} token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref, expires=old_token_ref['expires'])) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata roles_ref = [] for role_id in metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) logging.debug('TOKEN_REF %s', token_ref) return self._format_authenticate(token_ref, roles_ref, catalog_ref) def _get_token_ref(self, context, token_id, belongs_to=None): """Returns a token if a valid one exists. Optionally, limited to a token owned by a specific tenant. 
""" # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) token_ref = self.token_api.get_token(context=context, token_id=token_id) if belongs_to: assert token_ref['tenant']['id'] == belongs_to return token_ref # admin only def validate_token_head(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Identical to ``validate_token``, except does not return a response. """ belongs_to = context['query_string'].get("belongsTo") assert self._get_token_ref(context, token_id, belongs_to) # admin only def validate_token(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Returns metadata about the token along any associated roles. """ belongs_to = context['query_string'].get("belongsTo") token_ref = self._get_token_ref(context, token_id, belongs_to) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata metadata_ref = token_ref['metadata'] roles_ref = [] for role_id in metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) # Get a service catalog if belongs_to is not none # This is needed for on-behalf-of requests catalog_ref = None if belongs_to is not None: catalog_ref = self.catalog_api.get_catalog( context=context, user_id=token_ref['user']['id'], tenant_id=token_ref['tenant']['id'], metadata=metadata_ref) return self._format_token(token_ref, roles_ref, catalog_ref) def delete_token(self, context, token_id): """Delete a token, effectively invalidating it for authz.""" # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) self.token_api.delete_token(context=context, token_id=token_id) def endpoints(self, context, token_id): """Return a list of endpoints available to the token.""" raise exception.NotImplemented() def _format_authenticate(self, 
token_ref, roles_ref, catalog_ref): o = self._format_token(token_ref, roles_ref) o['access']['serviceCatalog'] = self._format_catalog(catalog_ref) return o def _format_token(self, token_ref, roles_ref, catalog_ref=None): user_ref = token_ref['user'] metadata_ref = token_ref['metadata'] expires = token_ref['expires'] if expires is not None: expires = utils.isotime(expires) o = {'access': {'token': {'id': token_ref['id'], 'expires': expires, }, 'user': {'id': user_ref['id'], 'name': user_ref['name'], 'username': user_ref['name'], 'roles': roles_ref, 'roles_links': metadata_ref.get('roles_links', []) } } } if 'tenant' in token_ref and token_ref['tenant']: token_ref['tenant']['enabled'] = True o['access']['token']['tenant'] = token_ref['tenant'] if catalog_ref is not None: o['access']['serviceCatalog'] = self._format_catalog(catalog_ref) return o def _format_catalog(self, catalog_ref): """Munge catalogs from internal to output format Internal catalogs look like: {$REGION: { {$SERVICE: { $key1: $value1, ... } } } The legacy api wants them to look like [{'name': $SERVICE[name], 'type': $SERVICE, 'endpoints': [{ 'tenantId': $tenant_id, ... 
'region': $REGION, }], 'endpoints_links': [], }] """ if not catalog_ref: return {} services = {} for region, region_ref in catalog_ref.iteritems(): for service, service_ref in region_ref.iteritems(): new_service_ref = services.get(service, {}) new_service_ref['name'] = service_ref.pop('name') new_service_ref['type'] = service new_service_ref['endpoints_links'] = [] service_ref['region'] = region endpoints_ref = new_service_ref.get('endpoints', []) endpoints_ref.append(service_ref) new_service_ref['endpoints'] = endpoints_ref services[service] = new_service_ref return services.values() class ExtensionsController(wsgi.Application): """Base extensions controller to be extended by public and admin API's.""" def __init__(self, extensions=None): super(ExtensionsController, self).__init__() self.extensions = extensions or {} def get_extensions_info(self, context): return {'extensions': {'values': self.extensions.values()}} def get_extension_info(self, context, extension_alias): try: return {'extension': self.extensions[extension_alias]} except KeyError: raise exception.NotFound(target=extension_alias) class PublicExtensionsController(ExtensionsController): pass class AdminExtensionsController(ExtensionsController): def __init__(self, *args, **kwargs): super(AdminExtensionsController, self).__init__(*args, **kwargs) # TODO(dolph): Extensions should obviously provide this information # themselves, but hardcoding it here allows us to match # the API spec in the short term with minimal complexity. 
self.extensions['OS-KSADM'] = { 'name': 'Openstack Keystone Admin', 'namespace': 'http://docs.openstack.org/identity/api/ext/' 'OS-KSADM/v1.0', 'alias': 'OS-KSADM', 'updated': '2011-08-19T13:25:27-06:00', 'description': 'Openstack extensions to Keystone v2.0 API ' 'enabling Admin Operations.', 'links': [ { 'rel': 'describedby', # TODO(dolph): link needs to be revised after # bug 928059 merges 'type': 'text/html', 'href': ('https://github.com/openstack/' 'identity-api'), } ] } @logging.fail_gracefully def public_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return PublicRouter() @logging.fail_gracefully def admin_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return AdminRouter() @logging.fail_gracefully def public_version_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return PublicVersionRouter() @logging.fail_gracefully def admin_version_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return AdminVersionRouter()
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3762_0
crossvul-python_data_good_3758_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack LLC
# Copyright 2012 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Main entry point into the Catalog service."""

import uuid

from keystone import config
from keystone import exception
from keystone import identity
from keystone import policy
from keystone import token
from keystone.common import manager
from keystone.common import wsgi


CONF = config.CONF


class Manager(manager.Manager):
    """Load the configured Catalog backend and delegate all calls to it.

    See :mod:`keystone.common.manager.Manager` for details on how calls
    are dispatched dynamically to the driver named by
    ``CONF.catalog.driver``.
    """

    def __init__(self):
        super(Manager, self).__init__(CONF.catalog.driver)


class Driver(object):
    """Interface description for an Catalog driver."""

    def list_services(self):
        """Return every service id known to the catalog (may be empty)."""
        raise exception.NotImplemented()

    def get_service(self, service_id):
        """Return the service_ref dict for ``service_id``, or None."""
        raise exception.NotImplemented()

    def delete_service(self, service_id):
        raise exception.NotImplemented()

    def create_service(self, service_id, service_ref):
        raise exception.NotImplemented()

    def create_endpoint(self, endpoint_id, endpoint_ref):
        raise exception.NotImplemented()

    def delete_endpoint(self, endpoint_id):
        raise exception.NotImplemented()

    def get_endpoint(self, endpoint_id):
        """Return the endpoint_ref dict for ``endpoint_id``, or None."""
        raise exception.NotImplemented()

    def list_endpoints(self):
        """Return every endpoint id known to the catalog (may be empty)."""
        raise exception.NotImplemented()

    def get_catalog(self, user_id, tenant_id, metadata=None):
        """Retrieve and format the current service catalog.

        The result is a nested dict keyed first by region, then by
        service type, or an empty dict.  Example::

            {'RegionOne':
                {'compute': {
                    'adminURL': u'http://host:8774/v1.1/tenantid',
                    'internalURL': u'http://host:8774/v1.1/tenant_id',
                    'name': 'Compute Service',
                    'publicURL': u'http://host:8774/v1.1/tenantid'},
                 'ec2': {
                    'adminURL': 'http://host:8773/services/Admin',
                    'internalURL': 'http://host:8773/services/Cloud',
                    'name': 'EC2 Service',
                    'publicURL': 'http://host:8773/services/Cloud'}}
        """
        raise exception.NotImplemented()


class ServiceController(wsgi.Application):
    """WSGI controller for the OS-KSADM service CRUD extension."""

    def __init__(self):
        self.catalog_api = Manager()
        self.identity_api = identity.Manager()
        self.policy_api = policy.Manager()
        self.token_api = token.Manager()
        super(ServiceController, self).__init__()

    # CRUD extensions
    # NOTE(termie): this OS-KSADM stuff is not very consistent
    def get_services(self, context):
        """Return all services, fully dereferenced (admin only)."""
        self.assert_admin(context)
        refs = []
        for sid in self.catalog_api.list_services(context):
            refs.append(self.catalog_api.get_service(context, sid))
        return {'OS-KSADM:services': refs}

    def get_service(self, context, service_id):
        """Return one service by id, or 404 if unknown (admin only)."""
        self.assert_admin(context)
        ref = self.catalog_api.get_service(context, service_id)
        if not ref:
            raise exception.ServiceNotFound(service_id=service_id)
        return {'OS-KSADM:service': ref}

    def delete_service(self, context, service_id):
        """Delete a service by id, or 404 if unknown (admin only)."""
        self.assert_admin(context)
        if not self.catalog_api.get_service(context, service_id):
            raise exception.ServiceNotFound(service_id=service_id)
        self.catalog_api.delete_service(context, service_id)

    def create_service(self, context, OS_KSADM_service):
        """Create a service under a freshly generated id (admin only)."""
        self.assert_admin(context)
        new_id = uuid.uuid4().hex
        # Equivalent to copying the request body and stamping the new id.
        ref = dict(OS_KSADM_service, id=new_id)
        created = self.catalog_api.create_service(context, new_id, ref)
        return {'OS-KSADM:service': created}


class EndpointController(wsgi.Application):
    """WSGI controller for the endpoint CRUD extension."""

    def __init__(self):
        self.catalog_api = Manager()
        self.identity_api = identity.Manager()
        self.policy_api = policy.Manager()
        self.token_api = token.Manager()
        super(EndpointController, self).__init__()

    def get_endpoints(self, context):
        """Return all endpoints, fully dereferenced (admin only)."""
        self.assert_admin(context)
        refs = []
        for eid in self.catalog_api.list_endpoints(context):
            refs.append(self.catalog_api.get_endpoint(context, eid))
        return {'endpoints': refs}

    def create_endpoint(self, context, endpoint):
        """Create an endpoint; its service must already exist (admin only)."""
        self.assert_admin(context)
        new_id = uuid.uuid4().hex
        ref = dict(endpoint, id=new_id)
        service_id = ref['service_id']
        if not self.catalog_api.get_service(context, service_id):
            raise exception.ServiceNotFound(service_id=service_id)
        created = self.catalog_api.create_endpoint(context, new_id, ref)
        return {'endpoint': created}

    def delete_endpoint(self, context, endpoint_id):
        """Delete an endpoint by id (admin only)."""
        self.assert_admin(context)
        self.catalog_api.delete_endpoint(context, endpoint_id)
./CrossVul/dataset_final_sorted/CWE-287/py/good_3758_0
crossvul-python_data_good_3762_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

import routes

from keystone import catalog
from keystone import exception
from keystone import identity
from keystone import policy
from keystone import token
from keystone.common import logging
from keystone.common import utils
from keystone.common import wsgi


LOG = logging.getLogger(__name__)


class AdminRouter(wsgi.ComposingRouter):
    """Route map for the admin API: token admin + extensions + identity."""

    def __init__(self):
        mapper = routes.Mapper()

        version_controller = VersionController('admin')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_version')

        # Token Operations
        auth_controller = TokenController()
        mapper.connect('/tokens',
                       controller=auth_controller,
                       action='authenticate',
                       conditions=dict(method=['POST']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='validate_token',
                       conditions=dict(method=['GET']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='validate_token_head',
                       conditions=dict(method=['HEAD']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='delete_token',
                       conditions=dict(method=['DELETE']))
        mapper.connect('/tokens/{token_id}/endpoints',
                       controller=auth_controller,
                       action='endpoints',
                       conditions=dict(method=['GET']))

        # Miscellaneous Operations
        extensions_controller = AdminExtensionsController()
        mapper.connect('/extensions',
                       controller=extensions_controller,
                       action='get_extensions_info',
                       conditions=dict(method=['GET']))
        mapper.connect('/extensions/{extension_alias}',
                       controller=extensions_controller,
                       action='get_extension_info',
                       conditions=dict(method=['GET']))

        identity_router = identity.AdminRouter()
        routers = [identity_router]
        super(AdminRouter, self).__init__(mapper, routers)


class PublicRouter(wsgi.ComposingRouter):
    """Route map for the public API: authentication + extensions."""

    def __init__(self):
        mapper = routes.Mapper()

        version_controller = VersionController('public')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_version')

        # Token Operations
        auth_controller = TokenController()
        mapper.connect('/tokens',
                       controller=auth_controller,
                       action='authenticate',
                       conditions=dict(method=['POST']))

        # Miscellaneous
        extensions_controller = PublicExtensionsController()
        mapper.connect('/extensions',
                       controller=extensions_controller,
                       action='get_extensions_info',
                       conditions=dict(method=['GET']))
        mapper.connect('/extensions/{extension_alias}',
                       controller=extensions_controller,
                       action='get_extension_info',
                       conditions=dict(method=['GET']))

        identity_router = identity.PublicRouter()
        routers = [identity_router]
        super(PublicRouter, self).__init__(mapper, routers)


class PublicVersionRouter(wsgi.ComposingRouter):
    """Route map serving only the public version discovery document."""

    def __init__(self):
        mapper = routes.Mapper()
        version_controller = VersionController('public')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_versions')
        routers = []
        super(PublicVersionRouter, self).__init__(mapper, routers)


class AdminVersionRouter(wsgi.ComposingRouter):
    """Route map serving only the admin version discovery document."""

    def __init__(self):
        mapper = routes.Mapper()
        version_controller = VersionController('admin')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_versions')
        routers = []
        super(AdminVersionRouter, self).__init__(mapper, routers)


class VersionController(wsgi.Application):
    """Serves the version discovery documents.

    ``version_type`` is either 'public' or 'admin' and selects which URL
    (publicURL/adminURL) from the identity catalog entry is advertised.
    """

    def __init__(self, version_type):
        self.catalog_api = catalog.Manager()
        self.url_key = "%sURL" % version_type
        super(VersionController, self).__init__()

    def _get_identity_url(self, context):
        """Return this service's own URL from the catalog.

        Raises NotImplemented when no 'identity' service is present in
        the catalog (no sensible URL can be advertised).
        """
        catalog_ref = self.catalog_api.get_catalog(context=context,
                                                   user_id=None,
                                                   tenant_id=None)
        for region, region_ref in catalog_ref.iteritems():
            for service, service_ref in region_ref.iteritems():
                if service == 'identity':
                    return service_ref[self.url_key]

        raise exception.NotImplemented()

    def _get_versions_list(self, context):
        """The list of versions is dependent on the context."""
        identity_url = self._get_identity_url(context)
        if not identity_url.endswith('/'):
            identity_url = identity_url + '/'

        versions = {}
        versions['v2.0'] = {
            "id": "v2.0",
            "status": "beta",
            "updated": "2011-11-19T00:00:00Z",
            "links": [
                {
                    "rel": "self",
                    "href": identity_url,
                }, {
                    "rel": "describedby",
                    "type": "text/html",
                    "href": "http://docs.openstack.org/api/openstack-"
                            "identity-service/2.0/content/"
                }, {
                    "rel": "describedby",
                    "type": "application/pdf",
                    "href": "http://docs.openstack.org/api/openstack-"
                            "identity-service/2.0/identity-dev-guide-"
                            "2.0.pdf"
                }
            ],
            "media-types": [
                {
                    "base": "application/json",
                    "type": "application/vnd.openstack.identity-v2.0"
                            "+json"
                }, {
                    "base": "application/xml",
                    "type": "application/vnd.openstack.identity-v2.0"
                            "+xml"
                }
            ]
        }

        return versions

    def get_versions(self, context):
        """Return all supported API versions as a 300 Multiple Choices."""
        versions = self._get_versions_list(context)
        return wsgi.render_response(status=(300, 'Multiple Choices'), body={
            "versions": {
                "values": versions.values()
            }
        })

    def get_version(self, context):
        """Return the single v2.0 version document."""
        versions = self._get_versions_list(context)
        return wsgi.render_response(body={
            "version": versions['v2.0']
        })


class NoopController(wsgi.Application):
    """Controller whose only action returns an empty body."""

    def __init__(self):
        super(NoopController, self).__init__()

    def noop(self, context):
        return {}


class TokenController(wsgi.Application):
    """Token issuance, validation and revocation."""

    def __init__(self):
        self.catalog_api = catalog.Manager()
        self.identity_api = identity.Manager()
        self.token_api = token.Manager()
        self.policy_api = policy.Manager()
        super(TokenController, self).__init__()

    def authenticate(self, context, auth=None):
        """Authenticate credentials and return a token.

        Accept auth as a dict that looks like::

            {
                "auth":{
                    "passwordCredentials":{
                        "username":"test_user",
                        "password":"mypass"
                    },
                    "tenantName":"customer-x"
                }
            }

        In this case, tenant is optional, if not provided the token will be
        considered "unscoped" and can later be used to get a scoped token.

        Alternatively, this call accepts auth with only a token and tenant
        that will return a token that is scoped to that tenant.

        Raises Unauthorized on any authentication failure (bad password,
        disabled user/tenant, unknown token, tenant not granted to user).
        """
        token_id = uuid.uuid4().hex
        if auth and 'passwordCredentials' in auth:
            username = auth['passwordCredentials'].get('username', '')
            password = auth['passwordCredentials'].get('password', '')
            tenant_name = auth.get('tenantName', None)

            # Allow either a userId or a username to identify the user.
            user_id = auth['passwordCredentials'].get('userId', None)
            if username:
                user_ref = self.identity_api.get_user_by_name(
                    context=context, user_name=username)
                if user_ref:
                    user_id = user_ref['id']

            # more compat
            tenant_id = auth.get('tenantId', None)
            if tenant_name:
                tenant_ref = self.identity_api.get_tenant_by_name(
                    context=context, tenant_name=tenant_name)
                if tenant_ref:
                    tenant_id = tenant_ref['id']

            try:
                auth_info = self.identity_api.authenticate(
                    context=context,
                    user_id=user_id,
                    password=password,
                    tenant_id=tenant_id)
                (user_ref, tenant_ref, metadata_ref) = auth_info

                # If the user is disabled don't allow them to authenticate
                if not user_ref.get('enabled', True):
                    LOG.warning('User %s is disabled' % user_id)
                    raise exception.Unauthorized()

                # If the tenant is disabled don't allow them to authenticate
                if tenant_ref and not tenant_ref.get('enabled', True):
                    LOG.warning('Tenant %s is disabled' % tenant_id)
                    raise exception.Unauthorized()
            except AssertionError as e:
                # Identity drivers signal bad credentials via assertions.
                raise exception.Unauthorized(e.message)

            token_ref = self.token_api.create_token(
                context, token_id, dict(id=token_id,
                                        user=user_ref,
                                        tenant=tenant_ref,
                                        metadata=metadata_ref))
            if tenant_ref:
                catalog_ref = self.catalog_api.get_catalog(
                    context=context,
                    user_id=user_ref['id'],
                    tenant_id=tenant_ref['id'],
                    metadata=metadata_ref)
            else:
                catalog_ref = {}

        elif auth and 'token' in auth:
            # NOTE: local renamed from `token` to avoid shadowing the
            # keystone.token module imported at the top of this file.
            old_token_id = auth['token'].get('id', None)

            tenant_name = auth.get('tenantName')

            # more compat
            if tenant_name:
                tenant_ref = self.identity_api.get_tenant_by_name(
                    context=context, tenant_name=tenant_name)
                tenant_id = tenant_ref['id']
            else:
                tenant_id = auth.get('tenantId', None)

            try:
                old_token_ref = self.token_api.get_token(
                    context=context, token_id=old_token_id)
            except exception.NotFound:
                raise exception.Unauthorized()

            user_ref = old_token_ref['user']

            # If the user is disabled don't allow them to authenticate
            current_user_ref = self.identity_api.get_user(
                context=context, user_id=user_ref['id'])
            if not current_user_ref.get('enabled', True):
                LOG.warning('User %s is disabled' % user_ref['id'])
                raise exception.Unauthorized()

            tenants = self.identity_api.get_tenants_for_user(
                context, user_ref['id'])
            # Default to an unscoped token when no tenant was requested;
            # previously tenant_ref could be left unbound (NameError).
            tenant_ref = None
            if tenant_id:
                # Explicit check instead of `assert tenant_id in tenants`:
                # asserts are stripped under `python -O`, which would skip
                # this authorization check entirely, and an AssertionError
                # would surface as a 500 rather than a 401.
                if tenant_id not in tenants:
                    raise exception.Unauthorized()
                tenant_ref = self.identity_api.get_tenant(
                    context=context, tenant_id=tenant_id)
                # If the tenant is disabled don't allow them to authenticate
                if tenant_ref and not tenant_ref.get('enabled', True):
                    LOG.warning('Tenant %s is disabled' % tenant_id)
                    raise exception.Unauthorized()

            if tenant_ref:
                metadata_ref = self.identity_api.get_metadata(
                    context=context,
                    user_id=user_ref['id'],
                    tenant_id=tenant_ref['id'])
                catalog_ref = self.catalog_api.get_catalog(
                    context=context,
                    user_id=user_ref['id'],
                    tenant_id=tenant_ref['id'],
                    metadata=metadata_ref)
            else:
                metadata_ref = {}
                catalog_ref = {}

            # New token inherits the expiry of the token it was traded for.
            token_ref = self.token_api.create_token(
                context, token_id, dict(id=token_id,
                                        user=user_ref,
                                        tenant=tenant_ref,
                                        metadata=metadata_ref,
                                        expires=old_token_ref['expires']))

        else:
            # Neither credential type supplied; reject explicitly instead
            # of failing with a NameError below.
            msg = 'Invalid request: must include passwordCredentials or token'
            raise exception.ValidationError(message=msg)

        # TODO(termie): optimize this call at some point and put it into the
        #               the return for metadata
        # fill out the roles in the metadata
        roles_ref = []
        for role_id in metadata_ref.get('roles', []):
            roles_ref.append(self.identity_api.get_role(context, role_id))
        logging.debug('TOKEN_REF %s', token_ref)
        return self._format_authenticate(token_ref, roles_ref, catalog_ref)

    def _get_token_ref(self, context, token_id, belongs_to=None):
        """Returns a token if a valid one exists.

        Optionally, limited to a token owned by a specific tenant.
        Raises Unauthorized when ``belongs_to`` is given and the token is
        not scoped to that tenant.
        """
        # TODO(termie): this stuff should probably be moved to middleware
        self.assert_admin(context)
        token_ref = self.token_api.get_token(context=context,
                                             token_id=token_id)
        if belongs_to:
            # Explicit check instead of `assert`: asserts vanish under -O
            # and an unscoped token (tenant is None) must not 500.
            tenant = token_ref.get('tenant')
            if not tenant or tenant['id'] != belongs_to:
                raise exception.Unauthorized()
        return token_ref

    # admin only
    def validate_token_head(self, context, token_id):
        """Check that a token is valid.

        Optionally, also ensure that it is owned by a specific tenant.

        Identical to ``validate_token``, except does not return a response.
        """
        belongs_to = context['query_string'].get("belongsTo")
        # Do NOT wrap this in `assert`: under `python -O` the assert (and
        # therefore the entire validation call) would be stripped, making
        # every HEAD validation succeed unconditionally.
        self._get_token_ref(context, token_id, belongs_to)

    # admin only
    def validate_token(self, context, token_id):
        """Check that a token is valid.

        Optionally, also ensure that it is owned by a specific tenant.

        Returns metadata about the token along any associated roles.
        """
        belongs_to = context['query_string'].get("belongsTo")
        token_ref = self._get_token_ref(context, token_id, belongs_to)

        # TODO(termie): optimize this call at some point and put it into the
        #               the return for metadata
        # fill out the roles in the metadata
        metadata_ref = token_ref['metadata']
        roles_ref = []
        for role_id in metadata_ref.get('roles', []):
            roles_ref.append(self.identity_api.get_role(context, role_id))

        # Get a service catalog if belongs_to is not none
        # This is needed for on-behalf-of requests
        catalog_ref = None
        if belongs_to is not None:
            catalog_ref = self.catalog_api.get_catalog(
                context=context,
                user_id=token_ref['user']['id'],
                tenant_id=token_ref['tenant']['id'],
                metadata=metadata_ref)
        return self._format_token(token_ref, roles_ref, catalog_ref)

    def delete_token(self, context, token_id):
        """Delete a token, effectively invalidating it for authz."""
        # TODO(termie): this stuff should probably be moved to middleware
        self.assert_admin(context)
        self.token_api.delete_token(context=context, token_id=token_id)

    def endpoints(self, context, token_id):
        """Return a list of endpoints available to the token."""
        raise exception.NotImplemented()

    def _format_authenticate(self, token_ref, roles_ref, catalog_ref):
        """Build the full authenticate response, including the catalog."""
        o = self._format_token(token_ref, roles_ref)
        o['access']['serviceCatalog'] = self._format_catalog(catalog_ref)
        return o

    def _format_token(self, token_ref, roles_ref, catalog_ref=None):
        """Build the v2.0 'access' document for a token."""
        user_ref = token_ref['user']
        metadata_ref = token_ref['metadata']
        expires = token_ref['expires']
        if expires is not None:
            expires = utils.isotime(expires)
        o = {'access': {'token': {'id': token_ref['id'],
                                  'expires': expires,
                                  },
                        'user': {'id': user_ref['id'],
                                 'name': user_ref['name'],
                                 'username': user_ref['name'],
                                 'roles': roles_ref,
                                 'roles_links': metadata_ref.get(
                                     'roles_links', [])
                                 }
                        }
             }
        if 'tenant' in token_ref and token_ref['tenant']:
            # Compat: older clients expect an 'enabled' flag on the tenant.
            token_ref['tenant']['enabled'] = True
            o['access']['token']['tenant'] = token_ref['tenant']
        if catalog_ref is not None:
            o['access']['serviceCatalog'] = self._format_catalog(catalog_ref)
        return o

    def _format_catalog(self, catalog_ref):
        """Munge catalogs from internal to output format

        Internal catalogs look like:

        {$REGION: {
            {$SERVICE: {
                $key1: $value1,
                ...
                }
            }
        }

        The legacy api wants them to look like

        [{'name': $SERVICE[name],
          'type': $SERVICE,
          'endpoints': [{
              'tenantId': $tenant_id,
              ...
              'region': $REGION,
              }],
          'endpoints_links': [],
         }]

        """
        if not catalog_ref:
            return {}

        services = {}
        for region, region_ref in catalog_ref.iteritems():
            for service, service_ref in region_ref.iteritems():
                new_service_ref = services.get(service, {})
                new_service_ref['name'] = service_ref.pop('name')
                new_service_ref['type'] = service
                new_service_ref['endpoints_links'] = []
                service_ref['region'] = region

                endpoints_ref = new_service_ref.get('endpoints', [])
                endpoints_ref.append(service_ref)

                new_service_ref['endpoints'] = endpoints_ref
                services[service] = new_service_ref

        return services.values()


class ExtensionsController(wsgi.Application):
    """Base extensions controller to be extended by public and admin API's."""

    def __init__(self, extensions=None):
        super(ExtensionsController, self).__init__()
        self.extensions = extensions or {}

    def get_extensions_info(self, context):
        return {'extensions': {'values': self.extensions.values()}}

    def get_extension_info(self, context, extension_alias):
        try:
            return {'extension': self.extensions[extension_alias]}
        except KeyError:
            raise exception.NotFound(target=extension_alias)


class PublicExtensionsController(ExtensionsController):
    pass


class AdminExtensionsController(ExtensionsController):
    def __init__(self, *args, **kwargs):
        super(AdminExtensionsController, self).__init__(*args, **kwargs)
        # TODO(dolph): Extensions should obviously provide this information
        #               themselves, but hardcoding it here allows us to match
        #               the API spec in the short term with minimal complexity.
        self.extensions['OS-KSADM'] = {
            'name': 'Openstack Keystone Admin',
            'namespace': 'http://docs.openstack.org/identity/api/ext/'
                         'OS-KSADM/v1.0',
            'alias': 'OS-KSADM',
            'updated': '2011-08-19T13:25:27-06:00',
            'description': 'Openstack extensions to Keystone v2.0 API '
                           'enabling Admin Operations.',
            'links': [
                {
                    'rel': 'describedby',
                    # TODO(dolph): link needs to be revised after
                    #              bug 928059 merges
                    'type': 'text/html',
                    'href': ('https://github.com/openstack/'
                             'identity-api'),
                }
            ]
        }


@logging.fail_gracefully
def public_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return PublicRouter()


@logging.fail_gracefully
def admin_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return AdminRouter()


@logging.fail_gracefully
def public_version_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return PublicVersionRouter()


@logging.fail_gracefully
def admin_version_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return AdminVersionRouter()
./CrossVul/dataset_final_sorted/CWE-287/py/good_3762_0
crossvul-python_data_good_4331_0
import sys

import ldap  # pylint: disable=import-error
from flask import current_app, jsonify, request
from flask_cors import cross_origin

from alerta.auth.utils import create_token, get_customers
from alerta.exceptions import ApiError
from alerta.models.permission import Permission
from alerta.models.user import User
from alerta.utils.audit import auth_audit_trail

from . import auth


@auth.route('/auth/login', methods=['OPTIONS', 'POST'])
@cross_origin(supports_credentials=True)
def login():
    """Authenticate a user against LDAP and return an API token.

    Expects a JSON body with 'username' (or 'email') and 'password'.
    The login must carry a domain, either 'DOMAIN\\user' or 'user@domain',
    and the domain must be configured in LDAP_DOMAINS.

    Returns a JSON body {'token': ...} on success.
    Raises ApiError with 401 for bad/missing credentials, 403 for an
    unauthorized domain or inactive user, 500 for LDAP/server errors.
    """
    # Allow LDAP server to use a self signed certificate
    if current_app.config['LDAP_ALLOW_SELF_SIGNED_CERT']:
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)

    # Retrieve required fields from client request
    try:
        login = request.json.get('username', None) or request.json['email']
        password = request.json['password']
    except KeyError:
        raise ApiError("must supply 'username' and 'password'", 401)

    # An empty password would otherwise trigger an unauthenticated
    # (anonymous) LDAP bind, which succeeds on many servers — reject it.
    if not password:
        raise ApiError('password not allowed to be empty', 401)

    # Split the login into username + domain; the NetBIOS form carries no
    # email address, so it has to be looked up (or synthesized) later.
    try:
        if '\\' in login:
            domain, username = login.split('\\')
            email = ''
            email_verified = False
        else:
            username, domain = login.split('@')
            email = login
            email_verified = True
    except ValueError:
        raise ApiError('expected username with domain', 401)

    # Validate LDAP domain
    if domain not in current_app.config['LDAP_DOMAINS']:
        raise ApiError('unauthorized domain', 403)

    userdn = current_app.config['LDAP_DOMAINS'][domain] % username

    # Attempt LDAP AUTH
    try:
        trace_level = 2 if current_app.debug else 0
        ldap_connection = ldap.initialize(current_app.config['LDAP_URL'],
                                          trace_level=trace_level)
        ldap_connection.simple_bind_s(userdn, password)
    except ldap.INVALID_CREDENTIALS:
        raise ApiError('invalid username or password', 401)
    except Exception as e:
        raise ApiError(str(e), 500)

    # Get email address from LDAP; fall back to a synthesized (unverified)
    # address if the directory entry has no usable 'mail' attribute.
    if not email_verified:
        try:
            ldap_result = ldap_connection.search_s(
                userdn, ldap.SCOPE_SUBTREE, '(objectClass=*)', ['mail'])
            # NOTE(review): decoding with sys.stdout.encoding looks odd for
            # directory data (utf-8 would be expected) — confirm intent.
            email = ldap_result[0][1]['mail'][0].decode(sys.stdout.encoding)
            email_verified = True
        except Exception:
            email = '{}@{}'.format(username, domain)

    # Create user if not yet there
    user = User.find_by_username(username=login)
    if not user:
        user = User(name=username, login=login, password='', email=email,
                    roles=[], text='LDAP user', email_verified=email_verified)
        try:
            user = user.create()
        except Exception as e:
            # BUG FIX: the exception was previously constructed but never
            # raised, silently continuing with an unsaved user object.
            raise ApiError(str(e), 500)

    # Assign customers & update last login time
    groups = list()
    try:
        groups_filters = current_app.config.get('LDAP_DOMAINS_GROUP', {})
        base_dns = current_app.config.get('LDAP_DOMAINS_BASEDN', {})
        if domain in groups_filters and domain in base_dns:
            resultID = ldap_connection.search(
                base_dns[domain],
                ldap.SCOPE_SUBTREE,
                groups_filters[domain].format(username=username, email=email,
                                              userdn=userdn),
                ['cn']
            )
            resultTypes, results = ldap_connection.result(resultID)
            for _dn, attributes in results:
                groups.append(attributes['cn'][0].decode('utf-8'))
    except ldap.LDAPError as e:
        raise ApiError(str(e), 500)

    # Check user is active
    if user.status != 'active':
        raise ApiError('User {} not active'.format(login), 403)
    user.update_last_login()

    scopes = Permission.lookup(login=login, roles=user.roles + groups)
    customers = get_customers(login=login, groups=[user.domain] + groups)

    auth_audit_trail.send(current_app._get_current_object(),
                          event='basic-ldap-login',
                          message='user login via LDAP',
                          user=login,
                          customers=customers, scopes=scopes,
                          roles=user.roles, groups=groups,
                          resource_id=user.id, type='user', request=request)

    # Generate token
    token = create_token(user_id=user.id, name=user.name, login=user.email,
                         provider='ldap', customers=customers, scopes=scopes,
                         roles=user.roles, groups=groups, email=user.email,
                         email_verified=user.email_verified)
    return jsonify(token=token.tokenize)
./CrossVul/dataset_final_sorted/CWE-287/py/good_4331_0
crossvul-python_data_good_3760_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Identity service.""" import uuid import urllib import urlparse from keystone import config from keystone import exception from keystone import policy from keystone import token from keystone.common import logging from keystone.common import manager from keystone.common import wsgi CONF = config.CONF LOG = logging.getLogger(__name__) class Manager(manager.Manager): """Default pivot point for the Identity backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.identity.driver) class Driver(object): """Interface description for an Identity driver.""" def authenticate(self, user_id=None, tenant_id=None, password=None): """Authenticate a given user, tenant and password. Returns: (user, tenant, metadata). """ raise exception.NotImplemented() def get_tenant(self, tenant_id): """Get a tenant by id. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_tenant_by_name(self, tenant_name): """Get a tenant by name. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_user(self, user_id): """Get a user by id. Returns: user_ref or None. """ raise exception.NotImplemented() def get_user_by_name(self, user_name): """Get a user by name. Returns: user_ref or None. 
""" raise exception.NotImplemented() def get_role(self, role_id): """Get a role by id. Returns: role_ref or None. """ raise exception.NotImplemented() def list_users(self): """List all users in the system. NOTE(termie): I'd prefer if this listed only the users for a given tenant. Returns: a list of user_refs or an empty list. """ raise exception.NotImplemented() def list_roles(self): """List all roles in the system. Returns: a list of role_refs or an empty list. """ raise exception.NotImplemented() # NOTE(termie): seven calls below should probably be exposed by the api # more clearly when the api redesign happens def add_user_to_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def remove_user_from_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def get_all_tenants(self): raise exception.NotImplemented() def get_tenants_for_user(self, user_id): """Get the tenants associated with a given user. Returns: a list of tenant ids. """ raise exception.NotImplemented() def get_roles_for_user_and_tenant(self, user_id, tenant_id): """Get the roles associated with a user within given tenant. Returns: a list of role ids. 
""" raise exception.NotImplemented() def add_role_to_user_and_tenant(self, user_id, tenant_id, role_id): """Add a role to a user within given tenant.""" raise exception.NotImplemented() def remove_role_from_user_and_tenant(self, user_id, tenant_id, role_id): """Remove a role from a user within given tenant.""" raise exception.NotImplemented() # user crud def create_user(self, user_id, user): raise exception.NotImplemented() def update_user(self, user_id, user): raise exception.NotImplemented() def delete_user(self, user_id): raise exception.NotImplemented() # tenant crud def create_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def update_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def delete_tenant(self, tenant_id, tenant): raise exception.NotImplemented() # metadata crud def get_metadata(self, user_id, tenant_id): raise exception.NotImplemented() def create_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def update_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def delete_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() # role crud def create_role(self, role_id, role): raise exception.NotImplemented() def update_role(self, role_id, role): raise exception.NotImplemented() def delete_role(self, role_id): raise exception.NotImplemented() class PublicRouter(wsgi.ComposableRouter): def add_routes(self, mapper): tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_tenants_for_token', conditions=dict(methods=['GET'])) class AdminRouter(wsgi.ComposableRouter): def add_routes(self, mapper): # Tenant Operations tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_all_tenants', conditions=dict(method=['GET'])) mapper.connect('/tenants/{tenant_id}', controller=tenant_controller, action='get_tenant', conditions=dict(method=['GET'])) # 
User Operations user_controller = UserController() mapper.connect('/users/{user_id}', controller=user_controller, action='get_user', conditions=dict(method=['GET'])) # Role Operations roles_controller = RoleController() mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles', controller=roles_controller, action='get_user_roles', conditions=dict(method=['GET'])) mapper.connect('/users/{user_id}/roles', controller=roles_controller, action='get_user_roles', conditions=dict(method=['GET'])) class TenantController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(TenantController, self).__init__() def get_all_tenants(self, context, **kw): """Gets a list of all tenants for an admin user.""" self.assert_admin(context) tenant_refs = self.identity_api.get_tenants(context) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenants_for_token(self, context, **kw): """Get valid tenants for token based on token used to authenticate. Pulls the token from the context, validates it and gets the valid tenants for the user in the token. Doesn't care about token scopedness. 
""" try: token_ref = self.token_api.get_token(context=context, token_id=context['token_id']) except exception.NotFound: raise exception.Unauthorized() user_ref = token_ref['user'] tenant_ids = self.identity_api.get_tenants_for_user( context, user_ref['id']) tenant_refs = [] for tenant_id in tenant_ids: tenant_refs.append(self.identity_api.get_tenant( context=context, tenant_id=tenant_id)) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenant(self, context, tenant_id): # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) tenant = self.identity_api.get_tenant(context, tenant_id) if tenant is None: raise exception.TenantNotFound(tenant_id=tenant_id) return {'tenant': tenant} # CRUD Extension def create_tenant(self, context, tenant): tenant_ref = self._normalize_dict(tenant) if not 'name' in tenant_ref or not tenant_ref['name']: msg = 'Name field is required and cannot be empty' raise exception.ValidationError(message=msg) self.assert_admin(context) tenant_id = (tenant_ref.get('id') and tenant_ref.get('id') or uuid.uuid4().hex) tenant_ref['id'] = tenant_id tenant = self.identity_api.create_tenant( context, tenant_id, tenant_ref) return {'tenant': tenant} def update_tenant(self, context, tenant_id, tenant): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) tenant_ref = self.identity_api.update_tenant( context, tenant_id, tenant) return {'tenant': tenant_ref} def delete_tenant(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) self.identity_api.delete_tenant(context, tenant_id) def get_tenant_users(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, 
                                     tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        user_refs = self.identity_api.get_tenant_users(context, tenant_id)
        return {'users': user_refs}

    def _format_tenant_list(self, tenant_refs, **kwargs):
        # Apply optional marker/limit pagination to a list of tenant refs
        # and normalize each ref's 'enabled' flag.
        #
        # kwargs:
        #   marker -- tenant id after which the page starts; raises
        #             ValidationError if not present in tenant_refs.
        #   limit  -- non-negative int bound on the page size; raises
        #             ValidationError if not a valid non-negative integer.
        marker = kwargs.get('marker')
        page_idx = 0
        if marker is not None:
            for (marker_idx, tenant) in enumerate(tenant_refs):
                if tenant['id'] == marker:
                    # we start pagination after the marker
                    page_idx = marker_idx + 1
                    break
            else:
                msg = 'Marker could not be found'
                raise exception.ValidationError(message=msg)

        limit = kwargs.get('limit')
        if limit is not None:
            try:
                limit = int(limit)
                if limit < 0:
                    raise AssertionError()
            except (ValueError, AssertionError):
                msg = 'Invalid limit value'
                raise exception.ValidationError(message=msg)
        # NOTE(review): the slice end here is the absolute index `limit`,
        # not page_idx + limit, so when a marker is supplied fewer than
        # `limit` items (possibly zero) are returned — confirm whether
        # this is the intended pagination semantics.
        tenant_refs = tenant_refs[page_idx:limit]

        # Backends may omit 'enabled'; callers expect it to default to True.
        for x in tenant_refs:
            if 'enabled' not in x:
                x['enabled'] = True

        o = {'tenants': tenant_refs,
             'tenants_links': []}
        return o


class UserController(wsgi.Application):
    """Admin-only CRUD operations on users (v2 API + CRUD extension)."""

    def __init__(self):
        self.identity_api = Manager()
        self.policy_api = policy.Manager()
        self.token_api = token.Manager()
        super(UserController, self).__init__()

    def get_user(self, context, user_id):
        """Return a single user by id; raises UserNotFound if missing."""
        self.assert_admin(context)
        user_ref = self.identity_api.get_user(context, user_id)
        if not user_ref:
            raise exception.UserNotFound(user_id=user_id)
        return {'user': user_ref}

    def get_users(self, context):
        # NOTE(termie): i can't imagine that this really wants all the data
        # about every single user in the system...
        self.assert_admin(context)
        user_refs = self.identity_api.list_users(context)
        return {'users': user_refs}

    # CRUD extension
    def create_user(self, context, user):
        """Create a user; optionally attach them to tenant 'tenantId'."""
        user = self._normalize_dict(user)
        self.assert_admin(context)

        if not 'name' in user or not user['name']:
            msg = 'Name field is required and cannot be empty'
            raise exception.ValidationError(message=msg)

        # Validate the optional default tenant before creating the user.
        tenant_id = user.get('tenantId', None)
        if (tenant_id is not None
                and self.identity_api.get_tenant(context, tenant_id) is None):
            raise exception.TenantNotFound(tenant_id=tenant_id)
        user_id = uuid.uuid4().hex
        user_ref = user.copy()
        user_ref['id'] = user_id
        new_user_ref = self.identity_api.create_user(
            context, user_id, user_ref)
        if tenant_id:
            self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        return {'user': new_user_ref}

    def update_user(self, context, user_id, user):
        # NOTE(termie): this is really more of a patch than a put
        self.assert_admin(context)
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)

        user_ref = self.identity_api.update_user(context, user_id, user)

        # If the password was changed or the user was disabled we clear tokens
        # Security-relevant: without this, existing tokens would keep working
        # after a password reset or account disablement.
        if user.get('password') or not user.get('enabled', True):
            try:
                for token_id in self.token_api.list_tokens(context, user_id):
                    self.token_api.delete_token(context, token_id)
            except exception.NotImplemented:
                # The users status has been changed but tokens remain valid for
                # backends that can't list tokens for users
                LOG.warning('User %s status has changed, but existing tokens '
                            'remain valid' % user_id)
        return {'user': user_ref}

    def delete_user(self, context, user_id):
        """Delete a user by id; raises UserNotFound if missing."""
        self.assert_admin(context)
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        self.identity_api.delete_user(context, user_id)

    def set_user_enabled(self, context, user_id, user):
        # Thin wrapper: the 'enabled' flag rides in the user dict and is
        # handled by update_user (which also clears tokens on disable).
        return self.update_user(context, user_id, user)

    def set_user_password(self, context, user_id, user):
        # Thin wrapper: the new password rides in the user dict; update_user
        # clears the user's existing tokens when a password is set.
        return self.update_user(context, user_id, user)

    def update_user_tenant(self, context, user_id, user):
        """Update the default tenant."""
        # ensure that we're a member of that tenant
        tenant_id = user.get('tenantId')
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        return self.update_user(context, user_id, user)


class RoleController(wsgi.Application):
    """Admin-only role operations, including legacy Diablo/Essex compat."""

    def __init__(self):
        self.identity_api = Manager()
        self.token_api = token.Manager()
        self.policy_api = policy.Manager()
        super(RoleController, self).__init__()

    # COMPAT(essex-3)
    def get_user_roles(self, context, user_id, tenant_id=None):
        """Get the roles for a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.
        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant ID required')

        user = self.identity_api.get_user(context, user_id)
        if user is None:
            raise exception.UserNotFound(user_id=user_id)
        tenant = self.identity_api.get_tenant(context, tenant_id)
        if tenant is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)

        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        return {'roles': [self.identity_api.get_role(context, x)
                          for x in roles]}

    # CRUD extension
    def get_role(self, context, role_id):
        """Return a single role by id; raises RoleNotFound if missing."""
        self.assert_admin(context)
        role_ref = self.identity_api.get_role(context, role_id)
        if not role_ref:
            raise exception.RoleNotFound(role_id=role_id)
        return {'role': role_ref}

    def create_role(self, context, role):
        """Create a role with a generated uuid id; 'name' is required."""
        role = self._normalize_dict(role)
        self.assert_admin(context)

        if not 'name' in role or not role['name']:
            msg = 'Name field is required and cannot be empty'
            raise exception.ValidationError(message=msg)

        role_id = uuid.uuid4().hex
        role['id'] = role_id
        role_ref = self.identity_api.create_role(context, role_id, role)
        return {'role': role_ref}

    def delete_role(self, context, role_id):
        """Delete a role by id; raises RoleNotFound (via get_role) if missing."""
        self.assert_admin(context)
        # get_role raises RoleNotFound if the role does not exist, so this
        # acts as an existence check before deletion.
        self.get_role(context, role_id)
        self.identity_api.delete_role(context, role_id)

    def get_roles(self, context):
        """List all roles in the system (admin only)."""
        self.assert_admin(context)
        roles = self.identity_api.list_roles(context)
        # TODO(termie): probably inefficient at some point
        return {'roles': roles}

    def add_role_to_user(self, context, user_id, role_id, tenant_id=None):
        """Add a role to a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.
        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant_id required')
        # Validate all three referenced entities before mutating anything.
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        if self.identity_api.get_role(context, role_id) is None:
            raise exception.RoleNotFound(role_id=role_id)

        # This still has the weird legacy semantics that adding a role to
        # a user also adds them to a tenant
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        self.identity_api.add_role_to_user_and_tenant(
            context, user_id, tenant_id, role_id)
        role_ref = self.identity_api.get_role(context, role_id)
        return {'role': role_ref}

    def remove_role_from_user(self, context, user_id, role_id,
                              tenant_id=None):
        """Remove a role from a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.
        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant_id required')
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        if self.identity_api.get_role(context, role_id) is None:
            raise exception.RoleNotFound(role_id=role_id)

        # This still has the weird legacy semantics that adding a role to
        # a user also adds them to a tenant, so we must follow up on that
        self.identity_api.remove_role_from_user_and_tenant(
            context, user_id, tenant_id, role_id)
        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        if not roles:
            # Last role for this pair: drop tenant membership as well.
            self.identity_api.remove_user_from_tenant(
                context, tenant_id, user_id)
        return

    # COMPAT(diablo): CRUD extension
    def get_role_refs(self, context, user_id):
        """Ultimate hack to get around having to make role_refs first-class.

        This will basically iterate over the various roles the user has in
        all tenants the user is a member of and create fake role_refs where
        the id encodes the user-tenant-role information so we can look up
        the appropriate data when we need to delete them.
        """
        self.assert_admin(context)
        # Ensure user exists by getting it first.
        self.identity_api.get_user(context, user_id)
        tenant_ids = self.identity_api.get_tenants_for_user(context, user_id)
        o = []
        for tenant_id in tenant_ids:
            role_ids = self.identity_api.get_roles_for_user_and_tenant(
                context, user_id, tenant_id)
            for role_id in role_ids:
                # The fake ref's 'id' is a query-string encoding of the
                # (role, tenant, user) triple; delete_role_ref decodes it
                # with urlparse.parse_qs.
                ref = {'roleId': role_id,
                       'tenantId': tenant_id,
                       'userId': user_id}
                ref['id'] = urllib.urlencode(ref)
                o.append(ref)
        return {'roles': o}

    # COMPAT(diablo): CRUD extension
    def create_role_ref(self, context, user_id, role):
        """This is actually used for adding a user to a tenant.

        In the legacy data model adding a user to a tenant required setting
        a role.
        """
        self.assert_admin(context)
        # TODO(termie): for now we're ignoring the actual role
        tenant_id = role.get('tenantId')
        role_id = role.get('roleId')
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        self.identity_api.add_role_to_user_and_tenant(
            context, user_id, tenant_id, role_id)
        role_ref = self.identity_api.get_role(context, role_id)
        return {'role': role_ref}

    # COMPAT(diablo): CRUD extension
    def delete_role_ref(self, context, user_id, role_ref_id):
        """This is actually used for deleting a user from a tenant.

        In the legacy data model removing a user from a tenant required
        deleting a role.

        To emulate this, we encode the tenant and role in the role_ref_id,
        and if this happens to be the last role for the user-tenant pair,
        we remove the user from the tenant.
        """
        self.assert_admin(context)
        # TODO(termie): for now we're ignoring the actual role
        # role_ref_id is the urlencoded triple produced by get_role_refs;
        # parse_qs yields lists, hence the [0] indexing.
        role_ref_ref = urlparse.parse_qs(role_ref_id)
        tenant_id = role_ref_ref.get('tenantId')[0]
        role_id = role_ref_ref.get('roleId')[0]
        self.identity_api.remove_role_from_user_and_tenant(
            context, user_id, tenant_id, role_id)
        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        if not roles:
            self.identity_api.remove_user_from_tenant(
                context, tenant_id, user_id)
./CrossVul/dataset_final_sorted/CWE-287/py/good_3760_0
crossvul-python_data_good_1224_4
# -*- coding: utf-8 -*-
"""Views for account registration, realm creation, and account discovery."""
from typing import List, Dict, Optional

from django.utils.translation import ugettext as _
from django.conf import settings
from django.contrib.auth import authenticate, get_backends
from django.urls import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpRequest
from django.shortcuts import redirect, render
from django.core.exceptions import ValidationError
from django.core import validators
from zerver.context_processors import get_realm_from_request, login_context
from zerver.models import UserProfile, Realm, Stream, MultiuseInvite, \
    name_changes_disabled, email_to_username, email_allowed_for_realm, \
    get_realm, get_user_by_delivery_email, get_default_stream_groups, DisposableEmailError, \
    DomainNotAllowedForRealmError, get_source_profile, EmailContainsPlusError, \
    PreregistrationUser
from zerver.lib.send_email import send_email, FromAddress
from zerver.lib.actions import do_change_password, do_change_full_name, \
    do_activate_user, do_create_user, do_create_realm, \
    validate_email_for_realm, \
    do_set_user_display_setting, lookup_default_stream_groups, bulk_add_subscriptions
from zerver.forms import RegistrationForm, HomepageForm, RealmCreationForm, \
    FindMyTeamForm, RealmRedirectForm
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
from zerver.decorator import require_post, \
    do_login
from zerver.lib.onboarding import send_initial_realm_messages, setup_realm_internal_bots
from zerver.lib.subdomains import get_subdomain, is_root_domain_available
from zerver.lib.timezone import get_all_timezones
from zerver.lib.users import get_accounts_for_email
from zerver.lib.zephyr import compute_mit_user_fullname
from zerver.views.auth import create_preregistration_user, redirect_and_log_into_subdomain, \
    redirect_to_deactivation_notice, get_safe_redirect_to
from zproject.backends import ldap_auth_enabled, password_auth_enabled, \
    ZulipLDAPExceptionNoMatchingLDAPUser, email_auth_enabled, ZulipLDAPAuthBackend
from confirmation.models import Confirmation, RealmCreationKey, ConfirmationKeyException, \
    validate_key, create_confirmation_link, get_object_from_key, \
    render_confirmation_key_error

import logging
import smtplib
import urllib


def check_prereg_key_and_redirect(request: HttpRequest, confirmation_key: str) -> HttpResponse:
    """Validate a registration confirmation key and show the landing page.

    Only keys belonging to user-registration, invitation, or realm-creation
    confirmations are accepted; any other (or unknown) key renders the
    standard confirmation-key error page.
    """
    # If the key isn't valid, show the error message on the original URL
    confirmation = Confirmation.objects.filter(confirmation_key=confirmation_key).first()
    if confirmation is None or confirmation.type not in [
            Confirmation.USER_REGISTRATION, Confirmation.INVITATION,
            Confirmation.REALM_CREATION]:
        return render_confirmation_key_error(
            request, ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST))
    try:
        get_object_from_key(confirmation_key, confirmation.type)
    except ConfirmationKeyException as exception:
        return render_confirmation_key_error(request, exception)

    # confirm_preregistrationuser.html just extracts the confirmation_key
    # (and GET parameters) and redirects to /accounts/register, so that the
    # user can enter their information on a cleaner URL.
    return render(request, 'confirmation/confirm_preregistrationuser.html',
                  context={
                      'key': confirmation_key,
                      'full_name': request.GET.get("full_name", None)})

@require_post
def accounts_register(request: HttpRequest) -> HttpResponse:
    """Complete registration from a confirmation key (POST only).

    Handles normal sign-up, invitations, and realm creation; delegates
    authentication to LDAP when the realm uses it.
    """
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    realm_creation = prereg_user.realm_creation
    password_required = prereg_user.password_required
    is_realm_admin = prereg_user.invited_as == PreregistrationUser.INVITE_AS['REALM_ADMIN'] or realm_creation
    is_guest = prereg_user.invited_as == PreregistrationUser.INVITE_AS['GUEST_USER']

    try:
        validators.validate_email(email)
    except ValidationError:
        return render(request, "zerver/invalid_email.html", context={"invalid_email": True})

    if realm_creation:
        # For creating a new realm, there is no existing realm or domain
        realm = None
    else:
        # Reject keys being replayed against the wrong subdomain.
        if get_subdomain(request) != prereg_user.realm.string_id:
            return render_confirmation_key_error(
                request, ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST))
        realm = prereg_user.realm

        try:
            email_allowed_for_realm(email, realm)
        except DomainNotAllowedForRealmError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "closed_domain": True})
        except DisposableEmailError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "disposable_emails_not_allowed": True})
        except EmailContainsPlusError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "email_contains_plus": True})

        if realm.deactivated:
            # The user is trying to register for a deactivated realm. Advise them to
            # contact support.
            return redirect_to_deactivation_notice()

        try:
            validate_email_for_realm(realm, email)
        except ValidationError:
            # Email already in use in this realm: send them to login instead.
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' +
                                        urllib.parse.quote_plus(email))

    name_validated = False
    full_name = None
    require_ldap_password = False

    if request.POST.get('from_confirmation'):
        try:
            del request.session['authenticated_full_name']
        except KeyError:
            pass
        if realm is not None and realm.is_zephyr_mirror_realm:
            # For MIT users, we can get an authoritative name from Hesiod.
            # Technically we should check that this is actually an MIT
            # realm, but we can cross that bridge if we ever get a non-MIT
            # zephyr mirroring realm.
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""},
                realm_creation=realm_creation)
            name_validated = True
        elif settings.POPULATE_PROFILE_VIA_LDAP:
            for backend in get_backends():
                if isinstance(backend, LDAPBackend):
                    try:
                        ldap_username = backend.django_to_ldap_username(email)
                    except ZulipLDAPExceptionNoMatchingLDAPUser:
                        logging.warning("New account email %s could not be found in LDAP" % (email,))
                        form = RegistrationForm(realm_creation=realm_creation)
                        break
                    ldap_user = _LDAPUser(backend, ldap_username)
                    try:
                        ldap_full_name, _ = backend.get_mapped_name(ldap_user)
                        request.session['authenticated_full_name'] = ldap_full_name
                        name_validated = True
                        # We don't use initial= here, because if the form is
                        # complete (that is, no additional fields need to be
                        # filled out by the user) we want the form to validate,
                        # so they can be directly registered without having to
                        # go through this interstitial.
                        form = RegistrationForm({'full_name': ldap_full_name},
                                                realm_creation=realm_creation)
                        # Check whether this is ZulipLDAPAuthBackend,
                        # which is responsible for authentication and
                        # requires that LDAP accounts enter their LDAP
                        # password to register, or ZulipLDAPUserPopulator,
                        # which just populates UserProfile fields (no auth).
                        require_ldap_password = isinstance(backend, ZulipLDAPAuthBackend)
                        break
                    except TypeError:
                        # Let the user fill out a name and/or try another backend
                        form = RegistrationForm(realm_creation=realm_creation)
        elif prereg_user.full_name:
            if prereg_user.full_name_validated:
                request.session['authenticated_full_name'] = prereg_user.full_name
                name_validated = True
                form = RegistrationForm({'full_name': prereg_user.full_name},
                                        realm_creation=realm_creation)
            else:
                form = RegistrationForm(initial={'full_name': prereg_user.full_name},
                                        realm_creation=realm_creation)
        elif 'full_name' in request.POST:
            form = RegistrationForm(
                initial={'full_name': request.POST.get('full_name')},
                realm_creation=realm_creation
            )
        else:
            form = RegistrationForm(realm_creation=realm_creation)
    else:
        postdata = request.POST.copy()
        if name_changes_disabled(realm):
            # If we populate profile information via LDAP and we have a
            # verified name from you on file, use that. Otherwise, fall
            # back to the full name in the request.
            try:
                postdata.update({'full_name': request.session['authenticated_full_name']})
                name_validated = True
            except KeyError:
                pass
        form = RegistrationForm(postdata, realm_creation=realm_creation)
    if not (password_auth_enabled(realm) and password_required):
        form['password'].field.required = False

    if form.is_valid():
        if password_auth_enabled(realm) and form['password'].field.required:
            password = form.cleaned_data['password']
        else:
            # If the user wasn't prompted for a password when
            # completing the authentication form (because they're
            # signing up with SSO and no password is required), set
            # the password field to `None` (Which causes Django to
            # create an unusable password).
            password = None

        if realm_creation:
            string_id = form.cleaned_data['realm_subdomain']
            realm_name = form.cleaned_data['realm_name']
            realm = do_create_realm(string_id, realm_name)
            setup_realm_internal_bots(realm)
        # By this point realm was either looked up or just created.
        assert(realm is not None)

        full_name = form.cleaned_data['full_name']
        short_name = email_to_username(email)
        default_stream_group_names = request.POST.getlist('default_stream_group')
        default_stream_groups = lookup_default_stream_groups(default_stream_group_names, realm)

        # Only accept a timezone we recognize; otherwise leave it empty.
        timezone = ""
        if 'timezone' in request.POST and request.POST['timezone'] in get_all_timezones():
            timezone = request.POST['timezone']

        # "on" is the sentinel for "don't import settings from another realm".
        if 'source_realm' in request.POST and request.POST["source_realm"] != "on":
            source_profile = get_source_profile(email, request.POST["source_realm"])
        else:
            source_profile = None

        if not realm_creation:
            try:
                existing_user_profile = get_user_by_delivery_email(email, realm)  # type: Optional[UserProfile]
            except UserProfile.DoesNotExist:
                existing_user_profile = None
        else:
            existing_user_profile = None

        user_profile = None  # type: Optional[UserProfile]
        return_data = {}  # type: Dict[str, bool]
        if ldap_auth_enabled(realm):
            # If the user was authenticated using an external SSO
            # mechanism like Google or GitHub auth, then authentication
            # will have already been done before creating the
            # PreregistrationUser object with password_required=False, and
            # so we don't need to worry about passwords.
            #
            # If instead the realm is using EmailAuthBackend, we will
            # set their password above.
            #
            # But if the realm is using LDAPAuthBackend, we need to verify
            # their LDAP password (which will, as a side effect, create
            # the user account) here using authenticate.
            # pregeg_user.realm_creation carries the information about whether
            # we're in realm creation mode, and the ldap flow will handle
            # that and create the user with the appropriate parameters.
            user_profile = authenticate(request,
                                        username=email,
                                        password=password,
                                        realm=realm,
                                        prereg_user=prereg_user,
                                        return_data=return_data)
            if user_profile is None:
                if return_data.get("no_matching_ldap_user") and email_auth_enabled(realm):
                    # If both the LDAP and Email auth backends are
                    # enabled, and there's no matching user in the LDAP
                    # directory then the intent is to create a user in the
                    # realm with their email outside the LDAP organization
                    # (with e.g. a password stored in the Zulip database,
                    # not LDAP).  So we fall through and create the new
                    # account.
                    #
                    # It's likely that we can extend this block to the
                    # Google and GitHub auth backends with no code changes
                    # other than here.
                    pass
                else:
                    # TODO: This probably isn't going to give a
                    # user-friendly error message, but it doesn't
                    # particularly matter, because the registration form
                    # is hidden for most users.
                    return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' +
                                                urllib.parse.quote_plus(email))

            elif not realm_creation:
                # Since we'll have created a user, we now just log them in.
                return login_and_go_to_home(request, user_profile)
            else:
                # With realm_creation=True, we're going to return further down,
                # after finishing up the creation process.
                pass

        if existing_user_profile is not None and existing_user_profile.is_mirror_dummy:
            # Reactivate a pre-existing mirror-dummy account in place
            # rather than creating a new user.
            user_profile = existing_user_profile
            do_activate_user(user_profile)
            do_change_password(user_profile, password)
            do_change_full_name(user_profile, full_name, user_profile)
            do_set_user_display_setting(user_profile, 'timezone', timezone)
            # TODO: When we clean up the `do_activate_user` code path,
            # make it respect invited_as_admin / is_realm_admin.
        if user_profile is None:
            user_profile = do_create_user(email, password, realm, full_name, short_name,
                                          prereg_user=prereg_user,
                                          is_realm_admin=is_realm_admin,
                                          is_guest=is_guest,
                                          tos_version=settings.TOS_VERSION,
                                          timezone=timezone,
                                          newsletter_data={"IP": request.META['REMOTE_ADDR']},
                                          default_stream_groups=default_stream_groups,
                                          source_profile=source_profile,
                                          realm_creation=realm_creation)

        if realm_creation:
            bulk_add_subscriptions([realm.signup_notifications_stream], [user_profile])
            send_initial_realm_messages(realm)

            # Because for realm creation, registration happens on the
            # root domain, we need to log them into the subdomain for
            # their new realm.
            return redirect_and_log_into_subdomain(realm, full_name, email)

        # This dummy_backend check below confirms the user is
        # authenticating to the correct subdomain.
        auth_result = authenticate(username=user_profile.delivery_email,
                                   realm=realm,
                                   return_data=return_data,
                                   use_dummy_backend=True)
        if return_data.get('invalid_subdomain'):
            # By construction, this should never happen.
            logging.error("Subdomain mismatch in registration %s: %s" % (
                realm.subdomain, user_profile.delivery_email,))
            return redirect('/')

        return login_and_go_to_home(request, auth_result)

    return render(
        request,
        'zerver/register.html',
        context={'form': form,
                 'email': email,
                 'key': key,
                 'full_name': request.session.get('authenticated_full_name', None),
                 'lock_name': name_validated and name_changes_disabled(realm),
                 # password_auth_enabled is normally set via our context processor,
                 # but for the registration form, there is no logged in user yet, so
                 # we have to set it here.
                 'creating_new_team': realm_creation,
                 'password_required': password_auth_enabled(realm) and password_required,
                 'require_ldap_password': require_ldap_password,
                 'password_auth_enabled': password_auth_enabled(realm),
                 'root_domain_available': is_root_domain_available(),
                 'default_stream_groups': get_default_stream_groups(realm),
                 'accounts': get_accounts_for_email(email),
                 'MAX_REALM_NAME_LENGTH': str(Realm.MAX_REALM_NAME_LENGTH),
                 'MAX_NAME_LENGTH': str(UserProfile.MAX_NAME_LENGTH),
                 'MAX_PASSWORD_LENGTH': str(form.MAX_PASSWORD_LENGTH),
                 'MAX_REALM_SUBDOMAIN_LENGTH': str(Realm.MAX_REALM_SUBDOMAIN_LENGTH)
                 }
    )

def login_and_go_to_home(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    """Log the user in and redirect to their realm's home page."""
    do_login(request, user_profile)
    return HttpResponseRedirect(user_profile.realm.uri + reverse('zerver.views.home.home'))

def prepare_activation_url(email: str, request: HttpRequest,
                           realm_creation: bool=False,
                           streams: Optional[List[Stream]]=None,
                           invited_as: Optional[int]=None) -> str:
    """
    Send an email with a confirmation link to the provided e-mail so the user
    can complete their registration.
    """
    prereg_user = create_preregistration_user(email, request, realm_creation)

    if streams is not None:
        prereg_user.streams.set(streams)

    if invited_as is not None:
        prereg_user.invited_as = invited_as
        prereg_user.save()

    confirmation_type = Confirmation.USER_REGISTRATION
    if realm_creation:
        confirmation_type = Confirmation.REALM_CREATION

    activation_url = create_confirmation_link(prereg_user, request.get_host(), confirmation_type)
    if settings.DEVELOPMENT and realm_creation:
        # Expose the key in the session so development tooling can fetch it.
        request.session['confirmation_key'] = {'confirmation_key': activation_url.split('/')[-1]}
    return activation_url

def send_confirm_registration_email(email: str, activation_url: str, language: str) -> None:
    """Email the registration-confirmation link to `email`."""
    send_email('zerver/emails/confirm_registration', to_emails=[email],
               from_address=FromAddress.tokenized_no_reply_address(),
               language=language, context={'activate_url': activation_url})

def redirect_to_email_login_url(email: str) -> HttpResponseRedirect:
    """Redirect an already-registered email to the login page."""
    login_url = reverse('django.contrib.auth.views.login')
    email = urllib.parse.quote_plus(email)
    redirect_url = login_url + '?already_registered=' + email
    return HttpResponseRedirect(redirect_url)

def create_realm(request: HttpRequest, creation_key: Optional[str]=None) -> HttpResponse:
    """Handle the new-organization form, gated by OPEN_REALM_CREATION or a key."""
    try:
        key_record = validate_key(creation_key)
    except RealmCreationKey.Invalid:
        return render(request, "zerver/realm_creation_failed.html",
                      context={'message': _('The organization creation link has expired'
                                            ' or is not valid.')})
    if not settings.OPEN_REALM_CREATION:
        if key_record is None:
            return render(request, "zerver/realm_creation_failed.html",
                          context={'message': _('New organization creation disabled')})

    # When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm,
    # with a few restrictions on their email address.
    if request.method == 'POST':
        form = RealmCreationForm(request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            activation_url = prepare_activation_url(email, request, realm_creation=True)
            if key_record is not None and key_record.presume_email_valid:
                # The user has a token created from the server command line;
                # skip confirming the email is theirs, taking their word for it.
                # This is essential on first install if the admin hasn't stopped
                # to configure outbound email up front, or it isn't working yet.
                key_record.delete()
                return HttpResponseRedirect(activation_url)

            try:
                send_confirm_registration_email(email, activation_url, request.LANGUAGE_CODE)
            except smtplib.SMTPException as e:
                logging.error('Error in create_realm: %s' % (str(e),))
                return HttpResponseRedirect("/config-error/smtp")

            if key_record is not None:
                # Single-use key: consume it once the confirmation is sent.
                key_record.delete()
            return HttpResponseRedirect(reverse('new_realm_send_confirm', kwargs={'email': email}))
    else:
        form = RealmCreationForm()
    return render(request,
                  'zerver/create_realm.html',
                  context={'form': form, 'current_url': request.get_full_path},
                  )

def accounts_home(request: HttpRequest, multiuse_object_key: Optional[str]="",
                  multiuse_object: Optional[MultiuseInvite]=None) -> HttpResponse:
    """Signup landing page; also the POST target for starting registration."""
    try:
        realm = get_realm(get_subdomain(request))
    except Realm.DoesNotExist:
        return HttpResponseRedirect(reverse('zerver.views.registration.find_account'))
    if realm.deactivated:
        return redirect_to_deactivation_notice()

    from_multiuse_invite = False
    streams_to_subscribe = None
    invited_as = None

    if multiuse_object:
        # A multi-use invite overrides the realm and pre-selects streams/role.
        realm = multiuse_object.realm
        streams_to_subscribe = multiuse_object.streams.all()
        from_multiuse_invite = True
        invited_as = multiuse_object.invited_as

    if request.method == 'POST':
        form = HomepageForm(request.POST, realm=realm, from_multiuse_invite=from_multiuse_invite)
        if form.is_valid():
            email = form.cleaned_data['email']
            activation_url = prepare_activation_url(email, request,
                                                    streams=streams_to_subscribe,
                                                    invited_as=invited_as)
            try:
                send_confirm_registration_email(email, activation_url, request.LANGUAGE_CODE)
            except smtplib.SMTPException as e:
                logging.error('Error in accounts_home: %s' % (str(e),))
                return HttpResponseRedirect("/config-error/smtp")

            return HttpResponseRedirect(reverse('signup_send_confirm', kwargs={'email': email}))

        email = request.POST['email']
        try:
            validate_email_for_realm(realm, email)
        except ValidationError:
            # Email already registered in this realm: send to login instead.
            return redirect_to_email_login_url(email)
    else:
        form = HomepageForm(realm=realm)

    context = login_context(request)
    context.update({'form': form, 'current_url': request.get_full_path,
                    'multiuse_object_key': multiuse_object_key,
                    'from_multiuse_invite': from_multiuse_invite})
    return render(request, 'zerver/accounts_home.html', context=context)

def accounts_home_from_multiuse_invite(request: HttpRequest, confirmation_key: str) -> HttpResponse:
    """Resolve a multi-use invite key, then delegate to accounts_home."""
    multiuse_object = None
    try:
        multiuse_object = get_object_from_key(confirmation_key, Confirmation.MULTIUSE_INVITE)
        # Required for oAuth2
    except ConfirmationKeyException as exception:
        realm = get_realm_from_request(request)
        if realm is None or realm.invite_required:
            return render_confirmation_key_error(request, exception)
    return accounts_home(request, multiuse_object_key=confirmation_key,
                         multiuse_object=multiuse_object)

def generate_204(request: HttpRequest) -> HttpResponse:
    """Return an empty 204 response (connectivity-check endpoint)."""
    return HttpResponse(content=None, status=204)

def find_account(request: HttpRequest) -> HttpResponse:
    """Help a user find which accounts/realms their email addresses belong to."""
    from zerver.context_processors import common_context
    url = reverse('zerver.views.registration.find_account')

    emails = []  # type: List[str]
    if request.method == 'POST':
        form = FindMyTeamForm(request.POST)
        if form.is_valid():
            emails = form.cleaned_data['emails']
            for user in UserProfile.objects.filter(
                    delivery_email__in=emails, is_active=True, is_bot=False,
                    realm__deactivated=False):
                context = common_context(user)
                context.update({
                    'email': user.delivery_email,
                })
                send_email('zerver/emails/find_team', to_user_ids=[user.id], context=context)

            # Note: Show all the emails in the result otherwise this
            # feature can be used to ascertain which email addresses
            # are associated with Zulip.
            data = urllib.parse.urlencode({'emails': ','.join(emails)})
            return redirect(url + "?" + data)
    else:
        form = FindMyTeamForm()

    result = request.GET.get('emails')
    # The below validation is perhaps unnecessary, in that we
    # shouldn't get able to get here with an invalid email unless
    # the user hand-edits the URLs.
    if result:
        for email in result.split(','):
            try:
                validators.validate_email(email)
                emails.append(email)
            except ValidationError:
                pass

    return render(request, 'zerver/find_account.html',
                  context={'form': form, 'current_url': lambda: url,
                           'emails': emails},)

def realm_redirect(request: HttpRequest) -> HttpResponse:
    """Redirect the user to the realm matching the subdomain they enter."""
    if request.method == 'POST':
        form = RealmRedirectForm(request.POST)
        if form.is_valid():
            subdomain = form.cleaned_data['subdomain']
            realm = get_realm(subdomain)
            # get_safe_redirect_to constrains "next" to the realm's own URI.
            redirect_to = get_safe_redirect_to(request.GET.get("next", ""), realm.uri)
            return HttpResponseRedirect(redirect_to)
    else:
        form = RealmRedirectForm()

    return render(request, 'zerver/realm_redirect.html', context={'form': form})
./CrossVul/dataset_final_sorted/CWE-287/py/good_1224_4
crossvul-python_data_good_650_1
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ Core protocol implementation """ from __future__ import print_function import os import socket import sys import threading import time import weakref from hashlib import md5, sha1, sha256, sha512 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import algorithms, Cipher, modes import paramiko from paramiko import util from paramiko.auth_handler import AuthHandler from paramiko.ssh_gss import GSSAuth from paramiko.channel import Channel from paramiko.common import ( xffffffff, cMSG_CHANNEL_OPEN, cMSG_IGNORE, cMSG_GLOBAL_REQUEST, DEBUG, MSG_KEXINIT, MSG_IGNORE, MSG_DISCONNECT, MSG_DEBUG, ERROR, WARNING, cMSG_UNIMPLEMENTED, INFO, cMSG_KEXINIT, cMSG_NEWKEYS, MSG_NEWKEYS, cMSG_REQUEST_SUCCESS, cMSG_REQUEST_FAILURE, CONNECTION_FAILED_CODE, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_SUCCEEDED, cMSG_CHANNEL_OPEN_FAILURE, cMSG_CHANNEL_OPEN_SUCCESS, MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, MSG_CHANNEL_OPEN, MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_REQUEST, 
    MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MIN_WINDOW_SIZE, MIN_PACKET_SIZE,
    MAX_WINDOW_SIZE, DEFAULT_WINDOW_SIZE, DEFAULT_MAX_PACKET_SIZE,
    HIGHEST_USERAUTH_MESSAGE_ID,
)
from paramiko.compress import ZlibCompressor, ZlibDecompressor
from paramiko.dsskey import DSSKey
from paramiko.kex_gex import KexGex, KexGexSHA256
from paramiko.kex_group1 import KexGroup1
from paramiko.kex_group14 import KexGroup14
from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14
from paramiko.message import Message
from paramiko.packet import Packetizer, NeedRekeyException
from paramiko.primes import ModulusPack
from paramiko.py3compat import string_types, long, byte_ord, b, input, PY2
from paramiko.rsakey import RSAKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.server import ServerInterface
from paramiko.sftp_client import SFTPClient
from paramiko.ssh_exception import (
    SSHException, BadAuthenticationType, ChannelException, ProxyCommandFailure,
)
from paramiko.util import retry_on_signal, ClosingContextManager, clamp_value


# for thread cleanup: Transport threads register themselves here so that
# process exit can shut them down cleanly instead of leaving them blocked
# on socket reads.
_active_threads = []

def _join_lingering_threads():
    # Ask every still-registered Transport thread to stop; called via atexit.
    for thr in _active_threads:
        thr.stop_thread()

import atexit
atexit.register(_join_lingering_threads)


class Transport(threading.Thread, ClosingContextManager):
    """
    An SSH Transport attaches to a stream (usually a socket), negotiates an
    encrypted session, authenticates, and then creates stream tunnels, called
    `channels <.Channel>`, across the session.  Multiple channels can be
    multiplexed across a single session (and often are, in the case of port
    forwardings).

    Instances of this class may be used as context managers.
    """
    # Sentinel objects used to select direction when building cipher engines.
    _ENCRYPT = object()
    _DECRYPT = object()

    # SSH protocol version and the client-version string we announce.
    _PROTO_ID = '2.0'
    _CLIENT_ID = 'paramiko_%s' % paramiko.__version__

    # These tuples of algorithm identifiers are in preference order; do not
    # reorder without reason!
    _preferred_ciphers = (
        'aes128-ctr',
        'aes192-ctr',
        'aes256-ctr',
        'aes128-cbc',
        'aes192-cbc',
        'aes256-cbc',
        'blowfish-cbc',
        '3des-cbc',
    )
    _preferred_macs = (
        'hmac-sha2-256',
        'hmac-sha2-512',
        'hmac-sha1',
        'hmac-md5',
        'hmac-sha1-96',
        'hmac-md5-96',
    )
    _preferred_keys = (
        'ecdsa-sha2-nistp256',
        'ecdsa-sha2-nistp384',
        'ecdsa-sha2-nistp521',
        'ssh-rsa',
        'ssh-dss',
    )
    _preferred_kex = (
        'diffie-hellman-group1-sha1',
        'diffie-hellman-group14-sha1',
        'diffie-hellman-group-exchange-sha1',
        'diffie-hellman-group-exchange-sha256',
    )
    _preferred_gsskex = (
        'gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==',
        'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==',
        'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==',
    )
    _preferred_compression = ('none',)

    # Maps negotiated cipher name -> implementation details (cryptography
    # primitive class, mode, and block/key sizes in bytes) used when setting
    # up the packetizer's encryption engines.
    _cipher_info = {
        'aes128-ctr': {
            'class': algorithms.AES,
            'mode': modes.CTR,
            'block-size': 16,
            'key-size': 16
        },
        'aes192-ctr': {
            'class': algorithms.AES,
            'mode': modes.CTR,
            'block-size': 16,
            'key-size': 24
        },
        'aes256-ctr': {
            'class': algorithms.AES,
            'mode': modes.CTR,
            'block-size': 16,
            'key-size': 32
        },
        'blowfish-cbc': {
            'class': algorithms.Blowfish,
            'mode': modes.CBC,
            'block-size': 8,
            'key-size': 16
        },
        'aes128-cbc': {
            'class': algorithms.AES,
            'mode': modes.CBC,
            'block-size': 16,
            'key-size': 16
        },
        'aes192-cbc': {
            'class': algorithms.AES,
            'mode': modes.CBC,
            'block-size': 16,
            'key-size': 24
        },
        'aes256-cbc': {
            'class': algorithms.AES,
            'mode': modes.CBC,
            'block-size': 16,
            'key-size': 32
        },
        '3des-cbc': {
            'class': algorithms.TripleDES,
            'mode': modes.CBC,
            'block-size': 8,
            'key-size': 24
        },
    }

    # Maps MAC name -> hash constructor and digest truncation size (bytes).
    _mac_info = {
        'hmac-sha1': {'class': sha1, 'size': 20},
        'hmac-sha1-96': {'class': sha1, 'size': 12},
        'hmac-sha2-256': {'class': sha256, 'size': 32},
        'hmac-sha2-512': {'class': sha512, 'size': 64},
        'hmac-md5': {'class': md5, 'size': 16},
        'hmac-md5-96': {'class': md5, 'size': 12},
    }

    # Maps host-key algorithm name -> PKey subclass used to parse that key.
    _key_info = {
        'ssh-rsa': RSAKey,
        'ssh-dss': DSSKey,
        'ecdsa-sha2-nistp256': ECDSAKey,
        'ecdsa-sha2-nistp384': ECDSAKey,
        'ecdsa-sha2-nistp521': ECDSAKey,
    }

    # Maps kex algorithm name -> kex engine class.
    _kex_info = {
        'diffie-hellman-group1-sha1': KexGroup1,
        'diffie-hellman-group14-sha1': KexGroup14,
        'diffie-hellman-group-exchange-sha1': KexGex,
        'diffie-hellman-group-exchange-sha256': KexGexSHA256,
        'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup1,
        'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup14,
        'gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGex
    }

    _compression_info = {
        # zlib@openssh.com is just zlib, but only turned on after a successful
        # authentication.  openssh servers may only offer this type because
        # they've had troubles with security holes in zlib in the past.
        'zlib@openssh.com': (ZlibCompressor, ZlibDecompressor),
        'zlib': (ZlibCompressor, ZlibDecompressor),
        'none': (None, None),
    }

    # Shared (class-level) cache of group-exchange primes; see
    # `load_server_moduli`.
    _modulus_pack = None
    # Socket timeout used so the worker thread can poll self.active.
    _active_check_timeout = 0.1

    def __init__(self,
                 sock,
                 default_window_size=DEFAULT_WINDOW_SIZE,
                 default_max_packet_size=DEFAULT_MAX_PACKET_SIZE,
                 gss_kex=False,
                 gss_deleg_creds=True):
        """
        Create a new SSH session over an existing socket, or socket-like
        object.  This only creates the `.Transport` object; it doesn't begin
        the SSH session yet.  Use `connect` or `start_client` to begin a client
        session, or `start_server` to begin a server session.

        If the object is not actually a socket, it must have the following
        methods:

        - ``send(str)``: Writes from 1 to ``len(str)`` bytes, and returns an
          int representing the number of bytes written.  Returns
          0 or raises ``EOFError`` if the stream has been closed.
        - ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a
          string.  Returns 0 or raises ``EOFError`` if the stream has been
          closed.
        - ``close()``: Closes the socket.
        - ``settimeout(n)``: Sets a (float) timeout on I/O operations.

        For ease of use, you may also pass in an address (as a tuple) or a
        host string as the ``sock`` argument.  (A host string is a hostname
        with an optional port (separated by ``":"``) which will be converted
        into a tuple of ``(hostname, port)``.)  A socket will be connected to
        this address and used for communication.  Exceptions from the
        ``socket`` call may be thrown in this case.

        ..
        note:: Modifying the window and packet sizes might have adverse
            effects on your channels created from this transport. The default
            values are the same as in the OpenSSH code base and have been
            battle tested.

        :param socket sock:
            a socket or socket-like object to create the session over.
        :param int default_window_size:
            sets the default window size on the transport. (defaults to
            2097152)
        :param int default_max_packet_size:
            sets the default max packet size on the transport. (defaults to
            32768)

        .. versionchanged:: 1.15
            Added the ``default_window_size`` and ``default_max_packet_size``
            arguments.
        """
        self.active = False

        if isinstance(sock, string_types):
            # convert "host:port" into (host, port)
            hl = sock.split(':', 1)
            if len(hl) == 1:
                sock = (hl[0], 22)
            else:
                sock = (hl[0], int(hl[1]))
        if type(sock) is tuple:
            # connect to the given (host, port)
            hostname, port = sock
            reason = 'No suitable address family'
            addrinfos = socket.getaddrinfo(
                hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
            # try each resolved address (IPv4/IPv6) until one connects
            for family, socktype, proto, canonname, sockaddr in addrinfos:
                if socktype == socket.SOCK_STREAM:
                    af = family
                    # addr = sockaddr
                    sock = socket.socket(af, socket.SOCK_STREAM)
                    try:
                        retry_on_signal(lambda: sock.connect((hostname, port)))
                    except socket.error as e:
                        reason = str(e)
                    else:
                        break
            else:
                # for/else: no address succeeded
                raise SSHException(
                    'Unable to connect to %s: %s' % (hostname, reason))
        # okay, normal socket-ish flow here...
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.sock = sock
        # we set the timeout so we can check self.active periodically to
        # see if we should bail. socket.timeout exception is never propagated.
        self.sock.settimeout(self._active_check_timeout)

        # negotiated crypto parameters
        self.packetizer = Packetizer(sock)
        self.local_version = 'SSH-' + self._PROTO_ID + '-' + self._CLIENT_ID
        self.remote_version = ''
        self.local_cipher = self.remote_cipher = ''
        self.local_kex_init = self.remote_kex_init = None
        self.local_mac = self.remote_mac = None
        self.local_compression = self.remote_compression = None
        self.session_id = None
        self.host_key_type = None
        self.host_key = None

        # GSS-API / SSPI Key Exchange
        self.use_gss_kex = gss_kex
        # This will be set to True if GSS-API Key Exchange was performed
        self.gss_kex_used = False
        self.kexgss_ctxt = None
        self.gss_host = None
        if self.use_gss_kex:
            self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds)
            # prefer the GSS kex algorithms when GSS kex was requested
            self._preferred_kex = self._preferred_gsskex + self._preferred_kex

        # state used during negotiation
        self.kex_engine = None
        self.H = None
        self.K = None

        self.initial_kex_done = False
        self.in_kex = False
        self.authenticated = False
        self._expected_packet = tuple()
        # synchronization (always higher level than write_lock)
        self.lock = threading.Lock()

        # tracking open channels
        self._channels = ChannelMap()
        self.channel_events = {}  # (id -> Event)
        self.channels_seen = {}  # (id -> True)
        self._channel_counter = 0
        self.default_max_packet_size = default_max_packet_size
        self.default_window_size = default_window_size
        self._forward_agent_handler = None
        self._x11_handler = None
        self._tcp_handler = None

        self.saved_exception = None
        self.clear_to_send = threading.Event()
        self.clear_to_send_lock = threading.Lock()
        self.clear_to_send_timeout = 30.0
        self.log_name = 'paramiko.transport'
        self.logger = util.get_logger(self.log_name)
        self.packetizer.set_log(self.logger)
        self.auth_handler = None
        # response Message from an arbitrary global request
        self.global_response = None
        # user-defined event callbacks
        self.completion_event = None
        # how long (seconds) to wait for the SSH banner
        self.banner_timeout = 15
        # how long (seconds) to wait for the handshake to finish after SSH
        # banner sent.
        self.handshake_timeout = 15

        # server mode:
        self.server_mode = False
        self.server_object = None
        self.server_key_dict = {}
        self.server_accepts = []
        self.server_accept_cv = threading.Condition(self.lock)
        self.subsystem_table = {}

    def __repr__(self):
        """
        Returns a string representation of this object, for debugging.
        """
        out = '<paramiko.Transport at %s' % hex(long(id(self)) & xffffffff)
        if not self.active:
            out += ' (unconnected)'
        else:
            if self.local_cipher != '':
                out += ' (cipher %s, %d bits)' % (
                    self.local_cipher,
                    self._cipher_info[self.local_cipher]['key-size'] * 8
                )
            if self.is_authenticated():
                out += ' (active; %d open channel(s))' % len(self._channels)
            elif self.initial_kex_done:
                out += ' (connected; awaiting auth)'
            else:
                out += ' (connecting)'
        out += '>'
        return out

    def atfork(self):
        """
        Terminate this Transport without closing the session.  On posix
        systems, if a Transport is open during process forking, both parent
        and child will share the underlying socket, but only one process can
        use the connection (without corrupting the session).  Use this method
        to clean up a Transport object without disrupting the other process.

        .. versionadded:: 1.5.3
        """
        self.sock.close()
        self.close()

    def get_security_options(self):
        """
        Return a `.SecurityOptions` object which can be used to tweak the
        encryption algorithms this transport will permit (for encryption,
        digest/hash operations, public keys, and key exchanges) and the order
        of preference for them.
        """
        return SecurityOptions(self)

    def set_gss_host(self, gss_host):
        """
        Setter for ``gss_host`` if GSS-API Key Exchange is performed.

        :param str gss_host: The targets name in the kerberos database
                             Default: The name of the host to connect to
        """
        # We need the FQDN to get this working with SSPI
        self.gss_host = socket.getfqdn(gss_host)

    def start_client(self, event=None):
        """
        Negotiate a new SSH2 session as a client.  This is the first step
        after creating a new `.Transport`.
        A separate thread is created for protocol negotiation.  If an event is
        passed in, this method returns immediately.  When negotiation is done
        (successful or not), the given ``Event`` will be triggered.  On
        failure, `is_active` will return ``False``.

        (Since 1.4) If ``event`` is ``None``, this method will not return
        until negotiation is done.  On success, the method returns normally.
        Otherwise an SSHException is raised.

        After a successful negotiation, you will usually want to authenticate,
        calling `auth_password <Transport.auth_password>` or
        `auth_publickey <Transport.auth_publickey>`.

        .. note:: `connect` is a simpler method for connecting as a client.

        .. note::
            After calling this method (or `start_server` or `connect`), you
            should no longer directly read from or write to the original
            socket object.

        :param .threading.Event event:
            an event to trigger when negotiation is complete (optional)

        :raises:
            `.SSHException` -- if negotiation fails (and no ``event`` was
            passed in)
        """
        self.active = True
        if event is not None:
            # async, return immediately and let the app poll for completion
            self.completion_event = event
            self.start()
            return

        # synchronous, wait for a result
        self.completion_event = event = threading.Event()
        self.start()
        while True:
            # poll in short intervals so a dead thread is noticed promptly
            event.wait(0.1)
            if not self.active:
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException('Negotiation failed.')
            if event.is_set():
                break

    def start_server(self, event=None, server=None):
        """
        Negotiate a new SSH2 session as a server.  This is the first step
        after creating a new `.Transport` and setting up your server host
        key(s).  A separate thread is created for protocol negotiation.

        If an event is passed in, this method returns immediately.  When
        negotiation is done (successful or not), the given ``Event`` will
        be triggered.  On failure, `is_active` will return ``False``.

        (Since 1.4) If ``event`` is ``None``, this method will not return
        until negotiation is done.  On success, the method returns normally.
        Otherwise an SSHException is raised.

        After a successful negotiation, the client will need to authenticate.
        Override the methods `get_allowed_auths
        <.ServerInterface.get_allowed_auths>`, `check_auth_none
        <.ServerInterface.check_auth_none>`, `check_auth_password
        <.ServerInterface.check_auth_password>`, and `check_auth_publickey
        <.ServerInterface.check_auth_publickey>` in the given ``server``
        object to control the authentication process.

        After a successful authentication, the client should request to open a
        channel.  Override `check_channel_request
        <.ServerInterface.check_channel_request>` in the given ``server``
        object to allow channels to be opened.

        .. note::
            After calling this method (or `start_client` or `connect`), you
            should no longer directly read from or write to the original
            socket object.

        :param .threading.Event event:
            an event to trigger when negotiation is complete.
        :param .ServerInterface server:
            an object used to perform authentication and create `channels
            <.Channel>`

        :raises:
            `.SSHException` -- if negotiation fails (and no ``event`` was
            passed in)
        """
        if server is None:
            server = ServerInterface()
        self.server_mode = True
        self.server_object = server
        self.active = True
        if event is not None:
            # async, return immediately and let the app poll for completion
            self.completion_event = event
            self.start()
            return

        # synchronous, wait for a result
        self.completion_event = event = threading.Event()
        self.start()
        while True:
            event.wait(0.1)
            if not self.active:
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException('Negotiation failed.')
            if event.is_set():
                break

    def add_server_key(self, key):
        """
        Add a host key to the list of keys used for server mode.  When behaving
        as a server, the host key is used to sign certain packets during the
        SSH2 negotiation, so that the client can trust that we are who we say
        we are.  Because this is used for signing, the key must contain private
        key info, not just the public half.  Only one key of each type (RSA or
        DSS) is kept.
        :param .PKey key:
            the host key to add, usually an `.RSAKey` or `.DSSKey`.
        """
        # keyed by algorithm name, so a second key of the same type replaces
        # the first
        self.server_key_dict[key.get_name()] = key

    def get_server_key(self):
        """
        Return the active host key, in server mode.  After negotiating with the
        client, this method will return the negotiated host key.  If only one
        type of host key was set with `add_server_key`, that's the only key
        that will ever be returned.  But in cases where you have set more than
        one type of host key (for example, an RSA key and a DSS key), the key
        type will be negotiated by the client, and this method will return the
        key of the type agreed on.  If the host key has not been negotiated
        yet, ``None`` is returned.  In client mode, the behavior is undefined.

        :return:
            host key (`.PKey`) of the type negotiated by the client, or
            ``None``.
        """
        try:
            return self.server_key_dict[self.host_key_type]
        except KeyError:
            pass
        return None

    @staticmethod
    def load_server_moduli(filename=None):
        """
        (optional)
        Load a file of prime moduli for use in doing group-exchange key
        negotiation in server mode.  It's a rather obscure option and can be
        safely ignored.

        In server mode, the remote client may request "group-exchange" key
        negotiation, which asks the server to send a random prime number that
        fits certain criteria.  These primes are pretty difficult to compute,
        so they can't be generated on demand.  But many systems contain a file
        of suitable primes (usually named something like ``/etc/ssh/moduli``).
        If you call `load_server_moduli` and it returns ``True``, then this
        file of primes has been loaded and we will support "group-exchange" in
        server mode.  Otherwise server mode will just claim that it doesn't
        support that method of key negotiation.

        :param str filename:
            optional path to the moduli file, if you happen to know that it's
            not in a standard location.
        :return:
            True if a moduli file was successfully loaded; False otherwise.

        .. note:: This has no effect when used in client mode.
        """
        # stored on the class so all Transports share one loaded pack
        Transport._modulus_pack = ModulusPack()
        # places to look for the openssh "moduli" file
        file_list = ['/etc/ssh/moduli', '/usr/local/etc/moduli']
        if filename is not None:
            file_list.insert(0, filename)
        for fn in file_list:
            try:
                Transport._modulus_pack.read_file(fn)
                return True
            except IOError:
                pass
        # none succeeded
        Transport._modulus_pack = None
        return False

    def close(self):
        """
        Close this session, and any open channels that are tied to it.
        """
        if not self.active:
            return
        self.stop_thread()
        for chan in list(self._channels.values()):
            chan._unlink()
        self.sock.close()

    def get_remote_server_key(self):
        """
        Return the host key of the server (in client mode).

        .. note::
            Previously this call returned a tuple of ``(key type, key
            string)``. You can get the same effect by calling `.PKey.get_name`
            for the key type, and ``str(key)`` for the key string.

        :raises: `.SSHException` -- if no session is currently active.

        :return: public key (`.PKey`) of the remote server
        """
        if (not self.active) or (not self.initial_kex_done):
            raise SSHException('No existing session')
        return self.host_key

    def is_active(self):
        """
        Return true if this session is active (open).

        :return:
            True if the session is still active (open); False if the session
            is closed
        """
        return self.active

    def open_session(
            self,
            window_size=None,
            max_packet_size=None,
            timeout=None,
    ):
        """
        Request a new channel to the server, of type ``"session"``.  This is
        just an alias for calling `open_channel` with an argument of
        ``"session"``.

        .. note:: Modifying the window and packet sizes might have adverse
            effects on the session created. The default values are the same
            as in the OpenSSH code base and have been battle tested.

        :param int window_size:
            optional window size for this session.
        :param int max_packet_size:
            optional max packet size for this session.

        :return: a new `.Channel`

        :raises:
            `.SSHException` -- if the request is rejected or the session ends
            prematurely

        ..
        versionchanged:: 1.13.4/1.14.3/1.15.3
            Added the ``timeout`` argument.
        .. versionchanged:: 1.15
            Added the ``window_size`` and ``max_packet_size`` arguments.
        """
        return self.open_channel('session',
                                 window_size=window_size,
                                 max_packet_size=max_packet_size,
                                 timeout=timeout)

    def open_x11_channel(self, src_addr=None):
        """
        Request a new channel to the client, of type ``"x11"``.  This
        is just an alias for ``open_channel('x11', src_addr=src_addr)``.

        :param tuple src_addr:
            the source address (``(str, int)``) of the x11 server (port is the
            x11 port, ie. 6010)
        :return: a new `.Channel`

        :raises:
            `.SSHException` -- if the request is rejected or the session ends
            prematurely
        """
        return self.open_channel('x11', src_addr=src_addr)

    def open_forward_agent_channel(self):
        """
        Request a new channel to the client, of type
        ``"auth-agent@openssh.com"``.

        This is just an alias for ``open_channel('auth-agent@openssh.com')``.

        :return: a new `.Channel`

        :raises: `.SSHException` --
            if the request is rejected or the session ends prematurely
        """
        return self.open_channel('auth-agent@openssh.com')

    def open_forwarded_tcpip_channel(self, src_addr, dest_addr):
        """
        Request a new channel back to the client, of type ``forwarded-tcpip``.

        This is used after a client has requested port forwarding, for sending
        incoming connections back to the client.

        :param src_addr: originator's address
        :param dest_addr: local (server) connected address
        """
        return self.open_channel('forwarded-tcpip', dest_addr, src_addr)

    def open_channel(self,
                     kind,
                     dest_addr=None,
                     src_addr=None,
                     window_size=None,
                     max_packet_size=None,
                     timeout=None):
        """
        Request a new channel to the server. `Channels <.Channel>` are
        socket-like objects used for the actual transfer of data across the
        session. You may only request a channel after negotiating encryption
        (using `connect` or `start_client`) and authenticating.

        .. note:: Modifying the window and packet sizes might have adverse
            effects on the channel created. The default values are the same
            as in the OpenSSH code base and have been battle tested.

        :param str kind:
            the kind of channel requested (usually ``"session"``,
            ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``)
        :param tuple dest_addr:
            the destination address (address + port tuple) of this port
            forwarding, if ``kind`` is ``"forwarded-tcpip"`` or
            ``"direct-tcpip"`` (ignored for other channel types)
        :param src_addr: the source address of this port forwarding, if
            ``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``
        :param int window_size:
            optional window size for this session.
        :param int max_packet_size:
            optional max packet size for this session.
        :param float timeout:
            optional timeout opening a channel, default 3600s (1h)

        :return: a new `.Channel` on success

        :raises:
            `.SSHException` -- if the request is rejected, the session ends
            prematurely or there is a timeout opening a channel

        .. versionchanged:: 1.15
            Added the ``window_size`` and ``max_packet_size`` arguments.
        """
        if not self.active:
            raise SSHException('SSH session not active')
        timeout = 3600 if timeout is None else timeout
        # hold the lock while allocating the channel id and registering the
        # new Channel, so the reader thread sees a consistent channel table
        self.lock.acquire()
        try:
            window_size = self._sanitize_window_size(window_size)
            max_packet_size = self._sanitize_packet_size(max_packet_size)
            chanid = self._next_channel()
            m = Message()
            m.add_byte(cMSG_CHANNEL_OPEN)
            m.add_string(kind)
            m.add_int(chanid)
            m.add_int(window_size)
            m.add_int(max_packet_size)
            if (kind == 'forwarded-tcpip') or (kind == 'direct-tcpip'):
                m.add_string(dest_addr[0])
                m.add_int(dest_addr[1])
                m.add_string(src_addr[0])
                m.add_int(src_addr[1])
            elif kind == 'x11':
                m.add_string(src_addr[0])
                m.add_int(src_addr[1])
            chan = Channel(chanid)
            self._channels.put(chanid, chan)
            self.channel_events[chanid] = event = threading.Event()
            self.channels_seen[chanid] = True
            chan._set_transport(self)
            chan._set_window(window_size, max_packet_size)
        finally:
            self.lock.release()
        self._send_user_message(m)
        start_ts = time.time()
        # wait for the server's open-confirmation (or failure), polling so we
        # can notice a dead transport and enforce the timeout
        while True:
            event.wait(0.1)
            if not self.active:
                e = self.get_exception()
                if e is None:
                    e = SSHException('Unable to open channel.')
                raise e
            if event.is_set():
                break
            elif start_ts + timeout < time.time():
                raise SSHException('Timeout opening channel.')
        chan = self._channels.get(chanid)
        if chan is not None:
            return chan
        # channel vanished: the open was rejected
        e = self.get_exception()
        if e is None:
            e = SSHException('Unable to open channel.')
        raise e

    def request_port_forward(self, address, port, handler=None):
        """
        Ask the server to forward TCP connections from a listening port on
        the server, across this SSH session.

        If a handler is given, that handler is called from a different thread
        whenever a forwarded connection arrives.  The handler parameters are::

            handler(
                channel,
                (origin_addr, origin_port),
                (server_addr, server_port),
            )

        where ``server_addr`` and ``server_port`` are the address and port
        that the server was listening on.

        If no handler is set, the default behavior is to send new incoming
        forwarded connections into the accept queue, to be picked up via
        `accept`.

        :param str address: the address to bind when forwarding
        :param int port:
            the port to forward, or 0 to ask the server to allocate any port
        :param callable handler:
            optional handler for incoming forwarded connections, of the form
            ``func(Channel, (str, int), (str, int))``.
        :return: the port number (`int`) allocated by the server

        :raises:
            `.SSHException` -- if the server refused the TCP forward request
        """
        if not self.active:
            raise SSHException('SSH session not active')
        port = int(port)
        response = self.global_request(
            'tcpip-forward', (address, port), wait=True
        )
        if response is None:
            raise SSHException('TCP forwarding request denied')
        if port == 0:
            # we asked the server to pick a port; it tells us which one
            port = response.get_int()
        if handler is None:
            def default_handler(channel, src_addr, dest_addr_port):
                # src_addr, src_port = src_addr_port
                # dest_addr, dest_port = dest_addr_port
                self._queue_incoming_channel(channel)
            handler = default_handler
        self._tcp_handler = handler
        return port

    def cancel_port_forward(self, address, port):
        """
        Ask the server to cancel a previous port-forwarding request.  No more
        connections to the given address & port will be forwarded across this
        ssh connection.

        :param str address: the address to stop forwarding
        :param int port: the port to stop forwarding
        """
        if not self.active:
            return
        self._tcp_handler = None
        self.global_request('cancel-tcpip-forward', (address, port), wait=True)

    def open_sftp_client(self):
        """
        Create an SFTP client channel from an open transport.  On success, an
        SFTP session will be opened with the remote host, and a new
        `.SFTPClient` object will be returned.

        :return:
            a new `.SFTPClient` referring to an sftp session (channel) across
            this transport
        """
        return SFTPClient.from_transport(self)

    def send_ignore(self, byte_count=None):
        """
        Send a junk packet across the encrypted link.  This is sometimes used
        to add "noise" to a connection to confuse would-be attackers.  It can
        also be used as a keep-alive for long lived connections traversing
        firewalls.

        :param int byte_count:
            the number of random bytes to send in the payload of the ignored
            packet -- defaults to a random number from 10 to 41.
        """
        m = Message()
        m.add_byte(cMSG_IGNORE)
        if byte_count is None:
            # random payload length between 10 and 41 bytes
            byte_count = (byte_ord(os.urandom(1)) % 32) + 10
        m.add_bytes(os.urandom(byte_count))
        self._send_user_message(m)

    def renegotiate_keys(self):
        """
        Force this session to switch to new keys.  Normally this is done
        automatically after the session hits a certain number of packets or
        bytes sent or received, but this method gives you the option of forcing
        new keys whenever you want.  Negotiating new keys causes a pause in
        traffic both ways as the two sides swap keys and do computations.  This
        method returns when the session has switched to new keys.

        :raises:
            `.SSHException` -- if the key renegotiation failed (which causes
            the session to end)
        """
        self.completion_event = threading.Event()
        self._send_kex_init()
        while True:
            self.completion_event.wait(0.1)
            if not self.active:
                e = self.get_exception()
                if e is not None:
                    raise e
                raise SSHException('Negotiation failed.')
            if self.completion_event.is_set():
                break
        return

    def set_keepalive(self, interval):
        """
        Turn on/off keepalive packets (default is off).  If this is set, after
        ``interval`` seconds without sending any data over the connection, a
        "keepalive" packet will be sent (and ignored by the remote host).  This
        can be useful to keep connections alive over a NAT, for example.

        :param int interval:
            seconds to wait before sending a keepalive packet (or
            0 to disable keepalives).
        """
        # weakref.proxy avoids a reference cycle between the packetizer's
        # keepalive callback and this Transport
        def _request(x=weakref.proxy(self)):
            return x.global_request('keepalive@lag.net', wait=False)
        self.packetizer.set_keepalive(interval, _request)

    def global_request(self, kind, data=None, wait=True):
        """
        Make a global request to the remote host.  These are normally
        extensions to the SSH2 protocol.

        :param str kind: name of the request.
        :param tuple data:
            an optional tuple containing additional data to attach to the
            request.
        :param bool wait:
            ``True`` if this method should not return until a response is
            received; ``False`` otherwise.
        :return:
            a `.Message` containing possible additional data if the request
            was successful (or an empty `.Message` if ``wait`` was ``False``);
            ``None`` if the request was denied.
        """
        if wait:
            self.completion_event = threading.Event()
        m = Message()
        m.add_byte(cMSG_GLOBAL_REQUEST)
        m.add_string(kind)
        m.add_boolean(wait)
        if data is not None:
            m.add(*data)
        self._log(DEBUG, 'Sending global request "%s"' % kind)
        self._send_user_message(m)
        if not wait:
            return None
        while True:
            self.completion_event.wait(0.1)
            if not self.active:
                return None
            if self.completion_event.is_set():
                break
        return self.global_response

    def accept(self, timeout=None):
        """
        Return the next channel opened by the client over this transport, in
        server mode.  If no channel is opened before the given timeout,
        ``None`` is returned.

        :param int timeout:
            seconds to wait for a channel, or ``None`` to wait forever
        :return: a new `.Channel` opened by the client
        """
        self.lock.acquire()
        try:
            if len(self.server_accepts) > 0:
                chan = self.server_accepts.pop(0)
            else:
                # wait (on the condition variable) for an incoming channel
                self.server_accept_cv.wait(timeout)
                if len(self.server_accepts) > 0:
                    chan = self.server_accepts.pop(0)
                else:
                    # timeout
                    chan = None
        finally:
            self.lock.release()
        return chan

    def connect(
        self,
        hostkey=None,
        username='',
        password=None,
        pkey=None,
        gss_host=None,
        gss_auth=False,
        gss_kex=False,
        gss_deleg_creds=True,
    ):
        """
        Negotiate an SSH2 session, and optionally verify the server's host key
        and authenticate using a password or private key.  This is a shortcut
        for `start_client`, `get_remote_server_key`, and
        `Transport.auth_password` or `Transport.auth_publickey`.  Use those
        methods if you want more control.

        You can use this method immediately after creating a Transport to
        negotiate encryption with a server.  If it fails, an exception will be
        thrown.  On success, the method will return cleanly, and an encrypted
        session exists.  You may immediately call `open_channel` or
        `open_session` to get a `.Channel` object, which is used for data
        transfer.

        .. note:: If you fail to supply a password or private key, this method
            may succeed, but a subsequent `open_channel` or `open_session`
            call may fail because you haven't authenticated yet.

        :param .PKey hostkey:
            the host key expected from the server, or ``None`` if you don't
            want to do host key verification.
        :param str username: the username to authenticate as.
        :param str password:
            a password to use for authentication, if you want to use password
            authentication; otherwise ``None``.
        :param .PKey pkey:
            a private key to use for authentication, if you want to use private
            key authentication; otherwise ``None``.
        :param str gss_host:
            The target's name in the kerberos database. Default: hostname
        :param bool gss_auth:
            ``True`` if you want to use GSS-API authentication.
        :param bool gss_kex:
            Perform GSS-API Key Exchange and user authentication.
        :param bool gss_deleg_creds:
            Whether to delegate GSS-API client credentials.

        :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key
            supplied by the server is incorrect, or authentication fails.
        """
        if hostkey is not None:
            # restrict negotiation to the supplied key's algorithm only
            self._preferred_keys = [hostkey.get_name()]

        self.start_client()

        # check host key if we were given one
        # If GSS-API Key Exchange was performed, we are not required to check
        # the host key.
        if (hostkey is not None) and not gss_kex:
            key = self.get_remote_server_key()
            if (
                key.get_name() != hostkey.get_name() or
                key.asbytes() != hostkey.asbytes()
            ):
                self._log(DEBUG, 'Bad host key from server')
                self._log(DEBUG, 'Expected: %s: %s' % (
                    hostkey.get_name(), repr(hostkey.asbytes()))
                )
                self._log(DEBUG, 'Got : %s: %s' % (
                    key.get_name(), repr(key.asbytes()))
                )
                raise SSHException('Bad host key from server')
            self._log(DEBUG, 'Host key verified (%s)' % hostkey.get_name())

        if (pkey is not None) or (password is not None) or gss_auth or gss_kex:
            if gss_auth:
                self._log(
                    DEBUG, 'Attempting GSS-API auth... (gssapi-with-mic)')  # noqa
                self.auth_gssapi_with_mic(username, gss_host, gss_deleg_creds)
            elif gss_kex:
                self._log(DEBUG, 'Attempting GSS-API auth... (gssapi-keyex)')
                self.auth_gssapi_keyex(username)
            elif pkey is not None:
                self._log(DEBUG, 'Attempting public-key auth...')
                self.auth_publickey(username, pkey)
            else:
                self._log(DEBUG, 'Attempting password auth...')
                self.auth_password(username, password)

        return

    def get_exception(self):
        """
        Return any exception that happened during the last server request.
        This can be used to fetch more specific error information after using
        calls like `start_client`.  The exception (if any) is cleared after
        this call.

        :return:
            an exception, or ``None`` if there is no stored exception.

        .. versionadded:: 1.1
        """
        self.lock.acquire()
        try:
            e = self.saved_exception
            # clear-on-read semantics
            self.saved_exception = None
            return e
        finally:
            self.lock.release()

    def set_subsystem_handler(self, name, handler, *larg, **kwarg):
        """
        Set the handler class for a subsystem in server mode.  If a request
        for this subsystem is made on an open ssh channel later, this handler
        will be constructed and called -- see `.SubsystemHandler` for more
        detailed documentation.

        Any extra parameters (including keyword arguments) are saved and
        passed to the `.SubsystemHandler` constructor later.

        :param str name: name of the subsystem.
        :param handler:
            subclass of `.SubsystemHandler` that handles this subsystem.
        """
        try:
            self.lock.acquire()
            self.subsystem_table[name] = (handler, larg, kwarg)
        finally:
            self.lock.release()

    def is_authenticated(self):
        """
        Return true if this session is active and authenticated.

        :return:
            True if the session is still open and has been authenticated
            successfully; False if authentication failed and/or the session is
            closed.
        """
        return (
            self.active and
            self.auth_handler is not None and
            self.auth_handler.is_authenticated()
        )

    def get_username(self):
        """
        Return the username this connection is authenticated for.
If the session is not authenticated (or authentication failed), this method returns ``None``. :return: username that was authenticated (a `str`), or ``None``. """ if not self.active or (self.auth_handler is None): return None return self.auth_handler.get_username() def get_banner(self): """ Return the banner supplied by the server upon connect. If no banner is supplied, this method returns ``None``. :returns: server supplied banner (`str`), or ``None``. .. versionadded:: 1.13 """ if not self.active or (self.auth_handler is None): return None return self.auth_handler.banner def auth_none(self, username): """ Try to authenticate to the server using no authentication at all. This will almost always fail. It may be useful for determining the list of authentication types supported by the server, by catching the `.BadAuthenticationType` exception raised. :param str username: the username to authenticate as :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if "none" authentication isn't allowed by the server for this user :raises: `.SSHException` -- if the authentication failed due to a network error .. versionadded:: 1.5 """ if (not self.active) or (not self.initial_kex_done): raise SSHException('No existing session') my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_none(username, my_event) return self.auth_handler.wait_for_response(my_event) def auth_password(self, username, password, event=None, fallback=True): """ Authenticate to the server using a password. The username and password are sent over an encrypted link. If an ``event`` is passed in, this method will return immediately, and the event will be triggered once authentication succeeds or fails. On success, `is_authenticated` will return ``True``. On failure, you may use `get_exception` to get more detailed error information. 
Since 1.1, if no event is passed, this method will block until the authentication succeeds or fails. On failure, an exception is raised. Otherwise, the method simply returns. Since 1.5, if no event is passed and ``fallback`` is ``True`` (the default), if the server doesn't support plain password authentication but does support so-called "keyboard-interactive" mode, an attempt will be made to authenticate using this interactive mode. If it fails, the normal exception will be thrown as if the attempt had never been made. This is useful for some recent Gentoo and Debian distributions, which turn off plain password authentication in a misguided belief that interactive authentication is "more secure". (It's not.) If the server requires multi-step authentication (which is very rare), this method will return a list of auth types permissible for the next step. Otherwise, in the normal case, an empty list is returned. :param str username: the username to authenticate as :param basestring password: the password to authenticate with :param .threading.Event event: an event to trigger when the authentication attempt is complete (whether it was successful or not) :param bool fallback: ``True`` if an attempt at an automated "interactive" password auth should be made if the server doesn't support normal password auth :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if password authentication isn't allowed by the server for this user (and no event was passed in) :raises: `.AuthenticationException` -- if the authentication failed (and no event was passed in) :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to send the password unless we're on a secure # link raise SSHException('No existing session') if event is None: my_event = threading.Event() else: my_event = event self.auth_handler = AuthHandler(self) 
self.auth_handler.auth_password(username, password, my_event) if event is not None: # caller wants to wait for event themselves return [] try: return self.auth_handler.wait_for_response(my_event) except BadAuthenticationType as e: # if password auth isn't allowed, but keyboard-interactive *is*, # try to fudge it if not fallback or ('keyboard-interactive' not in e.allowed_types): raise try: def handler(title, instructions, fields): if len(fields) > 1: raise SSHException('Fallback authentication failed.') if len(fields) == 0: # for some reason, at least on os x, a 2nd request will # be made with zero fields requested. maybe it's just # to try to fake out automated scripting of the exact # type we're doing here. *shrug* :) return [] return [password] return self.auth_interactive(username, handler) except SSHException: # attempt failed; just raise the original exception raise e def auth_publickey(self, username, key, event=None): """ Authenticate to the server using a private key. The key is used to sign data from the server, so it must include the private part. If an ``event`` is passed in, this method will return immediately, and the event will be triggered once authentication succeeds or fails. On success, `is_authenticated` will return ``True``. On failure, you may use `get_exception` to get more detailed error information. Since 1.1, if no event is passed, this method will block until the authentication succeeds or fails. On failure, an exception is raised. Otherwise, the method simply returns. If the server requires multi-step authentication (which is very rare), this method will return a list of auth types permissible for the next step. Otherwise, in the normal case, an empty list is returned. 
:param str username: the username to authenticate as :param .PKey key: the private key to authenticate with :param .threading.Event event: an event to trigger when the authentication attempt is complete (whether it was successful or not) :return: `list` of auth types permissible for the next stage of authentication (normally empty) :raises: `.BadAuthenticationType` -- if public-key authentication isn't allowed by the server for this user (and no event was passed in) :raises: `.AuthenticationException` -- if the authentication failed (and no event was passed in) :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link raise SSHException('No existing session') if event is None: my_event = threading.Event() else: my_event = event self.auth_handler = AuthHandler(self) self.auth_handler.auth_publickey(username, key, my_event) if event is not None: # caller wants to wait for event themselves return [] return self.auth_handler.wait_for_response(my_event) def auth_interactive(self, username, handler, submethods=''): """ Authenticate to the server interactively. A handler is used to answer arbitrary questions from the server. On many servers, this is just a dumb wrapper around PAM. This method will block until the authentication succeeds or fails, peroidically calling the handler asynchronously to get answers to authentication questions. The handler may be called more than once if the server continues to ask questions. The handler is expected to be a callable that will handle calls of the form: ``handler(title, instructions, prompt_list)``. The ``title`` is meant to be a dialog-window title, and the ``instructions`` are user instructions (both are strings). ``prompt_list`` will be a list of prompts, each prompt being a tuple of ``(str, bool)``. The string is the prompt and the boolean indicates whether the user text should be echoed. 
A sample call would thus be: ``handler('title', 'instructions', [('Password:', False)])``. The handler should return a list or tuple of answers to the server's questions. If the server requires multi-step authentication (which is very rare), this method will return a list of auth types permissible for the next step. Otherwise, in the normal case, an empty list is returned. :param str username: the username to authenticate as :param callable handler: a handler for responding to server questions :param str submethods: a string list of desired submethods (optional) :return: `list` of auth types permissible for the next stage of authentication (normally empty). :raises: `.BadAuthenticationType` -- if public-key authentication isn't allowed by the server for this user :raises: `.AuthenticationException` -- if the authentication failed :raises: `.SSHException` -- if there was a network error .. versionadded:: 1.5 """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link raise SSHException('No existing session') my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_interactive( username, handler, my_event, submethods ) return self.auth_handler.wait_for_response(my_event) def auth_interactive_dumb(self, username, handler=None, submethods=''): """ Autenticate to the server interactively but dumber. Just print the prompt and / or instructions to stdout and send back the response. This is good for situations where partial auth is achieved by key and then the user has to enter a 2fac token. 
""" if not handler: def handler(title, instructions, prompt_list): answers = [] if title: print(title.strip()) if instructions: print(instructions.strip()) for prompt, show_input in prompt_list: print(prompt.strip(), end=' ') answers.append(input()) return answers return self.auth_interactive(username, handler, submethods) def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): """ Authenticate to the Server using GSS-API / SSPI. :param str username: The username to authenticate as :param str gss_host: The target host :param bool gss_deleg_creds: Delegate credentials or not :return: list of auth types permissible for the next stage of authentication (normally empty) :rtype: list :raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't allowed by the server (and no event was passed in) :raises: `.AuthenticationException` -- if the authentication failed (and no event was passed in) :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link raise SSHException('No existing session') my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_gssapi_with_mic( username, gss_host, gss_deleg_creds, my_event ) return self.auth_handler.wait_for_response(my_event) def auth_gssapi_keyex(self, username): """ Authenticate to the server with GSS-API/SSPI if GSS-API kex is in use. :param str username: The username to authenticate as. 
    :returns: a `list` of auth types permissible for the next stage of
        authentication (normally empty)

    :raises: `.BadAuthenticationType` -- if GSS-API Key Exchange was not
        performed (and no event was passed in)
    :raises: `.AuthenticationException` -- if the authentication failed
        (and no event was passed in)
    :raises: `.SSHException` -- if there was a network error
    """
    if (not self.active) or (not self.initial_kex_done):
        # we should never try to authenticate unless we're on a secure link
        raise SSHException('No existing session')
    my_event = threading.Event()
    self.auth_handler = AuthHandler(self)
    self.auth_handler.auth_gssapi_keyex(username, my_event)
    # block until the auth handler signals success or failure via the event
    return self.auth_handler.wait_for_response(my_event)

def set_log_channel(self, name):
    """
    Set the channel for this transport's logging.  The default is
    ``"paramiko.transport"`` but it can be set to anything you want. (See
    the `.logging` module for more info.)  SSH Channels will log to a
    sub-channel of the one specified.

    :param str name: new channel name for logging

    .. versionadded:: 1.1
    """
    self.log_name = name
    self.logger = util.get_logger(name)
    # keep the packetizer's logger in sync with the transport's
    self.packetizer.set_log(self.logger)

def get_log_channel(self):
    """
    Return the channel name used for this transport's logging.

    :return: channel name as a `str`

    .. versionadded:: 1.2
    """
    return self.log_name

def set_hexdump(self, hexdump):
    """
    Turn on/off logging a hex dump of protocol traffic at DEBUG level in
    the logs.  Normally you would want this off (which is the default),
    but if you are debugging something, it may be useful.

    :param bool hexdump:
        ``True`` to log protocol traffix (in hex) to the log; ``False``
        otherwise.
    """
    # hexdumping is implemented by the packetizer, which sees raw packets
    self.packetizer.set_hexdump(hexdump)

def get_hexdump(self):
    """
    Return ``True`` if the transport is currently logging hex dumps of
    protocol traffic.

    :return: ``True`` if hex dumps are being logged, else ``False``.

    .. versionadded:: 1.4
    """
    return self.packetizer.get_hexdump()

def use_compression(self, compress=True):
    """
    Turn on/off compression.
    This will only have an affect before starting the transport (ie
    before calling `connect`, etc).  By default, compression is off since
    it negatively affects interactive sessions.

    :param bool compress:
        ``True`` to ask the remote client/server to compress traffic;
        ``False`` to refuse compression

    .. versionadded:: 1.5.2
    """
    if compress:
        # offer the zlib variants in preference order; 'none' remains as
        # a fallback if the peer refuses compression
        self._preferred_compression = ('zlib@openssh.com', 'zlib', 'none')
    else:
        self._preferred_compression = ('none',)

def getpeername(self):
    """
    Return the address of the remote side of this Transport, if possible.

    This is effectively a wrapper around ``getpeername`` on the underlying
    socket.  If the socket-like object has no ``getpeername`` method, then
    ``("unknown", 0)`` is returned.

    :return:
        the address of the remote host, if known, as a ``(str, int)``
        tuple.
    """
    gp = getattr(self.sock, 'getpeername', None)
    if gp is None:
        # not a real socket (e.g. a pipe-like object); report a placeholder
        return 'unknown', 0
    return gp()

def stop_thread(self):
    # Ask the main loop to exit and unblock any pending packet read.
    self.active = False
    self.packetizer.close()
    if PY2:
        # Original join logic; #520 doesn't appear commonly present under
        # Python 2.
        while self.is_alive() and self is not threading.current_thread():
            self.join(10)
    else:
        # Keep trying to join() our main thread, quickly, until:
        # * We join()ed successfully (self.is_alive() == False)
        # * Or it looks like we've hit issue #520 (socket.recv hitting some
        # race condition preventing it from timing out correctly), wherein
        # our socket and packetizer are both closed (but where we'd
        # otherwise be sitting forever on that recv()).
        # NOTE(review): self.sock._closed is a private socket attribute --
        # CPython-specific; confirm before porting.
        while (
            self.is_alive()
            and self is not threading.current_thread()
            and not self.sock._closed
            and not self.packetizer.closed
        ):
            self.join(0.1)

# internals...
def _log(self, level, msg, *args):
    """Write ``msg`` via our logger; a list logs one record per element."""
    emit = self.logger.log
    if isinstance(msg, list):
        for line in msg:
            emit(level, line)
    else:
        emit(level, msg, *args)

def _get_modulus_pack(self):
    """used by KexGex to find primes for group exchange"""
    return self._modulus_pack

def _next_channel(self):
    """you are holding the lock"""
    # Scan forward from the counter until we find an ID with no live
    # channel attached, wrapping at 24 bits.
    candidate = self._channel_counter
    while self._channels.get(candidate) is not None:
        candidate = (candidate + 1) & 0xffffff
        self._channel_counter = candidate
    self._channel_counter = (candidate + 1) & 0xffffff
    return candidate

def _unlink_channel(self, chanid):
    """used by a Channel to remove itself from the active channel list"""
    self._channels.delete(chanid)

def _send_message(self, data):
    # straight to the packetizer; no rekey gating (see _send_user_message)
    self.packetizer.send_message(data)

def _send_user_message(self, data):
    """
    send a message, but block if we're in key negotiation.  this is used
    for user-initiated requests.
    """
    deadline = time.time() + self.clear_to_send_timeout
    while True:
        self.clear_to_send.wait(0.1)
        if not self.active:
            self._log(DEBUG, 'Dropping user packet because connection is dead.')  # noqa
            return
        # re-check under the lock: a racing rekey may clear the flag
        self.clear_to_send_lock.acquire()
        if self.clear_to_send.is_set():
            break
        self.clear_to_send_lock.release()
        if time.time() > deadline:
            raise SSHException('Key-exchange timed out waiting for key negotiation')  # noqa
    try:
        self._send_message(data)
    finally:
        # we broke out of the loop still holding clear_to_send_lock
        self.clear_to_send_lock.release()

def _set_K_H(self, k, h):
    """
    Used by a kex obj to set the K (root key) and H (exchange hash).
    """
    self.K = k
    self.H = h
    # the first exchange hash doubles as the permanent session identifier
    if self.session_id is None:
        self.session_id = h

def _expect_packet(self, *ptypes):
    """
    Used by a kex obj to register the next packet type it expects to see.
    """
    self._expected_packet = tuple(ptypes)

def _verify_key(self, host_key, sig):
    """Parse the server's host-key blob and verify its signature over H."""
    key = self._key_info[self.host_key_type](Message(host_key))
    if key is None:
        raise SSHException('Unknown host key type')
    if not key.verify_ssh_sig(self.H, Message(sig)):
        raise SSHException('Signature verification (%s) failed.'
                           % self.host_key_type)  # noqa
    self.host_key = key

def _compute_key(self, id, nbytes):
    """id is 'A' - 'F' for the various keys used by ssh"""
    m = Message()
    m.add_mpint(self.K)
    m.add_bytes(self.H)
    m.add_byte(b(id))
    m.add_bytes(self.session_id)
    # Fallback to SHA1 for kex engines that fail to specify a hex
    # algorithm, or for e.g. transport tests that don't run kexinit.
    hash_algo = getattr(self.kex_engine, 'hash_algo', None)
    hash_select_msg = "kex engine %s specified hash_algo %r" % (
        self.kex_engine.__class__.__name__, hash_algo
    )
    if hash_algo is None:
        hash_algo = sha1
        hash_select_msg += ", falling back to sha1"
    if not hasattr(self, '_logged_hash_selection'):
        self._log(DEBUG, hash_select_msg)
        setattr(self, '_logged_hash_selection', True)
    out = sofar = hash_algo(m.asbytes()).digest()
    # stretch the digest until we have at least nbytes of key material
    while len(out) < nbytes:
        m = Message()
        m.add_mpint(self.K)
        m.add_bytes(self.H)
        m.add_bytes(sofar)
        digest = hash_algo(m.asbytes()).digest()
        out += digest
        sofar += digest
    return out[:nbytes]

def _get_cipher(self, name, key, iv, operation):
    """Build a one-direction cipher context for the named algorithm."""
    if name not in self._cipher_info:
        raise SSHException('Unknown client cipher ' + name)
    info = self._cipher_info[name]
    cipher = Cipher(
        info['class'](key),
        info['mode'](iv),
        backend=default_backend(),
    )
    if operation is self._ENCRYPT:
        return cipher.encryptor()
    return cipher.decryptor()

def _set_forward_agent_handler(self, handler):
    # None means "queue incoming agent channels for a later accept()"
    if handler is not None:
        self._forward_agent_handler = handler
        return

    def default_handler(channel):
        self._queue_incoming_channel(channel)

    self._forward_agent_handler = default_handler

def _set_x11_handler(self, handler):
    # only called if a channel has turned on x11 forwarding
    if handler is not None:
        self._x11_handler = handler
        return

    # by default, use the same mechanism as accept()
    def default_handler(channel, src_addr_port):
        self._queue_incoming_channel(channel)

    self._x11_handler = default_handler

def _queue_incoming_channel(self, channel):
    """Park a server-side channel for accept() and wake any waiter."""
    self.lock.acquire()
    try:
        self.server_accepts.append(channel)
        self.server_accept_cv.notify()
    finally:
        self.lock.release()

def _sanitize_window_size(self, window_size):
    # None selects the transport default; clamp into the legal range
    size = self.default_window_size if window_size is None else window_size
    return clamp_value(MIN_WINDOW_SIZE, size, MAX_WINDOW_SIZE)

def _sanitize_packet_size(self, max_packet_size):
    # NOTE(review): the upper bound uses MAX_WINDOW_SIZE (not a packet
    # maximum), preserved as-is from the original -- confirm intent.
    if max_packet_size is None:
        max_packet_size = self.default_max_packet_size
    return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE)

def _ensure_authed(self, ptype, message):
    """
    Checks message type against current auth state.

    If server mode, and auth has not succeeded, and the message is of a
    post-auth type (channel open or global request) an appropriate error
    response Message is crafted and returned to caller for sending.

    Otherwise (client mode, authed, or pre-auth message) returns None.
    """
    if (
        not self.server_mode
        or ptype <= HIGHEST_USERAUTH_MESSAGE_ID
        or self.is_authenticated()
    ):
        return None
    # Somebody is trying to do non-auth things without being authed;
    # refuse them, with a reply shaped by the message class.
    reply = Message()
    if ptype == MSG_GLOBAL_REQUEST:
        # global requests have no details, just failure
        reply.add_byte(cMSG_REQUEST_FAILURE)
    elif ptype == MSG_CHANNEL_OPEN:
        # channel opens let us reject w/ a specific type + message;
        # get_text() must still run to consume the 'kind' field before
        # the channel ID can be read
        kind = message.get_text()
        chanid = message.get_int()
        reply.add_byte(cMSG_CHANNEL_OPEN_FAILURE)
        reply.add_int(chanid)
        reply.add_int(OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED)
        reply.add_string('')
        reply.add_string('en')
    # Post-open channel messages do not need checking; the rejection
    # above keeps the channel list empty, so even a malicious
    # MSG_CHANNEL_REQUEST simply falls under the unknown-channel-ID
    # handling.
    return reply

def run(self):
    """Main loop of the transport thread: banner, kex, packet dispatch."""
    # (use the exposed "run" method, because if we specify a thread target
    # of a private method, threading.Thread will keep a reference to it
    # indefinitely, creating a GC cycle and not letting Transport ever be
    # GC'd. it's a bug in Thread.)

    # Hold reference to 'sys' so we can test sys.modules to detect
    # interpreter shutdown.
    self.sys = sys

    # active=True occurs before the thread is launched, to avoid a race
    _active_threads.append(self)
    tid = hex(long(id(self)) & xffffffff)
    if self.server_mode:
        self._log(DEBUG, 'starting thread (server mode): %s' % tid)
    else:
        self._log(DEBUG, 'starting thread (client mode): %s' % tid)
    try:
        try:
            self.packetizer.write_all(b(self.local_version + '\r\n'))
            self._log(DEBUG, 'Local version/idstring: %s' % self.local_version)  # noqa
            self._check_banner()
            # The above is actually very much part of the handshake, but
            # sometimes the banner can be read but the machine is not
            # responding, for example when the remote ssh daemon is loaded
            # in to memory but we can not read from the disk/spawn a new
            # shell.
            # Make sure we can specify a timeout for the initial handshake.
            # Re-use the banner timeout for now.
            self.packetizer.start_handshake(self.handshake_timeout)
            self._send_kex_init()
            self._expect_packet(MSG_KEXINIT)
            while self.active:
                if self.packetizer.need_rekey() and not self.in_kex:
                    self._send_kex_init()
                try:
                    ptype, m = self.packetizer.read_message()
                except NeedRekeyException:
                    continue
                if ptype == MSG_IGNORE:
                    continue
                elif ptype == MSG_DISCONNECT:
                    self._parse_disconnect(m)
                    self.active = False
                    self.packetizer.close()
                    break
                elif ptype == MSG_DEBUG:
                    self._parse_debug(m)
                    continue
                if len(self._expected_packet) > 0:
                    if ptype not in self._expected_packet:
                        raise SSHException('Expecting packet from %r, got %d' % (self._expected_packet, ptype))  # noqa
                    self._expected_packet = tuple()
                    # kex-range message types are routed to the kex engine
                    if (ptype >= 30) and (ptype <= 41):
                        self.kex_engine.parse_next(ptype, m)
                        continue
                if ptype in self._handler_table:
                    # reject post-auth messages on unauthenticated
                    # server-mode sessions (see _ensure_authed)
                    error_msg = self._ensure_authed(ptype, m)
                    if error_msg:
                        self._send_message(error_msg)
                    else:
                        self._handler_table[ptype](self, m)
                elif ptype in self._channel_handler_table:
                    chanid = m.get_int()
                    chan = self._channels.get(chanid)
                    if chan is not None:
                        self._channel_handler_table[ptype](chan, m)
                    elif chanid in self.channels_seen:
                        self._log(DEBUG, 'Ignoring message for dead channel %d' % chanid)  # noqa
                    else:
                        self._log(ERROR, 'Channel request for unknown channel %d' % chanid)  # noqa
                        self.active = False
                        self.packetizer.close()
                elif (
                    self.auth_handler is not None
                    and ptype in self.auth_handler._handler_table
                ):
                    handler = self.auth_handler._handler_table[ptype]
                    handler(self.auth_handler, m)
                    if len(self._expected_packet) > 0:
                        continue
                else:
                    # unknown message type: reply MSG_UNIMPLEMENTED
                    self._log(WARNING, 'Oops, unhandled type %d' % ptype)
                    msg = Message()
                    msg.add_byte(cMSG_UNIMPLEMENTED)
                    msg.add_int(m.seqno)
                    self._send_message(msg)
            self.packetizer.complete_handshake()
        except SSHException as e:
            self._log(ERROR, 'Exception: ' + str(e))
            self._log(ERROR, util.tb_strings())
            self.saved_exception = e
        except EOFError as e:
            self._log(DEBUG, 'EOF in transport thread')
            self.saved_exception = e
        except socket.error as e:
            if type(e.args) is tuple:
                if e.args:
                    emsg = '%s (%d)' % (e.args[1], e.args[0])
                else:
                    # empty tuple, e.g. socket.timeout
                    emsg = str(e) or repr(e)
            else:
                emsg = e.args
            self._log(ERROR, 'Socket exception: ' + emsg)
            self.saved_exception = e
        except Exception as e:
            self._log(ERROR, 'Unknown exception: ' + str(e))
            self._log(ERROR, util.tb_strings())
            self.saved_exception = e
        # shutdown: detach channels, wake all waiters, close the socket
        _active_threads.remove(self)
        for chan in list(self._channels.values()):
            chan._unlink()
        if self.active:
            self.active = False
            self.packetizer.close()
            if self.completion_event is not None:
                self.completion_event.set()
            if self.auth_handler is not None:
                self.auth_handler.abort()
            for event in self.channel_events.values():
                event.set()
            try:
                self.lock.acquire()
                self.server_accept_cv.notify()
            finally:
                self.lock.release()
        self.sock.close()
    except:
        # Don't raise spurious 'NoneType has no attribute X' errors when we
        # wake up during interpreter shutdown. Or rather -- raise
        # everything *if* sys.modules (used as a convenient sentinel)
        # appears to still exist.
        if self.sys.modules is not None:
            raise

def _log_agreement(self, which, local, remote):
    # Log useful, non-duplicative line re: an agreed-upon algorithm.
    # Old code implied algorithms could be asymmetrical (different for
    # inbound vs outbound) so we preserve that possibility.
    msg = "{0} agreed: ".format(which)
    if local == remote:
        msg += local
    else:
        msg += "local={0}, remote={1}".format(local, remote)
    self._log(DEBUG, msg)

# protocol stages

def _negotiate_keys(self, m):
    # throws SSHException on anything unusual
    self.clear_to_send_lock.acquire()
    try:
        self.clear_to_send.clear()
    finally:
        self.clear_to_send_lock.release()
    if self.local_kex_init is None:
        # remote side wants to renegotiate
        self._send_kex_init()
    self._parse_kex_init(m)
    self.kex_engine.start_kex()

def _check_banner(self):
    # this is slow, but we only have to do it once
    for i in range(100):
        # give them 15 seconds for the first line, then just 2 seconds
        # each additional line. (some sites have very high latency.)
        if i == 0:
            # first line gets the configured (longer) banner timeout
            timeout = self.banner_timeout
        else:
            timeout = 2
        try:
            buf = self.packetizer.readline(timeout)
        except ProxyCommandFailure:
            raise
        except Exception as e:
            # NOTE(review): message lacks a separator before str(e),
            # producing 'banner<err>' -- consider adding ': '.
            raise SSHException(
                'Error reading SSH protocol banner' + str(e)
            )
        if buf[:4] == 'SSH-':
            break
        # not the version line yet (e.g. a pre-banner notice); keep reading
        self._log(DEBUG, 'Banner: ' + buf)
    if buf[:4] != 'SSH-':
        raise SSHException('Indecipherable protocol version "' + buf + '"')
    # save this server version string for later
    self.remote_version = buf
    self._log(DEBUG, 'Remote version/idstring: %s' % buf)
    # pull off any attached comment
    # NOTE: comment used to be stored in a variable and then...never used.
    # since 2003. ca 877cd974b8182d26fa76d566072917ea67b64e67
    i = buf.find(' ')
    if i >= 0:
        buf = buf[:i]
    # parse out version string and make sure it matches
    segs = buf.split('-', 2)
    if len(segs) < 3:
        raise SSHException('Invalid SSH banner')
    version = segs[1]
    client = segs[2]
    # '1.99' is the compatibility marker for servers speaking both 1.x/2.0
    if version != '1.99' and version != '2.0':
        msg = 'Incompatible version ({0} instead of 2.0)'
        raise SSHException(msg.format(version))
    msg = 'Connected (version {0}, client {1})'.format(version, client)
    self._log(INFO, msg)

def _send_kex_init(self):
    """
    announce to the other side that we'd like to negotiate keys, and what
    kind of key negotiation we support.
""" self.clear_to_send_lock.acquire() try: self.clear_to_send.clear() finally: self.clear_to_send_lock.release() self.in_kex = True if self.server_mode: mp_required_prefix = 'diffie-hellman-group-exchange-sha' kex_mp = [ k for k in self._preferred_kex if k.startswith(mp_required_prefix) ] if (self._modulus_pack is None) and (len(kex_mp) > 0): # can't do group-exchange if we don't have a pack of potential # primes pkex = [ k for k in self.get_security_options().kex if not k.startswith(mp_required_prefix) ] self.get_security_options().kex = pkex available_server_keys = list(filter( list(self.server_key_dict.keys()).__contains__, self._preferred_keys )) else: available_server_keys = self._preferred_keys m = Message() m.add_byte(cMSG_KEXINIT) m.add_bytes(os.urandom(16)) m.add_list(self._preferred_kex) m.add_list(available_server_keys) m.add_list(self._preferred_ciphers) m.add_list(self._preferred_ciphers) m.add_list(self._preferred_macs) m.add_list(self._preferred_macs) m.add_list(self._preferred_compression) m.add_list(self._preferred_compression) m.add_string(bytes()) m.add_string(bytes()) m.add_boolean(False) m.add_int(0) # save a copy for later (needed to compute a hash) self.local_kex_init = m.asbytes() self._send_message(m) def _parse_kex_init(self, m): m.get_bytes(16) # cookie, discarded kex_algo_list = m.get_list() server_key_algo_list = m.get_list() client_encrypt_algo_list = m.get_list() server_encrypt_algo_list = m.get_list() client_mac_algo_list = m.get_list() server_mac_algo_list = m.get_list() client_compress_algo_list = m.get_list() server_compress_algo_list = m.get_list() client_lang_list = m.get_list() server_lang_list = m.get_list() kex_follows = m.get_boolean() m.get_int() # unused self._log(DEBUG, 'kex algos:' + str(kex_algo_list) + ' server key:' + str(server_key_algo_list) + ' client encrypt:' + str(client_encrypt_algo_list) + ' server encrypt:' + str(server_encrypt_algo_list) + ' client mac:' + str(client_mac_algo_list) + ' server mac:' + 
str(server_mac_algo_list) + ' client compress:' + str(client_compress_algo_list) + ' server compress:' + str(server_compress_algo_list) + ' client lang:' + str(client_lang_list) + ' server lang:' + str(server_lang_list) + ' kex follows?' + str(kex_follows) ) # as a server, we pick the first item in the client's list that we # support. # as a client, we pick the first item in our list that the server # supports. if self.server_mode: agreed_kex = list(filter( self._preferred_kex.__contains__, kex_algo_list )) else: agreed_kex = list(filter( kex_algo_list.__contains__, self._preferred_kex )) if len(agreed_kex) == 0: raise SSHException('Incompatible ssh peer (no acceptable kex algorithm)') # noqa self.kex_engine = self._kex_info[agreed_kex[0]](self) self._log(DEBUG, "Kex agreed: %s" % agreed_kex[0]) if self.server_mode: available_server_keys = list(filter( list(self.server_key_dict.keys()).__contains__, self._preferred_keys )) agreed_keys = list(filter( available_server_keys.__contains__, server_key_algo_list )) else: agreed_keys = list(filter( server_key_algo_list.__contains__, self._preferred_keys )) if len(agreed_keys) == 0: raise SSHException('Incompatible ssh peer (no acceptable host key)') # noqa self.host_key_type = agreed_keys[0] if self.server_mode and (self.get_server_key() is None): raise SSHException('Incompatible ssh peer (can\'t match requested host key type)') # noqa if self.server_mode: agreed_local_ciphers = list(filter( self._preferred_ciphers.__contains__, server_encrypt_algo_list )) agreed_remote_ciphers = list(filter( self._preferred_ciphers.__contains__, client_encrypt_algo_list )) else: agreed_local_ciphers = list(filter( client_encrypt_algo_list.__contains__, self._preferred_ciphers )) agreed_remote_ciphers = list(filter( server_encrypt_algo_list.__contains__, self._preferred_ciphers )) if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0: raise SSHException('Incompatible ssh server (no acceptable ciphers)') # noqa 
self.local_cipher = agreed_local_ciphers[0] self.remote_cipher = agreed_remote_ciphers[0] self._log_agreement( 'Cipher', local=self.local_cipher, remote=self.remote_cipher ) if self.server_mode: agreed_remote_macs = list(filter( self._preferred_macs.__contains__, client_mac_algo_list )) agreed_local_macs = list(filter( self._preferred_macs.__contains__, server_mac_algo_list )) else: agreed_local_macs = list(filter( client_mac_algo_list.__contains__, self._preferred_macs )) agreed_remote_macs = list(filter( server_mac_algo_list.__contains__, self._preferred_macs )) if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0): raise SSHException('Incompatible ssh server (no acceptable macs)') self.local_mac = agreed_local_macs[0] self.remote_mac = agreed_remote_macs[0] self._log_agreement( 'MAC', local=self.local_mac, remote=self.remote_mac ) if self.server_mode: agreed_remote_compression = list(filter( self._preferred_compression.__contains__, client_compress_algo_list )) agreed_local_compression = list(filter( self._preferred_compression.__contains__, server_compress_algo_list )) else: agreed_local_compression = list(filter( client_compress_algo_list.__contains__, self._preferred_compression )) agreed_remote_compression = list(filter( server_compress_algo_list.__contains__, self._preferred_compression )) if ( len(agreed_local_compression) == 0 or len(agreed_remote_compression) == 0 ): msg = 'Incompatible ssh server (no acceptable compression) {0!r} {1!r} {2!r}' # noqa raise SSHException(msg.format( agreed_local_compression, agreed_remote_compression, self._preferred_compression, )) self.local_compression = agreed_local_compression[0] self.remote_compression = agreed_remote_compression[0] self._log_agreement( 'Compression', local=self.local_compression, remote=self.remote_compression ) # save for computing hash later... # now wait! 
openssh has a bug (and others might too) where there are # actually some extra bytes (one NUL byte in openssh's case) added to # the end of the packet but not parsed. turns out we need to throw # away those bytes because they aren't part of the hash. self.remote_kex_init = cMSG_KEXINIT + m.get_so_far() def _activate_inbound(self): """switch on newly negotiated encryption parameters for inbound traffic""" block_size = self._cipher_info[self.remote_cipher]['block-size'] if self.server_mode: IV_in = self._compute_key('A', block_size) key_in = self._compute_key( 'C', self._cipher_info[self.remote_cipher]['key-size'] ) else: IV_in = self._compute_key('B', block_size) key_in = self._compute_key( 'D', self._cipher_info[self.remote_cipher]['key-size'] ) engine = self._get_cipher( self.remote_cipher, key_in, IV_in, self._DECRYPT ) mac_size = self._mac_info[self.remote_mac]['size'] mac_engine = self._mac_info[self.remote_mac]['class'] # initial mac keys are done in the hash's natural size (not the # potentially truncated transmission size) if self.server_mode: mac_key = self._compute_key('E', mac_engine().digest_size) else: mac_key = self._compute_key('F', mac_engine().digest_size) self.packetizer.set_inbound_cipher( engine, block_size, mac_engine, mac_size, mac_key ) compress_in = self._compression_info[self.remote_compression][1] if ( compress_in is not None and ( self.remote_compression != 'zlib@openssh.com' or self.authenticated ) ): self._log(DEBUG, 'Switching on inbound compression ...') self.packetizer.set_inbound_compressor(compress_in()) def _activate_outbound(self): """switch on newly negotiated encryption parameters for outbound traffic""" m = Message() m.add_byte(cMSG_NEWKEYS) self._send_message(m) block_size = self._cipher_info[self.local_cipher]['block-size'] if self.server_mode: IV_out = self._compute_key('B', block_size) key_out = self._compute_key( 'D', self._cipher_info[self.local_cipher]['key-size']) else: IV_out = self._compute_key('A', block_size) 
key_out = self._compute_key( 'C', self._cipher_info[self.local_cipher]['key-size']) engine = self._get_cipher( self.local_cipher, key_out, IV_out, self._ENCRYPT) mac_size = self._mac_info[self.local_mac]['size'] mac_engine = self._mac_info[self.local_mac]['class'] # initial mac keys are done in the hash's natural size (not the # potentially truncated transmission size) if self.server_mode: mac_key = self._compute_key('F', mac_engine().digest_size) else: mac_key = self._compute_key('E', mac_engine().digest_size) sdctr = self.local_cipher.endswith('-ctr') self.packetizer.set_outbound_cipher( engine, block_size, mac_engine, mac_size, mac_key, sdctr) compress_out = self._compression_info[self.local_compression][0] if ( compress_out is not None and ( self.local_compression != 'zlib@openssh.com' or self.authenticated ) ): self._log(DEBUG, 'Switching on outbound compression ...') self.packetizer.set_outbound_compressor(compress_out()) if not self.packetizer.need_rekey(): self.in_kex = False # we always expect to receive NEWKEYS now self._expect_packet(MSG_NEWKEYS) def _auth_trigger(self): self.authenticated = True # delayed initiation of compression if self.local_compression == 'zlib@openssh.com': compress_out = self._compression_info[self.local_compression][0] self._log(DEBUG, 'Switching on outbound compression ...') self.packetizer.set_outbound_compressor(compress_out()) if self.remote_compression == 'zlib@openssh.com': compress_in = self._compression_info[self.remote_compression][1] self._log(DEBUG, 'Switching on inbound compression ...') self.packetizer.set_inbound_compressor(compress_in()) def _parse_newkeys(self, m): self._log(DEBUG, 'Switch to new keys ...') self._activate_inbound() # can also free a bunch of stuff here self.local_kex_init = self.remote_kex_init = None self.K = None self.kex_engine = None if self.server_mode and (self.auth_handler is None): # create auth handler for server mode self.auth_handler = AuthHandler(self) if not self.initial_kex_done: # 
this was the first key exchange self.initial_kex_done = True # send an event? if self.completion_event is not None: self.completion_event.set() # it's now okay to send data again (if this was a re-key) if not self.packetizer.need_rekey(): self.in_kex = False self.clear_to_send_lock.acquire() try: self.clear_to_send.set() finally: self.clear_to_send_lock.release() return def _parse_disconnect(self, m): code = m.get_int() desc = m.get_text() self._log(INFO, 'Disconnect (code %d): %s' % (code, desc)) def _parse_global_request(self, m): kind = m.get_text() self._log(DEBUG, 'Received global request "%s"' % kind) want_reply = m.get_boolean() if not self.server_mode: self._log( DEBUG, 'Rejecting "%s" global request from server.' % kind ) ok = False elif kind == 'tcpip-forward': address = m.get_text() port = m.get_int() ok = self.server_object.check_port_forward_request(address, port) if ok: ok = (ok,) elif kind == 'cancel-tcpip-forward': address = m.get_text() port = m.get_int() self.server_object.cancel_port_forward_request(address, port) ok = True else: ok = self.server_object.check_global_request(kind, m) extra = () if type(ok) is tuple: extra = ok ok = True if want_reply: msg = Message() if ok: msg.add_byte(cMSG_REQUEST_SUCCESS) msg.add(*extra) else: msg.add_byte(cMSG_REQUEST_FAILURE) self._send_message(msg) def _parse_request_success(self, m): self._log(DEBUG, 'Global request successful.') self.global_response = m if self.completion_event is not None: self.completion_event.set() def _parse_request_failure(self, m): self._log(DEBUG, 'Global request denied.') self.global_response = None if self.completion_event is not None: self.completion_event.set() def _parse_channel_open_success(self, m): chanid = m.get_int() server_chanid = m.get_int() server_window_size = m.get_int() server_max_packet_size = m.get_int() chan = self._channels.get(chanid) if chan is None: self._log(WARNING, 'Success for unrequested channel! 
[??]') return self.lock.acquire() try: chan._set_remote_channel( server_chanid, server_window_size, server_max_packet_size) self._log(DEBUG, 'Secsh channel %d opened.' % chanid) if chanid in self.channel_events: self.channel_events[chanid].set() del self.channel_events[chanid] finally: self.lock.release() return def _parse_channel_open_failure(self, m): chanid = m.get_int() reason = m.get_int() reason_str = m.get_text() m.get_text() # ignored language reason_text = CONNECTION_FAILED_CODE.get(reason, '(unknown code)') self._log( ERROR, 'Secsh channel %d open FAILED: %s: %s' % ( chanid, reason_str, reason_text) ) self.lock.acquire() try: self.saved_exception = ChannelException(reason, reason_text) if chanid in self.channel_events: self._channels.delete(chanid) if chanid in self.channel_events: self.channel_events[chanid].set() del self.channel_events[chanid] finally: self.lock.release() return def _parse_channel_open(self, m): kind = m.get_text() chanid = m.get_int() initial_window_size = m.get_int() max_packet_size = m.get_int() reject = False if ( kind == 'auth-agent@openssh.com' and self._forward_agent_handler is not None ): self._log(DEBUG, 'Incoming forward agent connection') self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() elif (kind == 'x11') and (self._x11_handler is not None): origin_addr = m.get_text() origin_port = m.get_int() self._log( DEBUG, 'Incoming x11 connection from %s:%d' % ( origin_addr, origin_port) ) self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() elif (kind == 'forwarded-tcpip') and (self._tcp_handler is not None): server_addr = m.get_text() server_port = m.get_int() origin_addr = m.get_text() origin_port = m.get_int() self._log( DEBUG, 'Incoming tcp forwarded connection from %s:%d' % ( origin_addr, origin_port) ) self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() elif not self.server_mode: self._log( DEBUG, 'Rejecting "%s" channel 
request from server.' % kind) reject = True reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED else: self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() if kind == 'direct-tcpip': # handle direct-tcpip requests coming from the client dest_addr = m.get_text() dest_port = m.get_int() origin_addr = m.get_text() origin_port = m.get_int() reason = self.server_object.check_channel_direct_tcpip_request( my_chanid, (origin_addr, origin_port), (dest_addr, dest_port) ) else: reason = self.server_object.check_channel_request( kind, my_chanid) if reason != OPEN_SUCCEEDED: self._log( DEBUG, 'Rejecting "%s" channel request from client.' % kind) reject = True if reject: msg = Message() msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE) msg.add_int(chanid) msg.add_int(reason) msg.add_string('') msg.add_string('en') self._send_message(msg) return chan = Channel(my_chanid) self.lock.acquire() try: self._channels.put(my_chanid, chan) self.channels_seen[my_chanid] = True chan._set_transport(self) chan._set_window( self.default_window_size, self.default_max_packet_size) chan._set_remote_channel( chanid, initial_window_size, max_packet_size) finally: self.lock.release() m = Message() m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS) m.add_int(chanid) m.add_int(my_chanid) m.add_int(self.default_window_size) m.add_int(self.default_max_packet_size) self._send_message(m) self._log(DEBUG, 'Secsh channel %d (%s) opened.', my_chanid, kind) if kind == 'auth-agent@openssh.com': self._forward_agent_handler(chan) elif kind == 'x11': self._x11_handler(chan, (origin_addr, origin_port)) elif kind == 'forwarded-tcpip': chan.origin_addr = (origin_addr, origin_port) self._tcp_handler( chan, (origin_addr, origin_port), (server_addr, server_port) ) else: self._queue_incoming_channel(chan) def _parse_debug(self, m): m.get_boolean() # always_display msg = m.get_string() m.get_string() # language self._log(DEBUG, 'Debug msg: {0}'.format(util.safe_string(msg))) def _get_subsystem_handler(self, 
name): try: self.lock.acquire() if name not in self.subsystem_table: return None, [], {} return self.subsystem_table[name] finally: self.lock.release() _handler_table = { MSG_NEWKEYS: _parse_newkeys, MSG_GLOBAL_REQUEST: _parse_global_request, MSG_REQUEST_SUCCESS: _parse_request_success, MSG_REQUEST_FAILURE: _parse_request_failure, MSG_CHANNEL_OPEN_SUCCESS: _parse_channel_open_success, MSG_CHANNEL_OPEN_FAILURE: _parse_channel_open_failure, MSG_CHANNEL_OPEN: _parse_channel_open, MSG_KEXINIT: _negotiate_keys, } _channel_handler_table = { MSG_CHANNEL_SUCCESS: Channel._request_success, MSG_CHANNEL_FAILURE: Channel._request_failed, MSG_CHANNEL_DATA: Channel._feed, MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended, MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust, MSG_CHANNEL_REQUEST: Channel._handle_request, MSG_CHANNEL_EOF: Channel._handle_eof, MSG_CHANNEL_CLOSE: Channel._handle_close, } class SecurityOptions (object): """ Simple object containing the security preferences of an ssh transport. These are tuples of acceptable ciphers, digests, key types, and key exchange algorithms, listed in order of preference. Changing the contents and/or order of these fields affects the underlying `.Transport` (but only if you change them before starting the session). If you try to add an algorithm that paramiko doesn't recognize, ``ValueError`` will be raised. If you try to assign something besides a tuple to one of the fields, ``TypeError`` will be raised. """ __slots__ = '_transport' def __init__(self, transport): self._transport = transport def __repr__(self): """ Returns a string representation of this object, for debugging. 
""" return '<paramiko.SecurityOptions for %s>' % repr(self._transport) def _set(self, name, orig, x): if type(x) is list: x = tuple(x) if type(x) is not tuple: raise TypeError('expected tuple or list') possible = list(getattr(self._transport, orig).keys()) forbidden = [n for n in x if n not in possible] if len(forbidden) > 0: raise ValueError('unknown cipher') setattr(self._transport, name, x) @property def ciphers(self): """Symmetric encryption ciphers""" return self._transport._preferred_ciphers @ciphers.setter def ciphers(self, x): self._set('_preferred_ciphers', '_cipher_info', x) @property def digests(self): """Digest (one-way hash) algorithms""" return self._transport._preferred_macs @digests.setter def digests(self, x): self._set('_preferred_macs', '_mac_info', x) @property def key_types(self): """Public-key algorithms""" return self._transport._preferred_keys @key_types.setter def key_types(self, x): self._set('_preferred_keys', '_key_info', x) @property def kex(self): """Key exchange algorithms""" return self._transport._preferred_kex @kex.setter def kex(self, x): self._set('_preferred_kex', '_kex_info', x) @property def compression(self): """Compression algorithms""" return self._transport._preferred_compression @compression.setter def compression(self, x): self._set('_preferred_compression', '_compression_info', x) class ChannelMap (object): def __init__(self): # (id -> Channel) self._map = weakref.WeakValueDictionary() self._lock = threading.Lock() def put(self, chanid, chan): self._lock.acquire() try: self._map[chanid] = chan finally: self._lock.release() def get(self, chanid): self._lock.acquire() try: return self._map.get(chanid, None) finally: self._lock.release() def delete(self, chanid): self._lock.acquire() try: try: del self._map[chanid] except KeyError: pass finally: self._lock.release() def values(self): self._lock.acquire() try: return list(self._map.values()) finally: self._lock.release() def __len__(self): self._lock.acquire() try: return 
len(self._map) finally: self._lock.release()
./CrossVul/dataset_final_sorted/CWE-287/py/good_650_1
crossvul-python_data_bad_4331_1
from datetime import datetime from typing import Any, Dict, List, Optional, Tuple, Union from uuid import uuid4 from flask import g from alerta.app import db from alerta.database.base import Query from alerta.models.enums import ChangeType, NoteType from alerta.models.history import History from alerta.utils.format import DateTime from alerta.utils.response import absolute_url JSON = Dict[str, Any] class Note: def __init__(self, text: str, user: str, note_type: str, **kwargs) -> None: self.id = kwargs.get('id') or str(uuid4()) self.text = text self.user = user self.note_type = note_type self.attributes = kwargs.get('attributes', None) or dict() self.create_time = kwargs['create_time'] if 'create_time' in kwargs else datetime.utcnow() self.update_time = kwargs.get('update_time') self.alert = kwargs.get('alert') self.customer = kwargs.get('customer') @classmethod def parse(cls, json: JSON) -> 'Note': return Note( id=json.get('id', None), text=json.get('status', None), user=json.get('status', None), attributes=json.get('attributes', dict()), note_type=json.get('type', None), create_time=DateTime.parse(json['createTime']) if 'createTime' in json else None, update_time=DateTime.parse(json['updateTime']) if 'updateTime' in json else None, alert=json.get('related', {}).get('alert'), customer=json.get('customer', None) ) @property def serialize(self) -> Dict[str, Any]: note = { 'id': self.id, 'href': absolute_url('/note/' + self.id), 'text': self.text, 'user': self.user, 'attributes': self.attributes, 'type': self.note_type, 'createTime': self.create_time, 'updateTime': self.update_time, '_links': dict(), 'customer': self.customer } if self.alert: note['_links'] = { 'alert': absolute_url('/alert/' + self.alert) } return note def __repr__(self) -> str: return 'Note(id={!r}, text={!r}, user={!r}, type={!r}, customer={!r})'.format( self.id, self.text, self.user, self.note_type, self.customer ) @classmethod def from_document(cls, doc: Dict[str, Any]) -> 'Note': return Note( 
id=doc.get('id', None) or doc.get('_id'), text=doc.get('text', None), user=doc.get('user', None), attributes=doc.get('attributes', dict()), note_type=doc.get('type', None), create_time=doc.get('createTime'), update_time=doc.get('updateTime'), alert=doc.get('alert'), customer=doc.get('customer') ) @classmethod def from_record(cls, rec) -> 'Note': return Note( id=rec.id, text=rec.text, user=rec.user, attributes=dict(rec.attributes), note_type=rec.type, create_time=rec.create_time, update_time=rec.update_time, alert=rec.alert, customer=rec.customer ) @classmethod def from_db(cls, r: Union[Dict, Tuple]) -> 'Note': if isinstance(r, dict): return cls.from_document(r) elif isinstance(r, tuple): return cls.from_record(r) def create(self) -> 'Note': return Note.from_db(db.create_note(self)) @staticmethod def from_alert(alert, text): note = Note( text=text, user=g.login, note_type=NoteType.alert, attributes=dict( resource=alert.resource, event=alert.event, environment=alert.environment, severity=alert.severity, status=alert.status ), alert=alert.id, customer=alert.customer ) history = History( id=note.id, event=alert.event, severity=alert.severity, status=alert.status, value=alert.value, text=text, change_type=ChangeType.note, update_time=datetime.utcnow(), user=g.login ) db.add_history(alert.id, history) return note.create() @staticmethod def find_by_id(id: str) -> Optional['Note']: return Note.from_db(db.get_note(id)) @staticmethod def find_all(query: Query = None) -> List['Note']: return [Note.from_db(note) for note in db.get_notes(query)] def update(self, **kwargs) -> 'Note': return Note.from_db(db.update_note(self.id, **kwargs)) def delete(self) -> bool: return db.delete_note(self.id)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_4331_1
crossvul-python_data_bad_2505_4
# -*- coding: utf-8 -*- from typing import Any, List, Dict, Mapping, Optional, Text from django.utils.translation import ugettext as _ from django.conf import settings from django.contrib.auth import authenticate, get_backends from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponseForbidden, HttpResponse, HttpRequest from django.shortcuts import redirect, render from django.template import RequestContext, loader from django.utils.timezone import now from django.core.exceptions import ValidationError from django.core import validators from zerver.context_processors import get_realm_from_request from zerver.models import UserProfile, Realm, Stream, PreregistrationUser, MultiuseInvite, \ name_changes_disabled, email_to_username, email_allowed_for_realm, \ get_realm, get_user_profile_by_email from zerver.lib.send_email import send_email, FromAddress from zerver.lib.events import do_events_register from zerver.lib.actions import do_change_password, do_change_full_name, do_change_is_admin, \ do_activate_user, do_create_user, do_create_realm, \ user_email_is_unique, compute_mit_user_fullname, validate_email_for_realm, \ do_set_user_display_setting from zerver.forms import RegistrationForm, HomepageForm, RealmCreationForm, \ CreateUserForm, FindMyTeamForm from django_auth_ldap.backend import LDAPBackend, _LDAPUser from zerver.decorator import require_post, has_request_variables, \ JsonableError, REQ, do_login from zerver.lib.onboarding import send_initial_pms, setup_initial_streams, \ setup_initial_private_stream, send_initial_realm_messages from zerver.lib.response import json_success from zerver.lib.subdomains import get_subdomain, is_root_domain_available from zerver.lib.timezone import get_all_timezones from zproject.backends import ldap_auth_enabled, password_auth_enabled, ZulipLDAPAuthBackend from confirmation.models import Confirmation, RealmCreationKey, ConfirmationKeyException, \ check_key_is_valid, 
create_confirmation_link, get_object_from_key, \ render_confirmation_key_error import logging import requests import smtplib import ujson from six.moves import urllib def redirect_and_log_into_subdomain(realm, full_name, email_address, is_signup=False): # type: (Realm, Text, Text, bool) -> HttpResponse subdomain_login_uri = ''.join([ realm.uri, reverse('zerver.views.auth.log_into_subdomain') ]) domain = settings.EXTERNAL_HOST.split(':')[0] response = redirect(subdomain_login_uri) data = {'name': full_name, 'email': email_address, 'subdomain': realm.subdomain, 'is_signup': is_signup} # Creating a singed cookie so that it cannot be tampered with. # Cookie and the signature expire in 15 seconds. response.set_signed_cookie('subdomain.signature', ujson.dumps(data), expires=15, domain=domain, salt='zerver.views.auth') return response @require_post def accounts_register(request): # type: (HttpRequest) -> HttpResponse key = request.POST['key'] confirmation = Confirmation.objects.get(confirmation_key=key) prereg_user = confirmation.content_object email = prereg_user.email realm_creation = prereg_user.realm_creation password_required = prereg_user.password_required validators.validate_email(email) if realm_creation: # For creating a new realm, there is no existing realm or domain realm = None else: realm = get_realm(get_subdomain(request)) if prereg_user.realm is not None and prereg_user.realm != realm: return render(request, 'confirmation/link_does_not_exist.html') if realm and not email_allowed_for_realm(email, realm): return render(request, "zerver/closed_realm.html", context={"closed_domain_name": realm.name}) if realm and realm.deactivated: # The user is trying to register for a deactivated realm. Advise them to # contact support. 
return redirect_to_deactivation_notice() try: validate_email_for_realm(realm, email) except ValidationError: return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.parse.quote_plus(email)) name_validated = False full_name = None if request.POST.get('from_confirmation'): try: del request.session['authenticated_full_name'] except KeyError: pass if realm is not None and realm.is_zephyr_mirror_realm: # For MIT users, we can get an authoritative name from Hesiod. # Technically we should check that this is actually an MIT # realm, but we can cross that bridge if we ever get a non-MIT # zephyr mirroring realm. hesiod_name = compute_mit_user_fullname(email) form = RegistrationForm( initial={'full_name': hesiod_name if "@" not in hesiod_name else ""}, realm_creation=realm_creation) name_validated = True elif settings.POPULATE_PROFILE_VIA_LDAP: for backend in get_backends(): if isinstance(backend, LDAPBackend): ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs try: ldap_full_name = ldap_attrs[settings.AUTH_LDAP_USER_ATTR_MAP['full_name']][0] request.session['authenticated_full_name'] = ldap_full_name name_validated = True # We don't use initial= here, because if the form is # complete (that is, no additional fields need to be # filled out by the user) we want the form to validate, # so they can be directly registered without having to # go through this interstitial. form = RegistrationForm({'full_name': ldap_full_name}, realm_creation=realm_creation) # FIXME: This will result in the user getting # validation errors if they have to enter a password. # Not relevant for ONLY_SSO, though. 
break except TypeError: # Let the user fill out a name and/or try another backend form = RegistrationForm(realm_creation=realm_creation) elif 'full_name' in request.POST: form = RegistrationForm( initial={'full_name': request.POST.get('full_name')}, realm_creation=realm_creation ) else: form = RegistrationForm(realm_creation=realm_creation) else: postdata = request.POST.copy() if name_changes_disabled(realm): # If we populate profile information via LDAP and we have a # verified name from you on file, use that. Otherwise, fall # back to the full name in the request. try: postdata.update({'full_name': request.session['authenticated_full_name']}) name_validated = True except KeyError: pass form = RegistrationForm(postdata, realm_creation=realm_creation) if not (password_auth_enabled(realm) and password_required): form['password'].field.required = False if form.is_valid(): if password_auth_enabled(realm): password = form.cleaned_data['password'] else: # SSO users don't need no passwords password = None if realm_creation: string_id = form.cleaned_data['realm_subdomain'] realm_name = form.cleaned_data['realm_name'] realm = do_create_realm(string_id, realm_name) setup_initial_streams(realm) assert(realm is not None) full_name = form.cleaned_data['full_name'] short_name = email_to_username(email) timezone = u"" if 'timezone' in request.POST and request.POST['timezone'] in get_all_timezones(): timezone = request.POST['timezone'] try: existing_user_profile = get_user_profile_by_email(email) except UserProfile.DoesNotExist: existing_user_profile = None return_data = {} # type: Dict[str, bool] if ldap_auth_enabled(realm): # If the user was authenticated using an external SSO # mechanism like Google or GitHub auth, then authentication # will have already been done before creating the # PreregistrationUser object with password_required=False, and # so we don't need to worry about passwords. # # If instead the realm is using EmailAuthBackend, we will # set their password above. 
# # But if the realm is using LDAPAuthBackend, we need to verify # their LDAP password (which will, as a side effect, create # the user account) here using authenticate. auth_result = authenticate(request, username=email, password=password, realm_subdomain=realm.subdomain, return_data=return_data) if auth_result is None: # TODO: This probably isn't going to give a # user-friendly error message, but it doesn't # particularly matter, because the registration form # is hidden for most users. return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.parse.quote_plus(email)) # Since we'll have created a user, we now just log them in. return login_and_go_to_home(request, auth_result) elif existing_user_profile is not None and existing_user_profile.is_mirror_dummy: user_profile = existing_user_profile do_activate_user(user_profile) do_change_password(user_profile, password) do_change_full_name(user_profile, full_name, user_profile) do_set_user_display_setting(user_profile, 'timezone', timezone) else: user_profile = do_create_user(email, password, realm, full_name, short_name, prereg_user=prereg_user, is_realm_admin=realm_creation, tos_version=settings.TOS_VERSION, timezone=timezone, newsletter_data={"IP": request.META['REMOTE_ADDR']}) # Note: Any logic like this must also be replicated in # ZulipLDAPAuthBackend and zerver/views/users.py. This is # ripe for a refactoring, though care is required to avoid # import loops with zerver/lib/actions.py and zerver/lib/onboarding.py. send_initial_pms(user_profile) if realm_creation: setup_initial_private_stream(user_profile) send_initial_realm_messages(realm) if realm_creation: # Because for realm creation, registration happens on the # root domain, we need to log them into the subdomain for # their new realm. return redirect_and_log_into_subdomain(realm, full_name, email) # This dummy_backend check below confirms the user is # authenticating to the correct subdomain. 
auth_result = authenticate(username=user_profile.email, realm_subdomain=realm.subdomain, return_data=return_data, use_dummy_backend=True) if return_data.get('invalid_subdomain'): # By construction, this should never happen. logging.error("Subdomain mismatch in registration %s: %s" % ( realm.subdomain, user_profile.email,)) return redirect('/') return login_and_go_to_home(request, auth_result) return render( request, 'zerver/register.html', context={'form': form, 'email': email, 'key': key, 'full_name': request.session.get('authenticated_full_name', None), 'lock_name': name_validated and name_changes_disabled(realm), # password_auth_enabled is normally set via our context processor, # but for the registration form, there is no logged in user yet, so # we have to set it here. 'creating_new_team': realm_creation, 'password_required': password_auth_enabled(realm) and password_required, 'password_auth_enabled': password_auth_enabled(realm), 'root_domain_available': is_root_domain_available(), 'MAX_REALM_NAME_LENGTH': str(Realm.MAX_REALM_NAME_LENGTH), 'MAX_NAME_LENGTH': str(UserProfile.MAX_NAME_LENGTH), 'MAX_PASSWORD_LENGTH': str(form.MAX_PASSWORD_LENGTH), 'MAX_REALM_SUBDOMAIN_LENGTH': str(Realm.MAX_REALM_SUBDOMAIN_LENGTH) } ) def login_and_go_to_home(request, user_profile): # type: (HttpRequest, UserProfile) -> HttpResponse # Mark the user as having been just created, so no "new login" email is sent user_profile.just_registered = True do_login(request, user_profile) return HttpResponseRedirect(user_profile.realm.uri + reverse('zerver.views.home.home')) def create_preregistration_user(email, request, realm_creation=False, password_required=True): # type: (Text, HttpRequest, bool, bool) -> HttpResponse realm = None if not realm_creation: realm = get_realm(get_subdomain(request)) return PreregistrationUser.objects.create(email=email, realm_creation=realm_creation, password_required=password_required, realm=realm) def send_registration_completion_email(email, request, 
realm_creation=False, streams=None): # type: (str, HttpRequest, bool, Optional[List[Stream]]) -> None """ Send an email with a confirmation link to the provided e-mail so the user can complete their registration. """ prereg_user = create_preregistration_user(email, request, realm_creation) if streams is not None: prereg_user.streams = streams prereg_user.save() activation_url = create_confirmation_link(prereg_user, request.get_host(), Confirmation.USER_REGISTRATION) send_email('zerver/emails/confirm_registration', to_email=email, from_address=FromAddress.NOREPLY, context={'activate_url': activation_url}) if settings.DEVELOPMENT and realm_creation: request.session['confirmation_key'] = {'confirmation_key': activation_url.split('/')[-1]} def redirect_to_email_login_url(email): # type: (str) -> HttpResponseRedirect login_url = reverse('django.contrib.auth.views.login') email = urllib.parse.quote_plus(email) redirect_url = login_url + '?already_registered=' + email return HttpResponseRedirect(redirect_url) def create_realm(request, creation_key=None): # type: (HttpRequest, Optional[Text]) -> HttpResponse if not settings.OPEN_REALM_CREATION: if creation_key is None: return render(request, "zerver/realm_creation_failed.html", context={'message': _('New organization creation disabled.')}) elif not check_key_is_valid(creation_key): return render(request, "zerver/realm_creation_failed.html", context={'message': _('The organization creation link has expired' ' or is not valid.')}) # When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm, # subject to a few restrictions on their email address. 
if request.method == 'POST': form = RealmCreationForm(request.POST) if form.is_valid(): email = form.cleaned_data['email'] try: send_registration_completion_email(email, request, realm_creation=True) except smtplib.SMTPException as e: logging.error('Error in create_realm: %s' % (str(e),)) return HttpResponseRedirect("/config-error/smtp") if (creation_key is not None and check_key_is_valid(creation_key)): RealmCreationKey.objects.get(creation_key=creation_key).delete() return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email})) try: email = request.POST['email'] user_email_is_unique(email) except ValidationError: # Maybe the user is trying to log in return redirect_to_email_login_url(email) else: form = RealmCreationForm() return render(request, 'zerver/create_realm.html', context={'form': form, 'current_url': request.get_full_path}, ) def confirmation_key(request): # type: (HttpRequest) -> HttpResponse return json_success(request.session.get('confirmation_key')) def show_deactivation_notice(request): # type: (HttpRequest) -> HttpResponse realm = get_realm_from_request(request) if realm and realm.deactivated: return render(request, "zerver/deactivated.html", context={"deactivated_domain_name": realm.name}) return HttpResponseRedirect(reverse('zerver.views.auth.login_page')) def redirect_to_deactivation_notice(): # type: () -> HttpResponse return HttpResponseRedirect(reverse('zerver.views.registration.show_deactivation_notice')) def accounts_home(request, multiuse_object=None): # type: (HttpRequest, Optional[MultiuseInvite]) -> HttpResponse realm = get_realm(get_subdomain(request)) if realm and realm.deactivated: return redirect_to_deactivation_notice() from_multiuse_invite = False streams_to_subscribe = None if multiuse_object: realm = multiuse_object.realm streams_to_subscribe = multiuse_object.streams.all() from_multiuse_invite = True if request.method == 'POST': form = HomepageForm(request.POST, realm=realm, 
from_multiuse_invite=from_multiuse_invite) if form.is_valid(): email = form.cleaned_data['email'] try: send_registration_completion_email(email, request, streams=streams_to_subscribe) except smtplib.SMTPException as e: logging.error('Error in accounts_home: %s' % (str(e),)) return HttpResponseRedirect("/config-error/smtp") return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email})) email = request.POST['email'] try: validate_email_for_realm(realm, email) except ValidationError: return redirect_to_email_login_url(email) else: form = HomepageForm(realm=realm) return render(request, 'zerver/accounts_home.html', context={'form': form, 'current_url': request.get_full_path, 'from_multiuse_invite': from_multiuse_invite}, ) def accounts_home_from_multiuse_invite(request, confirmation_key): # type: (HttpRequest, str) -> HttpResponse multiuse_object = None try: multiuse_object = get_object_from_key(confirmation_key) # Required for oAuth2 request.session["multiuse_object_key"] = confirmation_key except ConfirmationKeyException as exception: realm = get_realm_from_request(request) if realm is None or realm.invite_required: return render_confirmation_key_error(request, exception) return accounts_home(request, multiuse_object=multiuse_object) def generate_204(request): # type: (HttpRequest) -> HttpResponse return HttpResponse(content=None, status=204) def find_account(request): # type: (HttpRequest) -> HttpResponse url = reverse('zerver.views.registration.find_account') emails = [] # type: List[Text] if request.method == 'POST': form = FindMyTeamForm(request.POST) if form.is_valid(): emails = form.cleaned_data['emails'] for user_profile in UserProfile.objects.filter( email__in=emails, is_active=True, is_bot=False, realm__deactivated=False): send_email('zerver/emails/find_team', to_user_id=user_profile.id, context={'user_profile': user_profile}) # Note: Show all the emails in the result otherwise this # feature can be used to ascertain which email addresses # 
are associated with Zulip. data = urllib.parse.urlencode({'emails': ','.join(emails)}) return redirect(url + "?" + data) else: form = FindMyTeamForm() result = request.GET.get('emails') # The below validation is perhaps unnecessary, in that we # shouldn't get able to get here with an invalid email unless # the user hand-edits the URLs. if result: for email in result.split(','): try: validators.validate_email(email) emails.append(email) except ValidationError: pass return render(request, 'zerver/find_account.html', context={'form': form, 'current_url': lambda: url, 'emails': emails},)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_2505_4
crossvul-python_data_bad_650_0
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ Common constants and global variables. """ import logging from paramiko.py3compat import byte_chr, PY2, bytes_types, text_type, long MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG, \ MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT = range(1, 7) MSG_KEXINIT, MSG_NEWKEYS = range(20, 22) MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE, MSG_USERAUTH_SUCCESS, \ MSG_USERAUTH_BANNER = range(50, 54) MSG_USERAUTH_PK_OK = 60 MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE = range(60, 62) MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN = range(60, 62) MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, MSG_USERAUTH_GSSAPI_ERROR,\ MSG_USERAUTH_GSSAPI_ERRTOK, MSG_USERAUTH_GSSAPI_MIC = range(63, 67) MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE = range(80, 83) MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, \ MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, \ MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST, \ MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE = range(90, 101) cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT) cMSG_IGNORE = byte_chr(MSG_IGNORE) cMSG_UNIMPLEMENTED = byte_chr(MSG_UNIMPLEMENTED) cMSG_DEBUG = 
byte_chr(MSG_DEBUG) cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST) cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT) cMSG_KEXINIT = byte_chr(MSG_KEXINIT) cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS) cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST) cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE) cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS) cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER) cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK) cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST) cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE) cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE) cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN) cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = \ byte_chr(MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE) cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR) cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK) cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC) cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST) cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS) cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAILURE) cMSG_CHANNEL_OPEN = byte_chr(MSG_CHANNEL_OPEN) cMSG_CHANNEL_OPEN_SUCCESS = byte_chr(MSG_CHANNEL_OPEN_SUCCESS) cMSG_CHANNEL_OPEN_FAILURE = byte_chr(MSG_CHANNEL_OPEN_FAILURE) cMSG_CHANNEL_WINDOW_ADJUST = byte_chr(MSG_CHANNEL_WINDOW_ADJUST) cMSG_CHANNEL_DATA = byte_chr(MSG_CHANNEL_DATA) cMSG_CHANNEL_EXTENDED_DATA = byte_chr(MSG_CHANNEL_EXTENDED_DATA) cMSG_CHANNEL_EOF = byte_chr(MSG_CHANNEL_EOF) cMSG_CHANNEL_CLOSE = byte_chr(MSG_CHANNEL_CLOSE) cMSG_CHANNEL_REQUEST = byte_chr(MSG_CHANNEL_REQUEST) cMSG_CHANNEL_SUCCESS = byte_chr(MSG_CHANNEL_SUCCESS) cMSG_CHANNEL_FAILURE = byte_chr(MSG_CHANNEL_FAILURE) # for debugging: MSG_NAMES = { MSG_DISCONNECT: 'disconnect', MSG_IGNORE: 'ignore', MSG_UNIMPLEMENTED: 'unimplemented', MSG_DEBUG: 'debug', MSG_SERVICE_REQUEST: 'service-request', MSG_SERVICE_ACCEPT: 'service-accept', 
MSG_KEXINIT: 'kexinit', MSG_NEWKEYS: 'newkeys', 30: 'kex30', 31: 'kex31', 32: 'kex32', 33: 'kex33', 34: 'kex34', 40: 'kex40', 41: 'kex41', MSG_USERAUTH_REQUEST: 'userauth-request', MSG_USERAUTH_FAILURE: 'userauth-failure', MSG_USERAUTH_SUCCESS: 'userauth-success', MSG_USERAUTH_BANNER: 'userauth--banner', MSG_USERAUTH_PK_OK: 'userauth-60(pk-ok/info-request)', MSG_USERAUTH_INFO_RESPONSE: 'userauth-info-response', MSG_GLOBAL_REQUEST: 'global-request', MSG_REQUEST_SUCCESS: 'request-success', MSG_REQUEST_FAILURE: 'request-failure', MSG_CHANNEL_OPEN: 'channel-open', MSG_CHANNEL_OPEN_SUCCESS: 'channel-open-success', MSG_CHANNEL_OPEN_FAILURE: 'channel-open-failure', MSG_CHANNEL_WINDOW_ADJUST: 'channel-window-adjust', MSG_CHANNEL_DATA: 'channel-data', MSG_CHANNEL_EXTENDED_DATA: 'channel-extended-data', MSG_CHANNEL_EOF: 'channel-eof', MSG_CHANNEL_CLOSE: 'channel-close', MSG_CHANNEL_REQUEST: 'channel-request', MSG_CHANNEL_SUCCESS: 'channel-success', MSG_CHANNEL_FAILURE: 'channel-failure', MSG_USERAUTH_GSSAPI_RESPONSE: 'userauth-gssapi-response', MSG_USERAUTH_GSSAPI_TOKEN: 'userauth-gssapi-token', MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: 'userauth-gssapi-exchange-complete', MSG_USERAUTH_GSSAPI_ERROR: 'userauth-gssapi-error', MSG_USERAUTH_GSSAPI_ERRTOK: 'userauth-gssapi-error-token', MSG_USERAUTH_GSSAPI_MIC: 'userauth-gssapi-mic' } # authentication request return codes: AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3) # channel request failed reasons: (OPEN_SUCCEEDED, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_FAILED_CONNECT_FAILED, OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, OPEN_FAILED_RESOURCE_SHORTAGE) = range(0, 5) CONNECTION_FAILED_CODE = { 1: 'Administratively prohibited', 2: 'Connect failed', 3: 'Unknown channel type', 4: 'Resource shortage' } DISCONNECT_SERVICE_NOT_AVAILABLE, DISCONNECT_AUTH_CANCELLED_BY_USER, \ DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE = 7, 13, 14 zero_byte = byte_chr(0) one_byte = byte_chr(1) four_byte = byte_chr(4) max_byte = byte_chr(0xff) 
cr_byte = byte_chr(13) linefeed_byte = byte_chr(10) crlf = cr_byte + linefeed_byte if PY2: cr_byte_value = cr_byte linefeed_byte_value = linefeed_byte else: cr_byte_value = 13 linefeed_byte_value = 10 def asbytes(s): """Coerce to bytes if possible or return unchanged.""" if isinstance(s, bytes_types): return s if isinstance(s, text_type): # Accept text and encode as utf-8 for compatibility only. return s.encode("utf-8") asbytes = getattr(s, "asbytes", None) if asbytes is not None: return asbytes() # May be an object that implements the buffer api, let callers handle. return s xffffffff = long(0xffffffff) x80000000 = long(0x80000000) o666 = 438 o660 = 432 o644 = 420 o600 = 384 o777 = 511 o700 = 448 o70 = 56 DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING ERROR = logging.ERROR CRITICAL = logging.CRITICAL # Common IO/select/etc sleep period, in seconds io_sleep = 0.01 DEFAULT_WINDOW_SIZE = 64 * 2 ** 15 DEFAULT_MAX_PACKET_SIZE = 2 ** 15 # lower bound on the max packet size we'll accept from the remote host # Minimum packet size is 32768 bytes according to # http://www.ietf.org/rfc/rfc4254.txt MIN_WINDOW_SIZE = 2 ** 15 # However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly # legal to accept a size much smaller, as OpenSSH client does as size 16384. MIN_PACKET_SIZE = 2 ** 12 # Max windows size according to http://www.ietf.org/rfc/rfc4254.txt MAX_WINDOW_SIZE = 2 ** 32 - 1
./CrossVul/dataset_final_sorted/CWE-287/py/bad_650_0
crossvul-python_data_good_1224_6
# Documentation for Zulip's authentication backends is split across a few places: # # * https://zulip.readthedocs.io/en/latest/production/authentication-methods.html and # zproject/prod_settings_template.py have user-level configuration documentation. # * https://zulip.readthedocs.io/en/latest/development/authentication.html # has developer-level documentation, especially on testing authentication backends # in the Zulip development environment. # # Django upstream's documentation for authentication backends is also # helpful background. The most important detail to understand for # reading this file is that the Django authenticate() function will # call the authenticate methods of all backends registered in # settings.AUTHENTICATION_BACKENDS that have a function signature # matching the args/kwargs passed in the authenticate() call. import copy import logging import magic import ujson from typing import Any, Dict, List, Optional, Set, Tuple, Union from typing_extensions import TypedDict from django_auth_ldap.backend import LDAPBackend, LDAPReverseEmailSearch, \ _LDAPUser, ldap_error from django.contrib.auth import get_backends from django.contrib.auth.backends import RemoteUserBackend from django.conf import settings from django.core.exceptions import ValidationError from django.core.validators import validate_email from django.dispatch import receiver, Signal from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.urls import reverse from requests import HTTPError from onelogin.saml2.errors import OneLogin_Saml2_Error from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, \ GithubTeamOAuth2 from social_core.backends.azuread import AzureADOAuth2 from social_core.backends.base import BaseAuth from social_core.backends.google import GoogleOAuth2 from social_core.backends.oauth import BaseOAuth2 from social_core.backends.saml import SAMLAuth from social_core.pipeline.partial import partial 
from social_core.exceptions import AuthFailed, SocialAuthBaseException from zerver.lib.actions import do_create_user, do_reactivate_user, do_deactivate_user, \ do_update_user_custom_profile_data_if_changed, validate_email_for_realm from zerver.lib.avatar import is_avatar_new, avatar_url from zerver.lib.avatar_hash import user_avatar_content_hash from zerver.lib.dev_ldap_directory import init_fakeldap from zerver.lib.request import JsonableError from zerver.lib.users import check_full_name, validate_user_custom_profile_field from zerver.lib.utils import generate_random_token from zerver.lib.redis_utils import get_redis_client from zerver.models import CustomProfileField, DisposableEmailError, DomainNotAllowedForRealmError, \ EmailContainsPlusError, PreregistrationUser, UserProfile, Realm, custom_profile_fields_for_realm, \ email_allowed_for_realm, get_default_stream_groups, get_user_profile_by_id, remote_user_to_email, \ email_to_username, get_realm, get_user_by_delivery_email, supported_auth_backends redis_client = get_redis_client() # This first batch of methods is used by other code in Zulip to check # whether a given authentication backend is enabled for a given realm. # In each case, we both needs to check at the server level (via # `settings.AUTHENTICATION_BACKENDS`, queried via # `django.contrib.auth.get_backends`) and at the realm level (via the # `Realm.authentication_methods` BitField). 
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]: """Pads an authentication methods dict to contain all auth backends supported by the software, regardless of whether they are configured on this server""" for key in AUTH_BACKEND_NAME_MAP: if key not in method_dict: method_dict[key] = False return method_dict def auth_enabled_helper(backends_to_check: List[str], realm: Optional[Realm]) -> bool: if realm is not None: enabled_method_dict = realm.authentication_methods_dict() pad_method_dict(enabled_method_dict) else: enabled_method_dict = dict((method, True) for method in Realm.AUTHENTICATION_FLAGS) pad_method_dict(enabled_method_dict) for supported_backend in supported_auth_backends(): for backend_name in backends_to_check: backend = AUTH_BACKEND_NAME_MAP[backend_name] if enabled_method_dict[backend_name] and isinstance(supported_backend, backend): return True return False def ldap_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['LDAP'], realm) def email_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['Email'], realm) def password_auth_enabled(realm: Optional[Realm]=None) -> bool: return ldap_auth_enabled(realm) or email_auth_enabled(realm) def dev_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['Dev'], realm) def google_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['Google'], realm) def github_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['GitHub'], realm) def saml_auth_enabled(realm: Optional[Realm]=None) -> bool: return auth_enabled_helper(['SAML'], realm) def any_social_backend_enabled(realm: Optional[Realm]=None) -> bool: """Used by the login page process to determine whether to show the 'OR' for login with Google""" social_backend_names = [social_auth_subclass.auth_backend_name for social_auth_subclass in SOCIAL_AUTH_BACKENDS] return auth_enabled_helper(social_backend_names, realm) def 
redirect_to_config_error(error_type: str) -> HttpResponseRedirect: return HttpResponseRedirect("/config-error/%s" % (error_type,)) def require_email_format_usernames(realm: Optional[Realm]=None) -> bool: if ldap_auth_enabled(realm): if settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN: return False return True def is_user_active(user_profile: UserProfile, return_data: Optional[Dict[str, Any]]=None) -> bool: if not user_profile.is_active: if return_data is not None: if user_profile.is_mirror_dummy: # Record whether it's a mirror dummy account return_data['is_mirror_dummy'] = True return_data['inactive_user'] = True return False if user_profile.realm.deactivated: if return_data is not None: return_data['inactive_realm'] = True return False return True def common_get_active_user(email: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: """This is the core common function used by essentially all authentication backends to check if there's an active user account with a given email address in the organization, handling both user-level and realm-level deactivation correctly. """ try: user_profile = get_user_by_delivery_email(email, realm) except UserProfile.DoesNotExist: # If the user doesn't have an account in the target realm, we # check whether they might have an account in another realm, # and if so, provide a helpful error message via # `invalid_subdomain`. if not UserProfile.objects.filter(delivery_email__iexact=email).exists(): return None if return_data is not None: return_data['invalid_subdomain'] = True return None if not is_user_active(user_profile, return_data): return None return user_profile class ZulipAuthMixin: """This common mixin is used to override Django's default behavior for looking up a logged-in user by ID to use a version that fetches from memcached before checking the database (avoiding a database query in most cases). 
""" def get_user(self, user_profile_id: int) -> Optional[UserProfile]: """Override the Django method for getting a UserProfile object from the user_profile_id,.""" try: return get_user_profile_by_id(user_profile_id) except UserProfile.DoesNotExist: return None class ZulipDummyBackend(ZulipAuthMixin): """Used when we want to log you in without checking any authentication (i.e. new user registration or when otherwise authentication has already been checked earlier in the process). We ensure that this backend only ever successfully authenticates when explicitly requested by including the use_dummy_backend kwarg. """ def authenticate(self, *, username: str, realm: Realm, use_dummy_backend: bool=False, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: if use_dummy_backend: return common_get_active_user(username, realm, return_data) return None class EmailAuthBackend(ZulipAuthMixin): """ Email+Password Authentication Backend (the default). Allows a user to sign in using an email/password pair. """ def authenticate(self, *, username: str, password: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: """ Authenticate a user based on email address as the user name. """ if not password_auth_enabled(realm): if return_data is not None: return_data['password_auth_disabled'] = True return None if not email_auth_enabled(realm): if return_data is not None: return_data['email_auth_disabled'] = True return None if password == "": # Never allow an empty password. This is defensive code; # a user having password "" should only be possible # through a bug somewhere else. return None user_profile = common_get_active_user(username, realm, return_data=return_data) if user_profile is None: return None if user_profile.check_password(password): return user_profile return None class ZulipRemoteUserBackend(RemoteUserBackend): """Authentication backend that reads the Apache REMOTE_USER variable. 
Used primarily in enterprise environments with an SSO solution that has an Apache REMOTE_USER integration. For manual testing, see https://zulip.readthedocs.io/en/latest/production/authentication-methods.html See also remote_user_sso in zerver/views/auth.py. """ create_unknown_user = False def authenticate(self, *, remote_user: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: if not auth_enabled_helper(["RemoteUser"], realm): return None email = remote_user_to_email(remote_user) return common_get_active_user(email, realm, return_data=return_data) def is_valid_email(email: str) -> bool: try: validate_email(email) except ValidationError: return False return True def check_ldap_config() -> None: if not settings.LDAP_APPEND_DOMAIN: # Email search needs to be configured in this case. assert settings.AUTH_LDAP_USERNAME_ATTR and settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH def find_ldap_users_by_email(email: str) -> Optional[List[_LDAPUser]]: """ Returns list of _LDAPUsers matching the email search, or None if no matches are found. """ email_search = LDAPReverseEmailSearch(LDAPBackend(), email) return email_search.search_for_users(should_populate=False) def email_belongs_to_ldap(realm: Realm, email: str) -> bool: """Used to make determinations on whether a user's email address is managed by LDAP. For environments using both LDAP and Email+Password authentication, we do not allow EmailAuthBackend authentication for email addresses managed by LDAP (to avoid a security issue where one create separate credentials for an LDAP user), and this function is used to enforce that rule. """ if not ldap_auth_enabled(realm): return False check_ldap_config() if settings.LDAP_APPEND_DOMAIN: # Check if the email ends with LDAP_APPEND_DOMAIN return email.strip().lower().endswith("@" + settings.LDAP_APPEND_DOMAIN) # If we don't have an LDAP domain, we have to do a lookup for the email. 
if find_ldap_users_by_email(email): return True else: return False class ZulipLDAPException(_LDAPUser.AuthenticationFailed): """Since this inherits from _LDAPUser.AuthenticationFailed, these will be caught and logged at debug level inside django-auth-ldap's authenticate()""" pass class ZulipLDAPExceptionNoMatchingLDAPUser(ZulipLDAPException): pass class ZulipLDAPExceptionOutsideDomain(ZulipLDAPExceptionNoMatchingLDAPUser): pass class ZulipLDAPConfigurationError(Exception): pass LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2 class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend): """Common code between LDAP authentication (ZulipLDAPAuthBackend) and using LDAP just to sync user data (ZulipLDAPUserPopulator). To fully understand our LDAP backend, you may want to skim django_auth_ldap/backend.py from the upstream django-auth-ldap library. It's not a lot of code, and searching around in that file makes the flow for LDAP authentication clear. """ def __init__(self) -> None: # Used to initialize a fake LDAP directly for both manual # and automated testing in a development environment where # there is no actual LDAP server. if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE: # nocoverage init_fakeldap() check_ldap_config() # Disable django-auth-ldap's permissions functions -- we don't use # the standard Django user/group permissions system because they # are prone to performance issues. 
def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any=None) -> bool: return False def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool: return False def get_all_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]: return set() def get_group_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]: return set() def django_to_ldap_username(self, username: str) -> str: """ Translates django username (user_profile.email or whatever the user typed in the login field when authenticating via the ldap backend) into ldap username. Guarantees that the username it returns actually has an entry in the ldap directory. Raises ZulipLDAPExceptionNoMatchingLDAPUser if that's not possible. """ result = username if settings.LDAP_APPEND_DOMAIN: if is_valid_email(username): if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN): raise ZulipLDAPExceptionOutsideDomain("Email %s does not match LDAP domain %s." % ( username, settings.LDAP_APPEND_DOMAIN)) result = email_to_username(username) else: # We can use find_ldap_users_by_email if is_valid_email(username): email_search_result = find_ldap_users_by_email(username) if email_search_result is None: result = username elif len(email_search_result) == 1: return email_search_result[0]._username elif len(email_search_result) > 1: # This is possible, but strange, so worth logging a warning about. # We can't translate the email to a unique username, # so we don't do anything else here. logging.warning("Multiple users with email {} found in LDAP.".format(username)) result = username if _LDAPUser(self, result).attrs is None: # Check that there actually is an ldap entry matching the result username # we want to return. Otherwise, raise an exception. 
raise ZulipLDAPExceptionNoMatchingLDAPUser() return result def user_email_from_ldapuser(self, username: str, ldap_user: _LDAPUser) -> str: if hasattr(ldap_user, '_username'): # In tests, we sometimes pass a simplified _LDAPUser without _username attr, # and with the intended username in the username argument. username = ldap_user._username if settings.LDAP_APPEND_DOMAIN: return "@".join((username, settings.LDAP_APPEND_DOMAIN)) if settings.LDAP_EMAIL_ATTR is not None: # Get email from ldap attributes. if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs: raise ZulipLDAPException("LDAP user doesn't have the needed %s attribute" % ( settings.LDAP_EMAIL_ATTR,)) else: return ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0] return username def ldap_to_django_username(self, username: str) -> str: """ This is called inside django_auth_ldap with only one role: to convert _LDAPUser._username to django username (so in Zulip, the email) and pass that as "username" argument to get_or_build_user(username, ldapuser). In many cases, the email is stored in the _LDAPUser's attributes, so it can't be constructed just from the username. We choose to do nothing in this function, and our overrides of get_or_build_user() obtain that username from the _LDAPUser object on their own, through our user_email_from_ldapuser function. """ return username def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None: if 'avatar' in settings.AUTH_LDAP_USER_ATTR_MAP: # We do local imports here to avoid import loops from zerver.lib.upload import upload_avatar_image from zerver.lib.actions import do_change_avatar_fields from io import BytesIO avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP['avatar'] if avatar_attr_name not in ldap_user.attrs: # nocoverage # If this specific user doesn't have e.g. a # thumbnailPhoto set in LDAP, just skip that user. 
return ldap_avatar = ldap_user.attrs[avatar_attr_name][0] avatar_changed = is_avatar_new(ldap_avatar, user) if not avatar_changed: # Don't do work to replace the avatar with itself. return io = BytesIO(ldap_avatar) # Structurally, to make the S3 backend happy, we need to # provide a Content-Type; since that isn't specified in # any metadata, we auto-detect it. content_type = magic.from_buffer(copy.deepcopy(io).read()[0:1024], mime=True) if content_type.startswith("image/"): upload_avatar_image(io, user, user, content_type=content_type) do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER) # Update avatar hash. user.avatar_hash = user_avatar_content_hash(ldap_avatar) user.save(update_fields=["avatar_hash"]) else: logging.warning("Could not parse %s field for user %s" % (avatar_attr_name, user.id)) def is_account_control_disabled_user(self, ldap_user: _LDAPUser) -> bool: """Implements the userAccountControl check for whether a user has been disabled in an Active Directory server being integrated with Zulip via LDAP.""" account_control_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP['userAccountControl']][0] ldap_disabled = bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK) return ldap_disabled @classmethod def get_mapped_name(cls, ldap_user: _LDAPUser) -> Tuple[str, str]: """Constructs the user's Zulip full_name and short_name fields from the LDAP data""" if "full_name" in settings.AUTH_LDAP_USER_ATTR_MAP: full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"] short_name = full_name = ldap_user.attrs[full_name_attr][0] elif all(key in settings.AUTH_LDAP_USER_ATTR_MAP for key in {"first_name", "last_name"}): first_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["first_name"] last_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["last_name"] short_name = ldap_user.attrs[first_name_attr][0] full_name = short_name + ' ' + ldap_user.attrs[last_name_attr][0] else: raise ZulipLDAPException("Missing required mapping for user's full 
name") if "short_name" in settings.AUTH_LDAP_USER_ATTR_MAP: short_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["short_name"] short_name = ldap_user.attrs[short_name_attr][0] return full_name, short_name def sync_full_name_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None: from zerver.lib.actions import do_change_full_name full_name, _ = self.get_mapped_name(ldap_user) if full_name != user_profile.full_name: try: full_name = check_full_name(full_name) except JsonableError as e: raise ZulipLDAPException(e.msg) do_change_full_name(user_profile, full_name, None) def sync_custom_profile_fields_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None: values_by_var_name = {} # type: Dict[str, Union[int, str, List[int]]] for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items(): if not attr.startswith('custom_profile_field__'): continue var_name = attr.split('custom_profile_field__')[1] try: value = ldap_user.attrs[ldap_attr][0] except KeyError: # If this user doesn't have this field set then ignore this # field and continue syncing other fields. `django-auth-ldap` # automatically logs error about missing field. continue values_by_var_name[var_name] = value fields_by_var_name = {} # type: Dict[str, CustomProfileField] custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id) for field in custom_profile_fields: var_name = '_'.join(field.name.lower().split(' ')) fields_by_var_name[var_name] = field existing_values = {} for data in user_profile.profile_data: var_name = '_'.join(data['name'].lower().split(' ')) existing_values[var_name] = data['value'] profile_data = [] # type: List[Dict[str, Union[int, str, List[int]]]] for var_name, value in values_by_var_name.items(): try: field = fields_by_var_name[var_name] except KeyError: raise ZulipLDAPException('Custom profile field with name %s not found.' 
% (var_name,)) if existing_values.get(var_name) == value: continue result = validate_user_custom_profile_field(user_profile.realm.id, field, value) if result is not None: raise ZulipLDAPException('Invalid data for %s field: %s' % (var_name, result)) profile_data.append({ 'id': field.id, 'value': value, }) do_update_user_custom_profile_data_if_changed(user_profile, profile_data) class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase): REALM_IS_NONE_ERROR = 1 def authenticate(self, *, username: str, password: str, realm: Realm, prereg_user: Optional[PreregistrationUser]=None, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: self._realm = realm self._prereg_user = prereg_user if not ldap_auth_enabled(realm): return None try: # We want to apss the user's LDAP username into # authenticate() below. If an email address was entered # in the login form, we need to use # django_to_ldap_username to translate the email address # to the user's LDAP username before calling the # django-auth-ldap authenticate(). username = self.django_to_ldap_username(username) except ZulipLDAPExceptionNoMatchingLDAPUser: if return_data is not None: return_data['no_matching_ldap_user'] = True return None # Call into (ultimately) the django-auth-ldap authenticate # function. This will check the username/password pair # against the LDAP database, and assuming those are correct, # end up calling `self.get_or_build_user` with the # authenticated user's data from LDAP. return ZulipLDAPAuthBackendBase.authenticate(self, request=None, username=username, password=password) def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]: """The main function of our authentication backend extension of django-auth-ldap. When this is called (from `authenticate`), django-auth-ldap will already have verified that the provided username and password match those in the LDAP database. 
This function's responsibility is to check (1) whether the email address for this user obtained from LDAP has an active account in this Zulip realm. If so, it will log them in. Otherwise, to provide a seamless Single Sign-On experience with LDAP, this function can automatically create a new Zulip user account in the realm (assuming the realm is configured to allow that email address to sign up). """ return_data = {} # type: Dict[str, Any] username = self.user_email_from_ldapuser(username, ldap_user) if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: # nocoverage ldap_disabled = self.is_account_control_disabled_user(ldap_user) if ldap_disabled: # Treat disabled users as deactivated in Zulip. return_data["inactive_user"] = True raise ZulipLDAPException("User has been deactivated") user_profile = common_get_active_user(username, self._realm, return_data) if user_profile is not None: # An existing user, successfully authed; return it. return user_profile, False if return_data.get("inactive_realm"): # This happens if there is a user account in a deactivated realm raise ZulipLDAPException("Realm has been deactivated") if return_data.get("inactive_user"): raise ZulipLDAPException("User has been deactivated") # An invalid_subdomain `return_data` value here is ignored, # since that just means we're trying to create an account in a # second realm on the server (`ldap_auth_enabled(realm)` would # have been false if this user wasn't meant to have an account # in this second realm). if self._realm.deactivated: # This happens if no account exists, but the realm is # deactivated, so we shouldn't create a new user account raise ZulipLDAPException("Realm has been deactivated") # Makes sure that email domain hasn't be restricted for this # realm. The main thing here is email_allowed_for_realm; but # we also call validate_email_for_realm just for consistency, # even though its checks were already done above. 
try: email_allowed_for_realm(username, self._realm) validate_email_for_realm(self._realm, username) except DomainNotAllowedForRealmError: raise ZulipLDAPException("This email domain isn't allowed in this organization.") except (DisposableEmailError, EmailContainsPlusError): raise ZulipLDAPException("Email validation failed.") # We have valid LDAP credentials; time to create an account. full_name, short_name = self.get_mapped_name(ldap_user) try: full_name = check_full_name(full_name) except JsonableError as e: raise ZulipLDAPException(e.msg) opts = {} # type: Dict[str, Any] if self._prereg_user: invited_as = self._prereg_user.invited_as realm_creation = self._prereg_user.realm_creation opts['prereg_user'] = self._prereg_user opts['is_realm_admin'] = ( invited_as == PreregistrationUser.INVITE_AS['REALM_ADMIN']) or realm_creation opts['is_guest'] = invited_as == PreregistrationUser.INVITE_AS['GUEST_USER'] opts['realm_creation'] = realm_creation opts['default_stream_groups'] = get_default_stream_groups(self._realm) user_profile = do_create_user(username, None, self._realm, full_name, short_name, **opts) self.sync_avatar_from_ldap(user_profile, ldap_user) self.sync_custom_profile_fields_from_ldap(user_profile, ldap_user) return user_profile, True class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase): """Just like ZulipLDAPAuthBackend, but doesn't let you log in. Used for syncing data like names, avatars, and custom profile fields from LDAP in `manage.py sync_ldap_user_data` as well as in registration for organizations that use a different SSO solution for managing login (often via RemoteUserBackend). 
""" def authenticate(self, *, username: str, password: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: return None def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]: """This is used only in non-authentication contexts such as: ./manage.py sync_ldap_user_data """ # Obtain the django username from the ldap_user object: username = self.user_email_from_ldapuser(username, ldap_user) # Call the library get_or_build_user for building the UserProfile # with the username we obtained: (user, built) = super().get_or_build_user(username, ldap_user) # Synchronise the UserProfile with its LDAP attributes: if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: user_disabled_in_ldap = self.is_account_control_disabled_user(ldap_user) if user_disabled_in_ldap: if user.is_active: logging.info("Deactivating user %s because they are disabled in LDAP." % (user.email,)) do_deactivate_user(user) # Do an early return to avoid trying to sync additional data. return (user, built) elif not user.is_active: logging.info("Reactivating user %s because they are not disabled in LDAP." % (user.email,)) do_reactivate_user(user) self.sync_avatar_from_ldap(user, ldap_user) self.sync_full_name_from_ldap(user, ldap_user) self.sync_custom_profile_fields_from_ldap(user, ldap_user) return (user, built) class PopulateUserLDAPError(ZulipLDAPException): pass @receiver(ldap_error, sender=ZulipLDAPUserPopulator) def catch_ldap_error(signal: Signal, **kwargs: Any) -> None: """ Inside django_auth_ldap populate_user(), if LDAPError is raised, e.g. due to invalid connection credentials, the function catches it and emits a signal (ldap_error) to communicate this error to others. We normally don't use signals, but here there's no choice, so in this function we essentially convert the signal to a normal exception that will properly propagate out of django_auth_ldap internals. 
    """
    if kwargs['context'] == 'populate_user':
        # The exception message can contain the password (if it was invalid),
        # so it seems better not to log that, and only use the original exception's name here.
        raise PopulateUserLDAPError(kwargs['exception'].__class__.__name__)

def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bool:
    """Sync one user's data from LDAP via ZulipLDAPUserPopulator.

    Returns True if the user was updated, or was deactivated because no
    matching LDAP entry exists (LDAP_DEACTIVATE_NON_MATCHING_USERS).
    Returns False if no LDAP entry was found and deactivation is off.
    Raises PopulateUserLDAPError if populate_user unexpectedly fails.
    """
    backend = ZulipLDAPUserPopulator()
    try:
        ldap_username = backend.django_to_ldap_username(user_profile.email)
    except ZulipLDAPExceptionNoMatchingLDAPUser:
        if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS:
            do_deactivate_user(user_profile)
            logger.info("Deactivated non-matching user: %s" % (user_profile.email,))
            return True
        elif user_profile.is_active:
            logger.warning("Did not find %s in LDAP." % (user_profile.email,))
        return False

    updated_user = backend.populate_user(ldap_username)
    if updated_user:
        logger.info("Updated %s." % (user_profile.email,))
        return True

    raise PopulateUserLDAPError("populate_user unexpectedly returned {}".format(updated_user))

# Quick tool to test whether you're correctly authenticating to LDAP
def query_ldap(email: str) -> List[str]:
    """Look up `email` in LDAP and return a human-readable list of the
    mapped attribute values (used for debugging LDAP configuration)."""
    values = []
    backend = next((backend for backend in get_backends() if isinstance(backend, LDAPBackend)), None)
    if backend is not None:
        try:
            ldap_username = backend.django_to_ldap_username(email)
        except ZulipLDAPExceptionNoMatchingLDAPUser:
            values.append("No such user found")
            return values

        ldap_attrs = _LDAPUser(backend, ldap_username).attrs

        for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
            value = ldap_attrs.get(ldap_field, ["LDAP field not present", ])[0]
            if django_field == "avatar":
                # Avatars are raw image data; don't dump bytes to the console.
                if isinstance(value, bytes):
                    value = "(An avatar image file)"
            values.append("%s: %s" % (django_field, value))
        if settings.LDAP_EMAIL_ATTR is not None:
            values.append("%s: %s" % ('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
    else:
        values.append("LDAP backend not configured on this server.")
    return values

class DevAuthBackend(ZulipAuthMixin):
    """Allow logging in as any user without a password.  This is used for
    convenience when developing Zulip, and is disabled in production."""

    def authenticate(self, *, dev_auth_username: str, realm: Realm,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        if not dev_auth_enabled(realm):
            return None
        return common_get_active_user(dev_auth_username, realm, return_data=return_data)

def redirect_deactivated_user_to_login() -> HttpResponseRedirect:
    # Specifying the template name makes sure that the user is not redirected to dev_login in case of
    # a deactivated account on a test server.
    login_url = reverse('zerver.views.auth.login_page', kwargs = {'template_name': 'zerver/login.html'})
    redirect_url = login_url + '?is_deactivated=true'
    return HttpResponseRedirect(redirect_url)

def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any],
                                 *args: Any, **kwargs: Any) -> Optional[UserProfile]:
    """Responsible for doing the Zulip-account lookup and validation parts
    of the Zulip Social auth pipeline (similar to the authenticate()
    methods in most other auth backends in this file).

    Returns a UserProfile object for successful authentication, and None
    otherwise.
    """
    subdomain = backend.strategy.session_get('subdomain')
    try:
        realm = get_realm(subdomain)
    except Realm.DoesNotExist:
        return_data["invalid_realm"] = True
        return None
    return_data["realm_id"] = realm.id

    if not auth_enabled_helper([backend.auth_backend_name], realm):
        return_data["auth_backend_disabled"] = True
        return None

    if 'auth_failed_reason' in kwargs.get('response', {}):
        return_data["social_auth_failed_reason"] = kwargs['response']["auth_failed_reason"]
        return None
    elif hasattr(backend, 'get_verified_emails'):
        # Some social backends, like GitHubAuthBackend, don't
        # guarantee that the `details` data is validated (i.e., it's
        # possible users can put any string they want in the "email"
        # field of the `details` object).
For those backends, we have # custom per-backend code to properly fetch only verified # email addresses from the appropriate third-party API. verified_emails = backend.get_verified_emails(*args, **kwargs) verified_emails_length = len(verified_emails) if verified_emails_length == 0: # TODO: Provide a nice error message screen to the user # for this case, rather than just logging a warning. logging.warning("Social auth (%s) failed because user has no verified emails" % (backend.auth_backend_name,)) return_data["email_not_verified"] = True return None if verified_emails_length == 1: chosen_email = verified_emails[0] else: chosen_email = backend.strategy.request_data().get('email') if not chosen_email: avatars = {} # Dict[str, str] for email in verified_emails: existing_account = common_get_active_user(email, realm, {}) if existing_account is not None: avatars[email] = avatar_url(existing_account) return render(backend.strategy.request, 'zerver/social_auth_select_email.html', context = { 'primary_email': verified_emails[0], 'verified_non_primary_emails': verified_emails[1:], 'backend': 'github', 'avatar_urls': avatars, }) try: validate_email(chosen_email) except ValidationError: return_data['invalid_email'] = True return None if chosen_email not in verified_emails: # If a user edits the submit value for the choose email form, we might # end up with a wrong email associated with the account. The below code # takes care of that. logging.warning("Social auth (%s) failed because user has no verified" " emails associated with the account" % (backend.auth_backend_name,)) return_data["email_not_associated"] = True return None validated_email = chosen_email else: try: validate_email(kwargs["details"].get("email")) except ValidationError: return_data['invalid_email'] = True return None validated_email = kwargs["details"].get("email") if not validated_email: # nocoverage # This code path isn't used with GitHubAuthBackend, but may be relevant for other # social auth backends. 
return_data['invalid_email'] = True return None return_data["valid_attestation"] = True return_data['validated_email'] = validated_email user_profile = common_get_active_user(validated_email, realm, return_data) full_name = kwargs['details'].get('fullname') first_name = kwargs['details'].get('first_name', '') last_name = kwargs['details'].get('last_name', '') if full_name is None: if not first_name and not last_name: # If we add support for any of the social auth backends that # don't provide this feature, we'll need to add code here. raise AssertionError("Social auth backend doesn't provide name") if full_name: return_data["full_name"] = full_name else: # In SAML authentication, the IdP may support only sending # the first and last name as separate attributes - in that case # we construct the full name from them. return_data["full_name"] = "{} {}".format( first_name, last_name ).strip() # strip removes the unnecessary ' ' return user_profile @partial def social_auth_associate_user( backend: BaseAuth, *args: Any, **kwargs: Any) -> Union[HttpResponse, Dict[str, Any]]: """A simple wrapper function to reformat the return data from social_associate_user_helper as a dictionary. The python-social-auth infrastructure will then pass those values into later stages of settings.SOCIAL_AUTH_PIPELINE, such as social_auth_finish, as kwargs. 
""" partial_token = backend.strategy.request_data().get('partial_token') return_data = {} # type: Dict[str, Any] user_profile = social_associate_user_helper( backend, return_data, *args, **kwargs) if type(user_profile) == HttpResponse: return user_profile else: return {'user_profile': user_profile, 'return_data': return_data, 'partial_token': partial_token, 'partial_backend_name': backend} def social_auth_finish(backend: Any, details: Dict[str, Any], response: HttpResponse, *args: Any, **kwargs: Any) -> Optional[UserProfile]: """Given the determination in social_auth_associate_user for whether the user should be authenticated, this takes care of actually logging in the user (if appropriate) and redirecting the browser to the appropriate next page depending on the situation. Read the comments below as well as login_or_register_remote_user in `zerver/views/auth.py` for the details on how that dispatch works. """ from zerver.views.auth import (login_or_register_remote_user, redirect_and_log_into_subdomain) user_profile = kwargs['user_profile'] return_data = kwargs['return_data'] no_verified_email = return_data.get("email_not_verified") auth_backend_disabled = return_data.get('auth_backend_disabled') inactive_user = return_data.get('inactive_user') inactive_realm = return_data.get('inactive_realm') invalid_realm = return_data.get('invalid_realm') invalid_email = return_data.get('invalid_email') auth_failed_reason = return_data.get("social_auth_failed_reason") email_not_associated = return_data.get("email_not_associated") if invalid_realm: from zerver.views.auth import redirect_to_subdomain_login_url return redirect_to_subdomain_login_url() if inactive_user: return redirect_deactivated_user_to_login() if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated: # Redirect to login page. We can't send to registration # workflow with these errors. We will redirect to login page. 
return None if invalid_email: # In case of invalid email, we will end up on registration page. # This seems better than redirecting to login page. logging.warning( "{} got invalid email argument.".format(backend.auth_backend_name) ) return None if auth_failed_reason: logging.info(auth_failed_reason) return None # Structurally, all the cases where we don't have an authenticated # email for the user should be handled above; this assertion helps # prevent any violations of that contract from resulting in a user # being incorrectly authenticated. assert return_data.get('valid_attestation') is True strategy = backend.strategy full_name_validated = backend.full_name_validated email_address = return_data['validated_email'] full_name = return_data['full_name'] is_signup = strategy.session_get('is_signup') == '1' redirect_to = strategy.session_get('next') realm = Realm.objects.get(id=return_data["realm_id"]) multiuse_object_key = strategy.session_get('multiuse_object_key', '') mobile_flow_otp = strategy.session_get('mobile_flow_otp') # At this point, we have now confirmed that the user has # demonstrated control over the target email address. # # The next step is to call login_or_register_remote_user, but # there are two code paths here because of an optimization to save # a redirect on mobile. if mobile_flow_otp is not None: # For mobile app authentication, login_or_register_remote_user # will redirect to a special zulip:// URL that is handled by # the app after a successful authentication; so we can # redirect directly from here, saving a round trip over what # we need to do to create session cookies on the right domain # in the web login flow (below). 
return login_or_register_remote_user( strategy.request, email_address, user_profile, full_name, mobile_flow_otp=mobile_flow_otp, is_signup=is_signup, redirect_to=redirect_to, full_name_validated=full_name_validated ) # If this authentication code were executing on # subdomain.zulip.example.com, we would just call # login_or_register_remote_user as in the mobile code path. # However, because third-party SSO providers generally don't allow # wildcard addresses in their redirect URLs, for multi-realm # servers, we will have just completed authentication on e.g. # auth.zulip.example.com (depending on # settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on # the subdomain.zulip.example.com domain. So instead we serve a # redirect (encoding the authentication result data in a # cryptographically signed token) to a route on # subdomain.zulip.example.com that will verify the signature and # then call login_or_register_remote_user. return redirect_and_log_into_subdomain( realm, full_name, email_address, is_signup=is_signup, redirect_to=redirect_to, multiuse_object_key=multiuse_object_key, full_name_validated=full_name_validated ) class SocialAuthMixin(ZulipAuthMixin): auth_backend_name = "undeclared" name = "undeclared" display_icon = None # type: Optional[str] # Used to determine how to order buttons on login form, backend with # higher sort order are displayed first. sort_order = 0 # Whether we expect that the full_name value obtained by the # social backend is definitely how the user should be referred to # in Zulip, which in turn determines whether we should always show # a registration form in the event with a default value of the # user's name when using this social backend so they can change # it. 
For social backends like SAML that are expected to be a # central database, this should be True; for backends like GitHub # where the user might not have a name set or have it set to # something other than the name they will prefer to use in Zulip, # it should be False. full_name_validated = False def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]: """This is a small wrapper around the core `auth_complete` method of python-social-auth, designed primarily to prevent 500s for exceptions in the social auth code from situations that are really user errors. Returning `None` from this function will redirect the browser to the login page. """ try: # Call the auth_complete method of social_core.backends.oauth.BaseOAuth2 return super().auth_complete(*args, **kwargs) # type: ignore # monkey-patching except AuthFailed as e: # When a user's social authentication fails (e.g. because # they did something funny with reloading in the middle of # the flow), don't throw a 500, just send them back to the # login page and record the event at the info log level. logging.info(str(e)) return None except SocialAuthBaseException as e: # Other python-social-auth exceptions are likely # interesting enough that we should log a warning. 
            logging.warning(str(e))
            return None

class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
    name = "github"
    auth_backend_name = "GitHub"
    sort_order = 100
    display_icon = "/static/images/landing-page/logos/github-icon.png"

    def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]:
        """Return the user's GitHub-verified email addresses, with the
        primary address (if verified) first in the list."""
        access_token = kwargs["response"]["access_token"]
        try:
            emails = self._user_data(access_token, '/emails')
        except (HTTPError, ValueError, TypeError):  # nocoverage
            # We don't really need an explicit test for this code
            # path, since the outcome will be the same as any other
            # case without any verified emails
            emails = []

        verified_emails = []  # type: List[str]
        for email_obj in self.filter_usable_emails(emails):
            # social_associate_user_helper assumes that the first email in
            # verified_emails is primary.
            if email_obj.get("primary"):
                verified_emails.insert(0, email_obj["email"])
            else:
                verified_emails.append(email_obj["email"])

        return verified_emails

    def filter_usable_emails(self, emails: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        # We only let users login using email addresses that are
        # verified by GitHub, because the whole point is for the user
        # to demonstrate that they control the target email address.
        # We also disallow the
        # @noreply.github.com/@users.noreply.github.com email
        # addresses, because structurally, we only want to allow email
        # addresses that can receive emails, and those cannot.
        return [
            email for email in emails
            if email.get('verified') and not email["email"].endswith("noreply.github.com")
        ]

    def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]:
        """This patched user_data function lets us combine together the 3
        social auth backends into a single Zulip backend for GitHub Oauth2"""
        team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
        org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME
        if team_id is None and org_name is None:
            # I believe this can't raise AuthFailed, so we don't try to catch it here.
return super().user_data( access_token, *args, **kwargs ) elif team_id is not None: backend = GithubTeamOAuth2(self.strategy, self.redirect_uri) try: return backend.user_data(access_token, *args, **kwargs) except AuthFailed: return dict(auth_failed_reason="GitHub user is not member of required team") elif org_name is not None: backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri) try: return backend.user_data(access_token, *args, **kwargs) except AuthFailed: return dict(auth_failed_reason="GitHub user is not member of required organization") raise AssertionError("Invalid configuration") class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2): sort_order = 50 name = "azuread-oauth2" auth_backend_name = "AzureAD" display_icon = "/static/images/landing-page/logos/azuread-icon.png" class GoogleAuthBackend(SocialAuthMixin, GoogleOAuth2): sort_order = 150 auth_backend_name = "Google" name = "google" display_icon = "/static/images/landing-page/logos/googl_e-icon.png" def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]: verified_emails = [] # type: List[str] details = kwargs["response"] email_verified = details.get("email_verified") if email_verified: verified_emails.append(details["email"]) return verified_emails class SAMLAuthBackend(SocialAuthMixin, SAMLAuth): auth_backend_name = "SAML" standard_relay_params = ["subdomain", "multiuse_object_key", "mobile_flow_otp", "next", "is_signup"] REDIS_EXPIRATION_SECONDS = 60 * 15 name = "saml" # Organization which go through the trouble of setting up SAML are most likely # to have it as their main authentication method, so it seems appropriate to have # SAML buttons at the top. sort_order = 9999 # There's no common default logo for SAML authentication. display_icon = None # The full_name provided by the IdP is very likely the standard # employee directory name for the user, and thus what they and # their organization want to use in Zulip. 
So don't unnecessarily # provide a registration flow prompt for them to set their name. full_name_validated = True def auth_url(self) -> str: """Get the URL to which we must redirect in order to authenticate the user. Overriding the original SAMLAuth.auth_url. Runs when someone accesses the /login/saml/ endpoint.""" try: idp_name = self.strategy.request_data()['idp'] auth = self._create_saml_auth(idp=self.get_idp(idp_name)) except KeyError: # If the above raise KeyError, it means invalid or no idp was specified, # we should log that and redirect to the login page. logging.info("/login/saml/ : Bad idp param.") return reverse('zerver.views.auth.login_page', kwargs = {'template_name': 'zerver/login.html'}) # This where we change things. We need to pass some params # (`mobile_flow_otp`, `next`, etc.) through RelayState, which # then the IdP will pass back to us so we can read those # parameters in the final part of the authentication flow, at # the /complete/saml/ endpoint. # # To protect against network eavesdropping of these # parameters, we send just a random token to the IdP in # RelayState, which is used as a key into our redis data store # for fetching the actual parameters after the IdP has # returned a successful authentication. 
params_to_relay = ["idp"] + self.standard_relay_params request_data = self.strategy.request_data().dict() data_to_relay = { key: request_data[key] for key in params_to_relay if key in request_data } relay_state = self.put_data_in_redis(data_to_relay) return auth.login(return_to=relay_state) @classmethod def put_data_in_redis(cls, data_to_relay: Dict[str, Any]) -> str: with redis_client.pipeline() as pipeline: token = generate_random_token(64) key = "saml_token_{}".format(token) pipeline.set(key, ujson.dumps(data_to_relay)) pipeline.expire(key, cls.REDIS_EXPIRATION_SECONDS) pipeline.execute() return key @classmethod def get_data_from_redis(cls, key: str) -> Optional[Dict[str, Any]]: redis_data = None if key.startswith('saml_token_'): # Safety if statement, to not allow someone to poke around arbitrary redis keys here. redis_data = redis_client.get(key) if redis_data is None: # TODO: We will need some sort of user-facing message # about the authentication session having expired here. logging.info("SAML authentication failed: bad RelayState token.") return None return ujson.loads(redis_data) def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]: """ Additional ugly wrapping on top of auth_complete in SocialAuthMixin. We handle two things here: 1. Working around bad RelayState or SAMLResponse parameters in the request. Both parameters should be present if the user came to /complete/saml/ through the IdP as intended. The errors can happen if someone simply types the endpoint into their browsers, or generally tries messing with it in some ways. 2. The first part of our SAML authentication flow will encode important parameters into the RelayState. We need to read them and set those values in the session, and then change the RelayState param to the idp_name, because that's what SAMLAuth.auth_complete() expects. 
""" if 'RelayState' not in self.strategy.request_data(): logging.info("SAML authentication failed: missing RelayState.") return None # Set the relevant params that we transported in the RelayState: redis_key = self.strategy.request_data()['RelayState'] relayed_params = self.get_data_from_redis(redis_key) if relayed_params is None: return None result = None try: for param, value in relayed_params.items(): if param in self.standard_relay_params: self.strategy.session_set(param, value) # super().auth_complete expects to have RelayState set to the idp_name, # so we need to replace this param. post_params = self.strategy.request.POST.copy() post_params['RelayState'] = relayed_params["idp"] self.strategy.request.POST = post_params # Call the auth_complete method of SocialAuthMixIn result = super().auth_complete(*args, **kwargs) # type: ignore # monkey-patching except OneLogin_Saml2_Error as e: # This will be raised if SAMLResponse is missing. logging.info(str(e)) # Fall through to returning None. finally: if result is None: for param in self.standard_relay_params: # If an attacker managed to eavesdrop on the RelayState token, # they may pass it here to the endpoint with an invalid SAMLResponse. # We remove these potentially sensitive parameters that we have set in the session # ealier, to avoid leaking their values. 
self.strategy.session_set(param, None) return result @classmethod def check_config(cls) -> Optional[HttpResponse]: obligatory_saml_settings_list = [ settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID, settings.SOCIAL_AUTH_SAML_ORG_INFO, settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT, settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT, settings.SOCIAL_AUTH_SAML_ENABLED_IDPS ] if any(not setting for setting in obligatory_saml_settings_list): return redirect_to_config_error("saml") return None SocialBackendDictT = TypedDict('SocialBackendDictT', { 'name': str, 'display_name': str, 'display_icon': Optional[str], 'login_url': str, 'signup_url': str, }) def create_standard_social_backend_dict(social_backend: SocialAuthMixin) -> SocialBackendDictT: return dict( name=social_backend.name, display_name=social_backend.auth_backend_name, display_icon=social_backend.display_icon, login_url=reverse('login-social', args=(social_backend.name,)), signup_url=reverse('signup-social', args=(social_backend.name,)), ) def list_saml_backend_dicts(realm: Optional[Realm]=None) -> List[SocialBackendDictT]: result = [] # type: List[SocialBackendDictT] for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items(): saml_dict = dict( name='saml:{}'.format(idp_name), display_name=idp_dict.get('display_name', SAMLAuthBackend.auth_backend_name), display_icon=idp_dict.get('display_icon', SAMLAuthBackend.display_icon), login_url=reverse('login-social-extra-arg', args=('saml', idp_name)), signup_url=reverse('signup-social-extra-arg', args=('saml', idp_name)), ) # type: SocialBackendDictT result.append(saml_dict) return result def get_social_backend_dicts(realm: Optional[Realm]=None) -> List[SocialBackendDictT]: """ Returns a list of dictionaries that represent social backends, sorted in the order in which they should be displayed. """ result = [] for backend in SOCIAL_AUTH_BACKENDS: # SOCIAL_AUTH_BACKENDS is already sorted in the correct order, # so we don't need to worry about sorting here. 
        if auth_enabled_helper([backend.auth_backend_name], realm):
            if backend != SAMLAuthBackend:
                result.append(create_standard_social_backend_dict(backend))
            else:
                # A single SAML backend can expose several IdPs, each of
                # which gets its own login/signup button.
                result += list_saml_backend_dicts(realm)

    return result

# Map from the human-readable backend name to the backend class.
AUTH_BACKEND_NAME_MAP = {
    'Dev': DevAuthBackend,
    'Email': EmailAuthBackend,
    'LDAP': ZulipLDAPAuthBackend,
    'RemoteUser': ZulipRemoteUserBackend,
}  # type: Dict[str, Any]
SOCIAL_AUTH_BACKENDS = []  # type: List[BaseOAuth2]

# Automatically add all of our social auth backends to relevant data structures.
for social_auth_subclass in SocialAuthMixin.__subclasses__():
    AUTH_BACKEND_NAME_MAP[social_auth_subclass.auth_backend_name] = social_auth_subclass
    SOCIAL_AUTH_BACKENDS.append(social_auth_subclass)

SOCIAL_AUTH_BACKENDS = sorted(SOCIAL_AUTH_BACKENDS, key=lambda x: x.sort_order, reverse=True)

# Provide this alternative name for backwards compatibility with
# installations that had the old backend enabled.
GoogleMobileOauth2Backend = GoogleAuthBackend
./CrossVul/dataset_final_sorted/CWE-287/py/good_1224_6
crossvul-python_data_good_4354_0
import os import time import json from secrets import token_bytes, token_hex, randbits from hashlib import sha256 import asyncio import aiosqlite as sql from responses import * class Database: def __init__(self, session): loop = asyncio.get_event_loop() # lock to prevent race conditions when SELECT then fetchone self.lock = asyncio.Lock(loop=loop) self.dbw = loop.run_until_complete(sql.connect(DATABASE_FILENAME)) self.dbw.row_factory = sql.Row self.db = loop.run_until_complete(self.dbw.cursor()) with open(os.path.join(os.path.dirname(__file__), 'sql', 'startup.sql')) as startup: loop.run_until_complete(self.db.executescript(startup.read())) self.session = session async def close(self): await self.dbw.commit() await self.dbw.close() ### TABLE: clients ### async def client_matches(self, client_id, token): async with self.lock: await self.db.execute('SELECT client_id FROM scratchverifier_clients \ WHERE client_id=? AND token=?', (client_id, token)) if (await self.db.fetchone()): return True return False ### TABLE: clients and sessions ### async def username_from_session(self, session_id): if session_id == 0: # 0 means debug mode return 'kenny2scratch' async with self.lock: await self.db.execute('SELECT username FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) row = await self.db.fetchone() if row is None: return None return row[0] async def new_client(self, session_id): if session_id == 0: # 0 means debug mode # don't create a client, because other funcs return a dummy one # when under debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This client is newly created.'} username = await self.username_from_session(session_id) if username is None: return None async with self.session.get(USERS_API.format(username)) as resp: assert resp.status == 200 data = await resp.json() client_id = data['id'] token = token_hex(32) await self.db.execute('INSERT INTO scratchverifier_clients (client_id, \ token, username) VALUES (?, ?, ?)', 
(client_id, token, username)) return {'client_id': client_id, 'token': token, 'username': username} async def get_client(self, session_id): if session_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This is an example token that can be censored.'} username = await self.username_from_session(session_id) if username is None: return None async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_clients \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None return dict(row) async def get_client_info(self, client_id): if client_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This is an example token that can be censored.'} async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_clients \ WHERE client_id=?', (client_id,)) row = await self.db.fetchone() if row is None: return None return dict(row) async def reset_token(self, session_id): if session_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'Yes, the token was reset.'} username = await self.username_from_session(session_id) if username is None: return None await self.db.execute('UPDATE scratchverifier_clients SET token=? 
\ WHERE username=?', (token_hex(32), username)) return self.get_client(session_id) async def del_client(self, session_id): if session_id == 0: # 0 means debug mode return username = await self.username_from_session(session_id) if username is None: return await self.db.execute('DELETE FROM scratchverifier_clients \ WHERE username=?', (username,)) ### TABLE: sessions ### async def new_session(self, username): while 1: session_id = randbits(32) async with self.lock: await self.db.execute('SELECT session_id FROM \ scratchverifier_sessions WHERE session_id=?', (session_id,)) if (await self.db.fetchone()) is None: break await self.db.execute('INSERT INTO scratchverifier_sessions \ (session_id, expiry, username) VALUES (?, ?, ?)', ( session_id, int(time.time()) + SESSION_EXPIRY, username )) await self.db.execute('DELETE FROM scratchverifier_sessions WHERE \ expiry<=?', (int(time.time()),)) return session_id async def get_expired(self, session_id): async with self.lock: await self.db.execute('SELECT expiry FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) expiry = await self.db.fetchone() if expiry is None: # "expired" if session doesn't exist in the first place return True expiry = expiry[0] if time.time() > expiry: await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) return True return False async def logout(self, session_id): await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) async def logout_user(self, username): await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE username=?', (username,)) ### TABLE: usage ### async def start_verification(self, client_id, username): async with self.lock: await self.db.execute('SELECT code FROM scratchverifier_usage WHERE \ client_id=? 
AND username=?', (client_id, username)) row = await self.db.fetchone() code = sha256( str(client_id).encode() + str(time.time()).encode() + username.encode() + token_bytes() # 0->A, 1->B, etc, to avoid Scratch's phone number censor ).hexdigest().translate({ord('0') + i: ord('A') + i for i in range(10)}) if row is not None: await self.db.execute( 'UPDATE scratchverifier_usage SET expiry=?, code=? \ WHERE client_id=? AND username=?', (int(time.time()) + VERIFY_EXPIRY, code, client_id, username)) return code await self.db.execute('INSERT INTO scratchverifier_usage (client_id, \ code, username, expiry) VALUES (?, ?, ?, ?)', (client_id, code, username, int(time.time() + VERIFY_EXPIRY))) await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \ username, log_time, log_type) VALUES (?, ?, ?, ?)', (client_id, username, int(time.time()), 1)) await self.db.execute('DELETE FROM scratchverifier_usage WHERE \ expiry<=?', (int(time.time()),)) return code async def get_code(self, client_id, username): async with self.lock: await self.db.execute('SELECT code, expiry FROM scratchverifier_usage \ WHERE client_id=? AND username=?', (client_id, username)) row = await self.db.fetchone() if row is None: return None if time.time() > row['expiry']: await self.end_verification(client_id, username, False) return None return row['code'] async def end_verification(self, client_id, username, succ=True): await self.db.execute('DELETE FROM scratchverifier_usage WHERE \ client_id=? 
AND username=?', (client_id, username)) await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \ username, log_time, log_type) \ VALUES (?, ?, ?, ?)', (client_id, username, int(time.time()), 3 - succ)) ### TABLE: logs solely ### async def get_logs(self, table='logs', **params): query = f'SELECT * FROM scratchverifier_{table} WHERE 1=1' id_col = 'log_id' if table == 'logs' else 'id' time_col = 'log_time' if table == 'logs' else 'time' type_col = 'log_type' if table == 'logs' else 'type' if 'start' in params: query += f' AND {id_col}<:start' if 'before' in params: query += f' AND {time_col}<=:before' if 'end' in params: query += f' AND {id_col}>:end' if 'after' in params: query += f' AND {time_col}>=:after' if 'client_id' in params: query += ' AND client_id=:client_id' if 'username' in params: query += ' AND username=:username' if 'type' in params: query += f' AND {type_col}=:type' query += f' ORDER BY {id_col} DESC LIMIT :limit' for k, v in params.items(): if k in {'start', 'before', 'end', 'after', 'client_id', 'type'}: params[k] = int(v) params['limit'] = int(params['limit']) async with self.lock: await self.db.execute(query, params) rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_log(self, log_id, table='logs'): id_col = 'log_id' if table == 'logs' else 'id' async with self.lock: await self.db.execute(f'SELECT * FROM scratchverifier_{table} \ WHERE {id_col}=?', (log_id,)) row = await self.db.fetchone() if row is None: return None return dict(row) ### TABLE: ratelimits ### async def get_ratelimits(self): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_ratelimits') rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_ratelimit(self, username): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_ratelimits \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None return row async def set_ratelimits(self, data, 
performer): await self.db.executemany('INSERT OR REPLACE INTO \ scratchverifier_ratelimits (username, ratelimit) \ VALUES (:username, :ratelimit)', data) if performer is not None: await self.db.executemany( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', ({ 'username': performer, 'time': int(time.time()), 'type': 2, # ratelimit update 'data': json.dumps(i) } for i in data) ) ### TABLE: bans ### async def get_bans(self): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_bans') rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_ban(self, username): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_bans \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None if row['expiry'] is not None and row['expiry'] < time.time(): # ban has expired, delete it and return no ban await self.db.execute('DELETE FROM scratchverifier_bans \ WHERE username=?', (username,)) return None return row async def set_bans(self, data, performer): await self.db.executemany('INSERT OR REPLACE INTO scratchverifier_bans \ (username, expiry) VALUES (:username, :expiry)', data) await self.db.executemany('DELETE FROM scratchverifier_clients \ WHERE username=?', ((i['username'],) for i in data)) await self.db.executemany('DELETE FROM scratchverifier_sessions \ WHERE username=?', ((i['username'],) for i in data)) await self.db.executemany( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', ({ 'username': performer, 'time': int(time.time()), 'type': 1, # ban 'data': json.dumps(i) } for i in data) ) async def del_ban(self, username, performer): await self.db.execute('DELETE FROM scratchverifier_bans \ WHERE username=?', (username,)) await self.db.execute( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', { 
'username': performer, 'time': int(time.time()), 'type': 3, # unban 'data': json.dumps({'username': username}) } )
./CrossVul/dataset_final_sorted/CWE-287/py/good_4354_0
crossvul-python_data_good_1224_1
# -*- coding: utf-8 -*- # Generated by Django 1.11.24 on 2019-10-16 22:48 from __future__ import unicode_literals from django.conf import settings from django.contrib.auth import get_backends from django.db import migrations from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps from django.contrib.auth.hashers import check_password, make_password from django.utils.timezone import now as timezone_now from zerver.lib.cache import cache_delete, user_profile_by_api_key_cache_key from zerver.lib.queue import queue_json_publish from zerver.lib.utils import generate_api_key from zproject.backends import EmailAuthBackend from typing import Any, Set, Union import ujson def ensure_no_empty_passwords(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: """With CVE-2019-18933, it was possible for certain users created using social login (e.g. Google/GitHub auth) to have the empty string as their password in the Zulip database, rather than Django's "unusable password" (i.e. no password at all). This was a serious security issue for organizations with both password and Google/GitHub authentication enabled. Combined with the code changes to prevent new users from entering this buggy state, this migration sets the intended "no password" state for any users who are in this buggy state, as had been intended. While this bug was discovered by our own development team and we believe it hasn't been exploited in the wild, out of an abundance of caution, this migration also resets the personal API keys for all users where Zulip's database-level logging cannot **prove** that user's current personal API key was never accessed using this bug. There are a few ways this can be proven: (1) the user's password has never been changed and is not the empty string, or (2) the user's personal API key has changed since that user last changed their password (which is not ''). 
Both constitute proof because this bug cannot be used to gain the access required to change or reset a user's password. Resetting those API keys has the effect of logging many users out of the Zulip mobile and terminal apps unnecessarily (e.g. because the user changed their password at any point in the past, even though the user never was affected by the bug), but we're comfortable with that cost for ensuring that this bug is completely fixed. To avoid this inconvenience for self-hosted servers which don't even have EmailAuthBackend enabled, we skip resetting any API keys if the server doesn't have EmailAuthBackend configured. """ UserProfile = apps.get_model('zerver', 'UserProfile') RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog') # Because we're backporting this migration to the Zulip 2.0.x # series, we've given it migration number 0209, which is a # duplicate with an existing migration already merged into Zulip # master. Migration 0247_realmauditlog_event_type_to_int.py # changes the format of RealmAuditLog.event_type, so we need the # following conditional block to determine what values to use when # searching for the relevant events in that log. event_type_class = RealmAuditLog._meta.get_field('event_type').get_internal_type() if event_type_class == 'CharField': USER_PASSWORD_CHANGED = 'user_password_changed' # type: Union[int, str] USER_API_KEY_CHANGED = 'user_api_key_changed' # type: Union[int, str] else: USER_PASSWORD_CHANGED = 122 USER_API_KEY_CHANGED = 127 # First, we do some bulk queries to collect data we'll find useful # in the loop over all users below. # Users who changed their password at any time since account # creation. These users could theoretically have started with an # empty password, but set a password later via the password reset # flow. If their API key has changed since they changed their # password, we can prove their current API key cannot have been # exposed; we store those users in # password_change_user_ids_no_reset_needed. 
password_change_user_ids = set(RealmAuditLog.objects.filter( event_type=USER_PASSWORD_CHANGED).values_list("modified_user_id", flat=True)) password_change_user_ids_api_key_reset_needed = set() # type: Set[int] password_change_user_ids_no_reset_needed = set() # type: Set[int] for user_id in password_change_user_ids: # Here, we check the timing for users who have changed # their password. # We check if the user changed their API key since their first password change. query = RealmAuditLog.objects.filter( modified_user=user_id, event_type__in=[USER_PASSWORD_CHANGED, USER_API_KEY_CHANGED] ).order_by("event_time") earliest_password_change = query.filter(event_type=USER_PASSWORD_CHANGED).first() # Since these users are in password_change_user_ids, this must not be None. assert earliest_password_change is not None latest_api_key_change = query.filter(event_type=USER_API_KEY_CHANGED).last() if latest_api_key_change is None: # This user has never changed their API key. As a # result, even though it's very likely this user never # had an empty password, they have changed their # password, and we have no record of the password's # original hash, so we can't prove the user's API key # was never affected. We schedule this user's API key # to be reset. password_change_user_ids_api_key_reset_needed.add(user_id) elif earliest_password_change.event_time <= latest_api_key_change.event_time: # This user has changed their password before # generating their current personal API key, so we can # prove their current personal API key could not have # been exposed by this bug. 
password_change_user_ids_no_reset_needed.add(user_id) else: password_change_user_ids_api_key_reset_needed.add(user_id) if password_change_user_ids_no_reset_needed and settings.PRODUCTION: # We record in this log file users whose current API key was # generated after a real password was set, so there's no need # to reset their API key, but because they've changed their # password, we don't know whether or not they originally had a # buggy password. # # In theory, this list can be recalculated using the above # algorithm modified to only look at events before the time # this migration was installed, but it's helpful to log it as well. with open("/var/log/zulip/0209_password_migration.log", "w") as log_file: line = "No reset needed, but changed password: {}\n" log_file.write(line.format(password_change_user_ids_no_reset_needed)) AFFECTED_USER_TYPE_EMPTY_PASSWORD = 'empty_password' AFFECTED_USER_TYPE_CHANGED_PASSWORD = 'changed_password' MIGRATION_ID = '0209_user_profile_no_empty_password' def write_realm_audit_log_entry(user_profile: Any, event_time: Any, event_type: Any, affected_user_type: str) -> None: RealmAuditLog.objects.create( realm=user_profile.realm, modified_user=user_profile, event_type=event_type, event_time=event_time, extra_data=ujson.dumps({ 'migration_id': MIGRATION_ID, 'affected_user_type': affected_user_type, }) ) # If Zulip's built-in password authentication is not enabled on # the server level, then we plan to skip resetting any users' API # keys, since the bug requires EmailAuthBackend. email_auth_enabled = any(isinstance(backend, EmailAuthBackend) for backend in get_backends()) # A quick note: This query could in theory exclude users with # is_active=False, is_bot=True, or realm__deactivated=True here to # accessing only active human users in non-deactivated realms. # But it's better to just be thorough; users can be reactivated, # and e.g. 
a server admin could manually edit the database to # change a bot into a human user if they really wanted to. And # there's essentially no harm in rewriting state for a deactivated # account. for user_profile in UserProfile.objects.all(): event_time = timezone_now() if check_password('', user_profile.password): # This user currently has the empty string as their password. # Change their password and record that we did so. user_profile.password = make_password(None) update_fields = ["password"] write_realm_audit_log_entry(user_profile, event_time, USER_PASSWORD_CHANGED, AFFECTED_USER_TYPE_EMPTY_PASSWORD) if email_auth_enabled and not user_profile.is_bot: # As explained above, if the built-in password authentication # is enabled, reset the API keys. We can skip bot accounts here, # because the `password` attribute on a bot user is useless. reset_user_api_key(user_profile) update_fields.append("api_key") event_time = timezone_now() write_realm_audit_log_entry(user_profile, event_time, USER_API_KEY_CHANGED, AFFECTED_USER_TYPE_EMPTY_PASSWORD) user_profile.save(update_fields=update_fields) continue elif email_auth_enabled and \ user_profile.id in password_change_user_ids_api_key_reset_needed: # For these users, we just need to reset the API key. reset_user_api_key(user_profile) user_profile.save(update_fields=["api_key"]) write_realm_audit_log_entry(user_profile, event_time, USER_API_KEY_CHANGED, AFFECTED_USER_TYPE_CHANGED_PASSWORD) def reset_user_api_key(user_profile: Any) -> None: old_api_key = user_profile.api_key user_profile.api_key = generate_api_key() cache_delete(user_profile_by_api_key_cache_key(old_api_key)) # Like with any API key change, we need to clear any server-side # state for sending push notifications to mobile app clients that # could have been registered with the old API key. Fortunately, # we can just write to the queue processor that handles sending # those notices to the push notifications bouncer service. 
event = {'type': 'clear_push_device_tokens', 'user_profile_id': user_profile.id} queue_json_publish("deferred_work", event) class Migration(migrations.Migration): atomic = False dependencies = [ ('zerver', '0208_add_realm_night_logo_fields'), ] operations = [ migrations.RunPython(ensure_no_empty_passwords, reverse_code=migrations.RunPython.noop), ]
./CrossVul/dataset_final_sorted/CWE-287/py/good_1224_1
crossvul-python_data_good_3759_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # Copyright 2012 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Catalog service.""" import uuid from keystone import config from keystone import exception from keystone import identity from keystone import policy from keystone import token from keystone.common import manager from keystone.common import wsgi CONF = config.CONF class Manager(manager.Manager): """Default pivot point for the Catalog backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.catalog.driver) class Driver(object): """Interface description for an Catalog driver.""" def list_services(self): """List all service ids in catalog. Returns: list of service_ids or an empty list. """ raise exception.NotImplemented() def get_service(self, service_id): """Get service by id. Returns: service_ref dict or None. """ raise exception.NotImplemented() def delete_service(self, service_id): raise exception.NotImplemented() def create_service(self, service_id, service_ref): raise exception.NotImplemented() def create_endpoint(self, endpoint_id, endpoint_ref): raise exception.NotImplemented() def delete_endpoint(self, endpoint_id): raise exception.NotImplemented() def get_endpoint(self, endpoint_id): """Get endpoint by id. Returns: endpoint_ref dict or None. 
""" raise exception.NotImplemented() def list_endpoints(self): """List all endpoint ids in catalog. Returns: list of endpoint_ids or an empty list. """ raise exception.NotImplemented() def get_catalog(self, user_id, tenant_id, metadata=None): """Retreive and format the current service catalog. Returns: A nested dict representing the service catalog or an empty dict. Example: { 'RegionOne': {'compute': { 'adminURL': u'http://host:8774/v1.1/tenantid', 'internalURL': u'http://host:8774/v1.1/tenant_id', 'name': 'Compute Service', 'publicURL': u'http://host:8774/v1.1/tenantid'}, 'ec2': { 'adminURL': 'http://host:8773/services/Admin', 'internalURL': 'http://host:8773/services/Cloud', 'name': 'EC2 Service', 'publicURL': 'http://host:8773/services/Cloud'}} """ raise exception.NotImplemented() class ServiceController(wsgi.Application): def __init__(self): self.catalog_api = Manager() self.identity_api = identity.Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(ServiceController, self).__init__() # CRUD extensions # NOTE(termie): this OS-KSADM stuff is not very consistent def get_services(self, context): self.assert_admin(context) service_list = self.catalog_api.list_services(context) service_refs = [self.catalog_api.get_service(context, x) for x in service_list] return {'OS-KSADM:services': service_refs} def get_service(self, context, service_id): self.assert_admin(context) service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) return {'OS-KSADM:service': service_ref} def delete_service(self, context, service_id): self.assert_admin(context) service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) self.catalog_api.delete_service(context, service_id) def create_service(self, context, OS_KSADM_service): self.assert_admin(context) service_id = uuid.uuid4().hex service_ref = 
OS_KSADM_service.copy() service_ref['id'] = service_id new_service_ref = self.catalog_api.create_service( context, service_id, service_ref) return {'OS-KSADM:service': new_service_ref} class EndpointController(wsgi.Application): def __init__(self): self.catalog_api = Manager() self.identity_api = identity.Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(EndpointController, self).__init__() def get_endpoints(self, context): self.assert_admin(context) endpoint_list = self.catalog_api.list_endpoints(context) endpoint_refs = [self.catalog_api.get_endpoint(context, e) for e in endpoint_list] return {'endpoints': endpoint_refs} def create_endpoint(self, context, endpoint): self.assert_admin(context) endpoint_id = uuid.uuid4().hex endpoint_ref = endpoint.copy() endpoint_ref['id'] = endpoint_id service_id = endpoint_ref['service_id'] if not self.catalog_api.get_service(context, service_id): raise exception.ServiceNotFound(service_id=service_id) new_endpoint_ref = self.catalog_api.create_endpoint( context, endpoint_id, endpoint_ref) return {'endpoint': new_endpoint_ref} def delete_endpoint(self, context, endpoint_id): self.assert_admin(context) endpoint_ref = self.catalog_api.delete_endpoint(context, endpoint_id)
./CrossVul/dataset_final_sorted/CWE-287/py/good_3759_0
crossvul-python_data_bad_1224_0
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import absolute_import from typing import List, TYPE_CHECKING from zulint.custom_rules import RuleList if TYPE_CHECKING: from zulint.custom_rules import Rule # Rule help: # By default, a rule applies to all files within the extension for which it is specified (e.g. all .py files) # There are three operators we can use to manually include or exclude files from linting for a rule: # 'exclude': 'set([<path>, ...])' - if <path> is a filename, excludes that file. # if <path> is a directory, excludes all files directly below the directory <path>. # 'exclude_line': 'set([(<path>, <line>), ...])' - excludes all lines matching <line> in the file <path> from linting. # 'include_only': 'set([<path>, ...])' - includes only those files where <path> is a substring of the filepath. PYDELIMS = r'''"'()\[\]{}#\\''' PYREG = r"[^{}]".format(PYDELIMS) PYSQ = r'"(?:[^"\\]|\\.)*"' PYDQ = r"'(?:[^'\\]|\\.)*'" PYLEFT = r"[(\[{]" PYRIGHT = r"[)\]}]" PYCODE = PYREG for depth in range(5): PYGROUP = r"""(?:{}|{}|{}{}*{})""".format(PYSQ, PYDQ, PYLEFT, PYCODE, PYRIGHT) PYCODE = r"""(?:{}|{})""".format(PYREG, PYGROUP) FILES_WITH_LEGACY_SUBJECT = { # This basically requires a big DB migration: 'zerver/lib/topic.py', # This is for backward compatibility. 'zerver/tests/test_legacy_subject.py', # Other migration-related changes require extreme care. 'zerver/lib/fix_unreads.py', 'zerver/tests/test_migrations.py', # These use subject in the email sense, and will # probably always be exempt: 'zerver/lib/email_mirror.py', 'zerver/lib/feedback.py', 'zerver/tests/test_new_users.py', 'zerver/tests/test_email_mirror.py', # These are tied more to our API than our DB model. 'zerver/openapi/python_examples.py', 'zerver/tests/test_openapi.py', # This has lots of query data embedded, so it's hard # to fix everything until we migrate the DB to "topic". 
'zerver/tests/test_narrow.py', } shebang_rules = [ {'pattern': '^#!', 'description': "zerver library code shouldn't have a shebang line.", 'include_only': set(['zerver/'])}, # /bin/sh and /usr/bin/env are the only two binaries # that NixOS provides at a fixed path (outside a # buildFHSUserEnv sandbox). {'pattern': '^#!(?! *(?:/usr/bin/env|/bin/sh)(?: |$))', 'description': "Use `#!/usr/bin/env foo` instead of `#!/path/foo`" " for interpreters other than sh."}, {'pattern': '^#!/usr/bin/env python$', 'description': "Use `#!/usr/bin/env python3` instead of `#!/usr/bin/env python`."} ] # type: List[Rule] trailing_whitespace_rule = { 'pattern': r'\s+$', 'strip': '\n', 'description': 'Fix trailing whitespace' } # type: Rule whitespace_rules = [ # This linter should be first since bash_rules depends on it. trailing_whitespace_rule, {'pattern': 'http://zulip.readthedocs.io', 'description': 'Use HTTPS when linking to ReadTheDocs', }, {'pattern': '\t', 'strip': '\n', 'exclude': set(['tools/ci/success-http-headers.txt']), 'description': 'Fix tab-based whitespace'}, ] # type: List[Rule] comma_whitespace_rule = [ {'pattern': ', {2,}[^#/ ]', 'exclude': set(['zerver/tests', 'frontend_tests/node_tests', 'corporate/tests']), 'description': "Remove multiple whitespaces after ','", 'good_lines': ['foo(1, 2, 3)', 'foo = bar # some inline comment'], 'bad_lines': ['foo(1, 2, 3)', 'foo(1, 2, 3)']}, ] # type: List[Rule] markdown_whitespace_rules = list([rule for rule in whitespace_rules if rule['pattern'] != r'\s+$']) + [ # Two spaces trailing a line with other content is okay--it's a markdown line break. # This rule finds one space trailing a non-space, three or more trailing spaces, and # spaces on an empty line. 
{'pattern': r'((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)', 'strip': '\n', 'description': 'Fix trailing whitespace'}, {'pattern': '^#+[A-Za-z0-9]', 'strip': '\n', 'description': 'Missing space after # in heading', 'good_lines': ['### some heading', '# another heading'], 'bad_lines': ['###some heading', '#another heading']}, ] js_rules = RuleList( langs=['js'], rules=[ {'pattern': 'subject|SUBJECT', 'exclude': set(['static/js/util.js', 'frontend_tests/']), 'exclude_pattern': 'emails', 'description': 'avoid subject in JS code', 'good_lines': ['topic_name'], 'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN']}, {'pattern': r'[^_]function\(', 'description': 'The keyword "function" should be followed by a space'}, {'pattern': 'msgid|MSGID', 'description': 'Avoid using "msgid" as a variable name; use "message_id" instead.'}, {'pattern': r'.*blueslip.warning\(.*', 'description': 'The module blueslip has no function warning, try using blueslip.warn'}, {'pattern': '[)]{$', 'description': 'Missing space between ) and {'}, {'pattern': r'i18n\.t\([^)]+[^,\{\)]$', 'description': 'i18n string should not be a multiline string'}, {'pattern': r'''i18n\.t\(['"].+?['"]\s*\+''', 'description': 'Do not concatenate arguments within i18n.t()'}, {'pattern': r'i18n\.t\(.+\).*\+', 'description': 'Do not concatenate i18n strings'}, {'pattern': r'\+.*i18n\.t\(.+\)', 'description': 'Do not concatenate i18n strings'}, {'pattern': '[.]includes[(]', 'exclude': {'frontend_tests/'}, 'description': '.includes() is incompatible with Internet Explorer. Use .indexOf() !== -1 instead.'}, {'pattern': '[.]html[(]', 'exclude_pattern': r'''[.]html[(]("|'|render_|html|message.content|sub.rendered_description|i18n.t|rendered_|$|[)]|error_text|widget_elem|[$]error|[$][(]"<p>"[)])''', 'exclude': {'static/js/portico', 'static/js/lightbox.js', 'static/js/ui_report.js', 'static/js/confirm_dialog.js', 'frontend_tests/'}, 'description': 'Setting HTML content with jQuery .html() can lead to XSS security bugs. 
Consider .text() or using rendered_foo as a variable name if content comes from handlebars and thus is already sanitized.'}, {'pattern': '["\']json/', 'description': 'Relative URL for JSON route not supported by i18n'}, # This rule is constructed with + to avoid triggering on itself {'pattern': " =" + '[^ =>~"]', 'description': 'Missing whitespace after "="'}, {'pattern': '^[ ]*//[A-Za-z0-9]', 'description': 'Missing space after // in comment'}, {'pattern': 'if[(]', 'description': 'Missing space between if and ('}, {'pattern': 'else{$', 'description': 'Missing space between else and {'}, {'pattern': '^else {$', 'description': 'Write JS else statements on same line as }'}, {'pattern': '^else if', 'description': 'Write JS else statements on same line as }'}, {'pattern': 'console[.][a-z]', 'exclude': set(['static/js/blueslip.js', 'frontend_tests/zjsunit', 'frontend_tests/casper_lib/common.js', 'frontend_tests/node_tests', 'static/js/debug.js']), 'description': 'console.log and similar should not be used in webapp'}, {'pattern': r'''[.]text\(["'][a-zA-Z]''', 'description': 'Strings passed to $().text should be wrapped in i18n.t() for internationalization', 'exclude': set(['frontend_tests/node_tests/'])}, {'pattern': r'''compose_error\(["']''', 'description': 'Argument to compose_error should be a literal string enclosed ' 'by i18n.t()'}, {'pattern': r'ui.report_success\(', 'description': 'Deprecated function, use ui_report.success.'}, {'pattern': r'''report.success\(["']''', 'description': 'Argument to report_success should be a literal string enclosed ' 'by i18n.t()'}, {'pattern': r'ui.report_error\(', 'description': 'Deprecated function, use ui_report.error.'}, {'pattern': r'''report.error\(["'][^'"]''', 'description': 'Argument to ui_report.error should be a literal string enclosed ' 'by i18n.t()', 'good_lines': ['ui_report.error("")', 'ui_report.error(_("text"))'], 'bad_lines': ['ui_report.error("test")']}, {'pattern': r'\$\(document\)\.ready\(', 'description': 
"`Use $(f) rather than `$(document).ready(f)`", 'good_lines': ['$(function () {foo();}'], 'bad_lines': ['$(document).ready(function () {foo();}']}, {'pattern': '[$][.](get|post|patch|delete|ajax)[(]', 'description': "Use channel module for AJAX calls", 'exclude': set([ # Internal modules can do direct network calls 'static/js/blueslip.js', 'static/js/channel.js', # External modules that don't include channel.js 'static/js/stats/', 'static/js/portico/', 'static/js/billing/', ]), 'good_lines': ['channel.get(...)'], 'bad_lines': ['$.get()', '$.post()', '$.ajax()']}, {'pattern': 'style ?=', 'description': "Avoid using the `style=` attribute; we prefer styling in CSS files", 'exclude': set([ 'frontend_tests/node_tests/copy_and_paste.js', 'frontend_tests/node_tests/upload.js', 'frontend_tests/node_tests/templates.js', 'static/js/upload.js', 'static/js/stream_color.js', ]), 'good_lines': ['#my-style {color: blue;}'], 'bad_lines': ['<p style="color: blue;">Foo</p>', 'style = "color: blue;"']}, *whitespace_rules, *comma_whitespace_rule, ], ) python_rules = RuleList( langs=['py'], rules=[ {'pattern': 'subject|SUBJECT', 'exclude_pattern': 'subject to the|email|outbox', 'description': 'avoid subject as a var', 'good_lines': ['topic_name'], 'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN'], 'exclude': FILES_WITH_LEGACY_SUBJECT, 'include_only': set([ 'zerver/data_import/', 'zerver/lib/', 'zerver/tests/', 'zerver/views/'])}, {'pattern': 'msgid|MSGID', 'exclude': set(['tools/check-capitalization', 'tools/i18n/tagmessages']), 'description': 'Avoid using "msgid" as a variable name; use "message_id" instead.'}, {'pattern': '^(?!#)@login_required', 'description': '@login_required is unsupported; use @zulip_login_required', 'good_lines': ['@zulip_login_required', '# foo @login_required'], 'bad_lines': ['@login_required', ' @login_required']}, {'pattern': '^user_profile[.]save[(][)]', 'description': 'Always pass update_fields when saving user_profile objects', 'exclude_line': set([ 
('zerver/lib/actions.py', "user_profile.save() # Can't use update_fields because of how the foreign key works."), ]), 'exclude': set(['zerver/tests', 'zerver/lib/create_user.py']), 'good_lines': ['user_profile.save(update_fields=["pointer"])'], 'bad_lines': ['user_profile.save()']}, {'pattern': r'^[^"]*"[^"]*"%\(', 'description': 'Missing space around "%"', 'good_lines': ['"%s" % ("foo")', '"%s" % (foo)'], 'bad_lines': ['"%s"%("foo")', '"%s"%(foo)']}, {'pattern': r"^[^']*'[^']*'%\(", 'description': 'Missing space around "%"', 'good_lines': ["'%s' % ('foo')", "'%s' % (foo)"], 'bad_lines': ["'%s'%('foo')", "'%s'%(foo)"]}, {'pattern': 'self: Any', 'description': 'you can omit Any annotation for self', 'good_lines': ['def foo (self):'], 'bad_lines': ['def foo(self: Any):']}, # This rule is constructed with + to avoid triggering on itself {'pattern': " =" + '[^ =>~"]', 'description': 'Missing whitespace after "="', 'good_lines': ['a = b', '5 == 6'], 'bad_lines': ['a =b', 'asdf =42']}, {'pattern': r'":\w[^"]*$', 'description': 'Missing whitespace after ":"', 'exclude': set(['zerver/tests/test_push_notifications.py']), 'good_lines': ['"foo": bar', '"some:string:with:colons"'], 'bad_lines': ['"foo":bar', '"foo":1']}, {'pattern': r"':\w[^']*$", 'description': 'Missing whitespace after ":"', 'good_lines': ["'foo': bar", "'some:string:with:colons'"], 'bad_lines': ["'foo':bar", "'foo':1"]}, {'pattern': r"^\s+#\w", 'strip': '\n', 'exclude': set(['tools/droplets/create.py']), 'description': 'Missing whitespace after "#"', 'good_lines': ['a = b # some operation', '1+2 # 3 is the result'], 'bad_lines': [' #some operation', ' #not valid!!!']}, {'pattern': "assertEquals[(]", 'description': 'Use assertEqual, not assertEquals (which is deprecated).', 'good_lines': ['assertEqual(1, 2)'], 'bad_lines': ['assertEquals(1, 2)']}, {'pattern': "== None", 'description': 'Use `is None` to check whether something is None', 'good_lines': ['if foo is None'], 'bad_lines': ['foo == None']}, 
{'pattern': "type:[(]", 'description': 'Missing whitespace after ":" in type annotation', 'good_lines': ['# type: (Any, Any)', 'colon:separated:string:containing:type:as:keyword'], 'bad_lines': ['# type:(Any, Any)']}, {'pattern': "type: ignore$", 'exclude': set(['tools/tests', 'zerver/lib/test_runner.py', 'zerver/tests']), 'description': '"type: ignore" should always end with "# type: ignore # explanation for why"', 'good_lines': ['foo = bar # type: ignore # explanation'], 'bad_lines': ['foo = bar # type: ignore']}, {'pattern': "# type [(]", 'description': 'Missing : after type in type annotation', 'good_lines': ['foo = 42 # type: int', '# type: (str, int) -> None'], 'bad_lines': ['# type (str, int) -> None']}, {'pattern': "#type", 'description': 'Missing whitespace after "#" in type annotation', 'good_lines': ['foo = 42 # type: int'], 'bad_lines': ['foo = 42 #type: int']}, {'pattern': r'\b(if|else|while)[(]', 'description': 'Put a space between statements like if, else, etc. and (.', 'good_lines': ['if (1 == 2):', 'while (foo == bar):'], 'bad_lines': ['if(1 == 2):', 'while(foo == bar):']}, {'pattern': ", [)]", 'description': 'Unnecessary whitespace between "," and ")"', 'good_lines': ['foo = (1, 2, 3,)', 'foo(bar, 42)'], 'bad_lines': ['foo = (1, 2, 3, )']}, {'pattern': "% [(]", 'description': 'Unnecessary whitespace between "%" and "("', 'good_lines': ['"foo %s bar" % ("baz",)'], 'bad_lines': ['"foo %s bar" % ("baz",)']}, # This next check could have false positives, but it seems pretty # rare; if we find any, they can be added to the exclude list for # this rule. 
{'pattern': r"""^(?:[^'"#\\]|{}|{})*(?:{}|{})\s*%\s*(?![\s({{\\]|dict\(|tuple\()(?:[^,{}]|{})+(?:$|[,#\\]|{})""".format( PYSQ, PYDQ, PYSQ, PYDQ, PYDELIMS, PYGROUP, PYRIGHT), 'description': 'Used % formatting without a tuple', 'good_lines': ['"foo %s bar" % ("baz",)'], 'bad_lines': ['"foo %s bar" % "baz"']}, {'pattern': r"""^(?:[^'"#\\]|{}|{})*(?:{}|{})\s*%\s*\((?:[^,{}]|{})*\)""".format( PYSQ, PYDQ, PYSQ, PYDQ, PYDELIMS, PYGROUP), 'description': 'Used % formatting with parentheses that do not form a tuple', 'good_lines': ['"foo %s bar" % ("baz",)"'], 'bad_lines': ['"foo %s bar" % ("baz")']}, {'pattern': 'sudo', 'include_only': set(['scripts/']), 'exclude': set(['scripts/lib/setup_venv.py']), 'exclude_line': set([ ('scripts/lib/zulip_tools.py', 'sudo_args = kwargs.pop(\'sudo_args\', [])'), ('scripts/lib/zulip_tools.py', 'args = [\'sudo\'] + sudo_args + [\'--\'] + args'), ]), 'description': 'Most scripts are intended to run on systems without sudo.', 'good_lines': ['subprocess.check_call(["ls"])'], 'bad_lines': ['subprocess.check_call(["sudo", "ls"])']}, {'pattern': 'django.utils.translation', 'include_only': set(['test/', 'zerver/views/development/']), 'description': 'Test strings should not be tagged for translation', 'good_lines': [''], 'bad_lines': ['django.utils.translation']}, {'pattern': 'userid', 'description': 'We prefer user_id over userid.', 'good_lines': ['id = alice.user_id'], 'bad_lines': ['id = alice.userid']}, {'pattern': r'json_success\({}\)', 'description': 'Use json_success() to return nothing', 'good_lines': ['return json_success()'], 'bad_lines': ['return json_success({})']}, {'pattern': r'\Wjson_error\(_\(?\w+\)', 'exclude': set(['zerver/tests', 'zerver/views/development/']), 'description': 'Argument to json_error should be a literal string enclosed by _()', 'good_lines': ['return json_error(_("string"))'], 'bad_lines': ['return json_error(_variable)', 'return json_error(_(variable))']}, {'pattern': r'''\Wjson_error\(['"].+[),]$''', 'exclude': 
set(['zerver/tests']), 'description': 'Argument to json_error should a literal string enclosed by _()'}, # To avoid JsonableError(_variable) and JsonableError(_(variable)) {'pattern': r'\WJsonableError\(_\(?\w.+\)', 'exclude': set(['zerver/tests', 'zerver/views/development/']), 'description': 'Argument to JsonableError should be a literal string enclosed by _()'}, {'pattern': r'''\WJsonableError\(["'].+\)''', 'exclude': set(['zerver/tests', 'zerver/views/development/']), 'description': 'Argument to JsonableError should be a literal string enclosed by _()'}, {'pattern': r"""\b_\((?:\s|{}|{})*[^\s'")]""".format(PYSQ, PYDQ), 'description': 'Called _() on a computed string', 'exclude_line': set([ ('zerver/lib/i18n.py', 'result = _(string)'), ]), 'good_lines': ["return json_error(_('No presence data for %s') % (target.email,))"], 'bad_lines': ["return json_error(_('No presence data for %s' % (target.email,)))"]}, {'pattern': r'''([a-zA-Z0-9_]+)=REQ\(['"]\1['"]''', 'description': 'REQ\'s first argument already defaults to parameter name'}, {'pattern': r'self\.client\.(get|post|patch|put|delete)', 'description': \ '''Do not call self.client directly for put/patch/post/get. See WRAPPER_COMMENT in test_helpers.py for details. '''}, # Directly fetching Message objects in e.g. views code is often a security bug. 
{'pattern': '[^r]Message.objects.get', 'exclude': set(["zerver/tests", "zerver/lib/onboarding.py", "zilencer/management/commands/add_mock_conversation.py", "zerver/worker/queue_processors.py", "zerver/management/commands/export.py", "zerver/lib/export.py"]), 'description': 'Please use access_message() to fetch Message objects', }, {'pattern': 'Stream.objects.get', 'include_only': set(["zerver/views/"]), 'description': 'Please use access_stream_by_*() to fetch Stream objects', }, {'pattern': 'get_stream[(]', 'include_only': set(["zerver/views/", "zerver/lib/actions.py"]), 'exclude_line': set([ # This one in check_message is kinda terrible, since it's # how most instances are written, but better to exclude something than nothing ('zerver/lib/actions.py', 'stream = get_stream(stream_name, realm)'), ('zerver/lib/actions.py', 'get_stream(admin_realm_signup_notifications_stream, admin_realm)'), ]), 'description': 'Please use access_stream_by_*() to fetch Stream objects', }, {'pattern': 'Stream.objects.filter', 'include_only': set(["zerver/views/"]), 'description': 'Please use access_stream_by_*() to fetch Stream objects', }, {'pattern': '^from (zerver|analytics|confirmation)', 'include_only': set(["/migrations/"]), 'exclude': set([ 'zerver/migrations/0032_verify_all_medium_avatar_images.py', 'zerver/migrations/0060_move_avatars_to_be_uid_based.py', 'zerver/migrations/0104_fix_unreads.py', 'zerver/migrations/0206_stream_rendered_description.py', 'pgroonga/migrations/0002_html_escape_subject.py', ]), 'description': "Don't import models or other code in migrations; see docs/subsystems/schema-migrations.md", }, {'pattern': 'datetime[.](now|utcnow)', 'include_only': set(["zerver/", "analytics/"]), 'description': "Don't use datetime in backend code.\n" "See https://zulip.readthedocs.io/en/latest/contributing/code-style.html#naive-datetime-objects", }, {'pattern': r'render_to_response\(', 'description': "Use render() instead of render_to_response().", }, {'pattern': 'from 
os.path', 'description': "Don't use from when importing from the standard library", }, {'pattern': 'import os.path', 'description': "Use import os instead of import os.path", }, {'pattern': r'(logging|logger)\.warn\W', 'description': "Logger.warn is a deprecated alias for Logger.warning; Use 'warning' instead of 'warn'.", 'good_lines': ["logging.warning('I am a warning.')", "logger.warning('warning')"], 'bad_lines': ["logging.warn('I am a warning.')", "logger.warn('warning')"]}, {'pattern': r'\.pk', 'exclude_pattern': '[.]_meta[.]pk', 'description': "Use `id` instead of `pk`.", 'good_lines': ['if my_django_model.id == 42', 'self.user_profile._meta.pk'], 'bad_lines': ['if my_django_model.pk == 42']}, {'pattern': r'^[ ]*# type: \(', 'exclude': set([ # These directories, especially scripts/ and puppet/, # have tools that need to run before a Zulip environment # is provisioned; in some of those, the `typing` module # might not be available yet, so care is required. 'scripts/', 'tools/', 'puppet/', # Zerver files that we should just clean. 'zerver/tests', 'zerver/openapi/python_examples.py', 'zerver/lib/request.py', 'zerver/views/streams.py', # thumbor is (currently) python2 only 'zthumbor/', ]), 'description': 'Comment-style function type annotation. 
Use Python3 style annotations instead.', }, {'pattern': r' = models[.].*null=True.*\) # type: (?!Optional)', 'include_only': {"zerver/models.py"}, 'description': 'Model variable with null=true not annotated as Optional.', 'good_lines': ['desc = models.TextField(null=True) # type: Optional[Text]', 'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream]', 'desc = models.TextField() # type: Text', 'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Stream'], 'bad_lines': ['desc = models.CharField(null=True) # type: Text', 'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Stream'], }, {'pattern': r' = models[.](?!NullBoolean).*\) # type: Optional', # Optional tag, except NullBoolean(Field) 'exclude_pattern': 'null=True', 'include_only': {"zerver/models.py"}, 'description': 'Model variable annotated with Optional but variable does not have null=true.', 'good_lines': ['desc = models.TextField(null=True) # type: Optional[Text]', 'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream]', 'desc = models.TextField() # type: Text', 'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Stream'], 'bad_lines': ['desc = models.TextField() # type: Optional[Text]', 'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Optional[Stream]'], }, {'pattern': r'[\s([]Text([^\s\w]|$)', 'exclude': set([ # We are likely to want to keep these dirs Python 2+3 compatible, # since the plan includes extracting them to a separate project eventually. 'tools/lib', # TODO: Update our migrations from Text->str. 'zerver/migrations/', # thumbor is (currently) python2 only 'zthumbor/', ]), 'description': "Now that we're a Python 3 only codebase, we don't need to use typing.Text. 
Please use str instead.", }, {'pattern': 'exit[(]1[)]', 'include_only': set(["/management/commands/"]), 'description': 'Raise CommandError to exit with failure in management commands', }, *whitespace_rules, *comma_whitespace_rule, ], max_length=110, shebang_rules=shebang_rules, ) bash_rules = RuleList( langs=['bash'], rules=[ {'pattern': '#!.*sh [-xe]', 'description': 'Fix shebang line with proper call to /usr/bin/env for Bash path, change -x|-e switches' ' to set -x|set -e'}, {'pattern': 'sudo', 'description': 'Most scripts are intended to work on systems without sudo', 'include_only': set(['scripts/']), 'exclude': set([ 'scripts/lib/install', 'scripts/setup/configure-rabbitmq' ]), }, *whitespace_rules[0:1], ], shebang_rules=shebang_rules, ) css_rules = RuleList( langs=['css', 'scss'], rules=[ {'pattern': r'calc\([^+]+\+[^+]+\)', 'description': "Avoid using calc with '+' operator. See #8403 : in CSS.", 'good_lines': ["width: calc(20% - -14px);"], 'bad_lines': ["width: calc(20% + 14px);"]}, {'pattern': r'^[^:]*:\S[^:]*;$', 'description': "Missing whitespace after : in CSS", 'good_lines': ["background-color: white;", "text-size: 16px;"], 'bad_lines': ["background-color:white;", "text-size:16px;"]}, {'pattern': '[a-z]{', 'description': "Missing whitespace before '{' in CSS.", 'good_lines': ["input {", "body {"], 'bad_lines': ["input{", "body{"]}, {'pattern': 'https://', 'description': "Zulip CSS should have no dependencies on external resources", 'good_lines': ['background: url(/static/images/landing-page/pycon.jpg);'], 'bad_lines': ['background: url(https://example.com/image.png);']}, {'pattern': '^[ ][ ][a-zA-Z0-9]', 'description': "Incorrect 2-space indentation in CSS", 'strip': '\n', 'good_lines': [" color: white;", "color: white;"], 'bad_lines': [" color: white;"]}, {'pattern': r'{\w', 'description': "Missing whitespace after '{' in CSS (should be newline).", 'good_lines': ["{\n"], 'bad_lines': ["{color: LightGoldenRodYellow;"]}, {'pattern': ' thin[ ;]', 
'description': "thin CSS attribute is under-specified, please use 1px.", 'good_lines': ["border-width: 1px;"], 'bad_lines': ["border-width: thin;", "border-width: thin solid black;"]}, {'pattern': ' medium[ ;]', 'description': "medium CSS attribute is under-specified, please use pixels.", 'good_lines': ["border-width: 3px;"], 'bad_lines': ["border-width: medium;", "border: medium solid black;"]}, {'pattern': ' thick[ ;]', 'description': "thick CSS attribute is under-specified, please use pixels.", 'good_lines': ["border-width: 5px;"], 'bad_lines': ["border-width: thick;", "border: thick solid black;"]}, {'pattern': r'rgba?\(', 'description': 'Use of rgb(a) format is banned, Please use hsl(a) instead', 'good_lines': ['hsl(0, 0%, 0%)', 'hsla(0, 0%, 100%, 0.1)'], 'bad_lines': ['rgb(0, 0, 0)', 'rgba(255, 255, 255, 0.1)']}, *whitespace_rules, *comma_whitespace_rule, ], ) prose_style_rules = [ {'pattern': r'[^\/\#\-"]([jJ]avascript)', # exclude usage in hrefs/divs 'exclude': set(["docs/documentation/api.md"]), 'description': "javascript should be spelled JavaScript"}, {'pattern': r'''[^\/\-\."'\_\=\>]([gG]ithub)[^\.\-\_"\<]''', # exclude usage in hrefs/divs 'description': "github should be spelled GitHub"}, {'pattern': '[oO]rganisation', # exclude usage in hrefs/divs 'description': "Organization is spelled with a z", 'exclude_line': {('docs/translating/french.md', '* organization - **organisation**')}}, {'pattern': '!!! warning', 'description': "!!! warning is invalid; it's spelled '!!! 
warn'"}, {'pattern': 'Terms of service', 'description': "The S in Terms of Service is capitalized"}, {'pattern': '[^-_p]botserver(?!rc)|bot server', 'description': "Use Botserver instead of botserver or bot server."}, *comma_whitespace_rule, ] # type: List[Rule] html_rules = whitespace_rules + prose_style_rules + [ {'pattern': 'subject|SUBJECT', 'exclude': set(['templates/zerver/email.html']), 'exclude_pattern': 'email subject', 'description': 'avoid subject in templates', 'good_lines': ['topic_name'], 'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN']}, {'pattern': r'placeholder="[^{#](?:(?!\.com).)+$', 'description': "`placeholder` value should be translatable.", 'exclude_line': {('templates/zerver/register.html', 'placeholder="acme"'), ('templates/zerver/register.html', 'placeholder="Acme or Aκμή"')}, 'exclude': set(["templates/analytics/support.html"]), 'good_lines': ['<input class="stream-list-filter" type="text" placeholder="{{ _(\'Search streams\') }}" />'], 'bad_lines': ['<input placeholder="foo">']}, {'pattern': "placeholder='[^{]", 'description': "`placeholder` value should be translatable.", 'good_lines': ['<input class="stream-list-filter" type="text" placeholder="{{ _(\'Search streams\') }}" />'], 'bad_lines': ["<input placeholder='foo'>"]}, {'pattern': "aria-label='[^{]", 'description': "`aria-label` value should be translatable.", 'good_lines': ['<button type="button" class="close close-alert-word-status" aria-label="{{t \'Close\' }}">'], 'bad_lines': ["<button aria-label='foo'></button>"]}, {'pattern': 'aria-label="[^{]', 'description': "`aria-label` value should be translatable.", 'good_lines': ['<button type="button" class="close close-alert-word-status" aria-label="{{t \'Close\' }}">'], 'bad_lines': ['<button aria-label="foo"></button>']}, {'pattern': 'script src="http', 'description': "Don't directly load dependencies from CDNs. 
See docs/subsystems/html-css.md", 'exclude': set(["templates/corporate/billing.html", "templates/zerver/hello.html", "templates/corporate/upgrade.html"]), 'good_lines': ["{{ render_entrypoint('landing-page') }}"], 'bad_lines': ['<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>']}, {'pattern': "title='[^{]", 'description': "`title` value should be translatable.", 'good_lines': ['<link rel="author" title="{{ _(\'About these documents\') }}" />'], 'bad_lines': ["<p title='foo'></p>"]}, {'pattern': r'title="[^{\:]', 'exclude_line': set([ ('templates/zerver/app/markdown_help.html', '<td class="rendered_markdown"><img alt=":heart:" class="emoji" src="/static/generated/emoji/images/emoji/heart.png" title=":heart:" /></td>') ]), 'exclude': set(["templates/zerver/emails", "templates/analytics/realm_details.html", "templates/analytics/support.html"]), 'description': "`title` value should be translatable."}, {'pattern': r'''\Walt=["'][^{"']''', 'description': "alt argument should be enclosed by _() or it should be an empty string.", 'exclude': set(['static/templates/settings/display_settings.hbs', 'templates/zerver/app/keyboard_shortcuts.html', 'templates/zerver/app/markdown_help.html']), 'good_lines': ['<img src="{{source_url}}" alt="{{ _(name) }}" />', '<img alg="" />'], 'bad_lines': ['<img alt="Foo Image" />']}, {'pattern': r'''\Walt=["']{{ ?["']''', 'description': "alt argument should be enclosed by _().", 'good_lines': ['<img src="{{source_url}}" alt="{{ _(name) }}" />'], 'bad_lines': ['<img alt="{{ " />']}, {'pattern': r'\bon\w+ ?=', 'description': "Don't use inline event handlers (onclick=, etc. attributes) in HTML. 
Instead," "attach a jQuery event handler ($('#foo').on('click', function () {...})) when " "the DOM is ready (inside a $(function () {...}) block).", 'exclude': set(['templates/zerver/dev_login.html', 'templates/corporate/upgrade.html']), 'good_lines': ["($('#foo').on('click', function () {}"], 'bad_lines': ["<button id='foo' onclick='myFunction()'>Foo</button>", "<input onchange='myFunction()'>"]}, {'pattern': 'style ?=', 'description': "Avoid using the `style=` attribute; we prefer styling in CSS files", 'exclude_pattern': r'.*style ?=["' + "'" + '](display: ?none|background: {{|color: {{|background-color: {{).*', 'exclude': set([ # KaTeX output uses style attribute 'templates/zerver/app/markdown_help.html', # 5xx page doesn't have external CSS 'static/html/5xx.html', # Group PMs color is dynamically calculated 'static/templates/group_pms.hbs', # exclude_pattern above handles color, but have other issues: 'static/templates/draft.hbs', 'static/templates/subscription.hbs', 'static/templates/single_message.hbs', # Old-style email templates need to use inline style # attributes; it should be possible to clean these up # when we convert these templates to use premailer. 'templates/zerver/emails/email_base_messages.html', # Email log templates; should clean up. 'templates/zerver/email.html', 'templates/zerver/email_log.html', # Social backend logos are dynamically loaded 'templates/zerver/accounts_home.html', 'templates/zerver/login.html', # Probably just needs to be changed to display: none so the exclude works 'templates/zerver/app/navbar.html', # Needs the width cleaned up; display: none is fine 'static/templates/settings/account_settings.hbs', # background image property is dynamically generated 'static/templates/user_profile_modal.hbs', 'static/templates/sidebar_private_message_list.hbs', # Inline styling for an svg; could be moved to CSS files? 
'templates/zerver/landing_nav.html', 'templates/zerver/billing_nav.html', 'templates/zerver/app/home.html', 'templates/zerver/features.html', 'templates/zerver/portico-header.html', 'templates/corporate/billing.html', 'templates/corporate/upgrade.html', # Miscellaneous violations to be cleaned up 'static/templates/user_info_popover_title.hbs', 'static/templates/subscription_invites_warning_modal.hbs', 'templates/zerver/reset_confirm.html', 'templates/zerver/config_error.html', 'templates/zerver/dev_env_email_access_details.html', 'templates/zerver/confirm_continue_registration.html', 'templates/zerver/register.html', 'templates/zerver/accounts_send_confirm.html', 'templates/zerver/integrations/index.html', 'templates/zerver/documentation_main.html', 'templates/analytics/realm_summary_table.html', 'templates/corporate/zephyr.html', 'templates/corporate/zephyr-mirror.html', ]), 'good_lines': ['#my-style {color: blue;}', 'style="display: none"', "style='display: none"], 'bad_lines': ['<p style="color: blue;">Foo</p>', 'style = "color: blue;"']}, ] # type: List[Rule] handlebars_rules = RuleList( langs=['hbs'], rules=html_rules + [ {'pattern': "[<]script", 'description': "Do not use inline <script> tags here; put JavaScript in static/js instead."}, {'pattern': '{{ t ("|\')', 'description': 'There should be no spaces before the "t" in a translation tag.'}, {'pattern': r"{{t '.*' }}[\.\?!]", 'description': "Period should be part of the translatable string."}, {'pattern': r'{{t ".*" }}[\.\?!]', 'description': "Period should be part of the translatable string."}, {'pattern': r"{{/tr}}[\.\?!]", 'description': "Period should be part of the translatable string."}, {'pattern': '{{t ("|\') ', 'description': 'Translatable strings should not have leading spaces.'}, {'pattern': "{{t '[^']+ ' }}", 'description': 'Translatable strings should not have trailing spaces.'}, {'pattern': '{{t "[^"]+ " }}', 'description': 'Translatable strings should not have trailing spaces.'}, ], ) 
jinja2_rules = RuleList( langs=['html'], rules=html_rules + [ {'pattern': r"{% endtrans %}[\.\?!]", 'description': "Period should be part of the translatable string."}, {'pattern': r"{{ _(.+) }}[\.\?!]", 'description': "Period should be part of the translatable string."}, ], ) json_rules = RuleList( langs=['json'], rules=[ # Here, we don't use `whitespace_rules`, because the tab-based # whitespace rule flags a lot of third-party JSON fixtures # under zerver/webhooks that we want preserved verbatim. So # we just include the trailing whitespace rule and a modified # version of the tab-based whitespace rule (we can't just use # exclude in whitespace_rules, since we only want to ignore # JSON files with tab-based whitespace, not webhook code). trailing_whitespace_rule, {'pattern': '\t', 'strip': '\n', 'exclude': set(['zerver/webhooks/']), 'description': 'Fix tab-based whitespace'}, {'pattern': r'":["\[\{]', 'exclude': set(['zerver/webhooks/', 'zerver/tests/fixtures/']), 'description': 'Require space after : in JSON'}, ] ) markdown_docs_length_exclude = { # Has some example Vagrant output that's very long "docs/development/setup-vagrant.md", # Have wide output in code blocks "docs/subsystems/logging.md", "docs/subsystems/schema-migrations.md", # Have curl commands with JSON that would be messy to wrap "zerver/webhooks/helloworld/doc.md", "zerver/webhooks/trello/doc.md", # Has a very long configuration line "templates/zerver/integrations/perforce.md", # Has some example code that could perhaps be wrapped "templates/zerver/api/incoming-webhooks-walkthrough.md", # This macro has a long indented URL "templates/zerver/help/include/git-webhook-url-with-branches-indented.md", "templates/zerver/api/update-notification-settings.md", # These two are the same file and have some too-long lines for GitHub badges "README.md", "docs/overview/readme.md", } markdown_rules = RuleList( langs=['md'], rules=markdown_whitespace_rules + prose_style_rules + [ {'pattern': 
r'\[(?P<url>[^\]]+)\]\((?P=url)\)', 'description': 'Linkified markdown URLs should use cleaner <http://example.com> syntax.'}, {'pattern': 'https://zulip.readthedocs.io/en/latest/[a-zA-Z0-9]', 'exclude': {'docs/overview/contributing.md', 'docs/overview/readme.md', 'docs/README.md'}, 'include_only': set(['docs/']), 'description': "Use relative links (../foo/bar.html) to other documents in docs/", }, {'pattern': "su zulip -c [^']", 'include_only': set(['docs/']), 'description': "Always quote arguments using `su zulip -c '` to avoid confusion about how su works.", }, {'pattern': r'\][(][^#h]', 'include_only': set(['README.md', 'CONTRIBUTING.md']), 'description': "Use absolute links from docs served by GitHub", }, ], max_length=120, length_exclude=markdown_docs_length_exclude, exclude_files_in='templates/zerver/help/' ) help_markdown_rules = RuleList( langs=['md'], rules=markdown_rules.rules + [ {'pattern': '[a-z][.][A-Z]', 'description': "Likely missing space after end of sentence", 'include_only': set(['templates/zerver/help/']), }, {'pattern': r'\b[rR]ealm[s]?\b', 'include_only': set(['templates/zerver/help/']), 'good_lines': ['Organization', 'deactivate_realm', 'realm_filter'], 'bad_lines': ['Users are in a realm', 'Realm is the best model'], 'description': "Realms are referred to as Organizations in user-facing docs."}, ], length_exclude=markdown_docs_length_exclude, ) txt_rules = RuleList( langs=['txt', 'text', 'yaml', 'rst'], rules=whitespace_rules, ) non_py_rules = [ handlebars_rules, jinja2_rules, css_rules, js_rules, json_rules, markdown_rules, help_markdown_rules, bash_rules, txt_rules, ]
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_0
crossvul-python_data_bad_3757_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Identity service.""" import uuid import urllib import urlparse from keystone import config from keystone import exception from keystone import policy from keystone import token from keystone.common import logging from keystone.common import manager from keystone.common import wsgi CONF = config.CONF LOG = logging.getLogger(__name__) class Manager(manager.Manager): """Default pivot point for the Identity backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.identity.driver) class Driver(object): """Interface description for an Identity driver.""" def authenticate(self, user_id=None, tenant_id=None, password=None): """Authenticate a given user, tenant and password. Returns: (user, tenant, metadata). """ raise exception.NotImplemented() def get_tenant(self, tenant_id): """Get a tenant by id. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_tenant_by_name(self, tenant_name): """Get a tenant by name. Returns: tenant_ref or None. """ raise exception.NotImplemented() def get_user(self, user_id): """Get a user by id. Returns: user_ref or None. """ raise exception.NotImplemented() def get_user_by_name(self, user_name): """Get a user by name. Returns: user_ref or None. 
""" raise exception.NotImplemented() def get_role(self, role_id): """Get a role by id. Returns: role_ref or None. """ raise exception.NotImplemented() def list_users(self): """List all users in the system. NOTE(termie): I'd prefer if this listed only the users for a given tenant. Returns: a list of user_refs or an empty list. """ raise exception.NotImplemented() def list_roles(self): """List all roles in the system. Returns: a list of role_refs or an empty list. """ raise exception.NotImplemented() # NOTE(termie): seven calls below should probably be exposed by the api # more clearly when the api redesign happens def add_user_to_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def remove_user_from_tenant(self, tenant_id, user_id): raise exception.NotImplemented() def get_all_tenants(self): raise exception.NotImplemented() def get_tenants_for_user(self, user_id): """Get the tenants associated with a given user. Returns: a list of tenant ids. """ raise exception.NotImplemented() def get_roles_for_user_and_tenant(self, user_id, tenant_id): """Get the roles associated with a user within given tenant. Returns: a list of role ids. 
""" raise exception.NotImplemented() def add_role_to_user_and_tenant(self, user_id, tenant_id, role_id): """Add a role to a user within given tenant.""" raise exception.NotImplemented() def remove_role_from_user_and_tenant(self, user_id, tenant_id, role_id): """Remove a role from a user within given tenant.""" raise exception.NotImplemented() # user crud def create_user(self, user_id, user): raise exception.NotImplemented() def update_user(self, user_id, user): raise exception.NotImplemented() def delete_user(self, user_id): raise exception.NotImplemented() # tenant crud def create_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def update_tenant(self, tenant_id, tenant): raise exception.NotImplemented() def delete_tenant(self, tenant_id, tenant): raise exception.NotImplemented() # metadata crud def get_metadata(self, user_id, tenant_id): raise exception.NotImplemented() def create_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def update_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() def delete_metadata(self, user_id, tenant_id, metadata): raise exception.NotImplemented() # role crud def create_role(self, role_id, role): raise exception.NotImplemented() def update_role(self, role_id, role): raise exception.NotImplemented() def delete_role(self, role_id): raise exception.NotImplemented() class PublicRouter(wsgi.ComposableRouter): def add_routes(self, mapper): tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_tenants_for_token', conditions=dict(methods=['GET'])) class AdminRouter(wsgi.ComposableRouter): def add_routes(self, mapper): # Tenant Operations tenant_controller = TenantController() mapper.connect('/tenants', controller=tenant_controller, action='get_all_tenants', conditions=dict(method=['GET'])) mapper.connect('/tenants/{tenant_id}', controller=tenant_controller, action='get_tenant', conditions=dict(method=['GET'])) # 
User Operations user_controller = UserController() mapper.connect('/users/{user_id}', controller=user_controller, action='get_user', conditions=dict(method=['GET'])) # Role Operations roles_controller = RoleController() mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles', controller=roles_controller, action='get_user_roles', conditions=dict(method=['GET'])) mapper.connect('/users/{user_id}/roles', controller=user_controller, action='get_user_roles', conditions=dict(method=['GET'])) class TenantController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(TenantController, self).__init__() def get_all_tenants(self, context, **kw): """Gets a list of all tenants for an admin user.""" self.assert_admin(context) tenant_refs = self.identity_api.get_tenants(context) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenants_for_token(self, context, **kw): """Get valid tenants for token based on token used to authenticate. Pulls the token from the context, validates it and gets the valid tenants for the user in the token. Doesn't care about token scopedness. 
""" try: token_ref = self.token_api.get_token(context=context, token_id=context['token_id']) except exception.NotFound: raise exception.Unauthorized() user_ref = token_ref['user'] tenant_ids = self.identity_api.get_tenants_for_user( context, user_ref['id']) tenant_refs = [] for tenant_id in tenant_ids: tenant_refs.append(self.identity_api.get_tenant( context=context, tenant_id=tenant_id)) params = { 'limit': context['query_string'].get('limit'), 'marker': context['query_string'].get('marker'), } return self._format_tenant_list(tenant_refs, **params) def get_tenant(self, context, tenant_id): # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) tenant = self.identity_api.get_tenant(context, tenant_id) if tenant is None: raise exception.TenantNotFound(tenant_id=tenant_id) return {'tenant': tenant} # CRUD Extension def create_tenant(self, context, tenant): tenant_ref = self._normalize_dict(tenant) self.assert_admin(context) tenant_id = (tenant_ref.get('id') and tenant_ref.get('id') or uuid.uuid4().hex) tenant_ref['id'] = tenant_id tenant = self.identity_api.create_tenant( context, tenant_id, tenant_ref) return {'tenant': tenant} def update_tenant(self, context, tenant_id, tenant): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) tenant_ref = self.identity_api.update_tenant( context, tenant_id, tenant) return {'tenant': tenant_ref} def delete_tenant(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) self.identity_api.delete_tenant(context, tenant_id) def get_tenant_users(self, context, tenant_id, **kw): self.assert_admin(context) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) user_refs = self.identity_api.get_tenant_users(context, tenant_id) return 
{'users': user_refs} def _format_tenant_list(self, tenant_refs, **kwargs): marker = kwargs.get('marker') page_idx = 0 if marker is not None: for (marker_idx, tenant) in enumerate(tenant_refs): if tenant['id'] == marker: # we start pagination after the marker page_idx = marker_idx + 1 break else: msg = 'Marker could not be found' raise exception.ValidationError(message=msg) limit = kwargs.get('limit') if limit is not None: try: limit = int(limit) if limit < 0: raise AssertionError() except (ValueError, AssertionError): msg = 'Invalid limit value' raise exception.ValidationError(message=msg) tenant_refs = tenant_refs[page_idx:limit] for x in tenant_refs: if 'enabled' not in x: x['enabled'] = True o = {'tenants': tenant_refs, 'tenants_links': []} return o class UserController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(UserController, self).__init__() def get_user(self, context, user_id): self.assert_admin(context) user_ref = self.identity_api.get_user(context, user_id) if not user_ref: raise exception.UserNotFound(user_id=user_id) return {'user': user_ref} def get_users(self, context): # NOTE(termie): i can't imagine that this really wants all the data # about every single user in the system... 
self.assert_admin(context) user_refs = self.identity_api.list_users(context) return {'users': user_refs} # CRUD extension def create_user(self, context, user): user = self._normalize_dict(user) self.assert_admin(context) tenant_id = user.get('tenantId', None) if (tenant_id is not None and self.identity_api.get_tenant(context, tenant_id) is None): raise exception.TenantNotFound(tenant_id=tenant_id) user_id = uuid.uuid4().hex user_ref = user.copy() user_ref['id'] = user_id new_user_ref = self.identity_api.create_user( context, user_id, user_ref) if tenant_id: self.identity_api.add_user_to_tenant(context, tenant_id, user_id) return {'user': new_user_ref} def update_user(self, context, user_id, user): # NOTE(termie): this is really more of a patch than a put self.assert_admin(context) if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) user_ref = self.identity_api.update_user(context, user_id, user) # If the password was changed or the user was disabled we clear tokens if user.get('password') or user.get('enabled', True) == False: try: for token_id in self.token_api.list_tokens(context, user_id): self.token_api.delete_token(context, token_id) except exception.NotImplemented: # The users status has been changed but tokens remain valid for # backends that can't list tokens for users LOG.warning('User %s status has changed, but existing tokens ' 'remain valid' % user_id) return {'user': user_ref} def delete_user(self, context, user_id): self.assert_admin(context) if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) self.identity_api.delete_user(context, user_id) def set_user_enabled(self, context, user_id, user): return self.update_user(context, user_id, user) def set_user_password(self, context, user_id, user): return self.update_user(context, user_id, user) def update_user_tenant(self, context, user_id, user): """Update the default tenant.""" # ensure that we're a member 
of that tenant tenant_id = user.get('tenantId') self.identity_api.add_user_to_tenant(context, tenant_id, user_id) return self.update_user(context, user_id, user) class RoleController(wsgi.Application): def __init__(self): self.identity_api = Manager() self.token_api = token.Manager() self.policy_api = policy.Manager() super(RoleController, self).__init__() # COMPAT(essex-3) def get_user_roles(self, context, user_id, tenant_id=None): """Get the roles for a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. """ if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant ID required') user = self.identity_api.get_user(context, user_id) if user is None: raise exception.UserNotFound(user_id=user_id) tenant = self.identity_api.get_tenant(context, tenant_id) if tenant is None: raise exception.TenantNotFound(tenant_id=tenant_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) return {'roles': [self.identity_api.get_role(context, x) for x in roles]} # CRUD extension def get_role(self, context, role_id): self.assert_admin(context) role_ref = self.identity_api.get_role(context, role_id) if not role_ref: raise exception.RoleNotFound(role_id=role_id) return {'role': role_ref} def create_role(self, context, role): role = self._normalize_dict(role) self.assert_admin(context) role_id = uuid.uuid4().hex role['id'] = role_id role_ref = self.identity_api.create_role(context, role_id, role) return {'role': role_ref} def delete_role(self, context, role_id): self.assert_admin(context) self.get_role(context, role_id) self.identity_api.delete_role(context, role_id) def get_roles(self, context): self.assert_admin(context) roles = self.identity_api.list_roles(context) # TODO(termie): probably inefficient at some point return {'roles': roles} def add_role_to_user(self, context, user_id, role_id, tenant_id=None): """Add a role 
to a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. """ self.assert_admin(context) if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant_id required') if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) if self.identity_api.get_role(context, role_id) is None: raise exception.RoleNotFound(role_id=role_id) # This still has the weird legacy semantics that adding a role to # a user also adds them to a tenant self.identity_api.add_user_to_tenant(context, tenant_id, user_id) self.identity_api.add_role_to_user_and_tenant( context, user_id, tenant_id, role_id) role_ref = self.identity_api.get_role(context, role_id) return {'role': role_ref} def remove_role_from_user(self, context, user_id, role_id, tenant_id=None): """Remove a role from a user and tenant pair. Since we're trying to ignore the idea of user-only roles we're not implementing them in hopes that the idea will die off. 
""" self.assert_admin(context) if tenant_id is None: raise exception.NotImplemented(message='User roles not supported: ' 'tenant_id required') if self.identity_api.get_user(context, user_id) is None: raise exception.UserNotFound(user_id=user_id) if self.identity_api.get_tenant(context, tenant_id) is None: raise exception.TenantNotFound(tenant_id=tenant_id) if self.identity_api.get_role(context, role_id) is None: raise exception.RoleNotFound(role_id=role_id) # This still has the weird legacy semantics that adding a role to # a user also adds them to a tenant, so we must follow up on that self.identity_api.remove_role_from_user_and_tenant( context, user_id, tenant_id, role_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) if not roles: self.identity_api.remove_user_from_tenant( context, tenant_id, user_id) return # COMPAT(diablo): CRUD extension def get_role_refs(self, context, user_id): """Ultimate hack to get around having to make role_refs first-class. This will basically iterate over the various roles the user has in all tenants the user is a member of and create fake role_refs where the id encodes the user-tenant-role information so we can look up the appropriate data when we need to delete them. """ self.assert_admin(context) user_ref = self.identity_api.get_user(context, user_id) tenant_ids = self.identity_api.get_tenants_for_user(context, user_id) o = [] for tenant_id in tenant_ids: role_ids = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) for role_id in role_ids: ref = {'roleId': role_id, 'tenantId': tenant_id, 'userId': user_id} ref['id'] = urllib.urlencode(ref) o.append(ref) return {'roles': o} # COMPAT(diablo): CRUD extension def create_role_ref(self, context, user_id, role): """This is actually used for adding a user to a tenant. In the legacy data model adding a user to a tenant required setting a role. 
""" self.assert_admin(context) # TODO(termie): for now we're ignoring the actual role tenant_id = role.get('tenantId') role_id = role.get('roleId') self.identity_api.add_user_to_tenant(context, tenant_id, user_id) self.identity_api.add_role_to_user_and_tenant( context, user_id, tenant_id, role_id) role_ref = self.identity_api.get_role(context, role_id) return {'role': role_ref} # COMPAT(diablo): CRUD extension def delete_role_ref(self, context, user_id, role_ref_id): """This is actually used for deleting a user from a tenant. In the legacy data model removing a user from a tenant required deleting a role. To emulate this, we encode the tenant and role in the role_ref_id, and if this happens to be the last role for the user-tenant pair, we remove the user from the tenant. """ self.assert_admin(context) # TODO(termie): for now we're ignoring the actual role role_ref_ref = urlparse.parse_qs(role_ref_id) tenant_id = role_ref_ref.get('tenantId')[0] role_id = role_ref_ref.get('roleId')[0] self.identity_api.remove_role_from_user_and_tenant( context, user_id, tenant_id, role_id) roles = self.identity_api.get_roles_for_user_and_tenant( context, user_id, tenant_id) if not roles: self.identity_api.remove_user_from_tenant( context, tenant_id, user_id)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3757_0
crossvul-python_data_good_3282_0
#!/usr/bin/python
"""End-to-end tests for PKINIT preauthentication against a test KDC.

Exercises anonymous PKINIT, FILE:/DIR:/PKCS12:/PKCS11: client identities
(with and without key passwords, via prompter and responder), certificate
matching rules, and DH parameter renegotiation.
"""
from k5test import *

# Skip this test if pkinit wasn't built.
if not os.path.exists(os.path.join(plugins, 'preauth', 'pkinit.so')):
    skip_rest('PKINIT tests', 'PKINIT module not built')

# Check if soft-pkcs11.so is available.
try:
    import ctypes
    lib = ctypes.LibraryLoader(ctypes.CDLL).LoadLibrary('soft-pkcs11.so')
    del lib
    have_soft_pkcs11 = True
# NOTE(review): deliberately broad — any failure to load the module just
# means the PKCS11 tests are skipped later; not a bug worth narrowing here.
except:
    have_soft_pkcs11 = False

# Construct a krb5.conf fragment configuring pkinit.
# Paths to the pre-generated test certificates and keys.
certs = os.path.join(srctop, 'tests', 'dejagnu', 'pkinit-certs')
ca_pem = os.path.join(certs, 'ca.pem')
kdc_pem = os.path.join(certs, 'kdc.pem')
user_pem = os.path.join(certs, 'user.pem')
privkey_pem = os.path.join(certs, 'privkey.pem')
privkey_enc_pem = os.path.join(certs, 'privkey-enc.pem')
user_p12 = os.path.join(certs, 'user.p12')
user_enc_p12 = os.path.join(certs, 'user-enc.p12')
user_upn_p12 = os.path.join(certs, 'user-upn.p12')
user_upn2_p12 = os.path.join(certs, 'user-upn2.p12')
user_upn3_p12 = os.path.join(certs, 'user-upn3.p12')
generic_p12 = os.path.join(certs, 'generic.p12')
# Scratch directories for the DIR: identity tests (populated below).
path = os.path.join(os.getcwd(), 'testdir', 'tmp-pkinit-certs')
path_enc = os.path.join(os.getcwd(), 'testdir', 'tmp-pkinit-certs-enc')

pkinit_krb5_conf = {'realms': {'$realm': {
    'pkinit_anchors': 'FILE:%s' % ca_pem}}}
pkinit_kdc_conf = {'realms': {'$realm': {
    'default_principal_flags': '+preauth',
    'pkinit_eku_checking': 'none',
    'pkinit_identity': 'FILE:%s,%s' % (kdc_pem, privkey_pem),
    'pkinit_indicator': ['indpkinit1', 'indpkinit2']}}}
restrictive_kdc_conf = {'realms': {'$realm': {
    'restrict_anonymous_to_tgt': 'true' }}}

# Principals for the test KDB module used by the UPN alias tests.
testprincs = {'krbtgt/KRBTEST.COM': {'keys': 'aes128-cts'},
              'user': {'keys': 'aes128-cts', 'flags': '+preauth'},
              'user2': {'keys': 'aes128-cts', 'flags': '+preauth'}}
alias_kdc_conf = {'realms': {'$realm': {
    'default_principal_flags': '+preauth',
    'pkinit_eku_checking': 'none',
    'pkinit_allow_upn': 'true',
    'pkinit_identity': 'FILE:%s,%s' % (kdc_pem, privkey_pem),
    'database_module': 'test'}},
    'dbmodules': {'test': {
        'db_library': 'test',
        'alias': {'user@krbtest.com': 'user'},
        'princs': testprincs}}}

# The various client identity strings exercised below.
file_identity = 'FILE:%s,%s' % (user_pem, privkey_pem)
file_enc_identity = 'FILE:%s,%s' % (user_pem, privkey_enc_pem)
dir_identity = 'DIR:%s' % path
dir_enc_identity = 'DIR:%s' % path_enc
dir_file_identity = 'FILE:%s,%s' % (os.path.join(path, 'user.crt'),
                                    os.path.join(path, 'user.key'))
dir_file_enc_identity = 'FILE:%s,%s' % (os.path.join(path_enc, 'user.crt'),
                                        os.path.join(path_enc, 'user.key'))
p12_identity = 'PKCS12:%s' % user_p12
p12_upn_identity = 'PKCS12:%s' % user_upn_p12
p12_upn2_identity = 'PKCS12:%s' % user_upn2_p12
p12_upn3_identity = 'PKCS12:%s' % user_upn3_p12
p12_generic_identity = 'PKCS12:%s' % generic_p12
p12_enc_identity = 'PKCS12:%s' % user_enc_p12
p11_identity = 'PKCS11:soft-pkcs11.so'
p11_token_identity = ('PKCS11:module_name=soft-pkcs11.so:'
                      'slotid=1:token=SoftToken (token)')

# Start a realm with the test kdb module for the following UPN SAN tests.
realm = K5Realm(krb5_conf=pkinit_krb5_conf, kdc_conf=alias_kdc_conf,
                create_kdb=False)
realm.start_kdc()

# Compatibility check: cert contains UPN "user", which matches the
# request principal user@KRBTEST.COM if parsed as a normal principal.
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_upn2_identity])

# Compatibility check: cert contains UPN "user@KRBTEST.COM", which matches
# the request principal user@KRBTEST.COM if parsed as a normal principal.
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_upn3_identity])

# Cert contains UPN "user@krbtest.com" which is aliased to the request
# principal.
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_upn_identity])

# Test an id-pkinit-san match to a post-canonical principal.
realm.kinit('user@krbtest.com',
            flags=['-E', '-X', 'X509_user_identity=%s' % p12_identity])

# Test a UPN match to a post-canonical principal.  (This only works
# for the cert with the UPN containing just "user", as we don't allow
# UPN reparsing when comparing to the canonicalized client principal.)
realm.kinit('user@krbtest.com',
            flags=['-E', '-X', 'X509_user_identity=%s' % p12_upn2_identity])

# Test a mismatch.
msg = 'kinit: Client name mismatch while getting initial credentials'
realm.run([kinit, '-X', 'X509_user_identity=%s' % p12_upn2_identity, 'user2'],
          expected_code=1, expected_msg=msg)
realm.stop()

# Fresh realm with the regular KDB for the remaining tests.
realm = K5Realm(krb5_conf=pkinit_krb5_conf, kdc_conf=pkinit_kdc_conf,
                get_creds=False)

# Sanity check - password-based preauth should still work.
realm.run(['./responder', '-r', 'password=%s' % password('user'),
           realm.user_princ])
realm.kinit(realm.user_princ, password=password('user'))
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# Test anonymous PKINIT.
realm.kinit('@%s' % realm.realm, flags=['-n'], expected_code=1,
            expected_msg='not found in Kerberos database')
realm.addprinc('WELLKNOWN/ANONYMOUS')
realm.kinit('@%s' % realm.realm, flags=['-n'])
realm.klist('WELLKNOWN/ANONYMOUS@WELLKNOWN:ANONYMOUS')
realm.run([kvno, realm.host_princ])
# Authdata type 97 carries auth indicators; anonymous tickets must not
# carry them.
out = realm.run(['./adata', realm.host_princ])
if '97:' in out:
    fail('auth indicators seen in anonymous PKINIT ticket')

# Test anonymous kadmin.
f = open(os.path.join(realm.testdir, 'acl'), 'a')
f.write('WELLKNOWN/ANONYMOUS@WELLKNOWN:ANONYMOUS a *')
f.close()
realm.start_kadmind()
realm.run([kadmin, '-n', 'addprinc', '-pw', 'test', 'testadd'])
realm.run([kadmin, '-n', 'getprinc', 'testadd'], expected_code=1,
          expected_msg="Operation requires ``get'' privilege")
realm.stop_kadmind()

# Test with anonymous restricted; FAST should work but kvno should fail.
r_env = realm.special_env('restrict', True, kdc_conf=restrictive_kdc_conf)
realm.stop_kdc()
realm.start_kdc(env=r_env)
realm.kinit('@%s' % realm.realm, flags=['-n'])
realm.kinit('@%s' % realm.realm, flags=['-n', '-T', realm.ccache])
realm.run([kvno, realm.host_princ], expected_code=1,
          expected_msg='KDC policy rejects request')

# Regression test for #8458: S4U2Self requests crash the KDC if
# anonymous is restricted.
realm.kinit(realm.host_princ, flags=['-k'])
realm.run([kvno, '-U', 'user', realm.host_princ])

# Go back to a normal KDC and disable anonymous PKINIT.
realm.stop_kdc()
realm.start_kdc()
realm.run([kadminl, 'delprinc', 'WELLKNOWN/ANONYMOUS'])

# Run the basic test - PKINIT with FILE: identity, with no password on the key.
realm.run(['./responder', '-x', 'pkinit=', '-X',
           'X509_user_identity=%s' % file_identity, realm.user_princ])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % file_identity])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# Try again using RSA instead of DH.
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % file_identity,
                   '-X', 'flag_RSA_PROTOCOL=yes'])
realm.klist(realm.user_princ)

# Test a DH parameter renegotiation by temporarily setting a 4096-bit
# minimum on the KDC.  (Preauth type 16 is PKINIT PA_PK_AS_REQ;
# 109 is PKINIT TD_DH_PARAMETERS; 133 is FAST PA-FX-COOKIE.)
minbits_kdc_conf = {'realms': {'$realm': {'pkinit_dh_min_bits': '4096'}}}
minbits_env = realm.special_env('restrict', True, kdc_conf=minbits_kdc_conf)
realm.stop_kdc()
realm.start_kdc(env=minbits_env)
expected_trace = ('Sending unauthenticated request',
                  '/Additional pre-authentication required',
                  'Preauthenticating using KDC method data',
                  'Preauth module pkinit (16) (real) returned: 0/Success',
                  'Produced preauth for next request: 133, 16',
                  '/Key parameters not accepted',
                  'Preauth tryagain input types (16): 109, 133',
                  'trying again with KDC-provided parameters',
                  'Preauth module pkinit (16) tryagain returned: 0/Success',
                  'Followup preauth for next request: 16, 133')
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % file_identity],
            expected_trace=expected_trace)
realm.stop_kdc()
realm.start_kdc()

# Run the basic test - PKINIT with FILE: identity, with a password on the key,
# supplied by the prompter.
# Expect failure if the responder does nothing, and we have no prompter.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % file_enc_identity,
           '-X', 'X509_user_identity=%s' % file_enc_identity,
           realm.user_princ], expected_code=2)
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % file_enc_identity],
            password='encrypted')
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])
realm.run(['./adata', realm.host_princ],
          expected_msg='+97: [indpkinit1, indpkinit2]')

# Run the basic test - PKINIT with FILE: identity, with a password on the key,
# supplied by the responder.
# Supply the response in raw form.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % file_enc_identity,
           '-r', 'pkinit={"%s": "encrypted"}' % file_enc_identity,
           '-X', 'X509_user_identity=%s' % file_enc_identity,
           realm.user_princ])
# Supply the response through the convenience API.
realm.run(['./responder', '-X', 'X509_user_identity=%s' % file_enc_identity,
           '-p', '%s=%s' % (file_enc_identity, 'encrypted'),
           realm.user_princ])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with DIR: identity, with no password on the key.
os.mkdir(path)
os.mkdir(path_enc)
shutil.copy(privkey_pem, os.path.join(path, 'user.key'))
shutil.copy(privkey_enc_pem, os.path.join(path_enc, 'user.key'))
shutil.copy(user_pem, os.path.join(path, 'user.crt'))
shutil.copy(user_pem, os.path.join(path_enc, 'user.crt'))
realm.run(['./responder', '-x', 'pkinit=', '-X',
           'X509_user_identity=%s' % dir_identity, realm.user_princ])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % dir_identity])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with DIR: identity, with a password on the key, supplied by the
# prompter.
# Expect failure if the responder does nothing, and we have no prompter.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % dir_file_enc_identity,
           '-X', 'X509_user_identity=%s' % dir_enc_identity,
           realm.user_princ], expected_code=2)
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % dir_enc_identity],
            password='encrypted')
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with DIR: identity, with a password on the key, supplied by the
# responder.
# Supply the response in raw form.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % dir_file_enc_identity,
           '-r', 'pkinit={"%s": "encrypted"}' % dir_file_enc_identity,
           '-X', 'X509_user_identity=%s' % dir_enc_identity,
           realm.user_princ])
# Supply the response through the convenience API.
realm.run(['./responder', '-X', 'X509_user_identity=%s' % dir_enc_identity,
           '-p', '%s=%s' % (dir_file_enc_identity, 'encrypted'),
           realm.user_princ])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with PKCS12: identity, with no password on the bundle.
realm.run(['./responder', '-x', 'pkinit=', '-X',
           'X509_user_identity=%s' % p12_identity, realm.user_princ])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with PKCS12: identity, with a password on the bundle, supplied by the
# prompter.
# Expect failure if the responder does nothing, and we have no prompter.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % p12_enc_identity,
           '-X', 'X509_user_identity=%s' % p12_enc_identity,
           realm.user_princ], expected_code=2)
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_enc_identity],
            password='encrypted')
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with PKCS12: identity, with a password on the bundle, supplied by the
# responder.
# Supply the response in raw form.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % p12_enc_identity,
           '-r', 'pkinit={"%s": "encrypted"}' % p12_enc_identity,
           '-X', 'X509_user_identity=%s' % p12_enc_identity,
           realm.user_princ])
# Supply the response through the convenience API.
realm.run(['./responder', '-X', 'X509_user_identity=%s' % p12_enc_identity,
           '-p', '%s=%s' % (p12_enc_identity, 'encrypted'),
           realm.user_princ])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# Certificate matching rules (pkinit_cert_match string attribute).
# Match a single rule.
rule = '<SAN>^user@KRBTEST.COM$'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity])
realm.klist(realm.user_princ)

# Match a combined rule (default prefix is &&).
rule = '<SUBJECT>CN=user$<KU>digitalSignature,keyEncipherment'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity])
realm.klist(realm.user_princ)

# Fail an && rule.
rule = '&&<SUBJECT>O=OTHER.COM<SAN>^user@KRBTEST.COM$'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
msg = 'kinit: Certificate mismatch while getting initial credentials'
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity],
            expected_code=1, expected_msg=msg)

# Pass an || rule.
rule = '||<SUBJECT>O=KRBTEST.COM<SAN>^otheruser@KRBTEST.COM$'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity])
realm.klist(realm.user_princ)

# Fail an || rule.
rule = '||<SUBJECT>O=OTHER.COM<SAN>^otheruser@KRBTEST.COM$'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
msg = 'kinit: Certificate mismatch while getting initial credentials'
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_identity],
            expected_code=1, expected_msg=msg)

# Authorize a client cert with no PKINIT extensions using subject and
# issuer.  (Relies on EKU checking being turned off.)
rule = '&&<SUBJECT>CN=user$<ISSUER>O=MIT,'
realm.run([kadminl, 'setstr', realm.user_princ, 'pkinit_cert_match', rule])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p12_generic_identity])
realm.klist(realm.user_princ)

if not have_soft_pkcs11:
    skip_rest('PKINIT PKCS11 tests', 'soft-pkcs11.so not found')

# soft-pkcs11 reads its token configuration from this rc file.
softpkcs11rc = os.path.join(os.getcwd(), 'testdir', 'soft-pkcs11.rc')
realm.env['SOFTPKCS11RC'] = softpkcs11rc

# PKINIT with PKCS11: identity, with no need for a PIN.
conf = open(softpkcs11rc, 'w')
conf.write("%s\t%s\t%s\t%s\n" % ('user', 'user token', user_pem,
                                 privkey_pem))
conf.close()
# Expect to succeed without having to supply any more information.
realm.run(['./responder', '-x', 'pkinit=', '-X',
           'X509_user_identity=%s' % p11_identity, realm.user_princ])
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p11_identity])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# PKINIT with PKCS11: identity, with a PIN supplied by the prompter.
os.remove(softpkcs11rc)
conf = open(softpkcs11rc, 'w')
conf.write("%s\t%s\t%s\t%s\n" % ('user', 'user token', user_pem,
                                 privkey_enc_pem))
conf.close()
# Expect failure if the responder does nothing, and there's no prompter
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % p11_token_identity,
           '-X', 'X509_user_identity=%s' % p11_identity, realm.user_princ],
          expected_code=2)
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p11_identity],
            password='encrypted')
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

# Supply the wrong PIN, and verify that we ignore the draft9 padata offer
# in the KDC method data after RFC 4556 PKINIT fails.
expected_trace = ('PKINIT client has no configured identity; giving up',
                  'PKINIT client ignoring draft 9 offer from RFC 4556 KDC')
realm.kinit(realm.user_princ,
            flags=['-X', 'X509_user_identity=%s' % p11_identity],
            password='wrong', expected_code=1,
            expected_trace=expected_trace)

# PKINIT with PKCS11: identity, with a PIN supplied by the responder.
# Supply the response in raw form.
realm.run(['./responder', '-x', 'pkinit={"%s": 0}' % p11_token_identity,
           '-r', 'pkinit={"%s": "encrypted"}' % p11_token_identity,
           '-X', 'X509_user_identity=%s' % p11_identity, realm.user_princ])
# Supply the response through the convenience API.
realm.run(['./responder', '-X', 'X509_user_identity=%s' % p11_identity,
           '-p', '%s=%s' % (p11_token_identity, 'encrypted'),
           realm.user_princ])
realm.klist(realm.user_princ)
realm.run([kvno, realm.host_princ])

success('PKINIT tests')
./CrossVul/dataset_final_sorted/CWE-287/py/good_3282_0
crossvul-python_data_bad_1224_4
# -*- coding: utf-8 -*-
from typing import List, Dict, Optional

from django.utils.translation import ugettext as _
from django.conf import settings
from django.contrib.auth import authenticate, get_backends
from django.urls import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpRequest
from django.shortcuts import redirect, render
from django.core.exceptions import ValidationError
from django.core import validators
from zerver.context_processors import get_realm_from_request, login_context
from zerver.models import UserProfile, Realm, Stream, MultiuseInvite, \
    name_changes_disabled, email_to_username, email_allowed_for_realm, \
    get_realm, get_user_by_delivery_email, get_default_stream_groups, DisposableEmailError, \
    DomainNotAllowedForRealmError, get_source_profile, EmailContainsPlusError, \
    PreregistrationUser
from zerver.lib.send_email import send_email, FromAddress
from zerver.lib.actions import do_change_password, do_change_full_name, \
    do_activate_user, do_create_user, do_create_realm, \
    validate_email_for_realm, \
    do_set_user_display_setting, lookup_default_stream_groups, bulk_add_subscriptions
from zerver.forms import RegistrationForm, HomepageForm, RealmCreationForm, \
    FindMyTeamForm, RealmRedirectForm
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
from zerver.decorator import require_post, \
    do_login
from zerver.lib.onboarding import send_initial_realm_messages, setup_realm_internal_bots
from zerver.lib.subdomains import get_subdomain, is_root_domain_available
from zerver.lib.timezone import get_all_timezones
from zerver.lib.users import get_accounts_for_email
from zerver.lib.zephyr import compute_mit_user_fullname
from zerver.views.auth import create_preregistration_user, redirect_and_log_into_subdomain, \
    redirect_to_deactivation_notice, get_safe_redirect_to
from zproject.backends import ldap_auth_enabled, password_auth_enabled, \
    ZulipLDAPExceptionNoMatchingLDAPUser, email_auth_enabled, ZulipLDAPAuthBackend

from confirmation.models import Confirmation, RealmCreationKey, ConfirmationKeyException, \
    validate_key, create_confirmation_link, get_object_from_key, \
    render_confirmation_key_error

import logging
import smtplib
import urllib

def check_prereg_key_and_redirect(request: HttpRequest, confirmation_key: str) -> HttpResponse:
    """Validate a registration confirmation key, then hand off to the
    /accounts/register form via an interstitial template."""
    # If the key isn't valid, show the error message on the original URL
    confirmation = Confirmation.objects.filter(confirmation_key=confirmation_key).first()
    if confirmation is None or confirmation.type not in [
            Confirmation.USER_REGISTRATION, Confirmation.INVITATION,
            Confirmation.REALM_CREATION]:
        return render_confirmation_key_error(
            request, ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST))
    try:
        # Raises ConfirmationKeyException on expired/used/invalid keys.
        get_object_from_key(confirmation_key, confirmation.type)
    except ConfirmationKeyException as exception:
        return render_confirmation_key_error(request, exception)

    # confirm_preregistrationuser.html just extracts the confirmation_key
    # (and GET parameters) and redirects to /accounts/register, so that the
    # user can enter their information on a cleaner URL.
    return render(request, 'confirmation/confirm_preregistrationuser.html',
                  context={
                      'key': confirmation_key,
                      'full_name': request.GET.get("full_name", None)})

@require_post
def accounts_register(request: HttpRequest) -> HttpResponse:
    """Handle the POSTed registration form for an invited/confirmed email."""
    key = request.POST['key']
    # NOTE(review): this looks the key up directly without re-running the
    # expiry/type validation done in check_prereg_key_and_redirect — a
    # crafted POST with a stale or wrong-type key would presumably bypass
    # that check; confirm against the confirmation framework.
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    realm_creation = prereg_user.realm_creation
    password_required = prereg_user.password_required
    is_realm_admin = prereg_user.invited_as == PreregistrationUser.INVITE_AS['REALM_ADMIN'] or realm_creation
    is_guest = prereg_user.invited_as == PreregistrationUser.INVITE_AS['GUEST_USER']

    try:
        validators.validate_email(email)
    except ValidationError:
        return render(request, "zerver/invalid_email.html", context={"invalid_email": True})

    if realm_creation:
        # For creating a new realm, there is no existing realm or domain
        realm = None
    else:
        # The confirmation key must belong to the realm of the subdomain
        # being POSTed to; otherwise treat it as nonexistent.
        if get_subdomain(request) != prereg_user.realm.string_id:
            return render_confirmation_key_error(
                request, ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST))
        realm = prereg_user.realm

        try:
            email_allowed_for_realm(email, realm)
        except DomainNotAllowedForRealmError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "closed_domain": True})
        except DisposableEmailError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "disposable_emails_not_allowed": True})
        except EmailContainsPlusError:
            return render(request, "zerver/invalid_email.html",
                          context={"realm_name": realm.name, "email_contains_plus": True})

        if realm.deactivated:
            # The user is trying to register for a deactivated realm. Advise them to
            # contact support.
            return redirect_to_deactivation_notice()

        try:
            validate_email_for_realm(realm, email)
        except ValidationError:
            # Email already has an account in this realm; send them to login.
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' +
                                        urllib.parse.quote_plus(email))

    name_validated = False
    full_name = None
    require_ldap_password = False

    if request.POST.get('from_confirmation'):
        try:
            del request.session['authenticated_full_name']
        except KeyError:
            pass
        if realm is not None and realm.is_zephyr_mirror_realm:
            # For MIT users, we can get an authoritative name from Hesiod.
            # Technically we should check that this is actually an MIT
            # realm, but we can cross that bridge if we ever get a non-MIT
            # zephyr mirroring realm.
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""},
                realm_creation=realm_creation)
            name_validated = True
        elif settings.POPULATE_PROFILE_VIA_LDAP:
            for backend in get_backends():
                if isinstance(backend, LDAPBackend):
                    try:
                        ldap_username = backend.django_to_ldap_username(email)
                    except ZulipLDAPExceptionNoMatchingLDAPUser:
                        logging.warning("New account email %s could not be found in LDAP" % (email,))
                        form = RegistrationForm(realm_creation=realm_creation)
                        break
                    ldap_user = _LDAPUser(backend, ldap_username)
                    try:
                        ldap_full_name, _ = backend.get_mapped_name(ldap_user)
                        request.session['authenticated_full_name'] = ldap_full_name
                        name_validated = True
                        # We don't use initial= here, because if the form is
                        # complete (that is, no additional fields need to be
                        # filled out by the user) we want the form to validate,
                        # so they can be directly registered without having to
                        # go through this interstitial.
                        form = RegistrationForm({'full_name': ldap_full_name},
                                                realm_creation=realm_creation)
                        # Check whether this is ZulipLDAPAuthBackend,
                        # which is responsible for authentication and
                        # requires that LDAP accounts enter their LDAP
                        # password to register, or ZulipLDAPUserPopulator,
                        # which just populates UserProfile fields (no auth).
require_ldap_password = isinstance(backend, ZulipLDAPAuthBackend) break except TypeError: # Let the user fill out a name and/or try another backend form = RegistrationForm(realm_creation=realm_creation) elif prereg_user.full_name: if prereg_user.full_name_validated: request.session['authenticated_full_name'] = prereg_user.full_name name_validated = True form = RegistrationForm({'full_name': prereg_user.full_name}, realm_creation=realm_creation) else: form = RegistrationForm(initial={'full_name': prereg_user.full_name}, realm_creation=realm_creation) elif 'full_name' in request.POST: form = RegistrationForm( initial={'full_name': request.POST.get('full_name')}, realm_creation=realm_creation ) else: form = RegistrationForm(realm_creation=realm_creation) else: postdata = request.POST.copy() if name_changes_disabled(realm): # If we populate profile information via LDAP and we have a # verified name from you on file, use that. Otherwise, fall # back to the full name in the request. try: postdata.update({'full_name': request.session['authenticated_full_name']}) name_validated = True except KeyError: pass form = RegistrationForm(postdata, realm_creation=realm_creation) if not (password_auth_enabled(realm) and password_required): form['password'].field.required = False if form.is_valid(): if password_auth_enabled(realm): password = form.cleaned_data['password'] else: # SSO users don't need no passwords password = None if realm_creation: string_id = form.cleaned_data['realm_subdomain'] realm_name = form.cleaned_data['realm_name'] realm = do_create_realm(string_id, realm_name) setup_realm_internal_bots(realm) assert(realm is not None) full_name = form.cleaned_data['full_name'] short_name = email_to_username(email) default_stream_group_names = request.POST.getlist('default_stream_group') default_stream_groups = lookup_default_stream_groups(default_stream_group_names, realm) timezone = "" if 'timezone' in request.POST and request.POST['timezone'] in get_all_timezones(): 
timezone = request.POST['timezone'] if 'source_realm' in request.POST and request.POST["source_realm"] != "on": source_profile = get_source_profile(email, request.POST["source_realm"]) else: source_profile = None if not realm_creation: try: existing_user_profile = get_user_by_delivery_email(email, realm) # type: Optional[UserProfile] except UserProfile.DoesNotExist: existing_user_profile = None else: existing_user_profile = None user_profile = None # type: Optional[UserProfile] return_data = {} # type: Dict[str, bool] if ldap_auth_enabled(realm): # If the user was authenticated using an external SSO # mechanism like Google or GitHub auth, then authentication # will have already been done before creating the # PreregistrationUser object with password_required=False, and # so we don't need to worry about passwords. # # If instead the realm is using EmailAuthBackend, we will # set their password above. # # But if the realm is using LDAPAuthBackend, we need to verify # their LDAP password (which will, as a side effect, create # the user account) here using authenticate. # pregeg_user.realm_creation carries the information about whether # we're in realm creation mode, and the ldap flow will handle # that and create the user with the appropriate parameters. user_profile = authenticate(request, username=email, password=password, realm=realm, prereg_user=prereg_user, return_data=return_data) if user_profile is None: if return_data.get("no_matching_ldap_user") and email_auth_enabled(realm): # If both the LDAP and Email auth backends are # enabled, and there's no matching user in the LDAP # directory then the intent is to create a user in the # realm with their email outside the LDAP organization # (with e.g. a password stored in the Zulip database, # not LDAP). So we fall through and create the new # account. # # It's likely that we can extend this block to the # Google and GitHub auth backends with no code changes # other than here. 
pass else: # TODO: This probably isn't going to give a # user-friendly error message, but it doesn't # particularly matter, because the registration form # is hidden for most users. return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.parse.quote_plus(email)) elif not realm_creation: # Since we'll have created a user, we now just log them in. return login_and_go_to_home(request, user_profile) else: # With realm_creation=True, we're going to return further down, # after finishing up the creation process. pass if existing_user_profile is not None and existing_user_profile.is_mirror_dummy: user_profile = existing_user_profile do_activate_user(user_profile) do_change_password(user_profile, password) do_change_full_name(user_profile, full_name, user_profile) do_set_user_display_setting(user_profile, 'timezone', timezone) # TODO: When we clean up the `do_activate_user` code path, # make it respect invited_as_admin / is_realm_admin. if user_profile is None: user_profile = do_create_user(email, password, realm, full_name, short_name, prereg_user=prereg_user, is_realm_admin=is_realm_admin, is_guest=is_guest, tos_version=settings.TOS_VERSION, timezone=timezone, newsletter_data={"IP": request.META['REMOTE_ADDR']}, default_stream_groups=default_stream_groups, source_profile=source_profile, realm_creation=realm_creation) if realm_creation: bulk_add_subscriptions([realm.signup_notifications_stream], [user_profile]) send_initial_realm_messages(realm) # Because for realm creation, registration happens on the # root domain, we need to log them into the subdomain for # their new realm. return redirect_and_log_into_subdomain(realm, full_name, email) # This dummy_backend check below confirms the user is # authenticating to the correct subdomain. 
auth_result = authenticate(username=user_profile.delivery_email, realm=realm, return_data=return_data, use_dummy_backend=True) if return_data.get('invalid_subdomain'): # By construction, this should never happen. logging.error("Subdomain mismatch in registration %s: %s" % ( realm.subdomain, user_profile.delivery_email,)) return redirect('/') return login_and_go_to_home(request, auth_result) return render( request, 'zerver/register.html', context={'form': form, 'email': email, 'key': key, 'full_name': request.session.get('authenticated_full_name', None), 'lock_name': name_validated and name_changes_disabled(realm), # password_auth_enabled is normally set via our context processor, # but for the registration form, there is no logged in user yet, so # we have to set it here. 'creating_new_team': realm_creation, 'password_required': password_auth_enabled(realm) and password_required, 'require_ldap_password': require_ldap_password, 'password_auth_enabled': password_auth_enabled(realm), 'root_domain_available': is_root_domain_available(), 'default_stream_groups': get_default_stream_groups(realm), 'accounts': get_accounts_for_email(email), 'MAX_REALM_NAME_LENGTH': str(Realm.MAX_REALM_NAME_LENGTH), 'MAX_NAME_LENGTH': str(UserProfile.MAX_NAME_LENGTH), 'MAX_PASSWORD_LENGTH': str(form.MAX_PASSWORD_LENGTH), 'MAX_REALM_SUBDOMAIN_LENGTH': str(Realm.MAX_REALM_SUBDOMAIN_LENGTH) } ) def login_and_go_to_home(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: do_login(request, user_profile) return HttpResponseRedirect(user_profile.realm.uri + reverse('zerver.views.home.home')) def prepare_activation_url(email: str, request: HttpRequest, realm_creation: bool=False, streams: Optional[List[Stream]]=None, invited_as: Optional[int]=None) -> str: """ Send an email with a confirmation link to the provided e-mail so the user can complete their registration. 
""" prereg_user = create_preregistration_user(email, request, realm_creation) if streams is not None: prereg_user.streams.set(streams) if invited_as is not None: prereg_user.invited_as = invited_as prereg_user.save() confirmation_type = Confirmation.USER_REGISTRATION if realm_creation: confirmation_type = Confirmation.REALM_CREATION activation_url = create_confirmation_link(prereg_user, request.get_host(), confirmation_type) if settings.DEVELOPMENT and realm_creation: request.session['confirmation_key'] = {'confirmation_key': activation_url.split('/')[-1]} return activation_url def send_confirm_registration_email(email: str, activation_url: str, language: str) -> None: send_email('zerver/emails/confirm_registration', to_emails=[email], from_address=FromAddress.tokenized_no_reply_address(), language=language, context={'activate_url': activation_url}) def redirect_to_email_login_url(email: str) -> HttpResponseRedirect: login_url = reverse('django.contrib.auth.views.login') email = urllib.parse.quote_plus(email) redirect_url = login_url + '?already_registered=' + email return HttpResponseRedirect(redirect_url) def create_realm(request: HttpRequest, creation_key: Optional[str]=None) -> HttpResponse: try: key_record = validate_key(creation_key) except RealmCreationKey.Invalid: return render(request, "zerver/realm_creation_failed.html", context={'message': _('The organization creation link has expired' ' or is not valid.')}) if not settings.OPEN_REALM_CREATION: if key_record is None: return render(request, "zerver/realm_creation_failed.html", context={'message': _('New organization creation disabled')}) # When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm, # with a few restrictions on their email address. 
if request.method == 'POST': form = RealmCreationForm(request.POST) if form.is_valid(): email = form.cleaned_data['email'] activation_url = prepare_activation_url(email, request, realm_creation=True) if key_record is not None and key_record.presume_email_valid: # The user has a token created from the server command line; # skip confirming the email is theirs, taking their word for it. # This is essential on first install if the admin hasn't stopped # to configure outbound email up front, or it isn't working yet. key_record.delete() return HttpResponseRedirect(activation_url) try: send_confirm_registration_email(email, activation_url, request.LANGUAGE_CODE) except smtplib.SMTPException as e: logging.error('Error in create_realm: %s' % (str(e),)) return HttpResponseRedirect("/config-error/smtp") if key_record is not None: key_record.delete() return HttpResponseRedirect(reverse('new_realm_send_confirm', kwargs={'email': email})) else: form = RealmCreationForm() return render(request, 'zerver/create_realm.html', context={'form': form, 'current_url': request.get_full_path}, ) def accounts_home(request: HttpRequest, multiuse_object_key: Optional[str]="", multiuse_object: Optional[MultiuseInvite]=None) -> HttpResponse: try: realm = get_realm(get_subdomain(request)) except Realm.DoesNotExist: return HttpResponseRedirect(reverse('zerver.views.registration.find_account')) if realm.deactivated: return redirect_to_deactivation_notice() from_multiuse_invite = False streams_to_subscribe = None invited_as = None if multiuse_object: realm = multiuse_object.realm streams_to_subscribe = multiuse_object.streams.all() from_multiuse_invite = True invited_as = multiuse_object.invited_as if request.method == 'POST': form = HomepageForm(request.POST, realm=realm, from_multiuse_invite=from_multiuse_invite) if form.is_valid(): email = form.cleaned_data['email'] activation_url = prepare_activation_url(email, request, streams=streams_to_subscribe, invited_as=invited_as) try: 
send_confirm_registration_email(email, activation_url, request.LANGUAGE_CODE) except smtplib.SMTPException as e: logging.error('Error in accounts_home: %s' % (str(e),)) return HttpResponseRedirect("/config-error/smtp") return HttpResponseRedirect(reverse('signup_send_confirm', kwargs={'email': email})) email = request.POST['email'] try: validate_email_for_realm(realm, email) except ValidationError: return redirect_to_email_login_url(email) else: form = HomepageForm(realm=realm) context = login_context(request) context.update({'form': form, 'current_url': request.get_full_path, 'multiuse_object_key': multiuse_object_key, 'from_multiuse_invite': from_multiuse_invite}) return render(request, 'zerver/accounts_home.html', context=context) def accounts_home_from_multiuse_invite(request: HttpRequest, confirmation_key: str) -> HttpResponse: multiuse_object = None try: multiuse_object = get_object_from_key(confirmation_key, Confirmation.MULTIUSE_INVITE) # Required for oAuth2 except ConfirmationKeyException as exception: realm = get_realm_from_request(request) if realm is None or realm.invite_required: return render_confirmation_key_error(request, exception) return accounts_home(request, multiuse_object_key=confirmation_key, multiuse_object=multiuse_object) def generate_204(request: HttpRequest) -> HttpResponse: return HttpResponse(content=None, status=204) def find_account(request: HttpRequest) -> HttpResponse: from zerver.context_processors import common_context url = reverse('zerver.views.registration.find_account') emails = [] # type: List[str] if request.method == 'POST': form = FindMyTeamForm(request.POST) if form.is_valid(): emails = form.cleaned_data['emails'] for user in UserProfile.objects.filter( delivery_email__in=emails, is_active=True, is_bot=False, realm__deactivated=False): context = common_context(user) context.update({ 'email': user.delivery_email, }) send_email('zerver/emails/find_team', to_user_ids=[user.id], context=context) # Note: Show all the emails 
in the result otherwise this # feature can be used to ascertain which email addresses # are associated with Zulip. data = urllib.parse.urlencode({'emails': ','.join(emails)}) return redirect(url + "?" + data) else: form = FindMyTeamForm() result = request.GET.get('emails') # The below validation is perhaps unnecessary, in that we # shouldn't get able to get here with an invalid email unless # the user hand-edits the URLs. if result: for email in result.split(','): try: validators.validate_email(email) emails.append(email) except ValidationError: pass return render(request, 'zerver/find_account.html', context={'form': form, 'current_url': lambda: url, 'emails': emails},) def realm_redirect(request: HttpRequest) -> HttpResponse: if request.method == 'POST': form = RealmRedirectForm(request.POST) if form.is_valid(): subdomain = form.cleaned_data['subdomain'] realm = get_realm(subdomain) redirect_to = get_safe_redirect_to(request.GET.get("next", ""), realm.uri) return HttpResponseRedirect(redirect_to) else: form = RealmRedirectForm() return render(request, 'zerver/realm_redirect.html', context={'form': form})
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_4
crossvul-python_data_good_3761_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid import routes from keystone import catalog from keystone.common import logging from keystone.common import wsgi from keystone import exception from keystone import identity from keystone.openstack.common import timeutils from keystone import policy from keystone import token LOG = logging.getLogger(__name__) class AdminRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('admin') mapper.connect('/', controller=version_controller, action='get_version') # Token Operations auth_controller = TokenController() mapper.connect('/tokens', controller=auth_controller, action='authenticate', conditions=dict(method=['POST'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='validate_token', conditions=dict(method=['GET'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='validate_token_head', conditions=dict(method=['HEAD'])) mapper.connect('/tokens/{token_id}', controller=auth_controller, action='delete_token', conditions=dict(method=['DELETE'])) mapper.connect('/tokens/{token_id}/endpoints', controller=auth_controller, action='endpoints', conditions=dict(method=['GET'])) # Miscellaneous Operations extensions_controller = AdminExtensionsController() mapper.connect('/extensions', controller=extensions_controller, action='get_extensions_info', 
conditions=dict(method=['GET'])) mapper.connect('/extensions/{extension_alias}', controller=extensions_controller, action='get_extension_info', conditions=dict(method=['GET'])) identity_router = identity.AdminRouter() routers = [identity_router] super(AdminRouter, self).__init__(mapper, routers) class PublicRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('public') mapper.connect('/', controller=version_controller, action='get_version') # Token Operations auth_controller = TokenController() mapper.connect('/tokens', controller=auth_controller, action='authenticate', conditions=dict(method=['POST'])) # Miscellaneous extensions_controller = PublicExtensionsController() mapper.connect('/extensions', controller=extensions_controller, action='get_extensions_info', conditions=dict(method=['GET'])) mapper.connect('/extensions/{extension_alias}', controller=extensions_controller, action='get_extension_info', conditions=dict(method=['GET'])) identity_router = identity.PublicRouter() routers = [identity_router] super(PublicRouter, self).__init__(mapper, routers) class PublicVersionRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('public') mapper.connect('/', controller=version_controller, action='get_versions') routers = [] super(PublicVersionRouter, self).__init__(mapper, routers) class AdminVersionRouter(wsgi.ComposingRouter): def __init__(self): mapper = routes.Mapper() version_controller = VersionController('admin') mapper.connect('/', controller=version_controller, action='get_versions') routers = [] super(AdminVersionRouter, self).__init__(mapper, routers) class VersionController(wsgi.Application): def __init__(self, version_type): self.catalog_api = catalog.Manager() self.url_key = "%sURL" % version_type super(VersionController, self).__init__() def _get_identity_url(self, context): catalog_ref = 
self.catalog_api.get_catalog(context=context, user_id=None, tenant_id=None) for region, region_ref in catalog_ref.iteritems(): for service, service_ref in region_ref.iteritems(): if service == 'identity': return service_ref[self.url_key] raise exception.NotImplemented() def _get_versions_list(self, context): """The list of versions is dependent on the context.""" identity_url = self._get_identity_url(context) if not identity_url.endswith('/'): identity_url = identity_url + '/' versions = {} versions['v2.0'] = { "id": "v2.0", "status": "beta", "updated": "2011-11-19T00:00:00Z", "links": [ { "rel": "self", "href": identity_url, }, { "rel": "describedby", "type": "text/html", "href": "http://docs.openstack.org/api/openstack-" "identity-service/2.0/content/" }, { "rel": "describedby", "type": "application/pdf", "href": "http://docs.openstack.org/api/openstack-" "identity-service/2.0/identity-dev-guide-" "2.0.pdf" } ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.identity-v2.0" "+json" }, { "base": "application/xml", "type": "application/vnd.openstack.identity-v2.0" "+xml" } ] } return versions def get_versions(self, context): versions = self._get_versions_list(context) return wsgi.render_response(status=(300, 'Multiple Choices'), body={ "versions": { "values": versions.values() } }) def get_version(self, context): versions = self._get_versions_list(context) return wsgi.render_response(body={ "version": versions['v2.0'] }) class NoopController(wsgi.Application): def __init__(self): super(NoopController, self).__init__() def noop(self, context): return {} class TokenController(wsgi.Application): def __init__(self): self.catalog_api = catalog.Manager() self.identity_api = identity.Manager() self.token_api = token.Manager() self.policy_api = policy.Manager() super(TokenController, self).__init__() def authenticate(self, context, auth=None): """Authenticate credentials and return a token. 
Accept auth as a dict that looks like:: { "auth":{ "passwordCredentials":{ "username":"test_user", "password":"mypass" }, "tenantName":"customer-x" } } In this case, tenant is optional, if not provided the token will be considered "unscoped" and can later be used to get a scoped token. Alternatively, this call accepts auth with only a token and tenant that will return a token that is scoped to that tenant. """ token_id = uuid.uuid4().hex if 'passwordCredentials' in auth: user_id = auth['passwordCredentials'].get('userId', None) username = auth['passwordCredentials'].get('username', '') password = auth['passwordCredentials'].get('password', '') tenant_name = auth.get('tenantName', None) if username: try: user_ref = self.identity_api.get_user_by_name( context=context, user_name=username) user_id = user_ref['id'] except exception.UserNotFound: raise exception.Unauthorized() # more compat tenant_id = auth.get('tenantId', None) if tenant_name: try: tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) tenant_id = tenant_ref['id'] except exception.TenantNotFound: raise exception.Unauthorized() try: auth_info = self.identity_api.authenticate(context=context, user_id=user_id, password=password, tenant_id=tenant_id) (user_ref, tenant_ref, metadata_ref) = auth_info # If the user is disabled don't allow them to authenticate if not user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_id) raise exception.Unauthorized() # If the tenant is disabled don't allow them to authenticate if tenant_ref and not tenant_ref.get('enabled', True): LOG.warning('Tenant %s is disabled' % tenant_id) raise exception.Unauthorized() except AssertionError as e: raise exception.Unauthorized(e.message) token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref)) if tenant_ref: catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], 
tenant_id=tenant_ref['id'], metadata=metadata_ref) else: catalog_ref = {} elif 'token' in auth: token = auth['token'].get('id', None) tenant_name = auth.get('tenantName') # more compat if tenant_name: tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) tenant_id = tenant_ref['id'] else: tenant_id = auth.get('tenantId', None) try: old_token_ref = self.token_api.get_token(context=context, token_id=token) except exception.NotFound: raise exception.Unauthorized() user_ref = old_token_ref['user'] # If the user is disabled don't allow them to authenticate current_user_ref = self.identity_api.get_user( context=context, user_id=user_ref['id']) if not current_user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_ref['id']) raise exception.Unauthorized() tenants = self.identity_api.get_tenants_for_user(context, user_ref['id']) if tenant_id and tenant_id not in tenants: raise exception.Unauthorized() try: tenant_ref = self.identity_api.get_tenant( context=context, tenant_id=tenant_id) metadata_ref = self.identity_api.get_metadata( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id']) catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id'], metadata=metadata_ref) except exception.TenantNotFound: tenant_ref = None metadata_ref = {} catalog_ref = {} except exception.MetadataNotFound: metadata_ref = {} catalog_ref = {} # If the tenant is disabled don't allow them to authenticate if tenant_ref and not tenant_ref.get('enabled', True): LOG.warning('Tenant %s is disabled' % tenant_id) raise exception.Unauthorized() token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref, expires=old_token_ref['expires'])) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata roles_ref = [] for role_id in 
metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) logging.debug('TOKEN_REF %s', token_ref) return self._format_authenticate(token_ref, roles_ref, catalog_ref) def _get_token_ref(self, context, token_id, belongs_to=None): """Returns a token if a valid one exists. Optionally, limited to a token owned by a specific tenant. """ # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) token_ref = self.token_api.get_token(context=context, token_id=token_id) if belongs_to: assert token_ref['tenant']['id'] == belongs_to return token_ref # admin only def validate_token_head(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Identical to ``validate_token``, except does not return a response. """ belongs_to = context['query_string'].get("belongsTo") assert self._get_token_ref(context, token_id, belongs_to) # admin only def validate_token(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Returns metadata about the token along any associated roles. 
""" belongs_to = context['query_string'].get("belongsTo") token_ref = self._get_token_ref(context, token_id, belongs_to) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata metadata_ref = token_ref['metadata'] roles_ref = [] for role_id in metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) # Get a service catalog if possible # This is needed for on-behalf-of requests catalog_ref = None if token_ref.get('tenant'): catalog_ref = self.catalog_api.get_catalog( context=context, user_id=token_ref['user']['id'], tenant_id=token_ref['tenant']['id'], metadata=metadata_ref) return self._format_token(token_ref, roles_ref, catalog_ref) def delete_token(self, context, token_id): """Delete a token, effectively invalidating it for authz.""" # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) self.token_api.delete_token(context=context, token_id=token_id) def endpoints(self, context, token_id): """Return a list of endpoints available to the token.""" raise exception.NotImplemented() def _format_authenticate(self, token_ref, roles_ref, catalog_ref): o = self._format_token(token_ref, roles_ref) o['access']['serviceCatalog'] = self._format_catalog(catalog_ref) return o def _format_token(self, token_ref, roles_ref, catalog_ref=None): user_ref = token_ref['user'] metadata_ref = token_ref['metadata'] expires = token_ref['expires'] if expires is not None: expires = timeutils.isotime(expires) o = {'access': {'token': {'id': token_ref['id'], 'expires': expires, }, 'user': {'id': user_ref['id'], 'name': user_ref['name'], 'username': user_ref['name'], 'roles': roles_ref, 'roles_links': metadata_ref.get('roles_links', []) } } } if 'tenant' in token_ref and token_ref['tenant']: token_ref['tenant']['enabled'] = True o['access']['token']['tenant'] = token_ref['tenant'] if catalog_ref is not None: o['access']['serviceCatalog'] = 
self._format_catalog(catalog_ref) return o def _format_catalog(self, catalog_ref): """Munge catalogs from internal to output format Internal catalogs look like: {$REGION: { {$SERVICE: { $key1: $value1, ... } } } The legacy api wants them to look like [{'name': $SERVICE[name], 'type': $SERVICE, 'endpoints': [{ 'tenantId': $tenant_id, ... 'region': $REGION, }], 'endpoints_links': [], }] """ if not catalog_ref: return {} services = {} for region, region_ref in catalog_ref.iteritems(): for service, service_ref in region_ref.iteritems(): new_service_ref = services.get(service, {}) new_service_ref['name'] = service_ref.pop('name') new_service_ref['type'] = service new_service_ref['endpoints_links'] = [] service_ref['region'] = region endpoints_ref = new_service_ref.get('endpoints', []) endpoints_ref.append(service_ref) new_service_ref['endpoints'] = endpoints_ref services[service] = new_service_ref return services.values() class ExtensionsController(wsgi.Application): """Base extensions controller to be extended by public and admin API's.""" def __init__(self, extensions=None): super(ExtensionsController, self).__init__() self.extensions = extensions or {} def get_extensions_info(self, context): return {'extensions': {'values': self.extensions.values()}} def get_extension_info(self, context, extension_alias): try: return {'extension': self.extensions[extension_alias]} except KeyError: raise exception.NotFound(target=extension_alias) class PublicExtensionsController(ExtensionsController): pass class AdminExtensionsController(ExtensionsController): def __init__(self, *args, **kwargs): super(AdminExtensionsController, self).__init__(*args, **kwargs) # TODO(dolph): Extensions should obviously provide this information # themselves, but hardcoding it here allows us to match # the API spec in the short term with minimal complexity. 
        # Advertise the OS-KSADM admin-operations extension.  Hardcoded
        # here (rather than registered by the extension itself) to match
        # the API spec with minimal complexity -- see the TODO above.
        self.extensions['OS-KSADM'] = {
            'name': 'Openstack Keystone Admin',
            'namespace': 'http://docs.openstack.org/identity/api/ext/'
                         'OS-KSADM/v1.0',
            'alias': 'OS-KSADM',
            'updated': '2011-08-19T13:25:27-06:00',
            'description': 'Openstack extensions to Keystone v2.0 API '
                           'enabling Admin Operations.',
            'links': [
                {
                    'rel': 'describedby',
                    # TODO(dolph): link needs to be revised after
                    #              bug 928059 merges
                    'type': 'text/html',
                    'href': 'https://github.com/openstack/identity-api',
                }
            ]
        }


# Paste-deploy application factories.  Each copies the global config,
# overlays the local config, and returns the matching WSGI router.
# NOTE(review): the merged `conf` is not passed to the routers below --
# presumably the routers read global CONF themselves; confirm before
# relying on local_conf overrides here.
@logging.fail_gracefully
def public_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return PublicRouter()


@logging.fail_gracefully
def admin_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return AdminRouter()


@logging.fail_gracefully
def public_version_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return PublicVersionRouter()


@logging.fail_gracefully
def admin_version_app_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)
    return AdminVersionRouter()
./CrossVul/dataset_final_sorted/CWE-287/py/good_3761_0
crossvul-python_data_bad_1224_6
# Documentation for Zulip's authentication backends is split across a few places: # # * https://zulip.readthedocs.io/en/latest/production/authentication-methods.html and # zproject/prod_settings_template.py have user-level configuration documentation. # * https://zulip.readthedocs.io/en/latest/development/authentication.html # has developer-level documentation, especially on testing authentication backends # in the Zulip development environment. # # Django upstream's documentation for authentication backends is also # helpful background. The most important detail to understand for # reading this file is that the Django authenticate() function will # call the authenticate methods of all backends registered in # settings.AUTHENTICATION_BACKENDS that have a function signature # matching the args/kwargs passed in the authenticate() call. import copy import logging import magic import ujson from typing import Any, Dict, List, Optional, Set, Tuple, Union from typing_extensions import TypedDict from django_auth_ldap.backend import LDAPBackend, LDAPReverseEmailSearch, \ _LDAPUser, ldap_error from django.contrib.auth import get_backends from django.contrib.auth.backends import RemoteUserBackend from django.conf import settings from django.core.exceptions import ValidationError from django.core.validators import validate_email from django.dispatch import receiver, Signal from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.urls import reverse from requests import HTTPError from onelogin.saml2.errors import OneLogin_Saml2_Error from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, \ GithubTeamOAuth2 from social_core.backends.azuread import AzureADOAuth2 from social_core.backends.base import BaseAuth from social_core.backends.google import GoogleOAuth2 from social_core.backends.oauth import BaseOAuth2 from social_core.backends.saml import SAMLAuth from social_core.pipeline.partial import partial 
from social_core.exceptions import AuthFailed, SocialAuthBaseException from zerver.lib.actions import do_create_user, do_reactivate_user, do_deactivate_user, \ do_update_user_custom_profile_data_if_changed, validate_email_for_realm from zerver.lib.avatar import is_avatar_new, avatar_url from zerver.lib.avatar_hash import user_avatar_content_hash from zerver.lib.dev_ldap_directory import init_fakeldap from zerver.lib.request import JsonableError from zerver.lib.users import check_full_name, validate_user_custom_profile_field from zerver.lib.utils import generate_random_token from zerver.lib.redis_utils import get_redis_client from zerver.models import CustomProfileField, DisposableEmailError, DomainNotAllowedForRealmError, \ EmailContainsPlusError, PreregistrationUser, UserProfile, Realm, custom_profile_fields_for_realm, \ email_allowed_for_realm, get_default_stream_groups, get_user_profile_by_id, remote_user_to_email, \ email_to_username, get_realm, get_user_by_delivery_email, supported_auth_backends redis_client = get_redis_client() # This first batch of methods is used by other code in Zulip to check # whether a given authentication backend is enabled for a given realm. # In each case, we both needs to check at the server level (via # `settings.AUTHENTICATION_BACKENDS`, queried via # `django.contrib.auth.get_backends`) and at the realm level (via the # `Realm.authentication_methods` BitField). 
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
    """Pads an authentication methods dict to contain all auth backends
    supported by the software, regardless of whether they are
    configured on this server"""
    # Mutates and returns the same dict; missing backends default to False.
    for key in AUTH_BACKEND_NAME_MAP:
        if key not in method_dict:
            method_dict[key] = False
    return method_dict

def auth_enabled_helper(backends_to_check: List[str],
                        realm: Optional[Realm]) -> bool:
    """Return True if any of the named backends is enabled.

    A backend counts as enabled only if it is both configured at the
    server level (present in supported_auth_backends()) and enabled for
    the realm's authentication_methods bitfield.  With realm=None, only
    the server-level check applies (every realm flag is treated as True).
    """
    if realm is not None:
        enabled_method_dict = realm.authentication_methods_dict()
        pad_method_dict(enabled_method_dict)
    else:
        enabled_method_dict = dict((method, True) for method in Realm.AUTHENTICATION_FLAGS)
        pad_method_dict(enabled_method_dict)
    for supported_backend in supported_auth_backends():
        for backend_name in backends_to_check:
            backend = AUTH_BACKEND_NAME_MAP[backend_name]
            if enabled_method_dict[backend_name] and isinstance(supported_backend, backend):
                return True
    return False

# Thin per-backend convenience wrappers around auth_enabled_helper.
def ldap_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['LDAP'], realm)

def email_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['Email'], realm)

def password_auth_enabled(realm: Optional[Realm]=None) -> bool:
    # Password login is possible via either LDAP or Zulip's own
    # email/password backend.
    return ldap_auth_enabled(realm) or email_auth_enabled(realm)

def dev_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['Dev'], realm)

def google_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['Google'], realm)

def github_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['GitHub'], realm)

def saml_auth_enabled(realm: Optional[Realm]=None) -> bool:
    return auth_enabled_helper(['SAML'], realm)

def any_social_backend_enabled(realm: Optional[Realm]=None) -> bool:
    """Used by the login page process to determine whether to show the
    'OR' for login with Google"""
    social_backend_names = [social_auth_subclass.auth_backend_name
                            for social_auth_subclass in SOCIAL_AUTH_BACKENDS]
    return auth_enabled_helper(social_backend_names, realm)

def
redirect_to_config_error(error_type: str) -> HttpResponseRedirect: return HttpResponseRedirect("/config-error/%s" % (error_type,)) def require_email_format_usernames(realm: Optional[Realm]=None) -> bool: if ldap_auth_enabled(realm): if settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN: return False return True def is_user_active(user_profile: UserProfile, return_data: Optional[Dict[str, Any]]=None) -> bool: if not user_profile.is_active: if return_data is not None: if user_profile.is_mirror_dummy: # Record whether it's a mirror dummy account return_data['is_mirror_dummy'] = True return_data['inactive_user'] = True return False if user_profile.realm.deactivated: if return_data is not None: return_data['inactive_realm'] = True return False return True def common_get_active_user(email: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: """This is the core common function used by essentially all authentication backends to check if there's an active user account with a given email address in the organization, handling both user-level and realm-level deactivation correctly. """ try: user_profile = get_user_by_delivery_email(email, realm) except UserProfile.DoesNotExist: # If the user doesn't have an account in the target realm, we # check whether they might have an account in another realm, # and if so, provide a helpful error message via # `invalid_subdomain`. if not UserProfile.objects.filter(delivery_email__iexact=email).exists(): return None if return_data is not None: return_data['invalid_subdomain'] = True return None if not is_user_active(user_profile, return_data): return None return user_profile class ZulipAuthMixin: """This common mixin is used to override Django's default behavior for looking up a logged-in user by ID to use a version that fetches from memcached before checking the database (avoiding a database query in most cases). 
""" def get_user(self, user_profile_id: int) -> Optional[UserProfile]: """Override the Django method for getting a UserProfile object from the user_profile_id,.""" try: return get_user_profile_by_id(user_profile_id) except UserProfile.DoesNotExist: return None class ZulipDummyBackend(ZulipAuthMixin): """Used when we want to log you in without checking any authentication (i.e. new user registration or when otherwise authentication has already been checked earlier in the process). We ensure that this backend only ever successfully authenticates when explicitly requested by including the use_dummy_backend kwarg. """ def authenticate(self, *, username: str, realm: Realm, use_dummy_backend: bool=False, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: if use_dummy_backend: return common_get_active_user(username, realm, return_data) return None class EmailAuthBackend(ZulipAuthMixin): """ Email+Password Authentication Backend (the default). Allows a user to sign in using an email/password pair. """ def authenticate(self, *, username: str, password: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: """ Authenticate a user based on email address as the user name. """ if not password_auth_enabled(realm): if return_data is not None: return_data['password_auth_disabled'] = True return None if not email_auth_enabled(realm): if return_data is not None: return_data['email_auth_disabled'] = True return None user_profile = common_get_active_user(username, realm, return_data=return_data) if user_profile is None: return None if user_profile.check_password(password): return user_profile return None class ZulipRemoteUserBackend(RemoteUserBackend): """Authentication backend that reads the Apache REMOTE_USER variable. Used primarily in enterprise environments with an SSO solution that has an Apache REMOTE_USER integration. 
For manual testing, see https://zulip.readthedocs.io/en/latest/production/authentication-methods.html See also remote_user_sso in zerver/views/auth.py. """ create_unknown_user = False def authenticate(self, *, remote_user: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: if not auth_enabled_helper(["RemoteUser"], realm): return None email = remote_user_to_email(remote_user) return common_get_active_user(email, realm, return_data=return_data) def is_valid_email(email: str) -> bool: try: validate_email(email) except ValidationError: return False return True def check_ldap_config() -> None: if not settings.LDAP_APPEND_DOMAIN: # Email search needs to be configured in this case. assert settings.AUTH_LDAP_USERNAME_ATTR and settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH def find_ldap_users_by_email(email: str) -> Optional[List[_LDAPUser]]: """ Returns list of _LDAPUsers matching the email search, or None if no matches are found. """ email_search = LDAPReverseEmailSearch(LDAPBackend(), email) return email_search.search_for_users(should_populate=False) def email_belongs_to_ldap(realm: Realm, email: str) -> bool: """Used to make determinations on whether a user's email address is managed by LDAP. For environments using both LDAP and Email+Password authentication, we do not allow EmailAuthBackend authentication for email addresses managed by LDAP (to avoid a security issue where one create separate credentials for an LDAP user), and this function is used to enforce that rule. """ if not ldap_auth_enabled(realm): return False check_ldap_config() if settings.LDAP_APPEND_DOMAIN: # Check if the email ends with LDAP_APPEND_DOMAIN return email.strip().lower().endswith("@" + settings.LDAP_APPEND_DOMAIN) # If we don't have an LDAP domain, we have to do a lookup for the email. 
if find_ldap_users_by_email(email): return True else: return False class ZulipLDAPException(_LDAPUser.AuthenticationFailed): """Since this inherits from _LDAPUser.AuthenticationFailed, these will be caught and logged at debug level inside django-auth-ldap's authenticate()""" pass class ZulipLDAPExceptionNoMatchingLDAPUser(ZulipLDAPException): pass class ZulipLDAPExceptionOutsideDomain(ZulipLDAPExceptionNoMatchingLDAPUser): pass class ZulipLDAPConfigurationError(Exception): pass LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2 class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend): """Common code between LDAP authentication (ZulipLDAPAuthBackend) and using LDAP just to sync user data (ZulipLDAPUserPopulator). To fully understand our LDAP backend, you may want to skim django_auth_ldap/backend.py from the upstream django-auth-ldap library. It's not a lot of code, and searching around in that file makes the flow for LDAP authentication clear. """ def __init__(self) -> None: # Used to initialize a fake LDAP directly for both manual # and automated testing in a development environment where # there is no actual LDAP server. if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE: # nocoverage init_fakeldap() check_ldap_config() # Disable django-auth-ldap's permissions functions -- we don't use # the standard Django user/group permissions system because they # are prone to performance issues. 
def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any=None) -> bool: return False def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool: return False def get_all_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]: return set() def get_group_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]: return set() def django_to_ldap_username(self, username: str) -> str: """ Translates django username (user_profile.email or whatever the user typed in the login field when authenticating via the ldap backend) into ldap username. Guarantees that the username it returns actually has an entry in the ldap directory. Raises ZulipLDAPExceptionNoMatchingLDAPUser if that's not possible. """ result = username if settings.LDAP_APPEND_DOMAIN: if is_valid_email(username): if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN): raise ZulipLDAPExceptionOutsideDomain("Email %s does not match LDAP domain %s." % ( username, settings.LDAP_APPEND_DOMAIN)) result = email_to_username(username) else: # We can use find_ldap_users_by_email if is_valid_email(username): email_search_result = find_ldap_users_by_email(username) if email_search_result is None: result = username elif len(email_search_result) == 1: return email_search_result[0]._username elif len(email_search_result) > 1: # This is possible, but strange, so worth logging a warning about. # We can't translate the email to a unique username, # so we don't do anything else here. logging.warning("Multiple users with email {} found in LDAP.".format(username)) result = username if _LDAPUser(self, result).attrs is None: # Check that there actually is an ldap entry matching the result username # we want to return. Otherwise, raise an exception. 
raise ZulipLDAPExceptionNoMatchingLDAPUser() return result def user_email_from_ldapuser(self, username: str, ldap_user: _LDAPUser) -> str: if hasattr(ldap_user, '_username'): # In tests, we sometimes pass a simplified _LDAPUser without _username attr, # and with the intended username in the username argument. username = ldap_user._username if settings.LDAP_APPEND_DOMAIN: return "@".join((username, settings.LDAP_APPEND_DOMAIN)) if settings.LDAP_EMAIL_ATTR is not None: # Get email from ldap attributes. if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs: raise ZulipLDAPException("LDAP user doesn't have the needed %s attribute" % ( settings.LDAP_EMAIL_ATTR,)) else: return ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0] return username def ldap_to_django_username(self, username: str) -> str: """ This is called inside django_auth_ldap with only one role: to convert _LDAPUser._username to django username (so in Zulip, the email) and pass that as "username" argument to get_or_build_user(username, ldapuser). In many cases, the email is stored in the _LDAPUser's attributes, so it can't be constructed just from the username. We choose to do nothing in this function, and our overrides of get_or_build_user() obtain that username from the _LDAPUser object on their own, through our user_email_from_ldapuser function. """ return username def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None: if 'avatar' in settings.AUTH_LDAP_USER_ATTR_MAP: # We do local imports here to avoid import loops from zerver.lib.upload import upload_avatar_image from zerver.lib.actions import do_change_avatar_fields from io import BytesIO avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP['avatar'] if avatar_attr_name not in ldap_user.attrs: # nocoverage # If this specific user doesn't have e.g. a # thumbnailPhoto set in LDAP, just skip that user. 
return ldap_avatar = ldap_user.attrs[avatar_attr_name][0] avatar_changed = is_avatar_new(ldap_avatar, user) if not avatar_changed: # Don't do work to replace the avatar with itself. return io = BytesIO(ldap_avatar) # Structurally, to make the S3 backend happy, we need to # provide a Content-Type; since that isn't specified in # any metadata, we auto-detect it. content_type = magic.from_buffer(copy.deepcopy(io).read()[0:1024], mime=True) if content_type.startswith("image/"): upload_avatar_image(io, user, user, content_type=content_type) do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER) # Update avatar hash. user.avatar_hash = user_avatar_content_hash(ldap_avatar) user.save(update_fields=["avatar_hash"]) else: logging.warning("Could not parse %s field for user %s" % (avatar_attr_name, user.id)) def is_account_control_disabled_user(self, ldap_user: _LDAPUser) -> bool: """Implements the userAccountControl check for whether a user has been disabled in an Active Directory server being integrated with Zulip via LDAP.""" account_control_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP['userAccountControl']][0] ldap_disabled = bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK) return ldap_disabled @classmethod def get_mapped_name(cls, ldap_user: _LDAPUser) -> Tuple[str, str]: """Constructs the user's Zulip full_name and short_name fields from the LDAP data""" if "full_name" in settings.AUTH_LDAP_USER_ATTR_MAP: full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"] short_name = full_name = ldap_user.attrs[full_name_attr][0] elif all(key in settings.AUTH_LDAP_USER_ATTR_MAP for key in {"first_name", "last_name"}): first_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["first_name"] last_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["last_name"] short_name = ldap_user.attrs[first_name_attr][0] full_name = short_name + ' ' + ldap_user.attrs[last_name_attr][0] else: raise ZulipLDAPException("Missing required mapping for user's full 
name") if "short_name" in settings.AUTH_LDAP_USER_ATTR_MAP: short_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["short_name"] short_name = ldap_user.attrs[short_name_attr][0] return full_name, short_name def sync_full_name_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None: from zerver.lib.actions import do_change_full_name full_name, _ = self.get_mapped_name(ldap_user) if full_name != user_profile.full_name: try: full_name = check_full_name(full_name) except JsonableError as e: raise ZulipLDAPException(e.msg) do_change_full_name(user_profile, full_name, None) def sync_custom_profile_fields_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None: values_by_var_name = {} # type: Dict[str, Union[int, str, List[int]]] for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items(): if not attr.startswith('custom_profile_field__'): continue var_name = attr.split('custom_profile_field__')[1] try: value = ldap_user.attrs[ldap_attr][0] except KeyError: # If this user doesn't have this field set then ignore this # field and continue syncing other fields. `django-auth-ldap` # automatically logs error about missing field. continue values_by_var_name[var_name] = value fields_by_var_name = {} # type: Dict[str, CustomProfileField] custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id) for field in custom_profile_fields: var_name = '_'.join(field.name.lower().split(' ')) fields_by_var_name[var_name] = field existing_values = {} for data in user_profile.profile_data: var_name = '_'.join(data['name'].lower().split(' ')) existing_values[var_name] = data['value'] profile_data = [] # type: List[Dict[str, Union[int, str, List[int]]]] for var_name, value in values_by_var_name.items(): try: field = fields_by_var_name[var_name] except KeyError: raise ZulipLDAPException('Custom profile field with name %s not found.' 
                                             % (var_name,))
            if existing_values.get(var_name) == value:
                # Value unchanged; skip the update for this field.
                continue
            result = validate_user_custom_profile_field(user_profile.realm.id, field, value)
            if result is not None:
                raise ZulipLDAPException('Invalid data for %s field: %s' % (var_name, result))
            profile_data.append({
                'id': field.id,
                'value': value,
            })
        do_update_user_custom_profile_data_if_changed(user_profile, profile_data)

class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
    REALM_IS_NONE_ERROR = 1

    def authenticate(self, *, username: str, password: str, realm: Realm,
                     prereg_user: Optional[PreregistrationUser]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        # Stash realm/prereg_user on self for use in get_or_build_user,
        # which django-auth-ldap calls without these arguments.
        self._realm = realm
        self._prereg_user = prereg_user
        if not ldap_auth_enabled(realm):
            return None

        try:
            # We want to pass the user's LDAP username into
            # authenticate() below.  If an email address was entered
            # in the login form, we need to use
            # django_to_ldap_username to translate the email address
            # to the user's LDAP username before calling the
            # django-auth-ldap authenticate().
            username = self.django_to_ldap_username(username)
        except ZulipLDAPExceptionNoMatchingLDAPUser:
            if return_data is not None:
                return_data['no_matching_ldap_user'] = True
            return None

        # Call into (ultimately) the django-auth-ldap authenticate
        # function.  This will check the username/password pair
        # against the LDAP database, and assuming those are correct,
        # end up calling `self.get_or_build_user` with the
        # authenticated user's data from LDAP.
        return ZulipLDAPAuthBackendBase.authenticate(self,
                                                     request=None,
                                                     username=username,
                                                     password=password)

    def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
        """The main function of our authentication backend extension of
        django-auth-ldap.  When this is called (from `authenticate`),
        django-auth-ldap will already have verified that the provided
        username and password match those in the LDAP database.
This function's responsibility is to check (1) whether the email address for this user obtained from LDAP has an active account in this Zulip realm. If so, it will log them in. Otherwise, to provide a seamless Single Sign-On experience with LDAP, this function can automatically create a new Zulip user account in the realm (assuming the realm is configured to allow that email address to sign up). """ return_data = {} # type: Dict[str, Any] username = self.user_email_from_ldapuser(username, ldap_user) if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: # nocoverage ldap_disabled = self.is_account_control_disabled_user(ldap_user) if ldap_disabled: # Treat disabled users as deactivated in Zulip. return_data["inactive_user"] = True raise ZulipLDAPException("User has been deactivated") user_profile = common_get_active_user(username, self._realm, return_data) if user_profile is not None: # An existing user, successfully authed; return it. return user_profile, False if return_data.get("inactive_realm"): # This happens if there is a user account in a deactivated realm raise ZulipLDAPException("Realm has been deactivated") if return_data.get("inactive_user"): raise ZulipLDAPException("User has been deactivated") # An invalid_subdomain `return_data` value here is ignored, # since that just means we're trying to create an account in a # second realm on the server (`ldap_auth_enabled(realm)` would # have been false if this user wasn't meant to have an account # in this second realm). if self._realm.deactivated: # This happens if no account exists, but the realm is # deactivated, so we shouldn't create a new user account raise ZulipLDAPException("Realm has been deactivated") # Makes sure that email domain hasn't be restricted for this # realm. The main thing here is email_allowed_for_realm; but # we also call validate_email_for_realm just for consistency, # even though its checks were already done above. 
try: email_allowed_for_realm(username, self._realm) validate_email_for_realm(self._realm, username) except DomainNotAllowedForRealmError: raise ZulipLDAPException("This email domain isn't allowed in this organization.") except (DisposableEmailError, EmailContainsPlusError): raise ZulipLDAPException("Email validation failed.") # We have valid LDAP credentials; time to create an account. full_name, short_name = self.get_mapped_name(ldap_user) try: full_name = check_full_name(full_name) except JsonableError as e: raise ZulipLDAPException(e.msg) opts = {} # type: Dict[str, Any] if self._prereg_user: invited_as = self._prereg_user.invited_as realm_creation = self._prereg_user.realm_creation opts['prereg_user'] = self._prereg_user opts['is_realm_admin'] = ( invited_as == PreregistrationUser.INVITE_AS['REALM_ADMIN']) or realm_creation opts['is_guest'] = invited_as == PreregistrationUser.INVITE_AS['GUEST_USER'] opts['realm_creation'] = realm_creation opts['default_stream_groups'] = get_default_stream_groups(self._realm) user_profile = do_create_user(username, None, self._realm, full_name, short_name, **opts) self.sync_avatar_from_ldap(user_profile, ldap_user) self.sync_custom_profile_fields_from_ldap(user_profile, ldap_user) return user_profile, True class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase): """Just like ZulipLDAPAuthBackend, but doesn't let you log in. Used for syncing data like names, avatars, and custom profile fields from LDAP in `manage.py sync_ldap_user_data` as well as in registration for organizations that use a different SSO solution for managing login (often via RemoteUserBackend). 
""" def authenticate(self, *, username: str, password: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: return None def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]: """This is used only in non-authentication contexts such as: ./manage.py sync_ldap_user_data """ # Obtain the django username from the ldap_user object: username = self.user_email_from_ldapuser(username, ldap_user) # Call the library get_or_build_user for building the UserProfile # with the username we obtained: (user, built) = super().get_or_build_user(username, ldap_user) # Synchronise the UserProfile with its LDAP attributes: if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: user_disabled_in_ldap = self.is_account_control_disabled_user(ldap_user) if user_disabled_in_ldap: if user.is_active: logging.info("Deactivating user %s because they are disabled in LDAP." % (user.email,)) do_deactivate_user(user) # Do an early return to avoid trying to sync additional data. return (user, built) elif not user.is_active: logging.info("Reactivating user %s because they are not disabled in LDAP." % (user.email,)) do_reactivate_user(user) self.sync_avatar_from_ldap(user, ldap_user) self.sync_full_name_from_ldap(user, ldap_user) self.sync_custom_profile_fields_from_ldap(user, ldap_user) return (user, built) class PopulateUserLDAPError(ZulipLDAPException): pass @receiver(ldap_error, sender=ZulipLDAPUserPopulator) def catch_ldap_error(signal: Signal, **kwargs: Any) -> None: """ Inside django_auth_ldap populate_user(), if LDAPError is raised, e.g. due to invalid connection credentials, the function catches it and emits a signal (ldap_error) to communicate this error to others. We normally don't use signals, but here there's no choice, so in this function we essentially convert the signal to a normal exception that will properly propagate out of django_auth_ldap internals. 
""" if kwargs['context'] == 'populate_user': # The exception message can contain the password (if it was invalid), # so it seems better not to log that, and only use the original exception's name here. raise PopulateUserLDAPError(kwargs['exception'].__class__.__name__) def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bool: backend = ZulipLDAPUserPopulator() try: ldap_username = backend.django_to_ldap_username(user_profile.email) except ZulipLDAPExceptionNoMatchingLDAPUser: if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS: do_deactivate_user(user_profile) logger.info("Deactivated non-matching user: %s" % (user_profile.email,)) return True elif user_profile.is_active: logger.warning("Did not find %s in LDAP." % (user_profile.email,)) return False updated_user = backend.populate_user(ldap_username) if updated_user: logger.info("Updated %s." % (user_profile.email,)) return True raise PopulateUserLDAPError("populate_user unexpectedly returned {}".format(updated_user)) # Quick tool to test whether you're correctly authenticating to LDAP def query_ldap(email: str) -> List[str]: values = [] backend = next((backend for backend in get_backends() if isinstance(backend, LDAPBackend)), None) if backend is not None: try: ldap_username = backend.django_to_ldap_username(email) except ZulipLDAPExceptionNoMatchingLDAPUser: values.append("No such user found") return values ldap_attrs = _LDAPUser(backend, ldap_username).attrs for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items(): value = ldap_attrs.get(ldap_field, ["LDAP field not present", ])[0] if django_field == "avatar": if isinstance(value, bytes): value = "(An avatar image file)" values.append("%s: %s" % (django_field, value)) if settings.LDAP_EMAIL_ATTR is not None: values.append("%s: %s" % ('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0])) else: values.append("LDAP backend not configured on this server.") return values class DevAuthBackend(ZulipAuthMixin): """Allow logging in as 
any user without a password. This is used for convenience when developing Zulip, and is disabled in production.""" def authenticate(self, *, dev_auth_username: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: if not dev_auth_enabled(realm): return None return common_get_active_user(dev_auth_username, realm, return_data=return_data) def redirect_deactivated_user_to_login() -> HttpResponseRedirect: # Specifying the template name makes sure that the user is not redirected to dev_login in case of # a deactivated account on a test server. login_url = reverse('zerver.views.auth.login_page', kwargs = {'template_name': 'zerver/login.html'}) redirect_url = login_url + '?is_deactivated=true' return HttpResponseRedirect(redirect_url) def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any], *args: Any, **kwargs: Any) -> Optional[UserProfile]: """Responsible for doing the Zulip-account lookup and validation parts of the Zulip Social auth pipeline (similar to the authenticate() methods in most other auth backends in this file). Returns a UserProfile object for successful authentication, and None otherwise. """ subdomain = backend.strategy.session_get('subdomain') try: realm = get_realm(subdomain) except Realm.DoesNotExist: return_data["invalid_realm"] = True return None return_data["realm_id"] = realm.id if not auth_enabled_helper([backend.auth_backend_name], realm): return_data["auth_backend_disabled"] = True return None if 'auth_failed_reason' in kwargs.get('response', {}): return_data["social_auth_failed_reason"] = kwargs['response']["auth_failed_reason"] return None elif hasattr(backend, 'get_verified_emails'): # Some social backends, like GitHubAuthBackend, don't # guarantee that the `details` data is validated (i.e., it's # possible users can put any string they want in the "email" # field of the `details` object). 
For those backends, we have # custom per-backend code to properly fetch only verified # email addresses from the appropriate third-party API. verified_emails = backend.get_verified_emails(*args, **kwargs) verified_emails_length = len(verified_emails) if verified_emails_length == 0: # TODO: Provide a nice error message screen to the user # for this case, rather than just logging a warning. logging.warning("Social auth (%s) failed because user has no verified emails" % (backend.auth_backend_name,)) return_data["email_not_verified"] = True return None if verified_emails_length == 1: chosen_email = verified_emails[0] else: chosen_email = backend.strategy.request_data().get('email') if not chosen_email: avatars = {} # Dict[str, str] for email in verified_emails: existing_account = common_get_active_user(email, realm, {}) if existing_account is not None: avatars[email] = avatar_url(existing_account) return render(backend.strategy.request, 'zerver/social_auth_select_email.html', context = { 'primary_email': verified_emails[0], 'verified_non_primary_emails': verified_emails[1:], 'backend': 'github', 'avatar_urls': avatars, }) try: validate_email(chosen_email) except ValidationError: return_data['invalid_email'] = True return None if chosen_email not in verified_emails: # If a user edits the submit value for the choose email form, we might # end up with a wrong email associated with the account. The below code # takes care of that. logging.warning("Social auth (%s) failed because user has no verified" " emails associated with the account" % (backend.auth_backend_name,)) return_data["email_not_associated"] = True return None validated_email = chosen_email else: try: validate_email(kwargs["details"].get("email")) except ValidationError: return_data['invalid_email'] = True return None validated_email = kwargs["details"].get("email") if not validated_email: # nocoverage # This code path isn't used with GitHubAuthBackend, but may be relevant for other # social auth backends. 
return_data['invalid_email'] = True return None return_data["valid_attestation"] = True return_data['validated_email'] = validated_email user_profile = common_get_active_user(validated_email, realm, return_data) full_name = kwargs['details'].get('fullname') first_name = kwargs['details'].get('first_name', '') last_name = kwargs['details'].get('last_name', '') if full_name is None: if not first_name and not last_name: # If we add support for any of the social auth backends that # don't provide this feature, we'll need to add code here. raise AssertionError("Social auth backend doesn't provide name") if full_name: return_data["full_name"] = full_name else: # In SAML authentication, the IdP may support only sending # the first and last name as separate attributes - in that case # we construct the full name from them. return_data["full_name"] = "{} {}".format( first_name, last_name ).strip() # strip removes the unnecessary ' ' return user_profile @partial def social_auth_associate_user( backend: BaseAuth, *args: Any, **kwargs: Any) -> Union[HttpResponse, Dict[str, Any]]: """A simple wrapper function to reformat the return data from social_associate_user_helper as a dictionary. The python-social-auth infrastructure will then pass those values into later stages of settings.SOCIAL_AUTH_PIPELINE, such as social_auth_finish, as kwargs. 
""" partial_token = backend.strategy.request_data().get('partial_token') return_data = {} # type: Dict[str, Any] user_profile = social_associate_user_helper( backend, return_data, *args, **kwargs) if type(user_profile) == HttpResponse: return user_profile else: return {'user_profile': user_profile, 'return_data': return_data, 'partial_token': partial_token, 'partial_backend_name': backend} def social_auth_finish(backend: Any, details: Dict[str, Any], response: HttpResponse, *args: Any, **kwargs: Any) -> Optional[UserProfile]: """Given the determination in social_auth_associate_user for whether the user should be authenticated, this takes care of actually logging in the user (if appropriate) and redirecting the browser to the appropriate next page depending on the situation. Read the comments below as well as login_or_register_remote_user in `zerver/views/auth.py` for the details on how that dispatch works. """ from zerver.views.auth import (login_or_register_remote_user, redirect_and_log_into_subdomain) user_profile = kwargs['user_profile'] return_data = kwargs['return_data'] no_verified_email = return_data.get("email_not_verified") auth_backend_disabled = return_data.get('auth_backend_disabled') inactive_user = return_data.get('inactive_user') inactive_realm = return_data.get('inactive_realm') invalid_realm = return_data.get('invalid_realm') invalid_email = return_data.get('invalid_email') auth_failed_reason = return_data.get("social_auth_failed_reason") email_not_associated = return_data.get("email_not_associated") if invalid_realm: from zerver.views.auth import redirect_to_subdomain_login_url return redirect_to_subdomain_login_url() if inactive_user: return redirect_deactivated_user_to_login() if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated: # Redirect to login page. We can't send to registration # workflow with these errors. We will redirect to login page. 
return None if invalid_email: # In case of invalid email, we will end up on registration page. # This seems better than redirecting to login page. logging.warning( "{} got invalid email argument.".format(backend.auth_backend_name) ) return None if auth_failed_reason: logging.info(auth_failed_reason) return None # Structurally, all the cases where we don't have an authenticated # email for the user should be handled above; this assertion helps # prevent any violations of that contract from resulting in a user # being incorrectly authenticated. assert return_data.get('valid_attestation') is True strategy = backend.strategy full_name_validated = backend.full_name_validated email_address = return_data['validated_email'] full_name = return_data['full_name'] is_signup = strategy.session_get('is_signup') == '1' redirect_to = strategy.session_get('next') realm = Realm.objects.get(id=return_data["realm_id"]) multiuse_object_key = strategy.session_get('multiuse_object_key', '') mobile_flow_otp = strategy.session_get('mobile_flow_otp') # At this point, we have now confirmed that the user has # demonstrated control over the target email address. # # The next step is to call login_or_register_remote_user, but # there are two code paths here because of an optimization to save # a redirect on mobile. if mobile_flow_otp is not None: # For mobile app authentication, login_or_register_remote_user # will redirect to a special zulip:// URL that is handled by # the app after a successful authentication; so we can # redirect directly from here, saving a round trip over what # we need to do to create session cookies on the right domain # in the web login flow (below). 
return login_or_register_remote_user( strategy.request, email_address, user_profile, full_name, mobile_flow_otp=mobile_flow_otp, is_signup=is_signup, redirect_to=redirect_to, full_name_validated=full_name_validated ) # If this authentication code were executing on # subdomain.zulip.example.com, we would just call # login_or_register_remote_user as in the mobile code path. # However, because third-party SSO providers generally don't allow # wildcard addresses in their redirect URLs, for multi-realm # servers, we will have just completed authentication on e.g. # auth.zulip.example.com (depending on # settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on # the subdomain.zulip.example.com domain. So instead we serve a # redirect (encoding the authentication result data in a # cryptographically signed token) to a route on # subdomain.zulip.example.com that will verify the signature and # then call login_or_register_remote_user. return redirect_and_log_into_subdomain( realm, full_name, email_address, is_signup=is_signup, redirect_to=redirect_to, multiuse_object_key=multiuse_object_key, full_name_validated=full_name_validated ) class SocialAuthMixin(ZulipAuthMixin): auth_backend_name = "undeclared" name = "undeclared" display_icon = None # type: Optional[str] # Used to determine how to order buttons on login form, backend with # higher sort order are displayed first. sort_order = 0 # Whether we expect that the full_name value obtained by the # social backend is definitely how the user should be referred to # in Zulip, which in turn determines whether we should always show # a registration form in the event with a default value of the # user's name when using this social backend so they can change # it. 
For social backends like SAML that are expected to be a # central database, this should be True; for backends like GitHub # where the user might not have a name set or have it set to # something other than the name they will prefer to use in Zulip, # it should be False. full_name_validated = False def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]: """This is a small wrapper around the core `auth_complete` method of python-social-auth, designed primarily to prevent 500s for exceptions in the social auth code from situations that are really user errors. Returning `None` from this function will redirect the browser to the login page. """ try: # Call the auth_complete method of social_core.backends.oauth.BaseOAuth2 return super().auth_complete(*args, **kwargs) # type: ignore # monkey-patching except AuthFailed as e: # When a user's social authentication fails (e.g. because # they did something funny with reloading in the middle of # the flow), don't throw a 500, just send them back to the # login page and record the event at the info log level. logging.info(str(e)) return None except SocialAuthBaseException as e: # Other python-social-auth exceptions are likely # interesting enough that we should log a warning. 
logging.warning(str(e)) return None class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2): name = "github" auth_backend_name = "GitHub" sort_order = 100 display_icon = "/static/images/landing-page/logos/github-icon.png" def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]: access_token = kwargs["response"]["access_token"] try: emails = self._user_data(access_token, '/emails') except (HTTPError, ValueError, TypeError): # nocoverage # We don't really need an explicit test for this code # path, since the outcome will be the same as any other # case without any verified emails emails = [] verified_emails = [] # type: List[str] for email_obj in self.filter_usable_emails(emails): # social_associate_user_helper assumes that the first email in # verified_emails is primary. if email_obj.get("primary"): verified_emails.insert(0, email_obj["email"]) else: verified_emails.append(email_obj["email"]) return verified_emails def filter_usable_emails(self, emails: List[Dict[str, Any]]) -> List[Dict[str, Any]]: # We only let users login using email addresses that are # verified by GitHub, because the whole point is for the user # to demonstrate that they control the target email address. # We also disallow the # @noreply.github.com/@users.noreply.github.com email # addresses, because structurally, we only want to allow email # addresses that can receive emails, and those cannot. return [ email for email in emails if email.get('verified') and not email["email"].endswith("noreply.github.com") ] def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]: """This patched user_data function lets us combine together the 3 social auth backends into a single Zulip backend for GitHub Oauth2""" team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME if team_id is None and org_name is None: # I believe this can't raise AuthFailed, so we don't try to catch it here. 
return super().user_data( access_token, *args, **kwargs ) elif team_id is not None: backend = GithubTeamOAuth2(self.strategy, self.redirect_uri) try: return backend.user_data(access_token, *args, **kwargs) except AuthFailed: return dict(auth_failed_reason="GitHub user is not member of required team") elif org_name is not None: backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri) try: return backend.user_data(access_token, *args, **kwargs) except AuthFailed: return dict(auth_failed_reason="GitHub user is not member of required organization") raise AssertionError("Invalid configuration") class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2): sort_order = 50 name = "azuread-oauth2" auth_backend_name = "AzureAD" display_icon = "/static/images/landing-page/logos/azuread-icon.png" class GoogleAuthBackend(SocialAuthMixin, GoogleOAuth2): sort_order = 150 auth_backend_name = "Google" name = "google" display_icon = "/static/images/landing-page/logos/googl_e-icon.png" def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]: verified_emails = [] # type: List[str] details = kwargs["response"] email_verified = details.get("email_verified") if email_verified: verified_emails.append(details["email"]) return verified_emails class SAMLAuthBackend(SocialAuthMixin, SAMLAuth): auth_backend_name = "SAML" standard_relay_params = ["subdomain", "multiuse_object_key", "mobile_flow_otp", "next", "is_signup"] REDIS_EXPIRATION_SECONDS = 60 * 15 name = "saml" # Organization which go through the trouble of setting up SAML are most likely # to have it as their main authentication method, so it seems appropriate to have # SAML buttons at the top. sort_order = 9999 # There's no common default logo for SAML authentication. display_icon = None # The full_name provided by the IdP is very likely the standard # employee directory name for the user, and thus what they and # their organization want to use in Zulip. 
So don't unnecessarily # provide a registration flow prompt for them to set their name. full_name_validated = True def auth_url(self) -> str: """Get the URL to which we must redirect in order to authenticate the user. Overriding the original SAMLAuth.auth_url. Runs when someone accesses the /login/saml/ endpoint.""" try: idp_name = self.strategy.request_data()['idp'] auth = self._create_saml_auth(idp=self.get_idp(idp_name)) except KeyError: # If the above raise KeyError, it means invalid or no idp was specified, # we should log that and redirect to the login page. logging.info("/login/saml/ : Bad idp param.") return reverse('zerver.views.auth.login_page', kwargs = {'template_name': 'zerver/login.html'}) # This where we change things. We need to pass some params # (`mobile_flow_otp`, `next`, etc.) through RelayState, which # then the IdP will pass back to us so we can read those # parameters in the final part of the authentication flow, at # the /complete/saml/ endpoint. # # To protect against network eavesdropping of these # parameters, we send just a random token to the IdP in # RelayState, which is used as a key into our redis data store # for fetching the actual parameters after the IdP has # returned a successful authentication. 
params_to_relay = ["idp"] + self.standard_relay_params request_data = self.strategy.request_data().dict() data_to_relay = { key: request_data[key] for key in params_to_relay if key in request_data } relay_state = self.put_data_in_redis(data_to_relay) return auth.login(return_to=relay_state) @classmethod def put_data_in_redis(cls, data_to_relay: Dict[str, Any]) -> str: with redis_client.pipeline() as pipeline: token = generate_random_token(64) key = "saml_token_{}".format(token) pipeline.set(key, ujson.dumps(data_to_relay)) pipeline.expire(key, cls.REDIS_EXPIRATION_SECONDS) pipeline.execute() return key @classmethod def get_data_from_redis(cls, key: str) -> Optional[Dict[str, Any]]: redis_data = None if key.startswith('saml_token_'): # Safety if statement, to not allow someone to poke around arbitrary redis keys here. redis_data = redis_client.get(key) if redis_data is None: # TODO: We will need some sort of user-facing message # about the authentication session having expired here. logging.info("SAML authentication failed: bad RelayState token.") return None return ujson.loads(redis_data) def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]: """ Additional ugly wrapping on top of auth_complete in SocialAuthMixin. We handle two things here: 1. Working around bad RelayState or SAMLResponse parameters in the request. Both parameters should be present if the user came to /complete/saml/ through the IdP as intended. The errors can happen if someone simply types the endpoint into their browsers, or generally tries messing with it in some ways. 2. The first part of our SAML authentication flow will encode important parameters into the RelayState. We need to read them and set those values in the session, and then change the RelayState param to the idp_name, because that's what SAMLAuth.auth_complete() expects. 
""" if 'RelayState' not in self.strategy.request_data(): logging.info("SAML authentication failed: missing RelayState.") return None # Set the relevant params that we transported in the RelayState: redis_key = self.strategy.request_data()['RelayState'] relayed_params = self.get_data_from_redis(redis_key) if relayed_params is None: return None result = None try: for param, value in relayed_params.items(): if param in self.standard_relay_params: self.strategy.session_set(param, value) # super().auth_complete expects to have RelayState set to the idp_name, # so we need to replace this param. post_params = self.strategy.request.POST.copy() post_params['RelayState'] = relayed_params["idp"] self.strategy.request.POST = post_params # Call the auth_complete method of SocialAuthMixIn result = super().auth_complete(*args, **kwargs) # type: ignore # monkey-patching except OneLogin_Saml2_Error as e: # This will be raised if SAMLResponse is missing. logging.info(str(e)) # Fall through to returning None. finally: if result is None: for param in self.standard_relay_params: # If an attacker managed to eavesdrop on the RelayState token, # they may pass it here to the endpoint with an invalid SAMLResponse. # We remove these potentially sensitive parameters that we have set in the session # ealier, to avoid leaking their values. 
self.strategy.session_set(param, None) return result @classmethod def check_config(cls) -> Optional[HttpResponse]: obligatory_saml_settings_list = [ settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID, settings.SOCIAL_AUTH_SAML_ORG_INFO, settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT, settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT, settings.SOCIAL_AUTH_SAML_ENABLED_IDPS ] if any(not setting for setting in obligatory_saml_settings_list): return redirect_to_config_error("saml") return None SocialBackendDictT = TypedDict('SocialBackendDictT', { 'name': str, 'display_name': str, 'display_icon': Optional[str], 'login_url': str, 'signup_url': str, }) def create_standard_social_backend_dict(social_backend: SocialAuthMixin) -> SocialBackendDictT: return dict( name=social_backend.name, display_name=social_backend.auth_backend_name, display_icon=social_backend.display_icon, login_url=reverse('login-social', args=(social_backend.name,)), signup_url=reverse('signup-social', args=(social_backend.name,)), ) def list_saml_backend_dicts(realm: Optional[Realm]=None) -> List[SocialBackendDictT]: result = [] # type: List[SocialBackendDictT] for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items(): saml_dict = dict( name='saml:{}'.format(idp_name), display_name=idp_dict.get('display_name', SAMLAuthBackend.auth_backend_name), display_icon=idp_dict.get('display_icon', SAMLAuthBackend.display_icon), login_url=reverse('login-social-extra-arg', args=('saml', idp_name)), signup_url=reverse('signup-social-extra-arg', args=('saml', idp_name)), ) # type: SocialBackendDictT result.append(saml_dict) return result def get_social_backend_dicts(realm: Optional[Realm]=None) -> List[SocialBackendDictT]: """ Returns a list of dictionaries that represent social backends, sorted in the order in which they should be displayed. """ result = [] for backend in SOCIAL_AUTH_BACKENDS: # SOCIAL_AUTH_BACKENDS is already sorted in the correct order, # so we don't need to worry about sorting here. 
if auth_enabled_helper([backend.auth_backend_name], realm): if backend != SAMLAuthBackend: result.append(create_standard_social_backend_dict(backend)) else: result += list_saml_backend_dicts(realm) return result AUTH_BACKEND_NAME_MAP = { 'Dev': DevAuthBackend, 'Email': EmailAuthBackend, 'LDAP': ZulipLDAPAuthBackend, 'RemoteUser': ZulipRemoteUserBackend, } # type: Dict[str, Any] SOCIAL_AUTH_BACKENDS = [] # type: List[BaseOAuth2] # Authomatically add all of our social auth backends to relevant data structures. for social_auth_subclass in SocialAuthMixin.__subclasses__(): AUTH_BACKEND_NAME_MAP[social_auth_subclass.auth_backend_name] = social_auth_subclass SOCIAL_AUTH_BACKENDS.append(social_auth_subclass) SOCIAL_AUTH_BACKENDS = sorted(SOCIAL_AUTH_BACKENDS, key=lambda x: x.sort_order, reverse=True) # Provide this alternative name for backwards compatibility with # installations that had the old backend enabled. GoogleMobileOauth2Backend = GoogleAuthBackend
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_6
crossvul-python_data_bad_4354_0
import os import time import json from secrets import token_bytes, token_hex, randbits from hashlib import sha256 import asyncio import aiosqlite as sql from responses import * class Database: def __init__(self, session): loop = asyncio.get_event_loop() # lock to prevent race conditions when SELECT then fetchone self.lock = asyncio.Lock(loop=loop) self.dbw = loop.run_until_complete(sql.connect(DATABASE_FILENAME)) self.dbw.row_factory = sql.Row self.db = loop.run_until_complete(self.dbw.cursor()) with open(os.path.join(os.path.dirname(__file__), 'sql', 'startup.sql')) as startup: loop.run_until_complete(self.db.executescript(startup.read())) self.session = session async def close(self): await self.dbw.commit() await self.dbw.close() ### TABLE: clients ### async def client_matches(self, client_id, token): async with self.lock: await self.db.execute('SELECT client_id FROM scratchverifier_clients \ WHERE client_id=? AND token=?', (client_id, token)) if (await self.db.fetchone()): return True return False ### TABLE: clients and sessions ### async def username_from_session(self, session_id): if session_id == 0: # 0 means debug mode return 'kenny2scratch' async with self.lock: await self.db.execute('SELECT username FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) row = await self.db.fetchone() if row is None: return None return row[0] async def new_client(self, session_id): if session_id == 0: # 0 means debug mode # don't create a client, because other funcs return a dummy one # when under debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This client is newly created.'} username = await self.username_from_session(session_id) if username is None: return None async with self.session.get(USERS_API.format(username)) as resp: assert resp.status == 200 data = await resp.json() client_id = data['id'] token = token_hex(32) await self.db.execute('INSERT INTO scratchverifier_clients (client_id, \ token, username) VALUES (?, ?, ?)', 
(client_id, token, username)) return {'client_id': client_id, 'token': token, 'username': username} async def get_client(self, session_id): if session_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This is an example token that can be censored.'} username = await self.username_from_session(session_id) if username is None: return None async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_clients \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None return dict(row) async def get_client_info(self, client_id): if client_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'This is an example token that can be censored.'} async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_clients \ WHERE client_id=?', (client_id,)) row = await self.db.fetchone() if row is None: return None return dict(row) async def reset_token(self, session_id): if session_id == 0: # 0 means debug mode return {'client_id': 0, 'username': 'kenny2scratch', 'token': 'Yes, the token was reset.'} username = await self.username_from_session(session_id) if username is None: return None await self.db.execute('UPDATE scratchverifier_clients SET token=? 
\ WHERE username=?', (token_hex(32), username)) return self.get_client(session_id) async def del_client(self, session_id): if session_id == 0: # 0 means debug mode return username = await self.username_from_session(session_id) if username is None: return await self.db.execute('DELETE FROM scratchverifier_clients \ WHERE username=?', (username,)) ### TABLE: sessions ### async def new_session(self, username): while 1: session_id = randbits(32) async with self.lock: await self.db.execute('SELECT session_id FROM \ scratchverifier_sessions WHERE session_id=?', (session_id,)) if (await self.db.fetchone()) is None: break await self.db.execute('INSERT INTO scratchverifier_sessions \ (session_id, expiry, username) VALUES (?, ?, ?)', ( session_id, int(time.time()) + SESSION_EXPIRY, username )) await self.db.execute('DELETE FROM scratchverifier_sessions WHERE \ expiry<=?', (int(time.time()),)) return session_id async def get_expired(self, session_id): async with self.lock: await self.db.execute('SELECT expiry FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) expiry = await self.db.fetchone() if expiry is None: # "expired" if session doesn't exist in the first place return True expiry = expiry[0] if time.time() > expiry: await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) return True return False async def logout(self, session_id): await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE session_id=?', (session_id,)) async def logout_user(self, username): await self.db.execute('DELETE FROM scratchverifier_sessions \ WHERE username=?', (username,)) ### TABLE: usage ### async def start_verification(self, client_id, username): async with self.lock: await self.db.execute('SELECT code FROM scratchverifier_usage WHERE \ client_id=? AND username=?', (client_id, username)) row = await self.db.fetchone() if row is not None: await self.db.execute('UPDATE scratchverifier_usage SET expiry=? \ WHERE client_id=? 
AND username=? AND code=?', (int(time.time()) + VERIFY_EXPIRY, client_id, username, row[0])) return row[0] code = sha256( str(client_id).encode() + str(time.time()).encode() + username.encode() + token_bytes() # 0->A, 1->B, etc, to avoid Scratch's phone number censor ).hexdigest().translate({ord('0') + i: ord('A') + i for i in range(10)}) await self.db.execute('INSERT INTO scratchverifier_usage (client_id, \ code, username, expiry) VALUES (?, ?, ?, ?)', (client_id, code, username, int(time.time() + VERIFY_EXPIRY))) await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \ username, log_time, log_type) VALUES (?, ?, ?, ?)', (client_id, username, int(time.time()), 1)) await self.db.execute('DELETE FROM scratchverifier_usage WHERE \ expiry<=?', (int(time.time()),)) return code async def get_code(self, client_id, username): async with self.lock: await self.db.execute('SELECT code, expiry FROM scratchverifier_usage \ WHERE client_id=? AND username=?', (client_id, username)) row = await self.db.fetchone() if row is None: return None if time.time() > row['expiry']: await self.end_verification(client_id, username, False) return None return row['code'] async def end_verification(self, client_id, username, succ=True): await self.db.execute('DELETE FROM scratchverifier_usage WHERE \ client_id=? 
AND username=?', (client_id, username)) await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \ username, log_time, log_type) \ VALUES (?, ?, ?, ?)', (client_id, username, int(time.time()), 3 - succ)) ### TABLE: logs solely ### async def get_logs(self, table='logs', **params): query = f'SELECT * FROM scratchverifier_{table} WHERE 1=1' id_col = 'log_id' if table == 'logs' else 'id' time_col = 'log_time' if table == 'logs' else 'time' type_col = 'log_type' if table == 'logs' else 'type' if 'start' in params: query += f' AND {id_col}<:start' if 'before' in params: query += f' AND {time_col}<=:before' if 'end' in params: query += f' AND {id_col}>:end' if 'after' in params: query += f' AND {time_col}>=:after' if 'client_id' in params: query += ' AND client_id=:client_id' if 'username' in params: query += ' AND username=:username' if 'type' in params: query += f' AND {type_col}=:type' query += f' ORDER BY {id_col} DESC LIMIT :limit' for k, v in params.items(): if k in {'start', 'before', 'end', 'after', 'client_id', 'type'}: params[k] = int(v) params['limit'] = int(params['limit']) async with self.lock: await self.db.execute(query, params) rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_log(self, log_id, table='logs'): id_col = 'log_id' if table == 'logs' else 'id' async with self.lock: await self.db.execute(f'SELECT * FROM scratchverifier_{table} \ WHERE {id_col}=?', (log_id,)) row = await self.db.fetchone() if row is None: return None return dict(row) ### TABLE: ratelimits ### async def get_ratelimits(self): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_ratelimits') rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_ratelimit(self, username): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_ratelimits \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None return row async def set_ratelimits(self, data, 
performer): await self.db.executemany('INSERT OR REPLACE INTO \ scratchverifier_ratelimits (username, ratelimit) \ VALUES (:username, :ratelimit)', data) if performer is not None: await self.db.executemany( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', ({ 'username': performer, 'time': int(time.time()), 'type': 2, # ratelimit update 'data': json.dumps(i) } for i in data) ) ### TABLE: bans ### async def get_bans(self): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_bans') rows = await self.db.fetchall() return [dict(i) for i in rows] async def get_ban(self, username): async with self.lock: await self.db.execute('SELECT * FROM scratchverifier_bans \ WHERE username=?', (username,)) row = await self.db.fetchone() if row is None: return None if row['expiry'] is not None and row['expiry'] < time.time(): # ban has expired, delete it and return no ban await self.db.execute('DELETE FROM scratchverifier_bans \ WHERE username=?', (username,)) return None return row async def set_bans(self, data, performer): await self.db.executemany('INSERT OR REPLACE INTO scratchverifier_bans \ (username, expiry) VALUES (:username, :expiry)', data) await self.db.executemany('DELETE FROM scratchverifier_clients \ WHERE username=?', ((i['username'],) for i in data)) await self.db.executemany('DELETE FROM scratchverifier_sessions \ WHERE username=?', ((i['username'],) for i in data)) await self.db.executemany( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', ({ 'username': performer, 'time': int(time.time()), 'type': 1, # ban 'data': json.dumps(i) } for i in data) ) async def del_ban(self, username, performer): await self.db.execute('DELETE FROM scratchverifier_bans \ WHERE username=?', (username,)) await self.db.execute( 'INSERT INTO scratchverifier_auditlogs \ (username, time, type, data) VALUES \ (:username, :time, :type, :data)', { 
'username': performer, 'time': int(time.time()), 'type': 3, # unban 'data': json.dumps({'username': username}) } )
./CrossVul/dataset_final_sorted/CWE-287/py/bad_4354_0
crossvul-python_data_bad_1224_5
from typing import Union, Optional, Dict, Any, List import ujson from django.http import HttpRequest, HttpResponse from django.utils.translation import ugettext as _ from django.shortcuts import redirect, render from django.conf import settings from zerver.decorator import require_realm_admin, require_member_or_admin from zerver.forms import CreateUserForm from zerver.lib.events import get_raw_user_data from zerver.lib.actions import do_change_avatar_fields, do_change_bot_owner, \ do_change_is_admin, do_change_default_all_public_streams, \ do_change_default_events_register_stream, do_change_default_sending_stream, \ do_create_user, do_deactivate_user, do_reactivate_user, do_regenerate_api_key, \ check_change_full_name, notify_created_bot, do_update_outgoing_webhook_service, \ do_update_bot_config_data, check_change_bot_full_name, do_change_is_guest, \ do_update_user_custom_profile_data_if_changed, check_remove_custom_profile_field_value from zerver.lib.avatar import avatar_url, get_gravatar_url from zerver.lib.bot_config import set_bot_config from zerver.lib.exceptions import CannotDeactivateLastUserError from zerver.lib.integrations import EMBEDDED_BOTS from zerver.lib.request import has_request_variables, REQ from zerver.lib.response import json_error, json_success from zerver.lib.storage import static_path from zerver.lib.streams import access_stream_by_name from zerver.lib.upload import upload_avatar_image from zerver.lib.users import get_api_key from zerver.lib.validator import check_bool, check_string, check_int, check_url, check_dict, check_list from zerver.lib.users import check_valid_bot_type, check_bot_creation_policy, \ check_full_name, check_short_name, check_valid_interface_type, check_valid_bot_config, \ access_bot_by_id, add_service, access_user_by_id, check_bot_name_available, \ validate_user_custom_profile_data from zerver.lib.utils import generate_api_key, generate_random_token from zerver.models import UserProfile, Stream, Message, 
email_allowed_for_realm, \ get_user_by_delivery_email, Service, get_user_including_cross_realm, \ DomainNotAllowedForRealmError, DisposableEmailError, get_user_profile_by_id_in_realm, \ EmailContainsPlusError, get_user_by_id_in_realm_including_cross_realm, Realm, \ InvalidFakeEmailDomain def deactivate_user_backend(request: HttpRequest, user_profile: UserProfile, user_id: int) -> HttpResponse: target = access_user_by_id(user_profile, user_id) if check_last_admin(target): return json_error(_('Cannot deactivate the only organization administrator')) return _deactivate_user_profile_backend(request, user_profile, target) def deactivate_user_own_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: if UserProfile.objects.filter(realm=user_profile.realm, is_active=True).count() == 1: raise CannotDeactivateLastUserError(is_last_admin=False) if user_profile.is_realm_admin and check_last_admin(user_profile): raise CannotDeactivateLastUserError(is_last_admin=True) do_deactivate_user(user_profile, acting_user=user_profile) return json_success() def check_last_admin(user_profile: UserProfile) -> bool: admins = set(user_profile.realm.get_human_admin_users()) return user_profile.is_realm_admin and not user_profile.is_bot and len(admins) == 1 def deactivate_bot_backend(request: HttpRequest, user_profile: UserProfile, bot_id: int) -> HttpResponse: target = access_bot_by_id(user_profile, bot_id) return _deactivate_user_profile_backend(request, user_profile, target) def _deactivate_user_profile_backend(request: HttpRequest, user_profile: UserProfile, target: UserProfile) -> HttpResponse: do_deactivate_user(target, acting_user=user_profile) return json_success() def reactivate_user_backend(request: HttpRequest, user_profile: UserProfile, user_id: int) -> HttpResponse: target = access_user_by_id(user_profile, user_id, allow_deactivated=True, allow_bots=True) if target.is_bot: assert target.bot_type is not None check_bot_creation_policy(user_profile, 
target.bot_type) do_reactivate_user(target, acting_user=user_profile) return json_success() @has_request_variables def update_user_backend(request: HttpRequest, user_profile: UserProfile, user_id: int, full_name: Optional[str]=REQ(default="", validator=check_string), is_admin: Optional[bool]=REQ(default=None, validator=check_bool), is_guest: Optional[bool]=REQ(default=None, validator=check_bool), profile_data: Optional[List[Dict[str, Union[int, str, List[int]]]]]= REQ(default=None, validator=check_list(check_dict([('id', check_int)])))) -> HttpResponse: target = access_user_by_id(user_profile, user_id, allow_deactivated=True, allow_bots=True) # Historically, UserProfile had two fields, is_guest and is_realm_admin. # This condition protected against situations where update_user_backend # could cause both is_guest and is_realm_admin to be set. # Once we update the frontend to just send a 'role' value, we can remove this check. if (((is_guest is None and target.is_guest) or is_guest) and ((is_admin is None and target.is_realm_admin) or is_admin)): return json_error(_("Guests cannot be organization administrators")) if is_admin is not None and target.is_realm_admin != is_admin: if not is_admin and check_last_admin(user_profile): return json_error(_('Cannot remove the only organization administrator')) do_change_is_admin(target, is_admin) if is_guest is not None and target.is_guest != is_guest: do_change_is_guest(target, is_guest) if (full_name is not None and target.full_name != full_name and full_name.strip() != ""): # We don't respect `name_changes_disabled` here because the request # is on behalf of the administrator. 
check_change_full_name(target, full_name, user_profile) if profile_data is not None: clean_profile_data = [] for entry in profile_data: if not entry["value"]: field_id = entry["id"] check_remove_custom_profile_field_value(target, field_id) else: clean_profile_data.append(entry) validate_user_custom_profile_data(target.realm.id, clean_profile_data) do_update_user_custom_profile_data_if_changed(target, clean_profile_data) return json_success() def avatar(request: HttpRequest, user_profile: UserProfile, email_or_id: str, medium: bool=False) -> HttpResponse: """Accepts an email address or user ID and returns the avatar""" is_email = False try: int(email_or_id) except ValueError: is_email = True try: realm = user_profile.realm if is_email: avatar_user_profile = get_user_including_cross_realm(email_or_id, realm) else: avatar_user_profile = get_user_by_id_in_realm_including_cross_realm(int(email_or_id), realm) # If there is a valid user account passed in, use its avatar url = avatar_url(avatar_user_profile, medium=medium) except UserProfile.DoesNotExist: # If there is no such user, treat it as a new gravatar email = email_or_id avatar_version = 1 url = get_gravatar_url(email, avatar_version, medium) # We can rely on the url already having query parameters. Because # our templates depend on being able to use the ampersand to # add query parameters to our url, get_avatar_url does '?x=x' # hacks to prevent us from having to jump through decode/encode hoops. assert url is not None assert '?' 
in url url += '&' + request.META['QUERY_STRING'] return redirect(url) def get_stream_name(stream: Optional[Stream]) -> Optional[str]: if stream: return stream.name return None @require_member_or_admin @has_request_variables def patch_bot_backend( request: HttpRequest, user_profile: UserProfile, bot_id: int, full_name: Optional[str]=REQ(default=None), bot_owner_id: Optional[int]=REQ(validator=check_int, default=None), config_data: Optional[Dict[str, str]]=REQ(default=None, validator=check_dict(value_validator=check_string)), service_payload_url: Optional[str]=REQ(validator=check_url, default=None), service_interface: Optional[int]=REQ(validator=check_int, default=1), default_sending_stream: Optional[str]=REQ(default=None), default_events_register_stream: Optional[str]=REQ(default=None), default_all_public_streams: Optional[bool]=REQ(default=None, validator=check_bool) ) -> HttpResponse: bot = access_bot_by_id(user_profile, bot_id) if full_name is not None: check_change_bot_full_name(bot, full_name, user_profile) if bot_owner_id is not None: try: owner = get_user_profile_by_id_in_realm(bot_owner_id, user_profile.realm) except UserProfile.DoesNotExist: return json_error(_('Failed to change owner, no such user')) if not owner.is_active: return json_error(_('Failed to change owner, user is deactivated')) if owner.is_bot: return json_error(_("Failed to change owner, bots can't own other bots")) previous_owner = bot.bot_owner if previous_owner != owner: do_change_bot_owner(bot, owner, user_profile) if default_sending_stream is not None: if default_sending_stream == "": stream = None # type: Optional[Stream] else: (stream, recipient, sub) = access_stream_by_name( user_profile, default_sending_stream) do_change_default_sending_stream(bot, stream) if default_events_register_stream is not None: if default_events_register_stream == "": stream = None else: (stream, recipient, sub) = access_stream_by_name( user_profile, default_events_register_stream) 
do_change_default_events_register_stream(bot, stream) if default_all_public_streams is not None: do_change_default_all_public_streams(bot, default_all_public_streams) if service_payload_url is not None: check_valid_interface_type(service_interface) assert service_interface is not None do_update_outgoing_webhook_service(bot, service_interface, service_payload_url) if config_data is not None: do_update_bot_config_data(bot, config_data) if len(request.FILES) == 0: pass elif len(request.FILES) == 1: user_file = list(request.FILES.values())[0] upload_avatar_image(user_file, user_profile, bot) avatar_source = UserProfile.AVATAR_FROM_USER do_change_avatar_fields(bot, avatar_source) else: return json_error(_("You may only upload one file at a time")) json_result = dict( full_name=bot.full_name, avatar_url=avatar_url(bot), service_interface = service_interface, service_payload_url = service_payload_url, config_data = config_data, default_sending_stream=get_stream_name(bot.default_sending_stream), default_events_register_stream=get_stream_name(bot.default_events_register_stream), default_all_public_streams=bot.default_all_public_streams, ) # Don't include the bot owner in case it is not set. # Default bots have no owner. 
if bot.bot_owner is not None: json_result['bot_owner'] = bot.bot_owner.email return json_success(json_result) @require_member_or_admin @has_request_variables def regenerate_bot_api_key(request: HttpRequest, user_profile: UserProfile, bot_id: int) -> HttpResponse: bot = access_bot_by_id(user_profile, bot_id) new_api_key = do_regenerate_api_key(bot, user_profile) json_result = dict( api_key=new_api_key ) return json_success(json_result) @require_member_or_admin @has_request_variables def add_bot_backend( request: HttpRequest, user_profile: UserProfile, full_name_raw: str=REQ("full_name"), short_name_raw: str=REQ("short_name"), bot_type: int=REQ(validator=check_int, default=UserProfile.DEFAULT_BOT), payload_url: Optional[str]=REQ(validator=check_url, default=""), service_name: Optional[str]=REQ(default=None), config_data: Dict[str, str]=REQ(default={}, validator=check_dict(value_validator=check_string)), interface_type: int=REQ(validator=check_int, default=Service.GENERIC), default_sending_stream_name: Optional[str]=REQ('default_sending_stream', default=None), default_events_register_stream_name: Optional[str]=REQ('default_events_register_stream', default=None), default_all_public_streams: Optional[bool]=REQ(validator=check_bool, default=None) ) -> HttpResponse: short_name = check_short_name(short_name_raw) if bot_type != UserProfile.INCOMING_WEBHOOK_BOT: service_name = service_name or short_name short_name += "-bot" full_name = check_full_name(full_name_raw) try: email = '%s@%s' % (short_name, user_profile.realm.get_bot_domain()) except InvalidFakeEmailDomain: return json_error(_("Can't create bots until FAKE_EMAIL_DOMAIN is correctly configured.\n" "Please contact your server administrator.")) form = CreateUserForm({'full_name': full_name, 'email': email}) if bot_type == UserProfile.EMBEDDED_BOT: if not settings.EMBEDDED_BOTS_ENABLED: return json_error(_("Embedded bots are not enabled.")) if service_name not in [bot.name for bot in EMBEDDED_BOTS]: return 
json_error(_("Invalid embedded bot name.")) if not form.is_valid(): # We validate client-side as well return json_error(_('Bad name or username')) try: get_user_by_delivery_email(email, user_profile.realm) return json_error(_("Username already in use")) except UserProfile.DoesNotExist: pass check_bot_name_available( realm_id=user_profile.realm_id, full_name=full_name, ) check_bot_creation_policy(user_profile, bot_type) check_valid_bot_type(user_profile, bot_type) check_valid_interface_type(interface_type) if len(request.FILES) == 0: avatar_source = UserProfile.AVATAR_FROM_GRAVATAR elif len(request.FILES) != 1: return json_error(_("You may only upload one file at a time")) else: avatar_source = UserProfile.AVATAR_FROM_USER default_sending_stream = None if default_sending_stream_name is not None: (default_sending_stream, ignored_rec, ignored_sub) = access_stream_by_name( user_profile, default_sending_stream_name) default_events_register_stream = None if default_events_register_stream_name is not None: (default_events_register_stream, ignored_rec, ignored_sub) = access_stream_by_name( user_profile, default_events_register_stream_name) if bot_type in (UserProfile.INCOMING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT) and service_name: check_valid_bot_config(bot_type, service_name, config_data) bot_profile = do_create_user(email=email, password='', realm=user_profile.realm, full_name=full_name, short_name=short_name, bot_type=bot_type, bot_owner=user_profile, avatar_source=avatar_source, default_sending_stream=default_sending_stream, default_events_register_stream=default_events_register_stream, default_all_public_streams=default_all_public_streams) if len(request.FILES) == 1: user_file = list(request.FILES.values())[0] upload_avatar_image(user_file, user_profile, bot_profile) if bot_type in (UserProfile.OUTGOING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT): assert(isinstance(service_name, str)) add_service(name=service_name, user_profile=bot_profile, base_url=payload_url, 
interface=interface_type, token=generate_api_key()) if bot_type == UserProfile.INCOMING_WEBHOOK_BOT and service_name: set_bot_config(bot_profile, "integration_id", service_name) if bot_type in (UserProfile.INCOMING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT): for key, value in config_data.items(): set_bot_config(bot_profile, key, value) notify_created_bot(bot_profile) api_key = get_api_key(bot_profile) json_result = dict( api_key=api_key, avatar_url=avatar_url(bot_profile), default_sending_stream=get_stream_name(bot_profile.default_sending_stream), default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream), default_all_public_streams=bot_profile.default_all_public_streams, ) return json_success(json_result) @require_member_or_admin def get_bots_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True, bot_owner=user_profile) bot_profiles = bot_profiles.select_related('default_sending_stream', 'default_events_register_stream') bot_profiles = bot_profiles.order_by('date_joined') def bot_info(bot_profile: UserProfile) -> Dict[str, Any]: default_sending_stream = get_stream_name(bot_profile.default_sending_stream) default_events_register_stream = get_stream_name(bot_profile.default_events_register_stream) # Bots are supposed to have only one API key, at least for now. # Therefore we can safely asume that one and only valid API key will be # the first one. 
api_key = get_api_key(bot_profile) return dict( username=bot_profile.email, full_name=bot_profile.full_name, api_key=api_key, avatar_url=avatar_url(bot_profile), default_sending_stream=default_sending_stream, default_events_register_stream=default_events_register_stream, default_all_public_streams=bot_profile.default_all_public_streams, ) return json_success({'bots': list(map(bot_info, bot_profiles))}) @has_request_variables def get_members_backend(request: HttpRequest, user_profile: UserProfile, include_custom_profile_fields: bool=REQ(validator=check_bool, default=False), client_gravatar: bool=REQ(validator=check_bool, default=False) ) -> HttpResponse: ''' The client_gravatar field here is set to True if clients can compute their own gravatars, which saves us bandwidth. We want to eventually make this the default behavior, but we have old clients that expect the server to compute this for us. ''' realm = user_profile.realm if realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS: # If email addresses are only available to administrators, # clients cannot compute gravatars, so we force-set it to false. client_gravatar = False members = get_raw_user_data(realm, user_profile=user_profile, client_gravatar=client_gravatar, include_custom_profile_fields=include_custom_profile_fields) return json_success({'members': members.values()}) @require_realm_admin @has_request_variables def create_user_backend(request: HttpRequest, user_profile: UserProfile, email: str=REQ(), password: str=REQ(), full_name_raw: str=REQ("full_name"), short_name: str=REQ()) -> HttpResponse: full_name = check_full_name(full_name_raw) form = CreateUserForm({'full_name': full_name, 'email': email}) if not form.is_valid(): return json_error(_('Bad name or username')) # Check that the new user's email address belongs to the admin's realm # (Since this is an admin API, we don't require the user to have been # invited first.) 
realm = user_profile.realm try: email_allowed_for_realm(email, user_profile.realm) except DomainNotAllowedForRealmError: return json_error(_("Email '%(email)s' not allowed in this organization") % {'email': email}) except DisposableEmailError: return json_error(_("Disposable email addresses are not allowed in this organization")) except EmailContainsPlusError: return json_error(_("Email addresses containing + are not allowed.")) try: get_user_by_delivery_email(email, user_profile.realm) return json_error(_("Email '%s' already in use") % (email,)) except UserProfile.DoesNotExist: pass do_create_user(email, password, realm, full_name, short_name) return json_success() def generate_client_id() -> str: return generate_random_token(32) def get_profile_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: result = dict(pointer = user_profile.pointer, client_id = generate_client_id(), max_message_id = -1, user_id = user_profile.id, avatar_url = avatar_url(user_profile), full_name = user_profile.full_name, email = user_profile.email, is_bot = user_profile.is_bot, is_admin = user_profile.is_realm_admin, short_name = user_profile.short_name) if not user_profile.is_bot: custom_profile_field_values = user_profile.customprofilefieldvalue_set.all() profile_data = dict() # type: Dict[int, Dict[str, Any]] for profile_field in custom_profile_field_values: if profile_field.field.is_renderable(): profile_data[profile_field.field_id] = { "value": profile_field.value, "rendered_value": profile_field.rendered_value } else: profile_data[profile_field.field_id] = { "value": profile_field.value } result["profile_data"] = profile_data messages = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1] if messages: result['max_message_id'] = messages[0].id return json_success(result) def team_view(request: HttpRequest) -> HttpResponse: with open(static_path('generated/github-contributors.json')) as f: data = ujson.load(f) return render( request, 
'zerver/team.html', context={ 'page_params': { 'contrib': data['contrib'], }, 'date': data['date'], }, )
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_5
crossvul-python_data_bad_1224_2
404: Not Found
./CrossVul/dataset_final_sorted/CWE-287/py/bad_1224_2
crossvul-python_data_bad_3759_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC # Copyright 2012 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Main entry point into the Catalog service.""" import uuid from keystone import config from keystone import exception from keystone import identity from keystone import policy from keystone import token from keystone.common import manager from keystone.common import wsgi CONF = config.CONF class Manager(manager.Manager): """Default pivot point for the Catalog backend. See :mod:`keystone.common.manager.Manager` for more details on how this dynamically calls the backend. """ def __init__(self): super(Manager, self).__init__(CONF.catalog.driver) class Driver(object): """Interface description for an Catalog driver.""" def list_services(self): """List all service ids in catalog. Returns: list of service_ids or an empty list. """ raise exception.NotImplemented() def get_service(self, service_id): """Get service by id. Returns: service_ref dict or None. """ raise exception.NotImplemented() def delete_service(self, service_id): raise exception.NotImplemented() def create_service(self, service_id, service_ref): raise exception.NotImplemented() def create_endpoint(self, endpoint_id, endpoint_ref): raise exception.NotImplemented() def delete_endpoint(self, endpoint_id): raise exception.NotImplemented() def get_endpoint(self, endpoint_id): """Get endpoint by id. Returns: endpoint_ref dict or None. 
""" raise exception.NotImplemented() def list_endpoints(self): """List all endpoint ids in catalog. Returns: list of endpoint_ids or an empty list. """ raise exception.NotImplemented() def get_catalog(self, user_id, tenant_id, metadata=None): """Retreive and format the current service catalog. Returns: A nested dict representing the service catalog or an empty dict. Example: { 'RegionOne': {'compute': { 'adminURL': u'http://host:8774/v1.1/tenantid', 'internalURL': u'http://host:8774/v1.1/tenant_id', 'name': 'Compute Service', 'publicURL': u'http://host:8774/v1.1/tenantid'}, 'ec2': { 'adminURL': 'http://host:8773/services/Admin', 'internalURL': 'http://host:8773/services/Cloud', 'name': 'EC2 Service', 'publicURL': 'http://host:8773/services/Cloud'}} """ raise exception.NotImplemented() class ServiceController(wsgi.Application): def __init__(self): self.catalog_api = Manager() super(ServiceController, self).__init__() # CRUD extensions # NOTE(termie): this OS-KSADM stuff is not very consistent def get_services(self, context): service_list = self.catalog_api.list_services(context) service_refs = [self.catalog_api.get_service(context, x) for x in service_list] return {'OS-KSADM:services': service_refs} def get_service(self, context, service_id): service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) return {'OS-KSADM:service': service_ref} def delete_service(self, context, service_id): service_ref = self.catalog_api.get_service(context, service_id) if not service_ref: raise exception.ServiceNotFound(service_id=service_id) self.catalog_api.delete_service(context, service_id) def create_service(self, context, OS_KSADM_service): service_id = uuid.uuid4().hex service_ref = OS_KSADM_service.copy() service_ref['id'] = service_id new_service_ref = self.catalog_api.create_service( context, service_id, service_ref) return {'OS-KSADM:service': new_service_ref} class 
EndpointController(wsgi.Application): def __init__(self): self.catalog_api = Manager() self.identity_api = identity.Manager() self.policy_api = policy.Manager() self.token_api = token.Manager() super(EndpointController, self).__init__() def get_endpoints(self, context): self.assert_admin(context) endpoint_list = self.catalog_api.list_endpoints(context) endpoint_refs = [self.catalog_api.get_endpoint(context, e) for e in endpoint_list] return {'endpoints': endpoint_refs} def create_endpoint(self, context, endpoint): self.assert_admin(context) endpoint_id = uuid.uuid4().hex endpoint_ref = endpoint.copy() endpoint_ref['id'] = endpoint_id service_id = endpoint_ref['service_id'] if not self.catalog_api.get_service(context, service_id): raise exception.ServiceNotFound(service_id=service_id) new_endpoint_ref = self.catalog_api.create_endpoint( context, endpoint_id, endpoint_ref) return {'endpoint': new_endpoint_ref} def delete_endpoint(self, context, endpoint_id): self.assert_admin(context) endpoint_ref = self.catalog_api.delete_endpoint(context, endpoint_id)
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3759_0
crossvul-python_data_bad_649_1
# Copyright (c) 2013-2017 by Ron Frederick <ronf@timeheart.net>. # All rights reserved. # # This program and the accompanying materials are made available under # the terms of the Eclipse Public License v1.0 which accompanies this # distribution and is available at: # # http://www.eclipse.org/legal/epl-v10.html # # Contributors: # Ron Frederick - initial implementation, API, and documentation """AsyncSSH version information""" __author__ = 'Ron Frederick' __author_email__ = 'ronf@timeheart.net' __url__ = 'http://asyncssh.timeheart.net' __version__ = '1.12.0'
./CrossVul/dataset_final_sorted/CWE-287/py/bad_649_1
crossvul-python_data_good_2920_0
# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 from hashlib import sha1 from hashlib import sha512 import hmac from httplib import HTTPConnection from httplib import HTTPSConnection import json import swift from time import gmtime from time import strftime from time import time from traceback import format_exc from urllib import quote from urllib import unquote from uuid import uuid4 from eventlet.timeout import Timeout from eventlet import TimeoutError from swift.common.swob import HTTPAccepted from swift.common.swob import HTTPBadRequest from swift.common.swob import HTTPConflict from swift.common.swob import HTTPCreated from swift.common.swob import HTTPForbidden from swift.common.swob import HTTPMethodNotAllowed from swift.common.swob import HTTPMovedPermanently from swift.common.swob import HTTPNoContent from swift.common.swob import HTTPNotFound from swift.common.swob import HTTPUnauthorized from swift.common.swob import Request from swift.common.swob import Response from swift.common.bufferedhttp import http_connect_raw as http_connect from swift.common.middleware.acl import clean_acl from swift.common.middleware.acl import parse_acl from swift.common.middleware.acl import referrer_allowed from swift.common.utils import cache_from_env from swift.common.utils import get_logger from swift.common.utils import get_remote_client from swift.common.utils import HASH_PATH_PREFIX from swift.common.utils import 
HASH_PATH_SUFFIX from swift.common.utils import split_path from swift.common.utils import TRUE_VALUES from swift.common.utils import urlparse import swift.common.wsgi import swauth.authtypes from swauth import swift_version SWIFT_MIN_VERSION = "2.2.0" CONTENT_TYPE_JSON = 'application/json' class Swauth(object): """Scalable authentication and authorization system that uses Swift as its backing store. :param app: The next WSGI app in the pipeline :param conf: The dict of configuration values """ def __init__(self, app, conf): self.app = app self.conf = conf self.logger = get_logger(conf, log_route='swauth') if not swift_version.at_least(SWIFT_MIN_VERSION): msg = ("Your Swift installation is too old (%s). You need at " "least %s." % (swift.__version__, SWIFT_MIN_VERSION)) self.logger.critical(msg) raise ValueError(msg) self.log_headers = conf.get('log_headers', 'no').lower() in TRUE_VALUES self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() if self.reseller_prefix and self.reseller_prefix[-1] != '_': self.reseller_prefix += '_' self.auth_prefix = conf.get('auth_prefix', '/auth/') if not self.auth_prefix: self.auth_prefix = '/auth/' if self.auth_prefix[0] != '/': self.auth_prefix = '/' + self.auth_prefix if self.auth_prefix[-1] != '/': self.auth_prefix += '/' self.swauth_remote = conf.get('swauth_remote') if self.swauth_remote: self.swauth_remote = self.swauth_remote.rstrip('/') if not self.swauth_remote: msg = _('Invalid swauth_remote set in conf file! 
Exiting.') self.logger.critical(msg) raise ValueError(msg) self.swauth_remote_parsed = urlparse(self.swauth_remote) if self.swauth_remote_parsed.scheme not in ('http', 'https'): msg = _('Cannot handle protocol scheme %(schema)s ' 'for url %(url)s!') % \ (self.swauth_remote_parsed.scheme, repr(self.swauth_remote)) self.logger.critical(msg) raise ValueError(msg) self.swauth_remote_timeout = int(conf.get('swauth_remote_timeout', 10)) self.auth_account = '%s.auth' % self.reseller_prefix self.default_swift_cluster = conf.get('default_swift_cluster', 'local#http://127.0.0.1:8080/v1') # This setting is a little messy because of the options it has to # provide. The basic format is cluster_name#url, such as the default # value of local#http://127.0.0.1:8080/v1. # If the URL given to the user needs to differ from the url used by # Swauth to create/delete accounts, there's a more complex format: # cluster_name#url#url, such as # local#https://public.com:8080/v1#http://private.com:8080/v1. cluster_parts = self.default_swift_cluster.split('#', 2) self.dsc_name = cluster_parts[0] if len(cluster_parts) == 3: self.dsc_url = cluster_parts[1].rstrip('/') self.dsc_url2 = cluster_parts[2].rstrip('/') elif len(cluster_parts) == 2: self.dsc_url = self.dsc_url2 = cluster_parts[1].rstrip('/') else: raise ValueError('Invalid cluster format') self.dsc_parsed = urlparse(self.dsc_url) if self.dsc_parsed.scheme not in ('http', 'https'): raise ValueError('Cannot handle protocol scheme %s for url %s' % (self.dsc_parsed.scheme, repr(self.dsc_url))) self.dsc_parsed2 = urlparse(self.dsc_url2) if self.dsc_parsed2.scheme not in ('http', 'https'): raise ValueError('Cannot handle protocol scheme %s for url %s' % (self.dsc_parsed2.scheme, repr(self.dsc_url2))) self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key and not self.swauth_remote: msg = _('No super_admin_key set in conf file; Swauth ' 'administration features will be disabled.') self.logger.warning(msg) self.token_life 
    def make_pre_authed_request(self, env, method=None, path=None, body=None,
                                headers=None):
        """Nearly the same as swift.common.wsgi.make_pre_authed_request
        except that this also always sets the 'swift.source' and user agent.

        Newer Swift code will support swift_source as a kwarg, but we do it
        this way so we don't have to have a newer Swift. Since we're doing
        this anyway, we may as well set the user agent too since we always do
        that.

        :param env: WSGI environment of the original request.
        :param method: HTTP method for the subrequest (e.g. 'GET').
        :param path: Quoted path for the subrequest.
        :param body: Optional request body.
        :param headers: Optional dict of headers; may be mutated to add the
                        default storage policy.
        :returns: a pre-authed subrequest object.
        """
        # If the operator configured a default storage policy, force it onto
        # every internally-generated request so new containers land on it.
        if self.default_storage_policy:
            sp = self.default_storage_policy
            if headers:
                headers.update({'X-Storage-Policy': sp})
            else:
                headers = {'X-Storage-Policy': sp}
        subreq = swift.common.wsgi.make_pre_authed_request(
            env, method=method, path=path, body=body, headers=headers,
            agent=self.agent)
        # Tag the subrequest so proxy logging can attribute it to swauth.
        subreq.environ['swift.source'] = self.swift_source
        return subreq
if env.get('REQUEST_METHOD') == 'OPTIONS': return self.app(env, start_response) if self.allow_overrides and env.get('swift.authorize_override', False): return self.app(env, start_response) if not self.swauth_remote: if env.get('PATH_INFO', '') == self.auth_prefix[:-1]: return HTTPMovedPermanently(add_slash=True)(env, start_response) elif env.get('PATH_INFO', '').startswith(self.auth_prefix): return self.handle(env, start_response) s3 = env.get('HTTP_AUTHORIZATION') if s3 and not self.s3_support: msg = 'S3 support is disabled in swauth.' return HTTPBadRequest(body=msg)(env, start_response) token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) if token and len(token) > swauth.authtypes.MAX_TOKEN_LENGTH: return HTTPBadRequest(body='Token exceeds maximum length.')(env, start_response) if s3 or (token and token.startswith(self.reseller_prefix)): # Note: Empty reseller_prefix will match all tokens. groups = self.get_groups(env, token) if groups: env['REMOTE_USER'] = groups user = groups and groups.split(',', 1)[0] or '' # We know the proxy logs the token, so we augment it just a bit # to also log the authenticated user. env['HTTP_X_AUTH_TOKEN'] = \ '%s,%s' % (user, 's3' if s3 else token) env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl if '.reseller_admin' in groups: env['reseller_request'] = True else: # Unauthorized token if self.reseller_prefix and token and \ token.startswith(self.reseller_prefix): # Because I know I'm the definitive auth for this token, I # can deny it outright. return HTTPUnauthorized()(env, start_response) # Because I'm not certain if I'm the definitive auth, I won't # overwrite swift.authorize and I'll just set a delayed denial # if nothing else overrides me. 
elif 'swift.authorize' not in env: env['swift.authorize'] = self.denied_response else: if self.reseller_prefix: # With a non-empty reseller_prefix, I would like to be called # back for anonymous access to accounts I know I'm the # definitive auth for. try: version, rest = split_path(env.get('PATH_INFO', ''), 1, 2, True) except ValueError: rest = None if rest and rest.startswith(self.reseller_prefix): # Handle anonymous access to accounts I'm the definitive # auth for. env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl # Not my token, not my account, I can't authorize this request, # deny all is a good idea if not already set... elif 'swift.authorize' not in env: env['swift.authorize'] = self.denied_response # Because I'm not certain if I'm the definitive auth for empty # reseller_prefixed accounts, I won't overwrite swift.authorize. elif 'swift.authorize' not in env: env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl return self.app(env, start_response) def _get_concealed_token(self, token): """Returns hashed token to be used as object name in Swift. Tokens are stored in auth account but object names are visible in Swift logs. Object names are hashed from token. """ enc_key = "%s:%s:%s" % (HASH_PATH_PREFIX, token, HASH_PATH_SUFFIX) return sha512(enc_key).hexdigest() def get_groups(self, env, token): """Get groups for the given token. :param env: The current WSGI environment dictionary. :param token: Token to validate and return a group string for. :returns: None if the token is invalid or a string containing a comma separated list of groups the authenticated user is a member of. The first group in the list is also considered a unique identifier for that user. 
""" groups = None memcache_client = cache_from_env(env) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) cached_auth_data = memcache_client.get(memcache_key) if cached_auth_data: expires, groups = cached_auth_data if expires < time(): groups = None if env.get('HTTP_AUTHORIZATION'): if not self.s3_support: self.logger.warning('S3 support is disabled in swauth.') return None if self.swauth_remote: # TODO(gholt): Support S3-style authorization with # swauth_remote mode self.logger.warning('S3-style authorization not supported yet ' 'with swauth_remote mode.') return None try: account = env['HTTP_AUTHORIZATION'].split(' ')[1] account, user, sign = account.split(':') except Exception: self.logger.debug( 'Swauth cannot parse Authorization header value %r' % env['HTTP_AUTHORIZATION']) return None path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return None if 'x-object-meta-account-id' in resp.headers: account_id = resp.headers['x-object-meta-account-id'] else: path = quote('/v1/%s/%s' % (self.auth_account, account)) resp2 = self.make_pre_authed_request( env, 'HEAD', path).get_response(self.app) if resp2.status_int // 100 != 2: return None account_id = resp2.headers['x-container-meta-account-id'] path = env['PATH_INFO'] env['PATH_INFO'] = path.replace("%s:%s" % (account, user), account_id, 1) detail = json.loads(resp.body) if detail: creds = detail.get('auth') try: auth_encoder, creds_dict = \ swauth.authtypes.validate_creds(creds) except ValueError as e: self.logger.error('%s' % e.args[0]) return None password = creds_dict['hash'] msg = base64.urlsafe_b64decode(unquote(token)) # https://bugs.python.org/issue5285 if isinstance(password, unicode): password = password.encode('utf-8') if isinstance(msg, unicode): msg = msg.encode('utf-8') s = base64.encodestring(hmac.new(password, msg, sha1).digest()).strip() if s != 
    def authorize(self, req):
        """Returns None if the request is authorized to continue or a standard
        WSGI response callable if not.

        Installed into the WSGI environment as 'swift.authorize' and called
        by the proxy once the account/container ACLs are known.
        """
        try:
            version, account, container, obj = split_path(req.path, 1, 4,
                                                          True)
        except ValueError:
            return HTTPNotFound(request=req)
        # Only accounts under our reseller prefix are ours to authorize.
        if not account or not account.startswith(self.reseller_prefix):
            return self.denied_response(req)
        user_groups = (req.remote_user or '').split(',')
        # Reseller admins own every normal account under the prefix (but not
        # the bare prefix itself or dot-prefixed internal accounts).
        if '.reseller_admin' in user_groups and \
                account != self.reseller_prefix and \
                account[len(self.reseller_prefix)] != '.':
            req.environ['swift_owner'] = True
            return None
        if account in user_groups and \
                (req.method not in ('DELETE', 'PUT') or container):
            # If the user is admin for the account and is not trying to do an
            # account DELETE or PUT...
            req.environ['swift_owner'] = True
            return None
        # Container-sync requests from configured hosts bypass normal auth.
        if (req.environ.get('swift_sync_key') and
                req.environ['swift_sync_key'] ==
                req.headers.get('x-container-sync-key', None) and
                'x-timestamp' in req.headers and
                (req.remote_addr in self.allowed_sync_hosts or
                 get_remote_client(req) in self.allowed_sync_hosts)):
            return None
        # Fall back to the container ACL: referrer-based, then group-based.
        referrers, groups = parse_acl(getattr(req, 'acl', None))
        if referrer_allowed(req.referer, referrers):
            if obj or '.rlistings' in groups:
                return None
            return self.denied_response(req)
        if not req.remote_user:
            return self.denied_response(req)
        for user_group in user_groups:
            if user_group in groups:
                return None
        return self.denied_response(req)
    def handle_request(self, req):
        """Entry point for auth requests (ones that match the
        self.auth_prefix).
        Should return a WSGI-style callable (such as swob.Response).

        Routes by API version and path segments: v1/v1.0/auth GET requests
        get tokens; v2 requests hit the admin API; everything else falls
        through to the web admin static content handler.

        :param req: swob.Request object
        """
        req.start_time = time()
        handler = None
        try:
            version, account, user, _junk = split_path(req.path_info,
                                                       minsegs=0, maxsegs=4,
                                                       rest_with_last=True)
        except ValueError:
            return HTTPNotFound(request=req)
        if version in ('v1', 'v1.0', 'auth'):
            if req.method == 'GET':
                handler = self.handle_get_token
        elif version == 'v2':
            # The whole admin API is disabled when no super_admin_key is set.
            if not self.super_admin_key:
                return HTTPNotFound(request=req)
            req.path_info_pop()
            if req.method == 'GET':
                if not account and not user:
                    handler = self.handle_get_reseller
                elif account:
                    if not user:
                        handler = self.handle_get_account
                    elif account == '.token':
                        req.path_info_pop()
                        handler = self.handle_validate_token
                    else:
                        handler = self.handle_get_user
            elif req.method == 'PUT':
                if not user:
                    handler = self.handle_put_account
                else:
                    handler = self.handle_put_user
            elif req.method == 'DELETE':
                if not user:
                    handler = self.handle_delete_account
                else:
                    handler = self.handle_delete_user
            elif req.method == 'POST':
                if account == '.prep':
                    handler = self.handle_prep
                elif user == '.services':
                    handler = self.handle_set_services
        else:
            handler = self.handle_webadmin
        if not handler:
            req.response = HTTPBadRequest(request=req)
        else:
            req.response = handler(req)
        return req.response
    def handle_get_reseller(self, req):
        """Handles the GET v2 call for getting general reseller information
        (currently just a list of accounts). Can only be called by a
        .reseller_admin.

        On success, a JSON dictionary will be returned with a single
        `accounts` key whose value is list of dicts. Each dict represents an
        account and currently only contains the single key `name`. For
        example::

            {"accounts": [{"name": "reseller"}, {"name": "test"},
                          {"name": "test2"}]}

        :param req: The swob.Request to process.
        :returns: swob.Response, 2xx on success with a JSON dictionary as
                  explained above.
        """
        if not self.is_reseller_admin(req):
            return self.denied_response(req)
        listing = []
        marker = ''
        # Page through the container listing of the auth account.
        while True:
            path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account),
                                                     quote(marker))
            resp = self.make_pre_authed_request(
                req.environ, 'GET', path).get_response(self.app)
            if resp.status_int // 100 != 2:
                raise Exception('Could not list main auth account: %s %s' %
                                (path, resp.status))
            sublisting = json.loads(resp.body)
            if not sublisting:
                break
            for container in sublisting:
                # Dot-prefixed containers are internal (.account_id, .token_*)
                if container['name'][0] != '.':
                    listing.append({'name': container['name']})
            marker = sublisting[-1]['name'].encode('utf-8')
        return Response(body=json.dumps({'accounts': listing}),
                        content_type=CONTENT_TYPE_JSON)
""" account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain the .services object: %s %s' % (path, resp.status)) services = json.loads(resp.body) listing = [] marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) account_id = resp.headers['X-Container-Meta-Account-Id'] sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': listing.append({'name': obj['name']}) marker = sublisting[-1]['name'].encode('utf-8') return Response(content_type=CONTENT_TYPE_JSON, body=json.dumps({'account_id': account_id, 'services': services, 'users': listing})) def handle_set_services(self, req): """Handles the POST v2/<account>/.services call for setting services information. Can only be called by a reseller .admin. In the :func:`handle_get_account` (GET v2/<account>) call, a section of the returned JSON dict is `services`. This section looks something like this:: "services": {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}} Making use of this section is described in :func:`handle_get_token`. This function allows setting values within this section for the <account>, allowing the addition of new service end points or updating existing ones. 
    def handle_put_account(self, req):
        """Handles the PUT v2/<account> call for adding an account to the
        auth system. Can only be called by a .reseller_admin.

        By default, a newly created UUID4 will be used with the reseller
        prefix as the account id used when creating corresponding service
        accounts. However, you can provide an X-Account-Suffix header to
        replace the UUID4 part.

        :param req: The swob.Request to process.
        :returns: swob.Response, 2xx on success.
        """
        if not self.is_reseller_admin(req):
            return self.denied_response(req)
        account = req.path_info_pop()
        if req.path_info or not account or account[0] == '.':
            return HTTPBadRequest(request=req)
        account_suffix = req.headers.get('x-account-suffix')
        if not account_suffix:
            account_suffix = str(uuid4())
        # Create the new account in the Swift cluster
        path = quote('%s/%s%s' % (self.dsc_parsed2.path,
                                  self.reseller_prefix, account_suffix))
        try:
            conn = self.get_conn()
            conn.request(
                'PUT', path,
                headers={'X-Auth-Token': self.get_itoken(req.environ),
                         'Content-Length': '0'})
            resp = conn.getresponse()
            resp.read()
            if resp.status // 100 != 2:
                raise Exception('Could not create account on the Swift '
                                'cluster: %s %s %s' % (path, resp.status,
                                                       resp.reason))
        except (Exception, TimeoutError):
            self.logger.error(
                _('ERROR: Exception while trying to communicate '
                  'with %(scheme)s://%(host)s:%(port)s/%(path)s'),
                {'scheme': self.dsc_parsed2.scheme,
                 'host': self.dsc_parsed2.hostname,
                 'port': self.dsc_parsed2.port,
                 'path': path})
            raise
        # Ensure the container in the main auth account exists (this
        # container represents the new account)
        path = quote('/v1/%s/%s' % (self.auth_account, account))
        resp = self.make_pre_authed_request(
            req.environ, 'HEAD', path).get_response(self.app)
        if resp.status_int == 404:
            resp = self.make_pre_authed_request(
                req.environ, 'PUT', path).get_response(self.app)
            if resp.status_int // 100 != 2:
                raise Exception('Could not create account within main auth '
                                'account: %s %s' % (path, resp.status))
        elif resp.status_int // 100 == 2:
            if 'x-container-meta-account-id' in resp.headers:
                # Account was already created
                return HTTPAccepted(request=req)
        else:
            raise Exception('Could not verify account within main auth '
                            'account: %s %s' % (path, resp.status))
        # Record the mapping from account id back to account name
        path = quote('/v1/%s/.account_id/%s%s' %
                     (self.auth_account, self.reseller_prefix,
                      account_suffix))
        resp = self.make_pre_authed_request(
            req.environ, 'PUT', path, account).get_response(self.app)
        if resp.status_int // 100 != 2:
            raise Exception('Could not create account id mapping: %s %s' %
                            (path, resp.status))
        # Record the cluster url(s) for the account
        path = quote('/v1/%s/%s/.services' % (self.auth_account, account))
        services = {'storage': {}}
        services['storage'][self.dsc_name] = '%s/%s%s' % (
            self.dsc_url, self.reseller_prefix, account_suffix)
        services['storage']['default'] = self.dsc_name
        resp = self.make_pre_authed_request(
            req.environ, 'PUT', path,
            json.dumps(services)).get_response(self.app)
        if resp.status_int // 100 != 2:
            raise Exception('Could not create .services object: %s %s' %
                            (path, resp.status))
        # Record the mapping from account name to the account id
        path = quote('/v1/%s/%s' % (self.auth_account, account))
        resp = self.make_pre_authed_request(
            req.environ, 'POST', path,
            headers={'X-Container-Meta-Account-Id': '%s%s' % (
                self.reseller_prefix,
                account_suffix)}).get_response(self.app)
        if resp.status_int // 100 != 2:
            raise Exception('Could not record the account id on the account: '
                            '%s %s' % (path, resp.status))
        return HTTPCreated(request=req)
""" if not self.is_reseller_admin(req): return self.denied_response(req) account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) # Make sure the account has no users and get the account_id marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) account_id = resp.headers['x-container-meta-account-id'] sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': return HTTPConflict(request=req) marker = sublisting[-1]['name'].encode('utf-8') # Obtain the listing of services the account is on. path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not obtain .services object: %s %s' % (path, resp.status)) if resp.status_int // 100 == 2: services = json.loads(resp.body) # Delete the account on each cluster it is on. 
    def handle_get_user(self, req):
        """Handles the GET v2/<account>/<user> call for getting user
        information. Can only be called by an account .admin.

        On success, a JSON dict will be returned as described::

            {"groups": [  # List of groups the user is a member of
                {"name": "<act>:<usr>"},
                # The first group is a unique user identifier
                {"name": "<account>"},
                # The second group is the auth account name
                {"name": "<additional-group>"}
                # There may be additional groups, .admin being a special
                # group indicating an account admin and .reseller_admin
                # indicating a reseller admin.
             ],
             "auth": "plaintext:<key>"
             # The auth-type and key for the user; currently only plaintext
             # is implemented.
            }

        For example::

            {"groups": [{"name": "test:tester"}, {"name": "test"},
                        {"name": ".admin"}],
             "auth": "plaintext:testing"}

        If the <user> in the request is the special user `.groups`, the JSON
        dict will contain a single key of `groups` whose value is a list of
        dicts representing the active groups within the account. Each dict
        currently has the single key `name`. For example::

            {"groups": [{"name": ".admin"}, {"name": "test"},
                        {"name": "test:tester"}, {"name": "test:tester3"}]}

        :param req: The swob.Request to process.
        :returns: swob.Response, 2xx on success with a JSON dictionary as
                  explained above.
        """
        account = req.path_info_pop()
        user = req.path_info_pop()
        if req.path_info or not account or account[0] == '.' or not user or \
                (user[0] == '.' and user != '.groups'):
            return HTTPBadRequest(request=req)
        if not self.is_account_admin(req, account):
            return self.denied_response(req)
        # get information for each user for the specified
        # account and create a list of all groups that the users
        # are part of
        if user == '.groups':
            # TODO(gholt): This could be very slow for accounts with a really
            # large number of users. Speed could be improved by concurrently
            # requesting user group information. Then again, I don't *know*
            # it's slow for `normal` use cases, so testing should be done.
            groups = set()
            marker = ''
            while True:
                path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (
                    self.auth_account, account)), quote(marker))
                resp = self.make_pre_authed_request(
                    req.environ, 'GET', path).get_response(self.app)
                if resp.status_int == 404:
                    return HTTPNotFound(request=req)
                if resp.status_int // 100 != 2:
                    raise Exception('Could not list in main auth account: '
                                    '%s %s' % (path, resp.status))
                sublisting = json.loads(resp.body)
                if not sublisting:
                    break
                for obj in sublisting:
                    if obj['name'][0] != '.':
                        # get list of groups for each user
                        user_json = self.get_user_detail(req, account,
                                                         obj['name'])
                        if user_json is None:
                            raise Exception('Could not retrieve user object: '
                                            '%s:%s %s' % (account, user, 404))
                        groups.update(
                            g['name']
                            for g in json.loads(user_json)['groups'])
                marker = sublisting[-1]['name'].encode('utf-8')
            body = json.dumps(
                {'groups': [{'name': g} for g in sorted(groups)]})
        else:
            # get information for specific user,
            # if user doesn't exist, return HTTPNotFound
            body = self.get_user_detail(req, account, user)
            if body is None:
                return HTTPNotFound(request=req)
            display_groups = [g['name'] for g in json.loads(body)['groups']]
            # Viewing privileged users requires correspondingly higher
            # privileges than plain account admin.
            if ('.admin' in display_groups and
                    not self.is_reseller_admin(req)) or \
                    ('.reseller_admin' in display_groups and
                     not self.is_super_admin(req)):
                return self.denied_response(req)
        return Response(body=body, content_type=CONTENT_TYPE_JSON)
Changing password/key ********************* 1) reseller_admin key can be changed by super_admin and by himself. 2) admin key can be changed by any admin in same account, reseller_admin, super_admin and himself. 3) Regular user key can be changed by any admin in his account, reseller_admin, super_admin and himself. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. """ # Validate path info account = req.path_info_pop() user = req.path_info_pop() key = unquote(req.headers.get('x-auth-user-key', '')) key_hash = unquote(req.headers.get('x-auth-user-key-hash', '')) admin = req.headers.get('x-auth-user-admin') == 'true' reseller_admin = \ req.headers.get('x-auth-user-reseller-admin') == 'true' if reseller_admin: admin = True if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.' or (not key and not key_hash): return HTTPBadRequest(request=req) if key_hash: try: swauth.authtypes.validate_creds(key_hash) except ValueError: return HTTPBadRequest(request=req) user_arg = account + ':' + user if reseller_admin: if not self.is_super_admin(req) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) elif not self.is_account_admin(req, account) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: %s %s' % (path, resp.status)) headers = {'X-Object-Meta-Account-Id': resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) groups = ['%s:%s' % (account, user), account] if admin: groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') auth_value = key_hash or 
self.auth_encoder().encode(key) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'auth': auth_value, 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) return HTTPCreated(request=req) def handle_delete_user(self, req): """Handles the DELETE v2/<account>/<user> call for deleting a user from an account. Can only be called by an account .admin. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. """ # Validate path info account = req.path_info_pop() user = req.path_info_pop() if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.': return HTTPBadRequest(request=req) # if user to be deleted is reseller_admin, then requesting # user must be the super_admin is_reseller_admin = self.is_user_reseller_admin(req, account, user) if not is_reseller_admin and not req.credentials_valid: # if user to be deleted can't be found, return 404 return HTTPNotFound(request=req) elif is_reseller_admin and not self.is_super_admin(req): return HTTPForbidden(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) # Delete the user's existing token, if any. 
        # Look up the user object to find any live token to revoke before
        # removing the user itself.
        path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
        resp = self.make_pre_authed_request(
            req.environ, 'HEAD', path).get_response(self.app)
        if resp.status_int == 404:
            return HTTPNotFound(request=req)
        elif resp.status_int // 100 != 2:
            raise Exception('Could not obtain user details: %s %s' %
                            (path, resp.status))
        candidate_token = resp.headers.get('x-object-meta-auth-token')
        if candidate_token:
            # Token objects live under a concealed (hashed) name, sharded
            # into .token_<last-char> containers.
            object_name = self._get_concealed_token(candidate_token)
            path = quote('/v1/%s/.token_%s/%s' %
                         (self.auth_account, object_name[-1], object_name))
            resp = self.make_pre_authed_request(
                req.environ, 'DELETE', path).get_response(self.app)
            # 404 is acceptable: the token may already be gone.
            if resp.status_int // 100 != 2 and resp.status_int != 404:
                raise Exception('Could not delete possibly existing token: '
                                '%s %s' % (path, resp.status))
        # Delete the user entry itself.
        path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
        resp = self.make_pre_authed_request(
            req.environ, 'DELETE', path).get_response(self.app)
        if resp.status_int // 100 != 2 and resp.status_int != 404:
            raise Exception('Could not delete the user object: %s %s' %
                            (path, resp.status))
        return HTTPNoContent(request=req)

    def is_user_reseller_admin(self, req, account, user):
        """Returns True if the user is a .reseller_admin.

        Side effect: sets ``req.credentials_valid`` to False when the user
        object cannot be found, True otherwise (callers such as
        handle_delete_user rely on this to distinguish "not reseller admin"
        from "no such user").

        :param account: account user is part of
        :param user: the user
        :returns: True if user .reseller_admin, False if user is not a
                  reseller_admin and None if the user doesn't exist.
        """
        req.credentials_valid = True
        user_json = self.get_user_detail(req, account, user)
        if user_json is None:
            req.credentials_valid = False
            return False

        user_detail = json.loads(user_json)

        return '.reseller_admin' in (g['name'] for g in user_detail['groups'])

    def handle_get_token(self, req):
        """Handles the various `request for token and service end point(s)`
        calls. There are various formats to support the various auth servers
        in the past.

        Examples::

            GET <auth-prefix>/v1/<act>/auth
                X-Auth-User: <act>:<usr>  or  X-Storage-User: <usr>
                X-Auth-Key: <key>         or  X-Storage-Pass: <key>
            GET <auth-prefix>/auth
                X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
                X-Auth-Key: <key>         or  X-Storage-Pass: <key>
            GET <auth-prefix>/v1.0
                X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
                X-Auth-Key: <key>         or  X-Storage-Pass: <key>

        Values should be url encoded, "act%3Ausr" instead of "act:usr" for
        example; however, for backwards compatibility the colon may be
        included unencoded.

        On successful authentication, the response will have X-Auth-Token
        and X-Storage-Token set to the token to use with Swift and
        X-Storage-URL set to the URL to the default Swift cluster to use.

        The response body will be set to the account's services JSON object
        as described here::

            {"storage": {
                # Represents the Swift storage service end points
                "default": "cluster1",
                    # Indicates which cluster is the default
                "cluster1": "<URL to use with Swift>",
                    # A Swift cluster that can be used with this account,
                    # "cluster1" is the name of the cluster which is usually
                    # a location indicator (like "dfw" for a datacenter
                    # region).
                "cluster2": "<URL to use with Swift>"
                    # Another Swift cluster that can be used with this
                    # account, there will always be at least one Swift
                    # cluster to use or this whole "storage" dict won't be
                    # included at all.
             },
             "servers": {
                # Represents the Nova server service end points
                # Expected to be similar to the "storage" dict, but not
                # implemented yet.
             },
             # Possibly other service dicts, not implemented yet.
            }

        One can also include an "X-Auth-New-Token: true" header to force
        issuing a new token and revoking any old token, even if it hasn't
        expired yet.

        :param req: The swob.Request to process.
        :returns: swob.Response, 2xx on success with data set as explained
                  above.
""" # Validate the request info try: pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': account = pathsegs[1] user = req.headers.get('x-storage-user') if not user: user = unquote(req.headers.get('x-auth-user', '')) if not user or ':' not in user: return HTTPUnauthorized(request=req) account2, user = user.split(':', 1) if account != account2: return HTTPUnauthorized(request=req) key = req.headers.get('x-storage-pass') if not key: key = unquote(req.headers.get('x-auth-key', '')) elif pathsegs[0] in ('auth', 'v1.0'): user = unquote(req.headers.get('x-auth-user', '')) if not user: user = req.headers.get('x-storage-user') if not user or ':' not in user: return HTTPUnauthorized(request=req) account, user = user.split(':', 1) key = unquote(req.headers.get('x-auth-key', '')) if not key: key = req.headers.get('x-storage-pass') else: return HTTPBadRequest(request=req) if not all((account, user, key)): return HTTPUnauthorized(request=req) if user == '.super_admin' and self.super_admin_key and \ key == self.super_admin_key: token = self.get_itoken(req.environ) url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix) return Response( request=req, content_type=CONTENT_TYPE_JSON, body=json.dumps({'storage': {'default': 'local', 'local': url}}), headers={'x-auth-token': token, 'x-storage-token': token, 'x-storage-url': url}) # Authenticate user path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPUnauthorized(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) user_detail = json.loads(resp.body) if not self.credentials_match(user_detail, key): return HTTPUnauthorized(request=req) # See if a token already exists and hasn't expired token = None 
expires = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: object_name = self._get_concealed_token(candidate_token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) delete_token = False try: if req.headers.get('x-auth-new-token', 'false').lower() in \ TRUE_VALUES: delete_token = True else: resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 == 2: token_detail = json.loads(resp.body) if token_detail['expires'] > time(): token = candidate_token expires = token_detail['expires'] else: delete_token = True elif resp.status_int != 404: raise Exception( 'Could not detect whether a token already exists: ' '%s %s' % (path, resp.status)) finally: if delete_token: self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, candidate_token) memcache_client.delete(memcache_key) # Create a new token if one didn't exist if not token: # Retrieve account id, we'll save this in the token path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: ' '%s %s' % (path, resp.status)) account_id = \ resp.headers['x-container-meta-account-id'] # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info object_name = self._get_concealed_token(token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) try: token_life = min( int(req.headers.get('x-auth-token-lifetime', self.token_life)), self.max_token_life) except ValueError: token_life = self.token_life expires = int(time() + token_life) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'account': account, 
'user': user, 'account_id': account_id, 'groups': user_detail['groups'], 'expires': expires})).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create new token: %s %s' % (path, resp.status)) # Record the token with the user info for future use. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'POST', path, headers={'X-Object-Meta-Auth-Token': token} ).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save new token: %s %s' % (path, resp.status)) # Get the services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) detail = json.loads(resp.body) url = detail['storage'][detail['storage']['default']] return Response( request=req, body=resp.body, content_type=CONTENT_TYPE_JSON, headers={'x-auth-token': token, 'x-storage-token': token, 'x-auth-token-expires': str(int(expires - time())), 'x-storage-url': url}) def handle_validate_token(self, req): """Handles the GET v2/.token/<token> call for validating a token, usually called by a service like Swift. On a successful validation, X-Auth-TTL will be set for how much longer this token is valid and X-Auth-Groups will contain a comma separated list of groups the user belongs to. The first group listed will be a unique identifier for the user the token represents. .reseller_admin is a special group that indicates the user should be allowed to do anything on any account. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above. 
""" token = req.path_info_pop() if req.path_info or not token.startswith(self.reseller_prefix): return HTTPBadRequest(request=req) expires = groups = None memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) cached_auth_data = memcache_client.get(memcache_key) if cached_auth_data: expires, groups = cached_auth_data if expires < time(): groups = None if not groups: object_name = self._get_concealed_token(token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return HTTPNotFound(request=req) detail = json.loads(resp.body) expires = detail['expires'] if expires < time(): self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) return HTTPNotFound(request=req) groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) groups = ','.join(groups) return HTTPNoContent(headers={'X-Auth-TTL': expires - time(), 'X-Auth-Groups': groups}) def get_conn(self, urlparsed=None): """Returns an HTTPConnection based on the urlparse result given or the default Swift cluster (internal url) urlparse result. :param urlparsed: The result from urlparse.urlparse or None to use the default Swift cluster's value """ if not urlparsed: urlparsed = self.dsc_parsed2 if urlparsed.scheme == 'http': return HTTPConnection(urlparsed.netloc) else: return HTTPSConnection(urlparsed.netloc) def get_itoken(self, env): """Returns the current internal token to use for the auth system's own actions with other services. Each process will create its own itoken and the token will be deleted and recreated based on the token_life configuration value. 
The itoken information is stored in memcache because the auth process that is asked by Swift to validate the token may not be the same as the auth process that created the token. """ if not self.itoken or self.itoken_expires < time() or \ env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \ TRUE_VALUES: self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) self.itoken_expires = time() + self.token_life memcache_client = cache_from_env(env) if not memcache_client: raise Exception( 'No memcache set up; required for Swauth middleware') memcache_client.set( memcache_key, (self.itoken_expires, '.auth,.reseller_admin,%s.auth' % self.reseller_prefix), time=self.token_life) return self.itoken def get_admin_detail(self, req): """Returns the dict for the user specified as the admin in the request with the addition of an `account` key set to the admin user's account. :param req: The swob request to retrieve X-Auth-Admin-User and X-Auth-Admin-Key from. :returns: The dict for the admin user with the addition of the `account` key. """ if ':' not in req.headers.get('x-auth-admin-user', ''): return None admin_account, admin_user = \ req.headers.get('x-auth-admin-user').split(':', 1) user_json = self.get_user_detail(req, admin_account, admin_user) if user_json is None: return None admin_detail = json.loads(user_json) admin_detail['account'] = admin_account return admin_detail def get_user_detail(self, req, account, user): """Returns the response body of a GET request for the specified user The body is in JSON format and contains all user information. 
        :param req: The swob request
        :param account: the account the user is a member of
        :param user: the user
        :returns: A JSON response with the user detail information, None
                  if the user doesn't exist
        """
        path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
        resp = self.make_pre_authed_request(
            req.environ, 'GET', path).get_response(self.app)
        if resp.status_int == 404:
            return None
        if resp.status_int // 100 != 2:
            raise Exception('Could not get user object: %s %s' %
                            (path, resp.status))
        return resp.body

    def credentials_match(self, user_detail, key):
        """Returns True if the key is valid for the user_detail.
        It will use auth_encoder type the password was encoded with,
        to check for a key match.

        :param user_detail: The dict for the user.
        :param key: The key to validate for the user.
        :returns: True if the key is valid for the user, False if not.
        """
        if user_detail:
            creds = user_detail.get('auth')
            try:
                # validate_creds returns the encoder for the scheme the
                # stored credentials were hashed with.
                auth_encoder, creds_dict = \
                    swauth.authtypes.validate_creds(creds)
            except ValueError as e:
                self.logger.error('%s' % e.args[0])
                return False
        return user_detail and auth_encoder.match(key, creds, **creds_dict)

    def is_user_changing_own_key(self, req, user):
        """Check if the user is changing his own key.

        :param req: The swob.Request to check. This contains
                    x-auth-admin-user and x-auth-admin-key headers which
                    are credentials of the user sending the request.
        :param user: User whose password is to be changed.
        :returns: True if user is changing his own key, False if not.
        """
        admin_detail = self.get_admin_detail(req)
        if not admin_detail:
            # The user does not exist
            return False

        # If user is not admin/reseller_admin and x-auth-user-admin or
        # x-auth-user-reseller-admin headers are present in request, he may
        # be attempting to escalate himself as admin/reseller_admin!
        if '.admin' not in (g['name'] for g in admin_detail['groups']):
            if req.headers.get('x-auth-user-admin') == 'true' or \
                    req.headers.get('x-auth-user-reseller-admin') == 'true':
                return False
        if '.reseller_admin' not in \
                (g['name'] for g in admin_detail['groups']) and \
                req.headers.get('x-auth-user-reseller-admin') == 'true':
            return False

        # Finally, the credentials supplied must actually be the target
        # user's own valid credentials.
        return req.headers.get('x-auth-admin-user') == user and \
            self.credentials_match(admin_detail,
                                   req.headers.get('x-auth-admin-key'))

    def is_super_admin(self, req):
        """Returns True if the admin specified in the request represents
        the .super_admin.

        :param req: The swob.Request to check.
        :param returns: True if .super_admin.
        """
        return req.headers.get('x-auth-admin-user') == '.super_admin' and \
            self.super_admin_key and \
            req.headers.get('x-auth-admin-key') == self.super_admin_key

    def is_reseller_admin(self, req, admin_detail=None):
        """Returns True if the admin specified in the request represents
        a .reseller_admin.

        Side effect: sets ``req.credentials_valid`` to reflect whether the
        supplied admin credentials matched.

        :param req: The swob.Request to check.
        :param admin_detail: The previously retrieved dict from
                             :func:`get_admin_detail` or None for this
                             function to retrieve the admin_detail itself.
        :param returns: True if .reseller_admin.
        """
        req.credentials_valid = False
        if self.is_super_admin(req):
            return True
        if not admin_detail:
            admin_detail = self.get_admin_detail(req)
        if not self.credentials_match(admin_detail,
                                      req.headers.get('x-auth-admin-key')):
            return False
        req.credentials_valid = True
        return '.reseller_admin' in (g['name']
                                     for g in admin_detail['groups'])

    def is_account_admin(self, req, account):
        """Returns True if the admin specified in the request represents
        a .admin for the account specified.

        Side effect: sets ``req.credentials_valid`` to reflect whether the
        supplied admin credentials matched.

        :param req: The swob.Request to check.
        :param account: The account to check for .admin against.
        :param returns: True if .admin.
        """
        req.credentials_valid = False
        if self.is_super_admin(req):
            return True
        admin_detail = self.get_admin_detail(req)
        if admin_detail:
            # A reseller admin is an admin for every account.
            if self.is_reseller_admin(req, admin_detail=admin_detail):
                return True
            if not self.credentials_match(
                    admin_detail, req.headers.get('x-auth-admin-key')):
                return False
            req.credentials_valid = True
            return admin_detail and admin_detail['account'] == account and \
                '.admin' in (g['name'] for g in admin_detail['groups'])
        return False

    def posthooklogger(self, env, req):
        # Emit a proxy-style access log line for auth requests only.
        if not req.path.startswith(self.auth_prefix):
            return
        response = getattr(req, 'response', None)
        if not response:
            return
        trans_time = '%.4f' % (time() - req.start_time)
        the_request = quote(unquote(req.path))
        if req.query_string:
            the_request = the_request + '?' + req.query_string
        # remote user for zeus
        client = req.headers.get('x-cluster-client-ip')
        if not client and 'x-forwarded-for' in req.headers:
            # remote user for other lbs
            client = req.headers['x-forwarded-for'].split(',')[0].strip()
        logged_headers = None
        if self.log_headers:
            logged_headers = '\n'.join('%s: %s' % (k, v)
                                       for k, v in req.headers.items())
        status_int = response.status_int
        if getattr(req, 'client_disconnect', False) or \
                getattr(response, 'client_disconnect', False):
            # 499 is the conventional "client closed request" status.
            status_int = 499
        self.logger.info(' '.join(quote(str(x)) for x in (
            client or '-',
            req.remote_addr or '-',
            strftime('%d/%b/%Y/%H/%M/%S', gmtime()),
            req.method,
            the_request,
            req.environ['SERVER_PROTOCOL'],
            status_int,
            req.referer or '-',
            req.user_agent or '-',
            req.headers.get('x-auth-token',
                            req.headers.get('x-auth-admin-user', '-')),
            getattr(req, 'bytes_transferred', 0) or '-',
            getattr(response, 'bytes_transferred', 0) or '-',
            req.headers.get('etag', '-'),
            req.headers.get('x-trans-id', '-'),
            logged_headers or '-',
            trans_time)))


def filter_factory(global_conf, **local_conf):
    """Returns a WSGI filter app for use with paste.deploy."""
    conf = global_conf.copy()
    conf.update(local_conf)

    def auth_filter(app):
        return Swauth(app, conf)
    return auth_filter
./CrossVul/dataset_final_sorted/CWE-287/py/good_2920_0
crossvul-python_data_bad_3761_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

import routes

from keystone import catalog
from keystone.common import logging
from keystone.common import utils
from keystone.common import wsgi
from keystone import exception
from keystone import identity
from keystone.openstack.common import timeutils
from keystone import policy
from keystone import token


LOG = logging.getLogger(__name__)


class AdminRouter(wsgi.ComposingRouter):
    # Routes for the admin API endpoint: token CRUD/validation plus
    # extension discovery and the admin identity routes.
    def __init__(self):
        mapper = routes.Mapper()

        version_controller = VersionController('admin')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_version')

        # Token Operations
        auth_controller = TokenController()
        mapper.connect('/tokens',
                       controller=auth_controller,
                       action='authenticate',
                       conditions=dict(method=['POST']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='validate_token',
                       conditions=dict(method=['GET']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='validate_token_head',
                       conditions=dict(method=['HEAD']))
        mapper.connect('/tokens/{token_id}',
                       controller=auth_controller,
                       action='delete_token',
                       conditions=dict(method=['DELETE']))
        mapper.connect('/tokens/{token_id}/endpoints',
                       controller=auth_controller,
                       action='endpoints',
                       conditions=dict(method=['GET']))

        # Miscellaneous Operations
        extensions_controller = AdminExtensionsController()
        mapper.connect('/extensions',
                       controller=extensions_controller,
                       action='get_extensions_info',
                       conditions=dict(method=['GET']))
        mapper.connect('/extensions/{extension_alias}',
                       controller=extensions_controller,
                       action='get_extension_info',
                       conditions=dict(method=['GET']))

        identity_router = identity.AdminRouter()
        routers = [identity_router]
        super(AdminRouter, self).__init__(mapper, routers)


class PublicRouter(wsgi.ComposingRouter):
    # Routes for the public API endpoint: authentication plus extension
    # discovery and the public identity routes.
    def __init__(self):
        mapper = routes.Mapper()

        version_controller = VersionController('public')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_version')

        # Token Operations
        auth_controller = TokenController()
        mapper.connect('/tokens',
                       controller=auth_controller,
                       action='authenticate',
                       conditions=dict(method=['POST']))

        # Miscellaneous
        extensions_controller = PublicExtensionsController()
        mapper.connect('/extensions',
                       controller=extensions_controller,
                       action='get_extensions_info',
                       conditions=dict(method=['GET']))
        mapper.connect('/extensions/{extension_alias}',
                       controller=extensions_controller,
                       action='get_extension_info',
                       conditions=dict(method=['GET']))

        identity_router = identity.PublicRouter()
        routers = [identity_router]
        super(PublicRouter, self).__init__(mapper, routers)


class PublicVersionRouter(wsgi.ComposingRouter):
    # Bare version-discovery router for the public endpoint root.
    def __init__(self):
        mapper = routes.Mapper()
        version_controller = VersionController('public')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_versions')
        routers = []
        super(PublicVersionRouter, self).__init__(mapper, routers)


class AdminVersionRouter(wsgi.ComposingRouter):
    # Bare version-discovery router for the admin endpoint root.
    def __init__(self):
        mapper = routes.Mapper()
        version_controller = VersionController('admin')
        mapper.connect('/',
                       controller=version_controller,
                       action='get_versions')
        routers = []
        super(AdminVersionRouter, self).__init__(mapper, routers)


class VersionController(wsgi.Application):
    def __init__(self, version_type):
        self.catalog_api = catalog.Manager()
        # 'publicURL' or 'adminURL' depending on which endpoint we serve.
        self.url_key = "%sURL" % version_type

        super(VersionController, self).__init__()

    def _get_identity_url(self, context):
        # Find this service's own URL in the catalog so version documents
        # can advertise absolute links.
        catalog_ref = self.catalog_api.get_catalog(context=context,
                                                   user_id=None,
                                                   tenant_id=None)
        for region, region_ref in catalog_ref.iteritems():
            for service, service_ref in region_ref.iteritems():
                if service == 'identity':
                    return service_ref[self.url_key]

        raise exception.NotImplemented()

    def _get_versions_list(self, context):
        """The list of versions is dependent on the context."""
        identity_url = self._get_identity_url(context)
        if not identity_url.endswith('/'):
            identity_url = identity_url + '/'

        versions = {}
        versions['v2.0'] = {
            "id": "v2.0",
            "status": "beta",
            "updated": "2011-11-19T00:00:00Z",
            "links": [
                {
                    "rel": "self",
                    "href": identity_url,
                }, {
                    "rel": "describedby",
                    "type": "text/html",
                    "href": "http://docs.openstack.org/api/openstack-"
                            "identity-service/2.0/content/"
                }, {
                    "rel": "describedby",
                    "type": "application/pdf",
                    "href": "http://docs.openstack.org/api/openstack-"
                            "identity-service/2.0/identity-dev-guide-"
                            "2.0.pdf"
                }
            ],
            "media-types": [
                {
                    "base": "application/json",
                    "type": "application/vnd.openstack.identity-v2.0"
                            "+json"
                }, {
                    "base": "application/xml",
                    "type": "application/vnd.openstack.identity-v2.0"
                            "+xml"
                }
            ]
        }

        return versions

    def get_versions(self, context):
        versions = self._get_versions_list(context)
        # 300 Multiple Choices is the conventional response for version
        # discovery at the service root.
        return wsgi.render_response(status=(300, 'Multiple Choices'), body={
            "versions": {
                "values": versions.values()
            }
        })

    def get_version(self, context):
        versions = self._get_versions_list(context)
        return wsgi.render_response(body={
            "version": versions['v2.0']
        })


class NoopController(wsgi.Application):
    def __init__(self):
        super(NoopController, self).__init__()

    def noop(self, context):
        return {}


class TokenController(wsgi.Application):
    def __init__(self):
        self.catalog_api = catalog.Manager()
        self.identity_api = identity.Manager()
        self.token_api = token.Manager()
        self.policy_api = policy.Manager()
        super(TokenController, self).__init__()

    def authenticate(self, context, auth=None):
        """Authenticate credentials and return a token.
Accept auth as a dict that looks like:: { "auth":{ "passwordCredentials":{ "username":"test_user", "password":"mypass" }, "tenantName":"customer-x" } } In this case, tenant is optional, if not provided the token will be considered "unscoped" and can later be used to get a scoped token. Alternatively, this call accepts auth with only a token and tenant that will return a token that is scoped to that tenant. """ token_id = uuid.uuid4().hex if 'passwordCredentials' in auth: user_id = auth['passwordCredentials'].get('userId', None) username = auth['passwordCredentials'].get('username', '') password = auth['passwordCredentials'].get('password', '') tenant_name = auth.get('tenantName', None) if username: try: user_ref = self.identity_api.get_user_by_name( context=context, user_name=username) user_id = user_ref['id'] except exception.UserNotFound: raise exception.Unauthorized() # more compat tenant_id = auth.get('tenantId', None) if tenant_name: try: tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) tenant_id = tenant_ref['id'] except exception.TenantNotFound: raise exception.Unauthorized() try: auth_info = self.identity_api.authenticate(context=context, user_id=user_id, password=password, tenant_id=tenant_id) (user_ref, tenant_ref, metadata_ref) = auth_info # If the user is disabled don't allow them to authenticate if not user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_id) raise exception.Unauthorized() except AssertionError as e: raise exception.Unauthorized(e.message) token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref)) if tenant_ref: catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id'], metadata=metadata_ref) else: catalog_ref = {} elif 'token' in auth: token = auth['token'].get('id', None) tenant_name = auth.get('tenantName') # more compat if tenant_name: 
tenant_ref = self.identity_api.get_tenant_by_name( context=context, tenant_name=tenant_name) tenant_id = tenant_ref['id'] else: tenant_id = auth.get('tenantId', None) try: old_token_ref = self.token_api.get_token(context=context, token_id=token) except exception.NotFound: raise exception.Unauthorized() user_ref = old_token_ref['user'] # If the user is disabled don't allow them to authenticate current_user_ref = self.identity_api.get_user( context=context, user_id=user_ref['id']) if not current_user_ref.get('enabled', True): LOG.warning('User %s is disabled' % user_ref['id']) raise exception.Unauthorized() tenants = self.identity_api.get_tenants_for_user(context, user_ref['id']) if tenant_id and tenant_id not in tenants: raise exception.Unauthorized() try: tenant_ref = self.identity_api.get_tenant( context=context, tenant_id=tenant_id) metadata_ref = self.identity_api.get_metadata( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id']) catalog_ref = self.catalog_api.get_catalog( context=context, user_id=user_ref['id'], tenant_id=tenant_ref['id'], metadata=metadata_ref) except exception.TenantNotFound: tenant_ref = None metadata_ref = {} catalog_ref = {} token_ref = self.token_api.create_token( context, token_id, dict(id=token_id, user=user_ref, tenant=tenant_ref, metadata=metadata_ref, expires=old_token_ref['expires'])) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata roles_ref = [] for role_id in metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) logging.debug('TOKEN_REF %s', token_ref) return self._format_authenticate(token_ref, roles_ref, catalog_ref) def _get_token_ref(self, context, token_id, belongs_to=None): """Returns a token if a valid one exists. Optionally, limited to a token owned by a specific tenant. 
""" # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) token_ref = self.token_api.get_token(context=context, token_id=token_id) if belongs_to: assert token_ref['tenant']['id'] == belongs_to return token_ref # admin only def validate_token_head(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Identical to ``validate_token``, except does not return a response. """ belongs_to = context['query_string'].get("belongsTo") assert self._get_token_ref(context, token_id, belongs_to) # admin only def validate_token(self, context, token_id): """Check that a token is valid. Optionally, also ensure that it is owned by a specific tenant. Returns metadata about the token along any associated roles. """ belongs_to = context['query_string'].get("belongsTo") token_ref = self._get_token_ref(context, token_id, belongs_to) # TODO(termie): optimize this call at some point and put it into the # the return for metadata # fill out the roles in the metadata metadata_ref = token_ref['metadata'] roles_ref = [] for role_id in metadata_ref.get('roles', []): roles_ref.append(self.identity_api.get_role(context, role_id)) # Get a service catalog if possible # This is needed for on-behalf-of requests catalog_ref = None if token_ref.get('tenant'): catalog_ref = self.catalog_api.get_catalog( context=context, user_id=token_ref['user']['id'], tenant_id=token_ref['tenant']['id'], metadata=metadata_ref) return self._format_token(token_ref, roles_ref, catalog_ref) def delete_token(self, context, token_id): """Delete a token, effectively invalidating it for authz.""" # TODO(termie): this stuff should probably be moved to middleware self.assert_admin(context) self.token_api.delete_token(context=context, token_id=token_id) def endpoints(self, context, token_id): """Return a list of endpoints available to the token.""" raise exception.NotImplemented() def _format_authenticate(self, token_ref, 
roles_ref, catalog_ref): o = self._format_token(token_ref, roles_ref) o['access']['serviceCatalog'] = self._format_catalog(catalog_ref) return o def _format_token(self, token_ref, roles_ref, catalog_ref=None): user_ref = token_ref['user'] metadata_ref = token_ref['metadata'] expires = token_ref['expires'] if expires is not None: expires = timeutils.isotime(expires) o = {'access': {'token': {'id': token_ref['id'], 'expires': expires, }, 'user': {'id': user_ref['id'], 'name': user_ref['name'], 'username': user_ref['name'], 'roles': roles_ref, 'roles_links': metadata_ref.get('roles_links', []) } } } if 'tenant' in token_ref and token_ref['tenant']: token_ref['tenant']['enabled'] = True o['access']['token']['tenant'] = token_ref['tenant'] if catalog_ref is not None: o['access']['serviceCatalog'] = self._format_catalog(catalog_ref) return o def _format_catalog(self, catalog_ref): """Munge catalogs from internal to output format Internal catalogs look like: {$REGION: { {$SERVICE: { $key1: $value1, ... } } } The legacy api wants them to look like [{'name': $SERVICE[name], 'type': $SERVICE, 'endpoints': [{ 'tenantId': $tenant_id, ... 
'region': $REGION, }], 'endpoints_links': [], }] """ if not catalog_ref: return {} services = {} for region, region_ref in catalog_ref.iteritems(): for service, service_ref in region_ref.iteritems(): new_service_ref = services.get(service, {}) new_service_ref['name'] = service_ref.pop('name') new_service_ref['type'] = service new_service_ref['endpoints_links'] = [] service_ref['region'] = region endpoints_ref = new_service_ref.get('endpoints', []) endpoints_ref.append(service_ref) new_service_ref['endpoints'] = endpoints_ref services[service] = new_service_ref return services.values() class ExtensionsController(wsgi.Application): """Base extensions controller to be extended by public and admin API's.""" def __init__(self, extensions=None): super(ExtensionsController, self).__init__() self.extensions = extensions or {} def get_extensions_info(self, context): return {'extensions': {'values': self.extensions.values()}} def get_extension_info(self, context, extension_alias): try: return {'extension': self.extensions[extension_alias]} except KeyError: raise exception.NotFound(target=extension_alias) class PublicExtensionsController(ExtensionsController): pass class AdminExtensionsController(ExtensionsController): def __init__(self, *args, **kwargs): super(AdminExtensionsController, self).__init__(*args, **kwargs) # TODO(dolph): Extensions should obviously provide this information # themselves, but hardcoding it here allows us to match # the API spec in the short term with minimal complexity. 
self.extensions['OS-KSADM'] = { 'name': 'Openstack Keystone Admin', 'namespace': 'http://docs.openstack.org/identity/api/ext/' 'OS-KSADM/v1.0', 'alias': 'OS-KSADM', 'updated': '2011-08-19T13:25:27-06:00', 'description': 'Openstack extensions to Keystone v2.0 API ' 'enabling Admin Operations.', 'links': [ { 'rel': 'describedby', # TODO(dolph): link needs to be revised after # bug 928059 merges 'type': 'text/html', 'href': 'https://github.com/openstack/identity-api', } ] } @logging.fail_gracefully def public_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return PublicRouter() @logging.fail_gracefully def admin_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return AdminRouter() @logging.fail_gracefully def public_version_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return PublicVersionRouter() @logging.fail_gracefully def admin_version_app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return AdminVersionRouter()
./CrossVul/dataset_final_sorted/CWE-287/py/bad_3761_0
crossvul-python_data_good_4331_1
from datetime import datetime from typing import Any, Dict, List, Optional, Tuple, Union from uuid import uuid4 from flask import g from alerta.app import db from alerta.database.base import Query from alerta.models.enums import ChangeType, NoteType from alerta.models.history import History from alerta.utils.format import DateTime from alerta.utils.response import absolute_url JSON = Dict[str, Any] class Note: def __init__(self, text: str, user: str, note_type: str, **kwargs) -> None: self.id = kwargs.get('id') or str(uuid4()) self.text = text self.user = user self.note_type = note_type self.attributes = kwargs.get('attributes', None) or dict() self.create_time = kwargs['create_time'] if 'create_time' in kwargs else datetime.utcnow() self.update_time = kwargs.get('update_time') self.alert = kwargs.get('alert') self.customer = kwargs.get('customer') @classmethod def parse(cls, json: JSON) -> 'Note': return Note( id=json.get('id', None), text=json.get('status', None), user=json.get('status', None), attributes=json.get('attributes', dict()), note_type=json.get('type', None), create_time=DateTime.parse(json['createTime']) if 'createTime' in json else None, update_time=DateTime.parse(json['updateTime']) if 'updateTime' in json else None, alert=json.get('related', {}).get('alert'), customer=json.get('customer', None) ) @property def serialize(self) -> Dict[str, Any]: note = { 'id': self.id, 'href': absolute_url('/note/' + self.id), 'text': self.text, 'user': self.user, 'attributes': self.attributes, 'type': self.note_type, 'createTime': self.create_time, 'updateTime': self.update_time, '_links': dict(), 'customer': self.customer } # type: Dict[str, Any] if self.alert: note['_links'] = { 'alert': absolute_url('/alert/' + self.alert) } return note def __repr__(self) -> str: return 'Note(id={!r}, text={!r}, user={!r}, type={!r}, customer={!r})'.format( self.id, self.text, self.user, self.note_type, self.customer ) @classmethod def from_document(cls, doc: Dict[str, Any]) -> 
'Note': return Note( id=doc.get('id', None) or doc.get('_id'), text=doc.get('text', None), user=doc.get('user', None), attributes=doc.get('attributes', dict()), note_type=doc.get('type', None), create_time=doc.get('createTime'), update_time=doc.get('updateTime'), alert=doc.get('alert'), customer=doc.get('customer') ) @classmethod def from_record(cls, rec) -> 'Note': return Note( id=rec.id, text=rec.text, user=rec.user, attributes=dict(rec.attributes), note_type=rec.type, create_time=rec.create_time, update_time=rec.update_time, alert=rec.alert, customer=rec.customer ) @classmethod def from_db(cls, r: Union[Dict, Tuple]) -> 'Note': if isinstance(r, dict): return cls.from_document(r) elif isinstance(r, tuple): return cls.from_record(r) def create(self) -> 'Note': return Note.from_db(db.create_note(self)) @staticmethod def from_alert(alert, text): note = Note( text=text, user=g.login, note_type=NoteType.alert, attributes=dict( resource=alert.resource, event=alert.event, environment=alert.environment, severity=alert.severity, status=alert.status ), alert=alert.id, customer=alert.customer ) history = History( id=note.id, event=alert.event, severity=alert.severity, status=alert.status, value=alert.value, text=text, change_type=ChangeType.note, update_time=datetime.utcnow(), user=g.login ) db.add_history(alert.id, history) return note.create() @staticmethod def find_by_id(id: str) -> Optional['Note']: return Note.from_db(db.get_note(id)) @staticmethod def find_all(query: Query = None) -> List['Note']: return [Note.from_db(note) for note in db.get_notes(query)] def update(self, **kwargs) -> 'Note': return Note.from_db(db.update_note(self.id, **kwargs)) def delete(self) -> bool: return db.delete_note(self.id)
./CrossVul/dataset_final_sorted/CWE-287/py/good_4331_1
crossvul-python_data_bad_2920_0
# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 from hashlib import sha1 import hmac from httplib import HTTPConnection from httplib import HTTPSConnection import json import swift from time import gmtime from time import strftime from time import time from traceback import format_exc from urllib import quote from urllib import unquote from uuid import uuid4 from eventlet.timeout import Timeout from eventlet import TimeoutError from swift.common.swob import HTTPAccepted from swift.common.swob import HTTPBadRequest from swift.common.swob import HTTPConflict from swift.common.swob import HTTPCreated from swift.common.swob import HTTPForbidden from swift.common.swob import HTTPMethodNotAllowed from swift.common.swob import HTTPMovedPermanently from swift.common.swob import HTTPNoContent from swift.common.swob import HTTPNotFound from swift.common.swob import HTTPUnauthorized from swift.common.swob import Request from swift.common.swob import Response from swift.common.bufferedhttp import http_connect_raw as http_connect from swift.common.middleware.acl import clean_acl from swift.common.middleware.acl import parse_acl from swift.common.middleware.acl import referrer_allowed from swift.common.utils import cache_from_env from swift.common.utils import get_logger from swift.common.utils import get_remote_client from swift.common.utils import split_path from swift.common.utils import TRUE_VALUES from swift.common.utils import 
urlparse import swift.common.wsgi import swauth.authtypes from swauth import swift_version SWIFT_MIN_VERSION = "2.2.0" CONTENT_TYPE_JSON = 'application/json' class Swauth(object): """Scalable authentication and authorization system that uses Swift as its backing store. :param app: The next WSGI app in the pipeline :param conf: The dict of configuration values """ def __init__(self, app, conf): self.app = app self.conf = conf self.logger = get_logger(conf, log_route='swauth') if not swift_version.at_least(SWIFT_MIN_VERSION): msg = ("Your Swift installation is too old (%s). You need at " "least %s." % (swift.__version__, SWIFT_MIN_VERSION)) self.logger.critical(msg) raise ValueError(msg) self.log_headers = conf.get('log_headers', 'no').lower() in TRUE_VALUES self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() if self.reseller_prefix and self.reseller_prefix[-1] != '_': self.reseller_prefix += '_' self.auth_prefix = conf.get('auth_prefix', '/auth/') if not self.auth_prefix: self.auth_prefix = '/auth/' if self.auth_prefix[0] != '/': self.auth_prefix = '/' + self.auth_prefix if self.auth_prefix[-1] != '/': self.auth_prefix += '/' self.swauth_remote = conf.get('swauth_remote') if self.swauth_remote: self.swauth_remote = self.swauth_remote.rstrip('/') if not self.swauth_remote: msg = _('Invalid swauth_remote set in conf file! 
Exiting.') self.logger.critical(msg) raise ValueError(msg) self.swauth_remote_parsed = urlparse(self.swauth_remote) if self.swauth_remote_parsed.scheme not in ('http', 'https'): msg = _('Cannot handle protocol scheme %(schema)s ' 'for url %(url)s!') % \ (self.swauth_remote_parsed.scheme, repr(self.swauth_remote)) self.logger.critical(msg) raise ValueError(msg) self.swauth_remote_timeout = int(conf.get('swauth_remote_timeout', 10)) self.auth_account = '%s.auth' % self.reseller_prefix self.default_swift_cluster = conf.get('default_swift_cluster', 'local#http://127.0.0.1:8080/v1') # This setting is a little messy because of the options it has to # provide. The basic format is cluster_name#url, such as the default # value of local#http://127.0.0.1:8080/v1. # If the URL given to the user needs to differ from the url used by # Swauth to create/delete accounts, there's a more complex format: # cluster_name#url#url, such as # local#https://public.com:8080/v1#http://private.com:8080/v1. cluster_parts = self.default_swift_cluster.split('#', 2) self.dsc_name = cluster_parts[0] if len(cluster_parts) == 3: self.dsc_url = cluster_parts[1].rstrip('/') self.dsc_url2 = cluster_parts[2].rstrip('/') elif len(cluster_parts) == 2: self.dsc_url = self.dsc_url2 = cluster_parts[1].rstrip('/') else: raise ValueError('Invalid cluster format') self.dsc_parsed = urlparse(self.dsc_url) if self.dsc_parsed.scheme not in ('http', 'https'): raise ValueError('Cannot handle protocol scheme %s for url %s' % (self.dsc_parsed.scheme, repr(self.dsc_url))) self.dsc_parsed2 = urlparse(self.dsc_url2) if self.dsc_parsed2.scheme not in ('http', 'https'): raise ValueError('Cannot handle protocol scheme %s for url %s' % (self.dsc_parsed2.scheme, repr(self.dsc_url2))) self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key and not self.swauth_remote: msg = _('No super_admin_key set in conf file; Swauth ' 'administration features will be disabled.') self.logger.warning(msg) self.token_life 
= int(conf.get('token_life', 86400)) self.max_token_life = int(conf.get('max_token_life', self.token_life)) self.timeout = int(conf.get('node_timeout', 10)) self.itoken = None self.itoken_expires = None self.allowed_sync_hosts = [h.strip() for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',') if h.strip()] # Get an instance of our auth_type encoder for saving and checking the # user's key self.auth_type = conf.get('auth_type', 'Plaintext').title() self.auth_encoder = getattr(swauth.authtypes, self.auth_type, None) if self.auth_encoder is None: raise ValueError('Invalid auth_type in config file: %s' % self.auth_type) # If auth_type_salt is not set in conf file, a random salt will be # generated for each new password to be encoded. self.auth_encoder.salt = conf.get('auth_type_salt', None) # Due to security concerns, S3 support is disabled by default. self.s3_support = conf.get('s3_support', 'off').lower() in TRUE_VALUES if self.s3_support and self.auth_type != 'Plaintext' \ and not self.auth_encoder.salt: msg = _('S3 support requires salt to be manually set in conf ' 'file using auth_type_salt config option.') self.logger.warning(msg) self.s3_support = False self.allow_overrides = \ conf.get('allow_overrides', 't').lower() in TRUE_VALUES self.agent = '%(orig)s Swauth' self.swift_source = 'SWTH' self.default_storage_policy = conf.get('default_storage_policy', None) def make_pre_authed_request(self, env, method=None, path=None, body=None, headers=None): """Nearly the same as swift.common.wsgi.make_pre_authed_request except that this also always sets the 'swift.source' and user agent. Newer Swift code will support swift_source as a kwarg, but we do it this way so we don't have to have a newer Swift. Since we're doing this anyway, we may as well set the user agent too since we always do that. 
""" if self.default_storage_policy: sp = self.default_storage_policy if headers: headers.update({'X-Storage-Policy': sp}) else: headers = {'X-Storage-Policy': sp} subreq = swift.common.wsgi.make_pre_authed_request( env, method=method, path=path, body=body, headers=headers, agent=self.agent) subreq.environ['swift.source'] = self.swift_source return subreq def __call__(self, env, start_response): """Accepts a standard WSGI application call, authenticating the request and installing callback hooks for authorization and ACL header validation. For an authenticated request, REMOTE_USER will be set to a comma separated list of the user's groups. With a non-empty reseller prefix, acts as the definitive auth service for just tokens and accounts that begin with that prefix, but will deny requests outside this prefix if no other auth middleware overrides it. With an empty reseller prefix, acts as the definitive auth service only for tokens that validate to a non-empty set of groups. For all other requests, acts as the fallback auth service when no other auth middleware overrides it. Alternatively, if the request matches the self.auth_prefix, the request will be routed through the internal auth request handler (self.handle). This is to handle creating users, accounts, granting tokens, etc. """ if 'keystone.identity' in env: return self.app(env, start_response) # We're going to consider OPTIONS requests harmless and the CORS # support in the Swift proxy needs to get them. 
if env.get('REQUEST_METHOD') == 'OPTIONS': return self.app(env, start_response) if self.allow_overrides and env.get('swift.authorize_override', False): return self.app(env, start_response) if not self.swauth_remote: if env.get('PATH_INFO', '') == self.auth_prefix[:-1]: return HTTPMovedPermanently(add_slash=True)(env, start_response) elif env.get('PATH_INFO', '').startswith(self.auth_prefix): return self.handle(env, start_response) s3 = env.get('HTTP_AUTHORIZATION') if s3 and not self.s3_support: msg = 'S3 support is disabled in swauth.' return HTTPBadRequest(body=msg)(env, start_response) token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) if token and len(token) > swauth.authtypes.MAX_TOKEN_LENGTH: return HTTPBadRequest(body='Token exceeds maximum length.')(env, start_response) if s3 or (token and token.startswith(self.reseller_prefix)): # Note: Empty reseller_prefix will match all tokens. groups = self.get_groups(env, token) if groups: env['REMOTE_USER'] = groups user = groups and groups.split(',', 1)[0] or '' # We know the proxy logs the token, so we augment it just a bit # to also log the authenticated user. env['HTTP_X_AUTH_TOKEN'] = \ '%s,%s' % (user, 's3' if s3 else token) env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl if '.reseller_admin' in groups: env['reseller_request'] = True else: # Unauthorized token if self.reseller_prefix and token and \ token.startswith(self.reseller_prefix): # Because I know I'm the definitive auth for this token, I # can deny it outright. return HTTPUnauthorized()(env, start_response) # Because I'm not certain if I'm the definitive auth, I won't # overwrite swift.authorize and I'll just set a delayed denial # if nothing else overrides me. 
elif 'swift.authorize' not in env: env['swift.authorize'] = self.denied_response else: if self.reseller_prefix: # With a non-empty reseller_prefix, I would like to be called # back for anonymous access to accounts I know I'm the # definitive auth for. try: version, rest = split_path(env.get('PATH_INFO', ''), 1, 2, True) except ValueError: rest = None if rest and rest.startswith(self.reseller_prefix): # Handle anonymous access to accounts I'm the definitive # auth for. env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl # Not my token, not my account, I can't authorize this request, # deny all is a good idea if not already set... elif 'swift.authorize' not in env: env['swift.authorize'] = self.denied_response # Because I'm not certain if I'm the definitive auth for empty # reseller_prefixed accounts, I won't overwrite swift.authorize. elif 'swift.authorize' not in env: env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl return self.app(env, start_response) def get_groups(self, env, token): """Get groups for the given token. :param env: The current WSGI environment dictionary. :param token: Token to validate and return a group string for. :returns: None if the token is invalid or a string containing a comma separated list of groups the authenticated user is a member of. The first group in the list is also considered a unique identifier for that user. 
""" groups = None memcache_client = cache_from_env(env) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) cached_auth_data = memcache_client.get(memcache_key) if cached_auth_data: expires, groups = cached_auth_data if expires < time(): groups = None if env.get('HTTP_AUTHORIZATION'): if not self.s3_support: self.logger.warning('S3 support is disabled in swauth.') return None if self.swauth_remote: # TODO(gholt): Support S3-style authorization with # swauth_remote mode self.logger.warning('S3-style authorization not supported yet ' 'with swauth_remote mode.') return None try: account = env['HTTP_AUTHORIZATION'].split(' ')[1] account, user, sign = account.split(':') except Exception: self.logger.debug( 'Swauth cannot parse Authorization header value %r' % env['HTTP_AUTHORIZATION']) return None path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return None if 'x-object-meta-account-id' in resp.headers: account_id = resp.headers['x-object-meta-account-id'] else: path = quote('/v1/%s/%s' % (self.auth_account, account)) resp2 = self.make_pre_authed_request( env, 'HEAD', path).get_response(self.app) if resp2.status_int // 100 != 2: return None account_id = resp2.headers['x-container-meta-account-id'] path = env['PATH_INFO'] env['PATH_INFO'] = path.replace("%s:%s" % (account, user), account_id, 1) detail = json.loads(resp.body) if detail: creds = detail.get('auth') try: auth_encoder, creds_dict = \ swauth.authtypes.validate_creds(creds) except ValueError as e: self.logger.error('%s' % e.args[0]) return None password = creds_dict['hash'] msg = base64.urlsafe_b64decode(unquote(token)) # https://bugs.python.org/issue5285 if isinstance(password, unicode): password = password.encode('utf-8') if isinstance(msg, unicode): msg = msg.encode('utf-8') s = base64.encodestring(hmac.new(password, msg, sha1).digest()).strip() if s != 
sign: return None groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(account_id) groups = ','.join(groups) return groups if not groups: if self.swauth_remote: with Timeout(self.swauth_remote_timeout): conn = http_connect(self.swauth_remote_parsed.hostname, self.swauth_remote_parsed.port, 'GET', '%s/v2/.token/%s' % (self.swauth_remote_parsed.path, quote(token)), ssl=(self.swauth_remote_parsed.scheme == 'https')) resp = conn.getresponse() resp.read() conn.close() if resp.status // 100 != 2: return None expires_from_now = float(resp.getheader('x-auth-ttl')) groups = resp.getheader('x-auth-groups') if memcache_client: memcache_client.set( memcache_key, (time() + expires_from_now, groups), time=expires_from_now) else: path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) resp = self.make_pre_authed_request( env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return None detail = json.loads(resp.body) if detail['expires'] < time(): self.make_pre_authed_request( env, 'DELETE', path).get_response(self.app) return None groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) groups = ','.join(groups) if memcache_client: memcache_client.set( memcache_key, (detail['expires'], groups), time=float(detail['expires'] - time())) return groups def authorize(self, req): """Returns None if the request is authorized to continue or a standard WSGI response callable if not. 
""" try: version, account, container, obj = split_path(req.path, 1, 4, True) except ValueError: return HTTPNotFound(request=req) if not account or not account.startswith(self.reseller_prefix): return self.denied_response(req) user_groups = (req.remote_user or '').split(',') if '.reseller_admin' in user_groups and \ account != self.reseller_prefix and \ account[len(self.reseller_prefix)] != '.': req.environ['swift_owner'] = True return None if account in user_groups and \ (req.method not in ('DELETE', 'PUT') or container): # If the user is admin for the account and is not trying to do an # account DELETE or PUT... req.environ['swift_owner'] = True return None if (req.environ.get('swift_sync_key') and req.environ['swift_sync_key'] == req.headers.get('x-container-sync-key', None) and 'x-timestamp' in req.headers and (req.remote_addr in self.allowed_sync_hosts or get_remote_client(req) in self.allowed_sync_hosts)): return None referrers, groups = parse_acl(getattr(req, 'acl', None)) if referrer_allowed(req.referer, referrers): if obj or '.rlistings' in groups: return None return self.denied_response(req) if not req.remote_user: return self.denied_response(req) for user_group in user_groups: if user_group in groups: return None return self.denied_response(req) def denied_response(self, req): """Returns a standard WSGI response callable with the status of 403 or 401 depending on whether the REMOTE_USER is set or not. """ if not hasattr(req, 'credentials_valid'): req.credentials_valid = None if req.remote_user or req.credentials_valid: return HTTPForbidden(request=req) else: return HTTPUnauthorized(request=req) def handle(self, env, start_response): """WSGI entry point for auth requests (ones that match the self.auth_prefix). Wraps env in swob.Request object and passes it down. 
:param env: WSGI environment dictionary :param start_response: WSGI callable """ try: req = Request(env) if self.auth_prefix: req.path_info_pop() req.bytes_transferred = '-' req.client_disconnect = False if 'x-storage-token' in req.headers and \ 'x-auth-token' not in req.headers: req.headers['x-auth-token'] = req.headers['x-storage-token'] if 'eventlet.posthooks' in env: env['eventlet.posthooks'].append( (self.posthooklogger, (req,), {})) return self.handle_request(req)(env, start_response) else: # Lack of posthook support means that we have to log on the # start of the response, rather than after all the data has # been sent. This prevents logging client disconnects # differently than full transmissions. response = self.handle_request(req)(env, start_response) self.posthooklogger(env, req) return response except (Exception, TimeoutError): print("EXCEPTION IN handle: %s: %s" % (format_exc(), env)) start_response('500 Server Error', [('Content-Type', 'text/plain')]) return ['Internal server error.\n'] def handle_request(self, req): """Entry point for auth requests (ones that match the self.auth_prefix). Should return a WSGI-style callable (such as swob.Response). 
:param req: swob.Request object """ req.start_time = time() handler = None try: version, account, user, _junk = split_path(req.path_info, minsegs=0, maxsegs=4, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if version in ('v1', 'v1.0', 'auth'): if req.method == 'GET': handler = self.handle_get_token elif version == 'v2': if not self.super_admin_key: return HTTPNotFound(request=req) req.path_info_pop() if req.method == 'GET': if not account and not user: handler = self.handle_get_reseller elif account: if not user: handler = self.handle_get_account elif account == '.token': req.path_info_pop() handler = self.handle_validate_token else: handler = self.handle_get_user elif req.method == 'PUT': if not user: handler = self.handle_put_account else: handler = self.handle_put_user elif req.method == 'DELETE': if not user: handler = self.handle_delete_account else: handler = self.handle_delete_user elif req.method == 'POST': if account == '.prep': handler = self.handle_prep elif user == '.services': handler = self.handle_set_services else: handler = self.handle_webadmin if not handler: req.response = HTTPBadRequest(request=req) else: req.response = handler(req) return req.response def handle_webadmin(self, req): if req.method not in ('GET', 'HEAD'): return HTTPMethodNotAllowed(request=req) subpath = req.path[len(self.auth_prefix):] or 'index.html' path = quote('/v1/%s/.webadmin/%s' % (self.auth_account, subpath)) req.response = self.make_pre_authed_request( req.environ, req.method, path).get_response(self.app) return req.response def handle_prep(self, req): """Handles the POST v2/.prep call for preparing the backing store Swift cluster for use with the auth subsystem. Can only be called by .super_admin. :param req: The swob.Request to process. 
:returns: swob.Response, 204 on success """ if not self.is_super_admin(req): return self.denied_response(req) path = quote('/v1/%s' % self.auth_account) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create the main auth account: %s %s' % (path, resp.status)) path = quote('/v1/%s/.account_id' % self.auth_account) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create container: %s %s' % (path, resp.status)) for container in xrange(16): path = quote('/v1/%s/.token_%x' % (self.auth_account, container)) resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create container: %s %s' % (path, resp.status)) return HTTPNoContent(request=req) def handle_get_reseller(self, req): """Handles the GET v2 call for getting general reseller information (currently just a list of accounts). Can only be called by a .reseller_admin. On success, a JSON dictionary will be returned with a single `accounts` key whose value is list of dicts. Each dict represents an account and currently only contains the single key `name`. For example:: {"accounts": [{"name": "reseller"}, {"name": "test"}, {"name": "test2"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above. 
""" if not self.is_reseller_admin(req): return self.denied_response(req) listing = [] marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not list main auth account: %s %s' % (path, resp.status)) sublisting = json.loads(resp.body) if not sublisting: break for container in sublisting: if container['name'][0] != '.': listing.append({'name': container['name']}) marker = sublisting[-1]['name'].encode('utf-8') return Response(body=json.dumps({'accounts': listing}), content_type=CONTENT_TYPE_JSON) def handle_get_account(self, req): """Handles the GET v2/<account> call for getting account information. Can only be called by an account .admin. On success, a JSON dictionary will be returned containing the keys `account_id`, `services`, and `users`. The `account_id` is the value used when creating service accounts. The `services` value is a dict as described in the :func:`handle_get_token` call. The `users` value is a list of dicts, each dict representing a user and currently only containing the single key `name`. For example:: {"account_id": "AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162", "services": {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}}, "users": [{"name": "tester"}, {"name": "tester3"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above. 
""" account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain the .services object: %s %s' % (path, resp.status)) services = json.loads(resp.body) listing = [] marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) account_id = resp.headers['X-Container-Meta-Account-Id'] sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': listing.append({'name': obj['name']}) marker = sublisting[-1]['name'].encode('utf-8') return Response(content_type=CONTENT_TYPE_JSON, body=json.dumps({'account_id': account_id, 'services': services, 'users': listing})) def handle_set_services(self, req): """Handles the POST v2/<account>/.services call for setting services information. Can only be called by a reseller .admin. In the :func:`handle_get_account` (GET v2/<account>) call, a section of the returned JSON dict is `services`. This section looks something like this:: "services": {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}} Making use of this section is described in :func:`handle_get_token`. This function allows setting values within this section for the <account>, allowing the addition of new service end points or updating existing ones. 
The body of the POST request should contain a JSON dict with the following format:: {"service_name": {"end_point_name": "end_point_value"}} There can be multiple services and multiple end points in the same call. Any new services or end points will be added to the existing set of services and end points. Any existing services with the same service name will be merged with the new end points. Any existing end points with the same end point name will have their values updated. The updated services dictionary will be returned on success. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with the udpated services JSON dict as described above """ if not self.is_reseller_admin(req): return self.denied_response(req) account = req.path_info_pop() if req.path_info != '/.services' or not account or account[0] == '.': return HTTPBadRequest(request=req) try: new_services = json.loads(req.body) except ValueError as err: return HTTPBadRequest(body=str(err)) # Get the current services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) services = json.loads(resp.body) for new_service, value in new_services.iteritems(): if new_service in services: services[new_service].update(value) else: services[new_service] = value # Save the new services information services = json.dumps(services) resp = self.make_pre_authed_request( req.environ, 'PUT', path, services).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save .services object: %s %s' % (path, resp.status)) return Response(request=req, body=services, content_type=CONTENT_TYPE_JSON) def handle_put_account(self, req): """Handles the PUT v2/<account> call for adding an account to the auth 
system. Can only be called by a .reseller_admin. By default, a newly created UUID4 will be used with the reseller prefix as the account id used when creating corresponding service accounts. However, you can provide an X-Account-Suffix header to replace the UUID4 part. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. """ if not self.is_reseller_admin(req): return self.denied_response(req) account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) account_suffix = req.headers.get('x-account-suffix') if not account_suffix: account_suffix = str(uuid4()) # Create the new account in the Swift cluster path = quote('%s/%s%s' % (self.dsc_parsed2.path, self.reseller_prefix, account_suffix)) try: conn = self.get_conn() conn.request('PUT', path, headers={'X-Auth-Token': self.get_itoken(req.environ), 'Content-Length': '0'}) resp = conn.getresponse() resp.read() if resp.status // 100 != 2: raise Exception('Could not create account on the Swift ' 'cluster: %s %s %s' % (path, resp.status, resp.reason)) except (Exception, TimeoutError): self.logger.error(_('ERROR: Exception while trying to communicate ' 'with %(scheme)s://%(host)s:%(port)s/%(path)s'), {'scheme': self.dsc_parsed2.scheme, 'host': self.dsc_parsed2.hostname, 'port': self.dsc_parsed2.port, 'path': path}) raise # Ensure the container in the main auth account exists (this # container represents the new account) path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int == 404: resp = self.make_pre_authed_request( req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create account within main auth ' 'account: %s %s' % (path, resp.status)) elif resp.status_int // 100 == 2: if 'x-container-meta-account-id' in resp.headers: # Account was already created return 
HTTPAccepted(request=req) else: raise Exception('Could not verify account within main auth ' 'account: %s %s' % (path, resp.status)) # Record the mapping from account id back to account name path = quote('/v1/%s/.account_id/%s%s' % (self.auth_account, self.reseller_prefix, account_suffix)) resp = self.make_pre_authed_request( req.environ, 'PUT', path, account).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create account id mapping: %s %s' % (path, resp.status)) # Record the cluster url(s) for the account path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) services = {'storage': {}} services['storage'][self.dsc_name] = '%s/%s%s' % (self.dsc_url, self.reseller_prefix, account_suffix) services['storage']['default'] = self.dsc_name resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps(services)).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create .services object: %s %s' % (path, resp.status)) # Record the mapping from account name to the account id path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'POST', path, headers={'X-Container-Meta-Account-Id': '%s%s' % ( self.reseller_prefix, account_suffix)}).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not record the account id on the account: ' '%s %s' % (path, resp.status)) return HTTPCreated(request=req) def handle_delete_account(self, req): """Handles the DELETE v2/<account> call for removing an account from the auth system. Can only be called by a .reseller_admin. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. 
""" if not self.is_reseller_admin(req): return self.denied_response(req) account = req.path_info_pop() if req.path_info or not account or account[0] == '.': return HTTPBadRequest(request=req) # Make sure the account has no users and get the account_id marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) account_id = resp.headers['x-container-meta-account-id'] sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': return HTTPConflict(request=req) marker = sublisting[-1]['name'].encode('utf-8') # Obtain the listing of services the account is on. path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not obtain .services object: %s %s' % (path, resp.status)) if resp.status_int // 100 == 2: services = json.loads(resp.body) # Delete the account on each cluster it is on. 
deleted_any = False for name, url in services['storage'].iteritems(): if name != 'default': parsed = urlparse(url) conn = self.get_conn(parsed) conn.request('DELETE', parsed.path, headers={'X-Auth-Token': self.get_itoken(req.environ)}) resp = conn.getresponse() resp.read() if resp.status == 409: if deleted_any: raise Exception('Managed to delete one or more ' 'service end points, but failed with: ' '%s %s %s' % (url, resp.status, resp.reason)) else: return HTTPConflict(request=req) if resp.status // 100 != 2 and resp.status != 404: raise Exception('Could not delete account on the ' 'Swift cluster: %s %s %s' % (url, resp.status, resp.reason)) deleted_any = True # Delete the .services object itself. path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete .services object: %s %s' % (path, resp.status)) # Delete the account id mapping for the account. path = quote('/v1/%s/.account_id/%s' % (self.auth_account, account_id)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete account id mapping: %s %s' % (path, resp.status)) # Delete the account marker itself. path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete account marked: %s %s' % (path, resp.status)) return HTTPNoContent(request=req) def handle_get_user(self, req): """Handles the GET v2/<account>/<user> call for getting user information. Can only be called by an account .admin. 
On success, a JSON dict will be returned as described:: {"groups": [ # List of groups the user is a member of {"name": "<act>:<usr>"}, # The first group is a unique user identifier {"name": "<account>"}, # The second group is the auth account name {"name": "<additional-group>"} # There may be additional groups, .admin being a special # group indicating an account admin and .reseller_admin # indicating a reseller admin. ], "auth": "plaintext:<key>" # The auth-type and key for the user; currently only plaintext is # implemented. } For example:: {"groups": [{"name": "test:tester"}, {"name": "test"}, {"name": ".admin"}], "auth": "plaintext:testing"} If the <user> in the request is the special user `.groups`, the JSON dict will contain a single key of `groups` whose value is a list of dicts representing the active groups within the account. Each dict currently has the single key `name`. For example:: {"groups": [{"name": ".admin"}, {"name": "test"}, {"name": "test:tester"}, {"name": "test:tester3"}]} :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with a JSON dictionary as explained above. """ account = req.path_info_pop() user = req.path_info_pop() if req.path_info or not account or account[0] == '.' or not user or \ (user[0] == '.' and user != '.groups'): return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) # get information for each user for the specified # account and create a list of all groups that the users # are part of if user == '.groups': # TODO(gholt): This could be very slow for accounts with a really # large number of users. Speed could be improved by concurrently # requesting user group information. Then again, I don't *know* # it's slow for `normal` use cases, so testing should be done. 
groups = set() marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (self.auth_account, account)), quote(marker)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: ' '%s %s' % (path, resp.status)) sublisting = json.loads(resp.body) if not sublisting: break for obj in sublisting: if obj['name'][0] != '.': # get list of groups for each user user_json = self.get_user_detail(req, account, obj['name']) if user_json is None: raise Exception('Could not retrieve user object: ' '%s:%s %s' % (account, user, 404)) groups.update( g['name'] for g in json.loads(user_json)['groups']) marker = sublisting[-1]['name'].encode('utf-8') body = json.dumps( {'groups': [{'name': g} for g in sorted(groups)]}) else: # get information for specific user, # if user doesn't exist, return HTTPNotFound body = self.get_user_detail(req, account, user) if body is None: return HTTPNotFound(request=req) display_groups = [g['name'] for g in json.loads(body)['groups']] if ('.admin' in display_groups and not self.is_reseller_admin(req)) or \ ('.reseller_admin' in display_groups and not self.is_super_admin(req)): return self.denied_response(req) return Response(body=body, content_type=CONTENT_TYPE_JSON) def handle_put_user(self, req): """Handles the PUT v2/<account>/<user> call for adding a user to an account. X-Auth-User-Key represents the user's key (url encoded), - OR - X-Auth-User-Key-Hash represents the user's hashed key (url encoded), X-Auth-User-Admin may be set to `true` to create an account .admin, and X-Auth-User-Reseller-Admin may be set to `true` to create a .reseller_admin. Creating users ************** Can only be called by an account .admin unless the user is to be a .reseller_admin, in which case the request must be by .super_admin. 
Changing password/key ********************* 1) reseller_admin key can be changed by super_admin and by himself. 2) admin key can be changed by any admin in same account, reseller_admin, super_admin and himself. 3) Regular user key can be changed by any admin in his account, reseller_admin, super_admin and himself. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. """ # Validate path info account = req.path_info_pop() user = req.path_info_pop() key = unquote(req.headers.get('x-auth-user-key', '')) key_hash = unquote(req.headers.get('x-auth-user-key-hash', '')) admin = req.headers.get('x-auth-user-admin') == 'true' reseller_admin = \ req.headers.get('x-auth-user-reseller-admin') == 'true' if reseller_admin: admin = True if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.' or (not key and not key_hash): return HTTPBadRequest(request=req) if key_hash: try: swauth.authtypes.validate_creds(key_hash) except ValueError: return HTTPBadRequest(request=req) user_arg = account + ':' + user if reseller_admin: if not self.is_super_admin(req) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) elif not self.is_account_admin(req, account) and\ not self.is_user_changing_own_key(req, user_arg): return self.denied_response(req) path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: %s %s' % (path, resp.status)) headers = {'X-Object-Meta-Account-Id': resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) groups = ['%s:%s' % (account, user), account] if admin: groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') auth_value = key_hash or 
self.auth_encoder().encode(key) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'auth': auth_value, 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) return HTTPCreated(request=req) def handle_delete_user(self, req): """Handles the DELETE v2/<account>/<user> call for deleting a user from an account. Can only be called by an account .admin. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success. """ # Validate path info account = req.path_info_pop() user = req.path_info_pop() if req.path_info or not account or account[0] == '.' or not user or \ user[0] == '.': return HTTPBadRequest(request=req) # if user to be deleted is reseller_admin, then requesting # user must be the super_admin is_reseller_admin = self.is_user_reseller_admin(req, account, user) if not is_reseller_admin and not req.credentials_valid: # if user to be deleted can't be found, return 404 return HTTPNotFound(request=req) elif is_reseller_admin and not self.is_super_admin(req): return HTTPForbidden(request=req) if not self.is_account_admin(req, account): return self.denied_response(req) # Delete the user's existing token, if any. 
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) elif resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, candidate_token[-1], candidate_token)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete possibly existing token: ' '%s %s' % (path, resp.status)) # Delete the user entry itself. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: raise Exception('Could not delete the user object: %s %s' % (path, resp.status)) return HTTPNoContent(request=req) def is_user_reseller_admin(self, req, account, user): """Returns True if the user is a .reseller_admin. :param account: account user is part of :param user: the user :returns: True if user .reseller_admin, False if user is not a reseller_admin and None if the user doesn't exist. """ req.credentials_valid = True user_json = self.get_user_detail(req, account, user) if user_json is None: req.credentials_valid = False return False user_detail = json.loads(user_json) return '.reseller_admin' in (g['name'] for g in user_detail['groups']) def handle_get_token(self, req): """Handles the various `request for token and service end point(s)` calls. There are various formats to support the various auth servers in the past. 
Examples:: GET <auth-prefix>/v1/<act>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/v1.0 X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> Values should be url encoded, "act%3Ausr" instead of "act:usr" for example; however, for backwards compatibility the colon may be included unencoded. On successful authentication, the response will have X-Auth-Token and X-Storage-Token set to the token to use with Swift and X-Storage-URL set to the URL to the default Swift cluster to use. The response body will be set to the account's services JSON object as described here:: {"storage": { # Represents the Swift storage service end points "default": "cluster1", # Indicates which cluster is the default "cluster1": "<URL to use with Swift>", # A Swift cluster that can be used with this account, # "cluster1" is the name of the cluster which is usually a # location indicator (like "dfw" for a datacenter region). "cluster2": "<URL to use with Swift>" # Another Swift cluster that can be used with this account, # there will always be at least one Swift cluster to use or # this whole "storage" dict won't be included at all. }, "servers": { # Represents the Nova server service end points # Expected to be similar to the "storage" dict, but not # implemented yet. }, # Possibly other service dicts, not implemented yet. } One can also include an "X-Auth-New-Token: true" header to force issuing a new token and revoking any old token, even if it hasn't expired yet. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above. 
""" # Validate the request info try: pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': account = pathsegs[1] user = req.headers.get('x-storage-user') if not user: user = unquote(req.headers.get('x-auth-user', '')) if not user or ':' not in user: return HTTPUnauthorized(request=req) account2, user = user.split(':', 1) if account != account2: return HTTPUnauthorized(request=req) key = req.headers.get('x-storage-pass') if not key: key = unquote(req.headers.get('x-auth-key', '')) elif pathsegs[0] in ('auth', 'v1.0'): user = unquote(req.headers.get('x-auth-user', '')) if not user: user = req.headers.get('x-storage-user') if not user or ':' not in user: return HTTPUnauthorized(request=req) account, user = user.split(':', 1) key = unquote(req.headers.get('x-auth-key', '')) if not key: key = req.headers.get('x-storage-pass') else: return HTTPBadRequest(request=req) if not all((account, user, key)): return HTTPUnauthorized(request=req) if user == '.super_admin' and self.super_admin_key and \ key == self.super_admin_key: token = self.get_itoken(req.environ) url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix) return Response( request=req, content_type=CONTENT_TYPE_JSON, body=json.dumps({'storage': {'default': 'local', 'local': url}}), headers={'x-auth-token': token, 'x-storage-token': token, 'x-storage-url': url}) # Authenticate user path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPUnauthorized(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) user_detail = json.loads(resp.body) if not self.credentials_match(user_detail, key): return HTTPUnauthorized(request=req) # See if a token already exists and hasn't expired token = None 
expires = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, candidate_token[-1], candidate_token)) delete_token = False try: if req.headers.get('x-auth-new-token', 'false').lower() in \ TRUE_VALUES: delete_token = True else: resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 == 2: token_detail = json.loads(resp.body) if token_detail['expires'] > time(): token = candidate_token expires = token_detail['expires'] else: delete_token = True elif resp.status_int != 404: raise Exception( 'Could not detect whether a token already exists: ' '%s %s' % (path, resp.status)) finally: if delete_token: self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, candidate_token) memcache_client.delete(memcache_key) # Create a new token if one didn't exist if not token: # Retrieve account id, we'll save this in the token path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: ' '%s %s' % (path, resp.status)) account_id = \ resp.headers['x-container-meta-account-id'] # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) try: token_life = min( int(req.headers.get('x-auth-token-lifetime', self.token_life)), self.max_token_life) except ValueError: token_life = self.token_life expires = int(time() + token_life) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'account': account, 'user': user, 'account_id': account_id, 'groups': user_detail['groups'], 'expires': 
expires})).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create new token: %s %s' % (path, resp.status)) # Record the token with the user info for future use. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'POST', path, headers={'X-Object-Meta-Auth-Token': token} ).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save new token: %s %s' % (path, resp.status)) # Get the services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) detail = json.loads(resp.body) url = detail['storage'][detail['storage']['default']] return Response( request=req, body=resp.body, content_type=CONTENT_TYPE_JSON, headers={'x-auth-token': token, 'x-storage-token': token, 'x-auth-token-expires': str(int(expires - time())), 'x-storage-url': url}) def handle_validate_token(self, req): """Handles the GET v2/.token/<token> call for validating a token, usually called by a service like Swift. On a successful validation, X-Auth-TTL will be set for how much longer this token is valid and X-Auth-Groups will contain a comma separated list of groups the user belongs to. The first group listed will be a unique identifier for the user the token represents. .reseller_admin is a special group that indicates the user should be allowed to do anything on any account. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above. 
""" token = req.path_info_pop() if req.path_info or not token.startswith(self.reseller_prefix): return HTTPBadRequest(request=req) expires = groups = None memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) cached_auth_data = memcache_client.get(memcache_key) if cached_auth_data: expires, groups = cached_auth_data if expires < time(): groups = None if not groups: path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return HTTPNotFound(request=req) detail = json.loads(resp.body) expires = detail['expires'] if expires < time(): self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) return HTTPNotFound(request=req) groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) groups = ','.join(groups) return HTTPNoContent(headers={'X-Auth-TTL': expires - time(), 'X-Auth-Groups': groups}) def get_conn(self, urlparsed=None): """Returns an HTTPConnection based on the urlparse result given or the default Swift cluster (internal url) urlparse result. :param urlparsed: The result from urlparse.urlparse or None to use the default Swift cluster's value """ if not urlparsed: urlparsed = self.dsc_parsed2 if urlparsed.scheme == 'http': return HTTPConnection(urlparsed.netloc) else: return HTTPSConnection(urlparsed.netloc) def get_itoken(self, env): """Returns the current internal token to use for the auth system's own actions with other services. Each process will create its own itoken and the token will be deleted and recreated based on the token_life configuration value. The itoken information is stored in memcache because the auth process that is asked by Swift to validate the token may not be the same as the auth process that created the token. 
""" if not self.itoken or self.itoken_expires < time() or \ env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \ TRUE_VALUES: self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) self.itoken_expires = time() + self.token_life memcache_client = cache_from_env(env) if not memcache_client: raise Exception( 'No memcache set up; required for Swauth middleware') memcache_client.set( memcache_key, (self.itoken_expires, '.auth,.reseller_admin,%s.auth' % self.reseller_prefix), time=self.token_life) return self.itoken def get_admin_detail(self, req): """Returns the dict for the user specified as the admin in the request with the addition of an `account` key set to the admin user's account. :param req: The swob request to retrieve X-Auth-Admin-User and X-Auth-Admin-Key from. :returns: The dict for the admin user with the addition of the `account` key. """ if ':' not in req.headers.get('x-auth-admin-user', ''): return None admin_account, admin_user = \ req.headers.get('x-auth-admin-user').split(':', 1) user_json = self.get_user_detail(req, admin_account, admin_user) if user_json is None: return None admin_detail = json.loads(user_json) admin_detail['account'] = admin_account return admin_detail def get_user_detail(self, req, account, user): """Returns the response body of a GET request for the specified user The body is in JSON format and contains all user information. 
:param req: The swob request :param account: the account the user is a member of :param user: the user :returns: A JSON response with the user detail information, None if the user doesn't exist """ path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return None if resp.status_int // 100 != 2: raise Exception('Could not get user object: %s %s' % (path, resp.status)) return resp.body def credentials_match(self, user_detail, key): """Returns True if the key is valid for the user_detail. It will use auth_encoder type the password was encoded with, to check for a key match. :param user_detail: The dict for the user. :param key: The key to validate for the user. :returns: True if the key is valid for the user, False if not. """ if user_detail: creds = user_detail.get('auth') try: auth_encoder, creds_dict = \ swauth.authtypes.validate_creds(creds) except ValueError as e: self.logger.error('%s' % e.args[0]) return False return user_detail and auth_encoder.match(key, creds, **creds_dict) def is_user_changing_own_key(self, req, user): """Check if the user is changing his own key. :param req: The swob.Request to check. This contains x-auth-admin-user and x-auth-admin-key headers which are credentials of the user sending the request. :param user: User whose password is to be changed. :returns: True if user is changing his own key, False if not. """ admin_detail = self.get_admin_detail(req) if not admin_detail: # The user does not exist return False # If user is not admin/reseller_admin and x-auth-user-admin or # x-auth-user-reseller-admin headers are present in request, he may be # attempting to escalate himself as admin/reseller_admin! 
if '.admin' not in (g['name'] for g in admin_detail['groups']): if req.headers.get('x-auth-user-admin') == 'true' or \ req.headers.get('x-auth-user-reseller-admin') == 'true': return False if '.reseller_admin' not in \ (g['name'] for g in admin_detail['groups']) and \ req.headers.get('x-auth-user-reseller-admin') == 'true': return False return req.headers.get('x-auth-admin-user') == user and \ self.credentials_match(admin_detail, req.headers.get('x-auth-admin-key')) def is_super_admin(self, req): """Returns True if the admin specified in the request represents the .super_admin. :param req: The swob.Request to check. :param returns: True if .super_admin. """ return req.headers.get('x-auth-admin-user') == '.super_admin' and \ self.super_admin_key and \ req.headers.get('x-auth-admin-key') == self.super_admin_key def is_reseller_admin(self, req, admin_detail=None): """Returns True if the admin specified in the request represents a .reseller_admin. :param req: The swob.Request to check. :param admin_detail: The previously retrieved dict from :func:`get_admin_detail` or None for this function to retrieve the admin_detail itself. :param returns: True if .reseller_admin. """ req.credentials_valid = False if self.is_super_admin(req): return True if not admin_detail: admin_detail = self.get_admin_detail(req) if not self.credentials_match(admin_detail, req.headers.get('x-auth-admin-key')): return False req.credentials_valid = True return '.reseller_admin' in (g['name'] for g in admin_detail['groups']) def is_account_admin(self, req, account): """Returns True if the admin specified in the request represents a .admin for the account specified. :param req: The swob.Request to check. :param account: The account to check for .admin against. :param returns: True if .admin. 
""" req.credentials_valid = False if self.is_super_admin(req): return True admin_detail = self.get_admin_detail(req) if admin_detail: if self.is_reseller_admin(req, admin_detail=admin_detail): return True if not self.credentials_match(admin_detail, req.headers.get('x-auth-admin-key')): return False req.credentials_valid = True return admin_detail and admin_detail['account'] == account and \ '.admin' in (g['name'] for g in admin_detail['groups']) return False def posthooklogger(self, env, req): if not req.path.startswith(self.auth_prefix): return response = getattr(req, 'response', None) if not response: return trans_time = '%.4f' % (time() - req.start_time) the_request = quote(unquote(req.path)) if req.query_string: the_request = the_request + '?' + req.query_string # remote user for zeus client = req.headers.get('x-cluster-client-ip') if not client and 'x-forwarded-for' in req.headers: # remote user for other lbs client = req.headers['x-forwarded-for'].split(',')[0].strip() logged_headers = None if self.log_headers: logged_headers = '\n'.join('%s: %s' % (k, v) for k, v in req.headers.items()) status_int = response.status_int if getattr(req, 'client_disconnect', False) or \ getattr(response, 'client_disconnect', False): status_int = 499 self.logger.info(' '.join(quote(str(x)) for x in (client or '-', req.remote_addr or '-', strftime('%d/%b/%Y/%H/%M/%S', gmtime()), req.method, the_request, req.environ['SERVER_PROTOCOL'], status_int, req.referer or '-', req.user_agent or '-', req.headers.get('x-auth-token', req.headers.get('x-auth-admin-user', '-')), getattr(req, 'bytes_transferred', 0) or '-', getattr(response, 'bytes_transferred', 0) or '-', req.headers.get('etag', '-'), req.headers.get('x-trans-id', '-'), logged_headers or '-', trans_time))) def filter_factory(global_conf, **local_conf): """Returns a WSGI filter app for use with paste.deploy.""" conf = global_conf.copy() conf.update(local_conf) def auth_filter(app): return Swauth(app, conf) return auth_filter
# CrossVul dataset marker (file separator, not source code):
# ./CrossVul/dataset_final_sorted/CWE-287/py/bad_2920_0
# CrossVul dataset marker (file separator, not source code):
# crossvul-python_data_good_3757_0
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Main entry point into the Identity service."""

import uuid
# NOTE: urllib/urlparse are the Python 2 standard-library modules; this
# module predates Python 3 (where they were merged into urllib.parse).
import urllib
import urlparse

from keystone import config
from keystone import exception
from keystone import policy
from keystone import token
from keystone.common import logging
from keystone.common import manager
from keystone.common import wsgi


CONF = config.CONF
LOG = logging.getLogger(__name__)


class Manager(manager.Manager):
    """Default pivot point for the Identity backend.

    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.

    """

    def __init__(self):
        # The concrete backend class is chosen at runtime from configuration
        # ([identity] driver in keystone.conf).
        super(Manager, self).__init__(CONF.identity.driver)


class Driver(object):
    """Interface description for an Identity driver.

    Concrete backends (SQL, LDAP, KVS, ...) implement this interface; every
    method here raises NotImplemented so a partial backend fails loudly.
    """

    def authenticate(self, user_id=None, tenant_id=None, password=None):
        """Authenticate a given user, tenant and password.

        Returns: (user, tenant, metadata).

        """
        raise exception.NotImplemented()

    def get_tenant(self, tenant_id):
        """Get a tenant by id.

        Returns: tenant_ref or None.

        """
        raise exception.NotImplemented()

    def get_tenant_by_name(self, tenant_name):
        """Get a tenant by name.

        Returns: tenant_ref or None.

        """
        raise exception.NotImplemented()

    def get_user(self, user_id):
        """Get a user by id.

        Returns: user_ref or None.

        """
        raise exception.NotImplemented()

    def get_user_by_name(self, user_name):
        """Get a user by name.

        Returns: user_ref or None.

        """
        raise exception.NotImplemented()

    def get_role(self, role_id):
        """Get a role by id.

        Returns: role_ref or None.

        """
        raise exception.NotImplemented()

    def list_users(self):
        """List all users in the system.

        NOTE(termie): I'd prefer if this listed only the users for a given
        tenant.

        Returns: a list of user_refs or an empty list.

        """
        raise exception.NotImplemented()

    def list_roles(self):
        """List all roles in the system.

        Returns: a list of role_refs or an empty list.

        """
        raise exception.NotImplemented()

    # NOTE(termie): seven calls below should probably be exposed by the api
    # more clearly when the api redesign happens
    def add_user_to_tenant(self, tenant_id, user_id):
        raise exception.NotImplemented()

    def remove_user_from_tenant(self, tenant_id, user_id):
        raise exception.NotImplemented()

    def get_all_tenants(self):
        raise exception.NotImplemented()

    def get_tenants_for_user(self, user_id):
        """Get the tenants associated with a given user.

        Returns: a list of tenant ids.

        """
        raise exception.NotImplemented()

    def get_roles_for_user_and_tenant(self, user_id, tenant_id):
        """Get the roles associated with a user within given tenant.

        Returns: a list of role ids.

        """
        raise exception.NotImplemented()

    def add_role_to_user_and_tenant(self, user_id, tenant_id, role_id):
        """Add a role to a user within given tenant."""
        raise exception.NotImplemented()

    def remove_role_from_user_and_tenant(self, user_id, tenant_id, role_id):
        """Remove a role from a user within given tenant."""
        raise exception.NotImplemented()

    # user crud
    def create_user(self, user_id, user):
        raise exception.NotImplemented()

    def update_user(self, user_id, user):
        raise exception.NotImplemented()

    def delete_user(self, user_id):
        raise exception.NotImplemented()

    # tenant crud
    def create_tenant(self, tenant_id, tenant):
        raise exception.NotImplemented()

    def update_tenant(self, tenant_id, tenant):
        raise exception.NotImplemented()

    def delete_tenant(self, tenant_id, tenant):
        raise exception.NotImplemented()

    # metadata crud
    def get_metadata(self, user_id, tenant_id):
        raise exception.NotImplemented()

    def create_metadata(self, user_id, tenant_id, metadata):
        raise exception.NotImplemented()

    def update_metadata(self, user_id, tenant_id, metadata):
        raise exception.NotImplemented()

    def delete_metadata(self, user_id, tenant_id, metadata):
        raise exception.NotImplemented()

    # role crud
    def create_role(self, role_id, role):
        raise exception.NotImplemented()

    def update_role(self, role_id, role):
        raise exception.NotImplemented()

    def delete_role(self, role_id):
        raise exception.NotImplemented()


class PublicRouter(wsgi.ComposableRouter):
    # Routes exposed on the public (non-admin) API endpoint.
    def add_routes(self, mapper):
        tenant_controller = TenantController()
        mapper.connect('/tenants',
                       controller=tenant_controller,
                       action='get_tenants_for_token',
                       conditions=dict(methods=['GET']))


class AdminRouter(wsgi.ComposableRouter):
    # Routes exposed on the admin API endpoint; all actions below call
    # assert_admin on the request context.
    def add_routes(self, mapper):
        # Tenant Operations
        tenant_controller = TenantController()
        mapper.connect('/tenants',
                       controller=tenant_controller,
                       action='get_all_tenants',
                       conditions=dict(method=['GET']))
        mapper.connect('/tenants/{tenant_id}',
                       controller=tenant_controller,
                       action='get_tenant',
                       conditions=dict(method=['GET']))

        # User Operations
        user_controller = UserController()
        mapper.connect('/users/{user_id}',
                       controller=user_controller,
                       action='get_user',
                       conditions=dict(method=['GET']))

        # Role Operations
        roles_controller = RoleController()
        mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles',
                       controller=roles_controller,
                       action='get_user_roles',
                       conditions=dict(method=['GET']))
        mapper.connect('/users/{user_id}/roles',
                       controller=user_controller,
                       action='get_user_roles',
                       conditions=dict(method=['GET']))


class TenantController(wsgi.Application):
    def __init__(self):
        self.identity_api = Manager()
        self.policy_api = policy.Manager()
        self.token_api = token.Manager()
        super(TenantController, self).__init__()

    def get_all_tenants(self, context, **kw):
        """Gets a list of all tenants for an admin user."""
        self.assert_admin(context)
        tenant_refs = self.identity_api.get_tenants(context)
        # limit/marker come from the query string and drive pagination in
        # _format_tenant_list below.
        params = {
            'limit': context['query_string'].get('limit'),
            'marker': context['query_string'].get('marker'),
        }
        return self._format_tenant_list(tenant_refs, **params)

    def get_tenants_for_token(self, context, **kw):
        """Get valid tenants for token based on token used to authenticate.

        Pulls the token from the context, validates it and gets the valid
        tenants for the user in the token.

        Doesn't care about token scopedness.

        """
        try:
            token_ref = self.token_api.get_token(context=context,
                                                 token_id=context['token_id'])
        except exception.NotFound:
            # Invalid/expired token maps to 401, not 404, so we do not leak
            # token existence.
            raise exception.Unauthorized()

        user_ref = token_ref['user']
        tenant_ids = self.identity_api.get_tenants_for_user(
            context, user_ref['id'])
        tenant_refs = []
        for tenant_id in tenant_ids:
            tenant_refs.append(self.identity_api.get_tenant(
                context=context, tenant_id=tenant_id))
        params = {
            'limit': context['query_string'].get('limit'),
            'marker': context['query_string'].get('marker'),
        }
        return self._format_tenant_list(tenant_refs, **params)

    def get_tenant(self, context, tenant_id):
        # TODO(termie): this stuff should probably be moved to middleware
        self.assert_admin(context)
        tenant = self.identity_api.get_tenant(context, tenant_id)
        if tenant is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        return {'tenant': tenant}

    # CRUD Extension
    def create_tenant(self, context, tenant):
        tenant_ref = self._normalize_dict(tenant)
        self.assert_admin(context)
        # Use the caller-supplied id if present, otherwise mint a uuid hex.
        tenant_id = (tenant_ref.get('id')
                     and tenant_ref.get('id')
                     or uuid.uuid4().hex)
        tenant_ref['id'] = tenant_id

        tenant = self.identity_api.create_tenant(
            context, tenant_id, tenant_ref)
        return {'tenant': tenant}

    def update_tenant(self, context, tenant_id, tenant):
        self.assert_admin(context)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)

        tenant_ref = self.identity_api.update_tenant(
            context, tenant_id, tenant)
        return {'tenant': tenant_ref}

    def delete_tenant(self, context, tenant_id, **kw):
        self.assert_admin(context)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        self.identity_api.delete_tenant(context, tenant_id)

    def get_tenant_users(self, context, tenant_id, **kw):
        self.assert_admin(context)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        user_refs = self.identity_api.get_tenant_users(context, tenant_id)
        return {'users': user_refs}

    def _format_tenant_list(self, tenant_refs, **kwargs):
        # Paginate tenant_refs using optional 'marker' (tenant id to start
        # after) and 'limit' keyword arguments.
        marker = kwargs.get('marker')
        page_idx = 0
        if marker is not None:
            for (marker_idx, tenant) in enumerate(tenant_refs):
                if tenant['id'] == marker:
                    # we start pagination after the marker
                    page_idx = marker_idx + 1
                    break
            else:
                msg = 'Marker could not be found'
                raise exception.ValidationError(message=msg)

        limit = kwargs.get('limit')
        if limit is not None:
            try:
                limit = int(limit)
                if limit < 0:
                    raise AssertionError()
            except (ValueError, AssertionError):
                msg = 'Invalid limit value'
                raise exception.ValidationError(message=msg)

        # NOTE(review): 'limit' is used as an absolute slice *end* index, not
        # a count relative to the marker, so a page that starts after the
        # marker can contain fewer than 'limit' items (or none). Confirm this
        # is the intended pagination semantics.
        tenant_refs = tenant_refs[page_idx:limit]

        for x in tenant_refs:
            # Default missing 'enabled' flags to True for API consistency.
            if 'enabled' not in x:
                x['enabled'] = True
        o = {'tenants': tenant_refs, 'tenants_links': []}
        return o


class UserController(wsgi.Application):
    def __init__(self):
        self.identity_api = Manager()
        self.policy_api = policy.Manager()
        self.token_api = token.Manager()
        super(UserController, self).__init__()

    def get_user(self, context, user_id):
        self.assert_admin(context)
        user_ref = self.identity_api.get_user(context, user_id)
        if not user_ref:
            raise exception.UserNotFound(user_id=user_id)
        return {'user': user_ref}

    def get_users(self, context):
        # NOTE(termie): i can't imagine that this really wants all the data
        # about every single user in the system...
        self.assert_admin(context)
        user_refs = self.identity_api.list_users(context)
        return {'users': user_refs}

    # CRUD extension
    def create_user(self, context, user):
        user = self._normalize_dict(user)
        self.assert_admin(context)
        tenant_id = user.get('tenantId', None)
        if (tenant_id is not None
                and self.identity_api.get_tenant(context, tenant_id) is None):
            raise exception.TenantNotFound(tenant_id=tenant_id)
        # Server-generated id: any caller-supplied 'id' is ignored.
        user_id = uuid.uuid4().hex
        user_ref = user.copy()
        user_ref['id'] = user_id
        new_user_ref = self.identity_api.create_user(
            context, user_id, user_ref)
        if tenant_id:
            self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        return {'user': new_user_ref}

    def update_user(self, context, user_id, user):
        # NOTE(termie): this is really more of a patch than a put
        self.assert_admin(context)
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)

        user_ref = self.identity_api.update_user(context, user_id, user)

        # If the password was changed or the user was disabled we clear tokens
        if user.get('password') or user.get('enabled', True) == False:
            try:
                for token_id in self.token_api.list_tokens(context, user_id):
                    self.token_api.delete_token(context, token_id)
            except exception.NotImplemented:
                # The users status has been changed but tokens remain valid
                # for backends that can't list tokens for users
                LOG.warning('User %s status has changed, but existing tokens '
                            'remain valid' % user_id)
        return {'user': user_ref}

    def delete_user(self, context, user_id):
        self.assert_admin(context)
        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        self.identity_api.delete_user(context, user_id)

    def set_user_enabled(self, context, user_id, user):
        # Thin alias: token revocation on disable is handled by update_user.
        return self.update_user(context, user_id, user)

    def set_user_password(self, context, user_id, user):
        # Thin alias: token revocation on password change is handled by
        # update_user.
        return self.update_user(context, user_id, user)

    def update_user_tenant(self, context, user_id, user):
        """Update the default tenant."""
        # ensure that we're a member of that tenant
        tenant_id = user.get('tenantId')
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        return self.update_user(context, user_id, user)


class RoleController(wsgi.Application):
    def __init__(self):
        self.identity_api = Manager()
        self.token_api = token.Manager()
        self.policy_api = policy.Manager()
        super(RoleController, self).__init__()

    # COMPAT(essex-3)
    def get_user_roles(self, context, user_id, tenant_id=None):
        """Get the roles for a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.

        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant ID required')

        user = self.identity_api.get_user(context, user_id)
        if user is None:
            raise exception.UserNotFound(user_id=user_id)
        tenant = self.identity_api.get_tenant(context, tenant_id)
        if tenant is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)

        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        return {'roles': [self.identity_api.get_role(context, x)
                          for x in roles]}

    # CRUD extension
    def get_role(self, context, role_id):
        self.assert_admin(context)
        role_ref = self.identity_api.get_role(context, role_id)
        if not role_ref:
            raise exception.RoleNotFound(role_id=role_id)
        return {'role': role_ref}

    def create_role(self, context, role):
        role = self._normalize_dict(role)
        self.assert_admin(context)
        role_id = uuid.uuid4().hex
        role['id'] = role_id
        role_ref = self.identity_api.create_role(context, role_id, role)
        return {'role': role_ref}

    def delete_role(self, context, role_id):
        self.assert_admin(context)
        # get_role raises RoleNotFound for us if the role is missing.
        self.get_role(context, role_id)
        self.identity_api.delete_role(context, role_id)

    def get_roles(self, context):
        self.assert_admin(context)
        roles = self.identity_api.list_roles(context)
        # TODO(termie): probably inefficient at some point
        return {'roles': roles}

    def add_role_to_user(self, context, user_id, role_id, tenant_id=None):
        """Add a role to a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.

        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant_id required')

        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        if self.identity_api.get_role(context, role_id) is None:
            raise exception.RoleNotFound(role_id=role_id)

        # This still has the weird legacy semantics that adding a role to
        # a user also adds them to a tenant
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        self.identity_api.add_role_to_user_and_tenant(
            context, user_id, tenant_id, role_id)
        role_ref = self.identity_api.get_role(context, role_id)
        return {'role': role_ref}

    def remove_role_from_user(self, context, user_id, role_id,
                              tenant_id=None):
        """Remove a role from a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.

        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                                   'tenant_id required')

        if self.identity_api.get_user(context, user_id) is None:
            raise exception.UserNotFound(user_id=user_id)
        if self.identity_api.get_tenant(context, tenant_id) is None:
            raise exception.TenantNotFound(tenant_id=tenant_id)
        if self.identity_api.get_role(context, role_id) is None:
            raise exception.RoleNotFound(role_id=role_id)

        # This still has the weird legacy semantics that adding a role to
        # a user also adds them to a tenant, so we must follow up on that
        self.identity_api.remove_role_from_user_and_tenant(
            context, user_id, tenant_id, role_id)
        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        if not roles:
            # Last role removed: drop the tenant membership too.
            self.identity_api.remove_user_from_tenant(
                context, tenant_id, user_id)
        return

    # COMPAT(diablo): CRUD extension
    def get_role_refs(self, context, user_id):
        """Ultimate hack to get around having to make role_refs first-class.

        This will basically iterate over the various roles the user has in
        all tenants the user is a member of and create fake role_refs where
        the id encodes the user-tenant-role information so we can look up
        the appropriate data when we need to delete them.

        """
        self.assert_admin(context)
        user_ref = self.identity_api.get_user(context, user_id)
        tenant_ids = self.identity_api.get_tenants_for_user(context, user_id)
        o = []
        for tenant_id in tenant_ids:
            role_ids = self.identity_api.get_roles_for_user_and_tenant(
                context, user_id, tenant_id)
            for role_id in role_ids:
                ref = {'roleId': role_id,
                       'tenantId': tenant_id,
                       'userId': user_id}
                # The fake ref id is a query-string encoding of the triple;
                # delete_role_ref decodes it with urlparse.parse_qs.
                ref['id'] = urllib.urlencode(ref)
                o.append(ref)
        return {'roles': o}

    # COMPAT(diablo): CRUD extension
    def create_role_ref(self, context, user_id, role):
        """This is actually used for adding a user to a tenant.

        In the legacy data model adding a user to a tenant required setting
        a role.

        """
        self.assert_admin(context)
        # TODO(termie): for now we're ignoring the actual role
        tenant_id = role.get('tenantId')
        role_id = role.get('roleId')
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        self.identity_api.add_role_to_user_and_tenant(
            context, user_id, tenant_id, role_id)
        role_ref = self.identity_api.get_role(context, role_id)
        return {'role': role_ref}

    # COMPAT(diablo): CRUD extension
    def delete_role_ref(self, context, user_id, role_ref_id):
        """This is actually used for deleting a user from a tenant.

        In the legacy data model removing a user from a tenant required
        deleting a role.

        To emulate this, we encode the tenant and role in the role_ref_id,
        and if this happens to be the last role for the user-tenant pair,
        we remove the user from the tenant.

        """
        self.assert_admin(context)
        # TODO(termie): for now we're ignoring the actual role
        role_ref_ref = urlparse.parse_qs(role_ref_id)
        tenant_id = role_ref_ref.get('tenantId')[0]
        role_id = role_ref_ref.get('roleId')[0]
        self.identity_api.remove_role_from_user_and_tenant(
            context, user_id, tenant_id, role_id)
        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        if not roles:
            self.identity_api.remove_user_from_tenant(
                context, tenant_id, user_id)
./CrossVul/dataset_final_sorted/CWE-287/py/good_3757_0
crossvul-python_data_good_2505_4
# -*- coding: utf-8 -*-
# Zulip registration / realm-creation views: confirmation-link handling,
# account registration, realm creation, and the "find my account" flow.
from typing import Any, List, Dict, Mapping, Optional, Text

from django.utils.translation import ugettext as _
from django.conf import settings
from django.contrib.auth import authenticate, get_backends
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponseForbidden, HttpResponse, HttpRequest
from django.shortcuts import redirect, render
from django.template import RequestContext, loader
from django.utils.timezone import now
from django.core.exceptions import ValidationError
from django.core import validators
from zerver.context_processors import get_realm_from_request
from zerver.models import UserProfile, Realm, Stream, PreregistrationUser, MultiuseInvite, \
    name_changes_disabled, email_to_username, email_allowed_for_realm, \
    get_realm, get_user_profile_by_email
from zerver.lib.send_email import send_email, FromAddress
from zerver.lib.events import do_events_register
from zerver.lib.actions import do_change_password, do_change_full_name, do_change_is_admin, \
    do_activate_user, do_create_user, do_create_realm, \
    user_email_is_unique, compute_mit_user_fullname, validate_email_for_realm, \
    do_set_user_display_setting
from zerver.forms import RegistrationForm, HomepageForm, RealmCreationForm, \
    CreateUserForm, FindMyTeamForm
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
from zerver.decorator import require_post, has_request_variables, \
    JsonableError, REQ, do_login
from zerver.lib.onboarding import send_initial_pms, setup_initial_streams, \
    setup_initial_private_stream, send_initial_realm_messages
from zerver.lib.response import json_success
from zerver.lib.subdomains import get_subdomain, is_root_domain_available
from zerver.lib.timezone import get_all_timezones
from zproject.backends import ldap_auth_enabled, password_auth_enabled, ZulipLDAPAuthBackend
from confirmation.models import Confirmation, RealmCreationKey, ConfirmationKeyException, \
    check_key_is_valid, create_confirmation_link, get_object_from_key, \
    render_confirmation_key_error

import logging
import requests
import smtplib
import ujson

# six.moves.urllib gives the Python 3 urllib.parse layout on Python 2.
from six.moves import urllib


def redirect_and_log_into_subdomain(realm, full_name, email_address, is_signup=False):
    # type: (Realm, Text, Text, bool) -> HttpResponse
    # Redirect to the realm's subdomain login endpoint, carrying the user's
    # identity in a short-lived signed cookie scoped to the parent domain.
    subdomain_login_uri = ''.join([
        realm.uri,
        reverse('zerver.views.auth.log_into_subdomain')
    ])
    domain = settings.EXTERNAL_HOST.split(':')[0]
    response = redirect(subdomain_login_uri)

    data = {'name': full_name, 'email': email_address, 'subdomain': realm.subdomain,
            'is_signup': is_signup}
    # Creating a signed cookie so that it cannot be tampered with.
    # Cookie and the signature expire in 15 seconds.
    response.set_signed_cookie('subdomain.signature',
                               ujson.dumps(data),
                               expires=15,
                               domain=domain,
                               salt='zerver.views.auth')
    return response


@require_post
def accounts_register(request):
    # type: (HttpRequest) -> HttpResponse
    # Handles the POST of the registration form reached via a confirmation
    # link; covers both normal signup and new-realm creation.
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    realm_creation = prereg_user.realm_creation
    password_required = prereg_user.password_required

    validators.validate_email(email)
    if realm_creation:
        # For creating a new realm, there is no existing realm or domain
        realm = None
    else:
        realm = get_realm(get_subdomain(request))
        # The PreregistrationUser must be bound to the realm of the subdomain
        # being registered on; otherwise the link is stale or forged.
        if prereg_user.realm is None:
            return render(request, 'confirmation/link_expired.html')
        if prereg_user.realm != realm:
            return render(request, 'confirmation/link_does_not_exist.html')

    if realm and not email_allowed_for_realm(email, realm):
        return render(request, "zerver/closed_realm.html",
                      context={"closed_domain_name": realm.name})

    if realm and realm.deactivated:
        # The user is trying to register for a deactivated realm. Advise them
        # to contact support.
        return redirect_to_deactivation_notice()

    try:
        validate_email_for_realm(realm, email)
    except ValidationError:
        # Email already registered in this realm: send them to login instead.
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' +
                                    urllib.parse.quote_plus(email))

    name_validated = False
    full_name = None

    if request.POST.get('from_confirmation'):
        # First hit after following the confirmation link: pre-fill the form.
        try:
            del request.session['authenticated_full_name']
        except KeyError:
            pass
        if realm is not None and realm.is_zephyr_mirror_realm:
            # For MIT users, we can get an authoritative name from Hesiod.
            # Technically we should check that this is actually an MIT
            # realm, but we can cross that bridge if we ever get a non-MIT
            # zephyr mirroring realm.
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""},
                realm_creation=realm_creation)
            name_validated = True
        elif settings.POPULATE_PROFILE_VIA_LDAP:
            for backend in get_backends():
                if isinstance(backend, LDAPBackend):
                    ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs
                    try:
                        ldap_full_name = ldap_attrs[settings.AUTH_LDAP_USER_ATTR_MAP['full_name']][0]
                        request.session['authenticated_full_name'] = ldap_full_name
                        name_validated = True
                        # We don't use initial= here, because if the form is
                        # complete (that is, no additional fields need to be
                        # filled out by the user) we want the form to validate,
                        # so they can be directly registered without having to
                        # go through this interstitial.
                        form = RegistrationForm({'full_name': ldap_full_name},
                                                realm_creation=realm_creation)
                        # FIXME: This will result in the user getting
                        # validation errors if they have to enter a password.
                        # Not relevant for ONLY_SSO, though.
                        break
                    except TypeError:
                        # Let the user fill out a name and/or try another backend
                        form = RegistrationForm(realm_creation=realm_creation)
        elif 'full_name' in request.POST:
            form = RegistrationForm(
                initial={'full_name': request.POST.get('full_name')},
                realm_creation=realm_creation
            )
        else:
            form = RegistrationForm(realm_creation=realm_creation)
    else:
        postdata = request.POST.copy()
        if name_changes_disabled(realm):
            # If we populate profile information via LDAP and we have a
            # verified name from you on file, use that. Otherwise, fall
            # back to the full name in the request.
            try:
                postdata.update({'full_name': request.session['authenticated_full_name']})
                name_validated = True
            except KeyError:
                pass
        form = RegistrationForm(postdata, realm_creation=realm_creation)
        if not (password_auth_enabled(realm) and password_required):
            form['password'].field.required = False

    if form.is_valid():
        if password_auth_enabled(realm):
            password = form.cleaned_data['password']
        else:
            # SSO users don't need no passwords
            password = None

        if realm_creation:
            string_id = form.cleaned_data['realm_subdomain']
            realm_name = form.cleaned_data['realm_name']
            realm = do_create_realm(string_id, realm_name)
            setup_initial_streams(realm)
        assert(realm is not None)

        full_name = form.cleaned_data['full_name']
        short_name = email_to_username(email)

        timezone = u""
        if 'timezone' in request.POST and request.POST['timezone'] in get_all_timezones():
            timezone = request.POST['timezone']

        try:
            existing_user_profile = get_user_profile_by_email(email)
        except UserProfile.DoesNotExist:
            existing_user_profile = None

        return_data = {}  # type: Dict[str, bool]
        if ldap_auth_enabled(realm):
            # If the user was authenticated using an external SSO
            # mechanism like Google or GitHub auth, then authentication
            # will have already been done before creating the
            # PreregistrationUser object with password_required=False, and
            # so we don't need to worry about passwords.
            #
            # If instead the realm is using EmailAuthBackend, we will
            # set their password above.
            #
            # But if the realm is using LDAPAuthBackend, we need to verify
            # their LDAP password (which will, as a side effect, create
            # the user account) here using authenticate.
            auth_result = authenticate(request,
                                       username=email,
                                       password=password,
                                       realm_subdomain=realm.subdomain,
                                       return_data=return_data)
            if auth_result is None:
                # TODO: This probably isn't going to give a
                # user-friendly error message, but it doesn't
                # particularly matter, because the registration form
                # is hidden for most users.
                return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' +
                                            urllib.parse.quote_plus(email))

            # Since we'll have created a user, we now just log them in.
            return login_and_go_to_home(request, auth_result)
        elif existing_user_profile is not None and existing_user_profile.is_mirror_dummy:
            # Reactivate an existing mirror-dummy account in place.
            user_profile = existing_user_profile
            do_activate_user(user_profile)
            do_change_password(user_profile, password)
            do_change_full_name(user_profile, full_name, user_profile)
            do_set_user_display_setting(user_profile, 'timezone', timezone)
        else:
            user_profile = do_create_user(email, password, realm, full_name, short_name,
                                          prereg_user=prereg_user,
                                          is_realm_admin=realm_creation,
                                          tos_version=settings.TOS_VERSION,
                                          timezone=timezone,
                                          newsletter_data={"IP": request.META['REMOTE_ADDR']})
            # Note: Any logic like this must also be replicated in
            # ZulipLDAPAuthBackend and zerver/views/users.py. This is
            # ripe for a refactoring, though care is required to avoid
            # import loops with zerver/lib/actions.py and zerver/lib/onboarding.py.
            send_initial_pms(user_profile)

        if realm_creation:
            setup_initial_private_stream(user_profile)
            send_initial_realm_messages(realm)

        if realm_creation:
            # Because for realm creation, registration happens on the
            # root domain, we need to log them into the subdomain for
            # their new realm.
            return redirect_and_log_into_subdomain(realm, full_name, email)

        # This dummy_backend check below confirms the user is
        # authenticating to the correct subdomain.
        auth_result = authenticate(username=user_profile.email,
                                   realm_subdomain=realm.subdomain,
                                   return_data=return_data,
                                   use_dummy_backend=True)
        if return_data.get('invalid_subdomain'):
            # By construction, this should never happen.
            logging.error("Subdomain mismatch in registration %s: %s" % (
                realm.subdomain, user_profile.email,))
            return redirect('/')

        return login_and_go_to_home(request, auth_result)

    # Form invalid (or first render): show the registration page.
    return render(
        request,
        'zerver/register.html',
        context={'form': form,
                 'email': email,
                 'key': key,
                 'full_name': request.session.get('authenticated_full_name', None),
                 'lock_name': name_validated and name_changes_disabled(realm),
                 # password_auth_enabled is normally set via our context processor,
                 # but for the registration form, there is no logged in user yet, so
                 # we have to set it here.
                 'creating_new_team': realm_creation,
                 'password_required': password_auth_enabled(realm) and password_required,
                 'password_auth_enabled': password_auth_enabled(realm),
                 'root_domain_available': is_root_domain_available(),
                 'MAX_REALM_NAME_LENGTH': str(Realm.MAX_REALM_NAME_LENGTH),
                 'MAX_NAME_LENGTH': str(UserProfile.MAX_NAME_LENGTH),
                 'MAX_PASSWORD_LENGTH': str(form.MAX_PASSWORD_LENGTH),
                 'MAX_REALM_SUBDOMAIN_LENGTH': str(Realm.MAX_REALM_SUBDOMAIN_LENGTH)
                 }
    )


def login_and_go_to_home(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    # Mark the user as having been just created, so no "new login" email is sent
    user_profile.just_registered = True
    do_login(request, user_profile)
    return HttpResponseRedirect(user_profile.realm.uri +
                                reverse('zerver.views.home.home'))


def create_preregistration_user(email, request, realm_creation=False,
                                password_required=True):
    # type: (Text, HttpRequest, bool, bool) -> HttpResponse
    # For realm creation there is no realm yet, so the PreregistrationUser is
    # created with realm=None; otherwise it is bound to the request subdomain.
    realm = None
    if not realm_creation:
        realm = get_realm(get_subdomain(request))
    return PreregistrationUser.objects.create(email=email,
                                              realm_creation=realm_creation,
                                              password_required=password_required,
                                              realm=realm)


def send_registration_completion_email(email, request, realm_creation=False, streams=None):
    # type: (str, HttpRequest, bool, Optional[List[Stream]]) -> None
    """
    Send an email with a confirmation link to the provided e-mail so the user
    can complete their registration.
    """
    prereg_user = create_preregistration_user(email, request, realm_creation)
    if streams is not None:
        prereg_user.streams = streams
        prereg_user.save()
    activation_url = create_confirmation_link(prereg_user, request.get_host(),
                                              Confirmation.USER_REGISTRATION)
    send_email('zerver/emails/confirm_registration', to_email=email,
               from_address=FromAddress.NOREPLY,
               context={'activate_url': activation_url})
    if settings.DEVELOPMENT and realm_creation:
        # In development, expose the key so tests/dev flows can fetch it via
        # the confirmation_key view below.
        request.session['confirmation_key'] = {'confirmation_key': activation_url.split('/')[-1]}


def redirect_to_email_login_url(email):
    # type: (str) -> HttpResponseRedirect
    # Used when the address is already registered: bounce to login with the
    # email pre-filled.
    login_url = reverse('django.contrib.auth.views.login')
    email = urllib.parse.quote_plus(email)
    redirect_url = login_url + '?already_registered=' + email
    return HttpResponseRedirect(redirect_url)


def create_realm(request, creation_key=None):
    # type: (HttpRequest, Optional[Text]) -> HttpResponse
    if not settings.OPEN_REALM_CREATION:
        if creation_key is None:
            return render(request, "zerver/realm_creation_failed.html",
                          context={'message': _('New organization creation disabled.')})
        elif not check_key_is_valid(creation_key):
            return render(request, "zerver/realm_creation_failed.html",
                          context={'message': _('The organization creation link has expired'
                                                ' or is not valid.')})

    # When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm,
    # subject to a few restrictions on their email address.
    if request.method == 'POST':
        form = RealmCreationForm(request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            try:
                send_registration_completion_email(email, request, realm_creation=True)
            except smtplib.SMTPException as e:
                logging.error('Error in create_realm: %s' % (str(e),))
                return HttpResponseRedirect("/config-error/smtp")

            # Single-use creation keys are consumed once the email is sent.
            if (creation_key is not None and check_key_is_valid(creation_key)):
                RealmCreationKey.objects.get(creation_key=creation_key).delete()
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email}))
        # Form invalid: if the email is already taken, redirect to login
        # rather than re-rendering with errors.
        try:
            email = request.POST['email']
            user_email_is_unique(email)
        except ValidationError:
            # Maybe the user is trying to log in
            return redirect_to_email_login_url(email)
    else:
        form = RealmCreationForm()
    return render(request,
                  'zerver/create_realm.html',
                  context={'form': form, 'current_url': request.get_full_path},
                  )


def confirmation_key(request):
    # type: (HttpRequest) -> HttpResponse
    # Development-only helper: returns the confirmation key stashed in the
    # session by send_registration_completion_email.
    return json_success(request.session.get('confirmation_key'))


def show_deactivation_notice(request):
    # type: (HttpRequest) -> HttpResponse
    realm = get_realm_from_request(request)
    if realm and realm.deactivated:
        return render(request, "zerver/deactivated.html",
                      context={"deactivated_domain_name": realm.name})

    return HttpResponseRedirect(reverse('zerver.views.auth.login_page'))


def redirect_to_deactivation_notice():
    # type: () -> HttpResponse
    return HttpResponseRedirect(reverse('zerver.views.registration.show_deactivation_notice'))


def accounts_home(request, multiuse_object=None):
    # type: (HttpRequest, Optional[MultiuseInvite]) -> HttpResponse
    realm = get_realm(get_subdomain(request))
    if realm and realm.deactivated:
        return redirect_to_deactivation_notice()

    from_multiuse_invite = False
    streams_to_subscribe = None

    if multiuse_object:
        # A multiuse invite pins both the target realm and the streams the
        # new user will be subscribed to.
        realm = multiuse_object.realm
        streams_to_subscribe = multiuse_object.streams.all()
        from_multiuse_invite = True

    if request.method == 'POST':
        form = HomepageForm(request.POST, realm=realm, from_multiuse_invite=from_multiuse_invite)
        if form.is_valid():
            email = form.cleaned_data['email']
            try:
                send_registration_completion_email(email, request, streams=streams_to_subscribe)
            except smtplib.SMTPException as e:
                logging.error('Error in accounts_home: %s' % (str(e),))
                return HttpResponseRedirect("/config-error/smtp")

            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email}))

        email = request.POST['email']
        try:
            validate_email_for_realm(realm, email)
        except ValidationError:
            # Address already registered here: send them to login.
            return redirect_to_email_login_url(email)
    else:
        form = HomepageForm(realm=realm)
    return render(request,
                  'zerver/accounts_home.html',
                  context={'form': form, 'current_url': request.get_full_path,
                           'from_multiuse_invite': from_multiuse_invite},
                  )


def accounts_home_from_multiuse_invite(request, confirmation_key):
    # type: (HttpRequest, str) -> HttpResponse
    multiuse_object = None
    try:
        multiuse_object = get_object_from_key(confirmation_key)
        # Required for oAuth2
        request.session["multiuse_object_key"] = confirmation_key
    except ConfirmationKeyException as exception:
        realm = get_realm_from_request(request)
        # Open realms fall through to the normal signup page even with a bad
        # key; invite-required realms get the key-error page.
        if realm is None or realm.invite_required:
            return render_confirmation_key_error(request, exception)
    return accounts_home(request, multiuse_object=multiuse_object)


def generate_204(request):
    # type: (HttpRequest) -> HttpResponse
    # Empty 204 response (used for connectivity checks).
    return HttpResponse(content=None, status=204)


def find_account(request):
    # type: (HttpRequest) -> HttpResponse
    url = reverse('zerver.views.registration.find_account')

    emails = []  # type: List[Text]
    if request.method == 'POST':
        form = FindMyTeamForm(request.POST)
        if form.is_valid():
            emails = form.cleaned_data['emails']
            for user_profile in UserProfile.objects.filter(
                    email__in=emails, is_active=True, is_bot=False, realm__deactivated=False):
                send_email('zerver/emails/find_team', to_user_id=user_profile.id,
                           context={'user_profile': user_profile})

            # Note: Show all the emails in the result otherwise this
            # feature can be used to ascertain which email addresses
            # are associated with Zulip.
            data = urllib.parse.urlencode({'emails': ','.join(emails)})
            return redirect(url + "?" + data)
    else:
        form = FindMyTeamForm()
        result = request.GET.get('emails')
        # The below validation is perhaps unnecessary, in that we
        # shouldn't get able to get here with an invalid email unless
        # the user hand-edits the URLs.
        if result:
            for email in result.split(','):
                try:
                    validators.validate_email(email)
                    emails.append(email)
                except ValidationError:
                    pass

    return render(request,
                  'zerver/find_account.html',
                  context={'form': form, 'current_url': lambda: url,
                           'emails': emails},)
./CrossVul/dataset_final_sorted/CWE-287/py/good_2505_4
crossvul-python_data_good_1224_0
# -*- coding: utf-8 -*-
"""Zulip's custom lint rules, consumed by zulint's RuleList engine.

Each rule is a dict with a regex `pattern` plus optional keys controlling
which files/lines it applies to (`include_only`, `exclude`, `exclude_line`,
`exclude_pattern`) and self-test examples (`good_lines`, `bad_lines`).
"""
from __future__ import print_function
from __future__ import absolute_import

from typing import List, TYPE_CHECKING

from zulint.custom_rules import RuleList

if TYPE_CHECKING:
    from zulint.custom_rules import Rule

# Rule help:
# By default, a rule applies to all files within the extension for which it
# is specified (e.g. all .py files).
# There are three operators we can use to manually include or exclude files
# from linting for a rule:
# 'exclude': 'set([<path>, ...])' - if <path> is a filename, excludes that
#     file; if <path> is a directory, excludes all files directly below the
#     directory <path>.
# 'exclude_line': 'set([(<path>, <line>), ...])' - excludes all lines
#     matching <line> in the file <path> from linting.
# 'include_only': 'set([<path>, ...])' - includes only those files where
#     <path> is a substring of the filepath.

# Building blocks for the "%-formatting without a tuple" regexes below:
# delimiters that end a "plain code" run, single/double-quoted string
# literals, and bracket groups nested up to five levels deep.
PYDELIMS = r'''"'()\[\]{}#\\'''
PYREG = r"[^{}]".format(PYDELIMS)
PYSQ = r'"(?:[^"\\]|\\.)*"'
PYDQ = r"'(?:[^'\\]|\\.)*'"
PYLEFT = r"[(\[{]"
PYRIGHT = r"[)\]}]"
PYCODE = PYREG
for depth in range(5):
    PYGROUP = r"""(?:{}|{}|{}{}*{})""".format(PYSQ, PYDQ, PYLEFT, PYCODE, PYRIGHT)
    PYCODE = r"""(?:{}|{})""".format(PYREG, PYGROUP)

# Files exempt from the "avoid `subject` as a variable" rule.
FILES_WITH_LEGACY_SUBJECT = {
    # This basically requires a big DB migration:
    'zerver/lib/topic.py',

    # This is for backward compatibility.
    'zerver/tests/test_legacy_subject.py',

    # Other migration-related changes require extreme care.
    'zerver/lib/fix_unreads.py',
    'zerver/tests/test_migrations.py',

    # These use subject in the email sense, and will
    # probably always be exempt:
    'zerver/lib/email_mirror.py',
    'zerver/lib/feedback.py',
    'zerver/tests/test_new_users.py',
    'zerver/tests/test_email_mirror.py',

    # These are tied more to our API than our DB model.
    'zerver/openapi/python_examples.py',
    'zerver/tests/test_openapi.py',

    # This has lots of query data embedded, so it's hard
    # to fix everything until we migrate the DB to "topic".
    'zerver/tests/test_narrow.py',
}

shebang_rules = [
    {'pattern': '^#!',
     'description': "zerver library code shouldn't have a shebang line.",
     'include_only': set(['zerver/'])},
    # /bin/sh and /usr/bin/env are the only two binaries
    # that NixOS provides at a fixed path (outside a
    # buildFHSUserEnv sandbox).
    {'pattern': '^#!(?! *(?:/usr/bin/env|/bin/sh)(?: |$))',
     'description': "Use `#!/usr/bin/env foo` instead of `#!/path/foo`"
                    " for interpreters other than sh."},
    {'pattern': '^#!/usr/bin/env python$',
     'description': "Use `#!/usr/bin/env python3` instead of `#!/usr/bin/env python`."}
]  # type: List[Rule]

trailing_whitespace_rule = {
    'pattern': r'\s+$',
    'strip': '\n',
    'description': 'Fix trailing whitespace'
}  # type: Rule

whitespace_rules = [
    # This linter should be first since bash_rules depends on it.
    trailing_whitespace_rule,
    {'pattern': 'http://zulip.readthedocs.io',
     'description': 'Use HTTPS when linking to ReadTheDocs',
     },
    {'pattern': '\t',
     'strip': '\n',
     'exclude': set(['tools/ci/success-http-headers.txt']),
     'description': 'Fix tab-based whitespace'},
]  # type: List[Rule]

comma_whitespace_rule = [
    # NOTE(review): the good_lines/bad_lines examples below read identically
    # here -- the runs of spaces that distinguish them appear to have been
    # collapsed to single spaces; restore the multi-space bad examples from
    # upstream (e.g. 'foo(1,  2, 3)').
    {'pattern': ', {2,}[^#/ ]',
     'exclude': set(['zerver/tests', 'frontend_tests/node_tests', 'corporate/tests']),
     'description': "Remove multiple whitespaces after ','",
     'good_lines': ['foo(1, 2, 3)', 'foo = bar # some inline comment'],
     'bad_lines': ['foo(1, 2, 3)', 'foo(1, 2, 3)']},
]  # type: List[Rule]

markdown_whitespace_rules = list([rule for rule in whitespace_rules if rule['pattern'] != r'\s+$']) + [
    # Two spaces trailing a line with other content is okay--it's a markdown line break.
    # This rule finds one space trailing a non-space, three or more trailing spaces, and
    # spaces on an empty line.
    {'pattern': r'((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)',
     'strip': '\n',
     'description': 'Fix trailing whitespace'},
    {'pattern': '^#+[A-Za-z0-9]',
     'strip': '\n',
     'description': 'Missing space after # in heading',
     'good_lines': ['### some heading', '# another heading'],
     'bad_lines': ['###some heading', '#another heading']},
]

js_rules = RuleList(
    langs=['js'],
    rules=[
        {'pattern': 'subject|SUBJECT',
         'exclude': set(['static/js/util.js', 'frontend_tests/']),
         'exclude_pattern': 'emails',
         'description': 'avoid subject in JS code',
         'good_lines': ['topic_name'],
         'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN']},
        {'pattern': r'[^_]function\(',
         'description': 'The keyword "function" should be followed by a space'},
        {'pattern': 'msgid|MSGID',
         'description': 'Avoid using "msgid" as a variable name; use "message_id" instead.'},
        {'pattern': r'.*blueslip.warning\(.*',
         'description': 'The module blueslip has no function warning, try using blueslip.warn'},
        {'pattern': '[)]{$',
         'description': 'Missing space between ) and {'},
        {'pattern': r'i18n\.t\([^)]+[^,\{\)]$',
         'description': 'i18n string should not be a multiline string'},
        {'pattern': r'''i18n\.t\(['"].+?['"]\s*\+''',
         'description': 'Do not concatenate arguments within i18n.t()'},
        {'pattern': r'i18n\.t\(.+\).*\+',
         'description': 'Do not concatenate i18n strings'},
        {'pattern': r'\+.*i18n\.t\(.+\)',
         'description': 'Do not concatenate i18n strings'},
        {'pattern': '[.]includes[(]',
         'exclude': {'frontend_tests/'},
         'description': '.includes() is incompatible with Internet Explorer. Use .indexOf() !== -1 instead.'},
        {'pattern': '[.]html[(]',
         'exclude_pattern': r'''[.]html[(]("|'|render_|html|message.content|sub.rendered_description|i18n.t|rendered_|$|[)]|error_text|widget_elem|[$]error|[$][(]"<p>"[)])''',
         'exclude': {'static/js/portico', 'static/js/lightbox.js', 'static/js/ui_report.js',
                     'static/js/confirm_dialog.js', 'frontend_tests/'},
         'description': 'Setting HTML content with jQuery .html() can lead to XSS security bugs. Consider .text() or using rendered_foo as a variable name if content comes from handlebars and thus is already sanitized.'},
        {'pattern': '["\']json/',
         'description': 'Relative URL for JSON route not supported by i18n'},
        # This rule is constructed with + to avoid triggering on itself
        {'pattern': " =" + '[^ =>~"]',
         'description': 'Missing whitespace after "="'},
        {'pattern': '^[ ]*//[A-Za-z0-9]',
         'description': 'Missing space after // in comment'},
        {'pattern': 'if[(]',
         'description': 'Missing space between if and ('},
        {'pattern': 'else{$',
         'description': 'Missing space between else and {'},
        {'pattern': '^else {$',
         'description': 'Write JS else statements on same line as }'},
        {'pattern': '^else if',
         'description': 'Write JS else statements on same line as }'},
        {'pattern': 'console[.][a-z]',
         'exclude': set(['static/js/blueslip.js',
                         'frontend_tests/zjsunit',
                         'frontend_tests/casper_lib/common.js',
                         'frontend_tests/node_tests',
                         'static/js/debug.js']),
         'description': 'console.log and similar should not be used in webapp'},
        {'pattern': r'''[.]text\(["'][a-zA-Z]''',
         'description': 'Strings passed to $().text should be wrapped in i18n.t() for internationalization',
         'exclude': set(['frontend_tests/node_tests/'])},
        {'pattern': r'''compose_error\(["']''',
         'description': 'Argument to compose_error should be a literal string enclosed '
                        'by i18n.t()'},
        {'pattern': r'ui.report_success\(',
         'description': 'Deprecated function, use ui_report.success.'},
        {'pattern': r'''report.success\(["']''',
         'description': 'Argument to report_success should be a literal string enclosed '
                        'by i18n.t()'},
        {'pattern': r'ui.report_error\(',
         'description': 'Deprecated function, use ui_report.error.'},
        {'pattern': r'''report.error\(["'][^'"]''',
         'description': 'Argument to ui_report.error should be a literal string enclosed '
                        'by i18n.t()',
         'good_lines': ['ui_report.error("")', 'ui_report.error(_("text"))'],
         'bad_lines': ['ui_report.error("test")']},
        {'pattern': r'\$\(document\)\.ready\(',
         'description': "`Use $(f) rather than `$(document).ready(f)`",
         'good_lines': ['$(function () {foo();}'],
         'bad_lines': ['$(document).ready(function () {foo();}']},
        {'pattern': '[$][.](get|post|patch|delete|ajax)[(]',
         'description': "Use channel module for AJAX calls",
         'exclude': set([
             # Internal modules can do direct network calls
             'static/js/blueslip.js',
             'static/js/channel.js',
             # External modules that don't include channel.js
             'static/js/stats/',
             'static/js/portico/',
             'static/js/billing/',
         ]),
         'good_lines': ['channel.get(...)'],
         'bad_lines': ['$.get()', '$.post()', '$.ajax()']},
        {'pattern': 'style ?=',
         'description': "Avoid using the `style=` attribute; we prefer styling in CSS files",
         'exclude': set([
             'frontend_tests/node_tests/copy_and_paste.js',
             'frontend_tests/node_tests/upload.js',
             'frontend_tests/node_tests/templates.js',
             'static/js/upload.js',
             'static/js/stream_color.js',
         ]),
         'good_lines': ['#my-style {color: blue;}'],
         'bad_lines': ['<p style="color: blue;">Foo</p>', 'style = "color: blue;"']},
        *whitespace_rules,
        *comma_whitespace_rule,
    ],
)

python_rules = RuleList(
    langs=['py'],
    rules=[
        {'pattern': 'subject|SUBJECT',
         'exclude_pattern': 'subject to the|email|outbox',
         'description': 'avoid subject as a var',
         'good_lines': ['topic_name'],
         'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN'],
         'exclude': FILES_WITH_LEGACY_SUBJECT,
         'include_only': set([
             'zerver/data_import/',
             'zerver/lib/',
             'zerver/tests/',
             'zerver/views/'])},
        {'pattern': 'msgid|MSGID',
         'exclude': set(['tools/check-capitalization',
                         'tools/i18n/tagmessages']),
         'description': 'Avoid using "msgid" as a variable name; use "message_id" instead.'},
        {'pattern': '^(?!#)@login_required',
         'description': '@login_required is unsupported; use @zulip_login_required',
         'good_lines': ['@zulip_login_required', '# foo @login_required'],
         'bad_lines': ['@login_required', ' @login_required']},
        {'pattern': '^user_profile[.]save[(][)]',
         'description': 'Always pass update_fields when saving user_profile objects',
         'exclude_line': set([
             ('zerver/lib/actions.py',
              "user_profile.save() # Can't use update_fields because of how the foreign key works."),
         ]),
         'exclude': set(['zerver/tests', 'zerver/lib/create_user.py']),
         'good_lines': ['user_profile.save(update_fields=["pointer"])'],
         'bad_lines': ['user_profile.save()']},
        {'pattern': r'^[^"]*"[^"]*"%\(',
         'description': 'Missing space around "%"',
         'good_lines': ['"%s" % ("foo")', '"%s" % (foo)'],
         'bad_lines': ['"%s"%("foo")', '"%s"%(foo)']},
        {'pattern': r"^[^']*'[^']*'%\(",
         'description': 'Missing space around "%"',
         'good_lines': ["'%s' % ('foo')", "'%s' % (foo)"],
         'bad_lines': ["'%s'%('foo')", "'%s'%(foo)"]},
        {'pattern': 'self: Any',
         'description': 'you can omit Any annotation for self',
         'good_lines': ['def foo (self):'],
         'bad_lines': ['def foo(self: Any):']},
        # This rule is constructed with + to avoid triggering on itself
        {'pattern': " =" + '[^ =>~"]',
         'description': 'Missing whitespace after "="',
         'good_lines': ['a = b', '5 == 6'],
         'bad_lines': ['a =b', 'asdf =42']},
        {'pattern': r'":\w[^"]*$',
         'description': 'Missing whitespace after ":"',
         'exclude': set(['zerver/tests/test_push_notifications.py']),
         'good_lines': ['"foo": bar', '"some:string:with:colons"'],
         'bad_lines': ['"foo":bar', '"foo":1']},
        {'pattern': r"':\w[^']*$",
         'description': 'Missing whitespace after ":"',
         'good_lines': ["'foo': bar", "'some:string:with:colons'"],
         'bad_lines': ["'foo':bar", "'foo':1"]},
        {'pattern': r"^\s+#\w",
         'strip': '\n',
         'exclude': set(['tools/droplets/create.py']),
         'description': 'Missing whitespace after "#"',
         'good_lines': ['a = b # some operation', '1+2 # 3 is the result'],
         'bad_lines': [' #some operation', ' #not valid!!!']},
        {'pattern': "assertEquals[(]",
         'description': 'Use assertEqual, not assertEquals (which is deprecated).',
         'good_lines': ['assertEqual(1, 2)'],
         'bad_lines': ['assertEquals(1, 2)']},
        {'pattern': "== None",
         'description': 'Use `is None` to check whether something is None',
         'good_lines': ['if foo is None'],
         'bad_lines': ['foo == None']},
        {'pattern': "type:[(]",
         'description': 'Missing whitespace after ":" in type annotation',
         'good_lines': ['# type: (Any, Any)', 'colon:separated:string:containing:type:as:keyword'],
         'bad_lines': ['# type:(Any, Any)']},
        {'pattern': "type: ignore$",
         'exclude': set(['tools/tests',
                         'zerver/lib/test_runner.py',
                         'zerver/tests']),
         'description': '"type: ignore" should always end with "# type: ignore # explanation for why"',
         'good_lines': ['foo = bar # type: ignore # explanation'],
         'bad_lines': ['foo = bar # type: ignore']},
        {'pattern': "# type [(]",
         'description': 'Missing : after type in type annotation',
         'good_lines': ['foo = 42 # type: int', '# type: (str, int) -> None'],
         'bad_lines': ['# type (str, int) -> None']},
        {'pattern': "#type",
         'description': 'Missing whitespace after "#" in type annotation',
         'good_lines': ['foo = 42 # type: int'],
         'bad_lines': ['foo = 42 #type: int']},
        {'pattern': r'\b(if|else|while)[(]',
         'description': 'Put a space between statements like if, else, etc. and (.',
         'good_lines': ['if (1 == 2):', 'while (foo == bar):'],
         'bad_lines': ['if(1 == 2):', 'while(foo == bar):']},
        {'pattern': ", [)]",
         'description': 'Unnecessary whitespace between "," and ")"',
         'good_lines': ['foo = (1, 2, 3,)', 'foo(bar, 42)'],
         'bad_lines': ['foo = (1, 2, 3, )']},
        # NOTE(review): as written this pattern matches its own good_lines
        # example; upstream uses two spaces in the pattern and bad example
        # ("%  [(]"), which look collapsed to one space here -- verify.
        {'pattern': "% [(]",
         'description': 'Unnecessary whitespace between "%" and "("',
         'good_lines': ['"foo %s bar" % ("baz",)'],
         'bad_lines': ['"foo %s bar" % ("baz",)']},
        # This next check could have false positives, but it seems pretty
        # rare; if we find any, they can be added to the exclude list for
        # this rule.
        {'pattern': r"""^(?:[^'"#\\]|{}|{})*(?:{}|{})\s*%\s*(?![\s({{\\]|dict\(|tuple\()(?:[^,{}]|{})+(?:$|[,#\\]|{})""".format(
            PYSQ, PYDQ, PYSQ, PYDQ, PYDELIMS, PYGROUP, PYRIGHT),
         'description': 'Used % formatting without a tuple',
         'good_lines': ['"foo %s bar" % ("baz",)'],
         'bad_lines': ['"foo %s bar" % "baz"']},
        {'pattern': r"""^(?:[^'"#\\]|{}|{})*(?:{}|{})\s*%\s*\((?:[^,{}]|{})*\)""".format(
            PYSQ, PYDQ, PYSQ, PYDQ, PYDELIMS, PYGROUP),
         'description': 'Used % formatting with parentheses that do not form a tuple',
         'good_lines': ['"foo %s bar" % ("baz",)"'],
         'bad_lines': ['"foo %s bar" % ("baz")']},
        {'pattern': 'sudo',
         'include_only': set(['scripts/']),
         'exclude': set(['scripts/lib/setup_venv.py']),
         'exclude_line': set([
             ('scripts/lib/zulip_tools.py', 'sudo_args = kwargs.pop(\'sudo_args\', [])'),
             ('scripts/lib/zulip_tools.py', 'args = [\'sudo\'] + sudo_args + [\'--\'] + args'),
         ]),
         'description': 'Most scripts are intended to run on systems without sudo.',
         'good_lines': ['subprocess.check_call(["ls"])'],
         'bad_lines': ['subprocess.check_call(["sudo", "ls"])']},
        {'pattern': 'django.utils.translation',
         'include_only': set(['test/', 'zerver/views/development/']),
         'description': 'Test strings should not be tagged for translation',
         'good_lines': [''],
         'bad_lines': ['django.utils.translation']},
        {'pattern': 'userid',
         'description': 'We prefer user_id over userid.',
         'good_lines': ['id = alice.user_id'],
         'bad_lines': ['id = alice.userid']},
        {'pattern': r'json_success\({}\)',
         'description': 'Use json_success() to return nothing',
         'good_lines': ['return json_success()'],
         'bad_lines': ['return json_success({})']},
        {'pattern': r'\Wjson_error\(_\(?\w+\)',
         'exclude': set(['zerver/tests', 'zerver/views/development/']),
         'description': 'Argument to json_error should be a literal string enclosed by _()',
         'good_lines': ['return json_error(_("string"))'],
         'bad_lines': ['return json_error(_variable)', 'return json_error(_(variable))']},
        {'pattern': r'''\Wjson_error\(['"].+[),]$''',
         'exclude': set(['zerver/tests']),
         'description': 'Argument to json_error should a literal string enclosed by _()'},
        # To avoid JsonableError(_variable) and JsonableError(_(variable))
        {'pattern': r'\WJsonableError\(_\(?\w.+\)',
         'exclude': set(['zerver/tests', 'zerver/views/development/']),
         'description': 'Argument to JsonableError should be a literal string enclosed by _()'},
        {'pattern': r'''\WJsonableError\(["'].+\)''',
         'exclude': set(['zerver/tests', 'zerver/views/development/']),
         'description': 'Argument to JsonableError should be a literal string enclosed by _()'},
        {'pattern': r"""\b_\((?:\s|{}|{})*[^\s'")]""".format(PYSQ, PYDQ),
         'description': 'Called _() on a computed string',
         'exclude_line': set([
             ('zerver/lib/i18n.py', 'result = _(string)'),
         ]),
         'good_lines': ["return json_error(_('No presence data for %s') % (target.email,))"],
         'bad_lines': ["return json_error(_('No presence data for %s' % (target.email,)))"]},
        {'pattern': r'''([a-zA-Z0-9_]+)=REQ\(['"]\1['"]''',
         'description': 'REQ\'s first argument already defaults to parameter name'},
        {'pattern': r'self\.client\.(get|post|patch|put|delete)',
         'description': \
             '''Do not call self.client directly for put/patch/post/get. See WRAPPER_COMMENT in test_helpers.py for details. '''},
        # Directly fetching Message objects in e.g. views code is often a
        # security bug.
        {'pattern': '[^r]Message.objects.get',
         'exclude': set(["zerver/tests",
                         "zerver/lib/onboarding.py",
                         "zilencer/management/commands/add_mock_conversation.py",
                         "zerver/worker/queue_processors.py",
                         "zerver/management/commands/export.py",
                         "zerver/lib/export.py"]),
         'description': 'Please use access_message() to fetch Message objects',
         },
        {'pattern': 'Stream.objects.get',
         'include_only': set(["zerver/views/"]),
         'description': 'Please use access_stream_by_*() to fetch Stream objects',
         },
        {'pattern': 'get_stream[(]',
         'include_only': set(["zerver/views/", "zerver/lib/actions.py"]),
         'exclude_line': set([
             # This one in check_message is kinda terrible, since it's
             # how most instances are written, but better to exclude something than nothing
             ('zerver/lib/actions.py', 'stream = get_stream(stream_name, realm)'),
             ('zerver/lib/actions.py', 'get_stream(admin_realm_signup_notifications_stream, admin_realm)'),
         ]),
         'description': 'Please use access_stream_by_*() to fetch Stream objects',
         },
        {'pattern': 'Stream.objects.filter',
         'include_only': set(["zerver/views/"]),
         'description': 'Please use access_stream_by_*() to fetch Stream objects',
         },
        {'pattern': '^from (zerver|analytics|confirmation)',
         'include_only': set(["/migrations/"]),
         'exclude': set([
             'zerver/migrations/0032_verify_all_medium_avatar_images.py',
             'zerver/migrations/0060_move_avatars_to_be_uid_based.py',
             'zerver/migrations/0104_fix_unreads.py',
             'zerver/migrations/0206_stream_rendered_description.py',
             'zerver/migrations/0209_user_profile_no_empty_password.py',
             'pgroonga/migrations/0002_html_escape_subject.py',
         ]),
         'description': "Don't import models or other code in migrations; see docs/subsystems/schema-migrations.md",
         },
        {'pattern': 'datetime[.](now|utcnow)',
         'include_only': set(["zerver/", "analytics/"]),
         'description': "Don't use datetime in backend code.\n"
                        "See https://zulip.readthedocs.io/en/latest/contributing/code-style.html#naive-datetime-objects",
         },
        {'pattern': r'render_to_response\(',
         'description': "Use render() instead of render_to_response().",
         },
        {'pattern': 'from os.path',
         'description': "Don't use from when importing from the standard library",
         },
        {'pattern': 'import os.path',
         'description': "Use import os instead of import os.path",
         },
        {'pattern': r'(logging|logger)\.warn\W',
         'description': "Logger.warn is a deprecated alias for Logger.warning; Use 'warning' instead of 'warn'.",
         'good_lines': ["logging.warning('I am a warning.')", "logger.warning('warning')"],
         'bad_lines': ["logging.warn('I am a warning.')", "logger.warn('warning')"]},
        {'pattern': r'\.pk',
         'exclude_pattern': '[.]_meta[.]pk',
         'description': "Use `id` instead of `pk`.",
         'good_lines': ['if my_django_model.id == 42', 'self.user_profile._meta.pk'],
         'bad_lines': ['if my_django_model.pk == 42']},
        {'pattern': r'^[ ]*# type: \(',
         'exclude': set([
             # These directories, especially scripts/ and puppet/,
             # have tools that need to run before a Zulip environment
             # is provisioned; in some of those, the `typing` module
             # might not be available yet, so care is required.
             'scripts/',
             'tools/',
             'puppet/',
             # Zerver files that we should just clean.
             'zerver/tests',
             'zerver/openapi/python_examples.py',
             'zerver/lib/request.py',
             'zerver/views/streams.py',
             # thumbor is (currently) python2 only
             'zthumbor/',
         ]),
         'description': 'Comment-style function type annotation. Use Python3 style annotations instead.',
         },
        # NOTE(review): upstream these model-annotation patterns and their
        # example lines use two spaces before '# type:'; the extra space
        # looks collapsed here -- verify against the canonical file.
        {'pattern': r' = models[.].*null=True.*\) # type: (?!Optional)',
         'include_only': {"zerver/models.py"},
         'description': 'Model variable with null=true not annotated as Optional.',
         'good_lines': ['desc = models.TextField(null=True) # type: Optional[Text]',
                        'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream]',
                        'desc = models.TextField() # type: Text',
                        'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Stream'],
         'bad_lines': ['desc = models.CharField(null=True) # type: Text',
                       'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Stream'],
         },
        {'pattern': r' = models[.](?!NullBoolean).*\) # type: Optional',  # Optional tag, except NullBoolean(Field)
         'exclude_pattern': 'null=True',
         'include_only': {"zerver/models.py"},
         'description': 'Model variable annotated with Optional but variable does not have null=true.',
         'good_lines': ['desc = models.TextField(null=True) # type: Optional[Text]',
                        'stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream]',
                        'desc = models.TextField() # type: Text',
                        'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Stream'],
         'bad_lines': ['desc = models.TextField() # type: Optional[Text]',
                       'stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Optional[Stream]'],
         },
        {'pattern': r'[\s([]Text([^\s\w]|$)',
         'exclude': set([
             # We are likely to want to keep these dirs Python 2+3 compatible,
             # since the plan includes extracting them to a separate project eventually.
             'tools/lib',
             # TODO: Update our migrations from Text->str.
             'zerver/migrations/',
             # thumbor is (currently) python2 only
             'zthumbor/',
         ]),
         'description': "Now that we're a Python 3 only codebase, we don't need to use typing.Text. Please use str instead.",
         },
        {'pattern': 'exit[(]1[)]',
         'include_only': set(["/management/commands/"]),
         'description': 'Raise CommandError to exit with failure in management commands',
         },
        *whitespace_rules,
        *comma_whitespace_rule,
    ],
    max_length=110,
    shebang_rules=shebang_rules,
)

bash_rules = RuleList(
    langs=['bash'],
    rules=[
        {'pattern': '#!.*sh [-xe]',
         'description': 'Fix shebang line with proper call to /usr/bin/env for Bash path, change -x|-e switches'
                        ' to set -x|set -e'},
        {'pattern': 'sudo',
         'description': 'Most scripts are intended to work on systems without sudo',
         'include_only': set(['scripts/']),
         'exclude': set([
             'scripts/lib/install',
             'scripts/setup/configure-rabbitmq'
         ]), },
        # Only the trailing-whitespace rule applies to bash.
        *whitespace_rules[0:1],
    ],
    shebang_rules=shebang_rules,
)

css_rules = RuleList(
    langs=['css', 'scss'],
    rules=[
        {'pattern': r'calc\([^+]+\+[^+]+\)',
         'description': "Avoid using calc with '+' operator. See #8403 : in CSS.",
         'good_lines': ["width: calc(20% - -14px);"],
         'bad_lines': ["width: calc(20% + 14px);"]},
        {'pattern': r'^[^:]*:\S[^:]*;$',
         'description': "Missing whitespace after : in CSS",
         'good_lines': ["background-color: white;", "text-size: 16px;"],
         'bad_lines': ["background-color:white;", "text-size:16px;"]},
        {'pattern': '[a-z]{',
         'description': "Missing whitespace before '{' in CSS.",
         'good_lines': ["input {", "body {"],
         'bad_lines': ["input{", "body{"]},
        {'pattern': 'https://',
         'description': "Zulip CSS should have no dependencies on external resources",
         'good_lines': ['background: url(/static/images/landing-page/pycon.jpg);'],
         'bad_lines': ['background: url(https://example.com/image.png);']},
        # NOTE(review): the example lines here look whitespace-collapsed (the
        # good 4-space and bad 2-space indents both show as one space);
        # restore the original indent widths from upstream.
        {'pattern': '^[ ][ ][a-zA-Z0-9]',
         'description': "Incorrect 2-space indentation in CSS",
         'strip': '\n',
         'good_lines': [" color: white;", "color: white;"],
         'bad_lines': [" color: white;"]},
        {'pattern': r'{\w',
         'description': "Missing whitespace after '{' in CSS (should be newline).",
         'good_lines': ["{\n"],
         'bad_lines': ["{color: LightGoldenRodYellow;"]},
        {'pattern': ' thin[ ;]',
         'description': "thin CSS attribute is under-specified, please use 1px.",
         'good_lines': ["border-width: 1px;"],
         'bad_lines': ["border-width: thin;", "border-width: thin solid black;"]},
        {'pattern': ' medium[ ;]',
         'description': "medium CSS attribute is under-specified, please use pixels.",
         'good_lines': ["border-width: 3px;"],
         'bad_lines': ["border-width: medium;", "border: medium solid black;"]},
        {'pattern': ' thick[ ;]',
         'description': "thick CSS attribute is under-specified, please use pixels.",
         'good_lines': ["border-width: 5px;"],
         'bad_lines': ["border-width: thick;", "border: thick solid black;"]},
        {'pattern': r'rgba?\(',
         'description': 'Use of rgb(a) format is banned, Please use hsl(a) instead',
         'good_lines': ['hsl(0, 0%, 0%)', 'hsla(0, 0%, 100%, 0.1)'],
         'bad_lines': ['rgb(0, 0, 0)', 'rgba(255, 255, 255, 0.1)']},
        *whitespace_rules,
        *comma_whitespace_rule,
    ],
)

prose_style_rules = [
    {'pattern': r'[^\/\#\-"]([jJ]avascript)',  # exclude usage in hrefs/divs
     'exclude': set(["docs/documentation/api.md"]),
     'description': "javascript should be spelled JavaScript"},
    {'pattern': r'''[^\/\-\."'\_\=\>]([gG]ithub)[^\.\-\_"\<]''',  # exclude usage in hrefs/divs
     'description': "github should be spelled GitHub"},
    {'pattern': '[oO]rganisation',  # exclude usage in hrefs/divs
     'description': "Organization is spelled with a z",
     'exclude_line': {('docs/translating/french.md', '* organization - **organisation**')}},
    {'pattern': '!!! warning',
     'description': "!!! warning is invalid; it's spelled '!!! warn'"},
    {'pattern': 'Terms of service',
     'description': "The S in Terms of Service is capitalized"},
    {'pattern': '[^-_p]botserver(?!rc)|bot server',
     'description': "Use Botserver instead of botserver or bot server."},
    *comma_whitespace_rule,
]  # type: List[Rule]

html_rules = whitespace_rules + prose_style_rules + [
    {'pattern': 'subject|SUBJECT',
     'exclude': set(['templates/zerver/email.html']),
     'exclude_pattern': 'email subject',
     'description': 'avoid subject in templates',
     'good_lines': ['topic_name'],
     'bad_lines': ['subject="foo"', ' MAX_SUBJECT_LEN']},
    {'pattern': r'placeholder="[^{#](?:(?!\.com).)+$',
     'description': "`placeholder` value should be translatable.",
     'exclude_line': {('templates/zerver/register.html', 'placeholder="acme"'),
                      ('templates/zerver/register.html', 'placeholder="Acme or Aκμή"')},
     'exclude': set(["templates/analytics/support.html"]),
     'good_lines': ['<input class="stream-list-filter" type="text" placeholder="{{ _(\'Search streams\') }}" />'],
     'bad_lines': ['<input placeholder="foo">']},
    {'pattern': "placeholder='[^{]",
     'description': "`placeholder` value should be translatable.",
     'good_lines': ['<input class="stream-list-filter" type="text" placeholder="{{ _(\'Search streams\') }}" />'],
     'bad_lines': ["<input placeholder='foo'>"]},
    {'pattern': "aria-label='[^{]",
     'description': "`aria-label` value should be translatable.",
     'good_lines': ['<button type="button" class="close close-alert-word-status" aria-label="{{t \'Close\' }}">'],
     'bad_lines': ["<button aria-label='foo'></button>"]},
    {'pattern': 'aria-label="[^{]',
     'description': "`aria-label` value should be translatable.",
     'good_lines': ['<button type="button" class="close close-alert-word-status" aria-label="{{t \'Close\' }}">'],
     'bad_lines': ['<button aria-label="foo"></button>']},
    {'pattern': 'script src="http',
     'description': "Don't directly load dependencies from CDNs. See docs/subsystems/html-css.md",
     'exclude': set(["templates/corporate/billing.html", "templates/zerver/hello.html",
                     "templates/corporate/upgrade.html"]),
     'good_lines': ["{{ render_entrypoint('landing-page') }}"],
     'bad_lines': ['<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>']},
    {'pattern': "title='[^{]",
     'description': "`title` value should be translatable.",
     'good_lines': ['<link rel="author" title="{{ _(\'About these documents\') }}" />'],
     'bad_lines': ["<p title='foo'></p>"]},
    {'pattern': r'title="[^{\:]',
     'exclude_line': set([
         ('templates/zerver/app/markdown_help.html',
          '<td class="rendered_markdown"><img alt=":heart:" class="emoji" src="/static/generated/emoji/images/emoji/heart.png" title=":heart:" /></td>')
     ]),
     'exclude': set(["templates/zerver/emails", "templates/analytics/realm_details.html",
                     "templates/analytics/support.html"]),
     'description': "`title` value should be translatable."},
    {'pattern': r'''\Walt=["'][^{"']''',
     'description': "alt argument should be enclosed by _() or it should be an empty string.",
     'exclude': set(['static/templates/settings/display_settings.hbs',
                     'templates/zerver/app/keyboard_shortcuts.html',
                     'templates/zerver/app/markdown_help.html']),
     'good_lines': ['<img src="{{source_url}}" alt="{{ _(name) }}" />', '<img alg="" />'],
     'bad_lines': ['<img alt="Foo Image" />']},
    {'pattern': r'''\Walt=["']{{ ?["']''',
     'description': "alt argument should be enclosed by _().",
     'good_lines': ['<img src="{{source_url}}" alt="{{ _(name) }}" />'],
     'bad_lines': ['<img alt="{{ " />']},
    {'pattern': r'\bon\w+ ?=',
     'description': "Don't use inline event handlers (onclick=, etc. attributes) in HTML. Instead,"
                    "attach a jQuery event handler ($('#foo').on('click', function () {...})) when "
                    "the DOM is ready (inside a $(function () {...}) block).",
     'exclude': set(['templates/zerver/dev_login.html', 'templates/corporate/upgrade.html']),
     'good_lines': ["($('#foo').on('click', function () {}"],
     'bad_lines': ["<button id='foo' onclick='myFunction()'>Foo</button>", "<input onchange='myFunction()'>"]},
    {'pattern': 'style ?=',
     'description': "Avoid using the `style=` attribute; we prefer styling in CSS files",
     'exclude_pattern': r'.*style ?=["' + "'" + '](display: ?none|background: {{|color: {{|background-color: {{).*',
     'exclude': set([
         # KaTeX output uses style attribute
         'templates/zerver/app/markdown_help.html',
         # 5xx page doesn't have external CSS
         'static/html/5xx.html',
         # Group PMs color is dynamically calculated
         'static/templates/group_pms.hbs',

         # exclude_pattern above handles color, but have other issues:
         'static/templates/draft.hbs',
         'static/templates/subscription.hbs',
         'static/templates/single_message.hbs',

         # Old-style email templates need to use inline style
         # attributes; it should be possible to clean these up
         # when we convert these templates to use premailer.
         'templates/zerver/emails/email_base_messages.html',

         # Email log templates; should clean up.
         'templates/zerver/email.html',
         'templates/zerver/email_log.html',

         # Social backend logos are dynamically loaded
         'templates/zerver/accounts_home.html',
         'templates/zerver/login.html',

         # Probably just needs to be changed to display: none so the exclude works
         'templates/zerver/app/navbar.html',

         # Needs the width cleaned up; display: none is fine
         'static/templates/settings/account_settings.hbs',

         # background image property is dynamically generated
         'static/templates/user_profile_modal.hbs',
         'static/templates/sidebar_private_message_list.hbs',

         # Inline styling for an svg; could be moved to CSS files?
         'templates/zerver/landing_nav.html',
         'templates/zerver/billing_nav.html',
         'templates/zerver/app/home.html',
         'templates/zerver/features.html',
         'templates/zerver/portico-header.html',
         'templates/corporate/billing.html',
         'templates/corporate/upgrade.html',

         # Miscellaneous violations to be cleaned up
         'static/templates/user_info_popover_title.hbs',
         'static/templates/subscription_invites_warning_modal.hbs',
         'templates/zerver/reset_confirm.html',
         'templates/zerver/config_error.html',
         'templates/zerver/dev_env_email_access_details.html',
         'templates/zerver/confirm_continue_registration.html',
         'templates/zerver/register.html',
         'templates/zerver/accounts_send_confirm.html',
         'templates/zerver/integrations/index.html',
         'templates/zerver/documentation_main.html',
         'templates/analytics/realm_summary_table.html',
         'templates/corporate/zephyr.html',
         'templates/corporate/zephyr-mirror.html',
     ]),
     'good_lines': ['#my-style {color: blue;}', 'style="display: none"', "style='display: none"],
     'bad_lines': ['<p style="color: blue;">Foo</p>', 'style = "color: blue;"']},
]  # type: List[Rule]

handlebars_rules = RuleList(
    langs=['hbs'],
    rules=html_rules + [
        {'pattern': "[<]script",
         'description': "Do not use inline <script> tags here; put JavaScript in static/js instead."},
        {'pattern': '{{ t ("|\')',
         'description': 'There should be no spaces before the "t" in a translation tag.'},
        {'pattern': r"{{t '.*' }}[\.\?!]",
         'description': "Period should be part of the translatable string."},
        {'pattern': r'{{t ".*" }}[\.\?!]',
         'description': "Period should be part of the translatable string."},
        {'pattern': r"{{/tr}}[\.\?!]",
         'description': "Period should be part of the translatable string."},
        {'pattern': '{{t ("|\') ',
         'description': 'Translatable strings should not have leading spaces.'},
        {'pattern': "{{t '[^']+ ' }}",
         'description': 'Translatable strings should not have trailing spaces.'},
        {'pattern': '{{t "[^"]+ " }}',
         'description': 'Translatable strings should not have trailing spaces.'},
    ],
)
jinja2_rules = RuleList( langs=['html'], rules=html_rules + [ {'pattern': r"{% endtrans %}[\.\?!]", 'description': "Period should be part of the translatable string."}, {'pattern': r"{{ _(.+) }}[\.\?!]", 'description': "Period should be part of the translatable string."}, ], ) json_rules = RuleList( langs=['json'], rules=[ # Here, we don't use `whitespace_rules`, because the tab-based # whitespace rule flags a lot of third-party JSON fixtures # under zerver/webhooks that we want preserved verbatim. So # we just include the trailing whitespace rule and a modified # version of the tab-based whitespace rule (we can't just use # exclude in whitespace_rules, since we only want to ignore # JSON files with tab-based whitespace, not webhook code). trailing_whitespace_rule, {'pattern': '\t', 'strip': '\n', 'exclude': set(['zerver/webhooks/']), 'description': 'Fix tab-based whitespace'}, {'pattern': r'":["\[\{]', 'exclude': set(['zerver/webhooks/', 'zerver/tests/fixtures/']), 'description': 'Require space after : in JSON'}, ] ) markdown_docs_length_exclude = { # Has some example Vagrant output that's very long "docs/development/setup-vagrant.md", # Have wide output in code blocks "docs/subsystems/logging.md", "docs/subsystems/schema-migrations.md", # Have curl commands with JSON that would be messy to wrap "zerver/webhooks/helloworld/doc.md", "zerver/webhooks/trello/doc.md", # Has a very long configuration line "templates/zerver/integrations/perforce.md", # Has some example code that could perhaps be wrapped "templates/zerver/api/incoming-webhooks-walkthrough.md", # This macro has a long indented URL "templates/zerver/help/include/git-webhook-url-with-branches-indented.md", "templates/zerver/api/update-notification-settings.md", # These two are the same file and have some too-long lines for GitHub badges "README.md", "docs/overview/readme.md", } markdown_rules = RuleList( langs=['md'], rules=markdown_whitespace_rules + prose_style_rules + [ {'pattern': 
r'\[(?P<url>[^\]]+)\]\((?P=url)\)', 'description': 'Linkified markdown URLs should use cleaner <http://example.com> syntax.'}, {'pattern': 'https://zulip.readthedocs.io/en/latest/[a-zA-Z0-9]', 'exclude': {'docs/overview/contributing.md', 'docs/overview/readme.md', 'docs/README.md'}, 'include_only': set(['docs/']), 'description': "Use relative links (../foo/bar.html) to other documents in docs/", }, {'pattern': "su zulip -c [^']", 'include_only': set(['docs/']), 'description': "Always quote arguments using `su zulip -c '` to avoid confusion about how su works.", }, {'pattern': r'\][(][^#h]', 'include_only': set(['README.md', 'CONTRIBUTING.md']), 'description': "Use absolute links from docs served by GitHub", }, ], max_length=120, length_exclude=markdown_docs_length_exclude, exclude_files_in='templates/zerver/help/' ) help_markdown_rules = RuleList( langs=['md'], rules=markdown_rules.rules + [ {'pattern': '[a-z][.][A-Z]', 'description': "Likely missing space after end of sentence", 'include_only': set(['templates/zerver/help/']), }, {'pattern': r'\b[rR]ealm[s]?\b', 'include_only': set(['templates/zerver/help/']), 'good_lines': ['Organization', 'deactivate_realm', 'realm_filter'], 'bad_lines': ['Users are in a realm', 'Realm is the best model'], 'description': "Realms are referred to as Organizations in user-facing docs."}, ], length_exclude=markdown_docs_length_exclude, ) txt_rules = RuleList( langs=['txt', 'text', 'yaml', 'rst'], rules=whitespace_rules, ) non_py_rules = [ handlebars_rules, jinja2_rules, css_rules, js_rules, json_rules, markdown_rules, help_markdown_rules, bash_rules, txt_rules, ]
./CrossVul/dataset_final_sorted/CWE-287/py/good_1224_0
crossvul-python_data_good_275_0
# Copyright Red Hat 2017, Jake Hunsaker <jhunsake@redhat.com> # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. import fnmatch import inspect import logging import os import random import re import string import tarfile import threading import tempfile import shutil import subprocess import sys from datetime import datetime from concurrent.futures import ThreadPoolExecutor from .sosnode import SosNode from distutils.sysconfig import get_python_lib from getpass import getpass from six.moves import input from textwrap import fill from soscollector import __version__ class SosCollector(): '''Main sos-collector class''' def __init__(self, config): os.umask(0077) self.config = config self.threads = [] self.workers = [] self.client_list = [] self.node_list = [] self.master = False self.retrieved = 0 self.need_local_sudo = False if not self.config['list_options']: try: if not self.config['tmp_dir']: self.create_tmp_dir() self._setup_logging() self.log_debug('Executing %s' % ' '.join(s for s in sys.argv)) self._load_clusters() self._parse_options() self.prep() except KeyboardInterrupt: self._exit('Exiting on user cancel', 130) else: self._load_clusters() def _setup_logging(self): # behind the scenes logging self.logger = logging.getLogger('sos_collector') self.logger.setLevel(logging.DEBUG) self.logfile = tempfile.NamedTemporaryFile( 
mode="w+", dir=self.config['tmp_dir']) hndlr = logging.StreamHandler(self.logfile) hndlr.setFormatter(logging.Formatter( '%(asctime)s %(levelname)s: %(message)s')) hndlr.setLevel(logging.DEBUG) self.logger.addHandler(hndlr) console = logging.StreamHandler(sys.stderr) console.setFormatter(logging.Formatter('%(message)s')) # ui logging self.console = logging.getLogger('sos_collector_console') self.console.setLevel(logging.DEBUG) self.console_log_file = tempfile.NamedTemporaryFile( mode="w+", dir=self.config['tmp_dir']) chandler = logging.StreamHandler(self.console_log_file) cfmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s') chandler.setFormatter(cfmt) self.console.addHandler(chandler) # also print to console ui = logging.StreamHandler() fmt = logging.Formatter('%(message)s') ui.setFormatter(fmt) if self.config['verbose']: ui.setLevel(logging.DEBUG) else: ui.setLevel(logging.INFO) self.console.addHandler(ui) def _exit(self, msg, error=1): '''Used to safely terminate if sos-collector encounters an error''' self.log_error(msg) try: self.close_all_connections() except Exception: pass sys.exit(error) def _parse_options(self): '''If there are cluster options set on the CLI, override the defaults ''' if self.config['cluster_options']: for opt in self.config['cluster_options']: match = False for option in self.clusters[opt.cluster].options: if opt.name == option.name: match = True # override the default from CLI option.value = self._validate_option(option, opt) if not match: self._exit('Unknown option provided: %s.%s' % ( opt.cluster, opt.name )) def _validate_option(self, default, cli): '''Checks to make sure that the option given on the CLI is valid. Valid in this sense means that the type of value given matches what a cluster profile expects (str for str, bool for bool, etc). 
For bool options, this will also convert the string equivalent to an actual boolean value ''' if not default.opt_type == bool: if not default.opt_type == cli.opt_type: msg = "Invalid option type for %s. Expected %s got %s" self._exit(msg % (cli.name, default.opt_type, cli.opt_type)) return cli.value else: val = cli.value.lower() if val not in ['true', 'on', 'false', 'off']: msg = ("Invalid value for %s. Accepted values are: 'true', " "'false', 'on', 'off'") self._exit(msg % cli.name) else: if val in ['true', 'on']: return True else: return False def log_info(self, msg): '''Log info messages to both console and log file''' self.logger.info(msg) self.console.info(msg) def log_error(self, msg): '''Log error messages to both console and log file''' self.logger.error(msg) self.console.error(msg) def log_debug(self, msg): '''Log debug message to both console and log file''' caller = inspect.stack()[1][3] msg = '[sos_collector:%s] %s' % (caller, msg) self.logger.debug(msg) if self.config['verbose']: self.console.debug(msg) def create_tmp_dir(self): '''Creates a temp directory to transfer sosreports to''' tmpdir = tempfile.mkdtemp(prefix='sos-collector-', dir='/var/tmp') self.config['tmp_dir'] = tmpdir self.config['tmp_dir_created'] = True def list_options(self): '''Display options for available clusters''' print('\nThe following cluster options are available:\n') print('{:15} {:15} {:<10} {:10} {:<}'.format( 'Cluster', 'Option Name', 'Type', 'Default', 'Description' )) for cluster in self.clusters: for opt in self.clusters[cluster].options: optln = '{:15} {:15} {:<10} {:<10} {:<10}'.format( opt.cluster, opt.name, opt.opt_type.__name__, str(opt.value), opt.description ) print(optln) print('\nOptions take the form of cluster.name=value' '\nE.G. 
"ovirt.no-database=True" or "pacemaker.offline=False"') def delete_tmp_dir(self): '''Removes the temp directory and all collected sosreports''' shutil.rmtree(self.config['tmp_dir']) def _load_clusters(self): '''Load an instance of each cluster so that sos-collector can later determine what type of cluster is in use ''' if 'soscollector' not in os.listdir(os.getcwd()): p = get_python_lib() path = p + '/soscollector/clusters/' else: path = 'soscollector/clusters' self.clusters = {} sys.path.insert(0, path) for f in sorted(os.listdir(path)): fname, ext = os.path.splitext(f) if ext == '.py' and fname not in ['__init__', 'cluster']: mods = inspect.getmembers(__import__(fname), inspect.isclass) for cluster in mods[1:]: self.clusters[cluster[0]] = cluster[1](self.config) self.log_debug('Found cluster profiles: %s' % list(self.clusters.keys())) sys.path.pop(0) def _get_archive_name(self): '''Generates a name for the tarball archive''' nstr = 'sos-collector' if self.config['label']: nstr += '-%s' % self.config['label'] if self.config['case_id']: nstr += '-%s' % self.config['case_id'] dt = datetime.strftime(datetime.now(), '%Y-%m-%d') try: string.lowercase = string.ascii_lowercase except NameError: pass rand = ''.join(random.choice(string.lowercase) for x in range(5)) return '%s-%s-%s' % (nstr, dt, rand) def _get_archive_path(self): '''Returns the path, including filename, of the tarball we build that contains the collected sosreports ''' self.arc_name = self._get_archive_name() compr = 'gz' return self.config['out_dir'] + self.arc_name + '.tar.' + compr def _fmt_msg(self, msg): width = 80 _fmt = '' for line in msg.splitlines(): _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n' return _fmt def prep(self): '''Based on configuration, performs setup for collection''' disclaimer = ("""\ This utility is used to collect sosreports from multiple \ nodes simultaneously. It uses the python-paramiko library \ to manage the SSH connections to remote systems. 
If this \ library is not acceptable for use in your environment, \ you should not use this utility. An archive of sosreport tarballs collected from the nodes will be \ generated in %s and may be provided to an appropriate support representative. The generated archive may contain data considered sensitive \ and its content should be reviewed by the originating \ organization before being passed to any third party. No configuration changes will be made to the system running \ this utility or remote systems that it connects to. """) self.console.info("\nsos-collector (version %s)\n" % __version__) intro_msg = self._fmt_msg(disclaimer % self.config['tmp_dir']) self.console.info(intro_msg) prompt = "\nPress ENTER to continue, or CTRL-C to quit\n" if not self.config['batch']: input(prompt) if not self.config['password']: self.log_debug('password not specified, assuming SSH keys') msg = ('sos-collector ASSUMES that SSH keys are installed on all ' 'nodes unless the --password option is provided.\n') self.console.info(self._fmt_msg(msg)) if self.config['password']: self.log_debug('password specified, not using SSH keys') msg = ('Provide the SSH password for user %s: ' % self.config['ssh_user']) self.config['password'] = getpass(prompt=msg) if self.config['need_sudo'] and not self.config['insecure_sudo']: if not self.config['password']: self.log_debug('non-root user specified, will request ' 'sudo password') msg = ('A non-root user has been provided. Provide sudo ' 'password for %s on remote nodes: ' % self.config['ssh_user']) self.config['sudo_pw'] = getpass(prompt=msg) else: if not self.config['insecure_sudo']: self.config['sudo_pw'] = self.config['password'] if self.config['become_root']: if not self.config['ssh_user'] == 'root': self.log_debug('non-root user asking to become root remotely') msg = ('User %s will attempt to become root. 
' 'Provide root password: ' % self.config['ssh_user']) self.config['root_password'] = getpass(prompt=msg) self.config['need_sudo'] = False else: self.log_info('Option to become root but ssh user is root.' ' Ignoring request to change user on node') self.config['become_root'] = False if self.config['master']: self.connect_to_master() self.config['no_local'] = True else: self.master = SosNode('localhost', self.config) if self.config['cluster_type']: self.config['cluster'] = self.clusters[self.config['cluster_type']] else: self.determine_cluster() if self.config['cluster'] is None and not self.config['nodes']: msg = ('Cluster type could not be determined and no nodes provided' '\nAborting...') self._exit(msg, 1) self.config['cluster'].setup() self.get_nodes() self.intro() self.configure_sos_cmd() def intro(self): '''Prints initial messages and collects user and case if not provided already. ''' self.console.info('') if not self.node_list and not self.master.connected: self._exit('No nodes were detected, or nodes do not have sos ' 'installed.\nAborting...') self.console.info('The following is a list of nodes to collect from:') if self.master.connected: self.console.info('\t%-*s' % (self.config['hostlen'], self.config['master'])) for node in sorted(self.node_list): self.console.info("\t%-*s" % (self.config['hostlen'], node)) self.console.info('') if not self.config['case_id'] and not self.config['batch']: msg = 'Please enter the case id you are collecting reports for: ' self.config['case_id'] = input(msg) def configure_sos_cmd(self): '''Configures the sosreport command that is run on the nodes''' if self.config['sos_opt_line']: self.config['sos_cmd'] += self.config['sos_opt_line'] self.log_debug("User specified manual sosreport command line. 
" "sos command set to %s" % self.config['sos_cmd']) return True if self.config['case_id']: self.config['sos_cmd'] += ' --case-id=%s' % self.config['case_id'] if self.config['alloptions']: self.config['sos_cmd'] += ' --alloptions' if self.config['verify']: self.config['sos_cmd'] += ' --verify' if self.config['log_size']: self.config['sos_cmd'] += (' --log-size=%s' % self.config['log_size']) if self.config['sysroot']: self.config['sos_cmd'] += ' -s %s' % self.config['sysroot'] if self.config['chroot']: self.config['sos_cmd'] += ' -c %s' % self.config['chroot'] if self.config['compression']: self.config['sos_cmd'] += ' -z %s' % self.config['compression'] if self.config['cluster_type']: self.config['cluster'].modify_sos_cmd() self.log_debug('Initial sos cmd set to %s' % self.config['sos_cmd']) def connect_to_master(self): '''If run with --master, we will run cluster checks again that instead of the localhost. ''' try: self.master = SosNode(self.config['master'], self.config) except Exception as e: self.log_debug('Failed to connect to master: %s' % e) self._exit('Could not connect to master node.\nAborting...', 1) def determine_cluster(self): '''This sets the cluster type and loads that cluster's cluster. If no cluster type is matched and no list of nodes is provided by the user, then we abort. 
If a list of nodes is given, this is not run, however the cluster can still be run if the user sets a --cluster-type manually ''' for clus in self.clusters: self.clusters[clus].master = self.master if self.clusters[clus].check_enabled(): self.config['cluster'] = self.clusters[clus] name = str(self.clusters[clus].__class__.__name__).lower() self.config['cluster_type'] = name self.log_info( 'Cluster type set to %s' % self.config['cluster_type']) break def get_nodes_from_cluster(self): '''Collects the list of nodes from the determined cluster cluster''' nodes = self.config['cluster']._get_nodes() self.log_debug('Node list: %s' % nodes) return nodes def reduce_node_list(self): '''Reduce duplicate entries of the localhost and/or master node if applicable''' if (self.config['hostname'] in self.node_list and self.config['no_local']): self.node_list.remove(self.config['hostname']) for i in self.config['ip_addrs']: if i in self.node_list: self.node_list.remove(i) # remove the master node from the list, since we already have # an open session to it. if self.config['master']: for n in self.node_list: if n == self.master.hostname or n == self.config['master']: self.node_list.remove(n) self.node_list = list(set(n for n in self.node_list if n)) self.log_debug('Node list reduced to %s' % self.node_list) def compare_node_to_regex(self, node): '''Compares a discovered node name to a provided list of nodes from the user. If there is not a match, the node is removed from the list''' for regex in self.config['nodes']: try: if re.match(regex, node): return True except re.error as err: msg = 'Error comparing %s to provided node regex %s: %s' self.log_debug(msg % (node, regex, err)) return False def get_nodes(self): ''' Sets the list of nodes to collect sosreports from ''' if not self.config['master'] and not self.config['cluster']: msg = ('Could not determine a cluster type and no list of ' 'nodes or master node was provided.\nAborting...' 
) self._exit(msg) try: nodes = self.get_nodes_from_cluster() if self.config['nodes']: for node in nodes: if self.compare_node_to_regex(node): self.node_list.append(node) else: self.node_list = nodes except Exception as e: self.log_debug("Error parsing node list: %s" % e) self.log_debug('Setting node list to --nodes option') self.node_list = self.config['nodes'] for node in self.node_list: if any(i in node for i in ('*', '\\', '?', '(', ')', '/')): self.node_list.remove(node) # force add any non-regex node strings from nodes option if self.config['nodes']: for node in self.config['nodes']: if any(i in node for i in ('*', '\\', '?', '(', ')', '/')): continue if node not in self.node_list: self.log_debug("Force adding %s to node list" % node) self.node_list.append(node) if not self.config['master']: host = self.config['hostname'].split('.')[0] # trust the local hostname before the node report from cluster for node in self.node_list: if host == node.split('.')[0]: self.node_list.remove(node) self.node_list.append(self.config['hostname']) self.reduce_node_list() try: self.config['hostlen'] = len(max(self.node_list, key=len)) except (TypeError, ValueError): self.config['hostlen'] = len(self.config['master']) def can_run_local_sos(self): '''Check if sosreport can be run as the current user, or if we need to invoke sudo''' if os.geteuid() != 0: self.log_debug('Not running as root. Need sudo for local sos') self.need_local_sudo = True msg = ('\nLocal sosreport requires root. 
Provide sudo password' 'or press ENTER to skip: ') self.local_sudopw = getpass(prompt=msg) self.console.info('\n') if not self.local_sudopw: self.logger.info('Will not collect local sos, no password') return False self.log_debug('Able to collect local sos') return True def _connect_to_node(self, node): '''Try to connect to the node, and if we can add to the client list to run sosreport on ''' try: client = SosNode(node, self.config) if client.connected: self.client_list.append(client) else: client.close_ssh_session() except Exception: pass def collect(self): ''' For each node, start a collection thread and then tar all collected sosreports ''' if self.master.connected: self.client_list.append(self.master) self.console.info("\nConnecting to nodes...") filters = [self.master.address, self.master.hostname] nodes = [n for n in self.node_list if n not in filters] try: pool = ThreadPoolExecutor(self.config['threads']) pool.map(self._connect_to_node, nodes, chunksize=1) pool.shutdown(wait=True) self.report_num = len(self.client_list) self.console.info("\nBeginning collection of sosreports from %s " "nodes, collecting a maximum of %s " "concurrently\n" % (len(self.client_list), self.config['threads']) ) pool = ThreadPoolExecutor(self.config['threads']) pool.map(self._collect, self.client_list, chunksize=1) pool.shutdown(wait=True) except KeyboardInterrupt: self.log_error('Exiting on user cancel\n') os._exit(130) if hasattr(self.config['cluster'], 'run_extra_cmd'): self.console.info('Collecting additional data from master node...') f = self.config['cluster'].run_extra_cmd() if f: self.master.collect_extra_cmd(f) msg = '\nSuccessfully captured %s of %s sosreports' self.log_info(msg % (self.retrieved, self.report_num)) if self.retrieved > 0: self.create_cluster_archive() else: msg = 'No sosreports were collected, nothing to archive...' 
self._exit(msg, 1) self.close_all_connections() def _collect(self, client): '''Runs sosreport on each node''' if not client.local: client.sosreport() else: if not self.config['no_local']: client.sosreport() if client.retrieved: self.retrieved += 1 def close_all_connections(self): '''Close all ssh sessions for nodes''' for client in self.client_list: self.log_debug('Closing SSH connection to %s' % client.address) client.close_ssh_session() def create_cluster_archive(self): '''Calls for creation of tar archive then cleans up the temporary files created by sos-collector''' self.log_info('Creating archive of sosreports...') self.create_sos_archive() if self.archive: self.logger.info('Archive created as %s' % self.archive) self.cleanup() self.console.info('\nThe following archive has been created. ' 'Please provide it to your support team.') self.console.info(' %s' % self.archive) def create_sos_archive(self): '''Creates a tar archive containing all collected sosreports''' try: self.archive = self._get_archive_path() with tarfile.open(self.archive, "w:gz") as tar: for fname in os.listdir(self.config['tmp_dir']): arcname = fname if fname == self.logfile.name.split('/')[-1]: arcname = 'sos-collector.log' if fname == self.console_log_file.name.split('/')[-1]: arcname = 'ui.log' tar.add(os.path.join(self.config['tmp_dir'], fname), arcname=self.arc_name + '/' + arcname) tar.close() except Exception as e: msg = 'Could not create archive: %s' % e self._exit(msg, 2) def cleanup(self): ''' Removes the tmp dir and all sosarchives therein. If tmp dir was supplied by user, only the sos archives within that dir are removed. ''' if self.config['tmp_dir_created']: self.delete_tmp_dir() else: for f in os.listdir(self.config['tmp_dir']): if re.search('*sosreport-*tar*', f): os.remove(os.path.join(self.config['tmp_dir'], f))
./CrossVul/dataset_final_sorted/CWE-276/py/good_275_0
crossvul-python_data_bad_275_0
# Copyright Red Hat 2017, Jake Hunsaker <jhunsake@redhat.com> # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. import fnmatch import inspect import logging import os import random import re import string import tarfile import threading import tempfile import shutil import subprocess import sys from datetime import datetime from concurrent.futures import ThreadPoolExecutor from .sosnode import SosNode from distutils.sysconfig import get_python_lib from getpass import getpass from six.moves import input from textwrap import fill from soscollector import __version__ class SosCollector(): '''Main sos-collector class''' def __init__(self, config): self.config = config self.threads = [] self.workers = [] self.client_list = [] self.node_list = [] self.master = False self.retrieved = 0 self.need_local_sudo = False if not self.config['list_options']: try: if not self.config['tmp_dir']: self.create_tmp_dir() self._setup_logging() self.log_debug('Executing %s' % ' '.join(s for s in sys.argv)) self._load_clusters() self._parse_options() self.prep() except KeyboardInterrupt: self._exit('Exiting on user cancel', 130) else: self._load_clusters() def _setup_logging(self): # behind the scenes logging self.logger = logging.getLogger('sos_collector') self.logger.setLevel(logging.DEBUG) self.logfile = tempfile.NamedTemporaryFile( mode="w+", 
dir=self.config['tmp_dir']) hndlr = logging.StreamHandler(self.logfile) hndlr.setFormatter(logging.Formatter( '%(asctime)s %(levelname)s: %(message)s')) hndlr.setLevel(logging.DEBUG) self.logger.addHandler(hndlr) console = logging.StreamHandler(sys.stderr) console.setFormatter(logging.Formatter('%(message)s')) # ui logging self.console = logging.getLogger('sos_collector_console') self.console.setLevel(logging.DEBUG) self.console_log_file = tempfile.NamedTemporaryFile( mode="w+", dir=self.config['tmp_dir']) chandler = logging.StreamHandler(self.console_log_file) cfmt = logging.Formatter('%(asctime)s %(levelname)s: %(message)s') chandler.setFormatter(cfmt) self.console.addHandler(chandler) # also print to console ui = logging.StreamHandler() fmt = logging.Formatter('%(message)s') ui.setFormatter(fmt) if self.config['verbose']: ui.setLevel(logging.DEBUG) else: ui.setLevel(logging.INFO) self.console.addHandler(ui) def _exit(self, msg, error=1): '''Used to safely terminate if sos-collector encounters an error''' self.log_error(msg) try: self.close_all_connections() except Exception: pass sys.exit(error) def _parse_options(self): '''If there are cluster options set on the CLI, override the defaults ''' if self.config['cluster_options']: for opt in self.config['cluster_options']: match = False for option in self.clusters[opt.cluster].options: if opt.name == option.name: match = True # override the default from CLI option.value = self._validate_option(option, opt) if not match: self._exit('Unknown option provided: %s.%s' % ( opt.cluster, opt.name )) def _validate_option(self, default, cli): '''Checks to make sure that the option given on the CLI is valid. Valid in this sense means that the type of value given matches what a cluster profile expects (str for str, bool for bool, etc). 
For bool options, this will also convert the string equivalent to an actual boolean value ''' if not default.opt_type == bool: if not default.opt_type == cli.opt_type: msg = "Invalid option type for %s. Expected %s got %s" self._exit(msg % (cli.name, default.opt_type, cli.opt_type)) return cli.value else: val = cli.value.lower() if val not in ['true', 'on', 'false', 'off']: msg = ("Invalid value for %s. Accepted values are: 'true', " "'false', 'on', 'off'") self._exit(msg % cli.name) else: if val in ['true', 'on']: return True else: return False def log_info(self, msg): '''Log info messages to both console and log file''' self.logger.info(msg) self.console.info(msg) def log_error(self, msg): '''Log error messages to both console and log file''' self.logger.error(msg) self.console.error(msg) def log_debug(self, msg): '''Log debug message to both console and log file''' caller = inspect.stack()[1][3] msg = '[sos_collector:%s] %s' % (caller, msg) self.logger.debug(msg) if self.config['verbose']: self.console.debug(msg) def create_tmp_dir(self): '''Creates a temp directory to transfer sosreports to''' tmpdir = tempfile.mkdtemp(prefix='sos-collector-', dir='/var/tmp') self.config['tmp_dir'] = tmpdir self.config['tmp_dir_created'] = True def list_options(self): '''Display options for available clusters''' print('\nThe following cluster options are available:\n') print('{:15} {:15} {:<10} {:10} {:<}'.format( 'Cluster', 'Option Name', 'Type', 'Default', 'Description' )) for cluster in self.clusters: for opt in self.clusters[cluster].options: optln = '{:15} {:15} {:<10} {:<10} {:<10}'.format( opt.cluster, opt.name, opt.opt_type.__name__, str(opt.value), opt.description ) print(optln) print('\nOptions take the form of cluster.name=value' '\nE.G. 
"ovirt.no-database=True" or "pacemaker.offline=False"') def delete_tmp_dir(self): '''Removes the temp directory and all collected sosreports''' shutil.rmtree(self.config['tmp_dir']) def _load_clusters(self): '''Load an instance of each cluster so that sos-collector can later determine what type of cluster is in use ''' if 'soscollector' not in os.listdir(os.getcwd()): p = get_python_lib() path = p + '/soscollector/clusters/' else: path = 'soscollector/clusters' self.clusters = {} sys.path.insert(0, path) for f in sorted(os.listdir(path)): fname, ext = os.path.splitext(f) if ext == '.py' and fname not in ['__init__', 'cluster']: mods = inspect.getmembers(__import__(fname), inspect.isclass) for cluster in mods[1:]: self.clusters[cluster[0]] = cluster[1](self.config) self.log_debug('Found cluster profiles: %s' % list(self.clusters.keys())) sys.path.pop(0) def _get_archive_name(self): '''Generates a name for the tarball archive''' nstr = 'sos-collector' if self.config['label']: nstr += '-%s' % self.config['label'] if self.config['case_id']: nstr += '-%s' % self.config['case_id'] dt = datetime.strftime(datetime.now(), '%Y-%m-%d') try: string.lowercase = string.ascii_lowercase except NameError: pass rand = ''.join(random.choice(string.lowercase) for x in range(5)) return '%s-%s-%s' % (nstr, dt, rand) def _get_archive_path(self): '''Returns the path, including filename, of the tarball we build that contains the collected sosreports ''' self.arc_name = self._get_archive_name() compr = 'gz' return self.config['out_dir'] + self.arc_name + '.tar.' + compr def _fmt_msg(self, msg): width = 80 _fmt = '' for line in msg.splitlines(): _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n' return _fmt def prep(self): '''Based on configuration, performs setup for collection''' disclaimer = ("""\ This utility is used to collect sosreports from multiple \ nodes simultaneously. It uses the python-paramiko library \ to manage the SSH connections to remote systems. 
If this \ library is not acceptable for use in your environment, \ you should not use this utility. An archive of sosreport tarballs collected from the nodes will be \ generated in %s and may be provided to an appropriate support representative. The generated archive may contain data considered sensitive \ and its content should be reviewed by the originating \ organization before being passed to any third party. No configuration changes will be made to the system running \ this utility or remote systems that it connects to. """) self.console.info("\nsos-collector (version %s)\n" % __version__) intro_msg = self._fmt_msg(disclaimer % self.config['tmp_dir']) self.console.info(intro_msg) prompt = "\nPress ENTER to continue, or CTRL-C to quit\n" if not self.config['batch']: input(prompt) if not self.config['password']: self.log_debug('password not specified, assuming SSH keys') msg = ('sos-collector ASSUMES that SSH keys are installed on all ' 'nodes unless the --password option is provided.\n') self.console.info(self._fmt_msg(msg)) if self.config['password']: self.log_debug('password specified, not using SSH keys') msg = ('Provide the SSH password for user %s: ' % self.config['ssh_user']) self.config['password'] = getpass(prompt=msg) if self.config['need_sudo'] and not self.config['insecure_sudo']: if not self.config['password']: self.log_debug('non-root user specified, will request ' 'sudo password') msg = ('A non-root user has been provided. Provide sudo ' 'password for %s on remote nodes: ' % self.config['ssh_user']) self.config['sudo_pw'] = getpass(prompt=msg) else: if not self.config['insecure_sudo']: self.config['sudo_pw'] = self.config['password'] if self.config['become_root']: if not self.config['ssh_user'] == 'root': self.log_debug('non-root user asking to become root remotely') msg = ('User %s will attempt to become root. 
' 'Provide root password: ' % self.config['ssh_user']) self.config['root_password'] = getpass(prompt=msg) self.config['need_sudo'] = False else: self.log_info('Option to become root but ssh user is root.' ' Ignoring request to change user on node') self.config['become_root'] = False if self.config['master']: self.connect_to_master() self.config['no_local'] = True else: self.master = SosNode('localhost', self.config) if self.config['cluster_type']: self.config['cluster'] = self.clusters[self.config['cluster_type']] else: self.determine_cluster() if self.config['cluster'] is None and not self.config['nodes']: msg = ('Cluster type could not be determined and no nodes provided' '\nAborting...') self._exit(msg, 1) self.config['cluster'].setup() self.get_nodes() self.intro() self.configure_sos_cmd() def intro(self): '''Prints initial messages and collects user and case if not provided already. ''' self.console.info('') if not self.node_list and not self.master.connected: self._exit('No nodes were detected, or nodes do not have sos ' 'installed.\nAborting...') self.console.info('The following is a list of nodes to collect from:') if self.master.connected: self.console.info('\t%-*s' % (self.config['hostlen'], self.config['master'])) for node in sorted(self.node_list): self.console.info("\t%-*s" % (self.config['hostlen'], node)) self.console.info('') if not self.config['case_id'] and not self.config['batch']: msg = 'Please enter the case id you are collecting reports for: ' self.config['case_id'] = input(msg) def configure_sos_cmd(self): '''Configures the sosreport command that is run on the nodes''' if self.config['sos_opt_line']: self.config['sos_cmd'] += self.config['sos_opt_line'] self.log_debug("User specified manual sosreport command line. 
" "sos command set to %s" % self.config['sos_cmd']) return True if self.config['case_id']: self.config['sos_cmd'] += ' --case-id=%s' % self.config['case_id'] if self.config['alloptions']: self.config['sos_cmd'] += ' --alloptions' if self.config['verify']: self.config['sos_cmd'] += ' --verify' if self.config['log_size']: self.config['sos_cmd'] += (' --log-size=%s' % self.config['log_size']) if self.config['sysroot']: self.config['sos_cmd'] += ' -s %s' % self.config['sysroot'] if self.config['chroot']: self.config['sos_cmd'] += ' -c %s' % self.config['chroot'] if self.config['compression']: self.config['sos_cmd'] += ' -z %s' % self.config['compression'] if self.config['cluster_type']: self.config['cluster'].modify_sos_cmd() self.log_debug('Initial sos cmd set to %s' % self.config['sos_cmd']) def connect_to_master(self): '''If run with --master, we will run cluster checks again that instead of the localhost. ''' try: self.master = SosNode(self.config['master'], self.config) except Exception as e: self.log_debug('Failed to connect to master: %s' % e) self._exit('Could not connect to master node.\nAborting...', 1) def determine_cluster(self): '''This sets the cluster type and loads that cluster's cluster. If no cluster type is matched and no list of nodes is provided by the user, then we abort. 
If a list of nodes is given, this is not run, however the cluster can still be run if the user sets a --cluster-type manually ''' for clus in self.clusters: self.clusters[clus].master = self.master if self.clusters[clus].check_enabled(): self.config['cluster'] = self.clusters[clus] name = str(self.clusters[clus].__class__.__name__).lower() self.config['cluster_type'] = name self.log_info( 'Cluster type set to %s' % self.config['cluster_type']) break def get_nodes_from_cluster(self): '''Collects the list of nodes from the determined cluster cluster''' nodes = self.config['cluster']._get_nodes() self.log_debug('Node list: %s' % nodes) return nodes def reduce_node_list(self): '''Reduce duplicate entries of the localhost and/or master node if applicable''' if (self.config['hostname'] in self.node_list and self.config['no_local']): self.node_list.remove(self.config['hostname']) for i in self.config['ip_addrs']: if i in self.node_list: self.node_list.remove(i) # remove the master node from the list, since we already have # an open session to it. if self.config['master']: for n in self.node_list: if n == self.master.hostname or n == self.config['master']: self.node_list.remove(n) self.node_list = list(set(n for n in self.node_list if n)) self.log_debug('Node list reduced to %s' % self.node_list) def compare_node_to_regex(self, node): '''Compares a discovered node name to a provided list of nodes from the user. If there is not a match, the node is removed from the list''' for regex in self.config['nodes']: try: if re.match(regex, node): return True except re.error as err: msg = 'Error comparing %s to provided node regex %s: %s' self.log_debug(msg % (node, regex, err)) return False def get_nodes(self): ''' Sets the list of nodes to collect sosreports from ''' if not self.config['master'] and not self.config['cluster']: msg = ('Could not determine a cluster type and no list of ' 'nodes or master node was provided.\nAborting...' 
) self._exit(msg) try: nodes = self.get_nodes_from_cluster() if self.config['nodes']: for node in nodes: if self.compare_node_to_regex(node): self.node_list.append(node) else: self.node_list = nodes except Exception as e: self.log_debug("Error parsing node list: %s" % e) self.log_debug('Setting node list to --nodes option') self.node_list = self.config['nodes'] for node in self.node_list: if any(i in node for i in ('*', '\\', '?', '(', ')', '/')): self.node_list.remove(node) # force add any non-regex node strings from nodes option if self.config['nodes']: for node in self.config['nodes']: if any(i in node for i in ('*', '\\', '?', '(', ')', '/')): continue if node not in self.node_list: self.log_debug("Force adding %s to node list" % node) self.node_list.append(node) if not self.config['master']: host = self.config['hostname'].split('.')[0] # trust the local hostname before the node report from cluster for node in self.node_list: if host == node.split('.')[0]: self.node_list.remove(node) self.node_list.append(self.config['hostname']) self.reduce_node_list() try: self.config['hostlen'] = len(max(self.node_list, key=len)) except (TypeError, ValueError): self.config['hostlen'] = len(self.config['master']) def can_run_local_sos(self): '''Check if sosreport can be run as the current user, or if we need to invoke sudo''' if os.geteuid() != 0: self.log_debug('Not running as root. Need sudo for local sos') self.need_local_sudo = True msg = ('\nLocal sosreport requires root. 
Provide sudo password' 'or press ENTER to skip: ') self.local_sudopw = getpass(prompt=msg) self.console.info('\n') if not self.local_sudopw: self.logger.info('Will not collect local sos, no password') return False self.log_debug('Able to collect local sos') return True def _connect_to_node(self, node): '''Try to connect to the node, and if we can add to the client list to run sosreport on ''' try: client = SosNode(node, self.config) if client.connected: self.client_list.append(client) else: client.close_ssh_session() except Exception: pass def collect(self): ''' For each node, start a collection thread and then tar all collected sosreports ''' if self.master.connected: self.client_list.append(self.master) self.console.info("\nConnecting to nodes...") filters = [self.master.address, self.master.hostname] nodes = [n for n in self.node_list if n not in filters] try: pool = ThreadPoolExecutor(self.config['threads']) pool.map(self._connect_to_node, nodes, chunksize=1) pool.shutdown(wait=True) self.report_num = len(self.client_list) self.console.info("\nBeginning collection of sosreports from %s " "nodes, collecting a maximum of %s " "concurrently\n" % (len(self.client_list), self.config['threads']) ) pool = ThreadPoolExecutor(self.config['threads']) pool.map(self._collect, self.client_list, chunksize=1) pool.shutdown(wait=True) except KeyboardInterrupt: self.log_error('Exiting on user cancel\n') os._exit(130) if hasattr(self.config['cluster'], 'run_extra_cmd'): self.console.info('Collecting additional data from master node...') f = self.config['cluster'].run_extra_cmd() if f: self.master.collect_extra_cmd(f) msg = '\nSuccessfully captured %s of %s sosreports' self.log_info(msg % (self.retrieved, self.report_num)) if self.retrieved > 0: self.create_cluster_archive() else: msg = 'No sosreports were collected, nothing to archive...' 
self._exit(msg, 1) self.close_all_connections() def _collect(self, client): '''Runs sosreport on each node''' if not client.local: client.sosreport() else: if not self.config['no_local']: client.sosreport() if client.retrieved: self.retrieved += 1 def close_all_connections(self): '''Close all ssh sessions for nodes''' for client in self.client_list: self.log_debug('Closing SSH connection to %s' % client.address) client.close_ssh_session() def create_cluster_archive(self): '''Calls for creation of tar archive then cleans up the temporary files created by sos-collector''' self.log_info('Creating archive of sosreports...') self.create_sos_archive() if self.archive: self.logger.info('Archive created as %s' % self.archive) self.cleanup() self.console.info('\nThe following archive has been created. ' 'Please provide it to your support team.') self.console.info(' %s' % self.archive) def create_sos_archive(self): '''Creates a tar archive containing all collected sosreports''' try: self.archive = self._get_archive_path() with tarfile.open(self.archive, "w:gz") as tar: for fname in os.listdir(self.config['tmp_dir']): arcname = fname if fname == self.logfile.name.split('/')[-1]: arcname = 'sos-collector.log' if fname == self.console_log_file.name.split('/')[-1]: arcname = 'ui.log' tar.add(os.path.join(self.config['tmp_dir'], fname), arcname=self.arc_name + '/' + arcname) tar.close() except Exception as e: msg = 'Could not create archive: %s' % e self._exit(msg, 2) def cleanup(self): ''' Removes the tmp dir and all sosarchives therein. If tmp dir was supplied by user, only the sos archives within that dir are removed. ''' if self.config['tmp_dir_created']: self.delete_tmp_dir() else: for f in os.listdir(self.config['tmp_dir']): if re.search('*sosreport-*tar*', f): os.remove(os.path.join(self.config['tmp_dir'], f))
./CrossVul/dataset_final_sorted/CWE-276/py/bad_275_0
crossvul-python_data_bad_1020_0
# Copyright 2019, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

# !mitogen: minify_safe

"""
This module implements most package functionality, but remains separate from
non-essential code in order to reduce its size, since it is also serves as the
bootstrap implementation sent to every new slave context.
"""

import binascii
import collections
import encodings.latin_1
import encodings.utf_8
import errno
import fcntl
import itertools
import linecache
import logging
import os
import pickle as py_pickle
import pstats
import signal
import socket
import struct
import sys
import syslog
import threading
import time
import traceback
import warnings
import weakref
import zlib

# Python >3.7 deprecated the imp module.
warnings.filterwarnings('ignore', message='the imp module is deprecated')
import imp

# Absolute imports for <2.5.
select = __import__('select')

try:
    import cProfile
except ImportError:
    cProfile = None

try:
    import thread
except ImportError:
    import threading as thread

try:
    import cPickle as pickle
except ImportError:
    import pickle

try:
    from cStringIO import StringIO as BytesIO
except ImportError:
    from io import BytesIO

try:
    BaseException
except NameError:
    BaseException = Exception

try:
    ModuleNotFoundError
except NameError:
    ModuleNotFoundError = ImportError

# TODO: usage of 'import' after setting __name__, but before fixing up
# sys.modules generates a warning. This happens when profiling = True.
warnings.filterwarnings('ignore',
    "Parent module 'mitogen' not found while handling absolute import")

LOG = logging.getLogger('mitogen')
IOLOG = logging.getLogger('mitogen.io')
IOLOG.setLevel(logging.INFO)

# str.encode() may take import lock. Deadlock possible if broker calls
# .encode() on behalf of thread currently waiting for module.
LATIN1_CODEC = encodings.latin_1.Codec()

# Verbose / very-verbose logging flags, flipped by enable_debug_logging().
_v = False
_vv = False

# Wire-protocol handle constants: every Message carries one of these in its
# 'handle' field to select the service that processes it.
GET_MODULE = 100
CALL_FUNCTION = 101
FORWARD_LOG = 102
ADD_ROUTE = 103
DEL_ROUTE = 104
ALLOCATE_ID = 105
SHUTDOWN = 106
LOAD_MODULE = 107
FORWARD_MODULE = 108
DETACHING = 109
CALL_SERVICE = 110
STUB_CALL_SERVICE = 111

#: Special value used to signal disconnection or the inability to route a
#: message, when it appears in the `reply_to` field. Usually causes
#: :class:`mitogen.core.ChannelError` to be raised when it is received.
#:
#: It indicates the sender did not know how to process the message, or wishes
#: no further messages to be delivered to it. It is used when:
#:
#: * a remote receiver is disconnected or explicitly closed.
#: * a related message could not be delivered due to no route existing for it.
#: * a router is being torn down, as a sentinel value to notify
#:   :meth:`mitogen.core.Router.add_handler` callbacks to clean up.
IS_DEAD = 999

# NOTE(review): duplicate of the BaseException shim above — harmless but
# redundant.
try:
    BaseException
except NameError:
    BaseException = Exception

PY24 = sys.version_info < (2, 5)
PY3 = sys.version_info > (3,)
# Aliases papering over the Python 2/3 split; the rest of the module uses
# only these names.
if PY3:
    b = str.encode
    BytesType = bytes
    UnicodeType = str
    FsPathTypes = (str,)
    BufferType = lambda buf, start: memoryview(buf)[start:]
    long = int
else:
    b = str
    BytesType = str
    FsPathTypes = (str, unicode)
    BufferType = buffer
    UnicodeType = unicode

AnyTextType = (BytesType, UnicodeType)

# next() builtin appeared in Python 2.6.
try:
    next
except NameError:
    next = lambda it: it.next()

# #550: prehistoric WSL did not advertise itself in uname output.
try:
    fp = open('/proc/sys/kernel/osrelease')
    IS_WSL = 'Microsoft' in fp.read()
    fp.close()
except IOError:
    IS_WSL = False

#: Default size for calls to :meth:`Side.read` or :meth:`Side.write`, and the
#: size of buffers configured by :func:`mitogen.parent.create_socketpair`. This
#: value has many performance implications, 128KiB seems to be a sweet spot.
#:
#: * When set low, large messages cause many :class:`Broker` IO loop
#:   iterations, burning CPU and reducing throughput.
#: * When set high, excessive RAM is reserved by the OS for socket buffers (2x
#:   per child), and an identically sized temporary userspace buffer is
#:   allocated on each read that requires zeroing, and over a particular size
#:   may require two system calls to allocate/deallocate.
#:
#: Care must be taken to ensure the underlying kernel object and receiving
#: program support the desired size. For example,
#:
#: * Most UNIXes have TTYs with fixed 2KiB-4KiB buffers, making them unsuitable
#:   for efficient IO.
#: * Different UNIXes have varying presets for pipes, which may not be
#:   configurable. On recent Linux the default pipe buffer size is 64KiB, but
#:   under memory pressure may be as low as 4KiB for unprivileged processes.
#: * When communication is via an intermediary process, its internal buffers
#:   effect the speed OS buffers will drain. For example OpenSSH uses 64KiB
#:   reads.
#:
#: An ideal :class:`Message` has a size that is a multiple of
#: :data:`CHUNK_SIZE` inclusive of headers, to avoid wasting IO loop iterations
#: writing small trailer chunks.
CHUNK_SIZE = 131072

# Per-thread state (e.g. Latch bookkeeping).
_tls = threading.local()


if __name__ == 'mitogen.core':
    # When loaded using import mechanism, ExternalContext.main() will not have
    # a chance to set the synthetic mitogen global, so just import it here.
    import mitogen
else:
    # When loaded as __main__, ensure classes and functions gain a __module__
    # attribute consistent with the host process, so that pickling succeeds.
    __name__ = 'mitogen.core'


class Error(Exception):
    """
    Base for all exceptions raised by Mitogen.

    :param str fmt:
        Exception text, or format string if `args` is non-empty.
    :param tuple args:
        Format string arguments.
    """
    def __init__(self, fmt=None, *args):
        if args:
            fmt %= args
        if fmt and not isinstance(fmt, UnicodeType):
            fmt = fmt.decode('utf-8')
        Exception.__init__(self, fmt)


class LatchError(Error):
    """
    Raised when an attempt is made to use a :class:`mitogen.core.Latch` that
    has been marked closed.
    """
    pass


class Blob(BytesType):
    """
    A serializable bytes subclass whose content is summarized in repr()
    output, making it suitable for logging binary data.
    """
    def __repr__(self):
        return '[blob: %d bytes]' % len(self)

    def __reduce__(self):
        return (Blob, (BytesType(self),))


class Secret(UnicodeType):
    """
    A serializable unicode subclass whose content is masked in repr() output,
    making it suitable for logging passwords.
    """
    def __repr__(self):
        return '[secret]'

    if not PY3:
        # TODO: what is this needed for in 2.x?
        def __str__(self):
            return UnicodeType(self)

    def __reduce__(self):
        return (Secret, (UnicodeType(self),))


class Kwargs(dict):
    """
    A serializable dict subclass that indicates its keys should be coerced to
    Unicode on Python 3 and bytes on Python<2.6.

    Python 2 produces keyword argument dicts whose keys are bytes, requiring a
    helper to ensure compatibility with Python 3 where Unicode is required,
    whereas Python 3 produces keyword argument dicts whose keys are Unicode,
    requiring a helper for Python 2.4/2.5, where bytes are required.
    """
    if PY3:
        def __init__(self, dct):
            for k, v in dct.items():
                if type(k) is bytes:
                    self[k.decode()] = v
                else:
                    self[k] = v
    elif sys.version_info < (2, 6, 5):
        def __init__(self, dct):
            for k, v in dct.iteritems():
                if type(k) is unicode:
                    k, _ = encodings.utf_8.encode(k)
                self[k] = v

    def __repr__(self):
        return 'Kwargs(%s)' % (dict.__repr__(self),)

    def __reduce__(self):
        return (Kwargs, (dict(self),))


class CallError(Error):
    """
    Serializable :class:`Error` subclass raised when :meth:`Context.call()
    <mitogen.parent.Context.call>` fails. A copy of the traceback from the
    external context is appended to the exception message.
    """
    def __init__(self, fmt=None, *args):
        if not isinstance(fmt, BaseException):
            Error.__init__(self, fmt, *args)
        else:
            # wrap an existing exception: render "module.Class: message" plus
            # any active traceback
            e = fmt
            cls = e.__class__
            fmt = '%s.%s: %s' % (cls.__module__, cls.__name__, e)
            tb = sys.exc_info()[2]
            if tb:
                fmt += '\n'
                fmt += ''.join(traceback.format_tb(tb))
            Error.__init__(self, fmt)

    def __reduce__(self):
        return (_unpickle_call_error, (self.args[0],))


def _unpickle_call_error(s):
    # Validate untrusted unpickle input before constructing the exception.
    if not (type(s) is UnicodeType and len(s) < 10000):
        raise TypeError('cannot unpickle CallError: bad input')
    return CallError(s)


class ChannelError(Error):
    """
    Raised when a channel dies or has been closed.
    """
    remote_msg = 'Channel closed by remote end.'
    local_msg = 'Channel closed by local end.'


class StreamError(Error):
    """
    Raised when a stream cannot be established.
    """
    pass


class TimeoutError(Error):
    """
    Raised when a timeout occurs on a stream.
    """
    pass


def to_text(o):
    """
    Coerce `o` to Unicode by decoding it from UTF-8 if it is an instance of
    :class:`bytes`, otherwise pass it to the :class:`str` constructor. The
    returned object is always a plain :class:`str`, any subclass is removed.
    """
    if isinstance(o, BytesType):
        return o.decode('utf-8')
    return UnicodeType(o)


# Python 2.4: any() builtin is missing; provide a minimal substitute.
try:
    any
except NameError:
    def any(it):
        for elem in it:
            if elem:
                return True


def _partition(s, sep, find):
    """
    (str|unicode).(partition|rpartition) for Python 2.4/2.5.
    """
    # Returns None when sep is absent; callers supply the fallback tuple.
    idx = find(sep)
    if idx != -1:
        left = s[0:idx]
        return left, sep, s[len(left)+len(sep):]


if hasattr(UnicodeType, 'rpartition'):
    str_partition = UnicodeType.partition
    str_rpartition = UnicodeType.rpartition
    bytes_partition = BytesType.partition
else:
    def str_partition(s, sep):
        return _partition(s, sep, s.find) or (s, u'', u'')
    def str_rpartition(s, sep):
        return _partition(s, sep, s.rfind) or (u'', u'', s)
    def bytes_partition(s, sep):
        return _partition(s, sep, s.find) or (s, '', '')


def has_parent_authority(msg, _stream=None):
    """
    Policy function for use with :class:`Receiver` and
    :meth:`Router.add_handler` that requires incoming messages to originate
    from a parent context, or on a :class:`Stream` whose :attr:`auth_id
    <Stream.auth_id>` has been set to that of a parent context or the current
    context.
    """
    return (msg.auth_id == mitogen.context_id or
            msg.auth_id in mitogen.parent_ids)


def _signals(obj, signal):
    """
    Return (creating if necessary) the list of functions registered for
    signal `signal` on `obj`, stored in the object's __dict__.
    """
    return (
        obj.__dict__
        .setdefault('_signals', {})
        .setdefault(signal, [])
    )


def listen(obj, name, func):
    """
    Arrange for `func()` to be invoked when signal `name` is fired on `obj`.
    """
    _signals(obj, name).append(func)


def unlisten(obj, name, func):
    """
    Remove `func()` from the list of functions invoked when signal `name` is
    fired by `obj`.

    :raises ValueError:
        `func()` was not on the list.
    """
    _signals(obj, name).remove(func)


def fire(obj, name, *args, **kwargs):
    """
    Arrange for `func(*args, **kwargs)` to be invoked for every function
    registered for signal `name` on `obj`.
    """
    for func in _signals(obj, name):
        func(*args, **kwargs)


def takes_econtext(func):
    # Marker decorator: the dispatcher passes the ExternalContext to func.
    func.mitogen_takes_econtext = True
    return func


def takes_router(func):
    # Marker decorator: the dispatcher passes the active Router to func.
    func.mitogen_takes_router = True
    return func


def is_blacklisted_import(importer, fullname):
    """
    Return :data:`True` if `fullname` is part of a blacklisted package, or if
    any packages have been whitelisted and `fullname` is not part of one.

    NB:
      - If a package is on both lists, then it is treated as blacklisted.
      - If any package is whitelisted, then all non-whitelisted packages are
        treated as blacklisted.
    """
    return ((not any(fullname.startswith(s) for s in importer.whitelist)) or
            (any(fullname.startswith(s) for s in importer.blacklist)))


def set_cloexec(fd):
    """
    Set the file descriptor `fd` to automatically close on :func:`os.execve`.
    This has no effect on file descriptors inherited across :func:`os.fork`,
    they must be explicitly closed through some other means, such as
    :func:`mitogen.fork.on_fork`.
    """
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    # refuse to mark stdio descriptors close-on-exec
    assert fd > 2
    fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)


def set_nonblock(fd):
    """
    Set the file descriptor `fd` to non-blocking mode. For most underlying
    file types, this causes :func:`os.read` or :func:`os.write` to raise
    :class:`OSError` with :data:`errno.EAGAIN` rather than block the thread
    when the underlying kernel buffer is exhausted.
    """
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)


def set_block(fd):
    """
    Inverse of :func:`set_nonblock`, i.e. cause `fd` to block the thread when
    the underlying kernel buffer is exhausted.
""" flags = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) def io_op(func, *args): """ Wrap `func(*args)` that may raise :class:`select.error`, :class:`IOError`, or :class:`OSError`, trapping UNIX error codes relating to disconnection and retry events in various subsystems: * When a signal is delivered to the process on Python 2, system call retry is signalled through :data:`errno.EINTR`. The invocation is automatically restarted. * When performing IO against a TTY, disconnection of the remote end is signalled by :data:`errno.EIO`. * When performing IO against a socket, disconnection of the remote end is signalled by :data:`errno.ECONNRESET`. * When performing IO against a pipe, disconnection of the remote end is signalled by :data:`errno.EPIPE`. :returns: Tuple of `(return_value, disconnect_reason)`, where `return_value` is the return value of `func(*args)`, and `disconnected` is an exception instance when disconnection was detected, otherwise :data:`None`. """ while True: try: return func(*args), None except (select.error, OSError, IOError): e = sys.exc_info()[1] _vv and IOLOG.debug('io_op(%r) -> OSError: %s', func, e) if e.args[0] == errno.EINTR: continue if e.args[0] in (errno.EIO, errno.ECONNRESET, errno.EPIPE): return None, e raise class PidfulStreamHandler(logging.StreamHandler): """ A :class:`logging.StreamHandler` subclass used when :meth:`Router.enable_debug() <mitogen.master.Router.enable_debug>` has been called, or the `debug` parameter was specified during context construction. Verifies the process ID has not changed on each call to :meth:`emit`, reopening the associated log file when a change is detected. This ensures logging to the per-process output files happens correctly even when uncooperative third party components call :func:`os.fork`. """ #: PID that last opened the log file. open_pid = None #: Output path template. 
template = '/tmp/mitogen.%s.%s.log' def _reopen(self): self.acquire() try: if self.open_pid == os.getpid(): return ts = time.strftime('%Y%m%d_%H%M%S') path = self.template % (os.getpid(), ts) self.stream = open(path, 'w', 1) set_cloexec(self.stream.fileno()) self.stream.write('Parent PID: %s\n' % (os.getppid(),)) self.stream.write('Created by:\n\n%s\n' % ( ''.join(traceback.format_stack()), )) self.open_pid = os.getpid() finally: self.release() def emit(self, record): if self.open_pid != os.getpid(): self._reopen() logging.StreamHandler.emit(self, record) def enable_debug_logging(): global _v, _vv _v = True _vv = True root = logging.getLogger() root.setLevel(logging.DEBUG) IOLOG.setLevel(logging.DEBUG) handler = PidfulStreamHandler() handler.formatter = logging.Formatter( '%(asctime)s %(levelname).1s %(name)s: %(message)s', '%H:%M:%S' ) root.handlers.insert(0, handler) _profile_hook = lambda name, func, *args: func(*args) _profile_fmt = os.environ.get( 'MITOGEN_PROFILE_FMT', '/tmp/mitogen.stats.%(pid)s.%(identity)s.%(now)s.%(ext)s', ) def _profile_hook(name, func, *args): """ Call `func(*args)` and return its result. This function is replaced by :func:`_real_profile_hook` when :func:`enable_profiling` is called. This interface is obsolete and will be replaced by a signals-based integration later on. """ return func(*args) def _real_profile_hook(name, func, *args): profiler = cProfile.Profile() profiler.enable() try: return func(*args) finally: path = _profile_fmt % { 'now': int(1e6 * time.time()), 'identity': name, 'pid': os.getpid(), 'ext': '%s' } profiler.dump_stats(path % ('pstats',)) profiler.create_stats() fp = open(path % ('log',), 'w') try: stats = pstats.Stats(profiler, stream=fp) stats.sort_stats('cumulative') stats.print_stats() finally: fp.close() def enable_profiling(econtext=None): global _profile_hook _profile_hook = _real_profile_hook def import_module(modname): """ Import `module` and return the attribute named `attr`. 
""" return __import__(modname, None, None, ['']) def pipe(): """ Create a UNIX pipe pair using :func:`os.pipe`, wrapping the returned descriptors in Python file objects in order to manage their lifetime and ensure they are closed when their last reference is discarded and they have not been closed explicitly. """ rfd, wfd = os.pipe() return ( os.fdopen(rfd, 'rb', 0), os.fdopen(wfd, 'wb', 0) ) def iter_split(buf, delim, func): """ Invoke `func(s)` for each `delim`-delimited chunk in the potentially large `buf`, avoiding intermediate lists and quadratic string operations. Return the trailing undelimited portion of `buf`, or any unprocessed portion of `buf` after `func(s)` returned :data:`False`. :returns: `(trailer, cont)`, where `cont` is :data:`False` if the last call to `func(s)` returned :data:`False`. """ dlen = len(delim) start = 0 cont = True while cont: nl = buf.find(delim, start) if nl == -1: break cont = not func(buf[start:nl]) is False start = nl + dlen return buf[start:], cont class Py24Pickler(py_pickle.Pickler): """ Exceptions were classic classes until Python 2.5. Sadly for 2.4, cPickle offers little control over how a classic instance is pickled. Therefore 2.4 uses a pure-Python pickler, so CallError can be made to look as it does on newer Pythons. This mess will go away once proper serialization exists. """ @classmethod def dumps(cls, obj, protocol): bio = BytesIO() self = cls(bio, protocol=protocol) self.dump(obj) return bio.getvalue() def save_exc_inst(self, obj): if isinstance(obj, CallError): func, args = obj.__reduce__() self.save(func) self.save(args) self.write(py_pickle.REDUCE) else: py_pickle.Pickler.save_inst(self, obj) if PY24: dispatch = py_pickle.Pickler.dispatch.copy() dispatch[py_pickle.InstanceType] = save_exc_inst if PY3: # In 3.x Unpickler is a class exposing find_class as an overridable, but it # cannot be overridden without subclassing. 
    class _Unpickler(pickle.Unpickler):
        def find_class(self, module, func):
            # Delegate to the per-message find_global whitelist installed by
            # Message.unpickle().
            return self.find_global(module, func)
    pickle__dumps = pickle.dumps
elif PY24:
    # On Python 2.4, we must use a pure-Python pickler.
    pickle__dumps = Py24Pickler.dumps
    _Unpickler = pickle.Unpickler
else:
    pickle__dumps = pickle.dumps
    # In 2.x Unpickler is a function exposing a writeable find_global
    # attribute.
    _Unpickler = pickle.Unpickler


class Message(object):
    """
    Messages are the fundamental unit of communication, comprising fields from
    the :ref:`stream-protocol` header, an optional reference to the receiving
    :class:`mitogen.core.Router` for ingress messages, and helper methods for
    deserialization and generating replies.
    """
    #: Integer target context ID. :class:`Router` delivers messages locally
    #: when their :attr:`dst_id` matches :data:`mitogen.context_id`, otherwise
    #: they are routed up or downstream.
    dst_id = None

    #: Integer source context ID. Used as the target of replies if any are
    #: generated.
    src_id = None

    #: Context ID under whose authority the message is acting. See
    #: :ref:`source-verification`.
    auth_id = None

    #: Integer target handle in the destination context. This is one of the
    #: :ref:`standard-handles`, or a dynamically generated handle used to
    #: receive a one-time reply, such as the return value of a function call.
    handle = None

    #: Integer target handle to direct any reply to this message. Used to
    #: receive a one-time reply, such as the return value of a function call.
    #: :data:`IS_DEAD` has a special meaning when it appears in this field.
    reply_to = None

    #: Raw message data bytes.
    data = b('')

    # Sentinel marking "not yet deserialized"; unpickle() caches its result
    # by replacing this.
    _unpickled = object()

    #: The :class:`Router` responsible for routing the message. This is
    #: :data:`None` for locally originated messages.
    router = None

    #: The :class:`Receiver` over which the message was last received. Part of
    #: the :class:`mitogen.select.Select` interface. Defaults to :data:`None`.
    receiver = None

    HEADER_FMT = '>hLLLLLL'
    HEADER_LEN = struct.calcsize(HEADER_FMT)
    HEADER_MAGIC = 0x4d49  # 'MI'

    def __init__(self, **kwargs):
        """
        Construct a message from the supplied `kwargs`. :attr:`src_id` and
        :attr:`auth_id` are always set to :data:`mitogen.context_id`.
        """
        self.src_id = mitogen.context_id
        self.auth_id = mitogen.context_id
        vars(self).update(kwargs)
        assert isinstance(self.data, BytesType)

    def pack(self):
        """
        Return the encoded wire representation: fixed header followed by the
        raw payload bytes.
        """
        return (
            struct.pack(self.HEADER_FMT, self.HEADER_MAGIC, self.dst_id,
                        self.src_id, self.auth_id, self.handle,
                        self.reply_to or 0, len(self.data))
            + self.data
        )

    def _unpickle_context(self, context_id, name):
        return _unpickle_context(context_id, name, router=self.router)

    def _unpickle_sender(self, context_id, dst_handle):
        return _unpickle_sender(self.router, context_id, dst_handle)

    def _unpickle_bytes(self, s, encoding):
        # Round-trip via latin-1 to recover raw bytes pickled on 3.x.
        s, n = LATIN1_CODEC.encode(s)
        return s

    def _find_global(self, module, func):
        """
        Return the class implementing `module_name.class_name` or raise
        `StreamError` if the module is not whitelisted.
        """
        # Security boundary: only this fixed whitelist of constructors may be
        # invoked by a pickle arriving off the wire.
        if module == __name__:
            if func == '_unpickle_call_error' or func == 'CallError':
                return _unpickle_call_error
            elif func == '_unpickle_sender':
                return self._unpickle_sender
            elif func == '_unpickle_context':
                return self._unpickle_context
            elif func == 'Blob':
                return Blob
            elif func == 'Secret':
                return Secret
            elif func == 'Kwargs':
                return Kwargs
        elif module == '_codecs' and func == 'encode':
            return self._unpickle_bytes
        elif module == '__builtin__' and func == 'bytes':
            return BytesType
        raise StreamError('cannot unpickle %r/%r', module, func)

    @property
    def is_dead(self):
        """
        :data:`True` if :attr:`reply_to` is set to the magic value
        :data:`IS_DEAD`, indicating the sender considers the channel dead. Dead
        messages can be raised in a variety of circumstances, see
        :data:`IS_DEAD` for more information.
        """
        return self.reply_to == IS_DEAD

    @classmethod
    def dead(cls, reason=None, **kwargs):
        """
        Syntax helper to construct a dead message.
        """
        kwargs['data'], _ = encodings.utf_8.encode(reason or u'')
        return cls(reply_to=IS_DEAD, **kwargs)

    @classmethod
    def pickled(cls, obj, **kwargs):
        """
        Construct a pickled message, setting :attr:`data` to the
        serialization of `obj`, and setting remaining fields using `kwargs`.

        :returns:
            The new message.
        """
        self = cls(**kwargs)
        try:
            self.data = pickle__dumps(obj, protocol=2)
        except pickle.PicklingError:
            # Fall back to serializing the error itself, so the peer gets a
            # CallError rather than silence.
            e = sys.exc_info()[1]
            self.data = pickle__dumps(CallError(e), protocol=2)
        return self

    def reply(self, msg, router=None, **kwargs):
        """
        Compose a reply to this message and send it using :attr:`router`, or
        `router` if :attr:`router` is :data:`None`.

        :param obj:
            Either a :class:`Message`, or an object to be serialized in order
            to construct a new message.
        :param router:
            Optional router to use if :attr:`router` is :data:`None`.
        :param kwargs:
            Optional keyword parameters overriding message fields in the
            reply.
        """
        if not isinstance(msg, Message):
            msg = Message.pickled(msg)
        msg.dst_id = self.src_id
        msg.handle = self.reply_to
        vars(msg).update(kwargs)
        if msg.handle:
            (self.router or router).route(msg)
        else:
            LOG.debug('dropping reply to message with no return address: %r',
                      msg)

    if PY3:
        UNPICKLER_KWARGS = {'encoding': 'bytes'}
    else:
        UNPICKLER_KWARGS = {}

    def _throw_dead(self):
        # Raise ChannelError carrying the remote-supplied reason if present,
        # otherwise a generic local/remote message.
        if len(self.data):
            raise ChannelError(self.data.decode('utf-8', 'replace'))
        elif self.src_id == mitogen.context_id:
            raise ChannelError(ChannelError.local_msg)
        else:
            raise ChannelError(ChannelError.remote_msg)

    def unpickle(self, throw=True, throw_dead=True):
        """
        Unpickle :attr:`data`, optionally raising any exceptions present.

        :param bool throw_dead:
            If :data:`True`, raise exceptions, otherwise it is the caller's
            responsibility.

        :raises CallError:
            The serialized data contained CallError exception.
        :raises ChannelError:
            The `is_dead` field was set.
        """
        _vv and IOLOG.debug('%r.unpickle()', self)
        if throw_dead and self.is_dead:
            self._throw_dead()

        obj = self._unpickled
        if obj is Message._unpickled:
            fp = BytesIO(self.data)
            unpickler = _Unpickler(fp, **self.UNPICKLER_KWARGS)
            unpickler.find_global = self._find_global
            try:
                # Must occur off the broker thread.
                try:
                    obj = unpickler.load()
                except:
                    LOG.error('raw pickle was: %r', self.data)
                    raise
                self._unpickled = obj
            except (TypeError, ValueError):
                e = sys.exc_info()[1]
                raise StreamError('invalid message: %s', e)

        if throw:
            if isinstance(obj, CallError):
                raise obj
        return obj

    def __repr__(self):
        return 'Message(%r, %r, %r, %r, %r, %r..%d)' % (
            self.dst_id, self.src_id, self.auth_id, self.handle,
            self.reply_to, (self.data or '')[:50], len(self.data)
        )


class Sender(object):
    """
    Senders are used to send pickled messages to a handle in another context,
    it is the inverse of :class:`mitogen.core.Receiver`.

    Senders may be serialized, making them convenient to wire up data flows.
    See :meth:`mitogen.core.Receiver.to_sender` for more information.

    :param mitogen.core.Context context:
        Context to send messages to.
    :param int dst_handle:
        Destination handle to send messages to.
    """
    def __init__(self, context, dst_handle):
        self.context = context
        self.dst_handle = dst_handle

    def send(self, data):
        """
        Send `data` to the remote end.
        """
        _vv and IOLOG.debug('%r.send(%r..)', self, repr(data)[:100])
        self.context.send(Message.pickled(data, handle=self.dst_handle))

    explicit_close_msg = 'Sender was explicitly closed'

    def close(self):
        """
        Send a dead message to the remote, causing :meth:`ChannelError` to be
        raised in any waiting thread.
""" _vv and IOLOG.debug('%r.close()', self) self.context.send( Message.dead( reason=self.explicit_close_msg, handle=self.dst_handle ) ) def __repr__(self): return 'Sender(%r, %r)' % (self.context, self.dst_handle) def __reduce__(self): return _unpickle_sender, (self.context.context_id, self.dst_handle) def _unpickle_sender(router, context_id, dst_handle): if not (isinstance(router, Router) and isinstance(context_id, (int, long)) and context_id >= 0 and isinstance(dst_handle, (int, long)) and dst_handle > 0): raise TypeError('cannot unpickle Sender: bad input or missing router') return Sender(Context(router, context_id), dst_handle) class Receiver(object): """ Receivers maintain a thread-safe queue of messages sent to a handle of this context from another context. :param mitogen.core.Router router: Router to register the handler on. :param int handle: If not :data:`None`, an explicit handle to register, otherwise an unused handle is chosen. :param bool persist: If :data:`False`, unregister the handler after one message is received. Single-message receivers are intended for RPC-like transactions, such as in the case of :meth:`mitogen.parent.Context.call_async`. :param mitogen.core.Context respondent: Context this receiver is receiving from. If not :data:`None`, arranges for the receiver to receive a dead message if messages can no longer be routed to the context due to disconnection, and ignores messages that did not originate from the respondent context. """ #: If not :data:`None`, a reference to a function invoked as #: `notify(receiver)` when a new message is delivered to this receiver. The #: function is invoked on the broker thread, therefore it must not block. #: Used by :class:`mitogen.select.Select` to implement waiting on multiple #: receivers. notify = None raise_channelerror = True def __init__(self, router, handle=None, persist=True, respondent=None, policy=None, overwrite=False): self.router = router #: The handle. 
self.handle = handle # Avoid __repr__ crash in add_handler() self._latch = Latch() # Must exist prior to .add_handler() self.handle = router.add_handler( fn=self._on_receive, handle=handle, policy=policy, persist=persist, respondent=respondent, overwrite=overwrite, ) def __repr__(self): return 'Receiver(%r, %r)' % (self.router, self.handle) def __enter__(self): return self def __exit__(self, _1, _2, _3): self.close() def to_sender(self): """ Return a :class:`Sender` configured to deliver messages to this receiver. As senders are serializable, this makes it convenient to pass `(context_id, handle)` pairs around:: def deliver_monthly_report(sender): for line in open('monthly_report.txt'): sender.send(line) sender.close() @mitogen.main() def main(router): remote = router.ssh(hostname='mainframe') recv = mitogen.core.Receiver(router) remote.call(deliver_monthly_report, recv.to_sender()) for msg in recv: print(msg) """ return Sender(self.router.myself(), self.handle) def _on_receive(self, msg): """ Callback registered for the handle with :class:`Router`; appends data to the internal queue. """ _vv and IOLOG.debug('%r._on_receive(%r)', self, msg) self._latch.put(msg) if self.notify: self.notify(self) closed_msg = 'the Receiver has been closed' def close(self): """ Unregister the receiver's handle from its associated router, and cause :class:`ChannelError` to be raised in any thread waiting in :meth:`get` on this receiver. """ if self.handle: self.router.del_handler(self.handle) self.handle = None self._latch.close() def size(self): """ Return the number of items currently buffered. As with :class:`Queue.Queue`, `0` may be returned even though a subsequent call to :meth:`get` will succeed, since a message may be posted at any moment between :meth:`size` and :meth:`get`. As with :class:`Queue.Queue`, `>0` may be returned even though a subsequent call to :meth:`get` will block, since another waiting thread may be woken at any moment between :meth:`size` and :meth:`get`. 
:raises LatchError: The underlying latch has already been marked closed. """ return self._latch.size() def empty(self): """ Return `size() == 0`. .. deprecated:: 0.2.8 Use :meth:`size` instead. :raises LatchError: The latch has already been marked closed. """ return self._latch.empty() def get(self, timeout=None, block=True, throw_dead=True): """ Sleep waiting for a message to arrive on this receiver. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :raises mitogen.core.ChannelError: The remote end indicated the channel should be closed, communication with it was lost, or :meth:`close` was called in the local process. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: :class:`Message` that was received. """ _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) try: msg = self._latch.get(timeout=timeout, block=block) except LatchError: raise ChannelError(self.closed_msg) if msg.is_dead and throw_dead: msg._throw_dead() return msg def __iter__(self): """ Yield consecutive :class:`Message` instances delivered to this receiver until :class:`ChannelError` is raised. """ while True: try: msg = self.get() except ChannelError: return yield msg class Channel(Sender, Receiver): """ A channel inherits from :class:`mitogen.core.Sender` and `mitogen.core.Receiver` to provide bidirectional functionality. .. deprecated:: 0.2.0 This class is incomplete and obsolete, it will be removed in Mitogen 0.3. Channels were an early attempt at syntax sugar. It is always easier to pass around unidirectional pairs of senders/receivers, even though the syntax is baroque: .. literalinclude:: ../examples/ping_pong.py Since all handles aren't known until after both ends are constructed, for both ends to communicate through a channel, it is necessary for one end to retrieve the handle allocated to the other and reconfigure its own channel to match. Currently this is a manual task. 
""" def __init__(self, router, context, dst_handle, handle=None): Sender.__init__(self, context, dst_handle) Receiver.__init__(self, router, handle) def close(self): Receiver.close(self) Sender.close(self) def __repr__(self): return 'Channel(%s, %s)' % ( Sender.__repr__(self), Receiver.__repr__(self) ) class Importer(object): """ Import protocol implementation that fetches modules from the parent process. :param context: Context to communicate via. """ # The Mitogen package is handled specially, since the child context must # construct it manually during startup. MITOGEN_PKG_CONTENT = [ 'buildah', 'compat', 'debug', 'doas', 'docker', 'kubectl', 'fakessh', 'fork', 'jail', 'lxc', 'lxd', 'master', 'minify', 'os_fork', 'parent', 'select', 'service', 'setns', 'ssh', 'su', 'sudo', 'utils', ] ALWAYS_BLACKLIST = [ # 2.x generates needless imports for 'builtins', while 3.x does the # same for '__builtin__'. The correct one is built-in, the other always # a negative round-trip. 'builtins', '__builtin__', 'thread', # org.python.core imported by copy, pickle, xml.sax; breaks Jython, but # very unlikely to trigger a bug report. 'org', ] if PY3: ALWAYS_BLACKLIST += ['cStringIO'] def __init__(self, router, context, core_src, whitelist=(), blacklist=()): self._log = logging.getLogger('mitogen.importer') self._context = context self._present = {'mitogen': self.MITOGEN_PKG_CONTENT} self._lock = threading.Lock() self.whitelist = list(whitelist) or [''] self.blacklist = list(blacklist) + self.ALWAYS_BLACKLIST # Preserve copies of the original server-supplied whitelist/blacklist # for later use by children. self.master_whitelist = self.whitelist[:] self.master_blacklist = self.blacklist[:] # Presence of an entry in this map indicates in-flight GET_MODULE. 
self._callbacks = {} self._cache = {} if core_src: self._update_linecache('x/mitogen/core.py', core_src) self._cache['mitogen.core'] = ( 'mitogen.core', None, 'x/mitogen/core.py', zlib.compress(core_src, 9), [], ) self._install_handler(router) def _update_linecache(self, path, data): """ The Python 2.4 linecache module, used to fetch source code for tracebacks and :func:`inspect.getsource`, does not support PEP-302, meaning it needs extra help to for Mitogen-loaded modules. Directly populate its cache if a loaded module belongs to the Mitogen package. """ if PY24 and 'mitogen' in path: linecache.cache[path] = ( len(data), 0.0, [line+'\n' for line in data.splitlines()], path, ) def _install_handler(self, router): router.add_handler( fn=self._on_load_module, handle=LOAD_MODULE, policy=has_parent_authority, ) def __repr__(self): return 'Importer' def builtin_find_module(self, fullname): # imp.find_module() will always succeed for __main__, because it is a # built-in module. That means it exists on a special linked list deep # within the bowels of the interpreter. We must special case it. 
if fullname == '__main__': raise ModuleNotFoundError() parent, _, modname = str_rpartition(fullname, '.') if parent: path = sys.modules[parent].__path__ else: path = None fp, pathname, description = imp.find_module(modname, path) if fp: fp.close() def find_module(self, fullname, path=None): if hasattr(_tls, 'running'): return None _tls.running = True try: #_v and self._log.debug('Python requested %r', fullname) fullname = to_text(fullname) pkgname, dot, _ = str_rpartition(fullname, '.') pkg = sys.modules.get(pkgname) if pkgname and getattr(pkg, '__loader__', None) is not self: self._log.debug('%s is submodule of a locally loaded package', fullname) return None suffix = fullname[len(pkgname+dot):] if pkgname and suffix not in self._present.get(pkgname, ()): self._log.debug('%s has no submodule %s', pkgname, suffix) return None # #114: explicitly whitelisted prefixes override any # system-installed package. if self.whitelist != ['']: if any(fullname.startswith(s) for s in self.whitelist): return self try: self.builtin_find_module(fullname) _vv and self._log.debug('%r is available locally', fullname) except ImportError: _vv and self._log.debug('we will try to load %r', fullname) return self finally: del _tls.running blacklisted_msg = ( '%r is present in the Mitogen importer blacklist, therefore this ' 'context will not attempt to request it from the master, as the ' 'request will always be refused.' ) pkg_resources_msg = ( 'pkg_resources is prohibited from importing __main__, as it causes ' 'problems in applications whose main module is not designed to be ' 're-imported by children.' ) absent_msg = ( 'The Mitogen master process was unable to serve %r. It may be a ' 'native Python extension, or it may be missing entirely. Check the ' 'importer debug logs on the master for more information.' 
) def _refuse_imports(self, fullname): if is_blacklisted_import(self, fullname): raise ModuleNotFoundError(self.blacklisted_msg % (fullname,)) f = sys._getframe(2) requestee = f.f_globals['__name__'] if fullname == '__main__' and requestee == 'pkg_resources': # Anything that imports pkg_resources will eventually cause # pkg_resources to try and scan __main__ for its __requires__ # attribute (pkg_resources/__init__.py::_build_master()). This # breaks any app that is not expecting its __main__ to suddenly be # sucked over a network and injected into a remote process, like # py.test. raise ModuleNotFoundError(self.pkg_resources_msg) if fullname == 'pbr': # It claims to use pkg_resources to read version information, which # would result in PEP-302 being used, but it actually does direct # filesystem access. So instead smodge the environment to override # any version that was defined. This will probably break something # later. os.environ['PBR_VERSION'] = '0.0.0' def _on_load_module(self, msg): if msg.is_dead: return tup = msg.unpickle() fullname = tup[0] _v and self._log.debug('received %s', fullname) self._lock.acquire() try: self._cache[fullname] = tup if tup[2] is not None and PY24: self._update_linecache( path='master:' + tup[2], data=zlib.decompress(tup[3]) ) callbacks = self._callbacks.pop(fullname, []) finally: self._lock.release() for callback in callbacks: callback() def _request_module(self, fullname, callback): self._lock.acquire() try: present = fullname in self._cache if not present: funcs = self._callbacks.get(fullname) if funcs is not None: _v and self._log.debug('existing request for %s in flight', fullname) funcs.append(callback) else: _v and self._log.debug('sending new %s request to parent', fullname) self._callbacks[fullname] = [callback] self._context.send( Message(data=b(fullname), handle=GET_MODULE) ) finally: self._lock.release() if present: callback() def load_module(self, fullname): fullname = to_text(fullname) _v and 
self._log.debug('requesting %s', fullname) self._refuse_imports(fullname) event = threading.Event() self._request_module(fullname, event.set) event.wait() ret = self._cache[fullname] if ret[2] is None: raise ModuleNotFoundError(self.absent_msg % (fullname,)) pkg_present = ret[1] mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__file__ = self.get_filename(fullname) mod.__loader__ = self if pkg_present is not None: # it's a package. mod.__path__ = [] mod.__package__ = fullname self._present[fullname] = pkg_present else: mod.__package__ = str_rpartition(fullname, '.')[0] or None if mod.__package__ and not PY3: # 2.x requires __package__ to be exactly a string. mod.__package__, _ = encodings.utf_8.encode(mod.__package__) source = self.get_source(fullname) try: code = compile(source, mod.__file__, 'exec', 0, 1) except SyntaxError: LOG.exception('while importing %r', fullname) raise if PY3: exec(code, vars(mod)) else: exec('exec code in vars(mod)') # #590: if a module replaces itself in sys.modules during import, below # is necessary. This matches PyImport_ExecCodeModuleEx() return sys.modules.get(fullname, mod) def get_filename(self, fullname): if fullname in self._cache: path = self._cache[fullname][2] if path is None: # If find_loader() returns self but a subsequent master RPC # reveals the module can't be loaded, and so load_module() # throws ImportError, on Python 3.x it is still possible for # the loader to be called to fetch metadata. 
raise ModuleNotFoundError(self.absent_msg % (fullname,)) return u'master:' + self._cache[fullname][2] def get_source(self, fullname): if fullname in self._cache: compressed = self._cache[fullname][3] if compressed is None: raise ModuleNotFoundError(self.absent_msg % (fullname,)) source = zlib.decompress(self._cache[fullname][3]) if PY3: return to_text(source) return source class LogHandler(logging.Handler): def __init__(self, context): logging.Handler.__init__(self) self.context = context self.local = threading.local() self._buffer = [] # Private synchronization is needed while corked, to ensure no # concurrent call to _send() exists during uncork(). self._buffer_lock = threading.Lock() def uncork(self): """ #305: during startup :class:`LogHandler` may be installed before it is possible to route messages, therefore messages are buffered until :meth:`uncork` is called by :class:`ExternalContext`. """ self._buffer_lock.acquire() try: self._send = self.context.send for msg in self._buffer: self._send(msg) self._buffer = None finally: self._buffer_lock.release() def _send(self, msg): self._buffer_lock.acquire() try: if self._buffer is None: # uncork() may run concurrent to _send() self._send(msg) else: self._buffer.append(msg) finally: self._buffer_lock.release() def emit(self, rec): if rec.name == 'mitogen.io' or \ getattr(self.local, 'in_emit', False): return self.local.in_emit = True try: msg = self.format(rec) encoded = '%s\x00%s\x00%s' % (rec.name, rec.levelno, msg) if isinstance(encoded, UnicodeType): # Logging package emits both :( encoded = encoded.encode('utf-8') self._send(Message(data=encoded, handle=FORWARD_LOG)) finally: self.local.in_emit = False class Stream(object): #: A :class:`Side` representing the stream's receive file descriptor. receive_side = None #: A :class:`Side` representing the stream's transmit file descriptor. transmit_side = None #: A :class:`Protocol` representing the protocol active on the stream. 
    protocol = None

    #: In parents, the :class:`mitogen.parent.Connection` instance.
    conn = None

    name = u'default'

    def set_protocol(self, protocol):
        """
        Bind a protocol to this stream, by updating
        :attr:`Protocol.stream` to refer to this stream, and updating this
        stream's :attr:`Stream.protocol` to the refer to the protocol. Any
        prior protocol's :attr:`Protocol.stream` is set to :data:`None`.
        """
        if self.protocol:
            self.protocol.stream = None
        self.protocol = protocol
        self.protocol.stream = self

    def accept(self, rfp, wfp):
        # Wrap the read/write file objects in Sides that manage FD lifetime.
        self.receive_side = Side(self, rfp)
        self.transmit_side = Side(self, wfp)

    def __repr__(self):
        return "<Stream %s>" % (self.name,)

    def on_receive(self, broker):
        """
        Called by :class:`Broker` when the stream's :attr:`receive_side` has
        been marked readable using :meth:`Broker.start_receive` and the broker
        has detected the associated file descriptor is ready for reading.

        Subclasses must implement this if :meth:`Broker.start_receive` is ever
        called on them, and the method must call :meth:`on_disconect` if
        reading produces an empty string.
        """
        buf = self.receive_side.read(self.protocol.read_size)
        if not buf:
            # Empty read means EOF/disconnection.
            LOG.debug('%r: empty read, disconnecting', self.receive_side)
            return self.on_disconnect(broker)

        self.protocol.on_receive(broker, buf)

    def on_transmit(self, broker):
        """
        Called by :class:`Broker` when the stream's :attr:`transmit_side` has
        been marked writeable using :meth:`Broker._start_transmit` and the
        broker has detected the associated file descriptor is ready for
        writing.

        Subclasses must implement this if :meth:`Broker._start_transmit` is
        ever called on them.
        """
        self.protocol.on_transmit(broker)

    def on_shutdown(self, broker):
        """
        Called by :meth:`Broker.shutdown` to allow the stream time to
        gracefully shutdown. The base implementation simply called
        :meth:`on_disconnect`.
        """
        fire(self, 'shutdown')
        self.protocol.on_shutdown(broker)

    def on_disconnect(self, broker):
        """
        Called by :class:`Broker` to force disconnect the stream. The base
        implementation simply closes :attr:`receive_side` and
        :attr:`transmit_side` and unregisters the stream from the broker.
        """
        fire(self, 'disconnect')
        self.protocol.on_disconnect(broker)


class Protocol(object):
    """
    Implement the program behaviour associated with activity on a
    :class:`Stream`. The protocol in use may vary over a stream's life, for
    example to allow :class:`mitogen.parent.BootstrapProtocol` to initialize
    the connected child before handing it off to :class:`MitogenProtocol`. A
    stream's active protocol is tracked in the :attr:`Stream.protocol`
    attribute, and modified via :meth:`Stream.set_protocol`.

    Protocols do not handle IO, they are entirely reliant on the interface
    provided by :class:`Stream` and :class:`Side`, allowing the underlying IO
    implementation to be replaced without modifying behavioural logic.
    """
    stream_class = Stream

    #: The :class:`Stream` this protocol is currently bound to, or
    #: :data:`None`.
    stream = None

    read_size = CHUNK_SIZE

    @classmethod
    def build_stream(cls, *args, **kwargs):
        # Convenience constructor: make a stream and bind a new protocol
        # instance to it in one step.
        stream = cls.stream_class()
        stream.set_protocol(cls(*args, **kwargs))
        return stream

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            self.stream and self.stream.name,
        )

    def on_shutdown(self, broker):
        _v and LOG.debug('%r: shutting down', self)
        self.stream.on_disconnect(broker)

    def on_disconnect(self, broker):
        # Normally both sides an FD, so it is important that tranmit_side is
        # deregistered from Poller before closing the receive side, as pollers
        # like epoll and kqueue unregister all events on FD close, causing
        # subsequent attempt to unregister the transmit side to fail.
        LOG.debug('%r: disconnecting', self)
        broker.stop_receive(self.stream)
        if self.stream.transmit_side:
            broker._stop_transmit(self.stream)

        self.stream.receive_side.close()
        if self.stream.transmit_side:
            self.stream.transmit_side.close()


class DelimitedProtocol(Protocol):
    """
    Provide a :meth:`Protocol.on_receive` implementation for protocols that
    are delimited by a fixed string, like text based protocols. Each message
    is passed to :meth:`on_line_received` as it arrives, with incomplete
    messages passed to :meth:`on_partial_line_received`.

    When emulating user input it is often necessary to respond to incomplete
    lines, such as when a "Password: " prompt is sent.
    :meth:`on_partial_line_received` may be called repeatedly with an
    increasingly complete message. When a complete message is finally
    received, :meth:`on_line_received` will be called once for it before the
    buffer is discarded.

    If :func:`on_line_received` returns :data:`False`, remaining data is
    passed unprocessed to the stream's current protocol's :meth:`on_receive`.
    This allows switching from line-oriented to binary while the input buffer
    contains both kinds of data.
    """
    #: The delimiter. Defaults to newline.
    delimiter = b('\n')
    _trailer = b('')

    def on_receive(self, broker, buf):
        _vv and IOLOG.debug('%r.on_receive()', self)
        stream = self.stream
        self._trailer, cont = mitogen.core.iter_split(
            buf=self._trailer + buf,
            delim=self.delimiter,
            func=self.on_line_received,
        )

        if self._trailer:
            if cont:
                self.on_partial_line_received(self._trailer)
            else:
                # on_line_received() switched protocols mid-buffer; hand the
                # remainder to the new protocol.
                assert stream.protocol is not self
                stream.protocol.on_receive(broker, self._trailer)

    def on_line_received(self, line):
        """
        Receive a line from the stream.

        :param bytes line:
            The encoded line, excluding the delimiter.
        :returns:
            :data:`False` to indicate this invocation modified the stream's
            active protocol, and any remaining buffered data should be passed
            to the new protocol's :meth:`on_receive` method.

            Any other return value is ignored.
        """
        pass

    def on_partial_line_received(self, line):
        """
        Receive a trailing unterminated partial line from the stream.

        :param bytes line:
            The encoded partial line.
        """
        pass


class BufferedWriter(object):
    """
    Implement buffered output while avoiding quadratic string operations. This
    is currently constructed by each protocol, in future it may become fixed
    for each stream instead.
    """
    def __init__(self, broker, protocol):
        self._broker = broker
        self._protocol = protocol
        self._buf = collections.deque()
        self._len = 0

    def write(self, s):
        """
        Transmit `s` immediately, falling back to enqueuing it and marking the
        stream writeable if no OS buffer space is available.
        """
        if not self._len:
            # Modifying epoll/Kqueue state is expensive, as are needless broker
            # loops. Rather than wait for writeability, just write immediately,
            # and fall back to the broker loop on error or full buffer.
            try:
                n = self._protocol.stream.transmit_side.write(s)
                if n:
                    if n == len(s):
                        return
                    s = s[n:]
            except OSError:
                pass

            self._broker._start_transmit(self._protocol.stream)
        self._buf.append(s)
        self._len += len(s)

    def on_transmit(self, broker):
        """
        Respond to stream writeability by retrying previously buffered
        :meth:`write` calls.
        """
        if self._buf:
            buf = self._buf.popleft()
            written = self._protocol.stream.transmit_side.write(buf)
            if not written:
                _v and LOG.debug('disconnected during write to %r', self)
                self._protocol.stream.on_disconnect(broker)
                return
            elif written != len(buf):
                # Partial write: requeue the unsent tail at the front.
                self._buf.appendleft(BufferType(buf, written))

            _vv and IOLOG.debug('transmitted %d bytes to %r', written, self)
            self._len -= written

        if not self._buf:
            broker._stop_transmit(self._protocol.stream)


class Side(object):
    """
    Represent one side of a :class:`Stream`. This allows unidirectional (e.g.
    pipe) and bidirectional (e.g. socket) streams to operate identically.
    Sides are also responsible for tracking the open/closed state of the
    underlying FD, preventing erroneous duplicate calls to :func:`os.close`
    due to duplicate :meth:`Stream.on_disconnect` calls, which would otherwise
    risk silently succeeding by closing an unrelated descriptor. For this
    reason, it is crucial only one file object exists per unique descriptor.

    :param mitogen.core.Stream stream:
        The stream this side is associated with.
    :param object fp:
        The file or socket object managing the underlying file descriptor. Any
        object may be used that supports `fileno()` and `close()` methods.
    :param bool cloexec:
        If :data:`True`, the descriptor has its :data:`fcntl.FD_CLOEXEC` flag
        enabled using :func:`fcntl.fcntl`.
    :param bool keep_alive:
        If :data:`True`, the continued existence of this side will extend the
        shutdown grace period until it has been unregistered from the broker.
    :param bool blocking:
        If :data:`False`, the descriptor has its :data:`os.O_NONBLOCK` flag
        enabled using :func:`fcntl.fcntl`.
    """
    # Weak map of live Sides, used to close them all in children after fork.
    _fork_refs = weakref.WeakValueDictionary()
    # True once close() has run; guards duplicate closes and post-close IO.
    closed = False

    def __init__(self, stream, fp, cloexec=True, keep_alive=True,
                 blocking=False):
        #: The :class:`Stream` for which this is a read or write side.
        self.stream = stream
        # File or socket object responsible for the lifetime of its underlying
        # file descriptor.
        self.fp = fp
        #: Integer file descriptor to perform IO on, or :data:`None` if
        #: :meth:`close` has been called. This is saved separately from the
        #: file object, since :meth:`file.fileno` cannot be called on it after
        #: it has been closed.
        self.fd = fp.fileno()
        #: If :data:`True`, causes presence of this side in
        #: :class:`Broker`'s active reader set to defer shutdown until the
        #: side is disconnected.
        self.keep_alive = keep_alive
        self._fork_refs[id(self)] = self
        if cloexec:
            set_cloexec(self.fd)
        if not blocking:
            set_nonblock(self.fd)

    def __repr__(self):
        return '<Side of %s fd %s>' % (
            self.stream.name or repr(self.stream),
            self.fd
        )

    @classmethod
    def _on_fork(cls):
        # Close every inherited descriptor in the child; the parent's streams
        # are meaningless there.
        while cls._fork_refs:
            _, side = cls._fork_refs.popitem()
            _vv and IOLOG.debug('Side._on_fork() closing %r', side)
            side.close()

    def close(self):
        """
        Call :meth:`file.close` on :attr:`fp` if it is not :data:`None`,
        then set it to :data:`None`.
        """
        _vv and IOLOG.debug('%r.close()', self)
        if not self.closed:
            self.closed = True
            self.fp.close()

    def read(self, n=CHUNK_SIZE):
        """
        Read up to `n` bytes from the file descriptor, wrapping the underlying
        :func:`os.read` call with :func:`io_op` to trap common disconnection
        conditions.

        :meth:`read` always behaves as if it is reading from a regular UNIX
        file; socket, pipe, and TTY disconnection errors are masked and result
        in a 0-sized read like a regular file.

        :returns:
            Bytes read, or the empty string to indicate disconnection was
            detected.
        """
        if self.closed:
            # Refuse to touch the handle after closed, it may have been reused
            # by another thread. TODO: synchronize read()/write()/close().
            return b('')
        s, disconnected = io_op(os.read, self.fd, n)
        if disconnected:
            LOG.debug('%r: disconnected during read: %s', self, disconnected)
            return b('')
        return s

    def write(self, s):
        """
        Write as much of the bytes from `s` as possible to the file
        descriptor, wrapping the underlying :func:`os.write` call with
        :func:`io_op` to trap common disconnection conditions.

        :returns:
            Number of bytes written, or :data:`None` if disconnection was
            detected.
        """
        if self.closed:
            # Don't touch the handle after close, it may be reused elsewhere.
            return None

        written, disconnected = io_op(os.write, self.fd, s)
        if disconnected:
            LOG.debug('%r: disconnected during write: %s', self, disconnected)
            return None
        return written


class MitogenProtocol(Protocol):
    """
    :class:`Protocol` implementing mitogen's :ref:`stream protocol
    <stream-protocol>`.
    """
    #: If not :data:`None`, :class:`Router` stamps this into
    #: :attr:`Message.auth_id` of every message received on this stream.
    auth_id = None

    #: If not :data:`False`, indicates the stream has :attr:`auth_id` set and
    #: its value is the same as :data:`mitogen.context_id` or appears in
    #: :data:`mitogen.parent_ids`.
    is_privileged = False

    def __init__(self, router, remote_id):
        self._router = router
        self.remote_id = remote_id
        # Modules the peer is known to already have; avoids redundant sends.
        self.sent_modules = set(['mitogen', 'mitogen.core'])
        self._input_buf = collections.deque()
        self._input_buf_len = 0
        self._writer = BufferedWriter(router.broker, self)

        #: Routing records the dst_id of every message arriving from this
        #: stream. Any arriving DEL_ROUTE is rebroadcast for any such ID.
        self.egress_ids = set()

    def on_receive(self, broker, buf):
        """
        Handle the next complete message on the stream. Raise
        :class:`StreamError` on failure.
        """
        _vv and IOLOG.debug('%r.on_receive()', self)
        # Coalesce tiny fragments to keep header parsing cheap.
        if self._input_buf and self._input_buf_len < 128:
            self._input_buf[0] += buf
        else:
            self._input_buf.append(buf)

        self._input_buf_len += len(buf)
        while self._receive_one(broker):
            pass

    corrupt_msg = (
        '%s: Corruption detected: frame signature incorrect. This likely means'
        ' some external process is interfering with the connection.
Received:'
        '\n\n'
        '%r'
    )

    def _receive_one(self, broker):
        # Try to extract a single complete message from the buffered input.
        # Returns True if a message was parsed and routed, so the caller
        # loops; False if more bytes are required or the stream was dropped.
        if self._input_buf_len < Message.HEADER_LEN:
            return False

        msg = Message()
        msg.router = self._router
        # on_receive() coalesces small leading chunks, so once enough bytes
        # exist the fixed-size header can be sliced from the first chunk.
        (magic, msg.dst_id, msg.src_id, msg.auth_id,
         msg.handle, msg.reply_to, msg_len) = struct.unpack(
            Message.HEADER_FMT,
            self._input_buf[0][:Message.HEADER_LEN],
        )

        if magic != Message.HEADER_MAGIC:
            # Frame signature mismatch: the stream is corrupt, give up on it.
            LOG.error(self.corrupt_msg, self.stream.name,
                      self._input_buf[0][:2048])
            self.stream.on_disconnect(broker)
            return False

        if msg_len > self._router.max_message_size:
            # Refuse to buffer oversized frames; drop the connection instead.
            LOG.error('Maximum message size exceeded (got %d, max %d)',
                      msg_len, self._router.max_message_size)
            self.stream.on_disconnect(broker)
            return False

        total_len = msg_len + Message.HEADER_LEN
        if self._input_buf_len < total_len:
            _vv and IOLOG.debug(
                '%r: Input too short (want %d, got %d)',
                self, msg_len, self._input_buf_len - Message.HEADER_LEN
            )
            return False

        # Reassemble the payload, which may span several buffered chunks.
        # `start` skips the header in the first chunk only; `remain` counts
        # down the bytes still owed for this frame.
        start = Message.HEADER_LEN
        prev_start = start
        remain = total_len
        bits = []
        while remain:
            buf = self._input_buf.popleft()
            bit = buf[start:remain]
            bits.append(bit)
            remain -= len(bit) + start
            prev_start = start
            start = 0

        msg.data = b('').join(bits)
        # Push back any bytes belonging to the *next* frame that trailed the
        # final chunk consumed above.
        self._input_buf.appendleft(buf[prev_start+len(bit):])
        self._input_buf_len -= total_len
        self._router._async_route(msg, self.stream)
        return True

    def pending_bytes(self):
        """
        Return the number of bytes queued for transmission on this stream.
        This can be used to limit the amount of data buffered in RAM by an
        otherwise unlimited consumer.

        For an accurate result, this method should be called from the Broker
        thread, for example by using :meth:`Broker.defer_sync`.
        """
        return self._writer._len

    def on_transmit(self, broker):
        """
        Transmit buffered messages.
        """
        _vv and IOLOG.debug('%r.on_transmit()', self)
        self._writer.on_transmit(broker)

    def _send(self, msg):
        # Serialize and enqueue; must run on the broker thread, which is
        # arranged by send() below via Broker.defer().
        _vv and IOLOG.debug('%r._send(%r)', self, msg)
        self._writer.write(msg.pack())

    def send(self, msg):
        """
        Send `data` to `handle`, and tell the broker we have output. May be
        called from any thread.
""" self._router.broker.defer(self._send, msg) def on_shutdown(self, broker): """ Disable :class:`Protocol` immediate disconnect behaviour. """ _v and LOG.debug('%r: shutting down', self) class Context(object): """ Represent a remote context regardless of the underlying connection method. Context objects are simple facades that emit messages through an associated router, and have :ref:`signals` raised against them in response to various events relating to the context. **Note:** This is the somewhat limited core version, used by child contexts. The master subclass is documented below this one. Contexts maintain no internal state and are thread-safe. Prefer :meth:`Router.context_by_id` over constructing context objects explicitly, as that method is deduplicating, and returns the only context instance :ref:`signals` will be raised on. :param mitogen.core.Router router: Router to emit messages through. :param int context_id: Context ID. :param str name: Context name. """ name = None remote_name = None def __init__(self, router, context_id, name=None): self.router = router self.context_id = context_id if name: self.name = to_text(name) def __reduce__(self): return _unpickle_context, (self.context_id, self.name) def on_disconnect(self): _v and LOG.debug('%r: disconnecting', self) fire(self, 'disconnect') def send_async(self, msg, persist=False): """ Arrange for `msg` to be delivered to this context, with replies directed to a newly constructed receiver. :attr:`dst_id <Message.dst_id>` is set to the target context ID, and :attr:`reply_to <Message.reply_to>` is set to the newly constructed receiver's handle. :param bool persist: If :data:`False`, the handler will be unregistered after a single message has been received. :param mitogen.core.Message msg: The message. :returns: :class:`Receiver` configured to receive any replies sent to the message's `reply_to` handle. 
""" receiver = Receiver(self.router, persist=persist, respondent=self) msg.dst_id = self.context_id msg.reply_to = receiver.handle _v and LOG.debug('sending message to %r: %r', self, msg) self.send(msg) return receiver def call_service_async(self, service_name, method_name, **kwargs): _v and LOG.debug('calling service %s.%s of %r, args: %r', service_name, method_name, self, kwargs) if isinstance(service_name, BytesType): service_name = service_name.encode('utf-8') elif not isinstance(service_name, UnicodeType): service_name = service_name.name() # Service.name() tup = (service_name, to_text(method_name), Kwargs(kwargs)) msg = Message.pickled(tup, handle=CALL_SERVICE) return self.send_async(msg) def send(self, msg): """ Arrange for `msg` to be delivered to this context. :attr:`dst_id <Message.dst_id>` is set to the target context ID. :param Message msg: Message. """ msg.dst_id = self.context_id self.router.route(msg) def call_service(self, service_name, method_name, **kwargs): recv = self.call_service_async(service_name, method_name, **kwargs) return recv.get().unpickle() def send_await(self, msg, deadline=None): """ Like :meth:`send_async`, but expect a single reply (`persist=False`) delivered within `deadline` seconds. :param mitogen.core.Message msg: The message. :param float deadline: If not :data:`None`, seconds before timing out waiting for a reply. :returns: Deserialized reply. :raises TimeoutError: No message was received and `deadline` passed. 
""" receiver = self.send_async(msg) response = receiver.get(deadline) data = response.unpickle() _vv and IOLOG.debug('%r._send_await() -> %r', self, data) return data def __repr__(self): return 'Context(%s, %r)' % (self.context_id, self.name) def _unpickle_context(context_id, name, router=None): if not (isinstance(context_id, (int, long)) and context_id >= 0 and ( (name is None) or (isinstance(name, UnicodeType) and len(name) < 100)) ): raise TypeError('cannot unpickle Context: bad input') if isinstance(router, Router): return router.context_by_id(context_id, name=name) return Context(None, context_id, name) # For plain Jane pickle. class Poller(object): """ A poller manages OS file descriptors the user is waiting to become available for IO. The :meth:`poll` method blocks the calling thread until one or more become ready. The default implementation is based on :func:`select.poll`. Each descriptor has an associated `data` element, which is unique for each readiness type, and defaults to being the same as the file descriptor. The :meth:`poll` method yields the data associated with a descriptor, rather than the descriptor itself, allowing concise loops like:: p = Poller() p.start_receive(conn.fd, data=conn.on_read) p.start_transmit(conn.fd, data=conn.on_write) for callback in p.poll(): callback() # invoke appropriate bound instance method Pollers may be modified while :meth:`poll` is yielding results. Removals are processed immediately, causing pending events for the descriptor to be discarded. The :meth:`close` method must be called when a poller is discarded to avoid a resource leak. Pollers may only be used by one thread at a time. """ SUPPORTED = True # This changed from select() to poll() in Mitogen 0.2.4. Since poll() has # no upper FD limit, it is suitable for use with Latch, which must handle # FDs larger than select's limit during many-host runs. 
We want this # because poll() requires no setup and teardown: just a single system call, # which is important because Latch.get() creates a Poller on each # invocation. In a microbenchmark, poll() vs. epoll_ctl() is 30% faster in # this scenario. If select() must return in future, it is important # Latch.poller_class is set from parent.py to point to the industrial # strength poller for the OS, otherwise Latch will fail randomly. #: Increments on every poll(). Used to version _rfds and _wfds. _generation = 1 def __init__(self): self._rfds = {} self._wfds = {} def __repr__(self): return '%s' % (type(self).__name__,) def _update(self, fd): """ Required by PollPoller subclass. """ pass @property def readers(self): """ Return a list of `(fd, data)` tuples for every FD registered for receive readiness. """ return list((fd, data) for fd, (data, gen) in self._rfds.items()) @property def writers(self): """ Return a list of `(fd, data)` tuples for every FD registered for transmit readiness. """ return list((fd, data) for fd, (data, gen) in self._wfds.items()) def close(self): """ Close any underlying OS resource used by the poller. """ pass def start_receive(self, fd, data=None): """ Cause :meth:`poll` to yield `data` when `fd` is readable. """ self._rfds[fd] = (data or fd, self._generation) self._update(fd) def stop_receive(self, fd): """ Stop yielding readability events for `fd`. Redundant calls to :meth:`stop_receive` are silently ignored, this may change in future. """ self._rfds.pop(fd, None) self._update(fd) def start_transmit(self, fd, data=None): """ Cause :meth:`poll` to yield `data` when `fd` is writeable. """ self._wfds[fd] = (data or fd, self._generation) self._update(fd) def stop_transmit(self, fd): """ Stop yielding writeability events for `fd`. Redundant calls to :meth:`stop_transmit` are silently ignored, this may change in future. 
""" self._wfds.pop(fd, None) self._update(fd) def _poll(self, timeout): (rfds, wfds, _), _ = io_op(select.select, self._rfds, self._wfds, (), timeout ) for fd in rfds: _vv and IOLOG.debug('%r: POLLIN for %r', self, fd) data, gen = self._rfds.get(fd, (None, None)) if gen and gen < self._generation: yield data for fd in wfds: _vv and IOLOG.debug('%r: POLLOUT for %r', self, fd) data, gen = self._wfds.get(fd, (None, None)) if gen and gen < self._generation: yield data def poll(self, timeout=None): """ Block the calling thread until one or more FDs are ready for IO. :param float timeout: If not :data:`None`, seconds to wait without an event before returning an empty iterable. :returns: Iterable of `data` elements associated with ready FDs. """ _vv and IOLOG.debug('%r.poll(%r)', self, timeout) self._generation += 1 return self._poll(timeout) class Latch(object): """ A latch is a :class:`Queue.Queue`-like object that supports mutation and waiting from multiple threads, however unlike :class:`Queue.Queue`, waiting threads always remain interruptible, so CTRL+C always succeeds, and waits where a timeout is set experience no wake up latency. These properties are not possible in combination using the built-in threading primitives available in Python 2.x. Latches implement queues using the UNIX self-pipe trick, and a per-thread :func:`socket.socketpair` that is lazily created the first time any latch attempts to sleep on a thread, and dynamically associated with the waiting Latch only for duration of the wait. See :ref:`waking-sleeping-threads` for further discussion. """ poller_class = Poller notify = None # The _cls_ prefixes here are to make it crystal clear in the code which # state mutation isn't covered by :attr:`_lock`. #: List of reusable :func:`socket.socketpair` tuples. The list is mutated #: from multiple threads, the only safe operations are `append()` and #: `pop()`. _cls_idle_socketpairs = [] #: List of every socket object that must be closed by :meth:`_on_fork`. 
#: Inherited descriptors cannot be reused, as the duplicated handles #: reference the same underlying kernel object in use by the parent. _cls_all_sockets = [] def __init__(self): self.closed = False self._lock = threading.Lock() #: List of unconsumed enqueued items. self._queue = [] #: List of `(wsock, cookie)` awaiting an element, where `wsock` is the #: socketpair's write side, and `cookie` is the string to write. self._sleeping = [] #: Number of elements of :attr:`_sleeping` that have already been #: woken, and have a corresponding element index from :attr:`_queue` #: assigned to them. self._waking = 0 @classmethod def _on_fork(cls): """ Clean up any files belonging to the parent process after a fork. """ cls._cls_idle_socketpairs = [] while cls._cls_all_sockets: cls._cls_all_sockets.pop().close() def close(self): """ Mark the latch as closed, and cause every sleeping thread to be woken, with :class:`mitogen.core.LatchError` raised in each thread. """ self._lock.acquire() try: self.closed = True while self._waking < len(self._sleeping): wsock, cookie = self._sleeping[self._waking] self._wake(wsock, cookie) self._waking += 1 finally: self._lock.release() def size(self): """ Return the number of items currently buffered. As with :class:`Queue.Queue`, `0` may be returned even though a subsequent call to :meth:`get` will succeed, since a message may be posted at any moment between :meth:`size` and :meth:`get`. As with :class:`Queue.Queue`, `>0` may be returned even though a subsequent call to :meth:`get` will block, since another waiting thread may be woken at any moment between :meth:`size` and :meth:`get`. :raises LatchError: The latch has already been marked closed. """ self._lock.acquire() try: if self.closed: raise LatchError() return len(self._queue) finally: self._lock.release() def empty(self): """ Return `size() == 0`. .. deprecated:: 0.2.8 Use :meth:`size` instead. :raises LatchError: The latch has already been marked closed. 
""" return self.size() == 0 def _get_socketpair(self): """ Return an unused socketpair, creating one if none exist. """ try: return self._cls_idle_socketpairs.pop() # pop() must be atomic except IndexError: rsock, wsock = socket.socketpair() set_cloexec(rsock.fileno()) set_cloexec(wsock.fileno()) self._cls_all_sockets.extend((rsock, wsock)) return rsock, wsock COOKIE_MAGIC, = struct.unpack('L', b('LTCH') * (struct.calcsize('L')//4)) COOKIE_FMT = '>Qqqq' # #545: id() and get_ident() may exceed long on armhfp. COOKIE_SIZE = struct.calcsize(COOKIE_FMT) def _make_cookie(self): """ Return a string encoding the ID of the process, instance and thread. This disambiguates legitimate wake-ups, accidental writes to the FD, and buggy internal FD sharing. """ return struct.pack(self.COOKIE_FMT, self.COOKIE_MAGIC, os.getpid(), id(self), thread.get_ident()) def get(self, timeout=None, block=True): """ Return the next enqueued object, or sleep waiting for one. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :param bool block: If :data:`False`, immediately raise :class:`mitogen.core.TimeoutError` if the latch is empty. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: The de-queued object. 
""" _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) self._lock.acquire() try: if self.closed: raise LatchError() i = len(self._sleeping) if len(self._queue) > i: _vv and IOLOG.debug('%r.get() -> %r', self, self._queue[i]) return self._queue.pop(i) if not block: raise TimeoutError() rsock, wsock = self._get_socketpair() cookie = self._make_cookie() self._sleeping.append((wsock, cookie)) finally: self._lock.release() poller = self.poller_class() poller.start_receive(rsock.fileno()) try: return self._get_sleep(poller, timeout, block, rsock, wsock, cookie) finally: poller.close() def _get_sleep(self, poller, timeout, block, rsock, wsock, cookie): """ When a result is not immediately available, sleep waiting for :meth:`put` to write a byte to our socket pair. """ _vv and IOLOG.debug( '%r._get_sleep(timeout=%r, block=%r, fd=%d/%d)', self, timeout, block, rsock.fileno(), wsock.fileno() ) e = None woken = None try: woken = list(poller.poll(timeout)) except Exception: e = sys.exc_info()[1] self._lock.acquire() try: i = self._sleeping.index((wsock, cookie)) del self._sleeping[i] if not woken: raise e or TimeoutError() got_cookie = rsock.recv(self.COOKIE_SIZE) self._cls_idle_socketpairs.append((rsock, wsock)) assert cookie == got_cookie, ( "Cookie incorrect; got %r, expected %r" \ % (binascii.hexlify(got_cookie), binascii.hexlify(cookie)) ) assert i < self._waking, ( "Cookie correct, but no queue element assigned." ) self._waking -= 1 if self.closed: raise LatchError() _vv and IOLOG.debug('%r.get() wake -> %r', self, self._queue[i]) return self._queue.pop(i) finally: self._lock.release() def put(self, obj=None): """ Enqueue an object, waking the first thread waiting for a result, if one exists. :param obj: Object to enqueue. Defaults to :data:`None` as a convenience when using :class:`Latch` only for synchronization. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. 
""" _vv and IOLOG.debug('%r.put(%r)', self, obj) self._lock.acquire() try: if self.closed: raise LatchError() self._queue.append(obj) wsock = None if self._waking < len(self._sleeping): wsock, cookie = self._sleeping[self._waking] self._waking += 1 _vv and IOLOG.debug('%r.put() -> waking wfd=%r', self, wsock.fileno()) elif self.notify: self.notify(self) finally: self._lock.release() if wsock: self._wake(wsock, cookie) def _wake(self, wsock, cookie): written, disconnected = io_op(os.write, wsock.fileno(), cookie) assert written == len(cookie) and not disconnected def __repr__(self): return 'Latch(%#x, size=%d, t=%r)' % ( id(self), len(self._queue), threading.currentThread().getName(), ) class Waker(Protocol): """ :class:`BasicStream` subclass implementing the `UNIX self-pipe trick`_. Used to wake the multiplexer when another thread needs to modify its state (via a cross-thread function call). .. _UNIX self-pipe trick: https://cr.yp.to/docs/selfpipe.html """ read_size = 1 broker_ident = None @classmethod def build_stream(cls, broker): stream = super(Waker, cls).build_stream(broker) stream.accept(*pipe()) return stream def __init__(self, broker): self._broker = broker self._lock = threading.Lock() self._deferred = [] def __repr__(self): return 'Waker(fd=%r/%r)' % ( self.stream.receive_side and self.stream.receive_side.fd, self.stream.transmit_side and self.stream.transmit_side.fd, ) @property def keep_alive(self): """ Prevent immediate Broker shutdown while deferred functions remain. """ self._lock.acquire() try: return len(self._deferred) finally: self._lock.release() def on_receive(self, broker, buf): """ Drain the pipe and fire callbacks. Since :attr:`_deferred` is synchronized, :meth:`defer` and :meth:`on_receive` can conspire to ensure only one byte needs to be pending regardless of queue length. 
""" _vv and IOLOG.debug('%r.on_receive()', self) self._lock.acquire() try: deferred = self._deferred self._deferred = [] finally: self._lock.release() for func, args, kwargs in deferred: try: func(*args, **kwargs) except Exception: LOG.exception('defer() crashed: %r(*%r, **%r)', func, args, kwargs) broker.shutdown() def _wake(self): """ Wake the multiplexer by writing a byte. If Broker is midway through teardown, the FD may already be closed, so ignore EBADF. """ try: self.stream.transmit_side.write(b(' ')) except OSError: e = sys.exc_info()[1] if e.args[0] != errno.EBADF: raise broker_shutdown_msg = ( "An attempt was made to enqueue a message with a Broker that has " "already exitted. It is likely your program called Broker.shutdown() " "too early." ) def defer(self, func, *args, **kwargs): """ Arrange for `func()` to execute on the broker thread. This function returns immediately without waiting the result of `func()`. Use :meth:`defer_sync` to block until a result is available. :raises mitogen.core.Error: :meth:`defer` was called after :class:`Broker` has begun shutdown. """ if thread.get_ident() == self.broker_ident: _vv and IOLOG.debug('%r.defer() [immediate]', self) return func(*args, **kwargs) if self._broker._exitted: raise Error(self.broker_shutdown_msg) _vv and IOLOG.debug('%r.defer() [fd=%r]', self, self.stream.transmit_side.fd) self._lock.acquire() try: should_wake = not self._deferred self._deferred.append((func, args, kwargs)) finally: self._lock.release() if should_wake: self._wake() class IoLoggerProtocol(DelimitedProtocol): """ Handle redirection of standard IO into the :mod:`logging` package. """ @classmethod def build_stream(cls, name, dest_fd): """ Even though the descriptor `dest_fd` will hold the opposite end of the socket open, we must keep a separate dup() of it (i.e. wsock) in case some code decides to overwrite `dest_fd` later, which would thus break :meth:`on_shutdown`. 
""" rsock, wsock = socket.socketpair() os.dup2(wsock.fileno(), dest_fd) stream = super(IoLoggerProtocol, cls).build_stream(name) stream.name = name stream.accept(rsock, wsock) return stream def __init__(self, name): self._log = logging.getLogger(name) # #453: prevent accidental log initialization in a child creating a # feedback loop. self._log.propagate = False self._log.handlers = logging.getLogger().handlers[:] def on_shutdown(self, broker): """ Shut down the write end of the socket, preventing any further writes to it by this process, or subprocess that inherited it. This allows any remaining kernel-buffered data to be drained during graceful shutdown without the buffer continuously refilling due to some out of control child process. """ _v and LOG.debug('%r: shutting down', self) if not IS_WSL: # #333: WSL generates invalid readiness indication on shutdown(). # This modifies the *kernel object* inherited by children, causing # EPIPE on subsequent writes to any dupped FD in any process. The # read side can then drain completely of prior buffered data. self.stream.transmit_side.fp.shutdown(socket.SHUT_WR) self.stream.transmit_side.close() def on_line_received(self, line): """ Decode the received line as UTF-8 and pass it to the logging framework. """ self._log.info('%s', line.decode('utf-8', 'replace')) class Router(object): """ Route messages between contexts, and invoke local handlers for messages addressed to this context. :meth:`Router.route() <route>` straddles the :class:`Broker` thread and user threads, it is safe to call anywhere. **Note:** This is the somewhat limited core version of the Router class used by child contexts. The master subclass is documented below this one. """ #: The :class:`mitogen.core.Context` subclass to use when constructing new #: :class:`Context` objects in :meth:`myself` and :meth:`context_by_id`. #: Permits :class:`Router` subclasses to extend the :class:`Context` #: interface, as done in :class:`mitogen.parent.Router`. 
context_class = Context max_message_size = 128 * 1048576 #: When :data:`True`, permit children to only communicate with the current #: context or a parent of the current context. Routing between siblings or #: children of parents is prohibited, ensuring no communication is possible #: between intentionally partitioned networks, such as when a program #: simultaneously manipulates hosts spread across a corporate and a #: production network, or production networks that are otherwise #: air-gapped. #: #: Sending a prohibited message causes an error to be logged and a dead #: message to be sent in reply to the errant message, if that message has #: ``reply_to`` set. #: #: The value of :data:`unidirectional` becomes the default for the #: :meth:`local() <mitogen.master.Router.local>` `unidirectional` #: parameter. unidirectional = False duplicate_handle_msg = 'cannot register a handle that already exists' refused_msg = 'refused by policy' invalid_handle_msg = 'invalid handle' too_large_msg = 'message too large (max %d bytes)' respondent_disconnect_msg = 'the respondent Context has disconnected' broker_exit_msg = 'Broker has exitted' no_route_msg = 'no route to %r, my ID is %r' unidirectional_msg = ( 'routing mode prevents forward of message from context %d via ' 'context %d' ) def __init__(self, broker): self.broker = broker listen(broker, 'exit', self._on_broker_exit) self._setup_logging() self._write_lock = threading.Lock() #: context ID -> Stream; must hold _write_lock to edit or iterate self._stream_by_id = {} #: List of contexts to notify of shutdown; must hold _write_lock self._context_by_id = {} self._last_handle = itertools.count(1000) #: handle -> (persistent?, func(msg)) self._handle_map = {} #: Context -> set { handle, .. } self._handles_by_respondent = {} self.add_handler(self._on_del_route, DEL_ROUTE) def __repr__(self): return 'Router(%r)' % (self.broker,) def _setup_logging(self): """ This is done in the :class:`Router` constructor for historical reasons. 
It must be called before ExternalContext logs its first messages, but after logging has been setup. It must also be called when any router is constructed for a consumer app. """ # Here seems as good a place as any. global _v, _vv _v = logging.getLogger().level <= logging.DEBUG _vv = IOLOG.level <= logging.DEBUG def _on_del_route(self, msg): """ Stub :data:`DEL_ROUTE` handler; fires 'disconnect' events on the corresponding :attr:`_context_by_id` member. This is replaced by :class:`mitogen.parent.RouteMonitor` in an upgraded context. """ if msg.is_dead: return target_id_s, _, name = bytes_partition(msg.data, b(':')) target_id = int(target_id_s, 10) LOG.error('%r: deleting route to %s (%d)', self, to_text(name), target_id) context = self._context_by_id.get(target_id) if context: fire(context, 'disconnect') else: LOG.debug('DEL_ROUTE for unknown ID %r: %r', target_id, msg) def _on_stream_disconnect(self, stream): notify = [] self._write_lock.acquire() try: for context in list(self._context_by_id.values()): stream_ = self._stream_by_id.get(context.context_id) if stream_ is stream: del self._stream_by_id[context.context_id] notify.append(context) finally: self._write_lock.release() # Happens outside lock as e.g. RouteMonitor wants the same lock. for context in notify: context.on_disconnect() def _on_broker_exit(self): """ Called prior to broker exit, informs callbacks registered with :meth:`add_handler` the connection is dead. """ _v and LOG.debug('%r: broker has exitted', self) while self._handle_map: _, (_, func, _, _) = self._handle_map.popitem() func(Message.dead(self.broker_exit_msg)) def myself(self): """ Return a :class:`Context` referring to the current process. Since :class:`Context` is serializable, this is convenient to use in remote function call parameter lists. 
""" return self.context_class( router=self, context_id=mitogen.context_id, name='self', ) def context_by_id(self, context_id, via_id=None, create=True, name=None): """ Return or construct a :class:`Context` given its ID. An internal mapping of ID to the canonical :class:`Context` representing that ID, so that :ref:`signals` can be raised. This may be called from any thread, lookup and construction are atomic. :param int context_id: The context ID to look up. :param int via_id: If the :class:`Context` does not already exist, set its :attr:`Context.via` to the :class:`Context` matching this ID. :param bool create: If the :class:`Context` does not already exist, create it. :param str name: If the :class:`Context` does not already exist, set its name. :returns: :class:`Context`, or return :data:`None` if `create` is :data:`False` and no :class:`Context` previously existed. """ context = self._context_by_id.get(context_id) if context: return context if create and via_id is not None: via = self.context_by_id(via_id) else: via = None self._write_lock.acquire() try: context = self._context_by_id.get(context_id) if create and not context: context = self.context_class(self, context_id, name=name) context.via = via self._context_by_id[context_id] = context finally: self._write_lock.release() return context def register(self, context, stream): """ Register a newly constructed context and its associated stream, and add the stream's receive side to the I/O multiplexer. This method remains public while the design has not yet settled. 
""" _v and LOG.debug('%s: registering %r to stream %r', self, context, stream) self._write_lock.acquire() try: self._stream_by_id[context.context_id] = stream self._context_by_id[context.context_id] = context finally: self._write_lock.release() self.broker.start_receive(stream) listen(stream, 'disconnect', lambda: self._on_stream_disconnect(stream)) def stream_by_id(self, dst_id): """ Return the :class:`Stream` that should be used to communicate with `dst_id`. If a specific route for `dst_id` is not known, a reference to the parent context's stream is returned. If the parent is disconnected, or when running in the master context, return :data:`None` instead. This can be used from any thread, but its output is only meaningful from the context of the :class:`Broker` thread, as disconnection or replacement could happen in parallel on the broker thread at any moment. """ return ( self._stream_by_id.get(dst_id) or self._stream_by_id.get(mitogen.parent_id) ) def del_handler(self, handle): """ Remove the handle registered for `handle` :raises KeyError: The handle wasn't registered. """ _, _, _, respondent = self._handle_map.pop(handle) if respondent: self._handles_by_respondent[respondent].discard(handle) def add_handler(self, fn, handle=None, persist=True, policy=None, respondent=None, overwrite=False): """ Invoke `fn(msg)` on the :class:`Broker` thread for each Message sent to `handle` from this context. Unregister after one invocation if `persist` is :data:`False`. If `handle` is :data:`None`, a new handle is allocated and returned. :param int handle: If not :data:`None`, an explicit handle to register, usually one of the ``mitogen.core.*`` constants. If unspecified, a new unused handle will be allocated. :param bool persist: If :data:`False`, the handler will be unregistered after a single message has been received. :param mitogen.core.Context respondent: Context that messages to this handle are expected to be sent from. 
If specified, arranges for a dead message to be delivered to `fn` when disconnection of the context is detected. In future `respondent` will likely also be used to prevent other contexts from sending messages to the handle. :param function policy: Function invoked as `policy(msg, stream)` where `msg` is a :class:`mitogen.core.Message` about to be delivered, and `stream` is the :class:`mitogen.core.Stream` on which it was received. The function must return :data:`True`, otherwise an error is logged and delivery is refused. Two built-in policy functions exist: * :func:`has_parent_authority`: requires the message arrived from a parent context, or a context acting with a parent context's authority (``auth_id``). * :func:`mitogen.parent.is_immediate_child`: requires the message arrived from an immediately connected child, for use in messaging patterns where either something becomes buggy or insecure by permitting indirect upstream communication. In case of refusal, and the message's ``reply_to`` field is nonzero, a :class:`mitogen.core.CallError` is delivered to the sender indicating refusal occurred. :param bool overwrite: If :data:`True`, allow existing handles to be silently overwritten. :return: `handle`, or if `handle` was :data:`None`, the newly allocated handle. :raises Error: Attemp to register handle that was already registered. 
""" handle = handle or next(self._last_handle) _vv and IOLOG.debug('%r.add_handler(%r, %r, %r)', self, fn, handle, persist) if handle in self._handle_map and not overwrite: raise Error(self.duplicate_handle_msg) self._handle_map[handle] = persist, fn, policy, respondent if respondent: if respondent not in self._handles_by_respondent: self._handles_by_respondent[respondent] = set() listen(respondent, 'disconnect', lambda: self._on_respondent_disconnect(respondent)) self._handles_by_respondent[respondent].add(handle) return handle def _on_respondent_disconnect(self, context): for handle in self._handles_by_respondent.pop(context, ()): _, fn, _, _ = self._handle_map[handle] fn(Message.dead(self.respondent_disconnect_msg)) del self._handle_map[handle] def _maybe_send_dead(self, msg, reason, *args): if args: reason %= args LOG.debug('%r: %r is dead: %r', self, msg, reason) if msg.reply_to and not msg.is_dead: msg.reply(Message.dead(reason=reason), router=self) def _invoke(self, msg, stream): # IOLOG.debug('%r._invoke(%r)', self, msg) try: persist, fn, policy, respondent = self._handle_map[msg.handle] except KeyError: self._maybe_send_dead(msg, reason=self.invalid_handle_msg) return if respondent and not (msg.is_dead or msg.src_id == respondent.context_id): self._maybe_send_dead(msg, 'reply from unexpected context') return if policy and not policy(msg, stream): self._maybe_send_dead(msg, self.refused_msg) return if not persist: self.del_handler(msg.handle) try: fn(msg) except Exception: LOG.exception('%r._invoke(%r): %r crashed', self, msg, fn) def _async_route(self, msg, in_stream=None): """ Arrange for `msg` to be forwarded towards its destination. If its destination is the local context, then arrange for it to be dispatched using the local handlers. This is a lower overhead version of :meth:`route` that may only be called from the :class:`Broker` thread. :param Stream in_stream: If not :data:`None`, the stream the message arrived on. 
Used for performing source route verification, to ensure sensitive messages such as ``CALL_FUNCTION`` arrive only from trusted contexts. """ _vv and IOLOG.debug('%r._async_route(%r, %r)', self, msg, in_stream) if len(msg.data) > self.max_message_size: self._maybe_send_dead(msg, self.too_large_msg % ( self.max_message_size, )) return # Perform source verification. if in_stream: parent = self._stream_by_id.get(mitogen.parent_id) expect = self._stream_by_id.get(msg.auth_id, parent) if in_stream != expect: LOG.error('%r: bad auth_id: got %r via %r, not %r: %r', self, msg.auth_id, in_stream, expect, msg) return if msg.src_id != msg.auth_id: expect = self._stream_by_id.get(msg.src_id, parent) if in_stream != expect: LOG.error('%r: bad src_id: got %r via %r, not %r: %r', self, msg.src_id, in_stream, expect, msg) return if in_stream.protocol.auth_id is not None: msg.auth_id = in_stream.protocol.auth_id # Maintain a set of IDs the source ever communicated with. in_stream.protocol.egress_ids.add(msg.dst_id) if msg.dst_id == mitogen.context_id: return self._invoke(msg, in_stream) out_stream = self._stream_by_id.get(msg.dst_id) if out_stream is None: out_stream = self._stream_by_id.get(mitogen.parent_id) if out_stream is None: self._maybe_send_dead(msg, self.no_route_msg, msg.dst_id, mitogen.context_id) return if in_stream and self.unidirectional and not \ (in_stream.protocol.is_privileged or out_stream.protocol.is_privileged): self._maybe_send_dead(msg, self.unidirectional_msg, in_stream.protocol.remote_id, out_stream.protocol.remote_id) return out_stream.protocol._send(msg) def route(self, msg): """ Arrange for the :class:`Message` `msg` to be delivered to its destination using any relevant downstream context, or if none is found, by forwarding the message upstream towards the master context. If `msg` is destined for the local context, it is dispatched using the handles registered with :meth:`add_handler`. This may be called from any thread. 
""" self.broker.defer(self._async_route, msg) class NullTimerList(object): def get_timeout(self): return None class Broker(object): """ Responsible for handling I/O multiplexing in a private thread. **Note:** This somewhat limited core version is used by children. The master subclass is documented below. """ poller_class = Poller _waker = None _thread = None # :func:`mitogen.parent._upgrade_broker` replaces this with # :class:`mitogen.parent.TimerList` during upgrade. timers = NullTimerList() #: Seconds grace to allow :class:`streams <Stream>` to shutdown gracefully #: before force-disconnecting them during :meth:`shutdown`. shutdown_timeout = 3.0 def __init__(self, poller_class=None, activate_compat=True): self._alive = True self._exitted = False self._waker = Waker.build_stream(self) #: Arrange for `func(\*args, \**kwargs)` to be executed on the broker #: thread, or immediately if the current thread is the broker thread. #: Safe to call from any thread. self.defer = self._waker.protocol.defer self.poller = self.poller_class() self.poller.start_receive( self._waker.receive_side.fd, (self._waker.receive_side, self._waker.on_receive) ) self._thread = threading.Thread( target=self._broker_main, name='mitogen.broker' ) self._thread.start() if activate_compat: self._py24_25_compat() def _py24_25_compat(self): """ Python 2.4/2.5 have grave difficulties with threads/fork. We mandatorily quiesce all running threads during fork using a monkey-patch there. """ if sys.version_info < (2, 6): # import_module() is used to avoid dep scanner. os_fork = import_module('mitogen.os_fork') os_fork._notice_broker_or_pool(self) def start_receive(self, stream): """ Mark the :attr:`receive_side <Stream.receive_side>` on `stream` as ready for reading. Safe to call from any thread. When the associated file descriptor becomes ready for reading, :meth:`BasicStream.on_receive` will be called. 
""" _vv and IOLOG.debug('%r.start_receive(%r)', self, stream) side = stream.receive_side assert side and not side.closed self.defer(self.poller.start_receive, side.fd, (side, stream.on_receive)) def stop_receive(self, stream): """ Mark the :attr:`receive_side <Stream.receive_side>` on `stream` as not ready for reading. Safe to call from any thread. """ _vv and IOLOG.debug('%r.stop_receive(%r)', self, stream) self.defer(self.poller.stop_receive, stream.receive_side.fd) def _start_transmit(self, stream): """ Mark the :attr:`transmit_side <Stream.transmit_side>` on `stream` as ready for writing. Must only be called from the Broker thread. When the associated file descriptor becomes ready for writing, :meth:`BasicStream.on_transmit` will be called. """ _vv and IOLOG.debug('%r._start_transmit(%r)', self, stream) side = stream.transmit_side assert side and not side.closed self.poller.start_transmit(side.fd, (side, stream.on_transmit)) def _stop_transmit(self, stream): """ Mark the :attr:`transmit_side <Stream.receive_side>` on `stream` as not ready for writing. """ _vv and IOLOG.debug('%r._stop_transmit(%r)', self, stream) self.poller.stop_transmit(stream.transmit_side.fd) def keep_alive(self): """ Return :data:`True` if any reader's :attr:`Side.keep_alive` attribute is :data:`True`, or any :class:`Context` is still registered that is not the master. Used to delay shutdown while some important work is in progress (e.g. log draining). """ it = (side.keep_alive for (_, (side, _)) in self.poller.readers) return sum(it, 0) > 0 or self.timers.get_timeout() is not None def defer_sync(self, func): """ Arrange for `func()` to execute on :class:`Broker` thread, blocking the current thread until a result or exception is available. :returns: Return value of `func()`. 
""" latch = Latch() def wrapper(): try: latch.put(func()) except Exception: latch.put(sys.exc_info()[1]) self.defer(wrapper) res = latch.get() if isinstance(res, Exception): raise res return res def _call(self, stream, func): """ Call `func(self)`, catching any exception that might occur, logging it, and force-disconnecting the related `stream`. """ try: func(self) except Exception: LOG.exception('%r crashed', stream) stream.on_disconnect(self) def _loop_once(self, timeout=None): """ Execute a single :class:`Poller` wait, dispatching any IO events that caused the wait to complete. :param float timeout: If not :data:`None`, maximum time in seconds to wait for events. """ _vv and IOLOG.debug('%r._loop_once(%r, %r)', self, timeout, self.poller) timer_to = self.timers.get_timeout() if timeout is None: timeout = timer_to elif timer_to is not None and timer_to < timeout: timeout = timer_to #IOLOG.debug('readers =\n%s', pformat(self.poller.readers)) #IOLOG.debug('writers =\n%s', pformat(self.poller.writers)) for side, func in self.poller.poll(timeout): self._call(side.stream, func) if timer_to is not None: self.timers.expire() def _broker_exit(self): """ Forcefully call :meth:`Stream.on_disconnect` on any streams that failed to shut down gracefully, then discard the :class:`Poller`. """ for _, (side, _) in self.poller.readers + self.poller.writers: LOG.debug('%r: force disconnecting %r', self, side) side.stream.on_disconnect(self) self.poller.close() def _broker_shutdown(self): """ Invoke :meth:`Stream.on_shutdown` for every active stream, then allow up to :attr:`shutdown_timeout` seconds for the streams to unregister themselves, logging an error if any did not unregister during the grace period. 
""" for _, (side, _) in self.poller.readers + self.poller.writers: self._call(side.stream, side.stream.on_shutdown) deadline = time.time() + self.shutdown_timeout while self.keep_alive() and time.time() < deadline: self._loop_once(max(0, deadline - time.time())) if self.keep_alive(): LOG.error('%r: pending work still existed %d seconds after ' 'shutdown began. This may be due to a timer that is yet ' 'to expire, or a child connection that did not fully ' 'shut down.', self, self.shutdown_timeout) def _do_broker_main(self): """ Broker thread main function. Dispatches IO events until :meth:`shutdown` is called. """ # For Python 2.4, no way to retrieve ident except on thread. self._waker.protocol.broker_ident = thread.get_ident() try: while self._alive: self._loop_once() fire(self, 'shutdown') self._broker_shutdown() except Exception: e = sys.exc_info()[1] LOG.exception('broker crashed') syslog.syslog(syslog.LOG_ERR, 'broker crashed: %s' % (e,)) syslog.closelog() # prevent test 'fd leak'. self._alive = False # Ensure _alive is consistent on crash. self._exitted = True self._broker_exit() def _broker_main(self): try: _profile_hook('mitogen.broker', self._do_broker_main) finally: # 'finally' to ensure _on_broker_exit() can always SIGTERM. fire(self, 'exit') def shutdown(self): """ Request broker gracefully disconnect streams and stop. Safe to call from any thread. """ _v and LOG.debug('%r: shutting down', self) def _shutdown(): self._alive = False if self._alive and not self._exitted: self.defer(_shutdown) def join(self): """ Wait for the broker to stop, expected to be called after :meth:`shutdown`. """ self._thread.join() def __repr__(self): return 'Broker(%04x)' % (id(self) & 0xffff,) class Dispatcher(object): """ Implementation of the :data:`CALL_FUNCTION` handle for a child context. Listens on the child's main thread for messages sent by :class:`mitogen.parent.CallChain` and dispatches the function calls they describe. 
If a :class:`mitogen.parent.CallChain` sending a message is in pipelined mode, any exception that occurs is recorded, and causes all subsequent calls with the same `chain_id` to fail with the same exception. """ def __repr__(self): return 'Dispatcher' def __init__(self, econtext): self.econtext = econtext #: Chain ID -> CallError if prior call failed. self._error_by_chain_id = {} self.recv = Receiver( router=econtext.router, handle=CALL_FUNCTION, policy=has_parent_authority, ) #: The :data:`CALL_SERVICE` :class:`Receiver` that will eventually be #: reused by :class:`mitogen.service.Pool`, should it ever be loaded. #: This is necessary for race-free reception of all service requests #: delivered regardless of whether the stub or real service pool are #: loaded. See #547 for related sorrows. Dispatcher._service_recv = Receiver( router=econtext.router, handle=CALL_SERVICE, policy=has_parent_authority, ) self._service_recv.notify = self._on_call_service listen(econtext.broker, 'shutdown', self.recv.close) @classmethod @takes_econtext def forget_chain(cls, chain_id, econtext): econtext.dispatcher._error_by_chain_id.pop(chain_id, None) def _parse_request(self, msg): data = msg.unpickle(throw=False) _v and LOG.debug('%r: dispatching %r', self, data) chain_id, modname, klass, func, args, kwargs = data obj = import_module(modname) if klass: obj = getattr(obj, klass) fn = getattr(obj, func) if getattr(fn, 'mitogen_takes_econtext', None): kwargs.setdefault('econtext', self.econtext) if getattr(fn, 'mitogen_takes_router', None): kwargs.setdefault('router', self.econtext.router) return chain_id, fn, args, kwargs def _dispatch_one(self, msg): try: chain_id, fn, args, kwargs = self._parse_request(msg) except Exception: return None, CallError(sys.exc_info()[1]) if chain_id in self._error_by_chain_id: return chain_id, self._error_by_chain_id[chain_id] try: return chain_id, fn(*args, **kwargs) except Exception: e = CallError(sys.exc_info()[1]) if chain_id is not None: 
self._error_by_chain_id[chain_id] = e return chain_id, e def _on_call_service(self, recv): """ Notifier for the :data:`CALL_SERVICE` receiver. This is called on the :class:`Broker` thread for any service messages arriving at this context, for as long as no real service pool implementation is loaded. In order to safely bootstrap the service pool implementation a sentinel message is enqueued on the :data:`CALL_FUNCTION` receiver in order to wake the main thread, where the importer can run without any possibility of suffering deadlock due to concurrent uses of the importer. Should the main thread be blocked indefinitely, preventing the import from ever running, if it is blocked waiting on a service call, then it means :mod:`mitogen.service` has already been imported and :func:`mitogen.service.get_or_create_pool` has already run, meaning the service pool is already active and the duplicate initialization was not needed anyway. #547: This trickery is needed to avoid the alternate option of spinning a temporary thread to import the service pool, which could deadlock if a custom import hook executing on the main thread (under the importer lock) would block waiting for some data that was in turn received by a service. Main thread import lock can't be released until service is running, service cannot satisfy request until import lock is released. 
""" self.recv._on_receive(Message(handle=STUB_CALL_SERVICE)) def _init_service_pool(self): import mitogen.service mitogen.service.get_or_create_pool(router=self.econtext.router) def _dispatch_calls(self): for msg in self.recv: if msg.handle == STUB_CALL_SERVICE: if msg.src_id == mitogen.context_id: self._init_service_pool() continue chain_id, ret = self._dispatch_one(msg) _v and LOG.debug('%r: %r -> %r', self, msg, ret) if msg.reply_to: msg.reply(ret) elif isinstance(ret, CallError) and chain_id is None: LOG.error('No-reply function call failed: %s', ret) def run(self): if self.econtext.config.get('on_start'): self.econtext.config['on_start'](self.econtext) _profile_hook('mitogen.child_main', self._dispatch_calls) class ExternalContext(object): """ External context implementation. This class contains the main program implementation for new children. It is responsible for setting up everything about the process environment, import hooks, standard IO redirection, logging, configuring a :class:`Router` and :class:`Broker`, and finally arranging for :class:`Dispatcher` to take over the main thread after initialization is complete. .. attribute:: broker The :class:`mitogen.core.Broker` instance. .. attribute:: context The :class:`mitogen.core.Context` instance. .. attribute:: channel The :class:`mitogen.core.Channel` over which :data:`CALL_FUNCTION` requests are received. .. attribute:: importer The :class:`mitogen.core.Importer` instance. .. attribute:: stdout_log The :class:`IoLogger` connected to :data:`sys.stdout`. .. attribute:: stderr_log The :class:`IoLogger` connected to :data:`sys.stderr`. 
""" detached = False def __init__(self, config): self.config = config def _on_broker_exit(self): if not self.config['profiling']: os.kill(os.getpid(), signal.SIGTERM) def _on_shutdown_msg(self, msg): if not msg.is_dead: _v and LOG.debug('shutdown request from context %d', msg.src_id) self.broker.shutdown() def _on_parent_disconnect(self): if self.detached: mitogen.parent_ids = [] mitogen.parent_id = None LOG.info('Detachment complete') else: _v and LOG.debug('parent stream is gone, dying.') self.broker.shutdown() def detach(self): self.detached = True stream = self.router.stream_by_id(mitogen.parent_id) if stream: # not double-detach()'d os.setsid() self.parent.send_await(Message(handle=DETACHING)) LOG.info('Detaching from %r; parent is %s', stream, self.parent) for x in range(20): pending = self.broker.defer_sync(stream.protocol.pending_bytes) if not pending: break time.sleep(0.05) if pending: LOG.error('Stream had %d bytes after 2000ms', pending) self.broker.defer(stream.on_disconnect, self.broker) def _setup_master(self): Router.max_message_size = self.config['max_message_size'] if self.config['profiling']: enable_profiling() self.broker = Broker(activate_compat=False) self.router = Router(self.broker) self.router.debug = self.config.get('debug', False) self.router.undirectional = self.config['unidirectional'] self.router.add_handler( fn=self._on_shutdown_msg, handle=SHUTDOWN, policy=has_parent_authority, ) self.master = Context(self.router, 0, 'master') parent_id = self.config['parent_ids'][0] if parent_id == 0: self.parent = self.master else: self.parent = Context(self.router, parent_id, 'parent') in_fd = self.config.get('in_fd', 100) in_fp = os.fdopen(os.dup(in_fd), 'rb', 0) os.close(in_fd) out_fp = os.fdopen(os.dup(self.config.get('out_fd', 1)), 'wb', 0) self.stream = MitogenProtocol.build_stream(self.router, parent_id) self.stream.accept(in_fp, out_fp) self.stream.name = 'parent' self.stream.receive_side.keep_alive = False listen(self.stream, 'disconnect', 
self._on_parent_disconnect) listen(self.broker, 'exit', self._on_broker_exit) def _reap_first_stage(self): try: os.wait() # Reap first stage. except OSError: pass # No first stage exists (e.g. fakessh) def _setup_logging(self): self.log_handler = LogHandler(self.master) root = logging.getLogger() root.setLevel(self.config['log_level']) root.handlers = [self.log_handler] if self.config['debug']: enable_debug_logging() def _setup_importer(self): importer = self.config.get('importer') if importer: importer._install_handler(self.router) importer._context = self.parent else: core_src_fd = self.config.get('core_src_fd', 101) if core_src_fd: fp = os.fdopen(core_src_fd, 'rb', 1) try: core_src = fp.read() # Strip "ExternalContext.main()" call from last line. core_src = b('\n').join(core_src.splitlines()[:-1]) finally: fp.close() else: core_src = None importer = Importer( self.router, self.parent, core_src, self.config.get('whitelist', ()), self.config.get('blacklist', ()), ) self.importer = importer self.router.importer = importer sys.meta_path.insert(0, self.importer) def _setup_package(self): global mitogen mitogen = imp.new_module('mitogen') mitogen.__package__ = 'mitogen' mitogen.__path__ = [] mitogen.__loader__ = self.importer mitogen.main = lambda *args, **kwargs: (lambda func: None) mitogen.core = sys.modules['__main__'] mitogen.core.__file__ = 'x/mitogen/core.py' # For inspect.getsource() mitogen.core.__loader__ = self.importer sys.modules['mitogen'] = mitogen sys.modules['mitogen.core'] = mitogen.core del sys.modules['__main__'] def _setup_globals(self): mitogen.is_master = False mitogen.__version__ = self.config['version'] mitogen.context_id = self.config['context_id'] mitogen.parent_ids = self.config['parent_ids'][:] mitogen.parent_id = mitogen.parent_ids[0] def _nullify_stdio(self): """ Open /dev/null to replace stdio temporarily. In case of odd startup, assume we may be allocated a standard handle. 
""" for stdfd, mode in ((0, os.O_RDONLY), (1, os.O_RDWR), (2, os.O_RDWR)): fd = os.open('/dev/null', mode) if fd != stdfd: os.dup2(fd, stdfd) os.close(fd) def _preserve_tty_fp(self): """ #481: when stderr is a TTY due to being started via tty_create_child() or hybrid_tty_create_child(), and some privilege escalation tool like prehistoric versions of sudo exec this process over the top of itself, there is nothing left to keep the slave PTY open after we replace our stdio. Therefore if stderr is a TTY, keep around a permanent dup() to avoid receiving SIGHUP. """ try: if os.isatty(2): self.reserve_tty_fp = os.fdopen(os.dup(2), 'r+b', 0) set_cloexec(self.reserve_tty_fp.fileno()) except OSError: pass def _setup_stdio(self): self._preserve_tty_fp() # When sys.stdout was opened by the runtime, overwriting it will not # close FD 1. However when forking from a child that previously used # fdopen(), overwriting it /will/ close FD 1. So we must swallow the # close before IoLogger overwrites FD 1, otherwise its new FD 1 will be # clobbered. Additionally, stdout must be replaced with /dev/null prior # to stdout.close(), since if block buffering was active in the parent, # any pre-fork buffered data will be flushed on close(), corrupting the # connection to the parent. self._nullify_stdio() sys.stdout.close() self._nullify_stdio() self.loggers = [] for name, fd in (('stdout', 1), ('stderr', 2)): log = IoLoggerProtocol.build_stream(name, fd) self.broker.start_receive(log) self.loggers.append(log) # Reopen with line buffering. 
sys.stdout = os.fdopen(1, 'w', 1) def main(self): self._setup_master() try: try: self._setup_logging() self._setup_importer() self._reap_first_stage() if self.config.get('setup_package', True): self._setup_package() self._setup_globals() if self.config.get('setup_stdio', True): self._setup_stdio() self.dispatcher = Dispatcher(self) self.router.register(self.parent, self.stream) self.router._setup_logging() sys.executable = os.environ.pop('ARGV0', sys.executable) _v and LOG.debug('Parent is context %r (%s); my ID is %r', self.parent.context_id, self.parent.name, mitogen.context_id) _v and LOG.debug('pid:%r ppid:%r uid:%r/%r, gid:%r/%r host:%r', os.getpid(), os.getppid(), os.geteuid(), os.getuid(), os.getegid(), os.getgid(), socket.gethostname()) _v and LOG.debug('Recovered sys.executable: %r', sys.executable) if self.config.get('send_ec2', True): self.stream.transmit_side.write(b('MITO002\n')) self.broker._py24_25_compat() self.log_handler.uncork() self.dispatcher.run() _v and LOG.debug('ExternalContext.main() normal exit') except KeyboardInterrupt: LOG.debug('KeyboardInterrupt received, exiting gracefully.') except BaseException: LOG.exception('ExternalContext.main() crashed') raise finally: self.broker.shutdown() self.broker.join()
# --- dataset artifact: boundary marker between two concatenated copies of
# mitogen/core.py; commented out so these lines are not bare syntax errors ---
# ./CrossVul/dataset_final_sorted/CWE-254/py/bad_1020_0
# crossvul-python_data_good_1020_0
# Copyright 2019, David Wilson # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # !mitogen: minify_safe """ This module implements most package functionality, but remains separate from non-essential code in order to reduce its size, since it is also serves as the bootstrap implementation sent to every new slave context. 
""" import binascii import collections import encodings.latin_1 import encodings.utf_8 import errno import fcntl import itertools import linecache import logging import os import pickle as py_pickle import pstats import signal import socket import struct import sys import syslog import threading import time import traceback import warnings import weakref import zlib # Python >3.7 deprecated the imp module. warnings.filterwarnings('ignore', message='the imp module is deprecated') import imp # Absolute imports for <2.5. select = __import__('select') try: import cProfile except ImportError: cProfile = None try: import thread except ImportError: import threading as thread try: import cPickle as pickle except ImportError: import pickle try: from cStringIO import StringIO as BytesIO except ImportError: from io import BytesIO try: BaseException except NameError: BaseException = Exception try: ModuleNotFoundError except NameError: ModuleNotFoundError = ImportError # TODO: usage of 'import' after setting __name__, but before fixing up # sys.modules generates a warning. This happens when profiling = True. warnings.filterwarnings('ignore', "Parent module 'mitogen' not found while handling absolute import") LOG = logging.getLogger('mitogen') IOLOG = logging.getLogger('mitogen.io') IOLOG.setLevel(logging.INFO) # str.encode() may take import lock. Deadlock possible if broker calls # .encode() on behalf of thread currently waiting for module. LATIN1_CODEC = encodings.latin_1.Codec() _v = False _vv = False GET_MODULE = 100 CALL_FUNCTION = 101 FORWARD_LOG = 102 ADD_ROUTE = 103 DEL_ROUTE = 104 ALLOCATE_ID = 105 SHUTDOWN = 106 LOAD_MODULE = 107 FORWARD_MODULE = 108 DETACHING = 109 CALL_SERVICE = 110 STUB_CALL_SERVICE = 111 #: Special value used to signal disconnection or the inability to route a #: message, when it appears in the `reply_to` field. Usually causes #: :class:`mitogen.core.ChannelError` to be raised when it is received. 
#: #: It indicates the sender did not know how to process the message, or wishes #: no further messages to be delivered to it. It is used when: #: #: * a remote receiver is disconnected or explicitly closed. #: * a related message could not be delivered due to no route existing for it. #: * a router is being torn down, as a sentinel value to notify #: :meth:`mitogen.core.Router.add_handler` callbacks to clean up. IS_DEAD = 999 try: BaseException except NameError: BaseException = Exception PY24 = sys.version_info < (2, 5) PY3 = sys.version_info > (3,) if PY3: b = str.encode BytesType = bytes UnicodeType = str FsPathTypes = (str,) BufferType = lambda buf, start: memoryview(buf)[start:] long = int else: b = str BytesType = str FsPathTypes = (str, unicode) BufferType = buffer UnicodeType = unicode AnyTextType = (BytesType, UnicodeType) try: next except NameError: next = lambda it: it.next() # #550: prehistoric WSL did not advertise itself in uname output. try: fp = open('/proc/sys/kernel/osrelease') IS_WSL = 'Microsoft' in fp.read() fp.close() except IOError: IS_WSL = False #: Default size for calls to :meth:`Side.read` or :meth:`Side.write`, and the #: size of buffers configured by :func:`mitogen.parent.create_socketpair`. This #: value has many performance implications, 128KiB seems to be a sweet spot. #: #: * When set low, large messages cause many :class:`Broker` IO loop #: iterations, burning CPU and reducing throughput. #: * When set high, excessive RAM is reserved by the OS for socket buffers (2x #: per child), and an identically sized temporary userspace buffer is #: allocated on each read that requires zeroing, and over a particular size #: may require two system calls to allocate/deallocate. #: #: Care must be taken to ensure the underlying kernel object and receiving #: program support the desired size. For example, #: #: * Most UNIXes have TTYs with fixed 2KiB-4KiB buffers, making them unsuitable #: for efficient IO. 
#: * Different UNIXes have varying presets for pipes, which may not be #: configurable. On recent Linux the default pipe buffer size is 64KiB, but #: under memory pressure may be as low as 4KiB for unprivileged processes. #: * When communication is via an intermediary process, its internal buffers #: effect the speed OS buffers will drain. For example OpenSSH uses 64KiB #: reads. #: #: An ideal :class:`Message` has a size that is a multiple of #: :data:`CHUNK_SIZE` inclusive of headers, to avoid wasting IO loop iterations #: writing small trailer chunks. CHUNK_SIZE = 131072 _tls = threading.local() if __name__ == 'mitogen.core': # When loaded using import mechanism, ExternalContext.main() will not have # a chance to set the synthetic mitogen global, so just import it here. import mitogen else: # When loaded as __main__, ensure classes and functions gain a __module__ # attribute consistent with the host process, so that pickling succeeds. __name__ = 'mitogen.core' class Error(Exception): """ Base for all exceptions raised by Mitogen. :param str fmt: Exception text, or format string if `args` is non-empty. :param tuple args: Format string arguments. """ def __init__(self, fmt=None, *args): if args: fmt %= args if fmt and not isinstance(fmt, UnicodeType): fmt = fmt.decode('utf-8') Exception.__init__(self, fmt) class LatchError(Error): """ Raised when an attempt is made to use a :class:`mitogen.core.Latch` that has been marked closed. """ pass class Blob(BytesType): """ A serializable bytes subclass whose content is summarized in repr() output, making it suitable for logging binary data. """ def __repr__(self): return '[blob: %d bytes]' % len(self) def __reduce__(self): return (Blob, (BytesType(self),)) class Secret(UnicodeType): """ A serializable unicode subclass whose content is masked in repr() output, making it suitable for logging passwords. """ def __repr__(self): return '[secret]' if not PY3: # TODO: what is this needed for in 2.x? 
def __str__(self): return UnicodeType(self) def __reduce__(self): return (Secret, (UnicodeType(self),)) class Kwargs(dict): """ A serializable dict subclass that indicates its keys should be coerced to Unicode on Python 3 and bytes on Python<2.6. Python 2 produces keyword argument dicts whose keys are bytes, requiring a helper to ensure compatibility with Python 3 where Unicode is required, whereas Python 3 produces keyword argument dicts whose keys are Unicode, requiring a helper for Python 2.4/2.5, where bytes are required. """ if PY3: def __init__(self, dct): for k, v in dct.items(): if type(k) is bytes: self[k.decode()] = v else: self[k] = v elif sys.version_info < (2, 6, 5): def __init__(self, dct): for k, v in dct.iteritems(): if type(k) is unicode: k, _ = encodings.utf_8.encode(k) self[k] = v def __repr__(self): return 'Kwargs(%s)' % (dict.__repr__(self),) def __reduce__(self): return (Kwargs, (dict(self),)) class CallError(Error): """ Serializable :class:`Error` subclass raised when :meth:`Context.call() <mitogen.parent.Context.call>` fails. A copy of the traceback from the external context is appended to the exception message. """ def __init__(self, fmt=None, *args): if not isinstance(fmt, BaseException): Error.__init__(self, fmt, *args) else: e = fmt cls = e.__class__ fmt = '%s.%s: %s' % (cls.__module__, cls.__name__, e) tb = sys.exc_info()[2] if tb: fmt += '\n' fmt += ''.join(traceback.format_tb(tb)) Error.__init__(self, fmt) def __reduce__(self): return (_unpickle_call_error, (self.args[0],)) def _unpickle_call_error(s): if not (type(s) is UnicodeType and len(s) < 10000): raise TypeError('cannot unpickle CallError: bad input') return CallError(s) class ChannelError(Error): """ Raised when a channel dies or has been closed. """ remote_msg = 'Channel closed by remote end.' local_msg = 'Channel closed by local end.' class StreamError(Error): """ Raised when a stream cannot be established. 
""" pass class TimeoutError(Error): """ Raised when a timeout occurs on a stream. """ pass def to_text(o): """ Coerce `o` to Unicode by decoding it from UTF-8 if it is an instance of :class:`bytes`, otherwise pass it to the :class:`str` constructor. The returned object is always a plain :class:`str`, any subclass is removed. """ if isinstance(o, BytesType): return o.decode('utf-8') return UnicodeType(o) # Python 2.4 try: any except NameError: def any(it): for elem in it: if elem: return True def _partition(s, sep, find): """ (str|unicode).(partition|rpartition) for Python 2.4/2.5. """ idx = find(sep) if idx != -1: left = s[0:idx] return left, sep, s[len(left)+len(sep):] if hasattr(UnicodeType, 'rpartition'): str_partition = UnicodeType.partition str_rpartition = UnicodeType.rpartition bytes_partition = BytesType.partition else: def str_partition(s, sep): return _partition(s, sep, s.find) or (s, u'', u'') def str_rpartition(s, sep): return _partition(s, sep, s.rfind) or (u'', u'', s) def bytes_partition(s, sep): return _partition(s, sep, s.find) or (s, '', '') def has_parent_authority(msg, _stream=None): """ Policy function for use with :class:`Receiver` and :meth:`Router.add_handler` that requires incoming messages to originate from a parent context, or on a :class:`Stream` whose :attr:`auth_id <Stream.auth_id>` has been set to that of a parent context or the current context. """ return (msg.auth_id == mitogen.context_id or msg.auth_id in mitogen.parent_ids) def _signals(obj, signal): return ( obj.__dict__ .setdefault('_signals', {}) .setdefault(signal, []) ) def listen(obj, name, func): """ Arrange for `func()` to be invoked when signal `name` is fired on `obj`. """ _signals(obj, name).append(func) def unlisten(obj, name, func): """ Remove `func()` from the list of functions invoked when signal `name` is fired by `obj`. :raises ValueError: `func()` was not on the list. 
""" _signals(obj, name).remove(func) def fire(obj, name, *args, **kwargs): """ Arrange for `func(*args, **kwargs)` to be invoked for every function registered for signal `name` on `obj`. """ for func in _signals(obj, name): func(*args, **kwargs) def takes_econtext(func): func.mitogen_takes_econtext = True return func def takes_router(func): func.mitogen_takes_router = True return func def is_blacklisted_import(importer, fullname): """ Return :data:`True` if `fullname` is part of a blacklisted package, or if any packages have been whitelisted and `fullname` is not part of one. NB: - If a package is on both lists, then it is treated as blacklisted. - If any package is whitelisted, then all non-whitelisted packages are treated as blacklisted. """ return ((not any(fullname.startswith(s) for s in importer.whitelist)) or (any(fullname.startswith(s) for s in importer.blacklist))) def set_cloexec(fd): """ Set the file descriptor `fd` to automatically close on :func:`os.execve`. This has no effect on file descriptors inherited across :func:`os.fork`, they must be explicitly closed through some other means, such as :func:`mitogen.fork.on_fork`. """ flags = fcntl.fcntl(fd, fcntl.F_GETFD) assert fd > 2 fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) def set_nonblock(fd): """ Set the file descriptor `fd` to non-blocking mode. For most underlying file types, this causes :func:`os.read` or :func:`os.write` to raise :class:`OSError` with :data:`errno.EAGAIN` rather than block the thread when the underlying kernel buffer is exhausted. """ flags = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) def set_block(fd): """ Inverse of :func:`set_nonblock`, i.e. cause `fd` to block the thread when the underlying kernel buffer is exhausted. 
""" flags = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) def io_op(func, *args): """ Wrap `func(*args)` that may raise :class:`select.error`, :class:`IOError`, or :class:`OSError`, trapping UNIX error codes relating to disconnection and retry events in various subsystems: * When a signal is delivered to the process on Python 2, system call retry is signalled through :data:`errno.EINTR`. The invocation is automatically restarted. * When performing IO against a TTY, disconnection of the remote end is signalled by :data:`errno.EIO`. * When performing IO against a socket, disconnection of the remote end is signalled by :data:`errno.ECONNRESET`. * When performing IO against a pipe, disconnection of the remote end is signalled by :data:`errno.EPIPE`. :returns: Tuple of `(return_value, disconnect_reason)`, where `return_value` is the return value of `func(*args)`, and `disconnected` is an exception instance when disconnection was detected, otherwise :data:`None`. """ while True: try: return func(*args), None except (select.error, OSError, IOError): e = sys.exc_info()[1] _vv and IOLOG.debug('io_op(%r) -> OSError: %s', func, e) if e.args[0] == errno.EINTR: continue if e.args[0] in (errno.EIO, errno.ECONNRESET, errno.EPIPE): return None, e raise class PidfulStreamHandler(logging.StreamHandler): """ A :class:`logging.StreamHandler` subclass used when :meth:`Router.enable_debug() <mitogen.master.Router.enable_debug>` has been called, or the `debug` parameter was specified during context construction. Verifies the process ID has not changed on each call to :meth:`emit`, reopening the associated log file when a change is detected. This ensures logging to the per-process output files happens correctly even when uncooperative third party components call :func:`os.fork`. """ #: PID that last opened the log file. open_pid = None #: Output path template. 
template = '/tmp/mitogen.%s.%s.log' def _reopen(self): self.acquire() try: if self.open_pid == os.getpid(): return ts = time.strftime('%Y%m%d_%H%M%S') path = self.template % (os.getpid(), ts) self.stream = open(path, 'w', 1) set_cloexec(self.stream.fileno()) self.stream.write('Parent PID: %s\n' % (os.getppid(),)) self.stream.write('Created by:\n\n%s\n' % ( ''.join(traceback.format_stack()), )) self.open_pid = os.getpid() finally: self.release() def emit(self, record): if self.open_pid != os.getpid(): self._reopen() logging.StreamHandler.emit(self, record) def enable_debug_logging(): global _v, _vv _v = True _vv = True root = logging.getLogger() root.setLevel(logging.DEBUG) IOLOG.setLevel(logging.DEBUG) handler = PidfulStreamHandler() handler.formatter = logging.Formatter( '%(asctime)s %(levelname).1s %(name)s: %(message)s', '%H:%M:%S' ) root.handlers.insert(0, handler) _profile_hook = lambda name, func, *args: func(*args) _profile_fmt = os.environ.get( 'MITOGEN_PROFILE_FMT', '/tmp/mitogen.stats.%(pid)s.%(identity)s.%(now)s.%(ext)s', ) def _profile_hook(name, func, *args): """ Call `func(*args)` and return its result. This function is replaced by :func:`_real_profile_hook` when :func:`enable_profiling` is called. This interface is obsolete and will be replaced by a signals-based integration later on. """ return func(*args) def _real_profile_hook(name, func, *args): profiler = cProfile.Profile() profiler.enable() try: return func(*args) finally: path = _profile_fmt % { 'now': int(1e6 * time.time()), 'identity': name, 'pid': os.getpid(), 'ext': '%s' } profiler.dump_stats(path % ('pstats',)) profiler.create_stats() fp = open(path % ('log',), 'w') try: stats = pstats.Stats(profiler, stream=fp) stats.sort_stats('cumulative') stats.print_stats() finally: fp.close() def enable_profiling(econtext=None): global _profile_hook _profile_hook = _real_profile_hook def import_module(modname): """ Import `module` and return the attribute named `attr`. 
""" return __import__(modname, None, None, ['']) def pipe(): """ Create a UNIX pipe pair using :func:`os.pipe`, wrapping the returned descriptors in Python file objects in order to manage their lifetime and ensure they are closed when their last reference is discarded and they have not been closed explicitly. """ rfd, wfd = os.pipe() return ( os.fdopen(rfd, 'rb', 0), os.fdopen(wfd, 'wb', 0) ) def iter_split(buf, delim, func): """ Invoke `func(s)` for each `delim`-delimited chunk in the potentially large `buf`, avoiding intermediate lists and quadratic string operations. Return the trailing undelimited portion of `buf`, or any unprocessed portion of `buf` after `func(s)` returned :data:`False`. :returns: `(trailer, cont)`, where `cont` is :data:`False` if the last call to `func(s)` returned :data:`False`. """ dlen = len(delim) start = 0 cont = True while cont: nl = buf.find(delim, start) if nl == -1: break cont = not func(buf[start:nl]) is False start = nl + dlen return buf[start:], cont class Py24Pickler(py_pickle.Pickler): """ Exceptions were classic classes until Python 2.5. Sadly for 2.4, cPickle offers little control over how a classic instance is pickled. Therefore 2.4 uses a pure-Python pickler, so CallError can be made to look as it does on newer Pythons. This mess will go away once proper serialization exists. """ @classmethod def dumps(cls, obj, protocol): bio = BytesIO() self = cls(bio, protocol=protocol) self.dump(obj) return bio.getvalue() def save_exc_inst(self, obj): if isinstance(obj, CallError): func, args = obj.__reduce__() self.save(func) self.save(args) self.write(py_pickle.REDUCE) else: py_pickle.Pickler.save_inst(self, obj) if PY24: dispatch = py_pickle.Pickler.dispatch.copy() dispatch[py_pickle.InstanceType] = save_exc_inst if PY3: # In 3.x Unpickler is a class exposing find_class as an overridable, but it # cannot be overridden without subclassing. 
class _Unpickler(pickle.Unpickler): def find_class(self, module, func): return self.find_global(module, func) pickle__dumps = pickle.dumps elif PY24: # On Python 2.4, we must use a pure-Python pickler. pickle__dumps = Py24Pickler.dumps _Unpickler = pickle.Unpickler else: pickle__dumps = pickle.dumps # In 2.x Unpickler is a function exposing a writeable find_global # attribute. _Unpickler = pickle.Unpickler class Message(object): """ Messages are the fundamental unit of communication, comprising fields from the :ref:`stream-protocol` header, an optional reference to the receiving :class:`mitogen.core.Router` for ingress messages, and helper methods for deserialization and generating replies. """ #: Integer target context ID. :class:`Router` delivers messages locally #: when their :attr:`dst_id` matches :data:`mitogen.context_id`, otherwise #: they are routed up or downstream. dst_id = None #: Integer source context ID. Used as the target of replies if any are #: generated. src_id = None #: Context ID under whose authority the message is acting. See #: :ref:`source-verification`. auth_id = None #: Integer target handle in the destination context. This is one of the #: :ref:`standard-handles`, or a dynamically generated handle used to #: receive a one-time reply, such as the return value of a function call. handle = None #: Integer target handle to direct any reply to this message. Used to #: receive a one-time reply, such as the return value of a function call. #: :data:`IS_DEAD` has a special meaning when it appears in this field. reply_to = None #: Raw message data bytes. data = b('') _unpickled = object() #: The :class:`Router` responsible for routing the message. This is #: :data:`None` for locally originated messages. router = None #: The :class:`Receiver` over which the message was last received. Part of #: the :class:`mitogen.select.Select` interface. Defaults to :data:`None`. 
receiver = None HEADER_FMT = '>hLLLLLL' HEADER_LEN = struct.calcsize(HEADER_FMT) HEADER_MAGIC = 0x4d49 # 'MI' def __init__(self, **kwargs): """ Construct a message from from the supplied `kwargs`. :attr:`src_id` and :attr:`auth_id` are always set to :data:`mitogen.context_id`. """ self.src_id = mitogen.context_id self.auth_id = mitogen.context_id vars(self).update(kwargs) assert isinstance(self.data, BytesType) def pack(self): return ( struct.pack(self.HEADER_FMT, self.HEADER_MAGIC, self.dst_id, self.src_id, self.auth_id, self.handle, self.reply_to or 0, len(self.data)) + self.data ) def _unpickle_context(self, context_id, name): return _unpickle_context(context_id, name, router=self.router) def _unpickle_sender(self, context_id, dst_handle): return _unpickle_sender(self.router, context_id, dst_handle) def _unpickle_bytes(self, s, encoding): s, n = LATIN1_CODEC.encode(s) return s def _find_global(self, module, func): """ Return the class implementing `module_name.class_name` or raise `StreamError` if the module is not whitelisted. """ if module == __name__: if func == '_unpickle_call_error' or func == 'CallError': return _unpickle_call_error elif func == '_unpickle_sender': return self._unpickle_sender elif func == '_unpickle_context': return self._unpickle_context elif func == 'Blob': return Blob elif func == 'Secret': return Secret elif func == 'Kwargs': return Kwargs elif module == '_codecs' and func == 'encode': return self._unpickle_bytes elif module == '__builtin__' and func == 'bytes': return BytesType raise StreamError('cannot unpickle %r/%r', module, func) @property def is_dead(self): """ :data:`True` if :attr:`reply_to` is set to the magic value :data:`IS_DEAD`, indicating the sender considers the channel dead. Dead messages can be raised in a variety of circumstances, see :data:`IS_DEAD` for more information. """ return self.reply_to == IS_DEAD @classmethod def dead(cls, reason=None, **kwargs): """ Syntax helper to construct a dead message. 
""" kwargs['data'], _ = encodings.utf_8.encode(reason or u'') return cls(reply_to=IS_DEAD, **kwargs) @classmethod def pickled(cls, obj, **kwargs): """ Construct a pickled message, setting :attr:`data` to the serialization of `obj`, and setting remaining fields using `kwargs`. :returns: The new message. """ self = cls(**kwargs) try: self.data = pickle__dumps(obj, protocol=2) except pickle.PicklingError: e = sys.exc_info()[1] self.data = pickle__dumps(CallError(e), protocol=2) return self def reply(self, msg, router=None, **kwargs): """ Compose a reply to this message and send it using :attr:`router`, or `router` is :attr:`router` is :data:`None`. :param obj: Either a :class:`Message`, or an object to be serialized in order to construct a new message. :param router: Optional router to use if :attr:`router` is :data:`None`. :param kwargs: Optional keyword parameters overriding message fields in the reply. """ if not isinstance(msg, Message): msg = Message.pickled(msg) msg.dst_id = self.src_id msg.handle = self.reply_to vars(msg).update(kwargs) if msg.handle: (self.router or router).route(msg) else: LOG.debug('dropping reply to message with no return address: %r', msg) if PY3: UNPICKLER_KWARGS = {'encoding': 'bytes'} else: UNPICKLER_KWARGS = {} def _throw_dead(self): if len(self.data): raise ChannelError(self.data.decode('utf-8', 'replace')) elif self.src_id == mitogen.context_id: raise ChannelError(ChannelError.local_msg) else: raise ChannelError(ChannelError.remote_msg) def unpickle(self, throw=True, throw_dead=True): """ Unpickle :attr:`data`, optionally raising any exceptions present. :param bool throw_dead: If :data:`True`, raise exceptions, otherwise it is the caller's responsibility. :raises CallError: The serialized data contained CallError exception. :raises ChannelError: The `is_dead` field was set. 
""" _vv and IOLOG.debug('%r.unpickle()', self) if throw_dead and self.is_dead: self._throw_dead() obj = self._unpickled if obj is Message._unpickled: fp = BytesIO(self.data) unpickler = _Unpickler(fp, **self.UNPICKLER_KWARGS) unpickler.find_global = self._find_global try: # Must occur off the broker thread. try: obj = unpickler.load() except: LOG.error('raw pickle was: %r', self.data) raise self._unpickled = obj except (TypeError, ValueError): e = sys.exc_info()[1] raise StreamError('invalid message: %s', e) if throw: if isinstance(obj, CallError): raise obj return obj def __repr__(self): return 'Message(%r, %r, %r, %r, %r, %r..%d)' % ( self.dst_id, self.src_id, self.auth_id, self.handle, self.reply_to, (self.data or '')[:50], len(self.data) ) class Sender(object): """ Senders are used to send pickled messages to a handle in another context, it is the inverse of :class:`mitogen.core.Receiver`. Senders may be serialized, making them convenient to wire up data flows. See :meth:`mitogen.core.Receiver.to_sender` for more information. :param mitogen.core.Context context: Context to send messages to. :param int dst_handle: Destination handle to send messages to. """ def __init__(self, context, dst_handle): self.context = context self.dst_handle = dst_handle def send(self, data): """ Send `data` to the remote end. """ _vv and IOLOG.debug('%r.send(%r..)', self, repr(data)[:100]) self.context.send(Message.pickled(data, handle=self.dst_handle)) explicit_close_msg = 'Sender was explicitly closed' def close(self): """ Send a dead message to the remote, causing :meth:`ChannelError` to be raised in any waiting thread. 
""" _vv and IOLOG.debug('%r.close()', self) self.context.send( Message.dead( reason=self.explicit_close_msg, handle=self.dst_handle ) ) def __repr__(self): return 'Sender(%r, %r)' % (self.context, self.dst_handle) def __reduce__(self): return _unpickle_sender, (self.context.context_id, self.dst_handle) def _unpickle_sender(router, context_id, dst_handle): if not (isinstance(router, Router) and isinstance(context_id, (int, long)) and context_id >= 0 and isinstance(dst_handle, (int, long)) and dst_handle > 0): raise TypeError('cannot unpickle Sender: bad input or missing router') return Sender(Context(router, context_id), dst_handle) class Receiver(object): """ Receivers maintain a thread-safe queue of messages sent to a handle of this context from another context. :param mitogen.core.Router router: Router to register the handler on. :param int handle: If not :data:`None`, an explicit handle to register, otherwise an unused handle is chosen. :param bool persist: If :data:`False`, unregister the handler after one message is received. Single-message receivers are intended for RPC-like transactions, such as in the case of :meth:`mitogen.parent.Context.call_async`. :param mitogen.core.Context respondent: Context this receiver is receiving from. If not :data:`None`, arranges for the receiver to receive a dead message if messages can no longer be routed to the context due to disconnection, and ignores messages that did not originate from the respondent context. """ #: If not :data:`None`, a reference to a function invoked as #: `notify(receiver)` when a new message is delivered to this receiver. The #: function is invoked on the broker thread, therefore it must not block. #: Used by :class:`mitogen.select.Select` to implement waiting on multiple #: receivers. notify = None raise_channelerror = True def __init__(self, router, handle=None, persist=True, respondent=None, policy=None, overwrite=False): self.router = router #: The handle. 
self.handle = handle # Avoid __repr__ crash in add_handler() self._latch = Latch() # Must exist prior to .add_handler() self.handle = router.add_handler( fn=self._on_receive, handle=handle, policy=policy, persist=persist, respondent=respondent, overwrite=overwrite, ) def __repr__(self): return 'Receiver(%r, %r)' % (self.router, self.handle) def __enter__(self): return self def __exit__(self, _1, _2, _3): self.close() def to_sender(self): """ Return a :class:`Sender` configured to deliver messages to this receiver. As senders are serializable, this makes it convenient to pass `(context_id, handle)` pairs around:: def deliver_monthly_report(sender): for line in open('monthly_report.txt'): sender.send(line) sender.close() @mitogen.main() def main(router): remote = router.ssh(hostname='mainframe') recv = mitogen.core.Receiver(router) remote.call(deliver_monthly_report, recv.to_sender()) for msg in recv: print(msg) """ return Sender(self.router.myself(), self.handle) def _on_receive(self, msg): """ Callback registered for the handle with :class:`Router`; appends data to the internal queue. """ _vv and IOLOG.debug('%r._on_receive(%r)', self, msg) self._latch.put(msg) if self.notify: self.notify(self) closed_msg = 'the Receiver has been closed' def close(self): """ Unregister the receiver's handle from its associated router, and cause :class:`ChannelError` to be raised in any thread waiting in :meth:`get` on this receiver. """ if self.handle: self.router.del_handler(self.handle) self.handle = None self._latch.close() def size(self): """ Return the number of items currently buffered. As with :class:`Queue.Queue`, `0` may be returned even though a subsequent call to :meth:`get` will succeed, since a message may be posted at any moment between :meth:`size` and :meth:`get`. As with :class:`Queue.Queue`, `>0` may be returned even though a subsequent call to :meth:`get` will block, since another waiting thread may be woken at any moment between :meth:`size` and :meth:`get`. 
:raises LatchError: The underlying latch has already been marked closed. """ return self._latch.size() def empty(self): """ Return `size() == 0`. .. deprecated:: 0.2.8 Use :meth:`size` instead. :raises LatchError: The latch has already been marked closed. """ return self._latch.empty() def get(self, timeout=None, block=True, throw_dead=True): """ Sleep waiting for a message to arrive on this receiver. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :raises mitogen.core.ChannelError: The remote end indicated the channel should be closed, communication with it was lost, or :meth:`close` was called in the local process. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: :class:`Message` that was received. """ _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) try: msg = self._latch.get(timeout=timeout, block=block) except LatchError: raise ChannelError(self.closed_msg) if msg.is_dead and throw_dead: msg._throw_dead() return msg def __iter__(self): """ Yield consecutive :class:`Message` instances delivered to this receiver until :class:`ChannelError` is raised. """ while True: try: msg = self.get() except ChannelError: return yield msg class Channel(Sender, Receiver): """ A channel inherits from :class:`mitogen.core.Sender` and `mitogen.core.Receiver` to provide bidirectional functionality. .. deprecated:: 0.2.0 This class is incomplete and obsolete, it will be removed in Mitogen 0.3. Channels were an early attempt at syntax sugar. It is always easier to pass around unidirectional pairs of senders/receivers, even though the syntax is baroque: .. literalinclude:: ../examples/ping_pong.py Since all handles aren't known until after both ends are constructed, for both ends to communicate through a channel, it is necessary for one end to retrieve the handle allocated to the other and reconfigure its own channel to match. Currently this is a manual task. 
""" def __init__(self, router, context, dst_handle, handle=None): Sender.__init__(self, context, dst_handle) Receiver.__init__(self, router, handle) def close(self): Receiver.close(self) Sender.close(self) def __repr__(self): return 'Channel(%s, %s)' % ( Sender.__repr__(self), Receiver.__repr__(self) ) class Importer(object): """ Import protocol implementation that fetches modules from the parent process. :param context: Context to communicate via. """ # The Mitogen package is handled specially, since the child context must # construct it manually during startup. MITOGEN_PKG_CONTENT = [ 'buildah', 'compat', 'debug', 'doas', 'docker', 'kubectl', 'fakessh', 'fork', 'jail', 'lxc', 'lxd', 'master', 'minify', 'os_fork', 'parent', 'select', 'service', 'setns', 'ssh', 'su', 'sudo', 'utils', ] ALWAYS_BLACKLIST = [ # 2.x generates needless imports for 'builtins', while 3.x does the # same for '__builtin__'. The correct one is built-in, the other always # a negative round-trip. 'builtins', '__builtin__', 'thread', # org.python.core imported by copy, pickle, xml.sax; breaks Jython, but # very unlikely to trigger a bug report. 'org', ] if PY3: ALWAYS_BLACKLIST += ['cStringIO'] def __init__(self, router, context, core_src, whitelist=(), blacklist=()): self._log = logging.getLogger('mitogen.importer') self._context = context self._present = {'mitogen': self.MITOGEN_PKG_CONTENT} self._lock = threading.Lock() self.whitelist = list(whitelist) or [''] self.blacklist = list(blacklist) + self.ALWAYS_BLACKLIST # Preserve copies of the original server-supplied whitelist/blacklist # for later use by children. self.master_whitelist = self.whitelist[:] self.master_blacklist = self.blacklist[:] # Presence of an entry in this map indicates in-flight GET_MODULE. 
self._callbacks = {} self._cache = {} if core_src: self._update_linecache('x/mitogen/core.py', core_src) self._cache['mitogen.core'] = ( 'mitogen.core', None, 'x/mitogen/core.py', zlib.compress(core_src, 9), [], ) self._install_handler(router) def _update_linecache(self, path, data): """ The Python 2.4 linecache module, used to fetch source code for tracebacks and :func:`inspect.getsource`, does not support PEP-302, meaning it needs extra help to for Mitogen-loaded modules. Directly populate its cache if a loaded module belongs to the Mitogen package. """ if PY24 and 'mitogen' in path: linecache.cache[path] = ( len(data), 0.0, [line+'\n' for line in data.splitlines()], path, ) def _install_handler(self, router): router.add_handler( fn=self._on_load_module, handle=LOAD_MODULE, policy=has_parent_authority, ) def __repr__(self): return 'Importer' def builtin_find_module(self, fullname): # imp.find_module() will always succeed for __main__, because it is a # built-in module. That means it exists on a special linked list deep # within the bowels of the interpreter. We must special case it. 
if fullname == '__main__': raise ModuleNotFoundError() parent, _, modname = str_rpartition(fullname, '.') if parent: path = sys.modules[parent].__path__ else: path = None fp, pathname, description = imp.find_module(modname, path) if fp: fp.close() def find_module(self, fullname, path=None): if hasattr(_tls, 'running'): return None _tls.running = True try: #_v and self._log.debug('Python requested %r', fullname) fullname = to_text(fullname) pkgname, dot, _ = str_rpartition(fullname, '.') pkg = sys.modules.get(pkgname) if pkgname and getattr(pkg, '__loader__', None) is not self: self._log.debug('%s is submodule of a locally loaded package', fullname) return None suffix = fullname[len(pkgname+dot):] if pkgname and suffix not in self._present.get(pkgname, ()): self._log.debug('%s has no submodule %s', pkgname, suffix) return None # #114: explicitly whitelisted prefixes override any # system-installed package. if self.whitelist != ['']: if any(fullname.startswith(s) for s in self.whitelist): return self try: self.builtin_find_module(fullname) _vv and self._log.debug('%r is available locally', fullname) except ImportError: _vv and self._log.debug('we will try to load %r', fullname) return self finally: del _tls.running blacklisted_msg = ( '%r is present in the Mitogen importer blacklist, therefore this ' 'context will not attempt to request it from the master, as the ' 'request will always be refused.' ) pkg_resources_msg = ( 'pkg_resources is prohibited from importing __main__, as it causes ' 'problems in applications whose main module is not designed to be ' 're-imported by children.' ) absent_msg = ( 'The Mitogen master process was unable to serve %r. It may be a ' 'native Python extension, or it may be missing entirely. Check the ' 'importer debug logs on the master for more information.' 
) def _refuse_imports(self, fullname): if is_blacklisted_import(self, fullname): raise ModuleNotFoundError(self.blacklisted_msg % (fullname,)) f = sys._getframe(2) requestee = f.f_globals['__name__'] if fullname == '__main__' and requestee == 'pkg_resources': # Anything that imports pkg_resources will eventually cause # pkg_resources to try and scan __main__ for its __requires__ # attribute (pkg_resources/__init__.py::_build_master()). This # breaks any app that is not expecting its __main__ to suddenly be # sucked over a network and injected into a remote process, like # py.test. raise ModuleNotFoundError(self.pkg_resources_msg) if fullname == 'pbr': # It claims to use pkg_resources to read version information, which # would result in PEP-302 being used, but it actually does direct # filesystem access. So instead smodge the environment to override # any version that was defined. This will probably break something # later. os.environ['PBR_VERSION'] = '0.0.0' def _on_load_module(self, msg): if msg.is_dead: return tup = msg.unpickle() fullname = tup[0] _v and self._log.debug('received %s', fullname) self._lock.acquire() try: self._cache[fullname] = tup if tup[2] is not None and PY24: self._update_linecache( path='master:' + tup[2], data=zlib.decompress(tup[3]) ) callbacks = self._callbacks.pop(fullname, []) finally: self._lock.release() for callback in callbacks: callback() def _request_module(self, fullname, callback): self._lock.acquire() try: present = fullname in self._cache if not present: funcs = self._callbacks.get(fullname) if funcs is not None: _v and self._log.debug('existing request for %s in flight', fullname) funcs.append(callback) else: _v and self._log.debug('sending new %s request to parent', fullname) self._callbacks[fullname] = [callback] self._context.send( Message(data=b(fullname), handle=GET_MODULE) ) finally: self._lock.release() if present: callback() def load_module(self, fullname): fullname = to_text(fullname) _v and 
self._log.debug('requesting %s', fullname) self._refuse_imports(fullname) event = threading.Event() self._request_module(fullname, event.set) event.wait() ret = self._cache[fullname] if ret[2] is None: raise ModuleNotFoundError(self.absent_msg % (fullname,)) pkg_present = ret[1] mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__file__ = self.get_filename(fullname) mod.__loader__ = self if pkg_present is not None: # it's a package. mod.__path__ = [] mod.__package__ = fullname self._present[fullname] = pkg_present else: mod.__package__ = str_rpartition(fullname, '.')[0] or None if mod.__package__ and not PY3: # 2.x requires __package__ to be exactly a string. mod.__package__, _ = encodings.utf_8.encode(mod.__package__) source = self.get_source(fullname) try: code = compile(source, mod.__file__, 'exec', 0, 1) except SyntaxError: LOG.exception('while importing %r', fullname) raise if PY3: exec(code, vars(mod)) else: exec('exec code in vars(mod)') # #590: if a module replaces itself in sys.modules during import, below # is necessary. This matches PyImport_ExecCodeModuleEx() return sys.modules.get(fullname, mod) def get_filename(self, fullname): if fullname in self._cache: path = self._cache[fullname][2] if path is None: # If find_loader() returns self but a subsequent master RPC # reveals the module can't be loaded, and so load_module() # throws ImportError, on Python 3.x it is still possible for # the loader to be called to fetch metadata. 
raise ModuleNotFoundError(self.absent_msg % (fullname,)) return u'master:' + self._cache[fullname][2] def get_source(self, fullname): if fullname in self._cache: compressed = self._cache[fullname][3] if compressed is None: raise ModuleNotFoundError(self.absent_msg % (fullname,)) source = zlib.decompress(self._cache[fullname][3]) if PY3: return to_text(source) return source class LogHandler(logging.Handler): def __init__(self, context): logging.Handler.__init__(self) self.context = context self.local = threading.local() self._buffer = [] # Private synchronization is needed while corked, to ensure no # concurrent call to _send() exists during uncork(). self._buffer_lock = threading.Lock() def uncork(self): """ #305: during startup :class:`LogHandler` may be installed before it is possible to route messages, therefore messages are buffered until :meth:`uncork` is called by :class:`ExternalContext`. """ self._buffer_lock.acquire() try: self._send = self.context.send for msg in self._buffer: self._send(msg) self._buffer = None finally: self._buffer_lock.release() def _send(self, msg): self._buffer_lock.acquire() try: if self._buffer is None: # uncork() may run concurrent to _send() self._send(msg) else: self._buffer.append(msg) finally: self._buffer_lock.release() def emit(self, rec): if rec.name == 'mitogen.io' or \ getattr(self.local, 'in_emit', False): return self.local.in_emit = True try: msg = self.format(rec) encoded = '%s\x00%s\x00%s' % (rec.name, rec.levelno, msg) if isinstance(encoded, UnicodeType): # Logging package emits both :( encoded = encoded.encode('utf-8') self._send(Message(data=encoded, handle=FORWARD_LOG)) finally: self.local.in_emit = False class Stream(object): #: A :class:`Side` representing the stream's receive file descriptor. receive_side = None #: A :class:`Side` representing the stream's transmit file descriptor. transmit_side = None #: A :class:`Protocol` representing the protocol active on the stream. 
    protocol = None

    #: In parents, the :class:`mitogen.parent.Connection` instance.
    conn = None

    #: Descriptive name used in logs and __repr__.
    name = u'default'

    def set_protocol(self, protocol):
        """
        Bind a protocol to this stream, by updating
        :attr:`Protocol.stream` to refer to this stream, and updating this
        stream's :attr:`Stream.protocol` to refer to the protocol. Any prior
        protocol's :attr:`Protocol.stream` is set to :data:`None`.
        """
        if self.protocol:
            self.protocol.stream = None
        self.protocol = protocol
        self.protocol.stream = self

    def accept(self, rfp, wfp):
        """
        Attach a receive and transmit file object to this stream, wrapping
        each in a :class:`Side`.
        """
        self.receive_side = Side(self, rfp)
        self.transmit_side = Side(self, wfp)

    def __repr__(self):
        return "<Stream %s>" % (self.name,)

    def on_receive(self, broker):
        """
        Called by :class:`Broker` when the stream's :attr:`receive_side` has
        been marked readable using :meth:`Broker.start_receive` and the broker
        has detected the associated file descriptor is ready for reading.

        Subclasses must implement this if :meth:`Broker.start_receive` is ever
        called on them, and the method must call :meth:`on_disconnect` if
        reading produces an empty string.
        """
        buf = self.receive_side.read(self.protocol.read_size)
        if not buf:
            # Empty read signals EOF: tear the stream down.
            LOG.debug('%r: empty read, disconnecting', self.receive_side)
            return self.on_disconnect(broker)

        self.protocol.on_receive(broker, buf)

    def on_transmit(self, broker):
        """
        Called by :class:`Broker` when the stream's :attr:`transmit_side` has
        been marked writeable using :meth:`Broker._start_transmit` and the
        broker has detected the associated file descriptor is ready for
        writing.

        Subclasses must implement this if :meth:`Broker._start_transmit` is
        ever called on them.
        """
        self.protocol.on_transmit(broker)

    def on_shutdown(self, broker):
        """
        Called by :meth:`Broker.shutdown` to allow the stream time to
        gracefully shutdown. The base implementation fires the ``shutdown``
        signal, then delegates to the active protocol's
        :meth:`Protocol.on_shutdown`.
        """
        fire(self, 'shutdown')
        self.protocol.on_shutdown(broker)

    def on_disconnect(self, broker):
        """
        Called by :class:`Broker` to force disconnect the stream. The base
        implementation fires the ``disconnect`` signal, then delegates to the
        active protocol's :meth:`Protocol.on_disconnect`, which closes
        :attr:`receive_side` and :attr:`transmit_side` and unregisters the
        stream from the broker.
        """
        fire(self, 'disconnect')
        self.protocol.on_disconnect(broker)


class Protocol(object):
    """
    Implement the program behaviour associated with activity on a
    :class:`Stream`. The protocol in use may vary over a stream's life, for
    example to allow :class:`mitogen.parent.BootstrapProtocol` to initialize
    the connected child before handing it off to :class:`MitogenProtocol`. A
    stream's active protocol is tracked in the :attr:`Stream.protocol`
    attribute, and modified via :meth:`Stream.set_protocol`.

    Protocols do not handle IO, they are entirely reliant on the interface
    provided by :class:`Stream` and :class:`Side`, allowing the underlying IO
    implementation to be replaced without modifying behavioural logic.
    """
    #: :class:`Stream` subclass instantiated by :meth:`build_stream`.
    stream_class = Stream

    #: The :class:`Stream` this protocol is currently bound to, or
    #: :data:`None`.
    stream = None

    #: Number of bytes requested per read in :meth:`Stream.on_receive`.
    read_size = CHUNK_SIZE

    @classmethod
    def build_stream(cls, *args, **kwargs):
        """
        Construct a new stream with an instance of this protocol bound to it;
        `args`/`kwargs` are forwarded to the protocol constructor.
        """
        stream = cls.stream_class()
        stream.set_protocol(cls(*args, **kwargs))
        return stream

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            self.stream and self.stream.name,
        )

    def on_shutdown(self, broker):
        _v and LOG.debug('%r: shutting down', self)
        self.stream.on_disconnect(broker)

    def on_disconnect(self, broker):
        # Normally both sides share an FD, so it is important that
        # transmit_side is deregistered from Poller before closing the receive
        # side, as pollers like epoll and kqueue unregister all events on FD
        # close, causing subsequent attempt to unregister the transmit side to
        # fail.
        LOG.debug('%r: disconnecting', self)
        broker.stop_receive(self.stream)
        if self.stream.transmit_side:
            broker._stop_transmit(self.stream)
        self.stream.receive_side.close()
        if self.stream.transmit_side:
            self.stream.transmit_side.close()


class DelimitedProtocol(Protocol):
    """
    Provide a :meth:`Protocol.on_receive` implementation for protocols that
    are delimited by a fixed string, like text based protocols. Each message
    is passed to :meth:`on_line_received` as it arrives, with incomplete
    messages passed to :meth:`on_partial_line_received`.

    When emulating user input it is often necessary to respond to incomplete
    lines, such as when a "Password: " prompt is sent.
    :meth:`on_partial_line_received` may be called repeatedly with an
    increasingly complete message. When a complete message is finally
    received, :meth:`on_line_received` will be called once for it before the
    buffer is discarded.

    If :func:`on_line_received` returns :data:`False`, remaining data is
    passed unprocessed to the stream's current protocol's :meth:`on_receive`.
    This allows switching from line-oriented to binary while the input buffer
    contains both kinds of data.
    """
    #: The delimiter. Defaults to newline.
    delimiter = b('\n')
    # Bytes received since the last complete delimited message.
    _trailer = b('')

    def on_receive(self, broker, buf):
        _vv and IOLOG.debug('%r.on_receive()', self)
        # Capture the stream now: on_line_received() may swap the protocol.
        stream = self.stream
        self._trailer, cont = mitogen.core.iter_split(
            buf=self._trailer + buf,
            delim=self.delimiter,
            func=self.on_line_received,
        )

        if self._trailer:
            if cont:
                self.on_partial_line_received(self._trailer)
            else:
                # on_line_received() returned False: the protocol changed, so
                # hand remaining bytes to the stream's new protocol.
                assert stream.protocol is not self
                stream.protocol.on_receive(broker, self._trailer)

    def on_line_received(self, line):
        """
        Receive a line from the stream.

        :param bytes line:
            The encoded line, excluding the delimiter.
        :returns:
            :data:`False` to indicate this invocation modified the stream's
            active protocol, and any remaining buffered data should be passed
            to the new protocol's :meth:`on_receive` method.

            Any other return value is ignored.
        """
        pass

    def on_partial_line_received(self, line):
        """
        Receive a trailing unterminated partial line from the stream.

        :param bytes line:
            The encoded partial line.
        """
        pass


class BufferedWriter(object):
    """
    Implement buffered output while avoiding quadratic string operations. This
    is currently constructed by each protocol, in future it may become fixed
    for each stream instead.
    """
    def __init__(self, broker, protocol):
        self._broker = broker
        self._protocol = protocol
        # Queue of pending byte chunks; _len tracks their combined size.
        self._buf = collections.deque()
        self._len = 0

    def write(self, s):
        """
        Transmit `s` immediately, falling back to enqueuing it and marking the
        stream writeable if no OS buffer space is available.
        """
        if not self._len:
            # Modifying epoll/Kqueue state is expensive, as are needless
            # broker loops. Rather than wait for writeability, just write
            # immediately, and fall back to the broker loop on error or full
            # buffer.
            try:
                n = self._protocol.stream.transmit_side.write(s)
                if n:
                    if n == len(s):
                        return
                    s = s[n:]
            except OSError:
                pass

            self._broker._start_transmit(self._protocol.stream)
        self._buf.append(s)
        self._len += len(s)

    def on_transmit(self, broker):
        """
        Respond to stream writeability by retrying previously buffered
        :meth:`write` calls.
        """
        if self._buf:
            buf = self._buf.popleft()
            written = self._protocol.stream.transmit_side.write(buf)
            if not written:
                _v and LOG.debug('disconnected during write to %r', self)
                self._protocol.stream.on_disconnect(broker)
                return
            elif written != len(buf):
                # Partial write: requeue the unsent suffix at the front.
                self._buf.appendleft(BufferType(buf, written))

            _vv and IOLOG.debug('transmitted %d bytes to %r', written, self)
            self._len -= written

        if not self._buf:
            broker._stop_transmit(self._protocol.stream)


class Side(object):
    """
    Represent one side of a :class:`Stream`. This allows unidirectional (e.g.
    pipe) and bidirectional (e.g. socket) streams to operate identically.

    Sides are also responsible for tracking the open/closed state of the
    underlying FD, preventing erroneous duplicate calls to :func:`os.close`
    due to duplicate :meth:`Stream.on_disconnect` calls, which would otherwise
    risk silently succeeding by closing an unrelated descriptor. For this
    reason, it is crucial only one file object exists per unique descriptor.

    :param mitogen.core.Stream stream:
        The stream this side is associated with.
    :param object fp:
        The file or socket object managing the underlying file descriptor. Any
        object may be used that supports `fileno()` and `close()` methods.
    :param bool cloexec:
        If :data:`True`, the descriptor has its :data:`fcntl.FD_CLOEXEC` flag
        enabled using :func:`fcntl.fcntl`.
    :param bool keep_alive:
        If :data:`True`, the continued existence of this side will extend the
        shutdown grace period until it has been unregistered from the broker.
    :param bool blocking:
        If :data:`False`, the descriptor has its :data:`os.O_NONBLOCK` flag
        enabled using :func:`fcntl.fcntl`.
    """
    # Tracks every live Side so _on_fork() can close inherited descriptors.
    _fork_refs = weakref.WeakValueDictionary()
    # Becomes True after the first close(); guards double-close.
    closed = False

    def __init__(self, stream, fp, cloexec=True, keep_alive=True,
                 blocking=False):
        #: The :class:`Stream` for which this is a read or write side.
        self.stream = stream
        # File or socket object responsible for the lifetime of its underlying
        # file descriptor.
        self.fp = fp
        #: Integer file descriptor to perform IO on, or :data:`None` if
        #: :meth:`close` has been called. This is saved separately from the
        #: file object, since :meth:`file.fileno` cannot be called on it after
        #: it has been closed.
        self.fd = fp.fileno()
        #: If :data:`True`, causes presence of this side in
        #: :class:`Broker`'s active reader set to defer shutdown until the
        #: side is disconnected.
self.keep_alive = keep_alive self._fork_refs[id(self)] = self if cloexec: set_cloexec(self.fd) if not blocking: set_nonblock(self.fd) def __repr__(self): return '<Side of %s fd %s>' % ( self.stream.name or repr(self.stream), self.fd ) @classmethod def _on_fork(cls): while cls._fork_refs: _, side = cls._fork_refs.popitem() _vv and IOLOG.debug('Side._on_fork() closing %r', side) side.close() def close(self): """ Call :meth:`file.close` on :attr:`fp` if it is not :data:`None`, then set it to :data:`None`. """ _vv and IOLOG.debug('%r.close()', self) if not self.closed: self.closed = True self.fp.close() def read(self, n=CHUNK_SIZE): """ Read up to `n` bytes from the file descriptor, wrapping the underlying :func:`os.read` call with :func:`io_op` to trap common disconnection conditions. :meth:`read` always behaves as if it is reading from a regular UNIX file; socket, pipe, and TTY disconnection errors are masked and result in a 0-sized read like a regular file. :returns: Bytes read, or the empty string to indicate disconnection was detected. """ if self.closed: # Refuse to touch the handle after closed, it may have been reused # by another thread. TODO: synchronize read()/write()/close(). return b('') s, disconnected = io_op(os.read, self.fd, n) if disconnected: LOG.debug('%r: disconnected during read: %s', self, disconnected) return b('') return s def write(self, s): """ Write as much of the bytes from `s` as possible to the file descriptor, wrapping the underlying :func:`os.write` call with :func:`io_op` to trap common disconnection conditions. :returns: Number of bytes written, or :data:`None` if disconnection was detected. """ if self.closed: # Don't touch the handle after close, it may be reused elsewhere. 
return None written, disconnected = io_op(os.write, self.fd, s) if disconnected: LOG.debug('%r: disconnected during write: %s', self, disconnected) return None return written class MitogenProtocol(Protocol): """ :class:`Protocol` implementing mitogen's :ref:`stream protocol <stream-protocol>`. """ #: If not :data:`None`, :class:`Router` stamps this into #: :attr:`Message.auth_id` of every message received on this stream. auth_id = None #: If not :data:`False`, indicates the stream has :attr:`auth_id` set and #: its value is the same as :data:`mitogen.context_id` or appears in #: :data:`mitogen.parent_ids`. is_privileged = False def __init__(self, router, remote_id): self._router = router self.remote_id = remote_id self.sent_modules = set(['mitogen', 'mitogen.core']) self._input_buf = collections.deque() self._input_buf_len = 0 self._writer = BufferedWriter(router.broker, self) #: Routing records the dst_id of every message arriving from this #: stream. Any arriving DEL_ROUTE is rebroadcast for any such ID. self.egress_ids = set() def on_receive(self, broker, buf): """ Handle the next complete message on the stream. Raise :class:`StreamError` on failure. """ _vv and IOLOG.debug('%r.on_receive()', self) if self._input_buf and self._input_buf_len < 128: self._input_buf[0] += buf else: self._input_buf.append(buf) self._input_buf_len += len(buf) while self._receive_one(broker): pass corrupt_msg = ( '%s: Corruption detected: frame signature incorrect. This likely means' ' some external process is interfering with the connection. 
Received:' '\n\n' '%r' ) def _receive_one(self, broker): if self._input_buf_len < Message.HEADER_LEN: return False msg = Message() msg.router = self._router (magic, msg.dst_id, msg.src_id, msg.auth_id, msg.handle, msg.reply_to, msg_len) = struct.unpack( Message.HEADER_FMT, self._input_buf[0][:Message.HEADER_LEN], ) if magic != Message.HEADER_MAGIC: LOG.error(self.corrupt_msg, self.stream.name, self._input_buf[0][:2048]) self.stream.on_disconnect(broker) return False if msg_len > self._router.max_message_size: LOG.error('Maximum message size exceeded (got %d, max %d)', msg_len, self._router.max_message_size) self.stream.on_disconnect(broker) return False total_len = msg_len + Message.HEADER_LEN if self._input_buf_len < total_len: _vv and IOLOG.debug( '%r: Input too short (want %d, got %d)', self, msg_len, self._input_buf_len - Message.HEADER_LEN ) return False start = Message.HEADER_LEN prev_start = start remain = total_len bits = [] while remain: buf = self._input_buf.popleft() bit = buf[start:remain] bits.append(bit) remain -= len(bit) + start prev_start = start start = 0 msg.data = b('').join(bits) self._input_buf.appendleft(buf[prev_start+len(bit):]) self._input_buf_len -= total_len self._router._async_route(msg, self.stream) return True def pending_bytes(self): """ Return the number of bytes queued for transmission on this stream. This can be used to limit the amount of data buffered in RAM by an otherwise unlimited consumer. For an accurate result, this method should be called from the Broker thread, for example by using :meth:`Broker.defer_sync`. """ return self._writer._len def on_transmit(self, broker): """ Transmit buffered messages. """ _vv and IOLOG.debug('%r.on_transmit()', self) self._writer.on_transmit(broker) def _send(self, msg): _vv and IOLOG.debug('%r._send(%r)', self, msg) self._writer.write(msg.pack()) def send(self, msg): """ Send `data` to `handle`, and tell the broker we have output. May be called from any thread. 
""" self._router.broker.defer(self._send, msg) def on_shutdown(self, broker): """ Disable :class:`Protocol` immediate disconnect behaviour. """ _v and LOG.debug('%r: shutting down', self) class Context(object): """ Represent a remote context regardless of the underlying connection method. Context objects are simple facades that emit messages through an associated router, and have :ref:`signals` raised against them in response to various events relating to the context. **Note:** This is the somewhat limited core version, used by child contexts. The master subclass is documented below this one. Contexts maintain no internal state and are thread-safe. Prefer :meth:`Router.context_by_id` over constructing context objects explicitly, as that method is deduplicating, and returns the only context instance :ref:`signals` will be raised on. :param mitogen.core.Router router: Router to emit messages through. :param int context_id: Context ID. :param str name: Context name. """ name = None remote_name = None def __init__(self, router, context_id, name=None): self.router = router self.context_id = context_id if name: self.name = to_text(name) def __reduce__(self): return _unpickle_context, (self.context_id, self.name) def on_disconnect(self): _v and LOG.debug('%r: disconnecting', self) fire(self, 'disconnect') def send_async(self, msg, persist=False): """ Arrange for `msg` to be delivered to this context, with replies directed to a newly constructed receiver. :attr:`dst_id <Message.dst_id>` is set to the target context ID, and :attr:`reply_to <Message.reply_to>` is set to the newly constructed receiver's handle. :param bool persist: If :data:`False`, the handler will be unregistered after a single message has been received. :param mitogen.core.Message msg: The message. :returns: :class:`Receiver` configured to receive any replies sent to the message's `reply_to` handle. 
""" receiver = Receiver(self.router, persist=persist, respondent=self) msg.dst_id = self.context_id msg.reply_to = receiver.handle _v and LOG.debug('sending message to %r: %r', self, msg) self.send(msg) return receiver def call_service_async(self, service_name, method_name, **kwargs): _v and LOG.debug('calling service %s.%s of %r, args: %r', service_name, method_name, self, kwargs) if isinstance(service_name, BytesType): service_name = service_name.encode('utf-8') elif not isinstance(service_name, UnicodeType): service_name = service_name.name() # Service.name() tup = (service_name, to_text(method_name), Kwargs(kwargs)) msg = Message.pickled(tup, handle=CALL_SERVICE) return self.send_async(msg) def send(self, msg): """ Arrange for `msg` to be delivered to this context. :attr:`dst_id <Message.dst_id>` is set to the target context ID. :param Message msg: Message. """ msg.dst_id = self.context_id self.router.route(msg) def call_service(self, service_name, method_name, **kwargs): recv = self.call_service_async(service_name, method_name, **kwargs) return recv.get().unpickle() def send_await(self, msg, deadline=None): """ Like :meth:`send_async`, but expect a single reply (`persist=False`) delivered within `deadline` seconds. :param mitogen.core.Message msg: The message. :param float deadline: If not :data:`None`, seconds before timing out waiting for a reply. :returns: Deserialized reply. :raises TimeoutError: No message was received and `deadline` passed. 
""" receiver = self.send_async(msg) response = receiver.get(deadline) data = response.unpickle() _vv and IOLOG.debug('%r._send_await() -> %r', self, data) return data def __repr__(self): return 'Context(%s, %r)' % (self.context_id, self.name) def _unpickle_context(context_id, name, router=None): if not (isinstance(context_id, (int, long)) and context_id >= 0 and ( (name is None) or (isinstance(name, UnicodeType) and len(name) < 100)) ): raise TypeError('cannot unpickle Context: bad input') if isinstance(router, Router): return router.context_by_id(context_id, name=name) return Context(None, context_id, name) # For plain Jane pickle. class Poller(object): """ A poller manages OS file descriptors the user is waiting to become available for IO. The :meth:`poll` method blocks the calling thread until one or more become ready. The default implementation is based on :func:`select.poll`. Each descriptor has an associated `data` element, which is unique for each readiness type, and defaults to being the same as the file descriptor. The :meth:`poll` method yields the data associated with a descriptor, rather than the descriptor itself, allowing concise loops like:: p = Poller() p.start_receive(conn.fd, data=conn.on_read) p.start_transmit(conn.fd, data=conn.on_write) for callback in p.poll(): callback() # invoke appropriate bound instance method Pollers may be modified while :meth:`poll` is yielding results. Removals are processed immediately, causing pending events for the descriptor to be discarded. The :meth:`close` method must be called when a poller is discarded to avoid a resource leak. Pollers may only be used by one thread at a time. """ SUPPORTED = True # This changed from select() to poll() in Mitogen 0.2.4. Since poll() has # no upper FD limit, it is suitable for use with Latch, which must handle # FDs larger than select's limit during many-host runs. 
We want this # because poll() requires no setup and teardown: just a single system call, # which is important because Latch.get() creates a Poller on each # invocation. In a microbenchmark, poll() vs. epoll_ctl() is 30% faster in # this scenario. If select() must return in future, it is important # Latch.poller_class is set from parent.py to point to the industrial # strength poller for the OS, otherwise Latch will fail randomly. #: Increments on every poll(). Used to version _rfds and _wfds. _generation = 1 def __init__(self): self._rfds = {} self._wfds = {} def __repr__(self): return '%s' % (type(self).__name__,) def _update(self, fd): """ Required by PollPoller subclass. """ pass @property def readers(self): """ Return a list of `(fd, data)` tuples for every FD registered for receive readiness. """ return list((fd, data) for fd, (data, gen) in self._rfds.items()) @property def writers(self): """ Return a list of `(fd, data)` tuples for every FD registered for transmit readiness. """ return list((fd, data) for fd, (data, gen) in self._wfds.items()) def close(self): """ Close any underlying OS resource used by the poller. """ pass def start_receive(self, fd, data=None): """ Cause :meth:`poll` to yield `data` when `fd` is readable. """ self._rfds[fd] = (data or fd, self._generation) self._update(fd) def stop_receive(self, fd): """ Stop yielding readability events for `fd`. Redundant calls to :meth:`stop_receive` are silently ignored, this may change in future. """ self._rfds.pop(fd, None) self._update(fd) def start_transmit(self, fd, data=None): """ Cause :meth:`poll` to yield `data` when `fd` is writeable. """ self._wfds[fd] = (data or fd, self._generation) self._update(fd) def stop_transmit(self, fd): """ Stop yielding writeability events for `fd`. Redundant calls to :meth:`stop_transmit` are silently ignored, this may change in future. 
""" self._wfds.pop(fd, None) self._update(fd) def _poll(self, timeout): (rfds, wfds, _), _ = io_op(select.select, self._rfds, self._wfds, (), timeout ) for fd in rfds: _vv and IOLOG.debug('%r: POLLIN for %r', self, fd) data, gen = self._rfds.get(fd, (None, None)) if gen and gen < self._generation: yield data for fd in wfds: _vv and IOLOG.debug('%r: POLLOUT for %r', self, fd) data, gen = self._wfds.get(fd, (None, None)) if gen and gen < self._generation: yield data def poll(self, timeout=None): """ Block the calling thread until one or more FDs are ready for IO. :param float timeout: If not :data:`None`, seconds to wait without an event before returning an empty iterable. :returns: Iterable of `data` elements associated with ready FDs. """ _vv and IOLOG.debug('%r.poll(%r)', self, timeout) self._generation += 1 return self._poll(timeout) class Latch(object): """ A latch is a :class:`Queue.Queue`-like object that supports mutation and waiting from multiple threads, however unlike :class:`Queue.Queue`, waiting threads always remain interruptible, so CTRL+C always succeeds, and waits where a timeout is set experience no wake up latency. These properties are not possible in combination using the built-in threading primitives available in Python 2.x. Latches implement queues using the UNIX self-pipe trick, and a per-thread :func:`socket.socketpair` that is lazily created the first time any latch attempts to sleep on a thread, and dynamically associated with the waiting Latch only for duration of the wait. See :ref:`waking-sleeping-threads` for further discussion. """ poller_class = Poller notify = None # The _cls_ prefixes here are to make it crystal clear in the code which # state mutation isn't covered by :attr:`_lock`. #: List of reusable :func:`socket.socketpair` tuples. The list is mutated #: from multiple threads, the only safe operations are `append()` and #: `pop()`. _cls_idle_socketpairs = [] #: List of every socket object that must be closed by :meth:`_on_fork`. 
#: Inherited descriptors cannot be reused, as the duplicated handles #: reference the same underlying kernel object in use by the parent. _cls_all_sockets = [] def __init__(self): self.closed = False self._lock = threading.Lock() #: List of unconsumed enqueued items. self._queue = [] #: List of `(wsock, cookie)` awaiting an element, where `wsock` is the #: socketpair's write side, and `cookie` is the string to write. self._sleeping = [] #: Number of elements of :attr:`_sleeping` that have already been #: woken, and have a corresponding element index from :attr:`_queue` #: assigned to them. self._waking = 0 @classmethod def _on_fork(cls): """ Clean up any files belonging to the parent process after a fork. """ cls._cls_idle_socketpairs = [] while cls._cls_all_sockets: cls._cls_all_sockets.pop().close() def close(self): """ Mark the latch as closed, and cause every sleeping thread to be woken, with :class:`mitogen.core.LatchError` raised in each thread. """ self._lock.acquire() try: self.closed = True while self._waking < len(self._sleeping): wsock, cookie = self._sleeping[self._waking] self._wake(wsock, cookie) self._waking += 1 finally: self._lock.release() def size(self): """ Return the number of items currently buffered. As with :class:`Queue.Queue`, `0` may be returned even though a subsequent call to :meth:`get` will succeed, since a message may be posted at any moment between :meth:`size` and :meth:`get`. As with :class:`Queue.Queue`, `>0` may be returned even though a subsequent call to :meth:`get` will block, since another waiting thread may be woken at any moment between :meth:`size` and :meth:`get`. :raises LatchError: The latch has already been marked closed. """ self._lock.acquire() try: if self.closed: raise LatchError() return len(self._queue) finally: self._lock.release() def empty(self): """ Return `size() == 0`. .. deprecated:: 0.2.8 Use :meth:`size` instead. :raises LatchError: The latch has already been marked closed. 
""" return self.size() == 0 def _get_socketpair(self): """ Return an unused socketpair, creating one if none exist. """ try: return self._cls_idle_socketpairs.pop() # pop() must be atomic except IndexError: rsock, wsock = socket.socketpair() set_cloexec(rsock.fileno()) set_cloexec(wsock.fileno()) self._cls_all_sockets.extend((rsock, wsock)) return rsock, wsock COOKIE_MAGIC, = struct.unpack('L', b('LTCH') * (struct.calcsize('L')//4)) COOKIE_FMT = '>Qqqq' # #545: id() and get_ident() may exceed long on armhfp. COOKIE_SIZE = struct.calcsize(COOKIE_FMT) def _make_cookie(self): """ Return a string encoding the ID of the process, instance and thread. This disambiguates legitimate wake-ups, accidental writes to the FD, and buggy internal FD sharing. """ return struct.pack(self.COOKIE_FMT, self.COOKIE_MAGIC, os.getpid(), id(self), thread.get_ident()) def get(self, timeout=None, block=True): """ Return the next enqueued object, or sleep waiting for one. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :param bool block: If :data:`False`, immediately raise :class:`mitogen.core.TimeoutError` if the latch is empty. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: The de-queued object. 
""" _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) self._lock.acquire() try: if self.closed: raise LatchError() i = len(self._sleeping) if len(self._queue) > i: _vv and IOLOG.debug('%r.get() -> %r', self, self._queue[i]) return self._queue.pop(i) if not block: raise TimeoutError() rsock, wsock = self._get_socketpair() cookie = self._make_cookie() self._sleeping.append((wsock, cookie)) finally: self._lock.release() poller = self.poller_class() poller.start_receive(rsock.fileno()) try: return self._get_sleep(poller, timeout, block, rsock, wsock, cookie) finally: poller.close() def _get_sleep(self, poller, timeout, block, rsock, wsock, cookie): """ When a result is not immediately available, sleep waiting for :meth:`put` to write a byte to our socket pair. """ _vv and IOLOG.debug( '%r._get_sleep(timeout=%r, block=%r, fd=%d/%d)', self, timeout, block, rsock.fileno(), wsock.fileno() ) e = None woken = None try: woken = list(poller.poll(timeout)) except Exception: e = sys.exc_info()[1] self._lock.acquire() try: i = self._sleeping.index((wsock, cookie)) del self._sleeping[i] if not woken: raise e or TimeoutError() got_cookie = rsock.recv(self.COOKIE_SIZE) self._cls_idle_socketpairs.append((rsock, wsock)) assert cookie == got_cookie, ( "Cookie incorrect; got %r, expected %r" \ % (binascii.hexlify(got_cookie), binascii.hexlify(cookie)) ) assert i < self._waking, ( "Cookie correct, but no queue element assigned." ) self._waking -= 1 if self.closed: raise LatchError() _vv and IOLOG.debug('%r.get() wake -> %r', self, self._queue[i]) return self._queue.pop(i) finally: self._lock.release() def put(self, obj=None): """ Enqueue an object, waking the first thread waiting for a result, if one exists. :param obj: Object to enqueue. Defaults to :data:`None` as a convenience when using :class:`Latch` only for synchronization. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. 
""" _vv and IOLOG.debug('%r.put(%r)', self, obj) self._lock.acquire() try: if self.closed: raise LatchError() self._queue.append(obj) wsock = None if self._waking < len(self._sleeping): wsock, cookie = self._sleeping[self._waking] self._waking += 1 _vv and IOLOG.debug('%r.put() -> waking wfd=%r', self, wsock.fileno()) elif self.notify: self.notify(self) finally: self._lock.release() if wsock: self._wake(wsock, cookie) def _wake(self, wsock, cookie): written, disconnected = io_op(os.write, wsock.fileno(), cookie) assert written == len(cookie) and not disconnected def __repr__(self): return 'Latch(%#x, size=%d, t=%r)' % ( id(self), len(self._queue), threading.currentThread().getName(), ) class Waker(Protocol): """ :class:`BasicStream` subclass implementing the `UNIX self-pipe trick`_. Used to wake the multiplexer when another thread needs to modify its state (via a cross-thread function call). .. _UNIX self-pipe trick: https://cr.yp.to/docs/selfpipe.html """ read_size = 1 broker_ident = None @classmethod def build_stream(cls, broker): stream = super(Waker, cls).build_stream(broker) stream.accept(*pipe()) return stream def __init__(self, broker): self._broker = broker self._lock = threading.Lock() self._deferred = [] def __repr__(self): return 'Waker(fd=%r/%r)' % ( self.stream.receive_side and self.stream.receive_side.fd, self.stream.transmit_side and self.stream.transmit_side.fd, ) @property def keep_alive(self): """ Prevent immediate Broker shutdown while deferred functions remain. """ self._lock.acquire() try: return len(self._deferred) finally: self._lock.release() def on_receive(self, broker, buf): """ Drain the pipe and fire callbacks. Since :attr:`_deferred` is synchronized, :meth:`defer` and :meth:`on_receive` can conspire to ensure only one byte needs to be pending regardless of queue length. 
""" _vv and IOLOG.debug('%r.on_receive()', self) self._lock.acquire() try: deferred = self._deferred self._deferred = [] finally: self._lock.release() for func, args, kwargs in deferred: try: func(*args, **kwargs) except Exception: LOG.exception('defer() crashed: %r(*%r, **%r)', func, args, kwargs) broker.shutdown() def _wake(self): """ Wake the multiplexer by writing a byte. If Broker is midway through teardown, the FD may already be closed, so ignore EBADF. """ try: self.stream.transmit_side.write(b(' ')) except OSError: e = sys.exc_info()[1] if e.args[0] != errno.EBADF: raise broker_shutdown_msg = ( "An attempt was made to enqueue a message with a Broker that has " "already exitted. It is likely your program called Broker.shutdown() " "too early." ) def defer(self, func, *args, **kwargs): """ Arrange for `func()` to execute on the broker thread. This function returns immediately without waiting the result of `func()`. Use :meth:`defer_sync` to block until a result is available. :raises mitogen.core.Error: :meth:`defer` was called after :class:`Broker` has begun shutdown. """ if thread.get_ident() == self.broker_ident: _vv and IOLOG.debug('%r.defer() [immediate]', self) return func(*args, **kwargs) if self._broker._exitted: raise Error(self.broker_shutdown_msg) _vv and IOLOG.debug('%r.defer() [fd=%r]', self, self.stream.transmit_side.fd) self._lock.acquire() try: should_wake = not self._deferred self._deferred.append((func, args, kwargs)) finally: self._lock.release() if should_wake: self._wake() class IoLoggerProtocol(DelimitedProtocol): """ Handle redirection of standard IO into the :mod:`logging` package. """ @classmethod def build_stream(cls, name, dest_fd): """ Even though the descriptor `dest_fd` will hold the opposite end of the socket open, we must keep a separate dup() of it (i.e. wsock) in case some code decides to overwrite `dest_fd` later, which would thus break :meth:`on_shutdown`. 
""" rsock, wsock = socket.socketpair() os.dup2(wsock.fileno(), dest_fd) stream = super(IoLoggerProtocol, cls).build_stream(name) stream.name = name stream.accept(rsock, wsock) return stream def __init__(self, name): self._log = logging.getLogger(name) # #453: prevent accidental log initialization in a child creating a # feedback loop. self._log.propagate = False self._log.handlers = logging.getLogger().handlers[:] def on_shutdown(self, broker): """ Shut down the write end of the socket, preventing any further writes to it by this process, or subprocess that inherited it. This allows any remaining kernel-buffered data to be drained during graceful shutdown without the buffer continuously refilling due to some out of control child process. """ _v and LOG.debug('%r: shutting down', self) if not IS_WSL: # #333: WSL generates invalid readiness indication on shutdown(). # This modifies the *kernel object* inherited by children, causing # EPIPE on subsequent writes to any dupped FD in any process. The # read side can then drain completely of prior buffered data. self.stream.transmit_side.fp.shutdown(socket.SHUT_WR) self.stream.transmit_side.close() def on_line_received(self, line): """ Decode the received line as UTF-8 and pass it to the logging framework. """ self._log.info('%s', line.decode('utf-8', 'replace')) class Router(object): """ Route messages between contexts, and invoke local handlers for messages addressed to this context. :meth:`Router.route() <route>` straddles the :class:`Broker` thread and user threads, it is safe to call anywhere. **Note:** This is the somewhat limited core version of the Router class used by child contexts. The master subclass is documented below this one. """ #: The :class:`mitogen.core.Context` subclass to use when constructing new #: :class:`Context` objects in :meth:`myself` and :meth:`context_by_id`. #: Permits :class:`Router` subclasses to extend the :class:`Context` #: interface, as done in :class:`mitogen.parent.Router`. 
context_class = Context max_message_size = 128 * 1048576 #: When :data:`True`, permit children to only communicate with the current #: context or a parent of the current context. Routing between siblings or #: children of parents is prohibited, ensuring no communication is possible #: between intentionally partitioned networks, such as when a program #: simultaneously manipulates hosts spread across a corporate and a #: production network, or production networks that are otherwise #: air-gapped. #: #: Sending a prohibited message causes an error to be logged and a dead #: message to be sent in reply to the errant message, if that message has #: ``reply_to`` set. #: #: The value of :data:`unidirectional` becomes the default for the #: :meth:`local() <mitogen.master.Router.local>` `unidirectional` #: parameter. unidirectional = False duplicate_handle_msg = 'cannot register a handle that already exists' refused_msg = 'refused by policy' invalid_handle_msg = 'invalid handle' too_large_msg = 'message too large (max %d bytes)' respondent_disconnect_msg = 'the respondent Context has disconnected' broker_exit_msg = 'Broker has exitted' no_route_msg = 'no route to %r, my ID is %r' unidirectional_msg = ( 'routing mode prevents forward of message from context %d via ' 'context %d' ) def __init__(self, broker): self.broker = broker listen(broker, 'exit', self._on_broker_exit) self._setup_logging() self._write_lock = threading.Lock() #: context ID -> Stream; must hold _write_lock to edit or iterate self._stream_by_id = {} #: List of contexts to notify of shutdown; must hold _write_lock self._context_by_id = {} self._last_handle = itertools.count(1000) #: handle -> (persistent?, func(msg)) self._handle_map = {} #: Context -> set { handle, .. } self._handles_by_respondent = {} self.add_handler(self._on_del_route, DEL_ROUTE) def __repr__(self): return 'Router(%r)' % (self.broker,) def _setup_logging(self): """ This is done in the :class:`Router` constructor for historical reasons. 
It must be called before ExternalContext logs its first messages, but after logging has been setup. It must also be called when any router is constructed for a consumer app. """ # Here seems as good a place as any. global _v, _vv _v = logging.getLogger().level <= logging.DEBUG _vv = IOLOG.level <= logging.DEBUG def _on_del_route(self, msg): """ Stub :data:`DEL_ROUTE` handler; fires 'disconnect' events on the corresponding :attr:`_context_by_id` member. This is replaced by :class:`mitogen.parent.RouteMonitor` in an upgraded context. """ if msg.is_dead: return target_id_s, _, name = bytes_partition(msg.data, b(':')) target_id = int(target_id_s, 10) LOG.error('%r: deleting route to %s (%d)', self, to_text(name), target_id) context = self._context_by_id.get(target_id) if context: fire(context, 'disconnect') else: LOG.debug('DEL_ROUTE for unknown ID %r: %r', target_id, msg) def _on_stream_disconnect(self, stream): notify = [] self._write_lock.acquire() try: for context in list(self._context_by_id.values()): stream_ = self._stream_by_id.get(context.context_id) if stream_ is stream: del self._stream_by_id[context.context_id] notify.append(context) finally: self._write_lock.release() # Happens outside lock as e.g. RouteMonitor wants the same lock. for context in notify: context.on_disconnect() def _on_broker_exit(self): """ Called prior to broker exit, informs callbacks registered with :meth:`add_handler` the connection is dead. """ _v and LOG.debug('%r: broker has exitted', self) while self._handle_map: _, (_, func, _, _) = self._handle_map.popitem() func(Message.dead(self.broker_exit_msg)) def myself(self): """ Return a :class:`Context` referring to the current process. Since :class:`Context` is serializable, this is convenient to use in remote function call parameter lists. 
""" return self.context_class( router=self, context_id=mitogen.context_id, name='self', ) def context_by_id(self, context_id, via_id=None, create=True, name=None): """ Return or construct a :class:`Context` given its ID. An internal mapping of ID to the canonical :class:`Context` representing that ID, so that :ref:`signals` can be raised. This may be called from any thread, lookup and construction are atomic. :param int context_id: The context ID to look up. :param int via_id: If the :class:`Context` does not already exist, set its :attr:`Context.via` to the :class:`Context` matching this ID. :param bool create: If the :class:`Context` does not already exist, create it. :param str name: If the :class:`Context` does not already exist, set its name. :returns: :class:`Context`, or return :data:`None` if `create` is :data:`False` and no :class:`Context` previously existed. """ context = self._context_by_id.get(context_id) if context: return context if create and via_id is not None: via = self.context_by_id(via_id) else: via = None self._write_lock.acquire() try: context = self._context_by_id.get(context_id) if create and not context: context = self.context_class(self, context_id, name=name) context.via = via self._context_by_id[context_id] = context finally: self._write_lock.release() return context def register(self, context, stream): """ Register a newly constructed context and its associated stream, and add the stream's receive side to the I/O multiplexer. This method remains public while the design has not yet settled. 
""" _v and LOG.debug('%s: registering %r to stream %r', self, context, stream) self._write_lock.acquire() try: self._stream_by_id[context.context_id] = stream self._context_by_id[context.context_id] = context finally: self._write_lock.release() self.broker.start_receive(stream) listen(stream, 'disconnect', lambda: self._on_stream_disconnect(stream)) def stream_by_id(self, dst_id): """ Return the :class:`Stream` that should be used to communicate with `dst_id`. If a specific route for `dst_id` is not known, a reference to the parent context's stream is returned. If the parent is disconnected, or when running in the master context, return :data:`None` instead. This can be used from any thread, but its output is only meaningful from the context of the :class:`Broker` thread, as disconnection or replacement could happen in parallel on the broker thread at any moment. """ return ( self._stream_by_id.get(dst_id) or self._stream_by_id.get(mitogen.parent_id) ) def del_handler(self, handle): """ Remove the handle registered for `handle` :raises KeyError: The handle wasn't registered. """ _, _, _, respondent = self._handle_map.pop(handle) if respondent: self._handles_by_respondent[respondent].discard(handle) def add_handler(self, fn, handle=None, persist=True, policy=None, respondent=None, overwrite=False): """ Invoke `fn(msg)` on the :class:`Broker` thread for each Message sent to `handle` from this context. Unregister after one invocation if `persist` is :data:`False`. If `handle` is :data:`None`, a new handle is allocated and returned. :param int handle: If not :data:`None`, an explicit handle to register, usually one of the ``mitogen.core.*`` constants. If unspecified, a new unused handle will be allocated. :param bool persist: If :data:`False`, the handler will be unregistered after a single message has been received. :param mitogen.core.Context respondent: Context that messages to this handle are expected to be sent from. 
If specified, arranges for a dead message to be delivered to `fn` when disconnection of the context is detected. In future `respondent` will likely also be used to prevent other contexts from sending messages to the handle. :param function policy: Function invoked as `policy(msg, stream)` where `msg` is a :class:`mitogen.core.Message` about to be delivered, and `stream` is the :class:`mitogen.core.Stream` on which it was received. The function must return :data:`True`, otherwise an error is logged and delivery is refused. Two built-in policy functions exist: * :func:`has_parent_authority`: requires the message arrived from a parent context, or a context acting with a parent context's authority (``auth_id``). * :func:`mitogen.parent.is_immediate_child`: requires the message arrived from an immediately connected child, for use in messaging patterns where either something becomes buggy or insecure by permitting indirect upstream communication. In case of refusal, and the message's ``reply_to`` field is nonzero, a :class:`mitogen.core.CallError` is delivered to the sender indicating refusal occurred. :param bool overwrite: If :data:`True`, allow existing handles to be silently overwritten. :return: `handle`, or if `handle` was :data:`None`, the newly allocated handle. :raises Error: Attemp to register handle that was already registered. 
""" handle = handle or next(self._last_handle) _vv and IOLOG.debug('%r.add_handler(%r, %r, %r)', self, fn, handle, persist) if handle in self._handle_map and not overwrite: raise Error(self.duplicate_handle_msg) self._handle_map[handle] = persist, fn, policy, respondent if respondent: if respondent not in self._handles_by_respondent: self._handles_by_respondent[respondent] = set() listen(respondent, 'disconnect', lambda: self._on_respondent_disconnect(respondent)) self._handles_by_respondent[respondent].add(handle) return handle def _on_respondent_disconnect(self, context): for handle in self._handles_by_respondent.pop(context, ()): _, fn, _, _ = self._handle_map[handle] fn(Message.dead(self.respondent_disconnect_msg)) del self._handle_map[handle] def _maybe_send_dead(self, msg, reason, *args): if args: reason %= args LOG.debug('%r: %r is dead: %r', self, msg, reason) if msg.reply_to and not msg.is_dead: msg.reply(Message.dead(reason=reason), router=self) def _invoke(self, msg, stream): # IOLOG.debug('%r._invoke(%r)', self, msg) try: persist, fn, policy, respondent = self._handle_map[msg.handle] except KeyError: self._maybe_send_dead(msg, reason=self.invalid_handle_msg) return if respondent and not (msg.is_dead or msg.src_id == respondent.context_id): self._maybe_send_dead(msg, 'reply from unexpected context') return if policy and not policy(msg, stream): self._maybe_send_dead(msg, self.refused_msg) return if not persist: self.del_handler(msg.handle) try: fn(msg) except Exception: LOG.exception('%r._invoke(%r): %r crashed', self, msg, fn) def _async_route(self, msg, in_stream=None): """ Arrange for `msg` to be forwarded towards its destination. If its destination is the local context, then arrange for it to be dispatched using the local handlers. This is a lower overhead version of :meth:`route` that may only be called from the :class:`Broker` thread. :param Stream in_stream: If not :data:`None`, the stream the message arrived on. 
Used for performing source route verification, to ensure sensitive messages such as ``CALL_FUNCTION`` arrive only from trusted contexts. """ _vv and IOLOG.debug('%r._async_route(%r, %r)', self, msg, in_stream) if len(msg.data) > self.max_message_size: self._maybe_send_dead(msg, self.too_large_msg % ( self.max_message_size, )) return # Perform source verification. if in_stream: parent = self._stream_by_id.get(mitogen.parent_id) expect = self._stream_by_id.get(msg.auth_id, parent) if in_stream != expect: LOG.error('%r: bad auth_id: got %r via %r, not %r: %r', self, msg.auth_id, in_stream, expect, msg) return if msg.src_id != msg.auth_id: expect = self._stream_by_id.get(msg.src_id, parent) if in_stream != expect: LOG.error('%r: bad src_id: got %r via %r, not %r: %r', self, msg.src_id, in_stream, expect, msg) return if in_stream.protocol.auth_id is not None: msg.auth_id = in_stream.protocol.auth_id # Maintain a set of IDs the source ever communicated with. in_stream.protocol.egress_ids.add(msg.dst_id) if msg.dst_id == mitogen.context_id: return self._invoke(msg, in_stream) out_stream = self._stream_by_id.get(msg.dst_id) if out_stream is None: out_stream = self._stream_by_id.get(mitogen.parent_id) if out_stream is None: self._maybe_send_dead(msg, self.no_route_msg, msg.dst_id, mitogen.context_id) return if in_stream and self.unidirectional and not \ (in_stream.protocol.is_privileged or out_stream.protocol.is_privileged): self._maybe_send_dead(msg, self.unidirectional_msg, in_stream.protocol.remote_id, out_stream.protocol.remote_id) return out_stream.protocol._send(msg) def route(self, msg): """ Arrange for the :class:`Message` `msg` to be delivered to its destination using any relevant downstream context, or if none is found, by forwarding the message upstream towards the master context. If `msg` is destined for the local context, it is dispatched using the handles registered with :meth:`add_handler`. This may be called from any thread. 
""" self.broker.defer(self._async_route, msg) class NullTimerList(object): def get_timeout(self): return None class Broker(object): """ Responsible for handling I/O multiplexing in a private thread. **Note:** This somewhat limited core version is used by children. The master subclass is documented below. """ poller_class = Poller _waker = None _thread = None # :func:`mitogen.parent._upgrade_broker` replaces this with # :class:`mitogen.parent.TimerList` during upgrade. timers = NullTimerList() #: Seconds grace to allow :class:`streams <Stream>` to shutdown gracefully #: before force-disconnecting them during :meth:`shutdown`. shutdown_timeout = 3.0 def __init__(self, poller_class=None, activate_compat=True): self._alive = True self._exitted = False self._waker = Waker.build_stream(self) #: Arrange for `func(\*args, \**kwargs)` to be executed on the broker #: thread, or immediately if the current thread is the broker thread. #: Safe to call from any thread. self.defer = self._waker.protocol.defer self.poller = self.poller_class() self.poller.start_receive( self._waker.receive_side.fd, (self._waker.receive_side, self._waker.on_receive) ) self._thread = threading.Thread( target=self._broker_main, name='mitogen.broker' ) self._thread.start() if activate_compat: self._py24_25_compat() def _py24_25_compat(self): """ Python 2.4/2.5 have grave difficulties with threads/fork. We mandatorily quiesce all running threads during fork using a monkey-patch there. """ if sys.version_info < (2, 6): # import_module() is used to avoid dep scanner. os_fork = import_module('mitogen.os_fork') os_fork._notice_broker_or_pool(self) def start_receive(self, stream): """ Mark the :attr:`receive_side <Stream.receive_side>` on `stream` as ready for reading. Safe to call from any thread. When the associated file descriptor becomes ready for reading, :meth:`BasicStream.on_receive` will be called. 
""" _vv and IOLOG.debug('%r.start_receive(%r)', self, stream) side = stream.receive_side assert side and not side.closed self.defer(self.poller.start_receive, side.fd, (side, stream.on_receive)) def stop_receive(self, stream): """ Mark the :attr:`receive_side <Stream.receive_side>` on `stream` as not ready for reading. Safe to call from any thread. """ _vv and IOLOG.debug('%r.stop_receive(%r)', self, stream) self.defer(self.poller.stop_receive, stream.receive_side.fd) def _start_transmit(self, stream): """ Mark the :attr:`transmit_side <Stream.transmit_side>` on `stream` as ready for writing. Must only be called from the Broker thread. When the associated file descriptor becomes ready for writing, :meth:`BasicStream.on_transmit` will be called. """ _vv and IOLOG.debug('%r._start_transmit(%r)', self, stream) side = stream.transmit_side assert side and not side.closed self.poller.start_transmit(side.fd, (side, stream.on_transmit)) def _stop_transmit(self, stream): """ Mark the :attr:`transmit_side <Stream.receive_side>` on `stream` as not ready for writing. """ _vv and IOLOG.debug('%r._stop_transmit(%r)', self, stream) self.poller.stop_transmit(stream.transmit_side.fd) def keep_alive(self): """ Return :data:`True` if any reader's :attr:`Side.keep_alive` attribute is :data:`True`, or any :class:`Context` is still registered that is not the master. Used to delay shutdown while some important work is in progress (e.g. log draining). """ it = (side.keep_alive for (_, (side, _)) in self.poller.readers) return sum(it, 0) > 0 or self.timers.get_timeout() is not None def defer_sync(self, func): """ Arrange for `func()` to execute on :class:`Broker` thread, blocking the current thread until a result or exception is available. :returns: Return value of `func()`. 
""" latch = Latch() def wrapper(): try: latch.put(func()) except Exception: latch.put(sys.exc_info()[1]) self.defer(wrapper) res = latch.get() if isinstance(res, Exception): raise res return res def _call(self, stream, func): """ Call `func(self)`, catching any exception that might occur, logging it, and force-disconnecting the related `stream`. """ try: func(self) except Exception: LOG.exception('%r crashed', stream) stream.on_disconnect(self) def _loop_once(self, timeout=None): """ Execute a single :class:`Poller` wait, dispatching any IO events that caused the wait to complete. :param float timeout: If not :data:`None`, maximum time in seconds to wait for events. """ _vv and IOLOG.debug('%r._loop_once(%r, %r)', self, timeout, self.poller) timer_to = self.timers.get_timeout() if timeout is None: timeout = timer_to elif timer_to is not None and timer_to < timeout: timeout = timer_to #IOLOG.debug('readers =\n%s', pformat(self.poller.readers)) #IOLOG.debug('writers =\n%s', pformat(self.poller.writers)) for side, func in self.poller.poll(timeout): self._call(side.stream, func) if timer_to is not None: self.timers.expire() def _broker_exit(self): """ Forcefully call :meth:`Stream.on_disconnect` on any streams that failed to shut down gracefully, then discard the :class:`Poller`. """ for _, (side, _) in self.poller.readers + self.poller.writers: LOG.debug('%r: force disconnecting %r', self, side) side.stream.on_disconnect(self) self.poller.close() def _broker_shutdown(self): """ Invoke :meth:`Stream.on_shutdown` for every active stream, then allow up to :attr:`shutdown_timeout` seconds for the streams to unregister themselves, logging an error if any did not unregister during the grace period. 
""" for _, (side, _) in self.poller.readers + self.poller.writers: self._call(side.stream, side.stream.on_shutdown) deadline = time.time() + self.shutdown_timeout while self.keep_alive() and time.time() < deadline: self._loop_once(max(0, deadline - time.time())) if self.keep_alive(): LOG.error('%r: pending work still existed %d seconds after ' 'shutdown began. This may be due to a timer that is yet ' 'to expire, or a child connection that did not fully ' 'shut down.', self, self.shutdown_timeout) def _do_broker_main(self): """ Broker thread main function. Dispatches IO events until :meth:`shutdown` is called. """ # For Python 2.4, no way to retrieve ident except on thread. self._waker.protocol.broker_ident = thread.get_ident() try: while self._alive: self._loop_once() fire(self, 'shutdown') self._broker_shutdown() except Exception: e = sys.exc_info()[1] LOG.exception('broker crashed') syslog.syslog(syslog.LOG_ERR, 'broker crashed: %s' % (e,)) syslog.closelog() # prevent test 'fd leak'. self._alive = False # Ensure _alive is consistent on crash. self._exitted = True self._broker_exit() def _broker_main(self): try: _profile_hook('mitogen.broker', self._do_broker_main) finally: # 'finally' to ensure _on_broker_exit() can always SIGTERM. fire(self, 'exit') def shutdown(self): """ Request broker gracefully disconnect streams and stop. Safe to call from any thread. """ _v and LOG.debug('%r: shutting down', self) def _shutdown(): self._alive = False if self._alive and not self._exitted: self.defer(_shutdown) def join(self): """ Wait for the broker to stop, expected to be called after :meth:`shutdown`. """ self._thread.join() def __repr__(self): return 'Broker(%04x)' % (id(self) & 0xffff,) class Dispatcher(object): """ Implementation of the :data:`CALL_FUNCTION` handle for a child context. Listens on the child's main thread for messages sent by :class:`mitogen.parent.CallChain` and dispatches the function calls they describe. 
If a :class:`mitogen.parent.CallChain` sending a message is in pipelined mode, any exception that occurs is recorded, and causes all subsequent calls with the same `chain_id` to fail with the same exception. """ def __repr__(self): return 'Dispatcher' def __init__(self, econtext): self.econtext = econtext #: Chain ID -> CallError if prior call failed. self._error_by_chain_id = {} self.recv = Receiver( router=econtext.router, handle=CALL_FUNCTION, policy=has_parent_authority, ) #: The :data:`CALL_SERVICE` :class:`Receiver` that will eventually be #: reused by :class:`mitogen.service.Pool`, should it ever be loaded. #: This is necessary for race-free reception of all service requests #: delivered regardless of whether the stub or real service pool are #: loaded. See #547 for related sorrows. Dispatcher._service_recv = Receiver( router=econtext.router, handle=CALL_SERVICE, policy=has_parent_authority, ) self._service_recv.notify = self._on_call_service listen(econtext.broker, 'shutdown', self.recv.close) @classmethod @takes_econtext def forget_chain(cls, chain_id, econtext): econtext.dispatcher._error_by_chain_id.pop(chain_id, None) def _parse_request(self, msg): data = msg.unpickle(throw=False) _v and LOG.debug('%r: dispatching %r', self, data) chain_id, modname, klass, func, args, kwargs = data obj = import_module(modname) if klass: obj = getattr(obj, klass) fn = getattr(obj, func) if getattr(fn, 'mitogen_takes_econtext', None): kwargs.setdefault('econtext', self.econtext) if getattr(fn, 'mitogen_takes_router', None): kwargs.setdefault('router', self.econtext.router) return chain_id, fn, args, kwargs def _dispatch_one(self, msg): try: chain_id, fn, args, kwargs = self._parse_request(msg) except Exception: return None, CallError(sys.exc_info()[1]) if chain_id in self._error_by_chain_id: return chain_id, self._error_by_chain_id[chain_id] try: return chain_id, fn(*args, **kwargs) except Exception: e = CallError(sys.exc_info()[1]) if chain_id is not None: 
self._error_by_chain_id[chain_id] = e return chain_id, e def _on_call_service(self, recv): """ Notifier for the :data:`CALL_SERVICE` receiver. This is called on the :class:`Broker` thread for any service messages arriving at this context, for as long as no real service pool implementation is loaded. In order to safely bootstrap the service pool implementation a sentinel message is enqueued on the :data:`CALL_FUNCTION` receiver in order to wake the main thread, where the importer can run without any possibility of suffering deadlock due to concurrent uses of the importer. Should the main thread be blocked indefinitely, preventing the import from ever running, if it is blocked waiting on a service call, then it means :mod:`mitogen.service` has already been imported and :func:`mitogen.service.get_or_create_pool` has already run, meaning the service pool is already active and the duplicate initialization was not needed anyway. #547: This trickery is needed to avoid the alternate option of spinning a temporary thread to import the service pool, which could deadlock if a custom import hook executing on the main thread (under the importer lock) would block waiting for some data that was in turn received by a service. Main thread import lock can't be released until service is running, service cannot satisfy request until import lock is released. 
""" self.recv._on_receive(Message(handle=STUB_CALL_SERVICE)) def _init_service_pool(self): import mitogen.service mitogen.service.get_or_create_pool(router=self.econtext.router) def _dispatch_calls(self): for msg in self.recv: if msg.handle == STUB_CALL_SERVICE: if msg.src_id == mitogen.context_id: self._init_service_pool() continue chain_id, ret = self._dispatch_one(msg) _v and LOG.debug('%r: %r -> %r', self, msg, ret) if msg.reply_to: msg.reply(ret) elif isinstance(ret, CallError) and chain_id is None: LOG.error('No-reply function call failed: %s', ret) def run(self): if self.econtext.config.get('on_start'): self.econtext.config['on_start'](self.econtext) _profile_hook('mitogen.child_main', self._dispatch_calls) class ExternalContext(object): """ External context implementation. This class contains the main program implementation for new children. It is responsible for setting up everything about the process environment, import hooks, standard IO redirection, logging, configuring a :class:`Router` and :class:`Broker`, and finally arranging for :class:`Dispatcher` to take over the main thread after initialization is complete. .. attribute:: broker The :class:`mitogen.core.Broker` instance. .. attribute:: context The :class:`mitogen.core.Context` instance. .. attribute:: channel The :class:`mitogen.core.Channel` over which :data:`CALL_FUNCTION` requests are received. .. attribute:: importer The :class:`mitogen.core.Importer` instance. .. attribute:: stdout_log The :class:`IoLogger` connected to :data:`sys.stdout`. .. attribute:: stderr_log The :class:`IoLogger` connected to :data:`sys.stderr`. 
""" detached = False def __init__(self, config): self.config = config def _on_broker_exit(self): if not self.config['profiling']: os.kill(os.getpid(), signal.SIGTERM) def _on_shutdown_msg(self, msg): if not msg.is_dead: _v and LOG.debug('shutdown request from context %d', msg.src_id) self.broker.shutdown() def _on_parent_disconnect(self): if self.detached: mitogen.parent_ids = [] mitogen.parent_id = None LOG.info('Detachment complete') else: _v and LOG.debug('parent stream is gone, dying.') self.broker.shutdown() def detach(self): self.detached = True stream = self.router.stream_by_id(mitogen.parent_id) if stream: # not double-detach()'d os.setsid() self.parent.send_await(Message(handle=DETACHING)) LOG.info('Detaching from %r; parent is %s', stream, self.parent) for x in range(20): pending = self.broker.defer_sync(stream.protocol.pending_bytes) if not pending: break time.sleep(0.05) if pending: LOG.error('Stream had %d bytes after 2000ms', pending) self.broker.defer(stream.on_disconnect, self.broker) def _setup_master(self): Router.max_message_size = self.config['max_message_size'] if self.config['profiling']: enable_profiling() self.broker = Broker(activate_compat=False) self.router = Router(self.broker) self.router.debug = self.config.get('debug', False) self.router.unidirectional = self.config['unidirectional'] self.router.add_handler( fn=self._on_shutdown_msg, handle=SHUTDOWN, policy=has_parent_authority, ) self.master = Context(self.router, 0, 'master') parent_id = self.config['parent_ids'][0] if parent_id == 0: self.parent = self.master else: self.parent = Context(self.router, parent_id, 'parent') in_fd = self.config.get('in_fd', 100) in_fp = os.fdopen(os.dup(in_fd), 'rb', 0) os.close(in_fd) out_fp = os.fdopen(os.dup(self.config.get('out_fd', 1)), 'wb', 0) self.stream = MitogenProtocol.build_stream(self.router, parent_id) self.stream.accept(in_fp, out_fp) self.stream.name = 'parent' self.stream.receive_side.keep_alive = False listen(self.stream, 'disconnect', 
self._on_parent_disconnect) listen(self.broker, 'exit', self._on_broker_exit) def _reap_first_stage(self): try: os.wait() # Reap first stage. except OSError: pass # No first stage exists (e.g. fakessh) def _setup_logging(self): self.log_handler = LogHandler(self.master) root = logging.getLogger() root.setLevel(self.config['log_level']) root.handlers = [self.log_handler] if self.config['debug']: enable_debug_logging() def _setup_importer(self): importer = self.config.get('importer') if importer: importer._install_handler(self.router) importer._context = self.parent else: core_src_fd = self.config.get('core_src_fd', 101) if core_src_fd: fp = os.fdopen(core_src_fd, 'rb', 1) try: core_src = fp.read() # Strip "ExternalContext.main()" call from last line. core_src = b('\n').join(core_src.splitlines()[:-1]) finally: fp.close() else: core_src = None importer = Importer( self.router, self.parent, core_src, self.config.get('whitelist', ()), self.config.get('blacklist', ()), ) self.importer = importer self.router.importer = importer sys.meta_path.insert(0, self.importer) def _setup_package(self): global mitogen mitogen = imp.new_module('mitogen') mitogen.__package__ = 'mitogen' mitogen.__path__ = [] mitogen.__loader__ = self.importer mitogen.main = lambda *args, **kwargs: (lambda func: None) mitogen.core = sys.modules['__main__'] mitogen.core.__file__ = 'x/mitogen/core.py' # For inspect.getsource() mitogen.core.__loader__ = self.importer sys.modules['mitogen'] = mitogen sys.modules['mitogen.core'] = mitogen.core del sys.modules['__main__'] def _setup_globals(self): mitogen.is_master = False mitogen.__version__ = self.config['version'] mitogen.context_id = self.config['context_id'] mitogen.parent_ids = self.config['parent_ids'][:] mitogen.parent_id = mitogen.parent_ids[0] def _nullify_stdio(self): """ Open /dev/null to replace stdio temporarily. In case of odd startup, assume we may be allocated a standard handle. 
""" for stdfd, mode in ((0, os.O_RDONLY), (1, os.O_RDWR), (2, os.O_RDWR)): fd = os.open('/dev/null', mode) if fd != stdfd: os.dup2(fd, stdfd) os.close(fd) def _preserve_tty_fp(self): """ #481: when stderr is a TTY due to being started via tty_create_child() or hybrid_tty_create_child(), and some privilege escalation tool like prehistoric versions of sudo exec this process over the top of itself, there is nothing left to keep the slave PTY open after we replace our stdio. Therefore if stderr is a TTY, keep around a permanent dup() to avoid receiving SIGHUP. """ try: if os.isatty(2): self.reserve_tty_fp = os.fdopen(os.dup(2), 'r+b', 0) set_cloexec(self.reserve_tty_fp.fileno()) except OSError: pass def _setup_stdio(self): self._preserve_tty_fp() # When sys.stdout was opened by the runtime, overwriting it will not # close FD 1. However when forking from a child that previously used # fdopen(), overwriting it /will/ close FD 1. So we must swallow the # close before IoLogger overwrites FD 1, otherwise its new FD 1 will be # clobbered. Additionally, stdout must be replaced with /dev/null prior # to stdout.close(), since if block buffering was active in the parent, # any pre-fork buffered data will be flushed on close(), corrupting the # connection to the parent. self._nullify_stdio() sys.stdout.close() self._nullify_stdio() self.loggers = [] for name, fd in (('stdout', 1), ('stderr', 2)): log = IoLoggerProtocol.build_stream(name, fd) self.broker.start_receive(log) self.loggers.append(log) # Reopen with line buffering. 
        # Reopen FD 1 with line buffering so print()/stdout flushes per line.
        sys.stdout = os.fdopen(1, 'w', 1)

    def main(self):
        """
        Child-process entry point: build the Broker/Router, install logging,
        the importer, stdio redirection and globals, then hand the main
        thread to :class:`Dispatcher` until shutdown.
        """
        self._setup_master()
        try:
            try:
                # Ordering matters: logging before importer so import errors
                # are reported; importer before package setup so the synthetic
                # `mitogen` package can resolve through it.
                self._setup_logging()
                self._setup_importer()
                self._reap_first_stage()
                if self.config.get('setup_package', True):
                    self._setup_package()
                self._setup_globals()
                if self.config.get('setup_stdio', True):
                    self._setup_stdio()

                self.dispatcher = Dispatcher(self)
                self.router.register(self.parent, self.stream)
                # Refresh cached log-level flags now handlers are installed.
                self.router._setup_logging()

                sys.executable = os.environ.pop('ARGV0', sys.executable)
                _v and LOG.debug('Parent is context %r (%s); my ID is %r',
                                 self.parent.context_id, self.parent.name,
                                 mitogen.context_id)
                _v and LOG.debug('pid:%r ppid:%r uid:%r/%r, gid:%r/%r host:%r',
                                 os.getpid(), os.getppid(), os.geteuid(),
                                 os.getuid(), os.getegid(), os.getgid(),
                                 socket.gethostname())
                _v and LOG.debug('Recovered sys.executable: %r', sys.executable)

                if self.config.get('send_ec2', True):
                    # Handshake byte sequence telling the parent we are ready.
                    self.stream.transmit_side.write(b('MITO002\n'))
                self.broker._py24_25_compat()
                self.log_handler.uncork()
                # Blocks here dispatching CALL_FUNCTION until shutdown.
                self.dispatcher.run()
                _v and LOG.debug('ExternalContext.main() normal exit')
            except KeyboardInterrupt:
                LOG.debug('KeyboardInterrupt received, exiting gracefully.')
            except BaseException:
                LOG.exception('ExternalContext.main() crashed')
                raise
        finally:
            # Always tear the broker down, even on crash, so the process exits.
            self.broker.shutdown()
            self.broker.join()
# dataset artifact (file path), commented out — bare path is a Python syntax error:
# ./CrossVul/dataset_final_sorted/CWE-254/py/good_1020_0
# dataset artifact (sample id), commented out — would raise NameError if executed:
# crossvul-python_data_bad_4859_1
# Access-control model for the web2py admin application.
#
# Responsibilities: refuse insecure channels, load and verify the admin
# password, throttle brute-force login attempts via a hosts.deny file,
# expire idle sessions, and gate every controller behind authorization.

import base64
import os
import time

from gluon import portalocker
from gluon.admin import apath
from gluon.fileutils import read_file
from gluon.utils import web2py_uuid

# ###########################################################
# ## make sure administrator is on localhost or https
# ###########################################################

http_host = request.env.http_host.split(':')[0]

if request.env.web2py_runtime_gae:
    # On Google App Engine the filesystem is read-only; keep sessions in
    # the datastore instead.
    session_db = DAL('gae')
    session.connect(request, response, db=session_db)
    hosts = (http_host, )
    is_gae = True
else:
    is_gae = False

if request.is_https:
    session.secure()
elif not request.is_local and not DEMO_MODE:
    raise HTTP(200, T('Admin is disabled because insecure channel'))

# Load the admin password hash from parameters_<port>.py into _config.
try:
    _config = {}
    port = int(request.env.server_port or 0)
    restricted(
        read_file(apath('../parameters_%i.py' % port, request)), _config)
    if not 'password' in _config or not _config['password']:
        raise HTTP(200, T('admin disabled because no admin password'))
except IOError:
    import gluon.fileutils
    if is_gae:
        # No parameters file on GAE: fall back to Google account credentials.
        if gluon.fileutils.check_credentials(request):
            session.authorized = True
            session.last_time = time.time()
        else:
            raise HTTP(200,
                       T('admin disabled because not supported on google app engine'))
    else:
        raise HTTP(
            200, T('admin disabled because unable to access password file'))


def verify_password(password):
    """Check `password` against the configured admin credential.

    Three modes: DEMO_MODE (always accepted), PAM delegation (configured
    password of the form ``pam_user:<user>``), or comparison against the
    stored CRYPT hash.  On success a fresh ``session.hmac_key`` is issued
    for URL signing.  Returns True/False.
    """
    session.pam_user = None
    if DEMO_MODE:
        ret = True
    elif not _config.get('password'):
        # BUGFIX: was `ret - False`, a no-op subtraction expression that
        # raised NameError (ret unbound) instead of rejecting the login
        # when no admin password is configured.
        ret = False
    elif _config['password'].startswith('pam_user:'):
        session.pam_user = _config['password'][9:].strip()
        import gluon.contrib.pam
        ret = gluon.contrib.pam.authenticate(session.pam_user, password)
    else:
        ret = _config['password'] == CRYPT()(password)[0]
    if ret:
        session.hmac_key = web2py_uuid()
    return ret

# ###########################################################
# ## handle brute-force login attacks
# ###########################################################

deny_file = os.path.join(request.folder, 'private', 'hosts.deny')
allowed_number_of_attempts = 5
expiration_failed_logins = 3600  # seconds before failed attempts expire


def read_hosts_deny():
    """Return {client_ip: (attempt_count, last_attempt_ts)} parsed from
    the hosts.deny file; empty dict when the file does not exist."""
    hosts = {}
    if os.path.exists(deny_file):
        f = open(deny_file, 'r')
        try:
            portalocker.lock(f, portalocker.LOCK_SH)
            for line in f.readlines():
                if not line.strip() or line.startswith('#'):
                    continue
                fields = line.strip().split()
                if len(fields) > 2:
                    hosts[fields[0].strip()] = (      # ip
                        int(fields[1].strip()),       # n attempts
                        int(fields[2].strip())        # last attempt time
                    )
        finally:
            # Always release the lock and close, even on a malformed line.
            portalocker.unlock(f)
            f.close()
    return hosts


def write_hosts_deny(denied_hosts):
    """Persist the denied-hosts mapping, dropping expired entries."""
    f = open(deny_file, 'w')
    try:
        portalocker.lock(f, portalocker.LOCK_EX)
        for key, val in denied_hosts.items():
            if time.time() - val[1] < expiration_failed_logins:
                line = '%s %s %s\n' % (key, val[0], val[1])
                f.write(line)
    finally:
        portalocker.unlock(f)
        f.close()


def login_record(success=True):
    """Record a login attempt for the current client.

    On success the client's failure record is cleared; on failure the
    counter is incremented with an exponential delay (2**n seconds).
    Returns the number of recorded failed attempts.
    """
    denied_hosts = read_hosts_deny()
    val = (0, 0)
    if success and request.client in denied_hosts:
        del denied_hosts[request.client]
    elif not success and not request.is_local:
        val = denied_hosts.get(request.client, (0, 0))
        if time.time() - val[1] < expiration_failed_logins \
                and val[0] >= allowed_number_of_attempts:
            return val[0]  # locked out
        # Exponential backoff before registering the new failure.
        time.sleep(2 ** val[0])
        val = (val[0] + 1, int(time.time()))
        denied_hosts[request.client] = val
    write_hosts_deny(denied_hosts)
    return val[0]

# ###########################################################
# ## session expiration
# ###########################################################

t0 = time.time()
if session.authorized:
    if session.last_time and session.last_time < t0 - EXPIRATION:
        session.flash = T('session expired')
        session.authorized = False
    else:
        session.last_time = t0

# Track whether to serve the mobile UI ('true'/'false'/'auto').
if request.vars.is_mobile in ('true', 'false', 'auto'):
    session.is_mobile = request.vars.is_mobile or 'auto'
if request.controller == 'default' and request.function == 'index':
    if not request.vars.is_mobile:
        session.is_mobile = 'auto'
if not session.is_mobile:
    session.is_mobile = 'auto'

if session.is_mobile == 'true':
    is_mobile = True
elif session.is_mobile == 'false':
    is_mobile = False
else:
    is_mobile = request.user_agent().get('is_mobile', False)

if DEMO_MODE:
    session.authorized = True
    session.forget()

if request.controller == "webservices":
    # HTTP Basic authentication for the webservices controller.
    basic = request.env.http_authorization
    if not basic or not basic[:6].lower() == 'basic ':
        raise HTTP(401, "Wrong credentials")
    # BUGFIX: split at most once — per RFC 2617 only the first ':' separates
    # user from password, so passwords containing colons now work instead of
    # raising ValueError on unpacking.
    (username, password) = base64.b64decode(basic[6:]).split(':', 1)
    if not verify_password(password) or MULTI_USER_MODE:
        time.sleep(10)
        raise HTTP(403, "Not authorized")
elif not session.authorized and not \
        (request.controller + '/' + request.function in
         ('default/index', 'default/user',
          'plugin_jqmobile/index', 'plugin_jqmobile/about')):
    # Not logged in: bounce to the login page, remembering the target URL.
    if request.env.query_string:
        query_string = '?' + request.env.query_string
    else:
        query_string = ''
    if request.env.web2py_original_uri:
        url = request.env.web2py_original_uri
    else:
        url = request.env.path_info + query_string
    redirect(URL(request.application, 'default', 'index',
                 vars=dict(send=url)))
elif session.authorized and \
        request.controller == 'default' and \
        request.function == 'index':
    redirect(URL(request.application, 'default', 'site'))

if request.controller == 'appadmin' and DEMO_MODE:
    session.flash = 'Appadmin disabled in demo mode'
    redirect(URL('default', 'sites'))
./CrossVul/dataset_final_sorted/CWE-254/py/bad_4859_1
crossvul-python_data_good_5219_3
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import os import os.path import yaml import logging import glob from build_pack_utils import FileUtil _log = logging.getLogger('helpers') class FakeBuilder(object): def __init__(self, ctx): self._ctx = ctx class FakeInstaller(object): def __init__(self, builder, installer): self._installer = installer self.builder = builder def setup_webdir_if_it_doesnt_exist(ctx): if is_web_app(ctx): webdirPath = os.path.join(ctx['BUILD_DIR'], ctx['WEBDIR']) if not os.path.exists(webdirPath): fu = FileUtil(FakeBuilder(ctx), move=True) fu.under('BUILD_DIR') fu.into('WEBDIR') fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.bp')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.extensions')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.bp-config')) fu.where_name_does_not_match( '^%s$' % os.path.join(ctx['BUILD_DIR'], 'manifest.yml')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], ctx['LIBDIR'])) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.profile.d')) fu.where_name_does_not_match( '^%s$' % os.path.join(ctx['BUILD_DIR'], '.profile')) fu.done() def 
log_bp_version(ctx): version_file = os.path.join(ctx['BP_DIR'], 'VERSION') if os.path.exists(version_file): print('-------> Buildpack version %s' % open(version_file).read()) def setup_log_dir(ctx): os.makedirs(os.path.join(ctx['BUILD_DIR'], 'logs')) def load_manifest(ctx): manifest_path = os.path.join(ctx['BP_DIR'], 'manifest.yml') _log.debug('Loading manifest from %s', manifest_path) return yaml.load(open(manifest_path)) def find_all_php_versions(dependencies): versions = [] for dependency in dependencies: if dependency['name'] == 'php': versions.append(dependency['version']) return versions def validate_php_version(ctx): if ctx['PHP_VERSION'] in ctx['ALL_PHP_VERSIONS']: _log.debug('App selected PHP [%s]', ctx['PHP_VERSION']) else: _log.warning('Selected version of PHP [%s] not available. Defaulting' ' to the latest version [%s]', ctx['PHP_VERSION'], ctx['PHP_55_LATEST']) ctx['PHP_VERSION'] = ctx['PHP_55_LATEST'] def _get_supported_php_extensions(ctx): php_extensions = [] php_extension_glob = os.path.join(ctx["PHP_INSTALL_PATH"], 'lib', 'php', 'extensions', 'no-debug-non-zts-*') php_extension_directory = glob.glob(php_extension_glob)[0] for root, dirs, files in os.walk(php_extension_directory): for f in files: if '.so' in f: php_extensions.append(f.replace('.so', '')) return php_extensions def validate_php_extensions(ctx): filtered_extensions = [] requested_extensions = ctx['PHP_EXTENSIONS'] supported_extensions = _get_supported_php_extensions(ctx) for extension in requested_extensions: if extension not in supported_extensions: print("The extension '%s' is not provided by this buildpack." 
% extension, file=os.sys.stderr) else: filtered_extensions.append(extension) ctx['PHP_EXTENSIONS'] = filtered_extensions def convert_php_extensions(ctx): _log.debug('Converting PHP extensions') SKIP = ('cli', 'pear', 'cgi') ctx['PHP_EXTENSIONS'] = \ "\n".join(["extension=%s.so" % ex for ex in ctx['PHP_EXTENSIONS'] if ex not in SKIP]) path = '' ctx['ZEND_EXTENSIONS'] = \ "\n".join(['zend_extension="%s"' % os.path.join(path, "%s.so" % ze) for ze in ctx['ZEND_EXTENSIONS']]) def is_web_app(ctx): return ctx.get('WEB_SERVER', '') != 'none' def find_stand_alone_app_to_run(ctx): app = ctx.get('APP_START_CMD', None) if not app: possible_files = ('app.php', 'main.php', 'run.php', 'start.php') for pf in possible_files: if os.path.exists(os.path.join(ctx['BUILD_DIR'], pf)): app = pf break if not app: print('Build pack could not find a PHP file to execute!') _log.info('Build pack could not find a file to execute. Either ' 'set "APP_START_CMD" or include one of these files [%s]', ", ".join(possible_files)) app = 'app.php' return app
./CrossVul/dataset_final_sorted/CWE-254/py/good_5219_3
crossvul-python_data_good_4859_0
# -*- coding: utf-8 -*-
# Head of the admin application's default controller: module guards,
# file-access helpers, and the login / password-management actions.

EXPERIMENTAL_STUFF = True
MAXNFILES = 1000

if EXPERIMENTAL_STUFF:
    if is_mobile:
        # Serve the mobile variants of the views.
        response.view = response.view.replace('default/', 'default.mobile/')
        response.menu = []

import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import find_exposed_functions
from glob import glob
import shutil
import platform

try:
    import git
    if git.__version__ < '0.3.1':
        raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    have_git = True
except ImportError, e:
    have_git = False
    GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e

from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)

# Block destructive/administrative actions in the restricted run modes.
if DEMO_MODE and request.function in ['change_password', 'pack',
                                      'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall', 'cleanup',
                                      'compile_app', 'remove_compiled_app', 'delete', 'delete_plugin',
                                      'create_file', 'upload_file', 'update_languages', 'reload_routes',
                                      'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))

if is_gae and request.function in ('edit', 'edit_language',
                                   'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))

if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi user mode')
    redirect(URL('site'))

if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))

if not session.token:
    session.token = web2py_uuid()


def count_lines(data):
    """Return the number of non-blank, non-comment lines in `data`."""
    return len([line for line in data.split('\n')
                if line.strip() and not line.startswith('#')])


def log_progress(app, mode='EDIT', filename=None, progress=0):
    """Append an edit/delete event to the application's progress.log."""
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        safe_open(progress_file, 'w').write('[%s] START\n' % now)
    if filename:
        safe_open(progress_file, 'a').write(
            '[%s] %s %s: %s\n' % (now, mode, filename, progress))


def safe_open(a, b):
    """open() wrapper that silently discards writes in DEMO/GAE mode."""
    if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b):
        class tmp:
            def write(self, data):
                pass

            def close(self):
                pass
        return tmp()
    return open(a, b)


def safe_read(a, b='r'):
    """Read a whole file via safe_open(), always closing it."""
    safe_file = safe_open(a, b)
    try:
        return safe_file.read()
    finally:
        safe_file.close()


def safe_write(a, value, b='w'):
    """Write `value` to a file via safe_open(), always closing it."""
    safe_file = safe_open(a, b)
    try:
        safe_file.write(value)
    finally:
        safe_file.close()


def get_app(name=None):
    """Return the requested app name if it exists and the current user is
    authorized for it; otherwise flash and redirect to the site page."""
    app = name or request.args(0)
    if (app and os.path.exists(apath(app, r=request)) and
            (not MULTI_USER_MODE or is_manager() or
             db(db.app.name == app)(db.app.owner == auth.user.id).count())):
        return app
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))


def index():
    """ Index handler """
    send = request.vars.send
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not send:
        send = URL('site')
    if session.authorized:
        redirect(send)
    # NOTE(review): failed_login_count and allowed_number_of_attempts are
    # expected from the model layer; not defined in this file — verify the
    # deployed model provides them.
    elif failed_login_count() >= allowed_number_of_attempts:
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)

            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False

            session.last_time = t0
            if isinstance(send, list):  # ## why does this happen?
                send = str(send[0])
            redirect(send)
        else:
            times_denied = login_record(False)
            if times_denied >= allowed_number_of_attempts:
                response.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif times_denied == allowed_number_of_attempts - 1:
                response.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                response.flash = T('invalid password.')
    return dict(send=send)


def check_version():
    """ Checks if web2py is up to date """
    session.forget()
    session._unlock(response)
    new_version, version = check_new_version(request.env.web2py_version,
                                             WEB2PY_VERSION_URL)
    if new_version == -1:
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif new_version != True:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # Frozen Windows binary cannot upgrade itself in place.
        return SPAN('You should upgrade to %s' % version.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade now to %s')
                         % version.split('(')[0])


def logout():
    """ Logout handler """
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))


def change_password():
    """Change the admin password (not available for PAM-backed logins)."""
    if session.pam_user:
        session.flash = T(
            'PAM authenticated user, cannot change password here')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('current_admin_password', 'password'),
                           Field('new_admin_password',
                                 'password', requires=IS_STRONG()),
                           Field('new_admin_password_again', 'password'),
                           _class="span4 well")
    if form.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            form.errors.current_admin_password = T('invalid password')
        elif form.vars.new_admin_password != form.vars.new_admin_password_again:
            form.errors.new_admin_password_again = T('no match')
        else:
            path = abspath('parameters_%s.py' % request.env.server_port)
            safe_write(path, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    return dict(form=form)
def site():
    """ Site handler """
    myversion = request.env.web2py_version

    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars

    class IS_VALID_APPNAME(object):
        def __call__(self, value):
            if not re.compile('^\w+$').match(value):
                return (value, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(r=request), value)):
                return (value, T('Application exists already'))
            return (value, None)
    is_appname = IS_VALID_APPNAME()
    form_create = SQLFORM.factory(Field('name', requires=is_appname),
                                  table_name='appcreate')
    form_update = SQLFORM.factory(Field('name', requires=is_appname),
                                  Field('file', 'upload', uploadfield=False),
                                  Field('url'),
                                  Field('overwrite', 'boolean'),
                                  table_name='appupdate')
    form_create.process()
    form_update.process()

    if DEMO_MODE:
        pass
    elif form_create.accepted:
        # create a new application
        appname = cleanpath(form_create.vars.name)
        created, error = app_create(appname, request, info=True)
        if created:
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T('new application "%s" created', appname)
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', appname),
                    PRE(error))
        redirect(URL(r=request))
    elif form_update.accepted:
        if (form_update.vars.url or '').endswith('.git'):
            # Import an application by cloning a git repository.
            if not have_git:
                session.flash = GIT_MISSING
                redirect(URL(r=request))
            target = os.path.join(apath(r=request), form_update.vars.name)
            try:
                new_repo = git.Repo.clone_from(form_update.vars.url, target)
                session.flash = T('new application "%s" imported',
                                  form_update.vars.name)
            except git.GitCommandError, err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(r=request))
        elif form_update.vars.url:
            # fetch an application via URL or file upload
            try:
                f = urllib.urlopen(form_update.vars.url)
                if f.code == 404:
                    raise Exception("404 file not found")
            except Exception, e:
                session.flash = \
                    DIV(T('Unable to download app because:'), PRE(repr(e)))
                redirect(URL(r=request))
            fname = form_update.vars.url
        elif form_update.accepted and form_update.vars.file:
            fname = request.vars.file.filename
            f = request.vars.file.file
        else:
            session.flash = 'No file uploaded and no URL specified'
            redirect(URL(r=request))
        if f:
            appname = cleanpath(form_update.vars.name)
            installed = app_install(appname, f, request, fname,
                                    overwrite=form_update.vars.overwrite)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=form_update.vars.name))
        redirect(URL(r=request))

    regex = re.compile('^\w+$')
    if is_manager():
        apps = [f for f in os.listdir(apath(r=request)) if regex.match(f)]
    else:
        apps = [f.name for f in db(db.app.owner == auth.user_id).select()]

    if FILTER_APPS:
        apps = [f for f in apps if f in FILTER_APPS]

    apps = sorted(apps, lambda a, b: cmp(a.upper(), b.upper()))
    myplatform = platform.python_version()
    return dict(app=None, apps=apps, myversion=myversion,
                myplatform=myplatform,
                form_create=form_create, form_update=form_update)


def report_progress(app):
    """Parse progress.log and return cumulative [days_ago, line_count]
    pairs for charting an application's edit history."""
    import datetime
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    regex = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(progress_file):
        return []
    matches = regex.findall(open(progress_file, 'r').read())
    events, counter = [], 0
    for m in matches:
        if not m:
            continue
        days = -(request.now - datetime.datetime.strptime(
            m[0], '%Y-%m-%d %H:%M:%S')).days
        counter += int(m[1])
        events.append([days, counter])
    return events


def pack():
    """Download the app as a .w2p package (compiled when extra args given)."""
    app = get_app()
    try:
        if len(request.args) == 1:
            fname = 'web2py.app.%s.w2p' % app
            filename = app_pack(app, request, raise_ex=True)
        else:
            fname = 'web2py.app.%s.compiled.w2p' % app
            filename = app_pack_compiled(app, request, raise_ex=True)
    except Exception, e:
        filename = None
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error: %s', e)
        redirect(URL('site'))


def pack_plugin():
    """Download a single plugin of the app as a .w2p package."""
    app = get_app()
    if len(request.args) == 2:
        fname = 'web2py.plugin.%s.w2p' % request.args[1]
        filename = plugin_pack(app, request.args[1], request)
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', args=request.args))


def pack_exe(app, base, filenames=None):
    """Bundle the app inside a freshly downloaded web2py Windows zip and
    stream the result to the client."""
    import urllib
    import zipfile
    from cStringIO import StringIO
    # Download latest web2py_win and open it with zipfile
    download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
    out = StringIO()
    out.write(urllib.urlopen(download_url).read())
    web2py_win = zipfile.ZipFile(out, mode='a')
    # Write routes.py with the application as default
    routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
    web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
    # Copy the application into the zipfile
    common_root = os.path.dirname(base)
    for filename in filenames:
        fname = os.path.join(base, filename)
        arcname = os.path.join('web2py/applications', app, filename)
        web2py_win.write(fname, arcname)
    web2py_win.close()
    response.headers['Content-Type'] = 'application/zip'
    response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
    out.seek(0)
    return response.stream(out)


def pack_custom():
    """Pack a user-selected subset of the app's files, either as a .w2p
    or as a self-contained Windows exe bundle."""
    app = get_app()
    base = apath(app, r=request)

    def ignore(fs):
        # Skip hidden, backup and temp files.
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]
    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        # Only accept paths that were actually offered above.
        valid_set = set(os.path.relpath(os.path.join(r, f), base)
                        for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            try:
                filename = app_pack(app, request, raise_ex=True,
                                    filenames=files)
            except Exception, e:
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', e)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()


def upgrade_web2py():
    """Confirm and perform an in-place upgrade of the web2py framework."""
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        (success, error) = upgrade(request)
        if success:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', error)
        redirect(URL('site'))
    return dict(dialog=dialog)


def uninstall():
    """Confirm and remove an application (after packing a backup)."""
    app = get_app()

    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'

    if dialog.accepted:
        if MULTI_USER_MODE:
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            # Pack a backup first; abort the uninstall if packing fails.
            filename = app_pack(app, request, raise_ex=True)
        except:
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)


def cleanup():
    """Clear the app's cache, error tickets and sessions."""
    app = get_app()
    clean = app_cleanup(app, request)
    if not clean:
        session.flash = T("some files could not be removed")
    else:
        session.flash = T('cache, errors and sessions cleaned')
    redirect(URL('site'))


def compile_app():
    """Byte-compile the application, reporting any failed views."""
    app = get_app()
    c = app_compile(app, request,
                    skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not c:
        session.flash = T('application compiled')
    elif isinstance(c, list):
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following views could not be compiled:'), BR()] +
                            [CAT(BR(), view) for view in c] +
                            [BR(), BR(),
                             T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(
            T('Cannot compile: there are errors in your app:'), CODE(c))
    redirect(URL('site'))


def remove_compiled_app():
    """ Remove the compiled application """
    app = get_app()
    remove_compiled_application(apath(app, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))


def delete():
    """ Object delete handler """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender

    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]

    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})

    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            lineno = count_lines(open(full_path, 'r').read())
            os.unlink(full_path)
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)


def enable():
    """Toggle the app's DISABLED marker file; the link must carry a valid
    HMAC signature bound to the current session."""
    if not URL.verify(request, hmac_key=session.hmac_key):
        raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(filename):
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    else:
        safe_open(filename, 'wb').write(
            'disabled: True\ntime-disabled: %s' % request.now)
        return SPAN(T('Enable'), _style='color:red')


def peek():
    """ Visualize object code """
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    try:
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)


def test():
    """ Execute controller tests """
    app = get_app()
    if len(request.args) > 1:
        file = request.args[1]
    else:
        file = '.*\.py'
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), file + '$')
    return dict(app=app, controllers=controllers)


def keepalive():
    # Ping endpoint used by the editor to keep the session alive.
    return ''


def search():
    """Return (as JSON) the app files whose content contains `keywords`."""
    keywords = request.vars.keywords or ''
    app = get_app()

    def match(filename, keywords):
        filename = os.path.join(apath(app, r=request), filename)
        if keywords in read_file(filename, 'rb'):
            return True
        return False

    path = apath(request.args[0], r=request)
    files1 = glob(os.path.join(path, '*/*.py'))
    files2 = glob(os.path.join(path, '*/*.html'))
    files3 = glob(os.path.join(path, '*/*/*.html'))
    files = [x[len(path) + 1:].replace('\\', '/')
             for x in files1 + files2 + files3 if match(x, keywords)]
    return response.json(dict(files=files,
                              message=T.M('Searching: **%s** %%{file}',
                                          len(files))))


def edit():
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    # Default editor preferences, overridden by settings.cfg when present.
    preferences = {'theme': 'web2py', 'editor': 'default',
                   'closetag': 'true', 'codefolding': 'false',
                   'tabwidth': '4', 'indentwithtabs': 'false',
                   'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())

    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html',
                               dict(app=app, editor_settings=preferences))

    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars:  # save new preferences
            post_vars = request.post_vars.items()
            # Since unchecked checkbox are not serialized, we must set them
            # as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences
                          if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = \
                    T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = \
                    T('Preferences saved on session only')
            response.headers["web2py-component-command"] = \
                "update_editor(%s);$('a[href=#editor_settings] button.close').click();" \
                % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings',
                       'filename': 'settings',
                       'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render(
                'default/editor_settings.html',
                {'editor_settings': preferences})
            return response.json(details)

    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'

    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        # Restore the .bak copy, saving the current content as the new .bak.
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))

        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])

        if request.vars.file_hash and request.vars.file_hash != file_hash:
            # Concurrent modification: stash the submission and let the user
            # resolve the conflict.
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json(
                    {'error': str(T('file changed on disk')),
                     'redirect': URL('resolve', args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)

    data_or_revert = (request.vars.data or request.vars.revert)

    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception, e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))

    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec 'import applications.%s.modules.%s' % (
                request.args[0], mopath)
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception, e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))

    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit',
                                  args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
            elif os.path.exists(aviewpath + '.html'):
                viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))

    if len(request.args) > 2 and request.args[1] == 'controllers':
        controller = (request.args[2])[:-3]
        functions = find_exposed_functions(data)
        functions = functions and sorted(functions) or []
    else:
        (controller, functions) = (None, None)

    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash,
                              'saved_on': saved_on,
                              'functions': functions,
                              'controller': controller,
                              'application': request.args[0],
                              'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details,
                                   editor_settings=preferences)
        else:
            return response.json(file_details)


def todolist():
    """ Returns all TODO of the requested app """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']

    def listfiles(app, dir, regexp='.*\.py$'):
        files =
sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp)) files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')] return files pattern = '#\s*(todo)+\s+(.*)' regex = re.compile(pattern, re.IGNORECASE) output = [] for d in dirs: for f in listfiles(app, d): matches = [] filename = apath(os.path.join(app, d, f), r=request) with open(filename, 'r') as f_s: src = f_s.read() for m in regex.finditer(src): start = m.start() lineno = src.count('\n', 0, start) + 1 matches.append({'text': m.group(0), 'lineno': lineno}) if len(matches) != 0: output.append({'filename': f, 'matches': matches, 'dir': d}) return {'todo': output, 'app': app} def editor_sessions(): config = Config(os.path.join(request.folder, 'settings.cfg'), section='editor_sessions', default_values={}) preferences = config.read() if request.vars.session_name and request.vars.files: session_name = request.vars.session_name files = request.vars.files preferences.update({session_name: ','.join(files)}) if config.save(preferences.items()): response.headers["web2py-component-flash"] = T('Session saved correctly') else: response.headers["web2py-component-flash"] = T('Session saved on session only') return response.render('default/editor_sessions.html', {'editor_sessions': preferences}) def resolve(): """ """ filename = '/'.join(request.args) # ## check if file is not there path = apath(filename, r=request) a = safe_read(path).split('\n') try: b = safe_read(path + '.1').split('\n') except IOError: session.flash = 'Other file, no longer there' redirect(URL('edit', args=request.args)) d = difflib.ndiff(a, b) def leading(line): """ """ # TODO: we really need to comment this z = '' for (k, c) in enumerate(line): if c == ' ': z += '&nbsp;' elif c == ' \t': z += '&nbsp;' elif k == 0 and c == '?': pass else: break return XML(z) def getclass(item): """ Determine item class """ operators = {' ': 'normal', '+': 'plus', '-': 'minus'} return operators[item[0]] if request.vars: c 
= '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0] == ' ' or 'line%i' % i in request.vars]) safe_write(path, c) session.flash = 'files merged' redirect(URL('edit', args=request.args)) else: # Making the short circuit compatible with <= python2.4 gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \ or INPUT(_type='checkbox', _name='line%i' % index, value=item[0] == '+') diff = TABLE(*[TR(TD(gen_data(i, item)), TD(item[0]), TD(leading(item[2:]), TT(item[2:].rstrip())), _class=getclass(item)) for (i, item) in enumerate(d) if item[0] != '?']) return dict(diff=diff, filename=filename) def edit_language(): """ Edit language file """ app = get_app() filename = '/'.join(request.args) response.title = request.args[-1] strings = read_dict(apath(filename, r=request)) if '__corrupted__' in strings: form = SPAN(strings['__corrupted__'], _class='error') return dict(filename=filename, form=form) keys = sorted(strings.keys(), lambda x, y: cmp( unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower())) rows = [] rows.append(H2(T('Original/Translation'))) for key in keys: name = md5_hash(key) s = strings[key] (prefix, sep, key) = key.partition('\x01') if sep: prefix = SPAN(prefix + ': ', _class='tm_ftag') k = key else: (k, prefix) = (prefix, '') _class = 'untranslated' if k == s else 'translated' if len(s) <= 40: elem = INPUT(_type='text', _name=name, value=s, _size=70, _class=_class) else: elem = TEXTAREA(_name=name, value=s, _cols=70, _rows=5, _class=_class) # Making the short circuit compatible with <= python2.4 k = (s != k) and k or B(k) new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"), CAT(elem, '\n', TAG.BUTTON( T('delete'), _onclick='return delkey("%s")' % name, _class='btn')), _id=name, _class='span6 well well-small') rows.append(DIV(new_row, _class="row-fluid")) rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls')) form = FORM(*rows) if form.accepts(request.vars, 
keepvalues=True): strs = dict() for key in keys: name = md5_hash(key) if form.vars[name] == chr(127): continue strs[key] = form.vars[name] write_dict(apath(filename, r=request), strs) session.flash = T('file saved on %(time)s', dict(time=time.ctime())) redirect(URL(r=request, args=request.args)) return dict(app=request.args[0], filename=filename, form=form) def edit_plurals(): """ Edit plurals file """ app = get_app() filename = '/'.join(request.args) plurals = read_plural_dict( apath(filename, r=request)) # plural forms dictionary nplurals = int(request.vars.nplurals) - 1 # plural forms quantity xnplurals = xrange(nplurals) if '__corrupted__' in plurals: # show error message and exit form = SPAN(plurals['__corrupted__'], _class='error') return dict(filename=filename, form=form) keys = sorted(plurals.keys(), lambda x, y: cmp( unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower())) tab_rows = [] for key in keys: name = md5_hash(key) forms = plurals[key] if len(forms) < nplurals: forms.extend(None for i in xrange(nplurals - len(forms))) tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key, _class='fake-input'))) tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals] tab_col2 = DIV(CAT(*tab_inputs)) tab_col3 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6')) tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row') tab_rows.append(tab_row) tab_rows.append(DIV(TAG['button'](T('update'), _type='submit', _class='btn btn-primary'), _class='controls')) tab_container = DIV(*tab_rows, **dict(_class="row-fluid")) form = FORM(tab_container) if form.accepts(request.vars, keepvalues=True): new_plurals = dict() for key in keys: name = md5_hash(key) if form.vars[name + '_0'] == chr(127): continue new_plurals[key] = [form.vars[name 
+ '_' + str(n)] for n in xnplurals] write_plural_dict(apath(filename, r=request), new_plurals) session.flash = T('file saved on %(time)s', dict(time=time.ctime())) redirect(URL(r=request, args=request.args, vars=dict( nplurals=request.vars.nplurals))) return dict(app=request.args[0], filename=filename, form=form) def about(): """ Read about info """ app = get_app() # ## check if file is not there about = safe_read(apath('%s/ABOUT' % app, r=request)) license = safe_read(apath('%s/LICENSE' % app, r=request)) return dict(app=app, about=MARKMIN(about), license=MARKMIN(license), progress=report_progress(app)) def design(): """ Application design handler """ app = get_app() if not response.flash and app == request.application: msg = T('ATTENTION: you cannot edit the running application!') response.flash = msg if request.vars and not request.vars.token == session.token: redirect(URL('logout')) if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str): filename = os.path.basename(request.vars.pluginfile.filename) if plugin_install(app, request.vars.pluginfile.file, request, filename): session.flash = T('new plugin installed') redirect(URL('design', args=app)) else: session.flash = \ T('unable to create application "%s"', request.vars.filename) redirect(URL(r=request)) elif isinstance(request.vars.pluginfile, str): session.flash = T('plugin not specified') redirect(URL(r=request)) # If we have only pyc files it means that # we cannot design if os.path.exists(apath('%s/compiled' % app, r=request)): session.flash = \ T('application is compiled and cannot be designed') redirect(URL('site')) # Get all models models = listdir(apath('%s/models/' % app, r=request), '.*\.py$') models = [x.replace('\\', '/') for x in models] defines = {} for m in models: data = safe_read(apath('%s/models/%s' % (app, m), r=request)) defines[m] = regex_tables.findall(data) defines[m].sort() # Get all controllers controllers = sorted( listdir(apath('%s/controllers/' % app, 
r=request), '.*\.py$')) controllers = [x.replace('\\', '/') for x in controllers] functions = {} for c in controllers: data = safe_read(apath('%s/controllers/%s' % (app, c), r=request)) items = find_exposed_functions(data) functions[c] = items and sorted(items) or [] # Get all views views = sorted( listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$')) views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')] extend = {} include = {} for c in views: data = safe_read(apath('%s/views/%s' % (app, c), r=request)) items = regex_extend.findall(data) if items: extend[c] = items[0][1] items = regex_include.findall(data) include[c] = [i[1] for i in items] # Get all modules modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$') modules = modules = [x.replace('\\', '/') for x in modules] modules.sort() # Get all private files privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*') privates = [x.replace('\\', '/') for x in privates] privates.sort() # Get all static files statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*', maxnum=MAXNFILES) statics = [x.replace(os.path.sep, '/') for x in statics] statics.sort() # Get all languages langpath = os.path.join(apath(app, r=request), 'languages') languages = dict([(lang, info) for lang, info in read_possible_languages(langpath).iteritems() if info[2] != 0]) # info[2] is langfile_mtime: # get only existed files # Get crontab cronfolder = apath('%s/cron' % app, r=request) crontab = apath('%s/cron/crontab' % app, r=request) if not is_gae: if not os.path.exists(cronfolder): os.mkdir(cronfolder) if not os.path.exists(crontab): safe_write(crontab, '#crontab') plugins = [] def filter_plugins(items, plugins): plugins += [item[7:].split('/')[0].split( '.')[0] for item in items if item.startswith('plugin_')] plugins[:] = list(set(plugins)) plugins.sort() return [item for item in items if not item.startswith('plugin_')] return dict(app=app, models=filter_plugins(models, 
plugins), defines=defines, controllers=filter_plugins(controllers, plugins), functions=functions, views=filter_plugins(views, plugins), modules=filter_plugins(modules, plugins), extend=extend, include=include, privates=filter_plugins(privates, plugins), statics=filter_plugins(statics, plugins), languages=languages, crontab=crontab, plugins=plugins) def delete_plugin(): """ Object delete handler """ app = request.args(0) plugin = request.args(1) plugin_name = 'plugin_' + plugin dialog = FORM.confirm( T('Delete'), {T('Cancel'): URL('design', args=app)}) if dialog.accepted: try: for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']: path = os.path.join(apath(app, r=request), folder) for item in os.listdir(path): if item.rsplit('.', 1)[0] == plugin_name: filename = os.path.join(path, item) if os.path.isdir(filename): shutil.rmtree(filename) else: os.unlink(filename) session.flash = T('plugin "%(plugin)s" deleted', dict(plugin=plugin)) except Exception: session.flash = T('unable to delete file plugin "%(plugin)s"', dict(plugin=plugin)) redirect(URL('design', args=request.args(0), anchor=request.vars.id2)) return dict(dialog=dialog, plugin=plugin) def plugin(): """ Application design handler """ app = get_app() plugin = request.args(1) if not response.flash and app == request.application: msg = T('ATTENTION: you cannot edit the running application!') response.flash = msg # If we have only pyc files it means that # we cannot design if os.path.exists(apath('%s/compiled' % app, r=request)): session.flash = \ T('application is compiled and cannot be designed') redirect(URL('site')) # Get all models models = listdir(apath('%s/models/' % app, r=request), '.*\.py$') models = [x.replace('\\', '/') for x in models] defines = {} for m in models: data = safe_read(apath('%s/models/%s' % (app, m), r=request)) defines[m] = regex_tables.findall(data) defines[m].sort() # Get all controllers controllers = sorted( listdir(apath('%s/controllers/' % app, 
r=request), '.*\.py$')) controllers = [x.replace('\\', '/') for x in controllers] functions = {} for c in controllers: data = safe_read(apath('%s/controllers/%s' % (app, c), r=request)) items = find_exposed_functions(data) functions[c] = items and sorted(items) or [] # Get all views views = sorted( listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$')) views = [x.replace('\\', '/') for x in views] extend = {} include = {} for c in views: data = safe_read(apath('%s/views/%s' % (app, c), r=request)) items = regex_extend.findall(data) if items: extend[c] = items[0][1] items = regex_include.findall(data) include[c] = [i[1] for i in items] # Get all modules modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$') modules = modules = [x.replace('\\', '/') for x in modules] modules.sort() # Get all private files privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*') privates = [x.replace('\\', '/') for x in privates] privates.sort() # Get all static files statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*', maxnum=MAXNFILES) statics = [x.replace(os.path.sep, '/') for x in statics] statics.sort() # Get all languages languages = sorted([lang + '.py' for lang, info in T.get_possible_languages_info().iteritems() if info[2] != 0]) # info[2] is langfile_mtime: # get only existed files # Get crontab crontab = apath('%s/cron/crontab' % app, r=request) if not os.path.exists(crontab): safe_write(crontab, '#crontab') def filter_plugins(items): regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$') return [item for item in items if item and regex.match(item)] return dict(app=app, models=filter_plugins(models), defines=defines, controllers=filter_plugins(controllers), functions=functions, views=filter_plugins(views), modules=filter_plugins(modules), extend=extend, include=include, privates=filter_plugins(privates), statics=filter_plugins(statics), languages=languages, crontab=crontab) def create_file(): """ Create files handler """ 
if request.vars and not request.vars.token == session.token: redirect(URL('logout')) try: anchor = '#' + request.vars.id if request.vars.id else '' if request.vars.app: app = get_app(request.vars.app) path = abspath(request.vars.location) else: if request.vars.dir: request.vars.location += request.vars.dir + '/' app = get_app(name=request.vars.location.split('/')[0]) path = apath(request.vars.location, r=request) filename = re.sub('[^\w./-]+', '_', request.vars.filename) if path[-7:] == '/rules/': # Handle plural rules files if len(filename) == 0: raise SyntaxError if not filename[-3:] == '.py': filename += '.py' lang = re.match('^plural_rules-(.*)\.py$', filename).group(1) langinfo = read_possible_languages(apath(app, r=request))[lang] text = dedent(""" #!/usr/bin/env python # -*- coding: utf-8 -*- # Plural-Forms for %(lang)s (%(langname)s) nplurals=2 # for example, English language has 2 forms: # 1 singular and 1 plural # Determine plural_id for number *n* as sequence of positive # integers: 0,1,... # NOTE! For singular form ALWAYS return plural_id = 0 get_plural_id = lambda n: int(n != 1) # Construct and return plural form of *word* using # *plural_id* (which ALWAYS>0). This function will be executed # for words (or phrases) not found in plural_dict dictionary. 
# By default this function simply returns word in singular: construct_plural_form = lambda word, plural_id: word """)[1:] % dict(lang=langinfo[0], langname=langinfo[1]) elif path[-11:] == '/languages/': # Handle language files if len(filename) == 0: raise SyntaxError if not filename[-3:] == '.py': filename += '.py' path = os.path.join(apath(app, r=request), 'languages', filename) if not os.path.exists(path): safe_write(path, '') # create language xx[-yy].py file: findT(apath(app, r=request), filename[:-3]) session.flash = T('language file "%(filename)s" created/updated', dict(filename=filename)) redirect(request.vars.sender + anchor) elif path[-8:] == '/models/': # Handle python models if not filename[-3:] == '.py': filename += '.py' if len(filename) == 3: raise SyntaxError text = '# -*- coding: utf-8 -*-\n' elif path[-13:] == '/controllers/': # Handle python controllers if not filename[-3:] == '.py': filename += '.py' if len(filename) == 3: raise SyntaxError text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")' text = text % (T('try something like'), filename) elif path[-7:] == '/views/': if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin): filename = 'plugin_%s/%s' % (request.vars.plugin, filename) # Handle template (html) views if filename.find('.') < 0: filename += '.html' extension = filename.split('.')[-1].lower() if len(filename) == 5: raise SyntaxError msg = T( 'This is the %(filename)s template', dict(filename=filename)) if extension == 'html': text = dedent(""" {{extend 'layout.html'}} <h1>%s</h1> {{=BEAUTIFY(response._vars)}}""" % msg)[1:] else: generic = os.path.join(path, 'generic.' 
+ extension) if os.path.exists(generic): text = read_file(generic) else: text = '' elif path[-9:] == '/modules/': if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin): filename = 'plugin_%s/%s' % (request.vars.plugin, filename) # Handle python module files if not filename[-3:] == '.py': filename += '.py' if len(filename) == 3: raise SyntaxError text = dedent(""" #!/usr/bin/env python # -*- coding: utf-8 -*- from gluon import *\n""")[1:] elif (path[-8:] == '/static/') or (path[-9:] == '/private/'): if (request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin)): filename = 'plugin_%s/%s' % (request.vars.plugin, filename) text = '' else: redirect(request.vars.sender + anchor) full_filename = os.path.join(path, filename) dirpath = os.path.dirname(full_filename) if not os.path.exists(dirpath): os.makedirs(dirpath) if os.path.exists(full_filename): raise SyntaxError safe_write(full_filename, text) log_progress(app, 'CREATE', filename) if request.vars.dir: result = T('file "%(filename)s" created', dict(filename=full_filename[len(path):])) else: session.flash = T('file "%(filename)s" created', dict(filename=full_filename[len(path):])) vars = {} if request.vars.id: vars['id'] = request.vars.id if request.vars.app: vars['app'] = request.vars.app redirect(URL('edit', args=[os.path.join(request.vars.location, filename)], vars=vars)) except Exception, e: if not isinstance(e, HTTP): session.flash = T('cannot create file') if request.vars.dir: response.flash = result response.headers['web2py-component-content'] = 'append' response.headers['web2py-component-command'] = "%s %s %s" % ( "$.web2py.invalidate('#files_menu');", "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]), "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));") return '' else: redirect(request.vars.sender + anchor) def listfiles(app, dir, regexp='.*\.py$'): files = sorted( listdir(apath('%(app)s/%(dir)s/' % 
{'app': app, 'dir': dir}, r=request), regexp)) files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')] return files def editfile(path, file, vars={}, app=None): args = (path, file) if 'app' in vars else (app, path, file) url = URL('edit', args=args, vars=vars) return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;') def files_menu(): app = request.vars.app or 'welcome' dirs = [{'name': 'models', 'reg': '.*\.py$'}, {'name': 'controllers', 'reg': '.*\.py$'}, {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'}, {'name': 'modules', 'reg': '.*\.py$'}, {'name': 'static', 'reg': '[^\.#].*'}, {'name': 'private', 'reg': '.*\.py$'}] result_files = [] for dir in dirs: result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"), LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__')) for f in listfiles(app, dir['name'], regexp=dir['reg'])], _class="nav nav-list small-font"), _id=dir['name'] + '_files', _style="display: none;"))) return dict(result_files=result_files) def upload_file(): """ File uploading handler """ if request.vars and not request.vars.token == session.token: redirect(URL('logout')) try: filename = None app = get_app(name=request.vars.location.split('/')[0]) path = apath(request.vars.location, r=request) if request.vars.filename: filename = re.sub('[^\w\./]+', '_', request.vars.filename) else: filename = os.path.split(request.vars.file.filename)[-1] if path[-8:] == '/models/' and not filename[-3:] == '.py': filename += '.py' if path[-9:] == '/modules/' and not filename[-3:] == '.py': filename += '.py' if path[-13:] == '/controllers/' and not filename[-3:] == '.py': filename += '.py' if path[-7:] == '/views/' and not filename[-5:] == '.html': filename += '.html' if path[-11:] == '/languages/' and not filename[-3:] == '.py': filename += '.py' filename = 
os.path.join(path, filename) dirpath = os.path.dirname(filename) if not os.path.exists(dirpath): os.makedirs(dirpath) data = request.vars.file.file.read() lineno = count_lines(data) safe_write(filename, data, 'wb') log_progress(app, 'UPLOAD', filename, lineno) session.flash = T('file "%(filename)s" uploaded', dict(filename=filename[len(path):])) except Exception: if filename: d = dict(filename=filename[len(path):]) else: d = dict(filename='unknown') session.flash = T('cannot upload file "%(filename)s"', d) redirect(request.vars.sender) def errors(): """ Error handler """ import operator import os import pickle import hashlib app = get_app() if is_gae: method = 'dbold' if ('old' in (request.args(1) or '')) else 'dbnew' else: method = request.args(1) or 'new' db_ready = {} db_ready['status'] = get_ticket_storage(app) db_ready['errmessage'] = T( "No ticket_storage.txt found under /private folder") db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets" if method == 'new': errors_path = apath('%s/errors' % app, r=request) delete_hashes = [] for item in request.vars: if item[:7] == 'delete_': delete_hashes.append(item[7:]) hash2error = dict() for fn in listdir(errors_path, '^[a-fA-F0-9.\-]+$'): fullpath = os.path.join(errors_path, fn) if not os.path.isfile(fullpath): continue try: fullpath_file = open(fullpath, 'r') try: error = pickle.load(fullpath_file) finally: fullpath_file.close() except IOError: continue except EOFError: continue hash = hashlib.md5(error['traceback']).hexdigest() if hash in delete_hashes: os.unlink(fullpath) else: try: hash2error[hash]['count'] += 1 except KeyError: error_lines = error['traceback'].split("\n") last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown' error_causer = os.path.split(error['layer'])[1] hash2error[hash] = dict(count=1, pickel=error, causer=error_causer, last_line=last_line, hash=hash, ticket=fn) decorated = [(x['count'], x) for x in hash2error.values()] 
decorated.sort(key=operator.itemgetter(0), reverse=True) return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready) elif method == 'dbnew': errors_path = apath('%s/errors' % app, r=request) tk_db, tk_table = get_ticket_storage(app) delete_hashes = [] for item in request.vars: if item[:7] == 'delete_': delete_hashes.append(item[7:]) hash2error = dict() for fn in tk_db(tk_table.id > 0).select(): try: error = pickle.loads(fn.ticket_data) hash = hashlib.md5(error['traceback']).hexdigest() if hash in delete_hashes: tk_db(tk_table.id == fn.id).delete() tk_db.commit() else: try: hash2error[hash]['count'] += 1 except KeyError: error_lines = error['traceback'].split("\n") last_line = error_lines[-2] error_causer = os.path.split(error['layer'])[1] hash2error[hash] = dict(count=1, pickel=error, causer=error_causer, last_line=last_line, hash=hash, ticket=fn.ticket_id) except AttributeError, e: tk_db(tk_table.id == fn.id).delete() tk_db.commit() decorated = [(x['count'], x) for x in hash2error.values()] decorated.sort(key=operator.itemgetter(0), reverse=True) return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready) elif method == 'dbold': tk_db, tk_table = get_ticket_storage(app) for item in request.vars: if item[:7] == 'delete_': tk_db(tk_table.ticket_id == item[7:]).delete() tk_db.commit() tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id, tk_table.created_datetime, orderby=~tk_table.created_datetime) tickets = [row.ticket_id for row in tickets_] times = dict([(row.ticket_id, row.created_datetime) for row in tickets_]) return dict(app=app, tickets=tickets, method=method, times=times, db_ready=db_ready) else: for item in request.vars: # delete_all rows doesn't contain any ticket # Remove anything else as requested if item[:7] == 'delete_' and (not item == "delete_all}"): os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request)) func = lambda p: os.stat(apath('%s/errors/%s' % (app, p), 
r=request)).st_mtime tickets = sorted( listdir(apath('%s/errors/' % app, r=request), '^\w.*'), key=func, reverse=True) return dict(app=app, tickets=tickets, method=method, db_ready=db_ready) def get_ticket_storage(app): private_folder = apath('%s/private' % app, r=request) ticket_file = os.path.join(private_folder, 'ticket_storage.txt') if os.path.exists(ticket_file): db_string = open(ticket_file).read() db_string = db_string.strip().replace('\r', '').replace('\n', '') elif is_gae: # use Datastore as fallback if there is no ticket_file db_string = "google:datastore" else: return False tickets_table = 'web2py_ticket' tablename = tickets_table + '_' + app db_path = apath('%s/databases' % app, r=request) ticketsdb = DAL(db_string, folder=db_path, auto_import=True) if not ticketsdb.get(tablename): table = ticketsdb.define_table( tablename, Field('ticket_id', length=100), Field('ticket_data', 'text'), Field('created_datetime', 'datetime'), ) return ticketsdb, ticketsdb.get(tablename) def make_link(path): """ Create a link from a path """ tryFile = path.replace('\\', '/') if os.path.isabs(tryFile) and os.path.isfile(tryFile): (folder, filename) = os.path.split(tryFile) (base, ext) = os.path.splitext(filename) app = get_app() editable = {'controllers': '.py', 'models': '.py', 'views': '.html'} for key in editable.keys(): check_extension = folder.endswith("%s/%s" % (app, key)) if ext.lower() == editable[key] and check_extension: return A('"' + tryFile + '"', _href=URL(r=request, f='edit/%s/%s/%s' % (app, key, filename))).xml() return '' def make_links(traceback): """ Make links using the given traceback """ lwords = traceback.split('"') # Making the short circuit compatible with <= python2.4 result = (len(lwords) != 0) and lwords[0] or '' i = 1 while i < len(lwords): link = make_link(lwords[i]) if link == '': result += '"' + lwords[i] else: result += link if i + 1 < len(lwords): result += lwords[i + 1] i = i + 1 i = i + 1 return result class TRACEBACK(object): """ Generate 
the traceback """ def __init__(self, text): """ TRACEBACK constructor """ self.s = make_links(CODE(text).xml()) def xml(self): """ Returns the xml """ return self.s def ticket(): """ Ticket handler """ if len(request.args) != 2: session.flash = T('invalid ticket') redirect(URL('site')) app = get_app() myversion = request.env.web2py_version ticket = request.args[1] e = RestrictedError() e.load(request, app, ticket) return dict(app=app, ticket=ticket, output=e.output, traceback=(e.traceback and TRACEBACK(e.traceback)), snapshot=e.snapshot, code=e.code, layer=e.layer, myversion=myversion) def ticketdb(): """ Ticket handler """ if len(request.args) != 2: session.flash = T('invalid ticket') redirect(URL('site')) app = get_app() myversion = request.env.web2py_version ticket = request.args[1] e = RestrictedError() request.tickets_db = get_ticket_storage(app)[0] e.load(request, app, ticket) response.view = 'default/ticket.html' return dict(app=app, ticket=ticket, output=e.output, traceback=(e.traceback and TRACEBACK(e.traceback)), snapshot=e.snapshot, code=e.code, layer=e.layer, myversion=myversion) def error(): """ Generate a ticket (for testing) """ raise RuntimeError('admin ticket generator at your service') def update_languages(): """ Update available languages """ app = get_app() update_all_languages(apath(app, r=request)) session.flash = T('Language files (static strings) updated') redirect(URL('design', args=app, anchor='languages')) def user(): if MULTI_USER_MODE: if not db(db.auth_user).count(): auth.settings.registration_requires_approval = False return dict(form=auth()) else: return dict(form=T("Disabled")) def reload_routes(): """ Reload routes.py """ import gluon.rewrite gluon.rewrite.load() redirect(URL('site')) def manage_students(): if not (MULTI_USER_MODE and is_manager()): session.flash = T('Not Authorized') redirect(URL('site')) db.auth_user.registration_key.writable = True grid = SQLFORM.grid(db.auth_user) return locals() def bulk_register(): if not 
(MULTI_USER_MODE and is_manager()): session.flash = T('Not Authorized') redirect(URL('site')) form = SQLFORM.factory(Field('emails', 'text')) if form.process().accepted: emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()] n = 0 for email in emails: if not db.auth_user(email=email): n += db.auth_user.insert(email=email) and 1 or 0 session.flash = T('%s students registered', n) redirect(URL('site')) return locals() # Begin experimental stuff need fixes: # 1) should run in its own process - cannot os.chdir # 2) should not prompt user at console # 3) should give option to force commit and not reuqire manual merge def git_pull(): """ Git Pull handler """ app = get_app() if not have_git: session.flash = GIT_MISSING redirect(URL('site')) dialog = FORM.confirm(T('Pull'), {T('Cancel'): URL('site')}) if dialog.accepted: try: repo = git.Repo(os.path.join(apath(r=request), app)) origin = repo.remotes.origin origin.fetch() origin.pull() session.flash = T("Application updated via git pull") redirect(URL('site')) except git.CheckoutError: session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.") redirect(URL('site')) except git.UnmergedEntriesError: session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.") redirect(URL('site')) except git.GitCommandError: session.flash = T( "Pull failed, git exited abnormally. See logs for details.") redirect(URL('site')) except AssertionError: session.flash = T("Pull is not possible because you have unmerged files. 
Fix them up in the work tree, and then try again.") redirect(URL('site')) elif 'cancel' in request.vars: redirect(URL('site')) return dict(app=app, dialog=dialog) def git_push(): """ Git Push handler """ app = get_app() if not have_git: session.flash = GIT_MISSING redirect(URL('site')) form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY())) form.element('input[type=submit]')['_value'] = T('Push') form.add_button(T('Cancel'), URL('site')) form.process() if form.accepted: try: repo = git.Repo(os.path.join(apath(r=request), app)) index = repo.index index.add([apath(r=request) + app + '/*']) new_commit = index.commit(form.vars.changelog) origin = repo.remotes.origin origin.push() session.flash = T( "Git repo updated with latest application changes.") redirect(URL('site')) except git.UnmergedEntriesError: session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.") redirect(URL('site')) return dict(app=app, form=form) def plugins(): app = request.args(0) from serializers import loads_json if not session.plugins: try: rawlist = urllib.urlopen("http://www.web2pyslices.com/" + "public/api.json/action/list/content/Package?package" + "_type=plugin&search_index=false").read() session.plugins = loads_json(rawlist) except: response.flash = T('Unable to download the list of plugins') session.plugins = [] return dict(plugins=session.plugins["results"], app=request.args(0)) def install_plugin(): app = request.args(0) source = request.vars.source plugin = request.vars.plugin if not (source and app): raise HTTP(500, T("Invalid request")) # make sure no XSS attacks in source if not source.lower().split('://')[0] in ('http','https'): raise HTTP(500, T("Invalid request")) form = SQLFORM.factory() result = None if form.process().accepted: # get w2p plugin if "web2py.plugin." 
in source: filename = "web2py.plugin.%s.w2p" % \ source.split("web2py.plugin.")[-1].split(".w2p")[0] else: filename = "web2py.plugin.%s.w2p" % cleanpath(plugin) if plugin_install(app, urllib.urlopen(source), request, filename): session.flash = T('New plugin installed: %s', filename) else: session.flash = \ T('unable to install plugin "%s"', filename) redirect(URL(f="plugins", args=[app, ])) return dict(form=form, app=app, plugin=plugin, source=source)
./CrossVul/dataset_final_sorted/CWE-254/py/good_4859_0
crossvul-python_data_bad_4859_0
# -*- coding: utf-8 -*- EXPERIMENTAL_STUFF = True MAXNFILES = 1000 if EXPERIMENTAL_STUFF: if is_mobile: response.view = response.view.replace('default/', 'default.mobile/') response.menu = [] import re from gluon.admin import * from gluon.fileutils import abspath, read_file, write_file from gluon.utils import web2py_uuid from gluon.tools import Config from gluon.compileapp import find_exposed_functions from glob import glob import shutil import platform try: import git if git.__version__ < '0.3.1': raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__) have_git = True except ImportError, e: have_git = False GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e from gluon.languages import (read_possible_languages, read_dict, write_dict, read_plural_dict, write_plural_dict) if DEMO_MODE and request.function in ['change_password', 'pack', 'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall', 'cleanup', 'compile_app', 'remove_compiled_app', 'delete', 'delete_plugin', 'create_file', 'upload_file', 'update_languages', 'reload_routes', 'git_push', 'git_pull', 'install_plugin']: session.flash = T('disabled in demo mode') redirect(URL('site')) if is_gae and request.function in ('edit', 'edit_language', 'edit_plurals', 'update_languages', 'create_file', 'install_plugin'): session.flash = T('disabled in GAE mode') redirect(URL('site')) if not is_manager() and request.function in ['change_password', 'upgrade_web2py']: session.flash = T('disabled in multi user mode') redirect(URL('site')) if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS: session.flash = T('disabled in demo mode') redirect(URL('site')) if not session.token: session.token = web2py_uuid() def count_lines(data): return len([line for line in data.split('\n') if line.strip() and not line.startswith('#')]) def log_progress(app, mode='EDIT', filename=None, progress=0): progress_file = os.path.join(apath(app, 
r=request), 'progress.log') now = str(request.now)[:19] if not os.path.exists(progress_file): safe_open(progress_file, 'w').write('[%s] START\n' % now) if filename: safe_open(progress_file, 'a').write( '[%s] %s %s: %s\n' % (now, mode, filename, progress)) def safe_open(a, b): if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b): class tmp: def write(self, data): pass def close(self): pass return tmp() return open(a, b) def safe_read(a, b='r'): safe_file = safe_open(a, b) try: return safe_file.read() finally: safe_file.close() def safe_write(a, value, b='w'): safe_file = safe_open(a, b) try: safe_file.write(value) finally: safe_file.close() def get_app(name=None): app = name or request.args(0) if (app and os.path.exists(apath(app, r=request)) and (not MULTI_USER_MODE or is_manager() or db(db.app.name == app)(db.app.owner == auth.user.id).count())): return app session.flash = T('App does not exist or you are not authorized') redirect(URL('site')) def index(): """ Index handler """ send = request.vars.send if DEMO_MODE: session.authorized = True session.last_time = t0 if not send: send = URL('site') if session.authorized: redirect(send) elif request.vars.password: if verify_password(request.vars.password[:1024]): session.authorized = True login_record(True) if CHECK_VERSION: session.check_version = True else: session.check_version = False session.last_time = t0 if isinstance(send, list): # ## why does this happen? 
send = str(send[0]) redirect(send) else: times_denied = login_record(False) if times_denied >= allowed_number_of_attempts: response.flash = \ T('admin disabled because too many invalid login attempts') elif times_denied == allowed_number_of_attempts - 1: response.flash = \ T('You have one more login attempt before you are locked out') else: response.flash = T('invalid password.') return dict(send=send) def check_version(): """ Checks if web2py is up to date """ session.forget() session._unlock(response) new_version, version = check_new_version(request.env.web2py_version, WEB2PY_VERSION_URL) if new_version == -1: return A(T('Unable to check for upgrades'), _href=WEB2PY_URL) elif new_version != True: return A(T('web2py is up to date'), _href=WEB2PY_URL) elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"): return SPAN('You should upgrade to %s' % version.split('(')[0]) else: return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0]) def logout(): """ Logout handler """ session.authorized = None if MULTI_USER_MODE: redirect(URL('user/logout')) redirect(URL('index')) def change_password(): if session.pam_user: session.flash = T( 'PAM authenticated user, cannot change password here') redirect(URL('site')) form = SQLFORM.factory(Field('current_admin_password', 'password'), Field('new_admin_password', 'password', requires=IS_STRONG()), Field('new_admin_password_again', 'password'), _class="span4 well") if form.accepts(request.vars): if not verify_password(request.vars.current_admin_password): form.errors.current_admin_password = T('invalid password') elif form.vars.new_admin_password != form.vars.new_admin_password_again: form.errors.new_admin_password_again = T('no match') else: path = abspath('parameters_%s.py' % request.env.server_port) safe_write(path, 'password="%s"' % CRYPT()( request.vars.new_admin_password)[0]) session.flash = T('password changed') redirect(URL('site')) return dict(form=form) 
# ---------------------------------------------------------------------------
# NOTE(review): the remainder of this controller (site, report_progress,
# pack, pack_plugin, pack_exe, pack_custom, upgrade_web2py, uninstall,
# cleanup, compile_app, remove_compiled_app, delete, enable, peek, test,
# keepalive, search, edit, todolist, editor_sessions, resolve,
# edit_language, edit_plurals, about, design) is left byte-for-byte
# untouched: these handlers are long, order-sensitive (session/redirect
# side effects throughout) and depend on the web2py execution environment,
# so only comments are added, and only at points that are provably
# original line breaks.
# ---------------------------------------------------------------------------
def site(): """ Site handler """ myversion = request.env.web2py_version # Shortcut to make the elif statements more legible file_or_appurl = 'file' in request.vars or 'appurl' in request.vars class IS_VALID_APPNAME(object): def __call__(self, value): if not re.compile('^\w+$').match(value): return (value, T('Invalid application name')) if not request.vars.overwrite and \ os.path.exists(os.path.join(apath(r=request), value)): return (value, T('Application exists already')) return (value, None) is_appname = IS_VALID_APPNAME() form_create = SQLFORM.factory(Field('name', requires=is_appname), table_name='appcreate') form_update = SQLFORM.factory(Field('name', requires=is_appname), Field('file', 'upload', uploadfield=False), Field('url'), Field('overwrite', 'boolean'), table_name='appupdate') form_create.process() form_update.process() if DEMO_MODE: pass elif form_create.accepted: # create a new application appname = cleanpath(form_create.vars.name) created, error = app_create(appname, request, info=True) if created: if MULTI_USER_MODE: db.app.insert(name=appname, owner=auth.user.id) log_progress(appname) session.flash = T('new application "%s" created', appname) redirect(URL('design', args=appname)) else: session.flash = \ DIV(T('unable to create application "%s"', appname), PRE(error)) redirect(URL(r=request)) elif form_update.accepted: if (form_update.vars.url or '').endswith('.git'): if not have_git: session.flash = GIT_MISSING redirect(URL(r=request)) target = os.path.join(apath(r=request), form_update.vars.name) try: new_repo = git.Repo.clone_from(form_update.vars.url, target) session.flash = T('new application "%s" imported', form_update.vars.name) except git.GitCommandError, err: session.flash = T('Invalid git repository specified.') redirect(URL(r=request)) elif form_update.vars.url: # fetch an application via URL or file upload try: f = urllib.urlopen(form_update.vars.url) if f.code == 404: raise Exception("404 file not found") except Exception, e:
session.flash = \ DIV(T('Unable to download app because:'), PRE(repr(e))) redirect(URL(r=request)) fname = form_update.vars.url elif form_update.accepted and form_update.vars.file: fname = request.vars.file.filename f = request.vars.file.file else: session.flash = 'No file uploaded and no URL specified' redirect(URL(r=request)) if f: appname = cleanpath(form_update.vars.name) installed = app_install(appname, f, request, fname, overwrite=form_update.vars.overwrite) if f and installed: msg = 'application %(appname)s installed with md5sum: %(digest)s' if MULTI_USER_MODE: db.app.insert(name=appname, owner=auth.user.id) log_progress(appname) session.flash = T(msg, dict(appname=appname, digest=md5_hash(installed))) else: msg = 'unable to install application "%(appname)s"' session.flash = T(msg, dict(appname=form_update.vars.name)) redirect(URL(r=request)) regex = re.compile('^\w+$') if is_manager(): apps = [f for f in os.listdir(apath(r=request)) if regex.match(f)] else: apps = [f.name for f in db(db.app.owner == auth.user_id).select()] if FILTER_APPS: apps = [f for f in apps if f in FILTER_APPS] apps = sorted(apps, lambda a, b: cmp(a.upper(), b.upper())) myplatform = platform.python_version() return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform, form_create=form_create, form_update=form_update) def report_progress(app): import datetime progress_file = os.path.join(apath(app, r=request), 'progress.log') regex = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)') if not os.path.exists(progress_file): return [] matches = regex.findall(open(progress_file, 'r').read()) events, counter = [], 0 for m in matches: if not m: continue days = -(request.now - datetime.datetime.strptime(m[0], '%Y-%m-%d %H:%M:%S')).days counter += int(m[1]) events.append([days, counter]) return events def pack(): app = get_app() try: if len(request.args) == 1: fname = 'web2py.app.%s.w2p' % app filename = app_pack(app, request, raise_ex=True) else: fname = 'web2py.app.%s.compiled.w2p' %
app filename = app_pack_compiled(app, request, raise_ex=True) except Exception, e: filename = None if filename: response.headers['Content-Type'] = 'application/w2p' disposition = 'attachment; filename=%s' % fname response.headers['Content-Disposition'] = disposition return safe_read(filename, 'rb') else: session.flash = T('internal error: %s', e) redirect(URL('site')) def pack_plugin(): app = get_app() if len(request.args) == 2: fname = 'web2py.plugin.%s.w2p' % request.args[1] filename = plugin_pack(app, request.args[1], request) if filename: response.headers['Content-Type'] = 'application/w2p' disposition = 'attachment; filename=%s' % fname response.headers['Content-Disposition'] = disposition return safe_read(filename, 'rb') else: session.flash = T('internal error') redirect(URL('plugin', args=request.args)) def pack_exe(app, base, filenames=None): import urllib import zipfile from cStringIO import StringIO # Download latest web2py_win and open it with zipfile download_url = 'http://www.web2py.com/examples/static/web2py_win.zip' out = StringIO() out.write(urllib.urlopen(download_url).read()) web2py_win = zipfile.ZipFile(out, mode='a') # Write routes.py with the application as default routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app web2py_win.writestr('web2py/routes.py', routes.encode('utf-8')) # Copy the application into the zipfile common_root = os.path.dirname(base) for filename in filenames: fname = os.path.join(base, filename) arcname = os.path.join('web2py/applications', app, filename) web2py_win.write(fname, arcname) web2py_win.close() response.headers['Content-Type'] = 'application/zip' response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app out.seek(0) return response.stream(out) def pack_custom(): app = get_app() base = apath(app, r=request) def ignore(fs): return [f for f in fs if not ( f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))] files = {} for (r, d, f) in
os.walk(base): files[r] = {'folders': ignore(d), 'files': ignore(f)} if request.post_vars.file: valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files']) files = request.post_vars.file files = [files] if not isinstance(files, list) else files files = [file for file in files if file in valid_set] if request.post_vars.doexe is None: fname = 'web2py.app.%s.w2p' % app try: filename = app_pack(app, request, raise_ex=True, filenames=files) except Exception, e: filename = None if filename: response.headers['Content-Type'] = 'application/w2p' disposition = 'attachment; filename=%s' % fname response.headers['Content-Disposition'] = disposition return safe_read(filename, 'rb') else: session.flash = T('internal error: %s', e) redirect(URL(args=request.args)) else: return pack_exe(app, base, files) return locals() def upgrade_web2py(): dialog = FORM.confirm(T('Upgrade'), {T('Cancel'): URL('site')}) if dialog.accepted: (success, error) = upgrade(request) if success: session.flash = T('web2py upgraded; please restart it') else: session.flash = T('unable to upgrade because "%s"', error) redirect(URL('site')) return dict(dialog=dialog) def uninstall(): app = get_app() dialog = FORM.confirm(T('Uninstall'), {T('Cancel'): URL('site')}) dialog['_id'] = 'confirm_form' dialog['_class'] = 'well' for component in dialog.components: component['_class'] = 'btn' if dialog.accepted: if MULTI_USER_MODE: if is_manager() and db(db.app.name == app).delete(): pass elif db(db.app.name == app)(db.app.owner == auth.user.id).delete(): pass else: session.flash = T('no permission to uninstall "%s"', app) redirect(URL('site')) try: filename = app_pack(app, request, raise_ex=True) except: session.flash = T('unable to uninstall "%s"', app) else: if app_uninstall(app, request): session.flash = T('application "%s" uninstalled', app) else: session.flash = T('unable to uninstall "%s"', app) redirect(URL('site')) return dict(app=app, dialog=dialog) def cleanup(): app =
get_app() clean = app_cleanup(app, request) if not clean: session.flash = T("some files could not be removed") else: session.flash = T('cache, errors and sessions cleaned') redirect(URL('site')) def compile_app(): app = get_app() c = app_compile(app, request, skip_failed_views=(request.args(1) == 'skip_failed_views')) if not c: session.flash = T('application compiled') elif isinstance(c, list): session.flash = DIV(*[T('application compiled'), BR(), BR(), T('WARNING: The following views could not be compiled:'), BR()] + [CAT(BR(), view) for view in c] + [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')]) else: session.flash = DIV(T('Cannot compile: there are errors in your app:'), CODE(c)) redirect(URL('site')) def remove_compiled_app(): """ Remove the compiled application """ app = get_app() remove_compiled_application(apath(app, r=request)) session.flash = T('compiled application removed') redirect(URL('site')) def delete(): """ Object delete handler """ app = get_app() filename = '/'.join(request.args) sender = request.vars.sender if isinstance(sender, list): # ## fix a problem with Vista sender = sender[0] dialog = FORM.confirm(T('Delete'), {T('Cancel'): URL(sender, anchor=request.vars.id)}) if dialog.accepted: try: full_path = apath(filename, r=request) lineno = count_lines(open(full_path, 'r').read()) os.unlink(full_path) log_progress(app, 'DELETE', filename, progress=-lineno) session.flash = T('file "%(filename)s" deleted', dict(filename=filename)) except Exception: session.flash = T('unable to delete file "%(filename)s"', dict(filename=filename)) redirect(URL(sender, anchor=request.vars.id2)) return dict(dialog=dialog, filename=filename) def enable(): if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401) app = get_app() filename = os.path.join(apath(app, r=request), 'DISABLED') if is_gae: return SPAN(T('Not supported'), _style='color:yellow') elif os.path.exists(filename): os.unlink(filename) return SPAN(T('Disable'),
_style='color:green') else: safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now) return SPAN(T('Enable'), _style='color:red') def peek(): """ Visualize object code """ app = get_app(request.vars.app) filename = '/'.join(request.args) if request.vars.app: path = abspath(filename) else: path = apath(filename, r=request) try: data = safe_read(path).replace('\r', '') except IOError: session.flash = T('file does not exist') redirect(URL('site')) extension = filename[filename.rfind('.') + 1:].lower() return dict(app=app, filename=filename, data=data, extension=extension) def test(): """ Execute controller tests """ app = get_app() if len(request.args) > 1: file = request.args[1] else: file = '.*\.py' controllers = listdir( apath('%s/controllers/' % app, r=request), file + '$') return dict(app=app, controllers=controllers) def keepalive(): return '' def search(): keywords = request.vars.keywords or '' app = get_app() def match(filename, keywords): filename = os.path.join(apath(app, r=request), filename) if keywords in read_file(filename, 'rb'): return True return False path = apath(request.args[0], r=request) files1 = glob(os.path.join(path, '*/*.py')) files2 = glob(os.path.join(path, '*/*.html')) files3 = glob(os.path.join(path, '*/*/*.html')) files = [x[len(path) + 1:].replace( '\\', '/') for x in files1 + files2 + files3 if match(x, keywords)] return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files)))) def edit(): """ File edit handler """ # Load json only if it is ajax edited...
# edit() body: read editor preferences from settings.cfg; non-ajax requests
# get the scaffolding page, and the 'settings' tab saves preferences.
app = get_app(request.vars.app) app_path = apath(app, r=request) preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'} config = Config(os.path.join(request.folder, 'settings.cfg'), section='editor', default_values={}) preferences.update(config.read()) if not(request.ajax) and not(is_mobile): # return the scaffolding, the rest will be through ajax requests response.title = T('Editing %s') % app return response.render('default/edit.html', dict(app=app, editor_settings=preferences)) # show settings tab and save prefernces if 'settings' in request.vars: if request.post_vars: # save new preferences post_vars = request.post_vars.items() # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars] if config.save(post_vars): response.headers["web2py-component-flash"] = T('Preferences saved correctly') else: response.headers["web2py-component-flash"] = T('Preferences saved on session only') response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read()) return else: details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False} details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences}) return response.json(details) """ File edit handler """ # Load json only if it is ajax edited...
# edit() continues: resolve the target file, classify its type, then handle
# revert (restore from .bak) or plain load, detecting concurrent edits via
# the md5 file_hash.
app = get_app(request.vars.app) filename = '/'.join(request.args) realfilename = request.args[-1] if request.vars.app: path = abspath(filename) else: path = apath(filename, r=request) # Try to discover the file type if filename[-3:] == '.py': filetype = 'python' elif filename[-5:] == '.html': filetype = 'html' elif filename[-5:] == '.load': filetype = 'html' elif filename[-4:] == '.css': filetype = 'css' elif filename[-3:] == '.js': filetype = 'javascript' else: filetype = 'html' # ## check if file is not there if ('revert' in request.vars) and os.path.exists(path + '.bak'): try: data = safe_read(path + '.bak') data1 = safe_read(path) except IOError: session.flash = T('Invalid action') if 'from_ajax' in request.vars: return response.json({'error': str(T('Invalid action'))}) else: redirect(URL('site')) safe_write(path, data) file_hash = md5_hash(data) saved_on = time.ctime(os.stat(path)[stat.ST_MTIME]) safe_write(path + '.bak', data1) response.flash = T('file "%s" of %s restored', (filename, saved_on)) else: try: data = safe_read(path) except IOError: session.flash = T('Invalid action') if 'from_ajax' in request.vars: return response.json({'error': str(T('Invalid action'))}) else: redirect(URL('site')) lineno_old = count_lines(data) file_hash = md5_hash(data) saved_on = time.ctime(os.stat(path)[stat.ST_MTIME]) if request.vars.file_hash and request.vars.file_hash != file_hash: session.flash = T('file changed on disk') data = request.vars.data.replace('\r\n', '\n').strip() + '\n' safe_write(path + '.1', data) if 'from_ajax' in request.vars: return response.json({'error': str(T('file changed on disk')), 'redirect': URL('resolve', args=request.args)}) else: redirect(URL('resolve', args=request.args)) elif request.vars.data: safe_write(path + '.bak', data) data = request.vars.data.replace('\r\n', '\n').strip() + '\n' safe_write(path, data) lineno_new = count_lines(data) log_progress( app, 'EDIT', filename, progress=lineno_new - lineno_old) file_hash = md5_hash(data)
# edit() continues: on save of a python file, parse to AST so syntax errors
# can be reported with line/offset highlighting, and reload edited modules.
saved_on = time.ctime(os.stat(path)[stat.ST_MTIME]) response.flash = T('file saved on %s', saved_on) data_or_revert = (request.vars.data or request.vars.revert) # Check compile errors highlight = None if filetype == 'python' and request.vars.data: import _ast try: code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n' compile(code, path, "exec", _ast.PyCF_ONLY_AST) except Exception, e: # offset calculation is only used for textarea (start/stop) start = sum([len(line) + 1 for l, line in enumerate(request.vars.data.split("\n")) if l < e.lineno - 1]) if e.text and e.offset: offset = e.offset - (len(e.text) - len( e.text.splitlines()[-1])) else: offset = 0 highlight = {'start': start, 'end': start + offset + 1, 'lineno': e.lineno, 'offset': offset} try: ex_name = e.__class__.__name__ except: ex_name = 'unknown exception!' response.flash = DIV(T('failed to compile file because:'), BR(), B(ex_name), ' ' + T('at line %s', e.lineno), offset and ' ' + T('at char %s', offset) or '', PRE(repr(e))) if data_or_revert and request.args[1] == 'modules': # Lets try to reload the modules try: mopath = '.'.join(request.args[2:])[:-3] exec 'import applications.%s.modules.%s' % ( request.args[0], mopath) reload(sys.modules['applications.%s.modules.%s' % (request.args[0], mopath)]) except Exception, e: response.flash = DIV( T('failed to reload module because:'), PRE(repr(e))) edit_controller = None editviewlinks = None view_link = None if filetype == 'html' and len(request.args) >= 3: cfilename = os.path.join(request.args[0], 'controllers', request.args[2] + '.py') if os.path.exists(apath(cfilename, r=request)): edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")]) view = request.args[3].replace('.html', '') view_link = URL(request.args[0], request.args[2], view) elif filetype == 'python' and request.args[1] == 'controllers': # it's a controller file. # Create links to all of the associated view files.
# edit() continues: cross-link controllers with their views and respond with
# JSON (ajax) or the rendered editor page (mobile).
app = get_app() viewname = os.path.splitext(request.args[2])[0] viewpath = os.path.join(app, 'views', viewname) aviewpath = apath(viewpath, r=request) viewlist = [] if os.path.exists(aviewpath): if os.path.isdir(aviewpath): viewlist = glob(os.path.join(aviewpath, '*.html')) elif os.path.exists(aviewpath + '.html'): viewlist.append(aviewpath + '.html') if len(viewlist): editviewlinks = [] for v in sorted(viewlist): vf = os.path.split(v)[-1] vargs = "/".join([viewpath.replace(os.sep, "/"), vf]) editviewlinks.append(A(vf.split(".")[0], _class="editor_filelink", _href=URL('edit', args=[vargs]))) if len(request.args) > 2 and request.args[1] == 'controllers': controller = (request.args[2])[:-3] functions = find_exposed_functions(data) functions = functions and sorted(functions) or [] else: (controller, functions) = (None, None) if 'from_ajax' in request.vars: return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight}) else: file_details = dict(app=request.args[0], lineno=request.vars.lineno or 1, editor_settings=preferences, filename=filename, realfilename=realfilename, filetype=filetype, data=data, edit_controller=edit_controller, file_hash=file_hash, saved_on=saved_on, controller=controller, functions=functions, view_link=view_link, editviewlinks=editviewlinks, id=IS_SLUG()(filename)[0], force=True if (request.vars.restore or request.vars.revert) else False) plain_html = response.render('default/edit_js.html', file_details) file_details['plain_html'] = plain_html if is_mobile: return response.render('default.mobile/edit.html', file_details, editor_settings=preferences) else: return response.json(file_details) def todolist(): """ Returns all TODO of the requested app """ app = request.vars.app or '' app_path = apath('%(app)s' % {'app': app}, r=request) dirs = ['models', 'controllers', 'modules', 'private'] def listfiles(app, dir, regexp='.*\.py$'): files =
sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp)) files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')] return files pattern = '#\s*(todo)+\s+(.*)' regex = re.compile(pattern, re.IGNORECASE) output = [] for d in dirs: for f in listfiles(app, d): matches = [] filename = apath(os.path.join(app, d, f), r=request) with open(filename, 'r') as f_s: src = f_s.read() for m in regex.finditer(src): start = m.start() lineno = src.count('\n', 0, start) + 1 matches.append({'text': m.group(0), 'lineno': lineno}) if len(matches) != 0: output.append({'filename': f, 'matches': matches, 'dir': d}) return {'todo': output, 'app': app} def editor_sessions(): config = Config(os.path.join(request.folder, 'settings.cfg'), section='editor_sessions', default_values={}) preferences = config.read() if request.vars.session_name and request.vars.files: session_name = request.vars.session_name files = request.vars.files preferences.update({session_name: ','.join(files)}) if config.save(preferences.items()): response.headers["web2py-component-flash"] = T('Session saved correctly') else: response.headers["web2py-component-flash"] = T('Session saved on session only') return response.render('default/editor_sessions.html', {'editor_sessions': preferences}) def resolve(): """ """ filename = '/'.join(request.args) # ## check if file is not there path = apath(filename, r=request) a = safe_read(path).split('\n') try: b = safe_read(path + '.1').split('\n') except IOError: session.flash = 'Other file, no longer there' redirect(URL('edit', args=request.args)) d = difflib.ndiff(a, b) def leading(line): """ """ # TODO: we really need to comment this z = '' for (k, c) in enumerate(line): if c == ' ': z += '&nbsp;' elif c == ' \t': z += '&nbsp;' elif k == 0 and c == '?': pass else: break return XML(z) def getclass(item): """ Determine item class """ operators = {' ': 'normal', '+': 'plus', '-': 'minus'} return operators[item[0]] if request.vars: c
= '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0] == ' ' or 'line%i' % i in request.vars]) safe_write(path, c) session.flash = 'files merged' redirect(URL('edit', args=request.args)) else: # Making the short circuit compatible with <= python2.4 gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \ or INPUT(_type='checkbox', _name='line%i' % index, value=item[0] == '+') diff = TABLE(*[TR(TD(gen_data(i, item)), TD(item[0]), TD(leading(item[2:]), TT(item[2:].rstrip())), _class=getclass(item)) for (i, item) in enumerate(d) if item[0] != '?']) return dict(diff=diff, filename=filename) def edit_language(): """ Edit language file """ app = get_app() filename = '/'.join(request.args) response.title = request.args[-1] strings = read_dict(apath(filename, r=request)) if '__corrupted__' in strings: form = SPAN(strings['__corrupted__'], _class='error') return dict(filename=filename, form=form) keys = sorted(strings.keys(), lambda x, y: cmp( unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower())) rows = [] rows.append(H2(T('Original/Translation'))) for key in keys: name = md5_hash(key) s = strings[key] (prefix, sep, key) = key.partition('\x01') if sep: prefix = SPAN(prefix + ': ', _class='tm_ftag') k = key else: (k, prefix) = (prefix, '') _class = 'untranslated' if k == s else 'translated' if len(s) <= 40: elem = INPUT(_type='text', _name=name, value=s, _size=70, _class=_class) else: elem = TEXTAREA(_name=name, value=s, _cols=70, _rows=5, _class=_class) # Making the short circuit compatible with <= python2.4 k = (s != k) and k or B(k) new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"), CAT(elem, '\n', TAG.BUTTON( T('delete'), _onclick='return delkey("%s")' % name, _class='btn')), _id=name, _class='span6 well well-small') rows.append(DIV(new_row, _class="row-fluid")) rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls')) form = FORM(*rows) if form.accepts(request.vars,
keepvalues=True): strs = dict() for key in keys: name = md5_hash(key) if form.vars[name] == chr(127): continue strs[key] = form.vars[name] write_dict(apath(filename, r=request), strs) session.flash = T('file saved on %(time)s', dict(time=time.ctime())) redirect(URL(r=request, args=request.args)) return dict(app=request.args[0], filename=filename, form=form) def edit_plurals(): """ Edit plurals file """ app = get_app() filename = '/'.join(request.args) plurals = read_plural_dict( apath(filename, r=request)) # plural forms dictionary nplurals = int(request.vars.nplurals) - 1 # plural forms quantity xnplurals = xrange(nplurals) if '__corrupted__' in plurals: # show error message and exit form = SPAN(plurals['__corrupted__'], _class='error') return dict(filename=filename, form=form) keys = sorted(plurals.keys(), lambda x, y: cmp( unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower())) tab_rows = [] for key in keys: name = md5_hash(key) forms = plurals[key] if len(forms) < nplurals: forms.extend(None for i in xrange(nplurals - len(forms))) tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key, _class='fake-input'))) tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals] tab_col2 = DIV(CAT(*tab_inputs)) tab_col3 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6')) tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row') tab_rows.append(tab_row) tab_rows.append(DIV(TAG['button'](T('update'), _type='submit', _class='btn btn-primary'), _class='controls')) tab_container = DIV(*tab_rows, **dict(_class="row-fluid")) form = FORM(tab_container) if form.accepts(request.vars, keepvalues=True): new_plurals = dict() for key in keys: name = md5_hash(key) if form.vars[name + '_0'] == chr(127): continue new_plurals[key] = [form.vars[name
+ '_' + str(n)] for n in xnplurals] write_plural_dict(apath(filename, r=request), new_plurals) session.flash = T('file saved on %(time)s', dict(time=time.ctime())) redirect(URL(r=request, args=request.args, vars=dict( nplurals=request.vars.nplurals))) return dict(app=request.args[0], filename=filename, form=form) def about(): """ Read about info """ app = get_app() # ## check if file is not there about = safe_read(apath('%s/ABOUT' % app, r=request)) license = safe_read(apath('%s/LICENSE' % app, r=request)) return dict(app=app, about=MARKMIN(about), license=MARKMIN(license), progress=report_progress(app)) def design(): """ Application design handler """ app = get_app() if not response.flash and app == request.application: msg = T('ATTENTION: you cannot edit the running application!') response.flash = msg if request.vars and not request.vars.token == session.token: redirect(URL('logout')) if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str): filename = os.path.basename(request.vars.pluginfile.filename) if plugin_install(app, request.vars.pluginfile.file, request, filename): session.flash = T('new plugin installed') redirect(URL('design', args=app)) else: session.flash = \ T('unable to create application "%s"', request.vars.filename) redirect(URL(r=request)) elif isinstance(request.vars.pluginfile, str): session.flash = T('plugin not specified') redirect(URL(r=request)) # If we have only pyc files it means that # we cannot design if os.path.exists(apath('%s/compiled' % app, r=request)): session.flash = \ T('application is compiled and cannot be designed') redirect(URL('site')) # Get all models models = listdir(apath('%s/models/' % app, r=request), '.*\.py$') models = [x.replace('\\', '/') for x in models] defines = {} for m in models: data = safe_read(apath('%s/models/%s' % (app, m), r=request)) defines[m] = regex_tables.findall(data) defines[m].sort() # Get all controllers controllers = sorted( listdir(apath('%s/controllers/' % app,
                r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        items = find_exposed_functions(data)
        functions[c] = items and sorted(items) or []

    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = regex_extend.findall(data)
        if items:
            extend[c] = items[0][1]
        items = regex_include.findall(data)
        include[c] = [i[1] for i in items]

    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    # NOTE(review): duplicated `modules = modules =` assignment — harmless
    # but looks accidental.
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()

    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()

    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()

    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in read_possible_languages(langpath).iteritems()
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files

    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        # GAE has no writable filesystem, so only create cron files locally.
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')

    plugins = []

    def filter_plugins(items, plugins):
        # Accumulate plugin names into `plugins` (mutated in place) and
        # return the non-plugin items.
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]

    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)


def delete_plugin():
    """ Object delete handler """
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            # Remove every file/folder belonging to the plugin across all
            # standard application folders.
            for folder in ['models', 'views', 'controllers', 'static',
                           'modules', 'private']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0),
                     anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)


def plugin():
    """ Application design handler """
    app = get_app()
    plugin = request.args(1)

    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg

    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))

    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = regex_tables.findall(data)
        defines[m].sort()

    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app,
                r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        items = find_exposed_functions(data)
        functions[c] = items and sorted(items) or []

    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = regex_extend.findall(data)
        if items:
            extend[c] = items[0][1]
        items = regex_include.findall(data)
        include[c] = [i[1] for i in items]

    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    # NOTE(review): duplicated `modules = modules =` assignment — harmless
    # but looks accidental.
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()

    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()

    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()

    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        T.get_possible_languages_info().iteritems()
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files

    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')

    def filter_plugins(items):
        # Keep only items that belong to this plugin (top-level file or
        # folder named plugin_<name>).
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]

    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)


def create_file():
    """ Create files handler """
    # Session-token guard on state-changing requests — presumably a
    # CSRF-style protection; verify against where session.token is set.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        # Sanitize the requested file name.
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   # Plural-Forms for %(lang)s (%(langname)s)

                   nplurals=2  # for example, English language has 2 forms:
                               # 1 singular and 1 plural

                   # Determine plural_id for number *n* as sequence of positive
                   # integers: 0,1,...
                   # NOTE! For singular form ALWAYS return plural_id = 0
                   get_plural_id = lambda n: int(n != 1)

                   # Construct and return plural form of *word* using
                   # *plural_id* (which ALWAYS>0). This function will be executed
                   # for words (or phrases) not found in plural_dict dictionary.
                   # By default this function simply returns word in singular:
                   construct_plural_form = lambda word, plural_id: word
                   """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                       {{extend 'layout.html'}}
                       <h1>%s</h1>
                       {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                # Non-html views start from the generic.<ext> view if present.
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
                not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            redirect(request.vars.sender + anchor)

        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)

        if not os.path.exists(dirpath):
            os.makedirs(dirpath)

        # Never overwrite an existing file.
        if os.path.exists(full_filename):
            raise SyntaxError

        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)],
                     vars=vars))
    except Exception, e:
        # SyntaxError is (ab)used above as a validation failure signal;
        # HTTP exceptions (redirects) pass through unreported.
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')

    if request.vars.dir:
        # Ajax component request: return commands to refresh the file menu.
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        redirect(request.vars.sender + anchor)


def listfiles(app, dir, regexp='.*\.py$'):
    files = sorted(
        listdir(apath('%(app)s/%(dir)s/' %
                {'app': app, 'dir': dir}, r=request), regexp))
    files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')]
    return files


def editfile(path, file, vars={}, app=None):
    # Build an "edit" link for a file; when 'app' is passed through vars
    # the app segment is implicit in the URL.
    args = (path, file) if 'app' in vars else (app, path, file)
    url = URL('edit', args=args, vars=vars)
    return A(file, _class='editor_filelink', _href=url,
             _style='word-wrap: nowrap;')


def files_menu():
    """ Collapsible file menu for the editor sidebar """
    app = request.vars.app or 'welcome'
    dirs = [{'name': 'models', 'reg': '.*\.py$'},
            {'name': 'controllers', 'reg': '.*\.py$'},
            {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': '.*\.py$'},
            {'name': 'static', 'reg': '[^\.#].*'},
            {'name': 'private', 'reg': '.*\.py$'}]
    result_files = []
    for dir in dirs:
        result_files.append(TAG[''](LI(dir['name'], _class="nav-header component",
                                       _onclick="collapse('" + dir['name'] + "_files');"),
                                    LI(UL(*[LI(editfile(dir['name'], f,
                                                        dict(id=dir['name'] + f.replace('.', '__')), app),
                                               _style="overflow:hidden",
                                               _id=dir['name'] + "__" + f.replace('.', '__'))
                                            for f in listfiles(app, dir['name'], regexp=dir['reg'])],
                                          _class="nav nav-list small-font"),
                                       _id=dir['name'] + '_files',
                                       _style="display: none;")))
    return dict(result_files=result_files)


def upload_file():
    """ File uploading handler """
    # Session-token guard on state-changing requests — presumably a
    # CSRF-style protection; verify against where session.token is set.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        filename = None
        app = get_app(name=request.vars.location.split('/')[0])
        path = apath(request.vars.location, r=request)

        if request.vars.filename:
            filename = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            filename = os.path.split(request.vars.file.filename)[-1]

        # Force the extension expected for the destination folder.
        if path[-8:] == '/models/' and not filename[-3:] == '.py':
            filename += '.py'

        if path[-9:] == '/modules/' and not filename[-3:] == '.py':
            filename += '.py'

        if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
            filename += '.py'

        if path[-7:] == '/views/' and not filename[-5:] == '.html':
            filename += '.html'

        if path[-11:] == '/languages/' and not filename[-3:] == '.py':
            filename += '.py'

        filename = os.path.join(path, filename)
        dirpath = os.path.dirname(filename)

        if not os.path.exists(dirpath):
            os.makedirs(dirpath)

        data = request.vars.file.file.read()
        lineno = count_lines(data)
        safe_write(filename, data, 'wb')
        log_progress(app, 'UPLOAD', filename, lineno)
        session.flash = T('file "%(filename)s" uploaded',
                          dict(filename=filename[len(path):]))
    except Exception:
        if filename:
            d = dict(filename=filename[len(path):])
        else:
            d = dict(filename='unknown')
        session.flash = T('cannot upload file "%(filename)s"', d)
    redirect(request.vars.sender)


def errors():
    """ Error handler """
    import operator
    import os
    import pickle
    import hashlib

    app = get_app()
    if is_gae:
        method = 'dbold' if ('old' in (request.args(1) or '')) else 'dbnew'
    else:
        method = request.args(1) or 'new'
    db_ready = {}
    db_ready['status'] = get_ticket_storage(app)
    db_ready['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"

    if method == 'new':
        errors_path = apath('%s/errors' % app, r=request)

        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])

        hash2error = dict()

        for fn in listdir(errors_path, '^[a-fA-F0-9.\-]+$'):
            fullpath = os.path.join(errors_path, fn)
            if not os.path.isfile(fullpath):
                continue
            try:
                # Tickets are pickled locally by web2py itself (trusted
                # files on disk), not user-supplied data.
                fullpath_file = open(fullpath, 'r')
                try:
                    error = pickle.load(fullpath_file)
                finally:
                    fullpath_file.close()
            except IOError:
                continue
            except EOFError:
                continue

            # Group identical tracebacks under one md5 hash.
            hash = hashlib.md5(error['traceback']).hexdigest()

            if hash in delete_hashes:
                os.unlink(fullpath)
            else:
                try:
                    hash2error[hash]['count'] += 1
                except KeyError:
                    error_lines = error['traceback'].split("\n")
                    last_line = error_lines[-2] if len(error_lines) > 1 \
                        else 'unknown'
                    error_causer = os.path.split(error['layer'])[1]
                    hash2error[hash] = dict(count=1, pickel=error,
                                            causer=error_causer,
                                            last_line=last_line,
                                            hash=hash, ticket=fn)

        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)

        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)

    elif method == 'dbnew':
        errors_path = apath('%s/errors' % app, r=request)
        tk_db, tk_table = get_ticket_storage(app)

        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])

        hash2error = dict()

        for fn in tk_db(tk_table.id > 0).select():
            try:
                error = pickle.loads(fn.ticket_data)
                hash = hashlib.md5(error['traceback']).hexdigest()

                if hash in delete_hashes:
                    tk_db(tk_table.id == fn.id).delete()
                    tk_db.commit()
                else:
                    try:
                        hash2error[hash]['count'] += 1
                    except KeyError:
                        error_lines = error['traceback'].split("\n")
                        last_line = error_lines[-2]
                        error_causer = os.path.split(error['layer'])[1]
                        hash2error[hash] = dict(count=1,
                                                pickel=error,
                                                causer=error_causer,
                                                last_line=last_line,
                                                hash=hash,
                                                ticket=fn.ticket_id)
            except AttributeError, e:
                # Unreadable/stale ticket row: drop it.
                tk_db(tk_table.id == fn.id).delete()
                tk_db.commit()

        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)

        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)

    elif method == 'dbold':
        tk_db, tk_table = get_ticket_storage(app)
        for item in request.vars:
            if item[:7] == 'delete_':
                tk_db(tk_table.ticket_id == item[7:]).delete()
                tk_db.commit()
        tickets_ = tk_db(tk_table.id > 0).select(
            tk_table.ticket_id, tk_table.created_datetime,
            orderby=~tk_table.created_datetime)
        tickets = [row.ticket_id for row in tickets_]
        times = dict([(row.ticket_id, row.created_datetime)
                      for row in tickets_])

        return dict(app=app, tickets=tickets, method=method,
                    times=times, db_ready=db_ready)

    else:
        for item in request.vars:
            # delete_all rows doesn't contain any ticket
            # Remove anything else as requested
            # NOTE(review): the literal "delete_all}" contains what looks
            # like a stray '}' — confirm against the form field name in
            # the errors view.
            if item[:7] == 'delete_' and (not item == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
        # Newest tickets first, by file modification time.
        func = lambda p: os.stat(apath('%s/errors/%s' %
                                       (app, p), r=request)).st_mtime
        tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), '^\w.*'),
            key=func, reverse=True)

        return dict(app=app, tickets=tickets, method=method,
                    db_ready=db_ready)


def get_ticket_storage(app):
    # Returns (db, table) for DB-backed ticket storage, or False when the
    # app has no private/ticket_storage.txt (and we are not on GAE).
    private_folder = apath('%s/private' % app, r=request)
    ticket_file = os.path.join(private_folder, 'ticket_storage.txt')
    if os.path.exists(ticket_file):
        db_string = open(ticket_file).read()
        db_string = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        # use Datastore as fallback if there is no ticket_file
        db_string = "google:datastore"
    else:
        return False
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        table = ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb, ticketsdb.get(tablename)


def make_link(path):
    """ Create a link from a path """
    tryFile = path.replace('\\', '/')

    if os.path.isabs(tryFile) and os.path.isfile(tryFile):
        (folder, filename) = os.path.split(tryFile)
        (base, ext) = os.path.splitext(filename)
        app = get_app()

        editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for key in editable.keys():
            check_extension = folder.endswith("%s/%s" % (app, key))
            if ext.lower() == editable[key] and check_extension:
                return A('"' + tryFile + '"',
                         _href=URL(r=request,
                                   f='edit/%s/%s/%s' % (app, key, filename))).xml()
    return ''


def make_links(traceback):
    """ Make links using the given traceback """
    lwords = traceback.split('"')

    # Making the short circuit compatible with <= python2.4
    result = (len(lwords) != 0) and lwords[0] or ''

    i = 1

    while i < len(lwords):
        link = make_link(lwords[i])

        if link == '':
            result += '"' + lwords[i]
        else:
            result += link

            if i + 1 < len(lwords):
                result += lwords[i + 1]
                i = i + 1

        i = i + 1

    return result


class TRACEBACK(object):
    """ Generate
    the traceback """

    def __init__(self, text):
        """ TRACEBACK constructor """
        self.s = make_links(CODE(text).xml())

    def xml(self):
        """ Returns the xml """
        return self.s


def ticket():
    """ Ticket handler """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))

    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    e = RestrictedError()
    e.load(request, app, ticket)

    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)


def ticketdb():
    """ Ticket handler """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))

    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    e = RestrictedError()
    # Load the ticket from the DB-backed storage instead of the filesystem.
    request.tickets_db = get_ticket_storage(app)[0]
    e.load(request, app, ticket)
    response.view = 'default/ticket.html'

    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)


def error():
    """ Generate a ticket (for testing) """
    raise RuntimeError('admin ticket generator at your service')


def update_languages():
    """ Update available languages """
    app = get_app()
    update_all_languages(apath(app, r=request))
    session.flash = T('Language files (static strings) updated')
    redirect(URL('design', args=app, anchor='languages'))


def user():
    # First registered user is auto-approved (bootstrap admin account).
    if MULTI_USER_MODE:
        if not db(db.auth_user).count():
            auth.settings.registration_requires_approval = False
        return dict(form=auth())
    else:
        return dict(form=T("Disabled"))


def reload_routes():
    """ Reload routes.py """
    import gluon.rewrite
    gluon.rewrite.load()
    redirect(URL('site'))


def manage_students():
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    db.auth_user.registration_key.writable = True
    grid = SQLFORM.grid(db.auth_user)
    return locals()


def bulk_register():
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('emails', 'text'))
    if form.process().accepted:
        emails = [x.strip() for x in form.vars.emails.split('\n')
                  if x.strip()]
        n = 0
        for email in emails:
            if not db.auth_user(email=email):
                n += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%s students registered', n)
        redirect(URL('site'))
    return locals()

# Begin experimental stuff need fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not reuqire manual merge


def git_pull():
    """ Git Pull handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    dialog = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            origin = repo.remotes.origin
            origin.fetch()
            origin.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))
        except git.CheckoutError:
            session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for details.")
            redirect(URL('site'))
        except AssertionError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)


def git_push():
    """ Git Push handler """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    form.element('input[type=submit]')['_value'] = T('Push')
    form.add_button(T('Cancel'), URL('site'))
    form.process()
    if form.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            index = repo.index
            index.add([apath(r=request) + app + '/*'])
            new_commit = index.commit(form.vars.changelog)
            origin = repo.remotes.origin
            origin.push()
            session.flash = T(
                "Git repo updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    return dict(app=app, form=form)


def plugins():
    app = request.args(0)
    from serializers import loads_json
    if not session.plugins:
        try:
            rawlist = urllib.urlopen("http://www.web2pyslices.com/" +
                                     "public/api.json/action/list/content/Package?package" +
                                     "_type=plugin&search_index=false").read()
            session.plugins = loads_json(rawlist)
        # NOTE(review): bare except silently masks any failure (including
        # JSON errors) behind a generic flash message.
        except:
            response.flash = T('Unable to download the list of plugins')
            session.plugins = []
    return dict(plugins=session.plugins["results"], app=request.args(0))


def install_plugin():
    app = request.args(0)
    source = request.vars.source
    plugin = request.vars.plugin
    if not (source and app):
        raise HTTP(500, T("Invalid request"))
    # make sure no XSS attacks in source
    # (only http/https URL schemes are accepted for the plugin source)
    if not source.lower().split('://')[0] in ('http','https'):
        raise HTTP(500, T("Invalid request"))
    form = SQLFORM.factory()
    result = None
    if form.process().accepted:
        # get w2p plugin
        if "web2py.plugin." in source:
            filename = "web2py.plugin.%s.w2p" % \
                source.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
        if plugin_install(app, urllib.urlopen(source),
                          request, filename):
            session.flash = T('New plugin installed: %s', filename)
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(f="plugins", args=[app, ]))
    return dict(form=form, app=app, plugin=plugin, source=source)
./CrossVul/dataset_final_sorted/CWE-254/py/bad_4859_0
crossvul-python_data_good_4859_1
import base64
import os
import time
from gluon import portalocker
from gluon.admin import apath
from gluon.fileutils import read_file
from gluon.utils import web2py_uuid

# ###########################################################
# ## make sure administrator is on localhost or https
# ###########################################################
# NOTE: this is a web2py model file — request/response/session, T, HTTP,
# redirect, URL, DAL, CRYPT, restricted, DEMO_MODE, EXPIRATION and
# MULTI_USER_MODE are framework/app globals injected by web2py.

http_host = request.env.http_host.split(':')[0]

if request.env.web2py_runtime_gae:
    # on Google App Engine, sessions must live in the datastore
    session_db = DAL('gae')
    session.connect(request, response, db=session_db)
    hosts = (http_host, )
    is_gae = True
else:
    is_gae = False

if request.is_https:
    session.secure()
elif not request.is_local and not DEMO_MODE:
    # remote access over plain HTTP is refused outright
    raise HTTP(200, T('Admin is disabled because insecure channel'))

try:
    # load the admin password hash from parameters_<port>.py
    _config = {}
    port = int(request.env.server_port or 0)
    restricted(
        read_file(apath('../parameters_%i.py' % port, request)), _config)
    if not 'password' in _config or not _config['password']:
        raise HTTP(200, T('admin disabled because no admin password'))
except IOError:
    import gluon.fileutils
    if is_gae:
        if gluon.fileutils.check_credentials(request):
            session.authorized = True
            session.last_time = time.time()
        else:
            raise HTTP(200, T('admin disabled because not supported on google app engine'))
    else:
        raise HTTP(
            200, T('admin disabled because unable to access password file'))


def verify_password(password):
    """Check the given admin password against the configured credential.

    Supports three modes: DEMO_MODE (always accepted), 'pam_user:<name>'
    (delegated to PAM), or a CRYPT hash comparison.  On success a fresh
    session.hmac_key is generated.  Returns True/False.
    """
    session.pam_user = None
    if DEMO_MODE:
        ret = True
    elif not _config.get('password'):
        # FIX: was `ret - False`, which raised NameError (ret undefined)
        # instead of rejecting the login
        ret = False
    elif _config['password'].startswith('pam_user:'):
        session.pam_user = _config['password'][9:].strip()
        import gluon.contrib.pam
        ret = gluon.contrib.pam.authenticate(session.pam_user, password)
    else:
        ret = _config['password'] == CRYPT()(password)[0]
    if ret:
        session.hmac_key = web2py_uuid()
    return ret

# ###########################################################
# ## handle brute-force login attacks
# ###########################################################

deny_file = os.path.join(request.folder, 'private', 'hosts.deny')
allowed_number_of_attempts = 5
expiration_failed_logins = 3600


def read_hosts_deny():
    """Parse private/hosts.deny into {ip: (n_attempts, last_attempt_ts)}.

    Lines that are blank, start with '#', or have fewer than 3 fields are
    ignored.  Returns an empty dict when the file does not exist.
    """
    hosts = {}
    if os.path.exists(deny_file):
        hosts = {}
        f = open(deny_file, 'r')
        portalocker.lock(f, portalocker.LOCK_SH)
        for line in f.readlines():
            if not line.strip() or line.startswith('#'):
                continue
            fields = line.strip().split()
            if len(fields) > 2:
                hosts[fields[0].strip()] = (      # ip
                    int(fields[1].strip()),       # number of attempts
                    int(fields[2].strip())        # time of last attempt
                )
        portalocker.unlock(f)
        f.close()
    return hosts


def write_hosts_deny(denied_hosts):
    """Rewrite hosts.deny, dropping entries older than the expiration window."""
    f = open(deny_file, 'w')
    portalocker.lock(f, portalocker.LOCK_EX)
    for key, val in denied_hosts.items():
        if time.time() - val[1] < expiration_failed_logins:
            line = '%s %s %s\n' % (key, val[0], val[1])
            f.write(line)
    portalocker.unlock(f)
    f.close()


def login_record(success=True):
    """Record a login attempt for the current client IP.

    On success, clears the client's failure record.  On failure, returns
    the attempt count (clients at/over the limit within the expiration
    window are locked out without delay; otherwise a 2**n backoff sleep
    is applied before the count is incremented and persisted).
    """
    denied_hosts = read_hosts_deny()
    val = (0, 0)
    if success and request.client in denied_hosts:
        del denied_hosts[request.client]
    elif not success:
        val = denied_hosts.get(request.client, (0, 0))
        if time.time() - val[1] < expiration_failed_logins \
                and val[0] >= allowed_number_of_attempts:
            return val[0]  # locked out
        # exponential backoff on repeated failures
        time.sleep(2 ** val[0])
        val = (val[0] + 1, int(time.time()))
        denied_hosts[request.client] = val
    write_hosts_deny(denied_hosts)
    return val[0]


def failed_login_count():
    """Return the number of recorded failed logins for the current client."""
    denied_hosts = read_hosts_deny()
    val = denied_hosts.get(request.client, (0, 0))
    return val[0]

# ###########################################################
# ## session expiration
# ###########################################################

t0 = time.time()
if session.authorized:
    if session.last_time and session.last_time < t0 - EXPIRATION:
        session.flash = T('session expired')
        session.authorized = False
    else:
        session.last_time = t0

# track the mobile-UI preference ('true' / 'false' / 'auto')
if request.vars.is_mobile in ('true', 'false', 'auto'):
    session.is_mobile = request.vars.is_mobile or 'auto'
if request.controller == 'default' and request.function == 'index':
    if not request.vars.is_mobile:
        session.is_mobile = 'auto'
if not session.is_mobile:
    session.is_mobile = 'auto'
if session.is_mobile == 'true':
    is_mobile = True
elif session.is_mobile == 'false':
    is_mobile = False
else:
    is_mobile = request.user_agent().get('is_mobile',False)

if DEMO_MODE:
    session.authorized = True
    session.forget()

if request.controller == "webservices":
    # web services authenticate per-request via HTTP Basic auth
    basic = request.env.http_authorization
    if not basic or not basic[:6].lower() == 'basic ':
        raise HTTP(401, "Wrong credentials")
    (username, password) = base64.b64decode(basic[6:]).split(':')
    if not verify_password(password) or MULTI_USER_MODE:
        time.sleep(10)
        raise HTTP(403, "Not authorized")
elif not session.authorized and not \
        (request.controller + '/' + request.function in
         ('default/index', 'default/user',
          'plugin_jqmobile/index', 'plugin_jqmobile/about')):
    # unauthenticated: bounce to the login page, remembering the target URL
    if request.env.query_string:
        query_string = '?' + request.env.query_string
    else:
        query_string = ''
    if request.env.web2py_original_uri:
        url = request.env.web2py_original_uri
    else:
        url = request.env.path_info + query_string
    redirect(URL(request.application, 'default', 'index',
                 vars=dict(send=url)))
elif session.authorized and \
        request.controller == 'default' and \
        request.function == 'index':
    redirect(URL(request.application, 'default', 'site'))

if request.controller == 'appadmin' and DEMO_MODE:
    session.flash = 'Appadmin disabled in demo mode'
    redirect(URL('default', 'sites'))
./CrossVul/dataset_final_sorted/CWE-254/py/good_4859_1
crossvul-python_data_bad_5219_3
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import os import os.path import yaml import logging import glob from build_pack_utils import FileUtil _log = logging.getLogger('helpers') class FakeBuilder(object): def __init__(self, ctx): self._ctx = ctx class FakeInstaller(object): def __init__(self, builder, installer): self._installer = installer self.builder = builder def setup_webdir_if_it_doesnt_exist(ctx): if is_web_app(ctx): webdirPath = os.path.join(ctx['BUILD_DIR'], ctx['WEBDIR']) if not os.path.exists(webdirPath): fu = FileUtil(FakeBuilder(ctx), move=True) fu.under('BUILD_DIR') fu.into('WEBDIR') fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.bp')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.extensions')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.bp-config')) fu.where_name_does_not_match( '^%s$' % os.path.join(ctx['BUILD_DIR'], 'manifest.yml')) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], ctx['LIBDIR'])) fu.where_name_does_not_match( '^%s/.*$' % os.path.join(ctx['BUILD_DIR'], '.profile.d')) fu.done() def log_bp_version(ctx): version_file = os.path.join(ctx['BP_DIR'], 'VERSION') if 
os.path.exists(version_file): print('-------> Buildpack version %s' % open(version_file).read()) def setup_log_dir(ctx): os.makedirs(os.path.join(ctx['BUILD_DIR'], 'logs')) def load_manifest(ctx): manifest_path = os.path.join(ctx['BP_DIR'], 'manifest.yml') _log.debug('Loading manifest from %s', manifest_path) return yaml.load(open(manifest_path)) def find_all_php_versions(dependencies): versions = [] for dependency in dependencies: if dependency['name'] == 'php': versions.append(dependency['version']) return versions def validate_php_version(ctx): if ctx['PHP_VERSION'] in ctx['ALL_PHP_VERSIONS']: _log.debug('App selected PHP [%s]', ctx['PHP_VERSION']) else: _log.warning('Selected version of PHP [%s] not available. Defaulting' ' to the latest version [%s]', ctx['PHP_VERSION'], ctx['PHP_55_LATEST']) ctx['PHP_VERSION'] = ctx['PHP_55_LATEST'] def _get_supported_php_extensions(ctx): php_extensions = [] php_extension_glob = os.path.join(ctx["PHP_INSTALL_PATH"], 'lib', 'php', 'extensions', 'no-debug-non-zts-*') php_extension_directory = glob.glob(php_extension_glob)[0] for root, dirs, files in os.walk(php_extension_directory): for f in files: if '.so' in f: php_extensions.append(f.replace('.so', '')) return php_extensions def validate_php_extensions(ctx): filtered_extensions = [] requested_extensions = ctx['PHP_EXTENSIONS'] supported_extensions = _get_supported_php_extensions(ctx) for extension in requested_extensions: if extension not in supported_extensions: print("The extension '%s' is not provided by this buildpack." 
% extension, file=os.sys.stderr) else: filtered_extensions.append(extension) ctx['PHP_EXTENSIONS'] = filtered_extensions def convert_php_extensions(ctx): _log.debug('Converting PHP extensions') SKIP = ('cli', 'pear', 'cgi') ctx['PHP_EXTENSIONS'] = \ "\n".join(["extension=%s.so" % ex for ex in ctx['PHP_EXTENSIONS'] if ex not in SKIP]) path = '' ctx['ZEND_EXTENSIONS'] = \ "\n".join(['zend_extension="%s"' % os.path.join(path, "%s.so" % ze) for ze in ctx['ZEND_EXTENSIONS']]) def is_web_app(ctx): return ctx.get('WEB_SERVER', '') != 'none' def find_stand_alone_app_to_run(ctx): app = ctx.get('APP_START_CMD', None) if not app: possible_files = ('app.php', 'main.php', 'run.php', 'start.php') for pf in possible_files: if os.path.exists(os.path.join(ctx['BUILD_DIR'], pf)): app = pf break if not app: print('Build pack could not find a PHP file to execute!') _log.info('Build pack could not find a file to execute. Either ' 'set "APP_START_CMD" or include one of these files [%s]', ", ".join(possible_files)) app = 'app.php' return app
./CrossVul/dataset_final_sorted/CWE-254/py/bad_5219_3
crossvul-python_data_good_1654_5
# -*- coding: utf-8 -*- from __future__ import with_statement from django.contrib.sites.models import Site from cms.utils.urlutils import admin_reverse from djangocms_text_ckeditor.models import Text from django.core.cache import cache from django.core.management.base import CommandError from django.core.management import call_command from django.core.urlresolvers import reverse from cms.api import create_page, add_plugin, create_title from cms.constants import PUBLISHER_STATE_PENDING, PUBLISHER_STATE_DEFAULT, PUBLISHER_STATE_DIRTY from cms.management.commands import publisher_publish from cms.models import CMSPlugin, Title from cms.models.pagemodel import Page from cms.plugin_pool import plugin_pool from cms.test_utils.testcases import SettingsOverrideTestCase as TestCase from cms.test_utils.util.context_managers import StdoutOverride, SettingsOverride from cms.test_utils.util.fuzzy_int import FuzzyInt from cms.utils.conf import get_cms_setting from cms.utils.i18n import force_language from cms.utils.compat.dj import get_user_model class PublisherCommandTests(TestCase): """ Tests for the publish command """ def test_command_line_should_raise_without_superuser(self): with self.assertRaises(CommandError): com = publisher_publish.Command() com.handle_noargs() def test_command_line_publishes_zero_pages_on_empty_db(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 0) self.assertEqual(published_from_output, 0) def 
test_command_line_ignores_draft_page(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=False) pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 0) self.assertEqual(published_from_output, 0) self.assertEqual(Page.objects.public().count(), 0) def test_command_line_publishes_draft_page(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=False) pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', include_unpublished=True) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) def test_command_line_publishes_selected_language(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') page = create_page("en title", "nav_playground.html", "en") title = create_title('de', 'de title', page) title.published = True title.save() title = create_title('fr', 'fr title', page) title.published = True 
title.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', language='de') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de']) def test_command_line_publishes_selected_language_drafts(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') page = create_page("en title", "nav_playground.html", "en") title = create_title('de', 'de title', page) title.published = False title.save() title = create_title('fr', 'fr title', page) title.published = False title.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', language='de', include_unpublished=True) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de']) def test_table_name_patching(self): """ This tests the plugin models patching when publishing from the command line """ User = get_user_model() 
User.objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=True) draft = Page.objects.drafts()[0] draft.reverse_id = 'a_test' # we have to change *something* draft.save() add_plugin(draft.placeholders.get(slot=u"body"), u"TextPlugin", u"en", body="Test content") draft.publish('en') add_plugin(draft.placeholders.get(slot=u"body"), u"TextPlugin", u"en", body="Test content") # Manually undoing table name patching Text._meta.db_table = 'djangocms_text_ckeditor_text' plugin_pool.patched = False with StdoutOverride(): # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') not_drafts = len(Page.objects.filter(publisher_is_draft=False)) drafts = len(Page.objects.filter(publisher_is_draft=True)) self.assertEqual(not_drafts, 1) self.assertEqual(drafts, 1) def test_command_line_publishes_one_page(self): """ Publisher always creates two Page objects for every CMS page, one is_draft and one is_public. The public version of the page can be either published or not. This bit of code uses sometimes manager methods and sometimes manual filters on purpose (this helps test the managers) """ # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Now, let's create a page. 
That actually creates 2 Page objects create_page("The page!", "nav_playground.html", "en", published=True) draft = Page.objects.drafts()[0] draft.reverse_id = 'a_test' # we have to change *something* draft.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) # Sanity check the database (we should have one draft and one public) not_drafts = len(Page.objects.filter(publisher_is_draft=False)) drafts = len(Page.objects.filter(publisher_is_draft=True)) self.assertEqual(not_drafts, 1) self.assertEqual(drafts, 1) # Now check that the non-draft has the attribute we set to the draft. 
non_draft = Page.objects.public()[0] self.assertEqual(non_draft.reverse_id, 'a_test') def test_command_line_publish_multiple_languages(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Create a draft page with two published titles page = create_page(u"The page!", "nav_playground.html", "en", published=False) title = create_title('de', 'ja', page) title.published = True title.save() title = create_title('fr', 'non', page) title.published = True title.save() with StdoutOverride(): # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de', 'fr']) def test_command_line_publish_one_site(self): get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') siteA = Site.objects.create(domain='a.example.com', name='a.example.com') siteB = Site.objects.create(domain='b.example.com', name='b.example.com') #example.com create_page(u"example.com homepage", "nav_playground.html", "en", published=True) #a.example.com create_page(u"a.example.com homepage", "nav_playground.html", "de", site=siteA, published=True) #b.example.com create_page(u"b.example.com homepage", "nav_playground.html", "de", site=siteB, published=True) create_page(u"b.example.com about", "nav_playground.html", "nl", site=siteB, published=True) with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', site=siteB.id) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 2) self.assertEqual(published_from_output, 2) def 
test_command_line_publish_multiple_languages_check_count(self): """ Publishing one page with multiple languages still counts as one page. This test case checks whether it works as expected. """ # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Now, let's create a page with 2 languages. page = create_page("en title", "nav_playground.html", "en", published=True) create_title("de", "de title", page) page.publish("de") pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) def tearDown(self): plugin_pool.patched = False plugin_pool.set_plugin_meta() class PublishingTests(TestCase): def create_page(self, title=None, **kwargs): return create_page(title or self._testMethodName, "nav_playground.html", "en", **kwargs) def test_publish_home(self): name = self._testMethodName page = self.create_page(name, published=False) self.assertFalse(page.publisher_public_id) self.assertEqual(Page.objects.all().count(), 1) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], "http://testserver/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')) def test_publish_single(self): name = self._testMethodName page = self.create_page(name, published=False) self.assertFalse(page.is_published('en')) drafts = Page.objects.drafts() public = Page.objects.public() published = 
Page.objects.public().published("en") self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) page.publish("en") drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published("en") self.assertTrue(page.is_published('en')) self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT) self.assertIsNotNone(page.publisher_public) self.assertTrue(page.publisher_public_id) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(public, title_set__title=name) self.assertObjectExist(published, title_set__title=name) page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_publisher_state("en"), 0) def test_publish_admin(self): page = self.create_page("test_admin", published=False) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_publisher_state('en'), 0) def test_publish_wrong_lang(self): page = self.create_page("test_admin", published=False) superuser = self.get_superuser() with SettingsOverride( LANGUAGES=(('de', 'de'), ('en', 'en')), CMS_LANGUAGES={1: [{'code': 'en', 'name': 'en', 'fallbacks': ['fr', 'de'], 'public': True}]} ): with self.login_user_context(superuser): with force_language('de'): response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) page = Page.objects.get(pk=page.pk) def test_publish_child_first(self): parent = self.create_page('parent', published=False) child = self.create_page('child', published=False, parent=parent) parent = parent.reload() self.assertFalse(parent.is_published('en')) self.assertFalse(child.is_published('en')) drafts = 
Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published('en') for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) child.publish("en") child = child.reload() self.assertTrue(child.is_published("en")) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) self.assertIsNone(child.publisher_public) # Since we have no parent, the state is otherwise unchanged for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) parent.publish("en") drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published('en') # Cascade publish for all pending descendants for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) page = drafts.get(title_set__title=name) self.assertTrue(page.is_published("en"), name) self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT, name) self.assertIsNotNone(page.publisher_public, name) self.assertTrue(page.publisher_public.is_published('en'), name) self.assertObjectExist(public, title_set__title=name) self.assertObjectExist(published, title_set__title=name) def test_simple_publisher(self): """ Creates the stuff needed for these tests. Please keep this up-to-date (the docstring!) 
A / \ B C """ # Create a simple tree of 3 pages pageA = create_page("Page A", "nav_playground.html", "en", published=True) pageB = create_page("Page B", "nav_playground.html", "en", parent=pageA, published=True) pageC = create_page("Page C", "nav_playground.html", "en", parent=pageA, published=False) # Assert A and B are published, C unpublished self.assertTrue(pageA.publisher_public_id) self.assertTrue(pageB.publisher_public_id) self.assertTrue(not pageC.publisher_public_id) self.assertEqual(len(Page.objects.public().published("en")), 2) # Let's publish C now. pageC.publish("en") # Assert all are published self.assertTrue(pageA.publisher_public_id) self.assertTrue(pageB.publisher_public_id) self.assertTrue(pageC.publisher_public_id) self.assertEqual(len(Page.objects.public().published("en")), 3) def test_i18n_publishing(self): page = self.create_page('parent', published=True) self.assertEqual(Title.objects.all().count(), 2) create_title("de", "vater", page) self.assertEqual(Title.objects.all().count(), 3) self.assertEqual(Title.objects.filter(published=True).count(), 2) page.publish('de') self.assertEqual(Title.objects.all().count(), 4) self.assertEqual(Title.objects.filter(published=True).count(), 4) def test_publish_ordering(self): page = self.create_page('parent', published=True) pageA = self.create_page('pageA', parent=page, published=True) pageC = self.create_page('pageC', parent=page, published=True) pageB = self.create_page('pageB', parent=page, published=True) page = page.reload() pageB.move_page(pageA, 'right') pageB.publish("en") # pageC needs reload since B has swapped places with it pageC.reload().publish("en") pageA.publish('en') drafts = Page.objects.drafts().order_by('tree_id', 'lft') draft_titles = [(p.get_title('en'), p.lft, p.rght) for p in drafts] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], draft_titles) public = Page.objects.public().order_by('tree_id', 'lft') public_titles = [(p.get_title('en'), 
p.lft, p.rght) for p in public] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], public_titles) page.publish('en') drafts = Page.objects.drafts().order_by('tree_id', 'lft') draft_titles = [(p.get_title('en'), p.lft, p.rght) for p in drafts] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], draft_titles) public = Page.objects.public().order_by('tree_id', 'lft') public_titles = [(p.get_title('en'), p.lft, p.rght) for p in public] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], public_titles) def test_publish_ordering2(self): page = self.create_page('parent', published=False) pageA = self.create_page('pageA', published=False) pageC = self.create_page('pageC', published=False, parent=pageA) pageB = self.create_page('pageB', published=False, parent=pageA) page = page.reload() pageA.publish('en') pageB.publish('en') pageC.publish('en') page.publish('en') drafts = Page.objects.filter(publisher_is_draft=True).order_by('tree_id', 'lft') publics = Page.objects.filter(publisher_is_draft=False).order_by('tree_id', 'lft') x = 0 for draft in drafts: self.assertEqual(draft.publisher_public_id, publics[x].pk) x += 1 def test_unpublish_unpublish(self): name = self._testMethodName page = self.create_page(name, published=True) drafts = Page.objects.drafts() published = Page.objects.public().published("en") self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(published, title_set__title=name) page.unpublish('en') self.assertFalse(page.is_published('en')) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) page.publish('en') self.assertTrue(page.publisher_public_id) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(published, title_set__title=name) def test_delete_title_unpublish(self): page = self.create_page('test', published=True) sub_page = 
self.create_page('test2', published=True, parent=page) self.assertTrue(sub_page.publisher_public.is_published('en')) page.title_set.all().delete() self.assertFalse(sub_page.publisher_public.is_published('en', force_reload=True)) def test_modify_child_while_pending(self): home = self.create_page("Home", published=True, in_navigation=True) child = self.create_page("Child", published=True, parent=home, in_navigation=False) home = home.reload() home.unpublish('en') self.assertEqual(Title.objects.count(), 4) child = child.reload() self.assertFalse(child.publisher_public.is_published('en')) self.assertFalse(child.in_navigation) self.assertFalse(child.publisher_public.in_navigation) child.in_navigation = True child.save() child.publish('en') child = self.reload(child) self.assertEqual(Title.objects.count(), 4) self.assertTrue(child.is_published('en')) self.assertFalse(child.publisher_public.is_published('en')) self.assertTrue(child.in_navigation) self.assertTrue(child.publisher_public.in_navigation) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) home.publish('en') child = self.reload(child) self.assertTrue(child.is_published('en')) self.assertTrue(child.publisher_public_id) self.assertTrue(child.publisher_public.in_navigation) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) def test_republish_with_descendants(self): home = self.create_page("Home", published=True) child = self.create_page("Child", published=True, parent=home) gc = self.create_page("GC", published=True, parent=child) self.assertTrue(child.is_published("en")) self.assertTrue(gc.is_published('en')) home = home.reload() home.unpublish('en') child = self.reload(child) gc = self.reload(gc) self.assertTrue(child.is_published("en")) self.assertTrue(gc.is_published("en")) self.assertFalse(child.publisher_public.is_published("en")) self.assertFalse(gc.publisher_public.is_published('en')) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) 
self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_PENDING) home.publish('en') child = self.reload(child) gc = self.reload(gc) self.assertTrue(child.publisher_public_id) self.assertTrue(gc.is_published('en')) self.assertTrue(child.is_published('en')) self.assertTrue(gc.publisher_public_id) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) def test_republish_with_dirty_children(self): home = self.create_page("Home", published=True) dirty1 = self.create_page("Dirty1", published=True, parent=home) dirty2 = self.create_page("Dirty2", published=True, parent=home) home = self.reload(home) dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) dirty1.in_navigation = True dirty1.save() home.unpublish('en') dirty2.in_navigation = True dirty2.save() dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) self.assertTrue(dirty1.is_published) self.assertTrue(dirty2.publisher_public_id) self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) home = self.reload(home) with self.assertNumQueries(FuzzyInt(0, 100)): home.publish('en') dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) self.assertTrue(dirty1.is_published("en")) self.assertTrue(dirty2.is_published("en")) self.assertTrue(dirty1.publisher_public.is_published("en")) self.assertTrue(dirty2.publisher_public.is_published("en")) self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) def test_republish_with_unpublished_child(self): """ Unpub1 was never published, and unpub2 has been unpublished after the fact. None of the grandchildren should become published. 
""" home = self.create_page("Home", published=True) unpub1 = self.create_page("Unpub1", published=False, parent=home) unpub2 = self.create_page("Unpub2", published=True, parent=home) gc1 = self.create_page("GC1", published=True, parent=unpub1) gc2 = self.create_page("GC2", published=True, parent=unpub2) self.assertFalse(gc1.publisher_public_id) self.assertFalse(gc1.publisher_public_id) self.assertTrue(gc1.is_published('en')) self.assertTrue(gc2.is_published('en')) home.unpublish('en') unpub1 = self.reload(unpub1) unpub2.unpublish('en') # Just marks this as not published for page in (unpub1, unpub2): self.assertFalse(page.is_published('en'), page) self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertIsNone(unpub1.publisher_public) self.assertIsNotNone(unpub2.publisher_public) self.assertFalse(unpub2.publisher_public.is_published('en')) gc1 = self.reload(gc1) gc2 = self.reload(gc2) for page in (gc1, gc2): self.assertTrue(page.is_published('en')) self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_PENDING) self.assertIsNone(gc1.publisher_public) self.assertIsNotNone(gc2.publisher_public) self.assertFalse(gc2.publisher_public.is_published('en')) def test_unpublish_with_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=True) self.create_page("Grandchild", parent=child, published=True) page = page.reload() child.reload() drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published("en") self.assertEqual(published.count(), 3) self.assertEqual(page.get_descendant_count(), 2) base = reverse('pages-root') for url in (base, base + 'child/', base + 'child/grandchild/'): response = self.client.get(url) self.assertEqual(response.status_code, 200, url) for title in ('Page', 'Child', 'Grandchild'): self.assertObjectExist(drafts, title_set__title=title) self.assertObjectExist(public, title_set__title=title) 
self.assertObjectExist(published, title_set__title=title) item = drafts.get(title_set__title=title) self.assertTrue(item.publisher_public_id) self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) self.assertTrue(page.unpublish('en'), 'Unpublish was not successful') self.assertFalse(page.is_published('en')) cache.clear() for url in (base, base + 'child/', base + 'child/grandchild/'): response = self.client.get(url) self.assertEqual(response.status_code, 404) for title in ('Page', 'Child', 'Grandchild'): self.assertObjectExist(drafts, title_set__title=title) self.assertObjectExist(public, title_set__title=title) self.assertObjectDoesNotExist(published, title_set__title=title) item = drafts.get(title_set__title=title) if title == 'Page': self.assertFalse(item.is_published("en")) self.assertFalse(item.publisher_public.is_published("en")) # Not sure what the proper state of these are after unpublish #self.assertEqual(page.publisher_state, PUBLISHER_STATE_DEFAULT) self.assertTrue(page.is_dirty('en')) else: # The changes to the published subpages are simply that the # published flag of the PUBLIC instance goes to false, and the # publisher state is set to mark waiting for parent self.assertTrue(item.is_published('en'), title) self.assertFalse(item.publisher_public.is_published('en'), title) self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_PENDING, title) self.assertTrue(item.is_dirty('en'), title) def test_unpublish_with_dirty_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=True) gchild = self.create_page("Grandchild", parent=child, published=True) child.in_navigation = True child.save() self.assertTrue(child.is_dirty("en")) self.assertFalse(gchild.is_dirty('en')) self.assertTrue(child.publisher_public.is_published('en')) self.assertTrue(gchild.publisher_public.is_published('en')) page.unpublish('en') child = self.reload(child) gchild = self.reload(gchild) # 
Descendants become dirty after unpublish self.assertTrue(child.is_dirty('en')) self.assertTrue(gchild.is_dirty('en')) # However, their public version is still removed no matter what self.assertFalse(child.publisher_public.is_published('en')) self.assertFalse(gchild.publisher_public.is_published('en')) def test_prepublish_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=False) gchild2 = self.create_page("Grandchild2", parent=child, published=False) self.create_page("Grandchild3", parent=child, published=False) gchild = self.create_page("Grandchild", published=True) gchild.move_page(target=child, position='last-child') gchild.publish('en') self.assertFalse(child.is_published('en')) self.assertTrue(gchild.is_published('en')) self.assertEqual(gchild.get_publisher_state('en'), PUBLISHER_STATE_PENDING) child = child.reload() child.publish('en') gchild2 = gchild2.reload() gchild2.publish('en') self.assertTrue(child.is_published("en")) self.assertTrue(gchild.is_published("en")) self.assertEqual(gchild.get_publisher_state('en', force_reload=True), PUBLISHER_STATE_DEFAULT) gchild = gchild.reload() gchild2 = gchild2.reload() self.assertEqual(gchild.lft, gchild.publisher_public.lft) self.assertEqual(gchild.rght, gchild.publisher_public.rght) def test_republish_multiple_root(self): # TODO: The paths do not match expected behaviour home = self.create_page("Page", published=True) other = self.create_page("Another Page", published=True) child = self.create_page("Child", published=True, parent=home) child2 = self.create_page("Child", published=True, parent=other) self.assertTrue(Page.objects.filter(is_home=True).count(), 2) self.assertTrue(home.is_home) home = home.reload() self.assertTrue(home.publisher_public.is_home) root = reverse('pages-root') self.assertEqual(home.get_absolute_url(), root) self.assertEqual(home.get_public_object().get_absolute_url(), root) self.assertEqual(child.get_absolute_url(), 
root + 'child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/') self.assertEqual(other.get_absolute_url(), root + 'another-page/') self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/') self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/') self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/') home = self.reload(home) home.unpublish('en') home = self.reload(home) other = self.reload(other) child = self.reload(child) child2 = self.reload(child2) self.assertFalse(home.is_home) self.assertFalse(home.publisher_public.is_home) self.assertTrue(other.is_home) self.assertTrue(other.publisher_public.is_home) self.assertEqual(other.get_absolute_url(), root) self.assertEqual(other.get_public_object().get_absolute_url(), root) self.assertEqual(home.get_absolute_url(), root + 'page/') self.assertEqual(home.get_public_object().get_absolute_url(), root + 'page/') self.assertEqual(child.get_absolute_url(), root + 'page/child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'page/child/') self.assertEqual(child2.get_absolute_url(), root + 'child/') self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'child/') home.publish('en') home = self.reload(home) other = self.reload(other) child = self.reload(child) child2 = self.reload(child2) self.assertTrue(home.is_home) self.assertTrue(home.publisher_public.is_home) self.assertEqual(home.get_absolute_url(), root) self.assertEqual(home.get_public_object().get_absolute_url(), root) self.assertEqual(child.get_absolute_url(), root + 'child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/') self.assertEqual(other.get_absolute_url(), root + 'another-page/') self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/') self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/') 
self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/') def test_revert_contents(self): user = self.get_superuser() page = create_page("Page", "nav_playground.html", "en", published=True, created_by=user) placeholder = page.placeholders.get(slot=u"body") deleted_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Deleted content") text_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Public content") page.publish('en') # Modify and delete plugins text_plugin.body = "<p>Draft content</p>" text_plugin.save() deleted_plugin.delete() self.assertEqual(CMSPlugin.objects.count(), 3) # Now let's revert and restore page.revert('en') self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT) self.assertEqual(CMSPlugin.objects.count(), 4) plugins = CMSPlugin.objects.filter(placeholder__page=page) self.assertEqual(plugins.count(), 2) plugins = [plugin.get_plugin_instance()[0] for plugin in plugins] self.assertEqual(plugins[0].body, "Deleted content") self.assertEqual(plugins[1].body, "Public content") def test_revert_move(self): parent = create_page("Parent", "nav_playground.html", "en", published=True) parent_url = parent.get_absolute_url() page = create_page("Page", "nav_playground.html", "en", published=True, parent=parent) other = create_page("Other", "nav_playground.html", "en", published=True) other_url = other.get_absolute_url() child = create_page("Child", "nav_playground.html", "en", published=True, parent=page) parent = parent.reload() page = page.reload() self.assertEqual(page.get_absolute_url(), parent_url + "page/") self.assertEqual(child.get_absolute_url(), parent_url + "page/child/") # Now let's move it (and the child) page.move_page(other) page = self.reload(page) child = self.reload(child) self.assertEqual(page.get_absolute_url(), other_url + "page/") self.assertEqual(child.get_absolute_url(), other_url + "page/child/") # Public version changed the url as well 
self.assertEqual(page.publisher_public.get_absolute_url(), other_url + "page/") self.assertEqual(child.publisher_public.get_absolute_url(), other_url + "page/child/") def test_publish_works_with_descendants(self): """ For help understanding what this tests for, see: http://articles.sitepoint.com/print/hierarchical-data-database Creates this published structure: home / \ item1 item2 / \ subitem1 subitem2 """ home_page = create_page("home", "nav_playground.html", "en", published=True, in_navigation=False) create_page("item1", "nav_playground.html", "en", parent=home_page, published=True) item2 = create_page("item2", "nav_playground.html", "en", parent=home_page, published=True) create_page("subitem1", "nav_playground.html", "en", parent=item2, published=True) create_page("subitem2", "nav_playground.html", "en", parent=item2, published=True) item2 = item2.reload() not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('lft')) drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('lft')) self.assertEqual(len(not_drafts), 5) self.assertEqual(len(drafts), 5) for idx, draft in enumerate(drafts): public = not_drafts[idx] # Check that a node doesn't become a root node magically self.assertEqual(bool(public.parent_id), bool(draft.parent_id)) if public.parent: # Let's assert the MPTT tree is consistent self.assertTrue(public.lft > public.parent.lft) self.assertTrue(public.rght < public.parent.rght) self.assertEqual(public.tree_id, public.parent.tree_id) self.assertTrue(public.parent in public.get_ancestors()) self.assertTrue(public in public.parent.get_descendants()) self.assertTrue(public in public.parent.get_children()) if draft.parent: # Same principle for the draft tree self.assertTrue(draft.lft > draft.parent.lft) self.assertTrue(draft.rght < draft.parent.rght) self.assertEqual(draft.tree_id, draft.parent.tree_id) self.assertTrue(draft.parent in draft.get_ancestors()) self.assertTrue(draft in draft.parent.get_descendants()) 
self.assertTrue(draft in draft.parent.get_children()) # Now call publish again. The structure should not change. item2.publish('en') not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('lft')) drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('lft')) self.assertEqual(len(not_drafts), 5) self.assertEqual(len(drafts), 5) for idx, draft in enumerate(drafts): public = not_drafts[idx] # Check that a node doesn't become a root node magically self.assertEqual(bool(public.parent_id), bool(draft.parent_id)) if public.parent: # Let's assert the MPTT tree is consistent self.assertTrue(public.lft > public.parent.lft) self.assertTrue(public.rght < public.parent.rght) self.assertEqual(public.tree_id, public.parent.tree_id) self.assertTrue(public.parent in public.get_ancestors()) self.assertTrue(public in public.parent.get_descendants()) self.assertTrue(public in public.parent.get_children()) if draft.parent: # Same principle for the draft tree self.assertTrue(draft.lft > draft.parent.lft) self.assertTrue(draft.rght < draft.parent.rght) self.assertEqual(draft.tree_id, draft.parent.tree_id) self.assertTrue(draft.parent in draft.get_ancestors()) self.assertTrue(draft in draft.parent.get_descendants()) self.assertTrue(draft in draft.parent.get_children())
./CrossVul/dataset_final_sorted/CWE-352/py/good_1654_5
crossvul-python_data_bad_1891_2
# -*- coding: utf-8 -*- """ flask_security.views ~~~~~~~~~~~~~~~~~~~~ Flask-Security views module :copyright: (c) 2012 by Matt Wright. :copyright: (c) 2019-2020 by J. Christopher Wagner (jwag). :license: MIT, see LICENSE for more details. CSRF is tricky. By default all our forms have CSRF protection built in via Flask-WTF. This is regardless of authentication method or whether the request is Form or JSON based. Form-based 'just works' since when rendering the form (on GET), the CSRF token is automatically populated. We want to handle: - JSON requests where CSRF token is in a header (e.g. X-CSRF-Token) - Option to skip CSRF when using a token to authenticate (rather than session) (CSRF_PROTECT_MECHANISMS) - Option to skip CSRF for 'login'/unauthenticated requests (CSRF_IGNORE_UNAUTH_ENDPOINTS) This is complicated by the fact that the only way to disable form CSRF is to pass in meta={csrf: false} at form instantiation time. Be aware that for CSRF to work, caller MUST pass in session cookie. So for pure API, and no session cookie - there is no way to support CSRF-Login so app must set CSRF_IGNORE_UNAUTH_ENDPOINTS (or use CSRF/session cookie for logging in then once they have a token, no need for cookie). TODO: two-factor routes such as tf_setup need work. They seem to support both authenticated (via session?) as well as unauthenticated access. 
""" import sys import time from flask import ( Blueprint, abort, after_this_request, current_app, jsonify, request, session, ) from flask_login import current_user from werkzeug.datastructures import MultiDict from werkzeug.local import LocalProxy from .changeable import change_user_password from .confirmable import ( confirm_email_token_status, confirm_user, send_confirmation_instructions, ) from .decorators import anonymous_user_required, auth_required, unauth_csrf from .passwordless import login_token_status, send_login_instructions from .quart_compat import get_quart_status from .unified_signin import ( us_signin, us_signin_send_code, us_qrcode, us_setup, us_setup_validate, us_verify, us_verify_link, us_verify_send_code, ) from .recoverable import ( reset_password_token_status, send_reset_password_instructions, update_password, ) from .registerable import register_user from .twofactor import ( complete_two_factor_process, tf_clean_session, tf_disable, tf_login, ) from .utils import ( base_render_json, config_value, do_flash, get_message, get_post_login_redirect, get_post_logout_redirect, get_post_register_redirect, get_post_verify_redirect, get_url, json_error_response, login_user, logout_user, slash_url_suffix, suppress_form_csrf, url_for_security, ) if get_quart_status(): # pragma: no cover from quart import make_response, redirect else: from flask import make_response, redirect # Convenient references _security = LocalProxy(lambda: current_app.extensions["security"]) _datastore = LocalProxy(lambda: _security.datastore) def default_render_json(payload, code, headers, user): """ Default JSON response handler. """ # Force Content-Type header to json. 
if headers is None: headers = dict() headers["Content-Type"] = "application/json" payload = dict(meta=dict(code=code), response=payload) return make_response(jsonify(payload), code, headers) PY3 = sys.version_info[0] == 3 if PY3 and get_quart_status(): # pragma: no cover from .async_compat import _commit # noqa: F401 else: def _commit(response=None): _datastore.commit() return response def _ctx(endpoint): return _security._run_ctx_processor(endpoint) @unauth_csrf(fall_through=True) def login(): """View function for login view Allow already authenticated users. For GET this is useful for single-page-applications on refresh - session still active but need to access user info and csrf-token. For POST - redirects to POST_LOGIN_VIEW (forms) or returns 400 (json). """ if current_user.is_authenticated and request.method == "POST": # Just redirect current_user to POST_LOGIN_VIEW. # While its tempting to try to logout the current user and login the # new requested user - that simply doesn't work with CSRF. # This does NOT use get_post_login_redirect() so that it doesn't look at # 'next' - which can cause infinite redirect loops # (see test_common::test_authenticated_loop) if _security._want_json(request): payload = json_error_response( errors=get_message("ANONYMOUS_USER_REQUIRED")[0] ) return _security._render_json(payload, 400, None, None) else: return redirect(get_url(_security.post_login_view)) form_class = _security.login_form if request.is_json: # Allow GET so we can return csrf_token for pre-login. 
if request.content_length: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(MultiDict([]), meta=suppress_form_csrf()) else: form = form_class(request.form, meta=suppress_form_csrf()) if form.validate_on_submit(): remember_me = form.remember.data if "remember" in form else None if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login( form.user, remember=remember_me, primary_authn_via="password" ) login_user(form.user, remember=remember_me, authn_via=["password"]) after_this_request(_commit) if not _security._want_json(request): return redirect(get_post_login_redirect()) if _security._want_json(request): if current_user.is_authenticated: form.user = current_user return base_render_json(form, include_auth_token=True) if current_user.is_authenticated: return redirect(get_url(_security.post_login_view)) else: return _security.render_template( config_value("LOGIN_USER_TEMPLATE"), login_user_form=form, **_ctx("login") ) @auth_required() def verify(): """View function which handles a authentication verification request. """ form_class = _security.verify_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form may have called verify_and_update_password() after_this_request(_commit) # verified - so set freshness time. 
session["fs_paa"] = time.time() if _security._want_json(request): return base_render_json(form) do_flash(*get_message("REAUTHENTICATION_SUCCESSFUL")) return redirect(get_post_verify_redirect()) if _security._want_json(request): assert form.user == current_user return base_render_json(form) return _security.render_template( config_value("VERIFY_TEMPLATE"), verify_form=form, **_ctx("verify") ) def logout(): """View function which handles a logout request.""" tf_clean_session() if current_user.is_authenticated: logout_user() # No body is required - so if a POST and json - return OK if request.method == "POST" and _security._want_json(request): return _security._render_json({}, 200, headers=None, user=None) return redirect(get_post_logout_redirect()) @anonymous_user_required def register(): """View function which handles a registration request.""" # For some unknown historic reason - if you don't require confirmation # (via email) then you need to type in your password twice. That might # make sense if you can't reset your password but in modern (2020) UX models # don't ask twice. if _security.confirmable or request.is_json: form_class = _security.confirm_register_form else: form_class = _security.register_form if request.is_json: form_data = MultiDict(request.get_json()) else: form_data = request.form form = form_class(form_data, meta=suppress_form_csrf()) if form.validate_on_submit(): did_login = False user = register_user(form) form.user = user # The 'auto-login' feature probably should be removed - I can't imagine # an application that would want random email accounts. It has been like this # since the beginning. Note that we still enforce 2FA - however for unified # signin - we adhere to historic behavior. 
if not _security.confirmable or _security.login_without_confirmation: if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="register") after_this_request(_commit) login_user(user, authn_via=["register"]) did_login = True if not _security._want_json(request): return redirect(get_post_register_redirect()) # Only include auth token if in fact user is permitted to login return base_render_json(form, include_auth_token=did_login) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("REGISTER_USER_TEMPLATE"), register_user_form=form, **_ctx("register") ) @unauth_csrf(fall_through=True) def send_login(): """View function that sends login instructions for passwordless login""" form_class = _security.passwordless_login_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_login_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("LOGIN_EMAIL_SENT", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_LOGIN_TEMPLATE"), send_login_form=form, **_ctx("send_login") ) @anonymous_user_required def token_login(token): """View function that handles passwordless login via a token Like reset-password and confirm - this is usually a GET via an email so from the request we can't differentiate form-based apps from non. 
""" expired, invalid, user = login_token_status(token) if not user or invalid: m, c = get_message("INVALID_LOGIN_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.login_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("login")) if expired: send_login_instructions(user) m, c = get_message( "LOGIN_EXPIRED", email=user.email, within=_security.login_within ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.login_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("login")) login_user(user, authn_via=["token"]) after_this_request(_commit) if _security.redirect_behavior == "spa": return redirect( get_url(_security.post_login_view, qparams=user.get_redirect_qparams()) ) do_flash(*get_message("PASSWORDLESS_LOGIN_SUCCESSFUL")) return redirect(get_post_login_redirect()) @unauth_csrf(fall_through=True) def send_confirmation(): """View function which sends confirmation instructions.""" form_class = _security.send_confirmation_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_confirmation_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("CONFIRMATION_REQUEST", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_CONFIRMATION_TEMPLATE"), send_confirmation_form=form, **_ctx("send_confirmation") ) def confirm_email(token): """View function which handles a email confirmation request.""" expired, invalid, user = confirm_email_token_status(token) if not user or invalid: m, c = get_message("INVALID_CONFIRMATION_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.confirm_error_view, qparams={c: m})) do_flash(m, c) return redirect( 
get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) already_confirmed = user.confirmed_at is not None if expired or already_confirmed: if already_confirmed: m, c = get_message("ALREADY_CONFIRMED") else: send_confirmation_instructions(user) m, c = get_message( "CONFIRMATION_EXPIRED", email=user.email, within=_security.confirm_email_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.confirm_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect( get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) confirm_user(user) after_this_request(_commit) if user != current_user: logout_user() if config_value("AUTO_LOGIN_AFTER_CONFIRM"): # N.B. this is a (small) security risk if email went to wrong place. # and you have the LOGIN_WITH_CONFIRMATION flag since in that case # you can be logged in and doing stuff - but another person could # get the email. if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="confirm") login_user(user, authn_via=["confirm"]) m, c = get_message("EMAIL_CONFIRMED") if _security.redirect_behavior == "spa": return redirect( get_url( _security.post_confirm_view, qparams=user.get_redirect_qparams({c: m}) ) ) do_flash(m, c) return redirect( get_url(_security.post_confirm_view) or get_url( _security.post_login_view if config_value("AUTO_LOGIN_AFTER_CONFIRM") else _security.login_url ) ) @anonymous_user_required @unauth_csrf(fall_through=True) def forgot_password(): """View function that handles a forgotten password request.""" form_class = _security.forgot_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_reset_password_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("PASSWORD_RESET_REQUEST", 
email=form.user.email)) if _security._want_json(request): return base_render_json(form, include_user=False) return _security.render_template( config_value("FORGOT_PASSWORD_TEMPLATE"), forgot_password_form=form, **_ctx("forgot_password") ) @anonymous_user_required @unauth_csrf(fall_through=True) def reset_password(token): """View function that handles a reset password request. This is usually called via GET as part of an email link and redirects to a reset-password form It is called via POST to actually update the password (and then redirects to a post reset/login view) If in either case the token is either invalid or expired it redirects to the 'forgot-password' form. In the case of non-form based configuration: For GET normal case - redirect to RESET_VIEW?token={token}&email={email} For GET invalid case - redirect to RESET_ERROR_VIEW?error={error}&email={email} For POST normal/successful case - return 200 with new authentication token For POST error case return 400 with form.errors """ expired, invalid, user = reset_password_token_status(token) form_class = _security.reset_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) form.user = user if request.method == "GET": if not user or invalid: m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.reset_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("forgot_password")) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.reset_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("forgot_password")) # All good - for SPA - redirect to the ``reset_view`` if 
_security.redirect_behavior == "spa": return redirect( get_url( _security.reset_view, qparams=user.get_redirect_qparams({"token": token}), ) ) # for forms - render the reset password form return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) # This is the POST case. m = None if not user or invalid: invalid = True m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if not _security._want_json(request): do_flash(m, c) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if not _security._want_json(request): do_flash(m, c) if invalid or expired: if _security._want_json(request): return _security._render_json(json_error_response(m), 400, None, None) else: return redirect(url_for_security("forgot_password")) if form.validate_on_submit(): after_this_request(_commit) update_password(user, form.password.data) if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login(user, primary_authn_via="reset") login_user(user, authn_via=["reset"]) if _security._want_json(request): login_form = _security.login_form(MultiDict({"email": user.email})) setattr(login_form, "user", user) return base_render_json(login_form, include_auth_token=True) else: do_flash(*get_message("PASSWORD_RESET")) return redirect( get_url(_security.post_reset_view) or get_url(_security.post_login_view) ) # validation failure case - for forms - we try again including the token # for non-forms - we just return errors and assume caller remembers token. 
if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) @auth_required("basic", "token", "session") def change_password(): """View function which handles a change password request.""" form_class = _security.change_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): after_this_request(_commit) change_user_password(current_user._get_current_object(), form.new_password.data) if not _security._want_json(request): do_flash(*get_message("PASSWORD_CHANGE")) return redirect( get_url(_security.post_change_view) or get_url(_security.post_login_view) ) if _security._want_json(request): form.user = current_user return base_render_json(form, include_auth_token=True) return _security.render_template( config_value("CHANGE_PASSWORD_TEMPLATE"), change_password_form=form, **_ctx("change_password") ) @unauth_csrf(fall_through=True) def two_factor_setup(): """View function for two-factor setup. This is used both for GET to fetch forms and POST to actually set configuration (and send token). There are 3 cases for setting up: 1) initial login and application requires 2FA 2) changing existing 2FA information 3) user wanting to enable or disable 2FA (assuming application doesn't require it) In order to CHANGE/ENABLE/DISABLE a 2FA information, user must be properly logged in AND must perform a fresh password validation by calling POST /tf-confirm (which sets 'tf_confirmed' in the session). For initial login when 2FA required of course user can't be logged in - in this case we need to have been sent some state via the session as part of login to show a) who and b) that they successfully authenticated. 
""" form_class = _security.two_factor_setup_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if not current_user.is_authenticated: # This is the initial login case # We can also get here from setup if they want to change if not all(k in session for k in ["tf_user_id", "tf_state"]) or session[ "tf_state" ] not in ["setup_from_login", "validating_profile"]: # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) else: # all other cases require user to be logged in and have performed # additional password verification as signified by 'tf_confirmed' # in the session. if "tf_confirmed" not in session: tf_clean_session() return _tf_illegal_state(form, _security.two_factor_confirm_url) user = current_user if form.validate_on_submit(): # Before storing in DB and therefore requiring 2FA we need to # make sure it actually works. # Requiring 2FA is triggered by having BOTH tf_totp_secret and # tf_primary_method in the user record (or having the application # global config TWO_FACTOR_REQUIRED) # Until we correctly validate the 2FA - we don't set primary_method in # user model but use the session to store it. pm = form.setup.data if pm == "disable": tf_disable(user) after_this_request(_commit) do_flash(*get_message("TWO_FACTOR_DISABLED")) if not _security._want_json(request): return redirect(get_url(_security.post_login_view)) else: return base_render_json(form) # Regenerate the TOTP secret on every call of 2FA setup unless it is # within the same session and method (e.g. 
upon entering the phone number) if pm != session.get("tf_primary_method", None): session["tf_totp_secret"] = _security._totp_factory.generate_totp_secret() session["tf_primary_method"] = pm session["tf_state"] = "validating_profile" new_phone = form.phone.data if len(form.phone.data) > 0 else None if new_phone: user.tf_phone_number = new_phone _datastore.put(user) after_this_request(_commit) # This form is sort of bizarre - for SMS and authenticator # you select, then get more info, and submit again. # For authenticator of course, we don't actually send anything # and for SMS it is the second time around that we get the phone number if pm == "email" or (pm == "sms" and new_phone): msg = user.tf_send_security_token( method=pm, totp_secret=session["tf_totp_secret"], phone_number=getattr(user, "tf_phone_number", None), ) if msg: # send code didn't work form.setup.errors = list() form.setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) code_form = _security.two_factor_verify_code_form() if not _security._want_json(request): return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), chosen_method=pm, **_ctx("tf_setup") ) # We get here on GET and POST with failed validation. 
# For things like phone number - we've already done one POST # that succeeded and now if failed - so retain the initial info if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() choices = config_value("TWO_FACTOR_ENABLED_METHODS") if not config_value("TWO_FACTOR_REQUIRED"): choices.append("disable") return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=choices, chosen_method=form.setup.data, two_factor_required=config_value("TWO_FACTOR_REQUIRED"), **_ctx("tf_setup") ) @unauth_csrf(fall_through=True) def two_factor_token_validation(): """View function for two-factor token validation Two cases: 1) normal login case - everything setup correctly; normal 2FA validation In this case - user not logged in - but 'tf_state' == 'ready' or 'validating_profile' 2) validating after CHANGE/ENABLE 2FA. In this case user logged in/authenticated they must have 'tf_confirmed' set meaning they re-entered their passwd """ form_class = _security.two_factor_verify_code_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) changing = current_user.is_authenticated if not changing: # This is the normal login case if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] not in ["ready", "validating_profile"] or ( session["tf_state"] == "validating_profile" and "tf_primary_method" not in session ) ): # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) if session["tf_state"] == "ready": pm = user.tf_primary_method totp_secret = user.tf_totp_secret else: pm = session["tf_primary_method"] 
totp_secret = session["tf_totp_secret"] else: if ( not all( k in session for k in ["tf_confirmed", "tf_state", "tf_primary_method"] ) or session["tf_state"] != "validating_profile" ): tf_clean_session() # logout since this seems like attack-ish/logic error logout_user() return _tf_illegal_state(form, _security.login_url) pm = session["tf_primary_method"] totp_secret = session["tf_totp_secret"] form.user = current_user setattr(form, "primary_method", pm) setattr(form, "tf_totp_secret", totp_secret) if form.validate_on_submit(): # Success - log in user and clear all session variables completion_message = complete_two_factor_process( form.user, pm, totp_secret, changing, session.pop("tf_remember_login", None) ) after_this_request(_commit) if not _security._want_json(request): do_flash(*get_message(completion_message)) return redirect(get_post_login_redirect()) # GET or not successful POST if _security._want_json(request): return base_render_json(form) # if we were trying to validate a new method if changing: setup_form = _security.two_factor_setup_form() return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=setup_form, two_factor_verify_code_form=form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), **_ctx("tf_setup") ) # if we were trying to validate an existing method else: rescue_form = _security.two_factor_rescue_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_rescue_form=rescue_form, two_factor_verify_code_form=form, problem=None, **_ctx("tf_token_validation") ) @anonymous_user_required @unauth_csrf(fall_through=True) def two_factor_rescue(): """ Function that handles a situation where user can't enter his two-factor validation code User must have already provided valid username/password. 
User must have already established 2FA """ form_class = _security.two_factor_rescue_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] != "ready" ): tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) rproblem = "" if form.validate_on_submit(): problem = form.data["help_setup"] rproblem = problem # if the problem is that user can't access his device, w # e send him code through mail if problem == "lost_device": msg = form.user.tf_send_security_token( method="email", totp_secret=form.user.tf_totp_secret, phone_number=getattr(form.user, "tf_phone_number", None), ) if msg: rproblem = "" form.help_setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) # send app provider a mail message regarding trouble elif problem == "no_mail_access": _security._send_mail( config_value("EMAIL_SUBJECT_TWO_FACTOR_RESCUE"), config_value("TWO_FACTOR_RESCUE_MAIL"), "two_factor_rescue", user=form.user, ) else: return "", 404 if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_verify_code_form=code_form, two_factor_rescue_form=form, rescue_mail=config_value("TWO_FACTOR_RESCUE_MAIL"), problem=rproblem, **_ctx("tf_token_validation") ) @auth_required("basic", "session", "token") def two_factor_verify_password(): """View function which handles a password verification request.""" form_class = _security.two_factor_verify_password_form if request.is_json: form = 
form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form called verify_and_update_password() after_this_request(_commit) session["tf_confirmed"] = True m, c = get_message("TWO_FACTOR_PASSWORD_CONFIRMATION_DONE") if not _security._want_json(request): do_flash(m, c) return redirect(url_for_security("two_factor_setup")) else: return _security._render_json(json_error_response(m), 400, None, None) if _security._want_json(request): assert form.user == current_user # form.user = current_user return base_render_json(form) return _security.render_template( config_value("TWO_FACTOR_VERIFY_PASSWORD_TEMPLATE"), two_factor_verify_password_form=form, **_ctx("tf_verify_password") ) @unauth_csrf(fall_through=True) def two_factor_qrcode(): if current_user.is_authenticated: user = current_user else: if "tf_user_id" not in session: abort(404) user = _datastore.get_user(session["tf_user_id"]) if not user: # Seems like we should be careful here if user_id is gone. tf_clean_session() abort(404) if "authenticator" not in config_value("TWO_FACTOR_ENABLED_METHODS"): return abort(404) if ( "tf_primary_method" not in session or session["tf_primary_method"] != "authenticator" ): return abort(404) totp = user.tf_totp_secret if "tf_totp_secret" in session: totp = session["tf_totp_secret"] try: import pyqrcode # By convention, the URI should have the username that the user # logs in with. username = user.calc_username() url = pyqrcode.create( _security._totp_factory.get_totp_uri( username if username else "Unknown", totp ) ) except ImportError: # For TWO_FACTOR - this should have been checked at app init. 
raise from io import BytesIO stream = BytesIO() url.svg(stream, scale=3) return ( stream.getvalue(), 200, { "Content-Type": "image/svg+xml", "Cache-Control": "no-cache, no-store, must-revalidate", "Pragma": "no-cache", "Expires": "0", }, ) def _tf_illegal_state(form, redirect_to): m, c = get_message("TWO_FACTOR_PERMISSION_DENIED") if not _security._want_json(request): do_flash(m, c) return redirect(get_url(redirect_to)) else: return _security._render_json(json_error_response(m), 400, None, None) def create_blueprint(app, state, import_name, json_encoder=None): """Creates the security extension blueprint""" bp = Blueprint( state.blueprint_name, import_name, url_prefix=state.url_prefix, subdomain=state.subdomain, template_folder="templates", ) if json_encoder: bp.json_encoder = json_encoder if state.logout_methods is not None: bp.route(state.logout_url, methods=state.logout_methods, endpoint="logout")( logout ) if state.passwordless: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(send_login) bp.route( state.login_url + slash_url_suffix(state.login_url, "<token>"), endpoint="token_login", )(token_login) elif config_value("US_SIGNIN_REPLACES_LOGIN", app=app): bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(us_signin) else: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(login) bp.route(state.verify_url, methods=["GET", "POST"], endpoint="verify")(verify) if state.unified_signin: bp.route(state.us_signin_url, methods=["GET", "POST"], endpoint="us_signin")( us_signin ) bp.route( state.us_signin_send_code_url, methods=["GET", "POST"], endpoint="us_signin_send_code", )(us_signin_send_code) bp.route(state.us_setup_url, methods=["GET", "POST"], endpoint="us_setup")( us_setup ) bp.route( state.us_setup_url + slash_url_suffix(state.us_setup_url, "<token>"), methods=["GET", "POST"], endpoint="us_setup_validate", )(us_setup_validate) # Freshness verification if config_value("FRESHNESS", app=app).total_seconds() >= 
0: bp.route( state.us_verify_url, methods=["GET", "POST"], endpoint="us_verify" )(us_verify) bp.route( state.us_verify_send_code_url, methods=["GET", "POST"], endpoint="us_verify_send_code", )(us_verify_send_code) bp.route(state.us_verify_link_url, methods=["GET"], endpoint="us_verify_link")( us_verify_link ) bp.route( state.us_qrcode_url + slash_url_suffix(state.us_setup_url, "<token>"), endpoint="us_qrcode", )(us_qrcode) if state.two_factor: tf_token_validation = "two_factor_token_validation" tf_qrcode = "two_factor_qrcode" bp.route( state.two_factor_setup_url, methods=["GET", "POST"], endpoint="two_factor_setup", )(two_factor_setup) bp.route( state.two_factor_token_validation_url, methods=["GET", "POST"], endpoint=tf_token_validation, )(two_factor_token_validation) bp.route(state.two_factor_qrcode_url, endpoint=tf_qrcode)(two_factor_qrcode) bp.route( state.two_factor_rescue_url, methods=["GET", "POST"], endpoint="two_factor_rescue", )(two_factor_rescue) bp.route( state.two_factor_confirm_url, methods=["GET", "POST"], endpoint="two_factor_verify_password", )(two_factor_verify_password) if state.registerable: bp.route(state.register_url, methods=["GET", "POST"], endpoint="register")( register ) if state.recoverable: bp.route(state.reset_url, methods=["GET", "POST"], endpoint="forgot_password")( forgot_password ) bp.route( state.reset_url + slash_url_suffix(state.reset_url, "<token>"), methods=["GET", "POST"], endpoint="reset_password", )(reset_password) if state.changeable: bp.route(state.change_url, methods=["GET", "POST"], endpoint="change_password")( change_password ) if state.confirmable: bp.route( state.confirm_url, methods=["GET", "POST"], endpoint="send_confirmation" )(send_confirmation) bp.route( state.confirm_url + slash_url_suffix(state.confirm_url, "<token>"), methods=["GET", "POST"], endpoint="confirm_email", )(confirm_email) return bp
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1891_2
crossvul-python_data_good_1686_0
"""Base Tornado handlers for the notebook server.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import functools import json import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses try: from urllib.parse import urlparse # Py 3 except ImportError: from urlparse import urlparse # Py 2 from jinja2 import TemplateNotFound from tornado import web from tornado import gen from tornado.log import app_log import IPython from IPython.utils.sysinfo import get_sys_info from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden, url_path_join, url_escape from IPython.html.services.security import csp_report_uri #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') sys_info = json.dumps(get_sys_info()) class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @property def content_security_policy(self): """The default Content-Security-Policy header Can be overridden by defining Content-Security-Policy in settings['headers'] """ return '; '.join([ "frame-ancestors 'self'", # Make sure the report-uri is relative to the base_url "report-uri " + url_path_join(self.base_url, csp_report_uri), ]) def set_default_headers(self): headers = self.settings.get('headers', {}) if "Content-Security-Policy" not in headers: headers["Content-Security-Policy"] = self.content_security_policy # Allow for overriding headers for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception as e: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for 
example, so just ignore) self.log.debug(e) def clear_login_cookie(self): self.clear_cookie(self.cookie_name) def get_current_user(self): if self.login_handler is None: return 'anonymous' return self.login_handler.get_user(self) @property def cookie_name(self): default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() return (user and not user == 'anonymous') @property def login_handler(self): """Return the login handler for this application, if any.""" return self.settings.get('login_handler_class', None) @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ if self.login_handler is None: return False return bool(self.login_handler.login_available(self.settings)) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. 
""" @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get('jinja_template_vars', {}) #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get('version_hash', '') @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') @property def default_url(self): return self.settings.get('default_url', '') @property def ws_url(self): return self.settings.get('websocket_url', '') @property def contents_js_source(self): self.log.debug("Using contents: %s", self.settings.get('contents_js_source', 'services/contents')) return self.settings.get('contents_js_source', 'services/contents') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def contents_manager(self): return self.settings['contents_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def terminal_manager(self): return self.settings['terminal_manager'] @property def kernel_spec_manager(self): return self.settings['kernel_spec_manager'] @property def config_manager(self): return self.settings['config_manager'] #--------------------------------------------------------------- # CORS 
#--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". 
if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, sys_info=sys_info, contents_js_source=self.contents_js_source, version_hash=self.version_hash, **self.jinja_template_vars ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
body = self.request.body.strip().decode(u'utf-8') try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u'Invalid JSON in body of request') return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') message = '' status_message = responses.get(status_code, 'Unknown HTTP Error') if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header('Content-Type', 'text/html') # render the template try: html = self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) html = self.render_template('error.html', **ns) self.write(html) class APIHandler(IPythonHandler): """Base class for API handlers""" def check_origin(self): """Check Origin for cross-site API requests. Copied from WebSocket with changes: - allow unspecified host/origin (e.g. scripts) """ if self.allow_origin == '*': return True host = self.request.headers.get("Host") origin = self.request.headers.get("Origin") # If no header is provided, assume it comes from a script/curl. # We are only concerned with cross-site browser stuff here. 
if origin is None or host is None: return True origin = origin.lower() origin_host = urlparse(origin).netloc # OK if origin matches host if origin_host == host: return True # Check CORS headers if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(self.allow_origin_pat.match(origin)) else: # No CORS headers deny the request allow = False if not allow: self.log.warn("Blocking Cross Origin API request. Origin: %s, Host: %s", origin, host, ) return allow def prepare(self): if not self.check_origin(): raise web.HTTPError(404) return super(APIHandler, self).prepare() @property def content_security_policy(self): csp = '; '.join([ super(APIHandler, self).content_security_policy, "default-src 'none'", ]) return csp def finish(self, *args, **kwargs): self.set_header('Content-Type', 'application/json') return super(APIHandler, self).finish(*args, **kwargs) class Template404(IPythonHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @web.authenticated def get(self, path): if os.path.splitext(path)[1] == '.ipynb': name = path.rsplit('/', 1)[-1] self.set_header('Content-Type', 'application/json') self.set_header('Content-Disposition','attachment; filename="%s"' % name) return web.StaticFileHandler.get(self, path) def set_headers(self): super(AuthenticatedFileHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. 
""" abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. """ @functools.wraps(method) @gen.coroutine def wrapper(self, *args, **kwargs): try: result = yield gen.maybe_future(method(self, *args, **kwargs)) except web.HTTPError as e: self.set_header('Content-Type', 'application/json') status = e.status_code message = e.log_message self.log.warn(message) self.set_status(e.status_code) reply = dict(message=message, reason=e.reason) self.finish(json.dumps(reply)) except Exception: self.set_header('Content-Type', 'application/json') self.log.error("Unhandled error in API request", exc_info=True) status = 500 message = "Unknown server error" t, value, tb = sys.exc_info() self.set_status(status) tb_text = ''.join(traceback.format_exception(t, value, tb)) reply = dict(message=message, reason=None, traceback=tb_text) self.finish(json.dumps(reply)) else: # FIXME: can use regular return in generators in py3 raise gen.Return(result) return wrapper #----------------------------------------------------------------------------- # File handler #----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(IPythonHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once 
_static_paths = {} def set_headers(self): super(FileFindHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments or \ any(self.request.path.startswith(path) for path in self.no_cache_paths): self.set_header("Cache-Control", "no-cache") def initialize(self, path, default_filename=None, no_cache_paths=None): self.no_cache_paths = no_cache_paths or [] if isinstance(path, string_types): path = [path] self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename def compute_etag(self): return None @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return '' cls._static_paths[path] = abspath return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) for root in self.root: if (absolute_path + os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class APIVersionHandler(APIHandler): @json_errors def get(self): # not authenticated, so give as few info as possible self.finish(json.dumps({"version":IPython.__version__})) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ def get(self): self.redirect(self.request.uri.rstrip('/')) post = put = get class FilesRedirectHandler(IPythonHandler): """Handler for redirecting relative URLs to the /files/ handler""" @staticmethod def redirect_to_files(self, path): """make redirect logic a reusable static method so it can be called from other handlers. 
""" cm = self.contents_manager if cm.dir_exists(path): # it's a *directory*, redirect to /tree url = url_path_join(self.base_url, 'tree', path) else: orig_path = path # otherwise, redirect to /files parts = path.split('/') if not cm.file_exists(path=path) and 'files' in parts: # redirect without files/ iff it would 404 # this preserves pre-2.0-style 'files/' links self.log.warn("Deprecated files/ URL: %s", orig_path) parts.remove('files') path = '/'.join(parts) if not cm.file_exists(path=path): raise web.HTTPError(404) url = url_path_join(self.base_url, 'files', path) url = url_escape(url) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) def get(self, path=''): return self.redirect_to_files(self, path) #----------------------------------------------------------------------------- # URL pattern fragments for re-use #----------------------------------------------------------------------------- # path matches any number of `/foo[/bar...]` or just `/` or '' path_regex = r"(?P<path>(?:(?:/[^/]+)+|/?))" #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler), (r"api", APIVersionHandler) ]
./CrossVul/dataset_final_sorted/CWE-352/py/good_1686_0
crossvul-python_data_bad_114_5
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_5
crossvul-python_data_bad_1654_4
# -*- coding: utf-8 -*- from __future__ import with_statement import json import datetime from cms.utils.urlutils import admin_reverse from djangocms_text_ckeditor.cms_plugins import TextPlugin from djangocms_text_ckeditor.models import Text from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.sites import site from django.contrib.auth.models import Permission, AnonymousUser from django.contrib.sites.models import Site from django.core.urlresolvers import reverse from django.http import (Http404, HttpResponseBadRequest, HttpResponseForbidden, HttpResponse, QueryDict, HttpResponseNotFound) from django.utils.datastructures import MultiValueDictKeyError from django.utils.encoding import smart_str from django.utils import timezone from django.utils.six.moves.urllib.parse import urlparse from cms.admin.change_list import CMSChangeList from cms.admin.forms import PageForm, AdvancedSettingsForm from cms.admin.pageadmin import PageAdmin from cms.admin.permissionadmin import PagePermissionInlineAdmin from cms.api import create_page, create_title, add_plugin, assign_user_to_page, publish_page from cms.constants import PLUGIN_MOVE_ACTION from cms.models import UserSettings, StaticPlaceholder from cms.models.pagemodel import Page from cms.models.permissionmodels import GlobalPagePermission, PagePermission from cms.models.placeholdermodel import Placeholder from cms.models.pluginmodel import CMSPlugin from cms.models.titlemodels import Title from cms.test_utils import testcases as base from cms.test_utils.testcases import CMSTestCase, URL_CMS_PAGE_DELETE, URL_CMS_PAGE, URL_CMS_TRANSLATION_DELETE from cms.test_utils.util.context_managers import SettingsOverride from cms.test_utils.util.fuzzy_int import FuzzyInt from cms.utils import get_cms_setting from cms.utils.compat import DJANGO_1_4, DJANGO_1_6 from cms.utils.compat.dj import get_user_model, force_unicode class AdminTestsBase(CMSTestCase): @property def 
admin_class(self): return site._registry[Page] def _get_guys(self, admin_only=False, use_global_permissions=True): admiN_user = self.get_superuser() if admin_only: return admiN_user USERNAME = 'test' if get_user_model().USERNAME_FIELD == 'email': normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', 'test@test.com') else: normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', USERNAME) normal_guy.is_staff = True normal_guy.is_active = True normal_guy.save() normal_guy.user_permissions = Permission.objects.filter( codename__in=['change_page', 'change_title', 'add_page', 'add_title', 'delete_page', 'delete_title'] ) if use_global_permissions: gpp = GlobalPagePermission.objects.create( user=normal_guy, can_change=True, can_delete=True, can_change_advanced_settings=False, can_publish=True, can_change_permissions=False, can_move_page=True, ) gpp.sites = Site.objects.all() return admiN_user, normal_guy class AdminTestCase(AdminTestsBase): def test_extension_not_in_admin(self): admin_user, staff = self._get_guys() with self.login_user_context(admin_user): request = self.get_request('/admin/cms/page/1/', 'en',) response = site.index(request) self.assertNotContains(response, '/mytitleextension/') self.assertNotContains(response, '/mypageextension/') def test_permissioned_page_list(self): """ Makes sure that a user with restricted page permissions can view the page list. 
""" admin_user, normal_guy = self._get_guys(use_global_permissions=False) current_site = Site.objects.get(pk=1) page = create_page("Test page", "nav_playground.html", "en", site=current_site, created_by=admin_user) PagePermission.objects.create(page=page, user=normal_guy) with self.login_user_context(normal_guy): resp = self.client.get(URL_CMS_PAGE) self.assertEqual(resp.status_code, 200) def test_edit_does_not_reset_page_adv_fields(self): """ Makes sure that if a non-superuser with no rights to edit advanced page fields edits a page, those advanced fields are not touched. """ OLD_PAGE_NAME = 'Test Page' NEW_PAGE_NAME = 'Test page 2' REVERSE_ID = 'Test' OVERRIDE_URL = 'my/override/url' admin_user, normal_guy = self._get_guys() current_site = Site.objects.get(pk=1) # The admin creates the page page = create_page(OLD_PAGE_NAME, "nav_playground.html", "en", site=current_site, created_by=admin_user) page.reverse_id = REVERSE_ID page.save() title = page.get_title_obj() title.has_url_overwrite = True title.path = OVERRIDE_URL title.save() self.assertEqual(page.get_title(), OLD_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) self.assertEqual(title.overwrite_url, OVERRIDE_URL) # The user edits the page (change the page name for ex.) 
page_data = { 'title': NEW_PAGE_NAME, 'slug': page.get_slug(), 'language': title.language, 'site': page.site.pk, 'template': page.template, 'pagepermission_set-TOTAL_FORMS': 0, 'pagepermission_set-INITIAL_FORMS': 0, 'pagepermission_set-MAX_NUM_FORMS': 0, 'pagepermission_set-2-TOTAL_FORMS': 0, 'pagepermission_set-2-INITIAL_FORMS': 0, 'pagepermission_set-2-MAX_NUM_FORMS': 0 } # required only if user haves can_change_permission with self.login_user_context(normal_guy): resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True) self.assertEqual(resp.status_code, 200) self.assertTemplateNotUsed(resp, 'admin/login.html') page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_title(), NEW_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) title = page.get_title_obj() self.assertEqual(title.overwrite_url, OVERRIDE_URL) # The admin edits the page (change the page name for ex.) page_data = { 'title': OLD_PAGE_NAME, 'slug': page.get_slug(), 'language': title.language, 'site': page.site.pk, 'template': page.template, 'reverse_id': page.reverse_id, 'pagepermission_set-TOTAL_FORMS': 0, # required only if user haves can_change_permission 'pagepermission_set-INITIAL_FORMS': 0, 'pagepermission_set-MAX_NUM_FORMS': 0, 'pagepermission_set-2-TOTAL_FORMS': 0, 'pagepermission_set-2-INITIAL_FORMS': 0, 'pagepermission_set-2-MAX_NUM_FORMS': 0 } with self.login_user_context(admin_user): resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True) self.assertEqual(resp.status_code, 200) self.assertTemplateNotUsed(resp, 'admin/login.html') page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_title(), OLD_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) title = page.get_title_obj() self.assertEqual(title.overwrite_url, OVERRIDE_URL) def test_edit_does_not_reset_apphook(self): """ Makes sure that if a non-superuser with no rights to edit advanced page fields edits a page, those advanced fields are not touched. 
""" OLD_PAGE_NAME = 'Test Page' NEW_PAGE_NAME = 'Test page 2' REVERSE_ID = 'Test' APPLICATION_URLS = 'project.sampleapp.urls' admin_user, normal_guy = self._get_guys() current_site = Site.objects.get(pk=1) # The admin creates the page page = create_page(OLD_PAGE_NAME, "nav_playground.html", "en", site=current_site, created_by=admin_user) page.reverse_id = REVERSE_ID page.save() title = page.get_title_obj() title.has_url_overwrite = True title.save() page.application_urls = APPLICATION_URLS page.save() self.assertEqual(page.get_title(), OLD_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) self.assertEqual(page.application_urls, APPLICATION_URLS) # The user edits the page (change the page name for ex.) page_data = { 'title': NEW_PAGE_NAME, 'slug': page.get_slug(), 'language': title.language, 'site': page.site.pk, 'template': page.template, 'pagepermission_set-TOTAL_FORMS': 0, 'pagepermission_set-INITIAL_FORMS': 0, 'pagepermission_set-MAX_NUM_FORMS': 0, 'pagepermission_set-2-TOTAL_FORMS': 0, 'pagepermission_set-2-INITIAL_FORMS': 0, 'pagepermission_set-2-MAX_NUM_FORMS': 0, } with self.login_user_context(normal_guy): resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True) self.assertEqual(resp.status_code, 200) self.assertTemplateNotUsed(resp, 'admin/login.html') page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_title(), NEW_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) self.assertEqual(page.application_urls, APPLICATION_URLS) title = page.get_title_obj() # The admin edits the page (change the page name for ex.) 
page_data = { 'title': OLD_PAGE_NAME, 'slug': page.get_slug(), 'language': title.language, 'site': page.site.pk, 'template': page.template, 'reverse_id': page.reverse_id, } with self.login_user_context(admin_user): resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page.pk, page_data, follow=True) self.assertEqual(resp.status_code, 200) self.assertTemplateNotUsed(resp, 'admin/login.html') resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True) self.assertEqual(resp.status_code, 200) self.assertTemplateNotUsed(resp, 'admin/login.html') page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_title(), OLD_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) self.assertEqual(page.application_urls, '') def test_2apphooks_with_same_namespace(self): PAGE1 = 'Test Page' PAGE2 = 'Test page 2' APPLICATION_URLS = 'project.sampleapp.urls' admin_user, normal_guy = self._get_guys() current_site = Site.objects.get(pk=1) # The admin creates the page page = create_page(PAGE1, "nav_playground.html", "en", site=current_site, created_by=admin_user) page2 = create_page(PAGE2, "nav_playground.html", "en", site=current_site, created_by=admin_user) page.application_urls = APPLICATION_URLS page.application_namespace = "space1" page.save() page2.application_urls = APPLICATION_URLS page2.save() # The admin edits the page (change the page name for ex.) 
page_data = { 'title': PAGE2, 'slug': page2.get_slug(), 'language': 'en', 'site': page.site.pk, 'template': page2.template, 'application_urls': 'SampleApp', 'application_namespace': 'space1', } with self.login_user_context(admin_user): resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page.pk, page_data) self.assertEqual(resp.status_code, 302) self.assertEqual(Page.objects.filter(application_namespace="space1").count(), 1) resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data) self.assertEqual(resp.status_code, 200) page_data['application_namespace'] = 'space2' resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data) self.assertEqual(resp.status_code, 302) def test_delete(self): admin_user = self.get_superuser() create_page("home", "nav_playground.html", "en", created_by=admin_user, published=True) page = create_page("delete-page", "nav_playground.html", "en", created_by=admin_user, published=True) create_page('child-page', "nav_playground.html", "en", created_by=admin_user, published=True, parent=page) body = page.placeholders.get(slot='body') add_plugin(body, 'TextPlugin', 'en', body='text') page.publish('en') with self.login_user_context(admin_user): data = {'post': 'yes'} with self.assertNumQueries(FuzzyInt(300, 407)): response = self.client.post(URL_CMS_PAGE_DELETE % page.pk, data) self.assertRedirects(response, URL_CMS_PAGE) def test_delete_diff_language(self): admin_user = self.get_superuser() create_page("home", "nav_playground.html", "en", created_by=admin_user, published=True) page = create_page("delete-page", "nav_playground.html", "en", created_by=admin_user, published=True) create_page('child-page', "nav_playground.html", "de", created_by=admin_user, published=True, parent=page) body = page.placeholders.get(slot='body') add_plugin(body, 'TextPlugin', 'en', body='text') page.publish('en') with self.login_user_context(admin_user): data = {'post': 'yes'} with self.assertNumQueries(FuzzyInt(300, 
    def test_search_fields(self):
        """Every cms ModelAdmin with search_fields must survive a changelist search query."""
        superuser = self.get_superuser()
        from django.contrib.admin import site
        with self.login_user_context(superuser):
            for model, admin_instance in site._registry.items():
                if model._meta.app_label != 'cms':
                    continue
                if not admin_instance.search_fields:
                    continue
                url = admin_reverse('cms_%s_changelist' % model._meta.module_name)
                response = self.client.get('%s?q=1' % url)
                errmsg = response.content
                self.assertEqual(response.status_code, 200, errmsg)

    def test_delete_translation(self):
        """Deleting single translations ('de', 'es-mx') redirects back to the page list."""
        admin_user = self.get_superuser()
        page = create_page("delete-page-translation", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_title("de", "delete-page-translation-2", page, slug="delete-page-translation-2")
        create_title("es-mx", "delete-page-translation-es", page, slug="delete-page-translation-es")
        with self.login_user_context(admin_user):
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertRedirects(response, URL_CMS_PAGE)
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertRedirects(response, URL_CMS_PAGE)

    def test_change_dates(self):
        """Posting to the dates endpoint updates publication dates, with and without USE_TZ."""
        admin_user, staff = self._get_guys()
        page = create_page('test-page', 'nav_playground.html', 'en')
        page.publish('en')
        draft = page.get_draft_object()

        with self.settings(USE_TZ=False):
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            new_date = timezone.now() - datetime.timedelta(days=1)
            new_end_date = timezone.now() + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(),
                                    original_date.timetuple())
                self.assertEqual(draft.publication_date.timetuple(),
                                 new_date.timetuple())
                self.assertEqual(draft.publication_end_date.timetuple(),
                                 new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(),
                                        original_end_date.timetuple())

        with self.settings(USE_TZ=True):
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            # Compare in local time when USE_TZ is active.
            new_date = timezone.localtime(timezone.now()) - datetime.timedelta(days=1)
            new_end_date = timezone.localtime(timezone.now()) + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(),
                                    original_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_date).timetuple(),
                                 new_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_end_date).timetuple(),
                                 new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(),
                                        original_end_date.timetuple())

    def test_change_template(self):
        """Staff without perms gets 403/404; admin gets 400 for bad template, 200 for valid."""
        admin_user, staff = self._get_guys()
        request = self.get_request('/admin/cms/page/1/', 'en')
        request.method = "POST"
        pageadmin = site._registry[Page]
        with self.login_user_context(staff):
            self.assertRaises(Http404, pageadmin.change_template, request, 1)
            page = create_page('test-page', 'nav_playground.html', 'en')
            response = pageadmin.change_template(request, page.pk)
            self.assertEqual(response.status_code, 403)
        url = admin_reverse('cms_page_change_template', args=(page.pk,))
        with self.login_user_context(admin_user):
            response = self.client.post(url, {'template': 'doesntexist'})
            self.assertEqual(response.status_code, 400)
            response = self.client.post(url, {'template': get_cms_setting('TEMPLATES')[0][0]})
            self.assertEqual(response.status_code, 200)

    def test_get_permissions(self):
        """Anonymous access to the permissions view is redirected to login; admins get 200."""
        page = create_page('test-page', 'nav_playground.html', 'en')
        url = admin_reverse('cms_page_get_permissions', args=(page.pk,))
        response = self.client.get(url)
        if DJANGO_1_6:
            # Django 1.6 renders the login template inline instead of redirecting.
            self.assertEqual(response.status_code, 200)
            self.assertTemplateUsed(response, 'admin/login.html')
        else:
            self.assertEqual(response.status_code, 302)
            self.assertRedirects(response,
                                 '/en/admin/login/?next=/en/admin/cms/page/%s/permissions/' % page.pk)
        admin_user = self.get_superuser()
        with self.login_user_context(admin_user):
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)
            self.assertTemplateNotUsed(response, 'admin/login.html')

    def test_changelist_items(self):
        """CMSChangeList exposes the page tree rooted at the first-level page."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en')
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)
        self.assertEqual(Page.objects.all().count(), 4)

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)
        request = self.get_request(url)

        request.session = {}
        request.user = admin_user

        page_admin = site._registry[Page]

        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))

        cl.set_items(request)

        root_page = cl.get_items()[0]

        self.assertEqual(root_page, first_level_page)
        self.assertEqual(root_page.get_children()[0], second_level_page_top)
        self.assertEqual(root_page.get_children()[1], second_level_page_bottom)
        self.assertEqual(root_page.get_children()[0].get_children()[0], third_level_page)

    def test_changelist_get_results(self):
        """Changelist search narrows result_count while full_result_count stays at drafts."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en', published=True)
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",  # nopyflakes
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",  # nopyflakes
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)
        fourth_level_page = create_page('level23', "nav_playground.html", "en",  # nopyflakes
                                        created_by=admin_user,
                                        parent=self.reload(first_level_page))
        # 5 drafts + 4 public copies of the published pages.
        self.assertEqual(Page.objects.all().count(), 9)

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)
        request = self.get_request(url)

        request.session = {}
        request.user = admin_user

        page_admin = site._registry[Page]

        # full blown page list. only draft pages are taken into account
        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 5)

        # only one unpublished page is returned
        request = self.get_request(url + '?q=level23')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 1)

        # a number of pages matches the query
        request = self.get_request(url + '?q=level2')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 3)
only draft pages are taken into account cl_params = [request, page_admin.model, page_admin.list_display, page_admin.list_display_links, page_admin.list_filter, page_admin.date_hierarchy, page_admin.search_fields, page_admin.list_select_related, page_admin.list_per_page] if hasattr(page_admin, 'list_max_show_all'): # django 1.4 cl_params.append(page_admin.list_max_show_all) cl_params.extend([page_admin.list_editable, page_admin]) cl = CMSChangeList(*tuple(cl_params)) cl.get_results(request) self.assertEqual(cl.full_result_count, 5) self.assertEqual(cl.result_count, 5) # only one unpublished page is returned request = self.get_request(url+'?q=level23') request.session = {} request.user = admin_user cl_params[0] = request cl = CMSChangeList(*tuple(cl_params)) cl.get_results(request) self.assertEqual(cl.full_result_count, 5) self.assertEqual(cl.result_count, 1) # a number of pages matches the query request = self.get_request(url+'?q=level2') request.session = {} request.user = admin_user cl_params[0] = request cl = CMSChangeList(*tuple(cl_params)) cl.get_results(request) self.assertEqual(cl.full_result_count, 5) self.assertEqual(cl.result_count, 3) def test_changelist_tree(self): """ This test checks for proper jstree cookie unquoting. It should be converted to a selenium test to actually test the jstree behaviour. 
Cookie set below is just a forged example (from live session) """ admin_user = self.get_superuser() first_level_page = create_page('level1', 'nav_playground.html', 'en') second_level_page_top = create_page('level21', "nav_playground.html", "en", created_by=admin_user, published=True, parent=first_level_page) second_level_page_bottom = create_page('level22', "nav_playground.html", "en", created_by=admin_user, published=True, parent=self.reload(first_level_page)) third_level_page = create_page('level3', "nav_playground.html", "en", created_by=admin_user, published=True, parent=second_level_page_top) url = admin_reverse('cms_%s_changelist' % Page._meta.module_name) if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin@django-cms.org') else: self.client.login(username='admin', password='admin') self.client.cookies['djangocms_nodes_open'] = 'page_1%2Cpage_2' response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context["open_menu_trees"], [1, 2]) # tests descendants method for the lazy load ajax call url = "%s%d/en/descendants/" % (url, first_level_page.pk) response = self.client.get(url) self.assertEqual(response.status_code, 200) # should include both direct descendant pages self.assertContains(response, 'id="page_%s"' % second_level_page_top.pk) self.assertContains(response, 'id="page_%s"' % second_level_page_bottom.pk) # but not any further down the tree self.assertNotContains(response, 'id="page_%s"' % third_level_page.pk) self.assertNotContains(response, 'None') def test_unihandecode_doesnt_break_404_in_admin(self): self.get_superuser() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin@django-cms.org') else: self.client.login(username='admin', password='admin') response = self.client.get('/en/admin/cms/page/1/?language=en') self.assertEqual(response.status_code, 404) def 
test_tree_displays_in_correct_language(self): ''' Test to prove and protect that the page titles in the tree are displayed in the currently set language. ''' admin_guy, normal_guy = self._get_guys(use_global_permissions=False) site = Site.objects.get(pk=1) en_title = "EN Page" es_title = "ES Pagina" # Create a page in en page = create_page(en_title, "nav_playground.html", "en", site=site, created_by=admin) # Add a es-mx translation for this page create_title("es-mx", es_title, page, slug="es_pagina") url = admin_reverse('cms_%s_changelist' % Page._meta.module_name) url_pat = '<a href="{0}/{1}/preview/"[^>]*>{2}</a>' with self.login_user_context(admin_guy): # Check the EN version of the tree... response = self.client.get(url, {'language': 'en'}) self.assertRegexpMatches(str(response.content), url_pat.format(page.pk, 'en', en_title, )) # Check the ES version of the tree... response = self.client.get(url, {'language': 'es-mx'}) self.assertRegexpMatches(str(response.content), url_pat.format(page.pk, 'es-mx', es_title, )) def test_empty_placeholder_in_correct_language(self): """ Test that Cleaning a placeholder only affect current language contents """ # create some objects page_en = create_page("EmptyPlaceholderTestPage (EN)", "nav_playground.html", "en") ph = page_en.placeholders.get(slot="body") # add the text plugin to the en version of the page add_plugin(ph, "TextPlugin", "en", body="Hello World EN 1") add_plugin(ph, "TextPlugin", "en", body="Hello World EN 2") # creating a de title of the page and adding plugins to it create_title("de", page_en.get_title(), page_en, slug=page_en.get_slug()) add_plugin(ph, "TextPlugin", "de", body="Hello World DE") add_plugin(ph, "TextPlugin", "de", body="Hello World DE 2") add_plugin(ph, "TextPlugin", "de", body="Hello World DE 3") # before cleaning the de placeholder self.assertEqual(ph.get_plugins('en').count(), 2) self.assertEqual(ph.get_plugins('de').count(), 3) admin_user, staff = self._get_guys() with 
class AdminTests(AdminTestsBase):
    # TODO: needs tests for actual permissions, not only superuser/normaluser

    def setUp(self):
        # A single unpublished page shared by all tests in this class.
        self.page = create_page("testpage", "nav_playground.html", "en")

    def get_admin(self):
        """Create and return a saved superuser ('admin')."""
        User = get_user_model()

        fields = dict(email="admin@django-cms.org", is_staff=True, is_superuser=True)

        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "admin"

        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_permless(self):
        """Create and return a staff user with no permissions ('permless')."""
        User = get_user_model()

        fields = dict(email="permless@django-cms.org", is_staff=True)

        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "permless"

        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_page(self):
        return self.page

    def test_change_publish_unpublish(self):
        """publish/unpublish endpoints: 403 without perms, 302 + state change for admins."""
        page = self.get_page()
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 403)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            # Forbidden
            self.assertEqual(response.status_code, 403)
            self.assertFalse(page.is_published('en'))

        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)

            page = self.reload(page)
            self.assertTrue(page.is_published('en'))

            response = self.admin_class.unpublish(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)

            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

    def test_change_status_adds_log_entry(self):
        """Publishing a page must record exactly one admin LogEntry for that page."""
        page = self.get_page()
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            self.assertFalse(LogEntry.objects.count())
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            self.assertEqual(1, LogEntry.objects.count())
            self.assertEqual(page.pk, int(LogEntry.objects.all()[0].object_id))

    def test_change_innavigation(self):
        """Toggling in_navigation: 403 without perms, 404 for unknown pk, toggles for admins."""
        page = self.get_page()
        permless = self.get_permless()
        admin_user = self.get_admin()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            self.assertRaises(Http404, self.admin_class.change_innavigation,
                              request, page.pk + 100)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            old = page.in_navigation
            response = self.admin_class.change_innavigation(request, page.pk)
            # These asserts are for #3589
            self.assertContains(response, 'lang="en"')
            self.assertContains(response, './%s/en/preview/' % page.pk)
            self.assertEqual(response.status_code, 200)
            page = self.reload(page)
            self.assertEqual(old, not page.in_navigation)

    def test_publish_page_requires_perms(self):
        """publish_page returns 403 for a staff user without publish permission."""
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.publish_page(request, Page.objects.all()[0].pk, "en")
            self.assertEqual(response.status_code, 403)

    def test_revert_page(self):
        """Reverting restores the draft title from the public copy without adding rows."""
        self.page.publish('en')
        title = self.page.title_set.get(language='en')
        title.title = 'new'
        title.save()
        self.assertEqual(Title.objects.all().count(), 2)
        self.assertEqual(Page.objects.all().count(), 2)
        with self.login_user_context(self.get_superuser()):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.revert_page(request, Page.objects.all()[0].pk, "en")
            self.assertEqual(response.status_code, 302)
        self.assertEqual(Title.objects.all().count(), 2)
        self.assertEqual(Page.objects.all().count(), 2)
        new_title = Title.objects.get(pk=title.pk)
        self.assertNotEqual(title.title, new_title.title)
        self.assertTrue(title.publisher_is_draft)
        self.assertTrue(new_title.publisher_is_draft)

    def test_revert_page_requires_perms(self):
        """revert_page returns 403 for a staff user without permissions."""
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.revert_page(request, Page.objects.all()[0].pk, 'en')
            self.assertEqual(response.status_code, 403)

    def test_revert_page_redirects(self):
        """A successful revert redirects with the edit-off toolbar parameter appended."""
        admin_user = self.get_admin()
        self.page.publish("en")  # Ensure public copy exists before reverting
        with self.login_user_context(admin_user):
            response = self.client.get(admin_reverse('cms_page_revert_page',
                                                     args=(self.page.pk, 'en')))
            self.assertEqual(response.status_code, 302)
            url = response['Location']
            self.assertTrue(url.endswith('?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')))

    def test_remove_plugin_requires_post(self):
        """A GET to delete_plugin renders the confirmation page (200), not a deletion."""
        ph = Placeholder.objects.create(slot='test')
        plugin = add_plugin(ph, 'TextPlugin', 'en', body='test')
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request()
            response = self.admin_class.delete_plugin(request, plugin.pk)
            self.assertEqual(response.status_code, 200)

    def test_move_plugin(self):
        """move_plugin: 405 on GET, KeyError/ValueError on bad data, 403 without perms, 200 + reload flag for admins."""
        ph = Placeholder.objects.create(slot='test')
        plugin = add_plugin(ph, 'TextPlugin', 'en', body='test')
        page = self.get_page()
        source, target = list(page.placeholders.all())[:2]
        pageplugin = add_plugin(source, 'TextPlugin', 'en', body='test')
        plugin_class = pageplugin.get_plugin_class_instance()
        expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)}
        placeholder = Placeholder.objects.all()[0]
        permless = self.get_permless()
        admin_user = self.get_admin()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 405)
            request = self.get_request(post_data={'not_usable': '1'})
            self.assertRaises(MultiValueDictKeyError, self.admin_class.move_plugin, request)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'ids': plugin.pk})
            self.assertRaises(MultiValueDictKeyError, self.admin_class.move_plugin, request)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': 'invalid-placeholder',
                                                  'plugin_language': 'en'})
            self.assertRaises(ValueError, self.admin_class.move_plugin, request)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.pk,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            self.assertEqual(self.admin_class.move_plugin(request).status_code,
                             HttpResponseForbidden.status_code)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.pk,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(json.loads(response.content.decode('utf8')), expected)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.id,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            self.assertEqual(self.admin_class.move_plugin(request).status_code,
                             HttpResponseForbidden.status_code)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.id,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(json.loads(response.content.decode('utf8')), expected)
    def test_move_language(self):
        """Moving a plugin under a parent of another language re-languages the plugin."""
        page = self.get_page()
        source, target = list(page.placeholders.all())[:2]
        col = add_plugin(source, 'MultiColumnPlugin', 'en')
        sub_col = add_plugin(source, 'ColumnPlugin', 'en', target=col)
        col2 = add_plugin(source, 'MultiColumnPlugin', 'de')
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': sub_col.pk,
                                                  'placeholder_id': source.id,
                                                  'plugin_parent': col2.pk,
                                                  'plugin_language': 'de'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
        sub_col = CMSPlugin.objects.get(pk=sub_col.pk)
        self.assertEqual(sub_col.language, "de")
        self.assertEqual(sub_col.parent_id, col2.pk)

    def test_preview_page(self):
        """preview_page 404s on unknown pk and redirects to the page URL with edit-on params."""
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            self.assertRaises(Http404, self.admin_class.preview_page, request, 404, "en")
        page = self.get_page()
        page.publish("en")
        base_url = page.get_absolute_url()
        with self.login_user_context(permless):
            request = self.get_request('/?public=true')
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            request = self.get_request()
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            # A page on a different site must redirect with an absolute URL.
            current_site = Site.objects.create(domain='django-cms.org', name='django-cms')
            page.site = current_site
            page.save()
            page.publish("en")
            self.assertTrue(page.is_home)
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             'http://django-cms.org%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))

    def test_too_many_plugins_global(self):
        """A global per-placeholder plugin limit of 1 rejects the second add with 400."""
        conf = {
            'body': {
                'limits': {
                    'global': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with SettingsOverride(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_too_many_plugins_type(self):
        """A per-type plugin limit of 1 rejects a second TextPlugin with 400."""
        conf = {
            'body': {
                'limits': {
                    'TextPlugin': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with SettingsOverride(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                    'plugin_parent': '',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_edit_title_dirty_bit(self):
        """Editing title fields through the admin marks the draft dirty for that language."""
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))
        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_edit_title_languages(self):
        # NOTE(review): this test is currently a byte-for-byte duplicate of
        # test_edit_title_dirty_bit — it never exercises a second language.
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))
        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_page_form_leak(self):
        """The add form must not inherit initial values leaked from a prior edit form."""
        language = "en"
        admin_user = self.get_admin()
        request = self.get_request('/', 'en')
        request.user = admin_user
        page = create_page('A', 'nav_playground.html', language, menu_title='menu title')
        page_admin = PageAdmin(Page, site)
        page_admin._current_page = page

        edit_form = page_admin.get_form(request, page)
        add_form = page_admin.get_form(request, None)

        self.assertEqual(edit_form.base_fields['menu_title'].initial, 'menu title')
        self.assertEqual(add_form.base_fields['menu_title'].initial, None)


class NoDBAdminTests(CMSTestCase):
    """Checks on the Page ModelAdmin that need no database fixtures."""

    @property
    def admin_class(self):
        return site._registry[Page]

    def test_lookup_allowed_site__exact(self):
        self.assertTrue(self.admin_class.lookup_allowed('site__exact', '1'))

    def test_lookup_allowed_published(self):
        self.assertTrue(self.admin_class.lookup_allowed('published', value='1'))


class PluginPermissionTests(AdminTestsBase):

    def setUp(self):
        # One page with its first placeholder, shared by the permission tests.
        self._page = create_page('test page', 'nav_playground.html', 'en')
        self._placeholder = self._page.placeholders.all()[0]
dict(email="admin@django-cms.org", is_staff=True, is_active=True) if (User.USERNAME_FIELD != 'email'): fields[User.USERNAME_FIELD] = "admin" admin_user = User(**fields) admin_user.set_password('admin') admin_user.save() return admin_user def _get_page_admin(self): return admin.site._registry[Page] def _give_permission(self, user, model, permission_type, save=True): codename = '%s_%s' % (permission_type, model._meta.object_name.lower()) user.user_permissions.add(Permission.objects.get(codename=codename)) def _give_page_permission_rights(self, user): self._give_permission(user, PagePermission, 'add') self._give_permission(user, PagePermission, 'change') self._give_permission(user, PagePermission, 'delete') def _get_change_page_request(self, user, page): return type('Request', (object,), { 'user': user, 'path': base.URL_CMS_PAGE_CHANGE % page.pk }) def _give_cms_permissions(self, user, save=True): for perm_type in ['add', 'change', 'delete']: for model in [Page, Title]: self._give_permission(user, model, perm_type, False) gpp = GlobalPagePermission.objects.create( user=user, can_change=True, can_delete=True, can_change_advanced_settings=False, can_publish=True, can_change_permissions=False, can_move_page=True, ) gpp.sites = Site.objects.all() if save: user.save() def _create_plugin(self): plugin = add_plugin(self._placeholder, 'TextPlugin', 'en') return plugin def test_plugin_add_requires_permissions(self): """User tries to add a plugin but has no permissions. 
He can add the plugin after he got the permissions""" admin = self._get_admin() self._give_cms_permissions(admin) if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin') else: self.client.login(username='admin', password='admin') url = admin_reverse('cms_page_add_plugin') data = { 'plugin_type': 'TextPlugin', 'placeholder_id': self._placeholder.pk, 'plugin_language': 'en', 'plugin_parent': '', } response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self._give_permission(admin, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_edit_requires_permissions(self): """User tries to edit a plugin but has no permissions. He can edit the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_edit_plugin', args=[plugin.id]) response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'change') response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_edit_wrong_url(self): """User tries to edit a plugin using a random url. 
404 response returned""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') self._give_permission(normal_guy, Text, 'change') url = '%s/edit-plugin/%s/' % (admin_reverse('cms_page_edit_plugin', args=[plugin.id]), plugin.id) response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponseNotFound.status_code) self.assertTrue("Plugin not found" in force_unicode(response.content)) def test_plugin_remove_requires_permissions(self): """User tries to remove a plugin but has no permissions. He can remove the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_delete_plugin', args=[plugin.pk]) data = dict(plugin_id=plugin.id) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'delete') response = self.client.post(url, data) self.assertEqual(response.status_code, 302) def test_plugin_move_requires_permissions(self): """User tries to move a plugin but has no permissions. 
He can move the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_move_plugin') data = dict(plugin_id=plugin.id, placeholder_id=self._placeholder.pk, plugin_parent='', ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'change') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugins_copy_requires_permissions(self): """User tries to copy plugin but has no permissions. He can copy plugins after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_copy_plugins') data = dict(source_plugin_id=plugin.id, source_placeholder_id=self._placeholder.pk, source_language='en', target_language='fr', target_placeholder_id=self._placeholder.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugins_copy_placeholder_ref(self): """User copies a placeholder into a clipboard. A PlaceholderReferencePlugin is created. Afterwards he copies this into a placeholder and the PlaceholderReferencePlugin unpacks its content. 
After that he clear the clipboard""" self.assertEqual(Placeholder.objects.count(), 2) self._create_plugin() self._create_plugin() admin_user = self.get_superuser() clipboard = Placeholder() clipboard.save() self.assertEqual(CMSPlugin.objects.count(), 2) settings = UserSettings(language="fr", clipboard=clipboard, user=admin_user) settings.save() self.assertEqual(Placeholder.objects.count(), 3) if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin@django-cms.org') else: self.client.login(username='admin', password='admin') url = admin_reverse('cms_page_copy_plugins') data = dict(source_plugin_id='', source_placeholder_id=self._placeholder.pk, source_language='en', target_language='en', target_placeholder_id=clipboard.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) clipboard_plugins = clipboard.get_plugins() self.assertEqual(CMSPlugin.objects.count(), 5) self.assertEqual(clipboard_plugins.count(), 1) self.assertEqual(clipboard_plugins[0].plugin_type, "PlaceholderPlugin") placeholder_plugin, _ = clipboard_plugins[0].get_plugin_instance() ref_placeholder = placeholder_plugin.placeholder_ref copied_plugins = ref_placeholder.get_plugins() self.assertEqual(copied_plugins.count(), 2) data = dict(source_plugin_id=placeholder_plugin.pk, source_placeholder_id=clipboard.pk, source_language='en', target_language='fr', target_placeholder_id=self._placeholder.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) plugins = self._placeholder.get_plugins() self.assertEqual(plugins.count(), 4) self.assertEqual(CMSPlugin.objects.count(), 7) self.assertEqual(Placeholder.objects.count(), 4) url = admin_reverse('cms_page_clear_placeholder', args=[clipboard.pk]) with self.assertNumQueries(FuzzyInt(70, 80)): response = self.client.post(url, {'test': 0}) self.assertEqual(response.status_code, 302) 
self.assertEqual(CMSPlugin.objects.count(), 4) self.assertEqual(Placeholder.objects.count(), 3) def test_plugins_copy_language(self): """User tries to copy plugin but has no permissions. He can copy plugins after he got the permissions""" self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD != 'email': self.client.login(username='test', password='test') else: self.client.login(username='test@test.com', password='test@test.com') self.assertEqual(1, CMSPlugin.objects.all().count()) url = admin_reverse('cms_page_copy_language', args=[self._page.pk]) data = dict( source_language='en', target_language='fr', ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) self.assertEqual(2, CMSPlugin.objects.all().count()) def test_page_permission_inline_visibility(self): User = get_user_model() fields = dict(email='user@domain.com', password='user', is_staff=True) if get_user_model().USERNAME_FIELD != 'email': fields[get_user_model().USERNAME_FIELD] = 'user' user = User(**fields) user.save() self._give_page_permission_rights(user) page = create_page('A', 'nav_playground.html', 'en') page_permission = PagePermission.objects.create( can_change_permissions=True, user=user, page=page) request = self._get_change_page_request(user, page) page_admin = PageAdmin(Page, None) page_admin._current_page = page # user has can_change_permission # => must see the PagePermissionInline self.assertTrue( any(type(inline) is PagePermissionInlineAdmin for inline in page_admin.get_inline_instances(request, page if not DJANGO_1_4 else None))) page = Page.objects.get(pk=page.pk) # remove can_change_permission page_permission.can_change_permissions = False page_permission.save() request = 
self._get_change_page_request(user, page) page_admin = PageAdmin(Page, None) page_admin._current_page = page # => PagePermissionInline is no longer visible self.assertFalse( any(type(inline) is PagePermissionInlineAdmin for inline in page_admin.get_inline_instances(request, page if not DJANGO_1_4 else None))) def test_edit_title_is_allowed_for_staff_user(self): """ We check here both the permission on a single page, and the global permissions """ user = self._create_user('user', is_staff=True) another_user = self._create_user('another_user', is_staff=True) page = create_page('A', 'nav_playground.html', 'en') admin_url = reverse("admin:cms_page_edit_title_fields", args=( page.pk, 'en' )) page_admin = PageAdmin(Page, None) page_admin._current_page = page username = getattr(user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) assign_user_to_page(page, user, grant_all=True) username = getattr(user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponse.status_code) self._give_cms_permissions(another_user) username = getattr(another_user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_add_returns_valid_pk_for_plugin(self): admin_user = self._get_admin() self._give_cms_permissions(admin_user) self._give_permission(admin_user, Text, 'add') username = getattr(admin_user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password='admin') url = admin_reverse('cms_page_add_plugin') data = { 'plugin_type': 'TextPlugin', 'placeholder_id': self._placeholder.pk, 'plugin_language': 'en', 'plugin_parent': '', } response = 
self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) self.assertEqual(response['content-type'], 'application/json') pk = response.content.decode('utf8').split("edit-plugin/")[1].split("/")[0] self.assertTrue(CMSPlugin.objects.filter(pk=int(pk)).exists()) class AdminFormsTests(AdminTestsBase): def test_clean_overwrite_url(self): user = AnonymousUser() user.is_superuser = True user.pk = 1 request = type('Request', (object,), {'user': user}) with SettingsOverride(): data = { 'title': 'TestPage', 'slug': 'test-page', 'language': 'en', 'overwrite_url': '/overwrite/url/', 'site': Site.objects.get_current().pk, 'template': get_cms_setting('TEMPLATES')[0][0], 'published': True } form = PageForm(data) self.assertTrue(form.is_valid(), form.errors.as_text()) # WTF? WHY DOES form.save() not handle this stuff??? instance = form.save() instance.permission_user_cache = user instance.permission_advanced_settings_cache = True Title.objects.set_or_create(request, instance, form, 'en') form = PageForm(data, instance=instance) self.assertTrue(form.is_valid(), form.errors.as_text()) def test_missmatching_site_parent_dotsite(self): site0 = Site.objects.create(domain='foo.com', name='foo.com') site1 = Site.objects.create(domain='foo.com', name='foo.com') parent_page = Page.objects.create( template='nav_playground.html', site=site0) new_page_data = { 'title': 'Title', 'slug': 'slug', 'language': 'en', 'site': site1.pk, 'template': get_cms_setting('TEMPLATES')[0][0], 'reverse_id': '', 'parent': parent_page.pk, } form = PageForm(data=new_page_data, files=None) self.assertFalse(form.is_valid()) self.assertIn(u"Site doesn't match the parent's page site", form.errors['__all__']) def test_reverse_id_error_location(self): ''' Test moving the reverse_id validation error to a field specific one ''' # this is the Reverse ID we'll re-use to break things. 
dupe_id = 'p1' curren_site = Site.objects.get_current() create_page('Page 1', 'nav_playground.html', 'en', reverse_id=dupe_id) page2 = create_page('Page 2', 'nav_playground.html', 'en') # Assemble a bunch of data to test the page form page2_data = { 'language': 'en', 'site': curren_site.pk, 'reverse_id': dupe_id, 'template': 'col_two.html', } form = AdvancedSettingsForm(data=page2_data, files=None) self.assertFalse(form.is_valid()) # reverse_id is the only item that is in __all__ as every other field # has it's own clean method. Moving it to be a field error means # __all__ is now not available. self.assertNotIn('__all__', form.errors) # In moving it to it's own field, it should be in form.errors, and # the values contained therein should match these. self.assertIn('reverse_id', form.errors) self.assertEqual(1, len(form.errors['reverse_id'])) self.assertEqual([u'A page with this reverse URL id exists already.'], form.errors['reverse_id']) page2_data['reverse_id'] = "" form = AdvancedSettingsForm(data=page2_data, files=None) self.assertTrue(form.is_valid()) admin_user = self._get_guys(admin_only=True) # reset some of page2_data so we can use cms.api.create_page page2 = page2.reload() page2.site = curren_site page2.save() with self.login_user_context(admin_user): # re-reset the page2_data for the admin form instance. page2_data['reverse_id'] = dupe_id page2_data['site'] = curren_site.pk # post to the admin change form for page 2, and test that the # reverse_id form row has an errors class. Django's admin avoids # collapsing these, so that the error is visible. 
resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page2_data) self.assertContains(resp, '<div class="form-row errors reverse_id">') def test_create_page_type(self): page = create_page('Test', 'static.html', 'en', published=True, reverse_id="home") for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') page.publish('en') self.assertEqual(Page.objects.count(), 2) self.assertEqual(CMSPlugin.objects.count(), 4) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.get( "%s?copy_target=%s&language=%s" % (admin_reverse("cms_page_add_page_type"), page.pk, 'en')) self.assertEqual(response.status_code, 302) self.assertEqual(Page.objects.count(), 3) self.assertEqual(Page.objects.filter(reverse_id="page_types").count(), 1) page_types = Page.objects.get(reverse_id='page_types') url = response.url if hasattr(response, 'url') else response['Location'] expected_url_params = QueryDict( 'target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en' % (page_types.pk, page.pk)) response_url_params = QueryDict(urlparse(url).query) self.assertDictEqual(expected_url_params, response_url_params) response = self.client.get("%s?copy_target=%s&language=%s" % ( admin_reverse("cms_page_add_page_type"), page.pk, 'en'), follow=True) self.assertEqual(response.status_code, 200) # test no page types if no page types there response = self.client.get(admin_reverse('cms_page_add')) self.assertNotContains(response, "page_type") # create out first page type page_data = { 'title': 'type1', 'slug': 'type1', '_save': 1, 'template': 'static.html', 'site': 1, 'language': 'en' } response = self.client.post( "/en/admin/cms/page/add/?target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en" % ( page_types.pk, page.pk), data=page_data) self.assertEqual(response.status_code, 302) self.assertEqual(Page.objects.count(), 4) self.assertEqual(CMSPlugin.objects.count(), 6) 
response = self.client.get(admin_reverse('cms_page_add')) self.assertContains(response, "page_type") # no page types available if you use the copy_target response = self.client.get("%s?copy_target=%s&language=en" % (admin_reverse('cms_page_add'), page.pk)) self.assertNotContains(response, "page_type") def test_render_edit_mode(self): from django.core.cache import cache cache.clear() create_page('Test', 'static.html', 'en', published=True) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') user = self.get_superuser() self.assertEqual(Placeholder.objects.all().count(), 4) with self.login_user_context(user): with self.assertNumQueries(FuzzyInt(40, 66)): output = force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) self.assertIn('<b>Test</b>', output) self.assertEqual(Placeholder.objects.all().count(), 9) self.assertEqual(StaticPlaceholder.objects.count(), 2) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') with self.assertNumQueries(FuzzyInt(40, 60)): output = force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) self.assertIn('<b>Test</b>', output) with self.assertNumQueries(FuzzyInt(18, 48)): force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) with self.assertNumQueries(FuzzyInt(12, 30)): force_unicode(self.client.get('/en/').content) def test_tree_view_queries(self): from django.core.cache import cache cache.clear() for i in range(10): create_page('Test%s' % i, 'col_two.html', 'en', published=True) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') user = self.get_superuser() with self.login_user_context(user): with self.assertNumQueries(FuzzyInt(18, 33)): force_unicode(self.client.get('/en/admin/cms/page/')) def test_smart_link_published_pages(self): admin, staff_guy 
= self._get_guys() page_url = '/en/admin/cms/page/published-pages/' # Not sure how to achieve this with reverse... with self.login_user_context(staff_guy): multi_title_page = create_page('main_title', 'col_two.html', 'en', published=True, overwrite_url='overwritten_url', menu_title='menu_title') title = multi_title_page.get_title_obj() title.page_title = 'page_title' title.save() multi_title_page.save() publish_page(multi_title_page, admin, 'en') # Non ajax call should return a 403 as this page shouldn't be accessed by anything else but ajax queries self.assertEqual(403, self.client.get(page_url).status_code) self.assertEqual(200, self.client.get(page_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest').status_code ) # Test that the query param is working as expected. self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'main_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'menu_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'overwritten_url'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'page_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) class AdminPageEditContentSizeTests(AdminTestsBase): """ System user count influences the size of the page edit page, but the users are only 2 times present on the page The test relates to extra=0 at PagePermissionInlineAdminForm and ViewRestrictionInlineAdmin """ def test_editpage_contentsize(self): """ Expected a username only 2 times in the content, but a relationship between usercount and pagesize """ with SettingsOverride(CMS_PERMISSION=True): admin_user = self.get_superuser() PAGE_NAME = 'TestPage' USER_NAME = 'test_size_user_0' current_site = Site.objects.get(pk=1) page = create_page(PAGE_NAME, "nav_playground.html", 
"en", site=current_site, created_by=admin_user) page.save() self._page = page with self.login_user_context(admin_user): url = base.URL_CMS_PAGE_PERMISSION_CHANGE % self._page.pk response = self.client.get(url) self.assertEqual(response.status_code, 200) old_response_size = len(response.content) old_user_count = get_user_model().objects.count() # create additionals user and reload the page get_user_model().objects.create_user(username=USER_NAME, email=USER_NAME + '@django-cms.org', password=USER_NAME) user_count = get_user_model().objects.count() more_users_in_db = old_user_count < user_count # we have more users self.assertTrue(more_users_in_db, "New users got NOT created") response = self.client.get(url) new_response_size = len(response.content) page_size_grown = old_response_size < new_response_size # expect that the pagesize gets influenced by the useramount of the system self.assertTrue(page_size_grown, "Page size has not grown after user creation") # usernames are only 2 times in content text = smart_str(response.content, response._charset) foundcount = text.count(USER_NAME) # 2 forms contain usernames as options self.assertEqual(foundcount, 2, "Username %s appeared %s times in response.content, expected 2 times" % ( USER_NAME, foundcount))
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1654_4
crossvul-python_data_bad_114_1
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_1
crossvul-python_data_bad_114_4
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_4
crossvul-python_data_good_114_3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # Copyright 2015-2018 Antoni Boucher (antoyo) <bouanto@zoho.com> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. # # pylint complains when using .render() on jinja templates, so we make it shut # up for this whole module. """Handler functions for file:... pages.""" import os from qutebrowser.browser.webkit.network import networkreply from qutebrowser.utils import jinja def get_file_list(basedir, all_files, filterfunc): """Get a list of files filtered by a filter function and sorted by name. Args: basedir: The parent directory of all files. all_files: The list of files to filter and sort. filterfunc: The filter function. Return: A list of dicts. Each dict contains the name and absname keys. """ items = [] for filename in all_files: absname = os.path.join(basedir, filename) if filterfunc(absname): items.append({'name': filename, 'absname': absname}) return sorted(items, key=lambda v: v['name'].lower()) def is_root(directory): """Check if the directory is the root directory. Args: directory: The directory to check. Return: Whether the directory is a root directory or not. 
""" # If you're curious as why this works: # dirname('/') = '/' # dirname('/home') = '/' # dirname('/home/') = '/home' # dirname('/home/foo') = '/home' # basically, for files (no trailing slash) it removes the file part, and # for directories, it removes the trailing slash, so the only way for this # to be equal is if the directory is the root directory. return os.path.dirname(directory) == directory def parent_dir(directory): """Return the parent directory for the given directory. Args: directory: The path to the directory. Return: The path to the parent directory. """ return os.path.normpath(os.path.join(directory, os.pardir)) def dirbrowser_html(path): """Get the directory browser web page. Args: path: The directory path. Return: The HTML of the web page. """ title = "Browse directory: {}".format(path) if is_root(path): parent = None else: parent = parent_dir(path) try: all_files = os.listdir(path) except OSError as e: html = jinja.render('error.html', title="Error while reading directory", url='file:///{}'.format(path), error=str(e)) return html.encode('UTF-8', errors='xmlcharrefreplace') files = get_file_list(path, all_files, os.path.isfile) directories = get_file_list(path, all_files, os.path.isdir) html = jinja.render('dirbrowser.html', title=title, url=path, parent=parent, files=files, directories=directories) return html.encode('UTF-8', errors='xmlcharrefreplace') def handler(request, _operation, _current_url): """Handler for a file:// URL. Args: request: QNetworkRequest to answer to. _operation: The HTTP operation being done. _current_url: The page we're on currently. Return: A QNetworkReply for directories, None for files. """ path = request.url().toLocalFile() try: if os.path.isdir(path): data = dirbrowser_html(path) return networkreply.FixedDataNetworkReply( request, data, 'text/html') return None except UnicodeEncodeError: return None
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_3
crossvul-python_data_good_114_2
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """QtWebEngine specific qute://* handlers and glue code.""" from PyQt5.QtCore import QBuffer, QIODevice from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler, QWebEngineUrlRequestJob) from qutebrowser.browser import qutescheme from qutebrowser.utils import log, qtutils class QuteSchemeHandler(QWebEngineUrlSchemeHandler): """Handle qute://* requests on QtWebEngine.""" def install(self, profile): """Install the handler for qute:// URLs on the given profile.""" profile.installUrlSchemeHandler(b'qute', self) if qtutils.version_check('5.11', compiled=False): # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378 profile.installUrlSchemeHandler(b'chrome-error', self) profile.installUrlSchemeHandler(b'chrome-extension', self) def requestStarted(self, job): """Handle a request for a qute: scheme. This method must be reimplemented by all custom URL scheme handlers. The request is asynchronous and does not need to be handled right away. 
Args: job: QWebEngineUrlRequestJob """ url = job.requestUrl() if url.scheme() in ['chrome-error', 'chrome-extension']: # WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378 job.fail(QWebEngineUrlRequestJob.UrlInvalid) return # Only the browser itself or qute:// pages should access any of those # URLs. # The request interceptor further locks down qute://settings/set. try: initiator = job.initiator() except AttributeError: # Added in Qt 5.11 pass else: if initiator.isValid() and initiator.scheme() != 'qute': log.misc.warning("Blocking malicious request from {} to {}" .format(initiator.toDisplayString(), url.toDisplayString())) job.fail(QWebEngineUrlRequestJob.RequestDenied) return if job.requestMethod() != b'GET': job.fail(QWebEngineUrlRequestJob.RequestDenied) return assert url.scheme() == 'qute' log.misc.debug("Got request for {}".format(url.toDisplayString())) try: mimetype, data = qutescheme.data_for_url(url) except qutescheme.NoHandlerFound: log.misc.debug("No handler found for {}".format( url.toDisplayString())) job.fail(QWebEngineUrlRequestJob.UrlNotFound) except qutescheme.QuteSchemeOSError: # FIXME:qtwebengine how do we show a better error here? log.misc.exception("OSError while handling qute://* URL") job.fail(QWebEngineUrlRequestJob.UrlNotFound) except qutescheme.QuteSchemeError: # FIXME:qtwebengine how do we show a better error here? log.misc.exception("Error while handling qute://* URL") job.fail(QWebEngineUrlRequestJob.RequestFailed) except qutescheme.Redirect as e: qtutils.ensure_valid(e.url) job.redirect(e.url) else: log.misc.debug("Returning {} data".format(mimetype)) # We can't just use the QBuffer constructor taking a QByteArray, # because that somehow segfaults... # https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html buf = QBuffer(parent=self) buf.open(QIODevice.WriteOnly) buf.write(data) buf.seek(0) buf.close() job.reply(mimetype.encode('ascii'), buf)
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_2
crossvul-python_data_bad_114_2
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_2
crossvul-python_data_good_114_0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Backend-independent qute://* code. Module attributes: pyeval_output: The output of the last :pyeval command. _HANDLERS: The handlers registered via decorators. """ import html import json import os import time import textwrap import mimetypes import urllib import collections import base64 try: import secrets except ImportError: # New in Python 3.6 secrets = None import pkg_resources from PyQt5.QtCore import QUrlQuery, QUrl from PyQt5.QtNetwork import QNetworkReply import qutebrowser from qutebrowser.config import config, configdata, configexc, configdiff from qutebrowser.utils import (version, utils, jinja, log, message, docutils, objreg, urlutils) from qutebrowser.misc import objects from qutebrowser.qt import sip pyeval_output = ":pyeval was never called" spawn_output = ":spawn was never called" csrf_token = None _HANDLERS = {} class NoHandlerFound(Exception): """Raised when no handler was found for the given URL.""" pass class QuteSchemeOSError(Exception): """Called when there was an OSError inside a handler.""" pass class QuteSchemeError(Exception): """Exception to signal that a handler should return an ErrorReply. 
Attributes correspond to the arguments in networkreply.ErrorNetworkReply. Attributes: errorstring: Error string to print. error: Numerical error value. """ def __init__(self, errorstring, error): self.errorstring = errorstring self.error = error super().__init__(errorstring) class Redirect(Exception): """Exception to signal a redirect should happen. Attributes: url: The URL to redirect to, as a QUrl. """ def __init__(self, url): super().__init__(url.toDisplayString()) self.url = url class add_handler: # noqa: N801,N806 pylint: disable=invalid-name """Decorator to register a qute://* URL handler. Attributes: _name: The 'foo' part of qute://foo backend: Limit which backends the handler can run with. """ def __init__(self, name, backend=None): self._name = name self._backend = backend self._function = None def __call__(self, function): self._function = function _HANDLERS[self._name] = self.wrapper return function def wrapper(self, *args, **kwargs): """Call the underlying function.""" if self._backend is not None and objects.backend != self._backend: return self.wrong_backend_handler(*args, **kwargs) else: return self._function(*args, **kwargs) def wrong_backend_handler(self, url): """Show an error page about using the invalid backend.""" src = jinja.render('error.html', title="Error while opening qute://url", url=url.toDisplayString(), error='{} is not available with this ' 'backend'.format(url.toDisplayString())) return 'text/html', src def data_for_url(url): """Get the data to show for the given URL. Args: url: The QUrl to show. Return: A (mimetype, data) tuple. """ norm_url = url.adjusted(QUrl.NormalizePathSegments | QUrl.StripTrailingSlash) if norm_url != url: raise Redirect(norm_url) path = url.path() host = url.host() query = urlutils.query_string(url) # A url like "qute:foo" is split as "scheme:path", not "scheme:host". 
log.misc.debug("url: {}, path: {}, host {}".format( url.toDisplayString(), path, host)) if not path or not host: new_url = QUrl() new_url.setScheme('qute') # When path is absent, e.g. qute://help (with no trailing slash) if host: new_url.setHost(host) # When host is absent, e.g. qute:help else: new_url.setHost(path) new_url.setPath('/') if query: new_url.setQuery(query) if new_url.host(): # path was a valid host raise Redirect(new_url) try: handler = _HANDLERS[host] except KeyError: raise NoHandlerFound(url) try: mimetype, data = handler(url) except OSError as e: # FIXME:qtwebengine how to handle this? raise QuteSchemeOSError(e) except QuteSchemeError: raise assert mimetype is not None, url if mimetype == 'text/html' and isinstance(data, str): # We let handlers return HTML as text data = data.encode('utf-8', errors='xmlcharrefreplace') return mimetype, data @add_handler('bookmarks') def qute_bookmarks(_url): """Handler for qute://bookmarks. Display all quickmarks / bookmarks.""" bookmarks = sorted(objreg.get('bookmark-manager').marks.items(), key=lambda x: x[1]) # Sort by title quickmarks = sorted(objreg.get('quickmark-manager').marks.items(), key=lambda x: x[0]) # Sort by name src = jinja.render('bookmarks.html', title='Bookmarks', bookmarks=bookmarks, quickmarks=quickmarks) return 'text/html', src @add_handler('tabs') def qute_tabs(_url): """Handler for qute://tabs. Display information about all open tabs.""" tabs = collections.defaultdict(list) for win_id, window in objreg.window_registry.items(): if sip.isdeleted(window): continue tabbed_browser = objreg.get('tabbed-browser', scope='window', window=win_id) for tab in tabbed_browser.widgets(): if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]: urlstr = tab.url().toDisplayString() tabs[str(win_id)].append((tab.title(), urlstr)) src = jinja.render('tabs.html', title='Tabs', tab_list_by_window=tabs) return 'text/html', src def history_data(start_time, offset=None): """Return history data. 
Arguments: start_time: select history starting from this timestamp. offset: number of items to skip """ # history atimes are stored as ints, ensure start_time is not a float start_time = int(start_time) hist = objreg.get('web-history') if offset is not None: entries = hist.entries_before(start_time, limit=1000, offset=offset) else: # end is 24hrs earlier than start end_time = start_time - 24*60*60 entries = hist.entries_between(end_time, start_time) return [{"url": e.url, "title": html.escape(e.title) or html.escape(e.url), "time": e.atime} for e in entries] @add_handler('history') def qute_history(url): """Handler for qute://history. Display and serve history.""" if url.path() == '/data': try: offset = QUrlQuery(url).queryItemValue("offset") offset = int(offset) if offset else None except ValueError as e: raise QuteSchemeError("Query parameter offset is invalid", e) # Use start_time in query or current time. try: start_time = QUrlQuery(url).queryItemValue("start_time") start_time = float(start_time) if start_time else time.time() except ValueError as e: raise QuteSchemeError("Query parameter start_time is invalid", e) return 'text/html', json.dumps(history_data(start_time, offset)) else: return 'text/html', jinja.render( 'history.html', title='History', gap_interval=config.val.history_gap_interval ) @add_handler('javascript') def qute_javascript(url): """Handler for qute://javascript. Return content of file given as query parameter. 
""" path = url.path() if path: path = "javascript" + os.sep.join(path.split('/')) return 'text/html', utils.read_file(path, binary=False) else: raise QuteSchemeError("No file specified", ValueError()) @add_handler('pyeval') def qute_pyeval(_url): """Handler for qute://pyeval.""" src = jinja.render('pre.html', title='pyeval', content=pyeval_output) return 'text/html', src @add_handler('spawn-output') def qute_spawn_output(_url): """Handler for qute://spawn-output.""" src = jinja.render('pre.html', title='spawn output', content=spawn_output) return 'text/html', src @add_handler('version') @add_handler('verizon') def qute_version(_url): """Handler for qute://version.""" src = jinja.render('version.html', title='Version info', version=version.version(), copyright=qutebrowser.__copyright__) return 'text/html', src @add_handler('plainlog') def qute_plainlog(url): """Handler for qute://plainlog. An optional query parameter specifies the minimum log level to print. For example, qute://log?level=warning prints warnings and errors. Level can be one of: vdebug, debug, info, warning, error, critical. """ if log.ram_handler is None: text = "Log output was disabled." else: level = QUrlQuery(url).queryItemValue('level') if not level: level = 'vdebug' text = log.ram_handler.dump_log(html=False, level=level) src = jinja.render('pre.html', title='log', content=text) return 'text/html', src @add_handler('log') def qute_log(url): """Handler for qute://log. An optional query parameter specifies the minimum log level to print. For example, qute://log?level=warning prints warnings and errors. Level can be one of: vdebug, debug, info, warning, error, critical. 
""" if log.ram_handler is None: html_log = None else: level = QUrlQuery(url).queryItemValue('level') if not level: level = 'vdebug' html_log = log.ram_handler.dump_log(html=True, level=level) src = jinja.render('log.html', title='log', content=html_log) return 'text/html', src @add_handler('gpl') def qute_gpl(_url): """Handler for qute://gpl. Return HTML content as string.""" return 'text/html', utils.read_file('html/license.html') @add_handler('help') def qute_help(url): """Handler for qute://help.""" urlpath = url.path() if not urlpath or urlpath == '/': urlpath = 'index.html' else: urlpath = urlpath.lstrip('/') if not docutils.docs_up_to_date(urlpath): message.error("Your documentation is outdated! Please re-run " "scripts/asciidoc2html.py.") path = 'html/doc/{}'.format(urlpath) if not urlpath.endswith('.html'): try: bdata = utils.read_file(path, binary=True) except OSError as e: raise QuteSchemeOSError(e) mimetype, _encoding = mimetypes.guess_type(urlpath) assert mimetype is not None, url return mimetype, bdata try: data = utils.read_file(path) except OSError: # No .html around, let's see if we find the asciidoc asciidoc_path = path.replace('.html', '.asciidoc') if asciidoc_path.startswith('html/doc/'): asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/') try: asciidoc = utils.read_file(asciidoc_path) except OSError: asciidoc = None if asciidoc is None: raise preamble = textwrap.dedent(""" There was an error loading the documentation! This most likely means the documentation was not generated properly. If you are running qutebrowser from the git repository, please (re)run scripts/asciidoc2html.py and reload this page. If you're running a released version this is a bug, please use :report to report it. Falling back to the plaintext version. 
--------------------------------------------------------------- """) return 'text/plain', (preamble + asciidoc).encode('utf-8') else: return 'text/html', data @add_handler('backend-warning') def qute_backend_warning(_url): """Handler for qute://backend-warning.""" src = jinja.render('backend-warning.html', distribution=version.distribution(), Distribution=version.Distribution, version=pkg_resources.parse_version, title="Legacy backend warning") return 'text/html', src def _qute_settings_set(url): """Handler for qute://settings/set.""" query = QUrlQuery(url) option = query.queryItemValue('option', QUrl.FullyDecoded) value = query.queryItemValue('value', QUrl.FullyDecoded) # https://github.com/qutebrowser/qutebrowser/issues/727 if option == 'content.javascript.enabled' and value == 'false': msg = ("Refusing to disable javascript via qute://settings " "as it needs javascript support.") message.error(msg) return 'text/html', b'error: ' + msg.encode('utf-8') try: config.instance.set_str(option, value, save_yaml=True) return 'text/html', b'ok' except configexc.Error as e: message.error(str(e)) return 'text/html', b'error: ' + str(e).encode('utf-8') @add_handler('settings') def qute_settings(url): """Handler for qute://settings. View/change qute configuration.""" global csrf_token if url.path() == '/set': if url.password() != csrf_token: message.error("Invalid CSRF token for qute://settings!") raise QuteSchemeError("Invalid CSRF token!", QNetworkReply.ContentAccessDenied) return _qute_settings_set(url) # Requests to qute://settings/set should only be allowed from # qute://settings. As an additional security precaution, we generate a CSRF # token to use here. 
if secrets: csrf_token = secrets.token_urlsafe() else: # On Python < 3.6, from secrets.py token = base64.urlsafe_b64encode(os.urandom(32)) csrf_token = token.rstrip(b'=').decode('ascii') src = jinja.render('settings.html', title='settings', configdata=configdata, confget=config.instance.get_str, csrf_token=csrf_token) return 'text/html', src @add_handler('bindings') def qute_bindings(_url): """Handler for qute://bindings. View keybindings.""" bindings = {} defaults = config.val.bindings.default modes = set(defaults.keys()).union(config.val.bindings.commands) modes.remove('normal') modes = ['normal'] + sorted(list(modes)) for mode in modes: bindings[mode] = config.key_instance.get_bindings_for(mode) src = jinja.render('bindings.html', title='Bindings', bindings=bindings) return 'text/html', src @add_handler('back') def qute_back(url): """Handler for qute://back. Simple page to free ram / lazy load a site, goes back on focusing the tab. """ src = jinja.render( 'back.html', title='Suspended: ' + urllib.parse.unquote(url.fragment())) return 'text/html', src @add_handler('configdiff') def qute_configdiff(url): """Handler for qute://configdiff.""" if url.path() == '/old': try: return 'text/html', configdiff.get_diff() except OSError as e: error = (b'Failed to read old config: ' + str(e.strerror).encode('utf-8')) return 'text/plain', error else: data = config.instance.dump_userconfig().encode('utf-8') return 'text/plain', data @add_handler('pastebin-version') def qute_pastebin_version(_url): """Handler that pastebins the version string.""" version.pastebin_version() return 'text/plain', b'Paste called.'
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_0
crossvul-python_data_bad_1654_0
# -*- coding: utf-8 -*- import copy from functools import wraps import json import sys import django from django.contrib.admin.helpers import AdminForm from django.conf import settings from django.contrib import admin, messages from django.contrib.admin.models import LogEntry, CHANGE from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.util import get_deleted_objects from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site, get_current_site from django.core.exceptions import PermissionDenied, ObjectDoesNotExist, ValidationError from django.db import router from django.db.models import Q from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden from django.shortcuts import render_to_response, get_object_or_404 from django.template.context import RequestContext from django.template.defaultfilters import escape from django.utils.translation import ugettext_lazy as _, get_language from django.utils.decorators import method_decorator from django.views.decorators.http import require_POST from cms.admin.change_list import CMSChangeList from cms.admin.dialog.views import get_copy_dialog from cms.admin.forms import (PageForm, AdvancedSettingsForm, PagePermissionForm, PublicationDatesForm) from cms.admin.permissionadmin import (PERMISSION_ADMIN_INLINES, PagePermissionInlineAdmin, ViewRestrictionInlineAdmin) from cms.admin.placeholderadmin import PlaceholderAdminMixin from cms.admin.views import revert_plugins from cms.constants import PAGE_TYPES_ID, PUBLISHER_STATE_PENDING from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, StaticPlaceholder from cms.models.managers import PagePermissionsPermissionManager from cms.plugin_pool import plugin_pool from cms.toolbar_pool import toolbar_pool from cms.utils import helpers, permissions, get_language_from_request, admin as admin_utils, copy_plugins from 
cms.utils.i18n import get_language_list, get_language_tuple, get_language_object, force_language from cms.utils.admin import jsonify_request from cms.utils.compat import DJANGO_1_4 from cms.utils.compat.dj import force_unicode, is_installed from cms.utils.compat.urls import unquote from cms.utils.conf import get_cms_setting from cms.utils.helpers import find_placeholder_relation from cms.utils.permissions import has_global_page_permission, has_generic_permission from cms.utils.plugins import current_site from cms.utils.transaction import wrap_transaction from cms.utils.urlutils import add_url_parameters, admin_reverse require_POST = method_decorator(require_POST) if is_installed('reversion'): from reversion.admin import VersionAdmin as ModelAdmin from reversion import create_revision else: # pragma: no cover from django.contrib.admin import ModelAdmin class ReversionContext(object): def __enter__(self): yield def __exit__(self, exc_type, exc_val, exc_tb): pass def __call__(self, func): """Allows this revision context to be used as a decorator.""" @wraps(func) def do_revision_context(*args, **kwargs): self.__enter__() exception = False try: try: return func(*args, **kwargs) except: exception = True if not self.__exit__(*sys.exc_info()): raise finally: if not exception: self.__exit__(None, None, None) return do_revision_context def create_revision(): return ReversionContext() PUBLISH_COMMENT = "Publish" INITIAL_COMMENT = "Initial version." 
class PageAdmin(PlaceholderAdminMixin, ModelAdmin): form = PageForm search_fields = ('=id', 'title_set__slug', 'title_set__title', 'reverse_id') revision_form_template = "admin/cms/page/history/revision_header.html" recover_form_template = "admin/cms/page/history/recover_header.html" add_general_fields = ['title', 'slug', 'language', 'template'] change_list_template = "admin/cms/page/tree/base.html" list_filter = ['in_navigation', 'template', 'changed_by', 'soft_root'] title_frontend_editable_fields = ['title', 'menu_title', 'page_title'] inlines = PERMISSION_ADMIN_INLINES def get_urls(self): """Get the admin urls """ from django.conf.urls import patterns, url info = "%s_%s" % (self.model._meta.app_label, self.model._meta.module_name) pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__)) url_patterns = patterns( '', pat(r'^([0-9]+)/advanced-settings/$', self.advanced), pat(r'^([0-9]+)/dates/$', self.dates), pat(r'^([0-9]+)/permission-settings/$', self.permissions), pat(r'^([0-9]+)/delete-translation/$', self.delete_translation), pat(r'^([0-9]+)/move-page/$', self.move_page), pat(r'^([0-9]+)/copy-page/$', self.copy_page), pat(r'^([0-9]+)/copy-language/$', self.copy_language), pat(r'^([0-9]+)/dialog/copy/$', get_copy_dialog), # copy dialog pat(r'^([0-9]+)/change-navigation/$', self.change_innavigation), pat(r'^([0-9]+)/jsi18n/$', self.redirect_jsi18n), pat(r'^([0-9]+)/permissions/$', self.get_permissions), pat(r'^([0-9]+)/undo/$', self.undo), pat(r'^([0-9]+)/redo/$', self.redo), pat(r'^([0-9]+)/change_template/$', self.change_template), pat(r'^([0-9]+)/([a-z\-]+)/descendants/$', self.descendants), # menu html for page descendants pat(r'^([0-9]+)/([a-z\-]+)/edit-field/$', self.edit_title_fields), pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_page), pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish), pat(r'^([0-9]+)/([a-z\-]+)/revert/$', self.revert_page), pat(r'^([0-9]+)/([a-z\-]+)/preview/$', 
self.preview_page), pat(r'^add-page-type/$', self.add_page_type), pat(r'^published-pages/$', self.get_published_pagelist), url(r'^resolve/$', self.resolve, name="cms_page_resolve"), ) if plugin_pool.get_all_plugins(): url_patterns += plugin_pool.get_patterns() url_patterns += super(PageAdmin, self).get_urls() return url_patterns def redirect_jsi18n(self, request): return HttpResponseRedirect(admin_reverse('jsi18n')) def get_revision_instances(self, request, object): """Returns all the instances to be used in the object's revision.""" if isinstance(object, Title): object = object.page if isinstance(object, Page) and not object.publisher_is_draft: object = object.publisher_public placeholder_relation = find_placeholder_relation(object) data = [object] filters = {'placeholder__%s' % placeholder_relation: object} for plugin in CMSPlugin.objects.filter(**filters): data.append(plugin) plugin_instance, admin = plugin.get_plugin_instance() if plugin_instance: data.append(plugin_instance) if isinstance(object, Page): titles = object.title_set.all() for title in titles: title.publisher_public = None data.append(title) return data def save_model(self, request, obj, form, change): """ Move the page in the tree if necessary and save every placeholder Content object. 
""" target = request.GET.get('target', None) position = request.GET.get('position', None) if 'recover' in request.path_info: pk = obj.pk if obj.parent_id: parent = Page.objects.get(pk=obj.parent_id) else: parent = None obj.lft = 0 obj.rght = 0 obj.tree_id = 0 obj.level = 0 obj.pk = None obj.insert_at(parent, save=False) obj.pk = pk obj.save(no_signals=True) else: if 'history' in request.path_info: old_obj = Page.objects.get(pk=obj.pk) obj.level = old_obj.level obj.parent_id = old_obj.parent_id obj.rght = old_obj.rght obj.lft = old_obj.lft obj.tree_id = old_obj.tree_id new = False if not obj.pk: new = True obj.save() if 'recover' in request.path_info or 'history' in request.path_info: revert_plugins(request, obj.version.pk, obj) if target is not None and position is not None: try: target = self.model.objects.get(pk=target) except self.model.DoesNotExist: pass else: obj.move_to(target, position) page_type_id = form.cleaned_data.get('page_type') copy_target_id = request.GET.get('copy_target') if copy_target_id or page_type_id: if page_type_id: copy_target_id = page_type_id copy_target = Page.objects.get(pk=copy_target_id) if not copy_target.has_view_permission(request): raise PermissionDenied() obj = Page.objects.get(pk=obj.pk) #mptt reload copy_target._copy_attributes(obj, clean=True) obj.save() for lang in copy_target.languages.split(','): copy_target._copy_contents(obj, lang) if not 'permission' in request.path_info: language = form.cleaned_data['language'] Title.objects.set_or_create( request, obj, form, language, ) # is it home? 
publish it right away if new and Page.objects.filter(site_id=obj.site_id).count() == 1: obj.publish(language) def get_fieldsets(self, request, obj=None): form = self.get_form(request, obj, fields=None) if getattr(form, 'fieldsets', None) is None: fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj)) return [(None, {'fields': fields})] else: return form.fieldsets def get_inline_classes(self, request, obj=None, **kwargs): if obj and 'permission' in request.path_info: return PERMISSION_ADMIN_INLINES return [] def get_form_class(self, request, obj=None, **kwargs): if 'advanced' in request.path_info: return AdvancedSettingsForm elif 'permission' in request.path_info: return PagePermissionForm elif 'dates' in request.path_info: return PublicationDatesForm return self.form def get_form(self, request, obj=None, **kwargs): """ Get PageForm for the Page model and modify its fields depending on the request. """ language = get_language_from_request(request, obj) form_cls = self.get_form_class(request, obj) form = super(PageAdmin, self).get_form(request, obj, form=form_cls, **kwargs) # get_form method operates by overriding initial fields value which # may persist across invocation. 
Code below deepcopies fields definition # to avoid leaks for field in form.base_fields.keys(): form.base_fields[field] = copy.deepcopy(form.base_fields[field]) if 'language' in form.base_fields: form.base_fields['language'].initial = language if 'page_type' in form.base_fields: if 'copy_target' in request.GET or 'add_page_type' in request.GET or obj: del form.base_fields['page_type'] elif not Title.objects.filter(page__parent__reverse_id=PAGE_TYPES_ID, language=language).exists(): del form.base_fields['page_type'] if 'add_page_type' in request.GET: del form.base_fields['menu_title'] del form.base_fields['meta_description'] del form.base_fields['page_title'] self.inlines = self.get_inline_classes(request, obj, **kwargs) if obj: if 'history' in request.path_info or 'recover' in request.path_info: version_id = request.path_info.split('/')[-2] else: version_id = None title_obj = obj.get_title_obj(language=language, fallback=False, version_id=version_id, force_reload=True) if 'site' in form.base_fields and form.base_fields['site'].initial is None: form.base_fields['site'].initial = obj.site for name in ('slug', 'title', 'meta_description', 'menu_title', 'page_title', 'redirect'): if name in form.base_fields: form.base_fields[name].initial = getattr(title_obj, name) if 'overwrite_url' in form.base_fields: if title_obj.has_url_overwrite: form.base_fields['overwrite_url'].initial = title_obj.path else: form.base_fields['overwrite_url'].initial = '' else: for name in ('slug', 'title'): form.base_fields[name].initial = u'' if 'target' in request.GET or 'copy_target' in request.GET: target = request.GET.get('copy_target') or request.GET.get('target') if 'position' in request.GET: position = request.GET['position'] if position == 'last-child' or position == 'first-child': form.base_fields['parent'].initial = request.GET.get('target', None) else: sibling = Page.objects.get(pk=target) form.base_fields['parent'].initial = sibling.parent_id else: form.base_fields['parent'].initial 
= request.GET.get('target', None) form.base_fields['site'].initial = request.session.get('cms_admin_site', None) return form def advanced(self, request, object_id): page = get_object_or_404(Page, pk=object_id) if not page.has_advanced_settings_permission(request): raise PermissionDenied("No permission for editing advanced settings") return self.change_view(request, object_id, extra_context={'advanced_settings': True, 'title': _("Advanced Settings")}) def dates(self, request, object_id): return self.change_view(request, object_id, extra_context={'publishing_dates': True, 'title': _("Publishing dates")}) def permissions(self, request, object_id): page = get_object_or_404(Page, pk=object_id) if not page.has_change_permissions_permission(request): raise PermissionDenied("No permission for editing advanced settings") return self.change_view(request, object_id, extra_context={'show_permissions': True, 'title': _("Change Permissions")}) def get_inline_instances(self, request, obj=None): if DJANGO_1_4: inlines = super(PageAdmin, self).get_inline_instances(request) if hasattr(self, '_current_page'): obj = self._current_page else: inlines = super(PageAdmin, self).get_inline_instances(request, obj) if get_cms_setting('PERMISSION') and obj: filtered_inlines = [] for inline in inlines: if (isinstance(inline, PagePermissionInlineAdmin) and not isinstance(inline, ViewRestrictionInlineAdmin)): if "recover" in request.path or "history" in request.path: # do not display permissions in recover mode continue if not obj.has_change_permissions_permission(request): continue filtered_inlines.append(inline) inlines = filtered_inlines return inlines def get_unihandecode_context(self, language): if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'): uhd_lang = language[:2] else: uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER') uhd_host = get_cms_setting('UNIHANDECODE_HOST') uhd_version = get_cms_setting('UNIHANDECODE_VERSION') if uhd_lang and uhd_host and uhd_version: 
uhd_urls = [ '%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version), '%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang), ] else: uhd_urls = [] return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls} def add_view(self, request, form_url='', extra_context=None): extra_context = extra_context or {} language = get_language_from_request(request) extra_context.update({ 'language': language, }) if not request.GET.get('add_page_type') is None: extra_context.update({ 'add_page_type': True, 'title': _("Add Page Type"), }) elif 'copy_target' in request.GET: extra_context.update({ 'title': _("Add Page Copy"), }) else: extra_context = self.update_language_tab_context(request, context=extra_context) extra_context.update(self.get_unihandecode_context(language)) return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): """ The 'change' admin view for the Page model. """ if extra_context is None: extra_context = {'basic_info': True} try: obj = self.model.objects.get(pk=object_id) except self.model.DoesNotExist: # Don't raise Http404 just yet, because we haven't checked # permissions yet. We don't want an unauthenticated user to be able # to determine whether a given object exists. 
obj = None else: #activate(user_lang_set) context = { 'page': obj, 'CMS_PERMISSION': get_cms_setting('PERMISSION'), 'ADMIN_MEDIA_URL': settings.STATIC_URL, 'can_change': obj.has_change_permission(request), 'can_change_permissions': obj.has_change_permissions_permission(request), 'current_site_id': settings.SITE_ID, } context.update(extra_context or {}) extra_context = self.update_language_tab_context(request, obj, context) tab_language = get_language_from_request(request) extra_context.update(self.get_unihandecode_context(tab_language)) # get_inline_instances will need access to 'obj' so that it can # determine if current user has enough rights to see PagePermissionInlineAdmin # because in django versions <1.5 get_inline_instances doesn't receive 'obj' # as a parameter, the workaround is to set it as an attribute... if DJANGO_1_4: self._current_page = obj response = super(PageAdmin, self).change_view( request, object_id, form_url=form_url, extra_context=extra_context) if tab_language and response.status_code == 302 and response._headers['location'][1] == request.path_info: location = response._headers['location'] response._headers['location'] = (location[0], "%s?language=%s" % (location[1], tab_language)) return response def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): # add context variables filled_languages = [] if obj: filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')] allowed_languages = [lang[0] for lang in self._get_site_languages(obj)] context.update({ 'filled_languages': [lang for lang in filled_languages if lang in allowed_languages], }) return super(PageAdmin, self).render_change_form(request, context, add, change, form_url, obj) def _get_site_languages(self, obj=None): site_id = None if obj: site_id = obj.site_id else: site_id = Site.objects.get_current().pk return get_language_tuple(site_id) def update_language_tab_context(self, request, obj=None, 
context=None): if not context: context = {} language = get_language_from_request(request, obj) languages = self._get_site_languages(obj) context.update({ 'language': language, 'language_tabs': languages, # Dates are not language dependent, thus we hide the language # selection bar: the language is forced through the form class 'show_language_tabs': len(list(languages)) > 1 and not context.get('publishing_dates', False), }) return context def response_change(self, request, obj): """Called always when page gets changed, call save on page, there may be some new stuff, which should be published after all other objects on page are collected. """ # save the object again, so all the related changes to page model # can be published if required obj.save() return super(PageAdmin, self).response_change(request, obj) def has_add_permission(self, request): """ Return true if the current user has permission to add a new page. """ if get_cms_setting('PERMISSION'): return permissions.has_page_add_permission(request) return super(PageAdmin, self).has_add_permission(request) def has_change_permission(self, request, obj=None): """ Return true if the current user has permission on the page. Return the string 'All' if the user has all rights. """ if get_cms_setting('PERMISSION'): if obj: return obj.has_change_permission(request) else: return permissions.has_page_change_permission(request) return super(PageAdmin, self).has_change_permission(request, obj) def has_delete_permission(self, request, obj=None): """ Returns True if the given request has permission to change the given Django model instance. If CMS_PERMISSION are in use also takes look to object permissions. 
""" if get_cms_setting('PERMISSION') and obj is not None: return obj.has_delete_permission(request) return super(PageAdmin, self).has_delete_permission(request, obj) def has_recover_permission(self, request): """ Returns True if the use has the right to recover pages """ if not is_installed('reversion'): return False user = request.user if user.is_superuser: return True try: if has_global_page_permission(request, can_recover_page=True): return True except: pass return False def has_add_plugin_permission(self, request, placeholder, plugin_type): if not permissions.has_plugin_permission(request.user, plugin_type, "add"): return False page = placeholder.page if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return False return True def has_copy_plugin_permission(self, request, source_placeholder, target_placeholder, plugins): source_page = source_placeholder.page if source_page and not source_page.has_change_permission(request): return False target_page = target_placeholder.page if target_page and not target_page.has_change_permission(request): return False if target_page and not target_page.publisher_is_draft: return False for plugin in plugins: if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"): return False return True def has_change_plugin_permission(self, request, plugin): page = plugin.placeholder.page if plugin.placeholder else None if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return False if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"): return False return True def has_move_plugin_permission(self, request, plugin, target_placeholder): if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"): return False page = plugin.placeholder.page if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return 
False return True def has_delete_plugin_permission(self, request, plugin): if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"): return False page = plugin.placeholder.page if page: if not page.publisher_is_draft: return False if not page.has_change_permission(request): return False return True def has_clear_placeholder_permission(self, request, placeholder): page = placeholder.page if placeholder else None if page: if not page.publisher_is_draft: return False if not page.has_change_permission(request): return False return True def post_add_plugin(self, request, placeholder, plugin): if is_installed('reversion') and placeholder.page: plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) message = _(u"%(plugin_name)s plugin added to %(placeholder)s") % { 'plugin_name': plugin_name, 'placeholder': placeholder} self.cleanup_history(placeholder.page) helpers.make_revision_with_plugins(placeholder.page, request.user, message) def post_copy_plugins(self, request, source_placeholder, target_placeholder, plugins): page = target_placeholder.page if page and is_installed('reversion'): message = _(u"Copied plugins to %(placeholder)s") % {'placeholder': target_placeholder} self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, message) def post_edit_plugin(self, request, plugin): page = plugin.placeholder.page if page: # if reversion is installed, save version of the page plugins if is_installed('reversion') and page: plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) message = _( u"%(plugin_name)s plugin edited at position %(position)s in %(placeholder)s") % { 'plugin_name': plugin_name, 'position': plugin.position, 'placeholder': plugin.placeholder.slot } self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, message) def post_move_plugin(self, request, source_placeholder, target_placeholder, plugin): page = target_placeholder.page if page 
and is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, _(u"Plugins were moved")) def post_delete_plugin(self, request, plugin): plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) page = plugin.placeholder.page if page: page.save() comment = _("%(plugin_name)s plugin at position %(position)s in %(placeholder)s was deleted.") % { 'plugin_name': plugin_name, 'position': plugin.position, 'placeholder': plugin.placeholder, } if is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, comment) def post_clear_placeholder(self, request, placeholder): page = placeholder.page if page: page.save() comment = _('All plugins in the placeholder "%(name)s" were deleted.') % { 'name': force_unicode(placeholder) } if is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, comment) def get_placeholder_template(self, request, placeholder): page = placeholder.page if page: return page.get_template() def changelist_view(self, request, extra_context=None): "The 'change list' admin view for this model." from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_change_permission(request, None): return HttpResponseForbidden(force_unicode(_("You do not have permission to change pages."))) try: cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter, self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page, self.list_max_show_all, self.list_editable, self) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. 
If wacky parameters were given and # the 'invalid=1' parameter was already in the query string, something # is screwed up with the database, so display an error page. if ERROR_FLAG in request.GET.keys(): return render_to_response('admin/invalid_setup.html', {'title': _('Database error')}) return HttpResponseRedirect(request.path_info + '?' + ERROR_FLAG + '=1') cl.set_items(request) site_id = request.GET.get('site__exact', None) if site_id is None: site_id = current_site(request).pk site_id = int(site_id) # languages languages = get_language_list(site_id) # parse the cookie that saves which page trees have # been opened already and extracts the page ID djangocms_nodes_open = request.COOKIES.get('djangocms_nodes_open', '') raw_nodes = unquote(djangocms_nodes_open).split(',') try: open_menu_trees = [int(c.split('page_', 1)[1]) for c in raw_nodes] except IndexError: open_menu_trees = [] # Language may be present in the GET dictionary but empty language = request.GET.get('language', get_language()) if not language: language = get_language() context = { 'title': cl.title, 'is_popup': cl.is_popup, 'cl': cl, 'opts': opts, 'has_add_permission': self.has_add_permission(request), 'root_path': admin_reverse('index'), 'app_label': app_label, 'preview_language': language, 'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'), 'CMS_PERMISSION': get_cms_setting('PERMISSION'), 'DEBUG': settings.DEBUG, 'site_languages': languages, 'open_menu_trees': open_menu_trees, } if is_installed('reversion'): context['has_recover_permission'] = self.has_recover_permission(request) context['has_change_permission'] = self.has_change_permission(request) context.update(extra_context or {}) return render_to_response(self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context, context_instance=RequestContext(request)) def recoverlist_view(self, request, extra_context=None): if not 
self.has_recover_permission(request): raise PermissionDenied return super(PageAdmin, self).recoverlist_view(request, extra_context) def recover_view(self, request, version_id, extra_context=None): if not self.has_recover_permission(request): raise PermissionDenied extra_context = self.update_language_tab_context(request, None, extra_context) return super(PageAdmin, self).recover_view(request, version_id, extra_context) def revision_view(self, request, object_id, version_id, extra_context=None): if not self.has_change_permission(request, Page.objects.get(pk=object_id)): raise PermissionDenied extra_context = self.update_language_tab_context(request, None, extra_context) response = super(PageAdmin, self).revision_view(request, object_id, version_id, extra_context) return response def history_view(self, request, object_id, extra_context=None): if not self.has_change_permission(request, Page.objects.get(pk=object_id)): raise PermissionDenied extra_context = self.update_language_tab_context(request, None, extra_context) return super(PageAdmin, self).history_view(request, object_id, extra_context) def render_revision_form(self, request, obj, version, context, revert=False, recover=False): # reset parent to null if parent is not found if version.field_dict['parent']: try: Page.objects.get(pk=version.field_dict['parent']) except: if revert and obj.parent_id != int(version.field_dict['parent']): version.field_dict['parent'] = obj.parent_id if recover: obj.parent = None obj.parent_id = None version.field_dict['parent'] = None obj.version = version return super(PageAdmin, self).render_revision_form(request, obj, version, context, revert, recover) @require_POST def undo(self, request, object_id): if not is_installed('reversion'): return HttpResponseBadRequest('django reversion not installed') from reversion.models import Revision from cms.utils.page_resolver import is_valid_url import reversion page = get_object_or_404(Page, pk=object_id) old_titles = 
list(page.title_set.all()) if not page.publisher_is_draft: page = page.publisher_draft if not page.has_change_permission(request): return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page"))) versions = reversion.get_for_object(page) if page.revision_id: current_revision = Revision.objects.get(pk=page.revision_id) else: try: current_version = versions[0] except IndexError: return HttpResponseBadRequest("no current revision found") current_revision = current_version.revision try: previous_version = versions.filter(revision__pk__lt=current_revision.pk)[0] except IndexError: return HttpResponseBadRequest("no previous revision found") previous_revision = previous_version.revision # clear all plugins placeholders = page.placeholders.all() placeholder_ids = [] for placeholder in placeholders: placeholder_ids.append(placeholder.pk) plugins = CMSPlugin.objects.filter(placeholder__in=placeholder_ids).order_by('-level') for plugin in plugins: plugin._no_reorder = True plugin.delete() # TODO: delete placeholders instead of finding duplicates for 3.1 #page.placeholders.all().delete() previous_revision.revert(True) rev_page = get_object_or_404(Page, pk=page.pk) rev_page.revision_id = previous_revision.pk rev_page.publisher_public_id = page.publisher_public_id rev_page.save() new_placeholders = rev_page.placeholders.all() slots = {} for new_ph in new_placeholders: if not new_ph.slot in slots: slots[new_ph.slot] = new_ph else: if new_ph in placeholder_ids: new_ph.delete() elif slots[new_ph.slot] in placeholder_ids: slots[new_ph.slot].delete() new_titles = rev_page.title_set.all() for title in new_titles: try: is_valid_url(title.path, rev_page) except ValidationError: for old_title in old_titles: if old_title.language == title.language: title.slug = old_title.slug title.save() messages.error(request, _("Page reverted but slug stays the same because of url collisions.")) return HttpResponse("ok") @require_POST def redo(self, request, object_id): 
        """Re-apply the revision after the current one (inverse of undo)."""
        if not is_installed('reversion'):
            return HttpResponseBadRequest('django reversion not installed')
        from reversion.models import Revision
        import reversion
        from cms.utils.page_resolver import is_valid_url

        page = get_object_or_404(Page, pk=object_id)
        old_titles = list(page.title_set.all())
        if not page.publisher_is_draft:
            page = page.publisher_draft
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page")))
        versions = reversion.get_for_object(page)
        if page.revision_id:
            current_revision = Revision.objects.get(pk=page.revision_id)
        else:
            try:
                current_version = versions[0]
            except IndexError:
                return HttpResponseBadRequest("no current revision found")
            current_revision = current_version.revision
        try:
            # first revision newer than the current one
            previous_version = versions.filter(revision__pk__gt=current_revision.pk).order_by('pk')[0]
        except IndexError:
            return HttpResponseBadRequest("no next revision found")
        next_revision = previous_version.revision
        # clear all plugins
        placeholders = page.placeholders.all()
        placeholder_ids = []
        for placeholder in placeholders:
            placeholder_ids.append(placeholder.pk)
        plugins = CMSPlugin.objects.filter(placeholder__in=placeholder_ids).order_by('-level')
        for plugin in plugins:
            plugin._no_reorder = True
            plugin.delete()
        # TODO: 3.1 remove the placeholder matching from below and just delete them
        #page.placeholders.all().delete()
        next_revision.revert(True)
        rev_page = get_object_or_404(Page, pk=page.pk)
        rev_page.revision_id = next_revision.pk
        rev_page.publisher_public_id = page.publisher_public_id
        rev_page.save()
        new_placeholders = rev_page.placeholders.all()
        slots = {}
        for new_ph in new_placeholders:
            if not new_ph.slot in slots:
                slots[new_ph.slot] = new_ph
            else:
                if new_ph in placeholder_ids:
                    new_ph.delete()
                elif slots[new_ph.slot] in placeholder_ids:
                    slots[new_ph.slot].delete()
        new_titles = rev_page.title_set.all()
        for title in new_titles:
            try:
                is_valid_url(title.path, rev_page)
            except ValidationError:
                # Keep the pre-redo slug so the URL stays unique.
                for old_title in old_titles:
                    if old_title.language == title.language:
                        title.slug = old_title.slug
                        title.save()
                        messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
        return HttpResponse("ok")

    @require_POST
    @create_revision()
    def change_template(self, request, object_id):
        """Change the page's template to one of the configured templates."""
        page = get_object_or_404(Page, pk=object_id)
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change the template")))

        to_template = request.POST.get("template", None)
        if to_template not in dict(get_cms_setting('TEMPLATES')):
            return HttpResponseBadRequest(force_unicode(_("Template not valid")))

        page.template = to_template
        page.save()
        if is_installed('reversion'):
            message = _("Template changed to %s") % dict(get_cms_setting('TEMPLATES'))[to_template]
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, message)
        return HttpResponse(force_unicode(_("The template was successfully changed")))

    @wrap_transaction
    def move_page(self, request, page_id, extra_context=None):
        """
        Move the page to the requested target, at the given position
        """
        target = request.POST.get('target', None)
        position = request.POST.get('position', None)
        if target is None or position is None:
            return HttpResponseRedirect('../../')

        try:
            page = self.model.objects.get(pk=page_id)
            target = self.model.objects.get(pk=target)
        except self.model.DoesNotExist:
            return jsonify_request(HttpResponseBadRequest("error"))

        # does he haves permissions to do this...?
        if not page.has_move_page_permission(request) or \
                not target.has_add_permission(request):
            return jsonify_request(
                HttpResponseForbidden(force_unicode(_("Error! You don't have permissions to move this page. Please reload the page"))))
        # move page
        page.move_page(target, position)

        if is_installed('reversion'):
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, _("Page moved"))

        return jsonify_request(HttpResponse(admin_utils.render_admin_menu_item(request, page).content))

    def get_permissions(self, request, page_id):
        """Render the permissions table for a page (global + per-page)."""
        page = get_object_or_404(Page, id=page_id)

        can_change_list = Page.permissions.get_change_id_list(request.user, page.site_id)

        global_page_permissions = GlobalPagePermission.objects.filter(sites__in=[page.site_id])
        page_permissions = PagePermission.objects.for_page(page)
        all_permissions = list(global_page_permissions) + list(page_permissions)

        # does he can change global permissions ?
        has_global = permissions.has_global_change_permissions_permission(request)

        permission_set = []
        for permission in all_permissions:
            # each entry is [(is_global, can_change), permission]
            if isinstance(permission, GlobalPagePermission):
                if has_global:
                    permission_set.append([(True, True), permission])
                else:
                    permission_set.append([(True, False), permission])
            else:
                if can_change_list == PagePermissionsPermissionManager.GRANT_ALL:
                    can_change = True
                else:
                    can_change = permission.page_id in can_change_list
                permission_set.append([(False, can_change), permission])

        context = {
            'page': page,
            'permission_set': permission_set,
        }
        return render_to_response('admin/cms/page/permissions.html', context)

    @require_POST
    @wrap_transaction
    def copy_language(self, request, page_id):
        """Copy all plugins of a page from one language to another."""
        with create_revision():
            source_language = request.POST.get('source_language')
            target_language = request.POST.get('target_language')
            page = Page.objects.get(pk=page_id)
            placeholders = page.placeholders.all()

            if not target_language or not target_language in get_language_list():
                return HttpResponseBadRequest(force_unicode(_("Language must be set to a supported language!")))
            for placeholder in placeholders:
                plugins = list(
                    placeholder.cmsplugin_set.filter(language=source_language).order_by('tree_id', 'level', 'position'))
                if not self.has_copy_plugin_permission(request, placeholder, placeholder, plugins):
                    return HttpResponseForbidden(force_unicode(_('You do not have permission to copy these plugins.')))
                copy_plugins.copy_plugins_to(plugins, placeholder, target_language)
            if page and is_installed('reversion'):
                message = _(u"Copied plugins from %(source_language)s to %(target_language)s") % {
                    'source_language': source_language, 'target_language': target_language}
                self.cleanup_history(page)
                helpers.make_revision_with_plugins(page, request.user, message)
            return HttpResponse("ok")

    @wrap_transaction
    def copy_page(self, request, page_id, extra_context=None):
        """
        Copy the page and all its plugins and descendants to the requested target, at the given position
        """
        context = {}
        page = Page.objects.get(pk=page_id)

        target = request.POST.get('target', None)
        position = request.POST.get('position', None)
        site = request.POST.get('site', None)
        if target is not None and position is not None and site is not None:
            try:
                target = self.model.objects.get(pk=target)
                # does he have permissions to copy this page under target?
assert target.has_add_permission(request) site = Site.objects.get(pk=site) except (ObjectDoesNotExist, AssertionError): return HttpResponse("error") #context.update({'error': _('Page could not been moved.')}) else: try: kwargs = { 'copy_permissions': request.REQUEST.get('copy_permissions', False), } page.copy_page(target, site, position, **kwargs) return jsonify_request(HttpResponse("ok")) except ValidationError: exc = sys.exc_info()[1] return jsonify_request(HttpResponseBadRequest(exc.messages)) context.update(extra_context or {}) return HttpResponseRedirect('../../') @wrap_transaction @create_revision() def publish_page(self, request, page_id, language): try: page = Page.objects.get(id=page_id, publisher_is_draft=True) except Page.DoesNotExist: page = None # ensure user has permissions to publish this page all_published = True if page: if not page.has_publish_permission(request): return HttpResponseForbidden(force_unicode(_("You do not have permission to publish this page"))) published = page.publish(language) if not published: all_published = False statics = request.GET.get('statics', '') if not statics and not page: return Http404("No page or stack found for publishing.") if statics: static_ids = statics .split(',') for pk in static_ids: static_placeholder = StaticPlaceholder.objects.get(pk=pk) published = static_placeholder.publish(request, language) if not published: all_published = False if page: if all_published: if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING: messages.warning(request, _("Page not published! A parent page is not published yet.")) else: messages.info(request, _('The content was successfully published.')) LogEntry.objects.log_action( user_id=request.user.id, content_type_id=ContentType.objects.get_for_model(Page).pk, object_id=page_id, object_repr=page.get_title(language), action_flag=CHANGE, ) else: if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING: messages.warning(request, _("Page not published! 
A parent page is not published yet.")) else: messages.warning(request, _("There was a problem publishing your content")) if is_installed('reversion') and page: self.cleanup_history(page, publish=True) helpers.make_revision_with_plugins(page, request.user, PUBLISH_COMMENT) # create a new publish reversion if 'node' in request.REQUEST: # if request comes from tree.. return admin_utils.render_admin_menu_item(request, page) if 'redirect' in request.GET: return HttpResponseRedirect(request.GET['redirect']) referrer = request.META.get('HTTP_REFERER', '') path = admin_reverse("cms_page_changelist") if request.GET.get('redirect_language'): path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id')) if admin_reverse('index') not in referrer: if all_published: if page: if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING: path = page.get_absolute_url(language, fallback=True) else: public_page = Page.objects.get(publisher_public=page.pk) path = '%s?%s' % (public_page.get_absolute_url(language, fallback=True), get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')) else: path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')) else: path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF') return HttpResponseRedirect(path) def cleanup_history(self, page, publish=False): if is_installed('reversion') and page: # delete revisions that are not publish revisions from reversion.models import Version content_type = ContentType.objects.get_for_model(Page) # reversion 1.8+ removes type field, revision filtering must be based on comments versions_qs = Version.objects.filter(content_type=content_type, object_id_int=page.pk) history_limit = get_cms_setting("MAX_PAGE_HISTORY_REVERSIONS") deleted = [] for version in versions_qs.exclude(revision__comment__in=(INITIAL_COMMENT, PUBLISH_COMMENT)).order_by( '-revision__pk')[history_limit - 1:]: if not version.revision_id in deleted: revision = version.revision 
revision.delete() deleted.append(revision.pk) # delete all publish revisions that are more then MAX_PAGE_PUBLISH_REVERSIONS publish_limit = get_cms_setting("MAX_PAGE_PUBLISH_REVERSIONS") if publish_limit and publish: deleted = [] for version in versions_qs.filter(revision__comment__exact=PUBLISH_COMMENT).order_by( '-revision__pk')[publish_limit - 1:]: if not version.revision_id in deleted: revision = version.revision revision.delete() deleted.append(revision.pk) @wrap_transaction def unpublish(self, request, page_id, language): """ Publish or unpublish a language of a page """ site = Site.objects.get_current() page = get_object_or_404(Page, pk=page_id) if not page.has_publish_permission(request): return HttpResponseForbidden(force_unicode(_("You do not have permission to unpublish this page"))) if not page.publisher_public_id: return HttpResponseForbidden(force_unicode(_("This page was never published"))) try: page.unpublish(language) message = _('The %(language)s page "%(page)s" was successfully unpublished') % { 'language': get_language_object(language, site)['name'], 'page': page} messages.info(request, message) LogEntry.objects.log_action( user_id=request.user.id, content_type_id=ContentType.objects.get_for_model(Page).pk, object_id=page_id, object_repr=page.get_title(), action_flag=CHANGE, change_message=message, ) except RuntimeError: exc = sys.exc_info()[1] messages.error(request, exc.message) except ValidationError: exc = sys.exc_info()[1] messages.error(request, exc.message) path = admin_reverse("cms_page_changelist") if request.GET.get('redirect_language'): path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id')) return HttpResponseRedirect(path) @wrap_transaction def revert_page(self, request, page_id, language): page = get_object_or_404(Page, id=page_id) # ensure user has permissions to publish this page if not page.has_change_permission(request): return 
HttpResponseForbidden(force_unicode(_("You do not have permission to change this page"))) page.revert(language) messages.info(request, _('The page "%s" was successfully reverted.') % page) if 'node' in request.REQUEST: # if request comes from tree.. return admin_utils.render_admin_menu_item(request, page) referer = request.META.get('HTTP_REFERER', '') path = '../../' if admin_reverse('index') not in referer: path = '%s?%s' % (referer.split('?')[0], get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')) return HttpResponseRedirect(path) @create_revision() def delete_translation(self, request, object_id, extra_context=None): if 'language' in request.GET: language = request.GET['language'] else: language = get_language_from_request(request) opts = Page._meta titleopts = Title._meta app_label = titleopts.app_label pluginopts = CMSPlugin._meta try: obj = self.queryset(request).get(pk=unquote(object_id)) except self.model.DoesNotExist: # Don't raise Http404 just yet, because we haven't checked # permissions yet. We don't want an unauthenticated user to be able # to determine whether a given object exists. 
obj = None if not self.has_delete_permission(request, obj): return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page"))) if obj is None: raise Http404( _('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_unicode(opts.verbose_name), 'key': escape(object_id) }) if not len(list(obj.get_languages())) > 1: raise Http404(_('There only exists one translation for this page')) titleobj = get_object_or_404(Title, page__id=object_id, language=language) saved_plugins = CMSPlugin.objects.filter(placeholder__page__id=object_id, language=language) using = router.db_for_read(self.model) kwargs = { 'admin_site': self.admin_site, 'user': request.user, 'using': using } deleted_objects, perms_needed = get_deleted_objects( [titleobj], titleopts, **kwargs )[:2] to_delete_plugins, perms_needed_plugins = get_deleted_objects( saved_plugins, pluginopts, **kwargs )[:2] deleted_objects.append(to_delete_plugins) perms_needed = set(list(perms_needed) + list(perms_needed_plugins)) if request.method == 'POST': if perms_needed: raise PermissionDenied message = _('Title and plugins with language %(language)s was deleted') % { 'language': force_unicode(get_language_object(language)['name']) } self.log_change(request, titleobj, message) messages.info(request, message) titleobj.delete() for p in saved_plugins: p.delete() public = obj.publisher_public if public: public.save() if is_installed('reversion'): self.cleanup_history(obj) helpers.make_revision_with_plugins(obj, request.user, message) if not self.has_change_permission(request, None): return HttpResponseRedirect("../../../../") return HttpResponseRedirect("../../") context = { "title": _("Are you sure?"), "object_name": force_unicode(titleopts.verbose_name), "object": titleobj, "deleted_objects": deleted_objects, "perms_lacking": perms_needed, "opts": opts, "root_path": admin_reverse('index'), "app_label": app_label, } context.update(extra_context or {}) context_instance = 
# NOTE(review): this span is the tail of a PageAdmin (ModelAdmin) class whose
# header lies before this chunk. The first statements below are the
# continuation of a delete-confirmation admin view that begins outside the
# visible range; `app_label`, `titleopts`, `context` and `context_instance`
# are bound in that unseen part -- TODO confirm against the full file.
        RequestContext(request, current_app=self.admin_site.name)
        return render_to_response(self.delete_confirmation_template or [
            "admin/%s/%s/delete_confirmation.html" % (app_label, titleopts.object_name.lower()),
            "admin/%s/delete_confirmation.html" % app_label,
            "admin/delete_confirmation.html"
        ], context, context_instance=context_instance)

    def preview_page(self, request, object_id, language):
        """Redirecting preview function based on draft_id"""
        page = get_object_or_404(Page, id=object_id)
        # Append the toolbar "edit on" flag and the language to the page URL.
        attrs = "?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
        attrs += "&language=" + language
        with force_language(language):
            url = page.get_absolute_url(language) + attrs
        site = get_current_site(request)
        # If the page belongs to a different Site, redirect to an absolute URL
        # on that site's domain (scheme mirrors the current request).
        if not site == page.site:
            url = "http%s://%s%s" % ('s' if request.is_secure() else '',
                                     page.site.domain, url)
        return HttpResponseRedirect(url)

    def change_innavigation(self, request, page_id):
        """
        Switch the in_navigation of a page
        """
        # NOTE(review): state-changing view with no visible require_POST/CSRF
        # guard in this copy -- the patched copy of this module (later in this
        # dump) adds a require_POST decorator for such views; confirm URL
        # wiring and harden accordingly.
        page = get_object_or_404(Page, pk=page_id)
        if page.has_change_permission(request):
            # Flip the flag, then re-render the tree menu item for the
            # requested (or request-derived) language.
            page.toggle_in_navigation()
            language = request.GET.get('language') or get_language_from_request(request)
            return admin_utils.render_admin_menu_item(request, page, language=language)
        return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page's in_navigation status")))

    def descendants(self, request, page_id, language):
        """
        Get html for descendants of given page
        Used for lazy loading pages in cms.changelist.js

        Permission checks is done in admin_utils.get_admin_menu_item_context
        which is called by admin_utils.render_admin_menu_item.
        """
        page = get_object_or_404(Page, pk=page_id)
        return admin_utils.render_admin_menu_item(request, page,
                                                  template="admin/cms/page/tree/lazy_menu.html",
                                                  language=language)

    def add_page_type(self, request):
        # Ensure the hidden "Page Types" root page/title exist, then redirect
        # into the normal add-page view parameterized to create a page type.
        # NOTE(review): creates DB rows on a GET request -- see CSRF note above.
        site = Site.objects.get_current()
        language = request.GET.get('language') or get_language()
        target = request.GET.get('copy_target')
        type_root, created = Page.objects.get_or_create(reverse_id=PAGE_TYPES_ID, publisher_is_draft=True, site=site,
                                                        defaults={'in_navigation': False})
        type_title, created = Title.objects.get_or_create(page=type_root, language=language, slug=PAGE_TYPES_ID,
                                                          defaults={'title': _('Page Types')})
        url = add_url_parameters(admin_reverse('cms_page_add'), target=type_root.pk, position='first-child',
                                 add_page_type=1, copy_target=target, language=language)
        return HttpResponseRedirect(url)

    def resolve(self, request):
        # Resolve the frontend URL of the object most recently edited in the
        # admin (from the session log entry), or of an explicitly passed
        # model/pk pair. Returns the URL as text/plain; '/' for non-staff.
        if not request.user.is_staff:
            if DJANGO_1_4:
                return HttpResponse('/', mimetype='text/plain')
            else:
                return HttpResponse('/', content_type='text/plain')
        obj = False
        url = False
        if request.session.get('cms_log_latest', False):
            log = LogEntry.objects.get(pk=request.session['cms_log_latest'])
            try:
                obj = log.get_edited_object()
            except (ObjectDoesNotExist, ValueError):
                obj = None
            del request.session['cms_log_latest']
            # Only objects watched by the toolbar and exposing a frontend URL
            # are taken into account.
            if obj and obj.__class__ in toolbar_pool.get_watch_models() and hasattr(obj, 'get_absolute_url'):
                # This is a test if the object url can be retrieved
                # In case it can't, object it's not taken into account
                try:
                    force_unicode(obj.get_absolute_url())
                except:  # NOTE(review): bare except silences all errors here
                    obj = None
            else:
                obj = None
        if not obj:
            # Fallback: look the object up from explicit request parameters.
            pk = request.REQUEST.get('pk')
            full_model = request.REQUEST.get('model')
            if pk and full_model:
                app_label, model = full_model.split('.')
                if pk and app_label:
                    ctype = ContentType.objects.get(app_label=app_label, model=model)
                    try:
                        obj = ctype.get_object_for_this_type(pk=pk)
                    except ctype.model_class().DoesNotExist:
                        obj = None
                    try:
                        force_unicode(obj.get_absolute_url())
                    except:  # NOTE(review): bare except; also runs when obj is None
                        obj = None
        if obj:
            if not request.toolbar or not request.toolbar.edit_mode:
                if isinstance(obj, Page):
                    # Prefer the published (public) URL; fall back to the
                    # draft URL with the edit-mode flag appended.
                    if obj.get_public_object():
                        url = obj.get_public_object().get_absolute_url()
                    else:
                        url = '%s?%s' % (
                            obj.get_draft_object().get_absolute_url(),
                            get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
                        )
                else:
                    url = obj.get_absolute_url()
            else:
                url = obj.get_absolute_url()
        if url:
            return HttpResponse(force_unicode(url), content_type='text/plain')
        return HttpResponse('', content_type='text/plain')

    def lookup_allowed(self, key, *args, **kwargs):
        # Permit filtering the changelist by site in addition to the defaults.
        if key == 'site__exact':
            return True
        return super(PageAdmin, self).lookup_allowed(key, *args, **kwargs)

    def edit_title_fields(self, request, page_id, language):
        # Frontend-editing endpoint: edit a whitelisted subset of Title
        # fields (title/menu_title/page_title) via a dynamically built form.
        title = Title.objects.get(page_id=page_id, language=language)
        saved_successfully = False
        raw_fields = request.GET.get("edit_fields", 'title')
        # Only fields explicitly whitelisted on the admin class may be edited.
        edit_fields = [field for field in raw_fields.split(",") if field in self.title_frontend_editable_fields]
        cancel_clicked = request.POST.get("_cancel", False)
        opts = Title._meta
        if not edit_fields:
            # Defaults to title
            edit_fields = ('title',)
        if not has_generic_permission(title.page.pk, request.user, "change",
                                      title.page.site.pk):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to edit this page")))

        class PageTitleForm(django.forms.ModelForm):
            """
            Dynamic form showing only the fields to be edited
            """

            class Meta:
                model = Title
                fields = edit_fields

        if not cancel_clicked and request.method == 'POST':
            form = PageTitleForm(instance=title, data=request.POST)
            if form.is_valid():
                form.save()
                saved_successfully = True
        else:
            form = PageTitleForm(instance=title)
        admin_form = AdminForm(form, fieldsets=[(None, {'fields': edit_fields})], prepopulated_fields={},
                               model_admin=self)
        media = self.media + admin_form.media
        context = {
            'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
            'title': 'Title',
            'plugin': title.page,
            'plugin_id': title.page.id,
            'adminform': admin_form,
            'add': False,
            'is_popup': True,
            'media': media,
            'opts': opts,
            'change': True,
            'save_as': False,
            'has_add_permission': False,
            'window_close_timeout': 10,
        }
        if cancel_clicked:
            # cancel button was clicked
            context.update({
                'cancel': True,
            })
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        if not cancel_clicked and request.method == 'POST' and saved_successfully:
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        return render_to_response('admin/cms/page/plugin/change_form.html', context, RequestContext(request))

    def get_published_pagelist(self, *args, **kwargs):
        """
        This view is used by the PageSmartLinkWidget as the user type to feed the autocomplete drop-down.
        """
        request = args[0]
        if request.is_ajax():
            query_term = request.GET.get('q','').strip('/')
            language_code = request.GET.get('language_code', settings.LANGUAGE_CODE)
            # Match the query against all translated title/path variants.
            matching_published_pages = Page.objects.published().public().filter(
                Q(title_set__title__icontains=query_term, title_set__language=language_code)
                | Q(title_set__path__icontains=query_term, title_set__language=language_code)
                | Q(title_set__menu_title__icontains=query_term, title_set__language=language_code)
                | Q(title_set__page_title__icontains=query_term, title_set__language=language_code)
            ).distinct()

            results = []
            for page in matching_published_pages:
                results.append(
                    {
                        'path': page.get_path(language=language_code),
                        'title': page.get_title(language=language_code),
                        'redirect_url': page.get_absolute_url(language=language_code)
                    }
                )
            if DJANGO_1_4:
                return HttpResponse(json.dumps(results), mimetype='application/json')
            else:
                return HttpResponse(json.dumps(results), content_type='application/json')
        else:
            return HttpResponseForbidden()

    # The following thin wrappers run the inherited placeholder-admin plugin
    # views inside a reversion revision context so plugin changes are
    # versioned (create_revision is a no-op context when reversion is absent).
    def add_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).add_plugin(*args, **kwargs)

    def copy_plugins(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).copy_plugins(*args, **kwargs)

    def edit_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).edit_plugin(*args, **kwargs)

    def move_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).move_plugin(*args, **kwargs)

    def delete_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).delete_plugin(*args, **kwargs)

    def clear_placeholder(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).clear_placeholder(*args, **kwargs)


admin.site.register(Page, PageAdmin)
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1654_0
crossvul-python_data_good_1654_0
# -*- coding: utf-8 -*-
# Module head of the (patched) django-cms page admin: imports, the
# require_POST method decorator used to restrict state-changing admin views
# to POST, and a no-op fallback revision context used when django-reversion
# is not installed.
import copy
from functools import wraps
import json
import sys

import django
from django.contrib.admin.helpers import AdminForm
from django.conf import settings
from django.contrib import admin, messages
from django.contrib.admin.models import LogEntry, CHANGE
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.util import get_deleted_objects
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site, get_current_site
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist, ValidationError
from django.db import router
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context import RequestContext
from django.template.defaultfilters import escape
from django.utils.translation import ugettext_lazy as _, get_language
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST

from cms.admin.change_list import CMSChangeList
from cms.admin.dialog.views import get_copy_dialog
from cms.admin.forms import (PageForm, AdvancedSettingsForm, PagePermissionForm,
                             PublicationDatesForm)
from cms.admin.permissionadmin import (PERMISSION_ADMIN_INLINES, PagePermissionInlineAdmin,
                                       ViewRestrictionInlineAdmin)
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from cms.admin.views import revert_plugins
from cms.constants import PAGE_TYPES_ID, PUBLISHER_STATE_PENDING
from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, StaticPlaceholder
from cms.models.managers import PagePermissionsPermissionManager
from cms.plugin_pool import plugin_pool
from cms.toolbar_pool import toolbar_pool
from cms.utils import helpers, permissions, get_language_from_request, admin as admin_utils, copy_plugins
from cms.utils.i18n import get_language_list, get_language_tuple, get_language_object, force_language
from cms.utils.admin import jsonify_request
from cms.utils.compat import DJANGO_1_4
from cms.utils.compat.dj import force_unicode, is_installed
from cms.utils.compat.urls import unquote
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import find_placeholder_relation
from cms.utils.permissions import has_global_page_permission, has_generic_permission
from cms.utils.plugins import current_site
from cms.utils.transaction import wrap_transaction
from cms.utils.urlutils import add_url_parameters, admin_reverse

# Wrap the view decorator so it can be applied to ModelAdmin methods
# (CSRF hardening: state-changing admin views are restricted to POST).
require_POST = method_decorator(require_POST)

if is_installed('reversion'):
    from reversion.admin import VersionAdmin as ModelAdmin
    from reversion import create_revision
else:  # pragma: no cover
    from django.contrib.admin import ModelAdmin

    class ReversionContext(object):
        # No-op stand-in for reversion's revision context manager/decorator.

        def __enter__(self):
            # NOTE(review): `yield` makes __enter__ a generator, so entering
            # the context does nothing and returns a generator object --
            # preserved as-is from upstream; confirm intent before changing.
            yield

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass

        def __call__(self, func):
            """Allows this revision context to be used as a decorator."""

            @wraps(func)
            def do_revision_context(*args, **kwargs):
                self.__enter__()
                exception = False
                try:
                    try:
                        return func(*args, **kwargs)
                    except:
                        exception = True
                        if not self.__exit__(*sys.exc_info()):
                            raise
                finally:
                    if not exception:
                        self.__exit__(None, None, None)

            return do_revision_context

    def create_revision():
        # Factory mirroring reversion.create_revision() for the no-op case.
        return ReversionContext()

# Revision comments used when publishing / first saving a page.
PUBLISH_COMMENT = "Publish"
INITIAL_COMMENT = "Initial version."
class PageAdmin(PlaceholderAdminMixin, ModelAdmin): form = PageForm search_fields = ('=id', 'title_set__slug', 'title_set__title', 'reverse_id') revision_form_template = "admin/cms/page/history/revision_header.html" recover_form_template = "admin/cms/page/history/recover_header.html" add_general_fields = ['title', 'slug', 'language', 'template'] change_list_template = "admin/cms/page/tree/base.html" list_filter = ['in_navigation', 'template', 'changed_by', 'soft_root'] title_frontend_editable_fields = ['title', 'menu_title', 'page_title'] inlines = PERMISSION_ADMIN_INLINES def get_urls(self): """Get the admin urls """ from django.conf.urls import patterns, url info = "%s_%s" % (self.model._meta.app_label, self.model._meta.module_name) pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__)) url_patterns = patterns( '', pat(r'^([0-9]+)/advanced-settings/$', self.advanced), pat(r'^([0-9]+)/dates/$', self.dates), pat(r'^([0-9]+)/permission-settings/$', self.permissions), pat(r'^([0-9]+)/delete-translation/$', self.delete_translation), pat(r'^([0-9]+)/move-page/$', self.move_page), pat(r'^([0-9]+)/copy-page/$', self.copy_page), pat(r'^([0-9]+)/copy-language/$', self.copy_language), pat(r'^([0-9]+)/dialog/copy/$', get_copy_dialog), # copy dialog pat(r'^([0-9]+)/change-navigation/$', self.change_innavigation), pat(r'^([0-9]+)/jsi18n/$', self.redirect_jsi18n), pat(r'^([0-9]+)/permissions/$', self.get_permissions), pat(r'^([0-9]+)/undo/$', self.undo), pat(r'^([0-9]+)/redo/$', self.redo), pat(r'^([0-9]+)/change_template/$', self.change_template), pat(r'^([0-9]+)/([a-z\-]+)/descendants/$', self.descendants), # menu html for page descendants pat(r'^([0-9]+)/([a-z\-]+)/edit-field/$', self.edit_title_fields), pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_page), pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish), pat(r'^([0-9]+)/([a-z\-]+)/revert/$', self.revert_page), pat(r'^([0-9]+)/([a-z\-]+)/preview/$', 
self.preview_page), pat(r'^add-page-type/$', self.add_page_type), pat(r'^published-pages/$', self.get_published_pagelist), url(r'^resolve/$', self.resolve, name="cms_page_resolve"), ) if plugin_pool.get_all_plugins(): url_patterns += plugin_pool.get_patterns() url_patterns += super(PageAdmin, self).get_urls() return url_patterns def redirect_jsi18n(self, request): return HttpResponseRedirect(admin_reverse('jsi18n')) def get_revision_instances(self, request, object): """Returns all the instances to be used in the object's revision.""" if isinstance(object, Title): object = object.page if isinstance(object, Page) and not object.publisher_is_draft: object = object.publisher_public placeholder_relation = find_placeholder_relation(object) data = [object] filters = {'placeholder__%s' % placeholder_relation: object} for plugin in CMSPlugin.objects.filter(**filters): data.append(plugin) plugin_instance, admin = plugin.get_plugin_instance() if plugin_instance: data.append(plugin_instance) if isinstance(object, Page): titles = object.title_set.all() for title in titles: title.publisher_public = None data.append(title) return data def save_model(self, request, obj, form, change): """ Move the page in the tree if necessary and save every placeholder Content object. 
""" target = request.GET.get('target', None) position = request.GET.get('position', None) if 'recover' in request.path_info: pk = obj.pk if obj.parent_id: parent = Page.objects.get(pk=obj.parent_id) else: parent = None obj.lft = 0 obj.rght = 0 obj.tree_id = 0 obj.level = 0 obj.pk = None obj.insert_at(parent, save=False) obj.pk = pk obj.save(no_signals=True) else: if 'history' in request.path_info: old_obj = Page.objects.get(pk=obj.pk) obj.level = old_obj.level obj.parent_id = old_obj.parent_id obj.rght = old_obj.rght obj.lft = old_obj.lft obj.tree_id = old_obj.tree_id new = False if not obj.pk: new = True obj.save() if 'recover' in request.path_info or 'history' in request.path_info: revert_plugins(request, obj.version.pk, obj) if target is not None and position is not None: try: target = self.model.objects.get(pk=target) except self.model.DoesNotExist: pass else: obj.move_to(target, position) page_type_id = form.cleaned_data.get('page_type') copy_target_id = request.GET.get('copy_target') if copy_target_id or page_type_id: if page_type_id: copy_target_id = page_type_id copy_target = Page.objects.get(pk=copy_target_id) if not copy_target.has_view_permission(request): raise PermissionDenied() obj = Page.objects.get(pk=obj.pk) #mptt reload copy_target._copy_attributes(obj, clean=True) obj.save() for lang in copy_target.languages.split(','): copy_target._copy_contents(obj, lang) if not 'permission' in request.path_info: language = form.cleaned_data['language'] Title.objects.set_or_create( request, obj, form, language, ) # is it home? 
publish it right away if new and Page.objects.filter(site_id=obj.site_id).count() == 1: obj.publish(language) def get_fieldsets(self, request, obj=None): form = self.get_form(request, obj, fields=None) if getattr(form, 'fieldsets', None) is None: fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj)) return [(None, {'fields': fields})] else: return form.fieldsets def get_inline_classes(self, request, obj=None, **kwargs): if obj and 'permission' in request.path_info: return PERMISSION_ADMIN_INLINES return [] def get_form_class(self, request, obj=None, **kwargs): if 'advanced' in request.path_info: return AdvancedSettingsForm elif 'permission' in request.path_info: return PagePermissionForm elif 'dates' in request.path_info: return PublicationDatesForm return self.form def get_form(self, request, obj=None, **kwargs): """ Get PageForm for the Page model and modify its fields depending on the request. """ language = get_language_from_request(request, obj) form_cls = self.get_form_class(request, obj) form = super(PageAdmin, self).get_form(request, obj, form=form_cls, **kwargs) # get_form method operates by overriding initial fields value which # may persist across invocation. 
Code below deepcopies fields definition # to avoid leaks for field in form.base_fields.keys(): form.base_fields[field] = copy.deepcopy(form.base_fields[field]) if 'language' in form.base_fields: form.base_fields['language'].initial = language if 'page_type' in form.base_fields: if 'copy_target' in request.GET or 'add_page_type' in request.GET or obj: del form.base_fields['page_type'] elif not Title.objects.filter(page__parent__reverse_id=PAGE_TYPES_ID, language=language).exists(): del form.base_fields['page_type'] if 'add_page_type' in request.GET: del form.base_fields['menu_title'] del form.base_fields['meta_description'] del form.base_fields['page_title'] self.inlines = self.get_inline_classes(request, obj, **kwargs) if obj: if 'history' in request.path_info or 'recover' in request.path_info: version_id = request.path_info.split('/')[-2] else: version_id = None title_obj = obj.get_title_obj(language=language, fallback=False, version_id=version_id, force_reload=True) if 'site' in form.base_fields and form.base_fields['site'].initial is None: form.base_fields['site'].initial = obj.site for name in ('slug', 'title', 'meta_description', 'menu_title', 'page_title', 'redirect'): if name in form.base_fields: form.base_fields[name].initial = getattr(title_obj, name) if 'overwrite_url' in form.base_fields: if title_obj.has_url_overwrite: form.base_fields['overwrite_url'].initial = title_obj.path else: form.base_fields['overwrite_url'].initial = '' else: for name in ('slug', 'title'): form.base_fields[name].initial = u'' if 'target' in request.GET or 'copy_target' in request.GET: target = request.GET.get('copy_target') or request.GET.get('target') if 'position' in request.GET: position = request.GET['position'] if position == 'last-child' or position == 'first-child': form.base_fields['parent'].initial = request.GET.get('target', None) else: sibling = Page.objects.get(pk=target) form.base_fields['parent'].initial = sibling.parent_id else: form.base_fields['parent'].initial 
= request.GET.get('target', None) form.base_fields['site'].initial = request.session.get('cms_admin_site', None) return form def advanced(self, request, object_id): page = get_object_or_404(Page, pk=object_id) if not page.has_advanced_settings_permission(request): raise PermissionDenied("No permission for editing advanced settings") return self.change_view(request, object_id, extra_context={'advanced_settings': True, 'title': _("Advanced Settings")}) def dates(self, request, object_id): return self.change_view(request, object_id, extra_context={'publishing_dates': True, 'title': _("Publishing dates")}) def permissions(self, request, object_id): page = get_object_or_404(Page, pk=object_id) if not page.has_change_permissions_permission(request): raise PermissionDenied("No permission for editing advanced settings") return self.change_view(request, object_id, extra_context={'show_permissions': True, 'title': _("Change Permissions")}) def get_inline_instances(self, request, obj=None): if DJANGO_1_4: inlines = super(PageAdmin, self).get_inline_instances(request) if hasattr(self, '_current_page'): obj = self._current_page else: inlines = super(PageAdmin, self).get_inline_instances(request, obj) if get_cms_setting('PERMISSION') and obj: filtered_inlines = [] for inline in inlines: if (isinstance(inline, PagePermissionInlineAdmin) and not isinstance(inline, ViewRestrictionInlineAdmin)): if "recover" in request.path or "history" in request.path: # do not display permissions in recover mode continue if not obj.has_change_permissions_permission(request): continue filtered_inlines.append(inline) inlines = filtered_inlines return inlines def get_unihandecode_context(self, language): if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'): uhd_lang = language[:2] else: uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER') uhd_host = get_cms_setting('UNIHANDECODE_HOST') uhd_version = get_cms_setting('UNIHANDECODE_VERSION') if uhd_lang and uhd_host and uhd_version: 
uhd_urls = [ '%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version), '%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang), ] else: uhd_urls = [] return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls} def add_view(self, request, form_url='', extra_context=None): extra_context = extra_context or {} language = get_language_from_request(request) extra_context.update({ 'language': language, }) if not request.GET.get('add_page_type') is None: extra_context.update({ 'add_page_type': True, 'title': _("Add Page Type"), }) elif 'copy_target' in request.GET: extra_context.update({ 'title': _("Add Page Copy"), }) else: extra_context = self.update_language_tab_context(request, context=extra_context) extra_context.update(self.get_unihandecode_context(language)) return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): """ The 'change' admin view for the Page model. """ if extra_context is None: extra_context = {'basic_info': True} try: obj = self.model.objects.get(pk=object_id) except self.model.DoesNotExist: # Don't raise Http404 just yet, because we haven't checked # permissions yet. We don't want an unauthenticated user to be able # to determine whether a given object exists. 
obj = None else: #activate(user_lang_set) context = { 'page': obj, 'CMS_PERMISSION': get_cms_setting('PERMISSION'), 'ADMIN_MEDIA_URL': settings.STATIC_URL, 'can_change': obj.has_change_permission(request), 'can_change_permissions': obj.has_change_permissions_permission(request), 'current_site_id': settings.SITE_ID, } context.update(extra_context or {}) extra_context = self.update_language_tab_context(request, obj, context) tab_language = get_language_from_request(request) extra_context.update(self.get_unihandecode_context(tab_language)) # get_inline_instances will need access to 'obj' so that it can # determine if current user has enough rights to see PagePermissionInlineAdmin # because in django versions <1.5 get_inline_instances doesn't receive 'obj' # as a parameter, the workaround is to set it as an attribute... if DJANGO_1_4: self._current_page = obj response = super(PageAdmin, self).change_view( request, object_id, form_url=form_url, extra_context=extra_context) if tab_language and response.status_code == 302 and response._headers['location'][1] == request.path_info: location = response._headers['location'] response._headers['location'] = (location[0], "%s?language=%s" % (location[1], tab_language)) return response def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): # add context variables filled_languages = [] if obj: filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')] allowed_languages = [lang[0] for lang in self._get_site_languages(obj)] context.update({ 'filled_languages': [lang for lang in filled_languages if lang in allowed_languages], }) return super(PageAdmin, self).render_change_form(request, context, add, change, form_url, obj) def _get_site_languages(self, obj=None): site_id = None if obj: site_id = obj.site_id else: site_id = Site.objects.get_current().pk return get_language_tuple(site_id) def update_language_tab_context(self, request, obj=None, 
context=None): if not context: context = {} language = get_language_from_request(request, obj) languages = self._get_site_languages(obj) context.update({ 'language': language, 'language_tabs': languages, # Dates are not language dependent, thus we hide the language # selection bar: the language is forced through the form class 'show_language_tabs': len(list(languages)) > 1 and not context.get('publishing_dates', False), }) return context def response_change(self, request, obj): """Called always when page gets changed, call save on page, there may be some new stuff, which should be published after all other objects on page are collected. """ # save the object again, so all the related changes to page model # can be published if required obj.save() return super(PageAdmin, self).response_change(request, obj) def has_add_permission(self, request): """ Return true if the current user has permission to add a new page. """ if get_cms_setting('PERMISSION'): return permissions.has_page_add_permission(request) return super(PageAdmin, self).has_add_permission(request) def has_change_permission(self, request, obj=None): """ Return true if the current user has permission on the page. Return the string 'All' if the user has all rights. """ if get_cms_setting('PERMISSION'): if obj: return obj.has_change_permission(request) else: return permissions.has_page_change_permission(request) return super(PageAdmin, self).has_change_permission(request, obj) def has_delete_permission(self, request, obj=None): """ Returns True if the given request has permission to change the given Django model instance. If CMS_PERMISSION are in use also takes look to object permissions. 
""" if get_cms_setting('PERMISSION') and obj is not None: return obj.has_delete_permission(request) return super(PageAdmin, self).has_delete_permission(request, obj) def has_recover_permission(self, request): """ Returns True if the use has the right to recover pages """ if not is_installed('reversion'): return False user = request.user if user.is_superuser: return True try: if has_global_page_permission(request, can_recover_page=True): return True except: pass return False def has_add_plugin_permission(self, request, placeholder, plugin_type): if not permissions.has_plugin_permission(request.user, plugin_type, "add"): return False page = placeholder.page if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return False return True def has_copy_plugin_permission(self, request, source_placeholder, target_placeholder, plugins): source_page = source_placeholder.page if source_page and not source_page.has_change_permission(request): return False target_page = target_placeholder.page if target_page and not target_page.has_change_permission(request): return False if target_page and not target_page.publisher_is_draft: return False for plugin in plugins: if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"): return False return True def has_change_plugin_permission(self, request, plugin): page = plugin.placeholder.page if plugin.placeholder else None if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return False if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"): return False return True def has_move_plugin_permission(self, request, plugin, target_placeholder): if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"): return False page = plugin.placeholder.page if page and not page.has_change_permission(request): return False if page and not page.publisher_is_draft: return 
False return True def has_delete_plugin_permission(self, request, plugin): if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"): return False page = plugin.placeholder.page if page: if not page.publisher_is_draft: return False if not page.has_change_permission(request): return False return True def has_clear_placeholder_permission(self, request, placeholder): page = placeholder.page if placeholder else None if page: if not page.publisher_is_draft: return False if not page.has_change_permission(request): return False return True def post_add_plugin(self, request, placeholder, plugin): if is_installed('reversion') and placeholder.page: plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) message = _(u"%(plugin_name)s plugin added to %(placeholder)s") % { 'plugin_name': plugin_name, 'placeholder': placeholder} self.cleanup_history(placeholder.page) helpers.make_revision_with_plugins(placeholder.page, request.user, message) def post_copy_plugins(self, request, source_placeholder, target_placeholder, plugins): page = target_placeholder.page if page and is_installed('reversion'): message = _(u"Copied plugins to %(placeholder)s") % {'placeholder': target_placeholder} self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, message) def post_edit_plugin(self, request, plugin): page = plugin.placeholder.page if page: # if reversion is installed, save version of the page plugins if is_installed('reversion') and page: plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) message = _( u"%(plugin_name)s plugin edited at position %(position)s in %(placeholder)s") % { 'plugin_name': plugin_name, 'position': plugin.position, 'placeholder': plugin.placeholder.slot } self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, message) def post_move_plugin(self, request, source_placeholder, target_placeholder, plugin): page = target_placeholder.page if page 
and is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, _(u"Plugins were moved")) def post_delete_plugin(self, request, plugin): plugin_name = force_unicode(plugin_pool.get_plugin(plugin.plugin_type).name) page = plugin.placeholder.page if page: page.save() comment = _("%(plugin_name)s plugin at position %(position)s in %(placeholder)s was deleted.") % { 'plugin_name': plugin_name, 'position': plugin.position, 'placeholder': plugin.placeholder, } if is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, comment) def post_clear_placeholder(self, request, placeholder): page = placeholder.page if page: page.save() comment = _('All plugins in the placeholder "%(name)s" were deleted.') % { 'name': force_unicode(placeholder) } if is_installed('reversion'): self.cleanup_history(page) helpers.make_revision_with_plugins(page, request.user, comment) def get_placeholder_template(self, request, placeholder): page = placeholder.page if page: return page.get_template() def changelist_view(self, request, extra_context=None): "The 'change list' admin view for this model." from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_change_permission(request, None): return HttpResponseForbidden(force_unicode(_("You do not have permission to change pages."))) try: cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter, self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page, self.list_max_show_all, self.list_editable, self) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. 
# NOTE(review): this span is the tail of cms.admin.pageadmin.PageAdmin. The
# enclosing `class PageAdmin(...)` header and the start of changelist_view are
# above this span; the fragment below begins inside changelist_view's
# IncorrectLookupParameters handler. Formatting reconstructed; code unchanged.

            # If wacky parameters were given and
            # the 'invalid=1' parameter was already in the query string, something
            # is screwed up with the database, so display an error page.
            if ERROR_FLAG in request.GET.keys():
                return render_to_response('admin/invalid_setup.html', {'title': _('Database error')})
            return HttpResponseRedirect(request.path_info + '?' + ERROR_FLAG + '=1')
        cl.set_items(request)

        site_id = request.GET.get('site__exact', None)
        if site_id is None:
            site_id = current_site(request).pk
        site_id = int(site_id)

        # languages
        languages = get_language_list(site_id)

        # parse the cookie that saves which page trees have
        # been opened already and extracts the page ID
        djangocms_nodes_open = request.COOKIES.get('djangocms_nodes_open', '')
        raw_nodes = unquote(djangocms_nodes_open).split(',')
        try:
            open_menu_trees = [int(c.split('page_', 1)[1]) for c in raw_nodes]
        except IndexError:
            open_menu_trees = []
        # Language may be present in the GET dictionary but empty
        language = request.GET.get('language', get_language())
        if not language:
            language = get_language()
        context = {
            'title': cl.title,
            'is_popup': cl.is_popup,
            'cl': cl,
            'opts': opts,
            'has_add_permission': self.has_add_permission(request),
            'root_path': admin_reverse('index'),
            'app_label': app_label,
            'preview_language': language,
            'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
            'CMS_PERMISSION': get_cms_setting('PERMISSION'),
            'DEBUG': settings.DEBUG,
            'site_languages': languages,
            'open_menu_trees': open_menu_trees,
        }
        if is_installed('reversion'):
            # Recover/change flags only make sense when django-reversion is available.
            context['has_recover_permission'] = self.has_recover_permission(request)
            context['has_change_permission'] = self.has_change_permission(request)
        context.update(extra_context or {})
        return render_to_response(self.change_list_template or [
            'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()),
            'admin/%s/change_list.html' % app_label,
            'admin/change_list.html'
        ], context, context_instance=RequestContext(request))

    def recoverlist_view(self, request, extra_context=None):
        """Reversion recover-list view, gated on the recover permission."""
        if not self.has_recover_permission(request):
            raise PermissionDenied
        return super(PageAdmin, self).recoverlist_view(request, extra_context)

    def recover_view(self, request, version_id, extra_context=None):
        """Reversion recover view for a single deleted page version."""
        if not self.has_recover_permission(request):
            raise PermissionDenied
        extra_context = self.update_language_tab_context(request, None, extra_context)
        return super(PageAdmin, self).recover_view(request, version_id, extra_context)

    def revision_view(self, request, object_id, version_id, extra_context=None):
        """Reversion revision view, gated on change permission for the page."""
        if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
            raise PermissionDenied
        extra_context = self.update_language_tab_context(request, None, extra_context)
        response = super(PageAdmin, self).revision_view(request, object_id, version_id, extra_context)
        return response

    def history_view(self, request, object_id, extra_context=None):
        """Admin history view, gated on change permission for the page."""
        if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
            raise PermissionDenied
        extra_context = self.update_language_tab_context(request, None, extra_context)
        return super(PageAdmin, self).history_view(request, object_id, extra_context)

    def render_revision_form(self, request, obj, version, context, revert=False, recover=False):
        """Render the reversion form, fixing up a dangling parent reference first."""
        # reset parent to null if parent is not found
        if version.field_dict['parent']:
            try:
                Page.objects.get(pk=version.field_dict['parent'])
            # NOTE(review): bare except swallows all errors, not just DoesNotExist —
            # consider narrowing to Page.DoesNotExist.
            except:
                if revert and obj.parent_id != int(version.field_dict['parent']):
                    version.field_dict['parent'] = obj.parent_id
                if recover:
                    obj.parent = None
                    obj.parent_id = None
                    version.field_dict['parent'] = None
        obj.version = version
        return super(PageAdmin, self).render_revision_form(request, obj, version, context, revert, recover)

    @require_POST
    def undo(self, request, object_id):
        """Revert the page to the revision preceding its current one (POST only)."""
        if not is_installed('reversion'):
            return HttpResponseBadRequest('django reversion not installed')
        from reversion.models import Revision
        from cms.utils.page_resolver import is_valid_url
        import reversion

        page = get_object_or_404(Page, pk=object_id)
        old_titles = list(page.title_set.all())
        if not page.publisher_is_draft:
            page = page.publisher_draft
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page")))
        versions = reversion.get_for_object(page)
        if page.revision_id:
            current_revision = Revision.objects.get(pk=page.revision_id)
        else:
            try:
                current_version = versions[0]
            except IndexError:
                return HttpResponseBadRequest("no current revision found")
            current_revision = current_version.revision
        try:
            previous_version = versions.filter(revision__pk__lt=current_revision.pk)[0]
        except IndexError:
            return HttpResponseBadRequest("no previous revision found")
        previous_revision = previous_version.revision
        # clear all plugins
        placeholders = page.placeholders.all()
        placeholder_ids = []
        for placeholder in placeholders:
            placeholder_ids.append(placeholder.pk)
        plugins = CMSPlugin.objects.filter(placeholder__in=placeholder_ids).order_by('-level')
        for plugin in plugins:
            # _no_reorder presumably skips tree reordering on delete — TODO confirm
            plugin._no_reorder = True
            plugin.delete()
        # TODO: delete placeholders instead of finding duplicates for 3.1
        #page.placeholders.all().delete()
        previous_revision.revert(True)
        rev_page = get_object_or_404(Page, pk=page.pk)
        rev_page.revision_id = previous_revision.pk
        rev_page.publisher_public_id = page.publisher_public_id
        rev_page.save()
        new_placeholders = rev_page.placeholders.all()
        slots = {}
        for new_ph in new_placeholders:
            if not new_ph.slot in slots:
                slots[new_ph.slot] = new_ph
            else:
                # Duplicate slot after revert: drop whichever copy belonged to
                # the pre-revert placeholder set.
                if new_ph in placeholder_ids:
                    new_ph.delete()
                elif slots[new_ph.slot] in placeholder_ids:
                    slots[new_ph.slot].delete()
        new_titles = rev_page.title_set.all()
        for title in new_titles:
            try:
                is_valid_url(title.path, rev_page)
            except ValidationError:
                # Restore the old slug when the reverted path collides with
                # an existing URL.
                for old_title in old_titles:
                    if old_title.language == title.language:
                        title.slug = old_title.slug
                        title.save()
                messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
        return HttpResponse("ok")

    @require_POST
    def redo(self, request, object_id):
        """Re-apply the revision following the page's current one (POST only)."""
        if not is_installed('reversion'):
            return HttpResponseBadRequest('django reversion not installed')
        from reversion.models import Revision
        import reversion
        from cms.utils.page_resolver import is_valid_url

        page = get_object_or_404(Page, pk=object_id)
        old_titles = list(page.title_set.all())
        if not page.publisher_is_draft:
            page = page.publisher_draft
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page")))
        versions = reversion.get_for_object(page)
        if page.revision_id:
            current_revision = Revision.objects.get(pk=page.revision_id)
        else:
            try:
                current_version = versions[0]
            except IndexError:
                return HttpResponseBadRequest("no current revision found")
            current_revision = current_version.revision
        try:
            previous_version = versions.filter(revision__pk__gt=current_revision.pk).order_by('pk')[0]
        except IndexError:
            return HttpResponseBadRequest("no next revision found")
        next_revision = previous_version.revision
        # clear all plugins
        placeholders = page.placeholders.all()
        placeholder_ids = []
        for placeholder in placeholders:
            placeholder_ids.append(placeholder.pk)
        plugins = CMSPlugin.objects.filter(placeholder__in=placeholder_ids).order_by('-level')
        for plugin in plugins:
            plugin._no_reorder = True
            plugin.delete()
        # TODO: 3.1 remove the placeholder matching from below and just delete them
        #page.placeholders.all().delete()
        next_revision.revert(True)
        rev_page = get_object_or_404(Page, pk=page.pk)
        rev_page.revision_id = next_revision.pk
        rev_page.publisher_public_id = page.publisher_public_id
        rev_page.save()
        new_placeholders = rev_page.placeholders.all()
        slots = {}
        for new_ph in new_placeholders:
            if not new_ph.slot in slots:
                slots[new_ph.slot] = new_ph
            else:
                if new_ph in placeholder_ids:
                    new_ph.delete()
                elif slots[new_ph.slot] in placeholder_ids:
                    slots[new_ph.slot].delete()
        new_titles = rev_page.title_set.all()
        for title in new_titles:
            try:
                is_valid_url(title.path, rev_page)
            except ValidationError:
                for old_title in old_titles:
                    if old_title.language == title.language:
                        title.slug = old_title.slug
                        title.save()
                messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
        return HttpResponse("ok")

    @require_POST
    @create_revision()
    def change_template(self, request, object_id):
        """Change the page's template to one of the configured CMS templates (POST only)."""
        page = get_object_or_404(Page, pk=object_id)
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change the template")))

        to_template = request.POST.get("template", None)
        if to_template not in dict(get_cms_setting('TEMPLATES')):
            return HttpResponseBadRequest(force_unicode(_("Template not valid")))

        page.template = to_template
        page.save()
        if is_installed('reversion'):
            message = _("Template changed to %s") % dict(get_cms_setting('TEMPLATES'))[to_template]
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, message)
        return HttpResponse(force_unicode(_("The template was successfully changed")))

    @require_POST
    @wrap_transaction
    def move_page(self, request, page_id, extra_context=None):
        """
        Move the page to the requested target, at the given position
        """
        target = request.POST.get('target', None)
        position = request.POST.get('position', None)
        if target is None or position is None:
            return HttpResponseRedirect('../../')

        try:
            page = self.model.objects.get(pk=page_id)
            target = self.model.objects.get(pk=target)
        except self.model.DoesNotExist:
            return jsonify_request(HttpResponseBadRequest("error"))

        # does he haves permissions to do this...?
        if not page.has_move_page_permission(request) or \
                not target.has_add_permission(request):
            return jsonify_request(
                HttpResponseForbidden(force_unicode(_("Error! You don't have permissions to move this page. Please reload the page"))))

        # move page
        page.move_page(target, position)

        if is_installed('reversion'):
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, _("Page moved"))

        return jsonify_request(HttpResponse(admin_utils.render_admin_menu_item(request, page).content))

    def get_permissions(self, request, page_id):
        """Render the permission table (global + per-page) for a page."""
        page = get_object_or_404(Page, id=page_id)

        can_change_list = Page.permissions.get_change_id_list(request.user, page.site_id)

        global_page_permissions = GlobalPagePermission.objects.filter(sites__in=[page.site_id])
        page_permissions = PagePermission.objects.for_page(page)
        all_permissions = list(global_page_permissions) + list(page_permissions)

        # does he can change global permissions ?
        has_global = permissions.has_global_change_permissions_permission(request)

        permission_set = []
        for permission in all_permissions:
            if isinstance(permission, GlobalPagePermission):
                if has_global:
                    permission_set.append([(True, True), permission])
                else:
                    permission_set.append([(True, False), permission])
            else:
                if can_change_list == PagePermissionsPermissionManager.GRANT_ALL:
                    can_change = True
                else:
                    can_change = permission.page_id in can_change_list
                permission_set.append([(False, can_change), permission])

        context = {
            'page': page,
            'permission_set': permission_set,
        }
        return render_to_response('admin/cms/page/permissions.html', context)

    @require_POST
    @wrap_transaction
    def copy_language(self, request, page_id):
        """Copy all plugins of a page from one language to another (POST only)."""
        with create_revision():
            source_language = request.POST.get('source_language')
            target_language = request.POST.get('target_language')
            page = Page.objects.get(pk=page_id)
            placeholders = page.placeholders.all()

            if not target_language or not target_language in get_language_list():
                return HttpResponseBadRequest(force_unicode(_("Language must be set to a supported language!")))
            for placeholder in placeholders:
                plugins = list(
                    placeholder.cmsplugin_set.filter(language=source_language).order_by('tree_id', 'level', 'position'))
                if not self.has_copy_plugin_permission(request, placeholder, placeholder, plugins):
                    return HttpResponseForbidden(force_unicode(_('You do not have permission to copy these plugins.')))
                copy_plugins.copy_plugins_to(plugins, placeholder, target_language)
            if page and is_installed('reversion'):
                message = _(u"Copied plugins from %(source_language)s to %(target_language)s") % {
                    'source_language': source_language, 'target_language': target_language}
                self.cleanup_history(page)
                helpers.make_revision_with_plugins(page, request.user, message)
            return HttpResponse("ok")

    @require_POST
    @wrap_transaction
    def copy_page(self, request, page_id, extra_context=None):
        """
        Copy the page and all its plugins and descendants to the requested target, at the given position
        """
        context = {}
        page = Page.objects.get(pk=page_id)

        target = request.POST.get('target', None)
        position = request.POST.get('position', None)
        site = request.POST.get('site', None)
        if target is not None and position is not None and site is not None:
            try:
                target = self.model.objects.get(pk=target)
                # does he have permissions to copy this page under target?
                assert target.has_add_permission(request)
                site = Site.objects.get(pk=site)
            except (ObjectDoesNotExist, AssertionError):
                return HttpResponse("error")
                #context.update({'error': _('Page could not been moved.')})
            else:
                try:
                    kwargs = {
                        'copy_permissions': request.REQUEST.get('copy_permissions', False),
                    }
                    page.copy_page(target, site, position, **kwargs)
                    return jsonify_request(HttpResponse("ok"))
                except ValidationError:
                    exc = sys.exc_info()[1]
                    return jsonify_request(HttpResponseBadRequest(exc.messages))
        context.update(extra_context or {})
        return HttpResponseRedirect('../../')

    @require_POST
    @wrap_transaction
    @create_revision()
    def publish_page(self, request, page_id, language):
        """Publish a page and/or the static placeholders named in ?statics= (POST only)."""
        try:
            page = Page.objects.get(id=page_id, publisher_is_draft=True)
        except Page.DoesNotExist:
            page = None
        # ensure user has permissions to publish this page
        all_published = True
        if page:
            if not page.has_publish_permission(request):
                return HttpResponseForbidden(force_unicode(_("You do not have permission to publish this page")))
            published = page.publish(language)
            if not published:
                all_published = False
        statics = request.GET.get('statics', '')
        if not statics and not page:
            # NOTE(review): this *returns* an Http404 instance instead of
            # raising it — callers will receive the exception object, not a
            # 404 response. Probably should be `raise Http404(...)`; verify.
            return Http404("No page or stack found for publishing.")
        if statics:
            static_ids = statics.split(',')
            for pk in static_ids:
                static_placeholder = StaticPlaceholder.objects.get(pk=pk)
                published = static_placeholder.publish(request, language)
                if not published:
                    all_published = False
        if page:
            if all_published:
                if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                    messages.warning(request, _("Page not published! A parent page is not published yet."))
                else:
                    messages.info(request, _('The content was successfully published.'))
                LogEntry.objects.log_action(
                    user_id=request.user.id,
                    content_type_id=ContentType.objects.get_for_model(Page).pk,
                    object_id=page_id,
                    object_repr=page.get_title(language),
                    action_flag=CHANGE,
                )
            else:
                if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                    messages.warning(request, _("Page not published! A parent page is not published yet."))
                else:
                    messages.warning(request, _("There was a problem publishing your content"))
        if is_installed('reversion') and page:
            self.cleanup_history(page, publish=True)
            helpers.make_revision_with_plugins(page, request.user, PUBLISH_COMMENT)
            # create a new publish reversion
        if 'node' in request.REQUEST:
            # if request comes from tree..
            return admin_utils.render_admin_menu_item(request, page)
        if 'redirect' in request.GET:
            return HttpResponseRedirect(request.GET['redirect'])
        referrer = request.META.get('HTTP_REFERER', '')
        path = admin_reverse("cms_page_changelist")
        if request.GET.get('redirect_language'):
            path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'),
                                                  request.GET.get('redirect_page_id'))
        if admin_reverse('index') not in referrer:
            # Request did not come from the admin: send the user back to the
            # published page (or the referrer) with edit mode switched off.
            if all_published:
                if page:
                    if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                        path = page.get_absolute_url(language, fallback=True)
                    else:
                        public_page = Page.objects.get(publisher_public=page.pk)
                        path = '%s?%s' % (public_page.get_absolute_url(language, fallback=True),
                                          get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
                else:
                    path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
            else:
                path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
        return HttpResponseRedirect(path)

    def cleanup_history(self, page, publish=False):
        """Trim stored reversion revisions for a page to the configured limits."""
        if is_installed('reversion') and page:
            # delete revisions that are not publish revisions
            from reversion.models import Version

            content_type = ContentType.objects.get_for_model(Page)
            # reversion 1.8+ removes type field, revision filtering must be based on comments
            versions_qs = Version.objects.filter(content_type=content_type, object_id_int=page.pk)
            history_limit = get_cms_setting("MAX_PAGE_HISTORY_REVERSIONS")
            deleted = []
            for version in versions_qs.exclude(revision__comment__in=(INITIAL_COMMENT, PUBLISH_COMMENT)).order_by(
                    '-revision__pk')[history_limit - 1:]:
                if not version.revision_id in deleted:
                    revision = version.revision
                    revision.delete()
                    deleted.append(revision.pk)
            # delete all publish revisions that are more then MAX_PAGE_PUBLISH_REVERSIONS
            publish_limit = get_cms_setting("MAX_PAGE_PUBLISH_REVERSIONS")
            if publish_limit and publish:
                deleted = []
                for version in versions_qs.filter(revision__comment__exact=PUBLISH_COMMENT).order_by(
                        '-revision__pk')[publish_limit - 1:]:
                    if not version.revision_id in deleted:
                        revision = version.revision
                        revision.delete()
                        deleted.append(revision.pk)

    @require_POST
    @wrap_transaction
    def unpublish(self, request, page_id, language):
        """
        Publish or unpublish a language of a page
        """
        site = Site.objects.get_current()
        page = get_object_or_404(Page, pk=page_id)
        if not page.has_publish_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to unpublish this page")))
        if not page.publisher_public_id:
            return HttpResponseForbidden(force_unicode(_("This page was never published")))
        try:
            page.unpublish(language)
            message = _('The %(language)s page "%(page)s" was successfully unpublished') % {
                'language': get_language_object(language, site)['name'], 'page': page}
            messages.info(request, message)
            LogEntry.objects.log_action(
                user_id=request.user.id,
                content_type_id=ContentType.objects.get_for_model(Page).pk,
                object_id=page_id,
                object_repr=page.get_title(),
                action_flag=CHANGE,
                change_message=message,
            )
        except RuntimeError:
            exc = sys.exc_info()[1]
            messages.error(request, exc.message)
        except ValidationError:
            exc = sys.exc_info()[1]
            messages.error(request, exc.message)
        path = admin_reverse("cms_page_changelist")
        if request.GET.get('redirect_language'):
            path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'),
                                                  request.GET.get('redirect_page_id'))
        return HttpResponseRedirect(path)

    @require_POST
    @wrap_transaction
    def revert_page(self, request, page_id, language):
        """Revert the draft page to its published version (POST only)."""
        page = get_object_or_404(Page, id=page_id)
        # ensure user has permissions to publish this page
        if not page.has_change_permission(request):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page")))

        page.revert(language)

        messages.info(request, _('The page "%s" was successfully reverted.') % page)

        if 'node' in request.REQUEST:
            # if request comes from tree..
            return admin_utils.render_admin_menu_item(request, page)

        referer = request.META.get('HTTP_REFERER', '')
        path = '../../'
        if admin_reverse('index') not in referer:
            path = '%s?%s' % (referer.split('?')[0], get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
        return HttpResponseRedirect(path)

    @create_revision()
    def delete_translation(self, request, object_id, extra_context=None):
        """Delete a single language translation (title + plugins) of a page,
        showing a confirmation page on GET and deleting on POST."""
        if 'language' in request.GET:
            language = request.GET['language']
        else:
            language = get_language_from_request(request)

        opts = Page._meta
        titleopts = Title._meta
        app_label = titleopts.app_label
        pluginopts = CMSPlugin._meta

        try:
            obj = self.queryset(request).get(pk=unquote(object_id))
        except self.model.DoesNotExist:
            # Don't raise Http404 just yet, because we haven't checked
            # permissions yet. We don't want an unauthenticated user to be able
            # to determine whether a given object exists.
            obj = None

        if not self.has_delete_permission(request, obj):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page")))

        if obj is None:
            raise Http404(
                _('%(name)s object with primary key %(key)r does not exist.') % {
                    'name': force_unicode(opts.verbose_name),
                    'key': escape(object_id)
                })

        if not len(list(obj.get_languages())) > 1:
            raise Http404(_('There only exists one translation for this page'))

        titleobj = get_object_or_404(Title, page__id=object_id, language=language)
        saved_plugins = CMSPlugin.objects.filter(placeholder__page__id=object_id, language=language)

        using = router.db_for_read(self.model)
        kwargs = {
            'admin_site': self.admin_site,
            'user': request.user,
            'using': using
        }
        deleted_objects, perms_needed = get_deleted_objects(
            [titleobj],
            titleopts,
            **kwargs
        )[:2]
        to_delete_plugins, perms_needed_plugins = get_deleted_objects(
            saved_plugins,
            pluginopts,
            **kwargs
        )[:2]

        deleted_objects.append(to_delete_plugins)
        perms_needed = set(list(perms_needed) + list(perms_needed_plugins))

        if request.method == 'POST':
            if perms_needed:
                raise PermissionDenied

            message = _('Title and plugins with language %(language)s was deleted') % {
                'language': force_unicode(get_language_object(language)['name'])
            }
            self.log_change(request, titleobj, message)
            messages.info(request, message)

            titleobj.delete()
            for p in saved_plugins:
                p.delete()
            public = obj.publisher_public
            if public:
                public.save()

            if is_installed('reversion'):
                self.cleanup_history(obj)
                helpers.make_revision_with_plugins(obj, request.user, message)

            if not self.has_change_permission(request, None):
                return HttpResponseRedirect("../../../../")
            return HttpResponseRedirect("../../")

        context = {
            "title": _("Are you sure?"),
            "object_name": force_unicode(titleopts.verbose_name),
            "object": titleobj,
            "deleted_objects": deleted_objects,
            "perms_lacking": perms_needed,
            "opts": opts,
            "root_path": admin_reverse('index'),
            "app_label": app_label,
        }
        context.update(extra_context or {})
        context_instance = RequestContext(request, current_app=self.admin_site.name)
        return render_to_response(self.delete_confirmation_template or [
            "admin/%s/%s/delete_confirmation.html" % (app_label, titleopts.object_name.lower()),
            "admin/%s/delete_confirmation.html" % app_label,
            "admin/delete_confirmation.html"
        ], context, context_instance=context_instance)

    def preview_page(self, request, object_id, language):
        """Redirecting preview function based on draft_id
        """
        page = get_object_or_404(Page, id=object_id)
        attrs = "?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
        attrs += "&language=" + language
        with force_language(language):
            url = page.get_absolute_url(language) + attrs
        site = get_current_site(request)
        if not site == page.site:
            # Cross-site preview: build an absolute URL on the page's own site.
            url = "http%s://%s%s" % ('s' if request.is_secure() else '', page.site.domain, url)
        return HttpResponseRedirect(url)

    @require_POST
    def change_innavigation(self, request, page_id):
        """
        Switch the in_navigation of a page
        """
        page = get_object_or_404(Page, pk=page_id)
        if page.has_change_permission(request):
            page.toggle_in_navigation()
            language = request.GET.get('language') or get_language_from_request(request)
            return admin_utils.render_admin_menu_item(request, page, language=language)
        return HttpResponseForbidden(force_unicode(_("You do not have permission to change this page's in_navigation status")))

    def descendants(self, request, page_id, language):
        """
        Get html for descendants of given page
        Used for lazy loading pages in cms.changelist.js

        Permission checks is done in admin_utils.get_admin_menu_item_context
        which is called by admin_utils.render_admin_menu_item.
        """
        page = get_object_or_404(Page, pk=page_id)
        return admin_utils.render_admin_menu_item(request, page,
                                                  template="admin/cms/page/tree/lazy_menu.html", language=language)

    def add_page_type(self, request):
        """Redirect to the page-add form pre-configured for creating a page type."""
        site = Site.objects.get_current()
        language = request.GET.get('language') or get_language()
        target = request.GET.get('copy_target')

        # Page types live under a hidden root page identified by PAGE_TYPES_ID.
        type_root, created = Page.objects.get_or_create(reverse_id=PAGE_TYPES_ID, publisher_is_draft=True, site=site,
                                                        defaults={'in_navigation': False})
        type_title, created = Title.objects.get_or_create(page=type_root, language=language, slug=PAGE_TYPES_ID,
                                                          defaults={'title': _('Page Types')})

        url = add_url_parameters(admin_reverse('cms_page_add'), target=type_root.pk, position='first-child',
                                 add_page_type=1, copy_target=target, language=language)

        return HttpResponseRedirect(url)

    def resolve(self, request):
        """Resolve the URL of the object last edited in the admin (or given
        via ?pk=&model=) and return it as plain text; '/' for non-staff."""
        if not request.user.is_staff:
            if DJANGO_1_4:
                return HttpResponse('/', mimetype='text/plain')
            else:
                return HttpResponse('/', content_type='text/plain')
        obj = False
        url = False
        if request.session.get('cms_log_latest', False):
            log = LogEntry.objects.get(pk=request.session['cms_log_latest'])
            try:
                obj = log.get_edited_object()
            except (ObjectDoesNotExist, ValueError):
                obj = None
            del request.session['cms_log_latest']
            if obj and obj.__class__ in toolbar_pool.get_watch_models() and hasattr(obj, 'get_absolute_url'):
                # This is a test if the object url can be retrieved
                # In case it can't, object it's not taken into account
                try:
                    force_unicode(obj.get_absolute_url())
                # NOTE(review): bare except — consider narrowing.
                except:
                    obj = None
            else:
                obj = None
        if not obj:
            pk = request.REQUEST.get('pk')
            full_model = request.REQUEST.get('model')
            if pk and full_model:
                app_label, model = full_model.split('.')
                if pk and app_label:
                    ctype = ContentType.objects.get(app_label=app_label, model=model)
                    try:
                        obj = ctype.get_object_for_this_type(pk=pk)
                    except ctype.model_class().DoesNotExist:
                        obj = None
                    try:
                        force_unicode(obj.get_absolute_url())
                    # NOTE(review): bare except — consider narrowing.
                    except:
                        obj = None
        if obj:
            if not request.toolbar or not request.toolbar.edit_mode:
                if isinstance(obj, Page):
                    if obj.get_public_object():
                        url = obj.get_public_object().get_absolute_url()
                    else:
                        url = '%s?%s' % (
                            obj.get_draft_object().get_absolute_url(),
                            get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
                        )
                else:
                    url = obj.get_absolute_url()
            else:
                url = obj.get_absolute_url()
        if url:
            return HttpResponse(force_unicode(url), content_type='text/plain')
        return HttpResponse('', content_type='text/plain')

    def lookup_allowed(self, key, *args, **kwargs):
        """Allow filtering the changelist by exact site in addition to defaults."""
        if key == 'site__exact':
            return True
        return super(PageAdmin, self).lookup_allowed(key, *args, **kwargs)

    def edit_title_fields(self, request, page_id, language):
        """Frontend-editing endpoint for a whitelisted subset of Title fields."""
        title = Title.objects.get(page_id=page_id, language=language)
        saved_successfully = False
        raw_fields = request.GET.get("edit_fields", 'title')
        # Only fields declared frontend-editable on the admin may be edited.
        edit_fields = [field for field in raw_fields.split(",") if field in self.title_frontend_editable_fields]
        cancel_clicked = request.POST.get("_cancel", False)
        opts = Title._meta

        if not edit_fields:
            # Defaults to title
            edit_fields = ('title',)

        if not has_generic_permission(title.page.pk, request.user, "change",
                                      title.page.site.pk):
            return HttpResponseForbidden(force_unicode(_("You do not have permission to edit this page")))

        class PageTitleForm(django.forms.ModelForm):
            """
            Dynamic form showing only the fields to be edited
            """
            class Meta:
                model = Title
                fields = edit_fields

        if not cancel_clicked and request.method == 'POST':
            form = PageTitleForm(instance=title, data=request.POST)
            if form.is_valid():
                form.save()
                saved_successfully = True
        else:
            form = PageTitleForm(instance=title)
        admin_form = AdminForm(form, fieldsets=[(None, {'fields': edit_fields})], prepopulated_fields={},
                               model_admin=self)
        media = self.media + admin_form.media
        context = {
            'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
            'title': 'Title',
            'plugin': title.page,
            'plugin_id': title.page.id,
            'adminform': admin_form,
            'add': False,
            'is_popup': True,
            'media': media,
            'opts': opts,
            'change': True,
            'save_as': False,
            'has_add_permission': False,
            'window_close_timeout': 10,
        }
        if cancel_clicked:
            # cancel button was clicked
            context.update({
                'cancel': True,
            })
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        if not cancel_clicked and request.method == 'POST' and saved_successfully:
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        return render_to_response('admin/cms/page/plugin/change_form.html', context, RequestContext(request))

    def get_published_pagelist(self, *args, **kwargs):
        """
        This view is used by the PageSmartLinkWidget as the user type to feed the autocomplete drop-down.
        """
        request = args[0]
        if request.is_ajax():
            query_term = request.GET.get('q', '').strip('/')

            language_code = request.GET.get('language_code', settings.LANGUAGE_CODE)
            matching_published_pages = Page.objects.published().public().filter(
                Q(title_set__title__icontains=query_term, title_set__language=language_code)
                | Q(title_set__path__icontains=query_term, title_set__language=language_code)
                | Q(title_set__menu_title__icontains=query_term, title_set__language=language_code)
                | Q(title_set__page_title__icontains=query_term, title_set__language=language_code)
            ).distinct()

            results = []
            for page in matching_published_pages:
                results.append(
                    {
                        'path': page.get_path(language=language_code),
                        'title': page.get_title(language=language_code),
                        'redirect_url': page.get_absolute_url(language=language_code)
                    }
                )
            if DJANGO_1_4:
                return HttpResponse(json.dumps(results), mimetype='application/json')
            else:
                return HttpResponse(json.dumps(results), content_type='application/json')
        else:
            return HttpResponseForbidden()

    # The plugin endpoints below simply wrap the inherited implementations in
    # a reversion revision so plugin edits are versioned.

    def add_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).add_plugin(*args, **kwargs)

    def copy_plugins(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).copy_plugins(*args, **kwargs)

    def edit_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).edit_plugin(*args, **kwargs)

    def move_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).move_plugin(*args, **kwargs)

    def delete_plugin(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).delete_plugin(*args, **kwargs)

    def clear_placeholder(self, *args, **kwargs):
        with create_revision():
            return super(PageAdmin, self).clear_placeholder(*args, **kwargs)


admin.site.register(Page, PageAdmin)
./CrossVul/dataset_final_sorted/CWE-352/py/good_1654_0
crossvul-python_data_good_1654_4
# -*- coding: utf-8 -*- from __future__ import with_statement import json import datetime from cms.utils.urlutils import admin_reverse from djangocms_text_ckeditor.cms_plugins import TextPlugin from djangocms_text_ckeditor.models import Text from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.sites import site from django.contrib.auth.models import Permission, AnonymousUser from django.contrib.sites.models import Site from django.core.urlresolvers import reverse from django.http import (Http404, HttpResponseBadRequest, HttpResponseForbidden, HttpResponse, QueryDict, HttpResponseNotFound) from django.utils.datastructures import MultiValueDictKeyError from django.utils.encoding import smart_str from django.utils import timezone from django.utils.six.moves.urllib.parse import urlparse from cms.admin.change_list import CMSChangeList from cms.admin.forms import PageForm, AdvancedSettingsForm from cms.admin.pageadmin import PageAdmin from cms.admin.permissionadmin import PagePermissionInlineAdmin from cms.api import create_page, create_title, add_plugin, assign_user_to_page, publish_page from cms.constants import PLUGIN_MOVE_ACTION from cms.models import UserSettings, StaticPlaceholder from cms.models.pagemodel import Page from cms.models.permissionmodels import GlobalPagePermission, PagePermission from cms.models.placeholdermodel import Placeholder from cms.models.pluginmodel import CMSPlugin from cms.models.titlemodels import Title from cms.test_utils import testcases as base from cms.test_utils.testcases import CMSTestCase, URL_CMS_PAGE_DELETE, URL_CMS_PAGE, URL_CMS_TRANSLATION_DELETE from cms.test_utils.util.context_managers import SettingsOverride from cms.test_utils.util.fuzzy_int import FuzzyInt from cms.utils import get_cms_setting from cms.utils.compat import DJANGO_1_4, DJANGO_1_6 from cms.utils.compat.dj import get_user_model, force_unicode class AdminTestsBase(CMSTestCase): @property def 
admin_class(self): return site._registry[Page] def _get_guys(self, admin_only=False, use_global_permissions=True): admiN_user = self.get_superuser() if admin_only: return admiN_user USERNAME = 'test' if get_user_model().USERNAME_FIELD == 'email': normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', 'test@test.com') else: normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', USERNAME) normal_guy.is_staff = True normal_guy.is_active = True normal_guy.save() normal_guy.user_permissions = Permission.objects.filter( codename__in=['change_page', 'change_title', 'add_page', 'add_title', 'delete_page', 'delete_title'] ) if use_global_permissions: gpp = GlobalPagePermission.objects.create( user=normal_guy, can_change=True, can_delete=True, can_change_advanced_settings=False, can_publish=True, can_change_permissions=False, can_move_page=True, ) gpp.sites = Site.objects.all() return admiN_user, normal_guy class AdminTestCase(AdminTestsBase): def test_extension_not_in_admin(self): admin_user, staff = self._get_guys() with self.login_user_context(admin_user): request = self.get_request('/admin/cms/page/1/', 'en',) response = site.index(request) self.assertNotContains(response, '/mytitleextension/') self.assertNotContains(response, '/mypageextension/') def test_permissioned_page_list(self): """ Makes sure that a user with restricted page permissions can view the page list. 
""" admin_user, normal_guy = self._get_guys(use_global_permissions=False) current_site = Site.objects.get(pk=1) page = create_page("Test page", "nav_playground.html", "en", site=current_site, created_by=admin_user) PagePermission.objects.create(page=page, user=normal_guy) with self.login_user_context(normal_guy): resp = self.client.get(URL_CMS_PAGE) self.assertEqual(resp.status_code, 200) def test_edit_does_not_reset_page_adv_fields(self): """ Makes sure that if a non-superuser with no rights to edit advanced page fields edits a page, those advanced fields are not touched. """ OLD_PAGE_NAME = 'Test Page' NEW_PAGE_NAME = 'Test page 2' REVERSE_ID = 'Test' OVERRIDE_URL = 'my/override/url' admin_user, normal_guy = self._get_guys() current_site = Site.objects.get(pk=1) # The admin creates the page page = create_page(OLD_PAGE_NAME, "nav_playground.html", "en", site=current_site, created_by=admin_user) page.reverse_id = REVERSE_ID page.save() title = page.get_title_obj() title.has_url_overwrite = True title.path = OVERRIDE_URL title.save() self.assertEqual(page.get_title(), OLD_PAGE_NAME) self.assertEqual(page.reverse_id, REVERSE_ID) self.assertEqual(title.overwrite_url, OVERRIDE_URL) # The user edits the page (change the page name for ex.) 
        page_data = {
            'title': NEW_PAGE_NAME,
            'slug': page.get_slug(),
            'language': title.language,
            'site': page.site.pk,
            'template': page.template,
            # pagepermission formset management data; required only if the
            # user has can_change_permission
            'pagepermission_set-TOTAL_FORMS': 0,
            'pagepermission_set-INITIAL_FORMS': 0,
            'pagepermission_set-MAX_NUM_FORMS': 0,
            'pagepermission_set-2-TOTAL_FORMS': 0,
            'pagepermission_set-2-INITIAL_FORMS': 0,
            'pagepermission_set-2-MAX_NUM_FORMS': 0
        }
        with self.login_user_context(normal_guy):
            resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertTemplateNotUsed(resp, 'admin/login.html')
            page = Page.objects.get(pk=page.pk)
            # Title changed, advanced fields untouched
            self.assertEqual(page.get_title(), NEW_PAGE_NAME)
            self.assertEqual(page.reverse_id, REVERSE_ID)
            title = page.get_title_obj()
            self.assertEqual(title.overwrite_url, OVERRIDE_URL)

        # The admin edits the page back (change the page name for ex.)
        page_data = {
            'title': OLD_PAGE_NAME,
            'slug': page.get_slug(),
            'language': title.language,
            'site': page.site.pk,
            'template': page.template,
            'reverse_id': page.reverse_id,
            # required only if the user has can_change_permission
            'pagepermission_set-TOTAL_FORMS': 0,
            'pagepermission_set-INITIAL_FORMS': 0,
            'pagepermission_set-MAX_NUM_FORMS': 0,
            'pagepermission_set-2-TOTAL_FORMS': 0,
            'pagepermission_set-2-INITIAL_FORMS': 0,
            'pagepermission_set-2-MAX_NUM_FORMS': 0
        }
        with self.login_user_context(admin_user):
            resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertTemplateNotUsed(resp, 'admin/login.html')
            page = Page.objects.get(pk=page.pk)
            self.assertEqual(page.get_title(), OLD_PAGE_NAME)
            self.assertEqual(page.reverse_id, REVERSE_ID)
            title = page.get_title_obj()
            self.assertEqual(title.overwrite_url, OVERRIDE_URL)

    def test_edit_does_not_reset_apphook(self):
        """
        Makes sure that if a non-superuser with no rights to edit advanced page
        fields edits a page, those advanced fields (here: application_urls)
        are not touched.
        """
        OLD_PAGE_NAME = 'Test Page'
        NEW_PAGE_NAME = 'Test page 2'
        REVERSE_ID = 'Test'
        APPLICATION_URLS = 'project.sampleapp.urls'

        admin_user, normal_guy = self._get_guys()

        current_site = Site.objects.get(pk=1)

        # The admin creates the page and attaches an apphook
        page = create_page(OLD_PAGE_NAME, "nav_playground.html", "en",
                           site=current_site, created_by=admin_user)
        page.reverse_id = REVERSE_ID
        page.save()
        title = page.get_title_obj()
        title.has_url_overwrite = True
        title.save()
        page.application_urls = APPLICATION_URLS
        page.save()

        self.assertEqual(page.get_title(), OLD_PAGE_NAME)
        self.assertEqual(page.reverse_id, REVERSE_ID)
        self.assertEqual(page.application_urls, APPLICATION_URLS)

        # The user edits the page (change the page name for ex.)
        page_data = {
            'title': NEW_PAGE_NAME,
            'slug': page.get_slug(),
            'language': title.language,
            'site': page.site.pk,
            'template': page.template,
            'pagepermission_set-TOTAL_FORMS': 0,
            'pagepermission_set-INITIAL_FORMS': 0,
            'pagepermission_set-MAX_NUM_FORMS': 0,
            'pagepermission_set-2-TOTAL_FORMS': 0,
            'pagepermission_set-2-INITIAL_FORMS': 0,
            'pagepermission_set-2-MAX_NUM_FORMS': 0,
        }
        with self.login_user_context(normal_guy):
            resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertTemplateNotUsed(resp, 'admin/login.html')
            page = Page.objects.get(pk=page.pk)
            # Title changed, apphook configuration untouched
            self.assertEqual(page.get_title(), NEW_PAGE_NAME)
            self.assertEqual(page.reverse_id, REVERSE_ID)
            self.assertEqual(page.application_urls, APPLICATION_URLS)
            title = page.get_title_obj()

        # The admin edits the page (change the page name for ex.)
        page_data = {
            'title': OLD_PAGE_NAME,
            'slug': page.get_slug(),
            'language': title.language,
            'site': page.site.pk,
            'template': page.template,
            'reverse_id': page.reverse_id,
        }
        with self.login_user_context(admin_user):
            # The superuser posts to the advanced-settings form without an
            # apphook, then to the regular change form: application_urls ends
            # up cleared.
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page.pk, page_data, follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertTemplateNotUsed(resp, 'admin/login.html')
            resp = self.client.post(base.URL_CMS_PAGE_CHANGE % page.pk, page_data, follow=True)
            self.assertEqual(resp.status_code, 200)
            self.assertTemplateNotUsed(resp, 'admin/login.html')
            page = Page.objects.get(pk=page.pk)
            self.assertEqual(page.get_title(), OLD_PAGE_NAME)
            self.assertEqual(page.reverse_id, REVERSE_ID)
            self.assertEqual(page.application_urls, '')

    def test_2apphooks_with_same_namespace(self):
        """Two pages may not share the same apphook instance namespace."""
        PAGE1 = 'Test Page'
        PAGE2 = 'Test page 2'
        APPLICATION_URLS = 'project.sampleapp.urls'

        admin_user, normal_guy = self._get_guys()

        current_site = Site.objects.get(pk=1)

        # The admin creates two pages with the same apphook; only the first
        # one gets the "space1" namespace.
        page = create_page(PAGE1, "nav_playground.html", "en",
                           site=current_site, created_by=admin_user)
        page2 = create_page(PAGE2, "nav_playground.html", "en",
                            site=current_site, created_by=admin_user)
        page.application_urls = APPLICATION_URLS
        page.application_namespace = "space1"
        page.save()
        page2.application_urls = APPLICATION_URLS
        page2.save()

        # The admin edits the page (change the page name for ex.)
        page_data = {
            'title': PAGE2,
            'slug': page2.get_slug(),
            'language': 'en',
            'site': page.site.pk,
            'template': page2.template,
            'application_urls': 'SampleApp',
            'application_namespace': 'space1',
        }
        with self.login_user_context(admin_user):
            # Re-saving page with its own namespace succeeds (302 redirect)
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page.pk, page_data)
            self.assertEqual(resp.status_code, 302)
            self.assertEqual(Page.objects.filter(application_namespace="space1").count(), 1)
            # Giving page2 the already-used namespace is rejected (form re-rendered, 200)
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data)
            self.assertEqual(resp.status_code, 200)
            # A distinct namespace is accepted
            page_data['application_namespace'] = 'space2'
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data)
            self.assertEqual(resp.status_code, 302)

    def test_delete(self):
        """Deleting a published page (with a child and a plugin) redirects to the page list."""
        admin_user = self.get_superuser()
        create_page("home", "nav_playground.html", "en",
                    created_by=admin_user, published=True)
        page = create_page("delete-page", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_page('child-page', "nav_playground.html", "en",
                    created_by=admin_user, published=True, parent=page)
        body = page.placeholders.get(slot='body')
        add_plugin(body, 'TextPlugin', 'en', body='text')
        page.publish('en')
        with self.login_user_context(admin_user):
            data = {'post': 'yes'}
            # FuzzyInt bounds the query count without pinning an exact number
            with self.assertNumQueries(FuzzyInt(300, 407)):
                response = self.client.post(URL_CMS_PAGE_DELETE % page.pk, data)
            self.assertRedirects(response, URL_CMS_PAGE)

    def test_delete_diff_language(self):
        """Same as test_delete, but the child page exists in a different language."""
        admin_user = self.get_superuser()
        create_page("home", "nav_playground.html", "en",
                    created_by=admin_user, published=True)
        page = create_page("delete-page", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_page('child-page', "nav_playground.html", "de",
                    created_by=admin_user, published=True, parent=page)
        body = page.placeholders.get(slot='body')
        add_plugin(body, 'TextPlugin', 'en', body='text')
        page.publish('en')
        with self.login_user_context(admin_user):
            data = {'post': 'yes'}
            with self.assertNumQueries(FuzzyInt(300, 394)):
                response = self.client.post(URL_CMS_PAGE_DELETE % page.pk, data)
            self.assertRedirects(response, URL_CMS_PAGE)

    def test_search_fields(self):
        """Every cms changelist with search_fields must survive a ?q= search."""
        superuser = self.get_superuser()
        from django.contrib.admin import site
        with self.login_user_context(superuser):
            for model, admin_instance in site._registry.items():
                if model._meta.app_label != 'cms':
                    continue
                if not admin_instance.search_fields:
                    continue
                url = admin_reverse('cms_%s_changelist' % model._meta.module_name)
                response = self.client.get('%s?q=1' % url)
                errmsg = response.content
                self.assertEqual(response.status_code, 200, errmsg)

    def test_delete_translation(self):
        """Deleting a single translation (incl. es-mx style locales) redirects to the page list."""
        admin_user = self.get_superuser()
        page = create_page("delete-page-translation", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_title("de", "delete-page-translation-2", page, slug="delete-page-translation-2")
        create_title("es-mx", "delete-page-translation-es", page, slug="delete-page-translation-es")
        with self.login_user_context(admin_user):
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertRedirects(response, URL_CMS_PAGE)
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertRedirects(response, URL_CMS_PAGE)

    def test_change_dates(self):
        """The dates view must update publication (end) dates with and without USE_TZ."""
        admin_user, staff = self._get_guys()
        page = create_page('test-page', 'nav_playground.html', 'en')
        page.publish('en')
        draft = page.get_draft_object()

        with self.settings(USE_TZ=False):
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            new_date = timezone.now() - datetime.timedelta(days=1)
            new_end_date = timezone.now() + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                # SplitDateTime widget posts date and time separately (_0/_1)
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(), original_date.timetuple())
                self.assertEqual(draft.publication_date.timetuple(), new_date.timetuple())
                self.assertEqual(draft.publication_end_date.timetuple(), new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(), original_end_date.timetuple())

        with self.settings(USE_TZ=True):
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            # Work in local time so the posted wall-clock values round-trip
            new_date = timezone.localtime(timezone.now()) - datetime.timedelta(days=1)
            new_end_date = timezone.localtime(timezone.now()) + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(), original_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_date).timetuple(), new_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_end_date).timetuple(), new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(), original_end_date.timetuple())

    def test_change_template(self):
        """Only superusers may change a page template, and only to a valid one."""
        admin_user, staff = self._get_guys()
        request = self.get_request('/admin/cms/page/1/', 'en')
        request.method = "POST"
        pageadmin = site._registry[Page]
        with self.login_user_context(staff):
            # Unknown page -> 404; existing page but missing perms -> 403
            self.assertRaises(Http404, pageadmin.change_template, request, 1)
            page = create_page('test-page', 'nav_playground.html', 'en')
            response = pageadmin.change_template(request, page.pk)
            self.assertEqual(response.status_code, 403)
        url = admin_reverse('cms_page_change_template', args=(page.pk,))
        with self.login_user_context(admin_user):
            # Invalid template name -> 400; first configured template -> 200
            response = self.client.post(url, {'template': 'doesntexist'})
            self.assertEqual(response.status_code, 400)
            response = self.client.post(url, {'template': get_cms_setting('TEMPLATES')[0][0]})
            self.assertEqual(response.status_code, 200)

    def test_get_permissions(self):
        """Anonymous users are sent to the login page; superusers see the permissions view."""
        page = create_page('test-page', 'nav_playground.html', 'en')
        url = admin_reverse('cms_page_get_permissions', args=(page.pk,))
        response = self.client.get(url)
        if DJANGO_1_6:
            # Django < 1.7 renders the login template in place instead of redirecting
            self.assertEqual(response.status_code, 200)
            self.assertTemplateUsed(response, 'admin/login.html')
        else:
            self.assertEqual(response.status_code, 302)
            self.assertRedirects(response, '/en/admin/login/?next=/en/admin/cms/page/%s/permissions/' % page.pk)
        admin_user = self.get_superuser()
        with self.login_user_context(admin_user):
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)
            self.assertTemplateNotUsed(response, 'admin/login.html')

    def test_changelist_items(self):
        """CMSChangeList must expose the page tree in creation order."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en')
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)
        self.assertEqual(Page.objects.all().count(), 4)

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)
        request = self.get_request(url)

        request.session = {}
        request.user = admin_user

        page_admin = site._registry[Page]

        # Build the ChangeList constructor args by hand; the signature grew an
        # extra list_max_show_all argument in django 1.4.
        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))

        cl.set_items(request)

        root_page = cl.get_items()[0]

        self.assertEqual(root_page, first_level_page)
        self.assertEqual(root_page.get_children()[0], second_level_page_top)
        self.assertEqual(root_page.get_children()[1], second_level_page_bottom)
        self.assertEqual(root_page.get_children()[0].get_children()[0], third_level_page)

    def test_changelist_get_results(self):
        """get_results must count only draft pages and honour ?q= filtering."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en', published=True)
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",  # nopyflakes
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",  # nopyflakes
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)
        fourth_level_page = create_page('level23', "nav_playground.html", "en",  # nopyflakes
                                        created_by=admin_user,
                                        parent=self.reload(first_level_page))
        # 5 drafts + 4 public copies (level23 is unpublished)
        self.assertEqual(Page.objects.all().count(), 9)

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)

        request = self.get_request(url)
        request.session = {}
        request.user = admin_user

        page_admin = site._registry[Page]

        # full blown page list. only draft pages are taken into account
        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 5)

        # only one unpublished page is returned
        request = self.get_request(url + '?q=level23')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 1)

        # a number of pages matches the query
        request = self.get_request(url + '?q=level2')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 3)

    def test_changelist_tree(self):
        """ This test checks for proper jstree cookie unquoting.

        It should be converted to a selenium test to actually test the jstree behaviour.
        Cookie set below is just a forged example (from live session)
        """
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en')
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)

        if get_user_model().USERNAME_FIELD == 'email':
            self.client.login(username='admin@django-cms.org', password='admin@django-cms.org')
        else:
            self.client.login(username='admin', password='admin')

        # URL-quoted comma: the view must unquote 'page_1,page_2'
        self.client.cookies['djangocms_nodes_open'] = 'page_1%2Cpage_2'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["open_menu_trees"], [1, 2])
        # tests descendants method for the lazy load ajax call
        url = "%s%d/en/descendants/" % (url, first_level_page.pk)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # should include both direct descendant pages
        self.assertContains(response, 'id="page_%s"' % second_level_page_top.pk)
        self.assertContains(response, 'id="page_%s"' % second_level_page_bottom.pk)
        # but not any further down the tree
        self.assertNotContains(response, 'id="page_%s"' % third_level_page.pk)
        self.assertNotContains(response, 'None')

    def test_unihandecode_doesnt_break_404_in_admin(self):
        """A missing page id in the admin change view must still 404 cleanly."""
        self.get_superuser()
        if get_user_model().USERNAME_FIELD == 'email':
            self.client.login(username='admin@django-cms.org', password='admin@django-cms.org')
        else:
            self.client.login(username='admin', password='admin')
        response = self.client.get('/en/admin/cms/page/1/?language=en')
        self.assertEqual(response.status_code, 404)

    def test_tree_displays_in_correct_language(self):
        '''
        Test to prove and protect that the page titles in the tree are
        displayed in the currently set language.
        '''
        admin_guy, normal_guy = self._get_guys(use_global_permissions=False)
        # NOTE(review): this rebinds the module-level admin `site` import to a
        # Site instance for the rest of this method — confirm intentional.
        site = Site.objects.get(pk=1)

        en_title = "EN Page"
        es_title = "ES Pagina"

        # Create a page in en
        # NOTE(review): `admin` here appears to be the django.contrib.admin
        # module (see its use elsewhere in this file), not a user — presumably
        # `admin_guy` was intended; confirm against create_page's created_by
        # handling.
        page = create_page(en_title, "nav_playground.html", "en", site=site, created_by=admin)
        # Add a es-mx translation for this page
        create_title("es-mx", es_title, page, slug="es_pagina")

        url = admin_reverse('cms_%s_changelist' % Page._meta.module_name)
        url_pat = '<a href="{0}/{1}/preview/"[^>]*>{2}</a>'

        with self.login_user_context(admin_guy):
            # Check the EN version of the tree...
            response = self.client.get(url, {'language': 'en'})
            self.assertRegexpMatches(str(response.content), url_pat.format(page.pk, 'en', en_title, ))

            # Check the ES version of the tree...
            response = self.client.get(url, {'language': 'es-mx'})
            self.assertRegexpMatches(str(response.content), url_pat.format(page.pk, 'es-mx', es_title, ))

    def test_empty_placeholder_in_correct_language(self):
        """
        Test that Cleaning a placeholder only affect current language contents
        """
        # create some objects
        page_en = create_page("EmptyPlaceholderTestPage (EN)", "nav_playground.html", "en")
        ph = page_en.placeholders.get(slot="body")

        # add the text plugin to the en version of the page
        add_plugin(ph, "TextPlugin", "en", body="Hello World EN 1")
        add_plugin(ph, "TextPlugin", "en", body="Hello World EN 2")

        # creating a de title of the page and adding plugins to it
        create_title("de", page_en.get_title(), page_en, slug=page_en.get_slug())
        add_plugin(ph, "TextPlugin", "de", body="Hello World DE")
        add_plugin(ph, "TextPlugin", "de", body="Hello World DE 2")
        add_plugin(ph, "TextPlugin", "de", body="Hello World DE 3")

        # before cleaning the de placeholder
        self.assertEqual(ph.get_plugins('en').count(), 2)
        self.assertEqual(ph.get_plugins('de').count(), 3)

        admin_user, staff = self._get_guys()
        with self.login_user_context(admin_user):
            url = '%s?language=de' % admin_reverse('cms_page_clear_placeholder', args=[ph.pk])
            response = self.client.post(url, {'test': 0})

        self.assertEqual(response.status_code, 302)

        # After cleaning the de placeholder, en placeholder must still have all the plugins
        self.assertEqual(ph.get_plugins('en').count(), 2)
        self.assertEqual(ph.get_plugins('de').count(), 0)


class AdminTests(AdminTestsBase):
    # TODO: needs tests for actual permissions, not only superuser/normaluser

    def setUp(self):
        self.page = create_page("testpage", "nav_playground.html", "en")

    def get_admin(self):
        """Create and return a staff superuser (username/password derive from USERNAME_FIELD)."""
        User = get_user_model()
        fields = dict(email="admin@django-cms.org", is_staff=True, is_superuser=True)
        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "admin"
        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_permless(self):
        """Create and return a staff user with no permissions at all."""
        User = get_user_model()
        fields = dict(email="permless@django-cms.org", is_staff=True)
        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "permless"
        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_page(self):
        return self.page

    def test_change_publish_unpublish(self):
        """publish_page: GET -> 405, permless POST -> 403, admin POST publishes (302)."""
        page = self.get_page()
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 405)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 403)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            page = self.reload(page)
            self.assertTrue(page.is_published('en'))

            # unpublish reverses the state
            response = self.admin_class.unpublish(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

    def test_change_status_adds_log_entry(self):
        """Publishing a page must write exactly one admin LogEntry for it."""
        page = self.get_page()
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            self.assertFalse(LogEntry.objects.count())
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            self.assertEqual(1, LogEntry.objects.count())
            self.assertEqual(page.pk, int(LogEntry.objects.all()[0].object_id))

    def test_change_innavigation(self):
        """change_innavigation: 405 on GET, 403 without perms, 404 on bad pk, toggles for admins."""
        page = self.get_page()
        permless = self.get_permless()
        admin_user = self.get_admin()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 405)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            self.assertRaises(Http404, self.admin_class.change_innavigation,
                              request, page.pk + 100)
        # NOTE(review): this block repeats the earlier permless 403 check —
        # possibly a leftover duplicate; confirm.
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            old = page.in_navigation
            response = self.admin_class.change_innavigation(request, page.pk)
            # These asserts are for #3589
            self.assertContains(response, 'lang="en"')
            self.assertContains(response, './%s/en/preview/' % page.pk)
            self.assertEqual(response.status_code, 200)
            page = self.reload(page)
            # flag must have been flipped
            self.assertEqual(old, not page.in_navigation)

    def test_publish_page_requires_perms(self):
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.publish_page(request, Page.objects.all()[0].pk, "en")
            self.assertEqual(response.status_code, 403)

    def test_revert_page(self):
        """revert_page restores the draft title from the public copy; drafts stay drafts."""
        self.page.publish('en')
        title = self.page.title_set.get(language='en')
        title.title = 'new'
        title.save()
        self.assertEqual(Title.objects.all().count(), 2)
        self.assertEqual(Page.objects.all().count(), 2)
        with self.login_user_context(self.get_superuser()):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.revert_page(request, Page.objects.all()[0].pk, "en")
            self.assertEqual(response.status_code, 302)
        self.assertEqual(Title.objects.all().count(), 2)
        self.assertEqual(Page.objects.all().count(), 2)
        new_title = Title.objects.get(pk=title.pk)
        self.assertNotEqual(title.title, new_title.title)
        self.assertTrue(title.publisher_is_draft)
        self.assertTrue(new_title.publisher_is_draft)

    def test_revert_page_requires_perms(self):
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.revert_page(request, Page.objects.all()[0].pk, 'en')
            self.assertEqual(response.status_code, 403)

    def test_revert_page_redirects(self):
        """After a revert, the redirect target must switch the toolbar edit mode off."""
        admin_user = self.get_admin()
        self.page.publish("en")  # Ensure public copy exists before reverting
        with self.login_user_context(admin_user):
            response = self.client.post(admin_reverse('cms_page_revert_page',
                                                      args=(self.page.pk, 'en')))
            self.assertEqual(response.status_code, 302)
            url = response['Location']
            self.assertTrue(url.endswith('?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')))

    def test_remove_plugin_requires_post(self):
        """GET on delete_plugin renders the confirmation (200) instead of deleting."""
        ph = Placeholder.objects.create(slot='test')
        plugin = add_plugin(ph, 'TextPlugin', 'en', body='test')
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request()
            response = self.admin_class.delete_plugin(request, plugin.pk)
            self.assertEqual(response.status_code, 200)

    def test_move_plugin(self):
        """move_plugin: method/permission/argument validation, then a successful move."""
        ph = Placeholder.objects.create(slot='test')
        plugin = add_plugin(ph, 'TextPlugin', 'en', body='test')
        page = self.get_page()
        source, target = list(page.placeholders.all())[:2]
        pageplugin = add_plugin(source, 'TextPlugin', 'en', body='test')
        plugin_class = pageplugin.get_plugin_class_instance()
        expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)}
        placeholder = Placeholder.objects.all()[0]
        permless = self.get_permless()
        admin_user = self.get_admin()
        with self.login_user_context(permless):
            # GET is rejected, incomplete POST data raises
            request = self.get_request()
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 405)
            request = self.get_request(post_data={'not_usable': '1'})
            self.assertRaises(MultiValueDictKeyError, self.admin_class.move_plugin, request)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'ids': plugin.pk})
            self.assertRaises(MultiValueDictKeyError, self.admin_class.move_plugin, request)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': 'invalid-placeholder',
                                                  'plugin_language': 'en'})
            self.assertRaises(ValueError, self.admin_class.move_plugin, request)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.pk,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            self.assertEqual(self.admin_class.move_plugin(request).status_code,
                             HttpResponseForbidden.status_code)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.pk,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(json.loads(response.content.decode('utf8')), expected)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.id,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            self.assertEqual(self.admin_class.move_plugin(request).status_code,
                             HttpResponseForbidden.status_code)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': pageplugin.pk,
                                                  'placeholder_id': placeholder.id,
                                                  'plugin_parent': '',
                                                  'plugin_language': 'en'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(json.loads(response.content.decode('utf8')), expected)

    def test_move_language(self):
        """Moving a child plugin under a parent of another language adopts that language."""
        page = self.get_page()
        source, target = list(page.placeholders.all())[:2]
        col = add_plugin(source, 'MultiColumnPlugin', 'en')
        sub_col = add_plugin(source, 'ColumnPlugin', 'en', target=col)
        col2 = add_plugin(source, 'MultiColumnPlugin', 'de')
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'plugin_id': sub_col.pk,
                                                  'placeholder_id': source.id,
                                                  'plugin_parent': col2.pk,
                                                  'plugin_language': 'de'})
            response = self.admin_class.move_plugin(request)
            self.assertEqual(response.status_code, 200)
        sub_col = CMSPlugin.objects.get(pk=sub_col.pk)
        self.assertEqual(sub_col.language, "de")
        self.assertEqual(sub_col.parent_id, col2.pk)

    def test_preview_page(self):
        """preview_page 404s for unknown pages and redirects to the page URL in edit mode."""
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            self.assertRaises(Http404, self.admin_class.preview_page, request, 404, "en")
        page = self.get_page()
        page.publish("en")
        base_url = page.get_absolute_url()
        with self.login_user_context(permless):
            request = self.get_request('/?public=true')
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            request = self.get_request()
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            # A page on a different site must redirect to that site's domain
            current_site = Site.objects.create(domain='django-cms.org', name='django-cms')
            page.site = current_site
            page.save()
            page.publish("en")
            self.assertTrue(page.is_home)
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             'http://django-cms.org%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))

    def test_too_many_plugins_global(self):
        """A global placeholder plugin limit of 1 makes a second add_plugin POST a 400."""
        conf = {
            'body': {
                'limits': {
                    'global': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with SettingsOverride(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_too_many_plugins_type(self):
        """A per-plugin-type limit of 1 makes a second TextPlugin add a 400."""
        conf = {
            'body': {
                'limits': {
                    'TextPlugin': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with SettingsOverride(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                    'plugin_parent': '',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_edit_title_dirty_bit(self):
        """Editing the title via edit_title_fields must mark the draft dirty."""
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))
        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_edit_title_languages(self):
        # NOTE(review): body is identical to test_edit_title_dirty_bit — looks
        # like a copy that was never adapted to test multiple languages; confirm.
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))
        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_page_form_leak(self):
        """Form initial values from an edit must not leak into a later add form."""
        language = "en"
        admin_user = self.get_admin()
        request = self.get_request('/', 'en')
        request.user = admin_user
        page = create_page('A', 'nav_playground.html', language, menu_title='menu title')
        page_admin = PageAdmin(Page, site)
        page_admin._current_page = page
        edit_form = page_admin.get_form(request, page)
        add_form = page_admin.get_form(request, None)
        self.assertEqual(edit_form.base_fields['menu_title'].initial, 'menu title')
        self.assertEqual(add_form.base_fields['menu_title'].initial, None)


class NoDBAdminTests(CMSTestCase):
    # Checks on the registered PageAdmin that need no database fixtures.

    @property
    def admin_class(self):
        return site._registry[Page]

    def test_lookup_allowed_site__exact(self):
        self.assertTrue(self.admin_class.lookup_allowed('site__exact', '1'))

    def test_lookup_allowed_published(self):
        self.assertTrue(self.admin_class.lookup_allowed('published', value='1'))


class PluginPermissionTests(AdminTestsBase):
    # Exercises per-plugin add/change permissions on the plugin admin views.

    def setUp(self):
self._page = create_page('test page', 'nav_playground.html', 'en') self._placeholder = self._page.placeholders.all()[0] def _get_admin(self): User = get_user_model() fields = dict(email="admin@django-cms.org", is_staff=True, is_active=True) if (User.USERNAME_FIELD != 'email'): fields[User.USERNAME_FIELD] = "admin" admin_user = User(**fields) admin_user.set_password('admin') admin_user.save() return admin_user def _get_page_admin(self): return admin.site._registry[Page] def _give_permission(self, user, model, permission_type, save=True): codename = '%s_%s' % (permission_type, model._meta.object_name.lower()) user.user_permissions.add(Permission.objects.get(codename=codename)) def _give_page_permission_rights(self, user): self._give_permission(user, PagePermission, 'add') self._give_permission(user, PagePermission, 'change') self._give_permission(user, PagePermission, 'delete') def _get_change_page_request(self, user, page): return type('Request', (object,), { 'user': user, 'path': base.URL_CMS_PAGE_CHANGE % page.pk }) def _give_cms_permissions(self, user, save=True): for perm_type in ['add', 'change', 'delete']: for model in [Page, Title]: self._give_permission(user, model, perm_type, False) gpp = GlobalPagePermission.objects.create( user=user, can_change=True, can_delete=True, can_change_advanced_settings=False, can_publish=True, can_change_permissions=False, can_move_page=True, ) gpp.sites = Site.objects.all() if save: user.save() def _create_plugin(self): plugin = add_plugin(self._placeholder, 'TextPlugin', 'en') return plugin def test_plugin_add_requires_permissions(self): """User tries to add a plugin but has no permissions. 
He can add the plugin after he got the permissions""" admin = self._get_admin() self._give_cms_permissions(admin) if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin') else: self.client.login(username='admin', password='admin') url = admin_reverse('cms_page_add_plugin') data = { 'plugin_type': 'TextPlugin', 'placeholder_id': self._placeholder.pk, 'plugin_language': 'en', 'plugin_parent': '', } response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self._give_permission(admin, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_edit_requires_permissions(self): """User tries to edit a plugin but has no permissions. He can edit the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_edit_plugin', args=[plugin.id]) response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'change') response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_edit_wrong_url(self): """User tries to edit a plugin using a random url. 
404 response returned""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') self._give_permission(normal_guy, Text, 'change') url = '%s/edit-plugin/%s/' % (admin_reverse('cms_page_edit_plugin', args=[plugin.id]), plugin.id) response = self.client.post(url, dict()) self.assertEqual(response.status_code, HttpResponseNotFound.status_code) self.assertTrue("Plugin not found" in force_unicode(response.content)) def test_plugin_remove_requires_permissions(self): """User tries to remove a plugin but has no permissions. He can remove the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_delete_plugin', args=[plugin.pk]) data = dict(plugin_id=plugin.id) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'delete') response = self.client.post(url, data) self.assertEqual(response.status_code, 302) def test_plugin_move_requires_permissions(self): """User tries to move a plugin but has no permissions. 
He can move the plugin after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_move_plugin') data = dict(plugin_id=plugin.id, placeholder_id=self._placeholder.pk, plugin_parent='', ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'change') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugins_copy_requires_permissions(self): """User tries to copy plugin but has no permissions. He can copy plugins after he got the permissions""" plugin = self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='test@test.com', password='test@test.com') else: self.client.login(username='test', password='test') url = admin_reverse('cms_page_copy_plugins') data = dict(source_plugin_id=plugin.id, source_placeholder_id=self._placeholder.pk, source_language='en', target_language='fr', target_placeholder_id=self._placeholder.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugins_copy_placeholder_ref(self): """User copies a placeholder into a clipboard. A PlaceholderReferencePlugin is created. Afterwards he copies this into a placeholder and the PlaceholderReferencePlugin unpacks its content. 
After that he clear the clipboard""" self.assertEqual(Placeholder.objects.count(), 2) self._create_plugin() self._create_plugin() admin_user = self.get_superuser() clipboard = Placeholder() clipboard.save() self.assertEqual(CMSPlugin.objects.count(), 2) settings = UserSettings(language="fr", clipboard=clipboard, user=admin_user) settings.save() self.assertEqual(Placeholder.objects.count(), 3) if get_user_model().USERNAME_FIELD == 'email': self.client.login(username='admin@django-cms.org', password='admin@django-cms.org') else: self.client.login(username='admin', password='admin') url = admin_reverse('cms_page_copy_plugins') data = dict(source_plugin_id='', source_placeholder_id=self._placeholder.pk, source_language='en', target_language='en', target_placeholder_id=clipboard.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) clipboard_plugins = clipboard.get_plugins() self.assertEqual(CMSPlugin.objects.count(), 5) self.assertEqual(clipboard_plugins.count(), 1) self.assertEqual(clipboard_plugins[0].plugin_type, "PlaceholderPlugin") placeholder_plugin, _ = clipboard_plugins[0].get_plugin_instance() ref_placeholder = placeholder_plugin.placeholder_ref copied_plugins = ref_placeholder.get_plugins() self.assertEqual(copied_plugins.count(), 2) data = dict(source_plugin_id=placeholder_plugin.pk, source_placeholder_id=clipboard.pk, source_language='en', target_language='fr', target_placeholder_id=self._placeholder.pk, ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) plugins = self._placeholder.get_plugins() self.assertEqual(plugins.count(), 4) self.assertEqual(CMSPlugin.objects.count(), 7) self.assertEqual(Placeholder.objects.count(), 4) url = admin_reverse('cms_page_clear_placeholder', args=[clipboard.pk]) with self.assertNumQueries(FuzzyInt(70, 80)): response = self.client.post(url, {'test': 0}) self.assertEqual(response.status_code, 302) 
self.assertEqual(CMSPlugin.objects.count(), 4) self.assertEqual(Placeholder.objects.count(), 3) def test_plugins_copy_language(self): """User tries to copy plugin but has no permissions. He can copy plugins after he got the permissions""" self._create_plugin() _, normal_guy = self._get_guys() if get_user_model().USERNAME_FIELD != 'email': self.client.login(username='test', password='test') else: self.client.login(username='test@test.com', password='test@test.com') self.assertEqual(1, CMSPlugin.objects.all().count()) url = admin_reverse('cms_page_copy_language', args=[self._page.pk]) data = dict( source_language='en', target_language='fr', ) response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) # After he got the permissions, he can edit the plugin self._give_permission(normal_guy, Text, 'add') response = self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) self.assertEqual(2, CMSPlugin.objects.all().count()) def test_page_permission_inline_visibility(self): User = get_user_model() fields = dict(email='user@domain.com', password='user', is_staff=True) if get_user_model().USERNAME_FIELD != 'email': fields[get_user_model().USERNAME_FIELD] = 'user' user = User(**fields) user.save() self._give_page_permission_rights(user) page = create_page('A', 'nav_playground.html', 'en') page_permission = PagePermission.objects.create( can_change_permissions=True, user=user, page=page) request = self._get_change_page_request(user, page) page_admin = PageAdmin(Page, None) page_admin._current_page = page # user has can_change_permission # => must see the PagePermissionInline self.assertTrue( any(type(inline) is PagePermissionInlineAdmin for inline in page_admin.get_inline_instances(request, page if not DJANGO_1_4 else None))) page = Page.objects.get(pk=page.pk) # remove can_change_permission page_permission.can_change_permissions = False page_permission.save() request = 
self._get_change_page_request(user, page) page_admin = PageAdmin(Page, None) page_admin._current_page = page # => PagePermissionInline is no longer visible self.assertFalse( any(type(inline) is PagePermissionInlineAdmin for inline in page_admin.get_inline_instances(request, page if not DJANGO_1_4 else None))) def test_edit_title_is_allowed_for_staff_user(self): """ We check here both the permission on a single page, and the global permissions """ user = self._create_user('user', is_staff=True) another_user = self._create_user('another_user', is_staff=True) page = create_page('A', 'nav_playground.html', 'en') admin_url = reverse("admin:cms_page_edit_title_fields", args=( page.pk, 'en' )) page_admin = PageAdmin(Page, None) page_admin._current_page = page username = getattr(user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) assign_user_to_page(page, user, grant_all=True) username = getattr(user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponse.status_code) self._give_cms_permissions(another_user) username = getattr(another_user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password=username) response = self.client.get(admin_url) self.assertEqual(response.status_code, HttpResponse.status_code) def test_plugin_add_returns_valid_pk_for_plugin(self): admin_user = self._get_admin() self._give_cms_permissions(admin_user) self._give_permission(admin_user, Text, 'add') username = getattr(admin_user, get_user_model().USERNAME_FIELD) self.client.login(username=username, password='admin') url = admin_reverse('cms_page_add_plugin') data = { 'plugin_type': 'TextPlugin', 'placeholder_id': self._placeholder.pk, 'plugin_language': 'en', 'plugin_parent': '', } response = 
self.client.post(url, data) self.assertEqual(response.status_code, HttpResponse.status_code) self.assertEqual(response['content-type'], 'application/json') pk = response.content.decode('utf8').split("edit-plugin/")[1].split("/")[0] self.assertTrue(CMSPlugin.objects.filter(pk=int(pk)).exists()) class AdminFormsTests(AdminTestsBase): def test_clean_overwrite_url(self): user = AnonymousUser() user.is_superuser = True user.pk = 1 request = type('Request', (object,), {'user': user}) with SettingsOverride(): data = { 'title': 'TestPage', 'slug': 'test-page', 'language': 'en', 'overwrite_url': '/overwrite/url/', 'site': Site.objects.get_current().pk, 'template': get_cms_setting('TEMPLATES')[0][0], 'published': True } form = PageForm(data) self.assertTrue(form.is_valid(), form.errors.as_text()) # WTF? WHY DOES form.save() not handle this stuff??? instance = form.save() instance.permission_user_cache = user instance.permission_advanced_settings_cache = True Title.objects.set_or_create(request, instance, form, 'en') form = PageForm(data, instance=instance) self.assertTrue(form.is_valid(), form.errors.as_text()) def test_missmatching_site_parent_dotsite(self): site0 = Site.objects.create(domain='foo.com', name='foo.com') site1 = Site.objects.create(domain='foo.com', name='foo.com') parent_page = Page.objects.create( template='nav_playground.html', site=site0) new_page_data = { 'title': 'Title', 'slug': 'slug', 'language': 'en', 'site': site1.pk, 'template': get_cms_setting('TEMPLATES')[0][0], 'reverse_id': '', 'parent': parent_page.pk, } form = PageForm(data=new_page_data, files=None) self.assertFalse(form.is_valid()) self.assertIn(u"Site doesn't match the parent's page site", form.errors['__all__']) def test_reverse_id_error_location(self): ''' Test moving the reverse_id validation error to a field specific one ''' # this is the Reverse ID we'll re-use to break things. 
dupe_id = 'p1' curren_site = Site.objects.get_current() create_page('Page 1', 'nav_playground.html', 'en', reverse_id=dupe_id) page2 = create_page('Page 2', 'nav_playground.html', 'en') # Assemble a bunch of data to test the page form page2_data = { 'language': 'en', 'site': curren_site.pk, 'reverse_id': dupe_id, 'template': 'col_two.html', } form = AdvancedSettingsForm(data=page2_data, files=None) self.assertFalse(form.is_valid()) # reverse_id is the only item that is in __all__ as every other field # has it's own clean method. Moving it to be a field error means # __all__ is now not available. self.assertNotIn('__all__', form.errors) # In moving it to it's own field, it should be in form.errors, and # the values contained therein should match these. self.assertIn('reverse_id', form.errors) self.assertEqual(1, len(form.errors['reverse_id'])) self.assertEqual([u'A page with this reverse URL id exists already.'], form.errors['reverse_id']) page2_data['reverse_id'] = "" form = AdvancedSettingsForm(data=page2_data, files=None) self.assertTrue(form.is_valid()) admin_user = self._get_guys(admin_only=True) # reset some of page2_data so we can use cms.api.create_page page2 = page2.reload() page2.site = curren_site page2.save() with self.login_user_context(admin_user): # re-reset the page2_data for the admin form instance. page2_data['reverse_id'] = dupe_id page2_data['site'] = curren_site.pk # post to the admin change form for page 2, and test that the # reverse_id form row has an errors class. Django's admin avoids # collapsing these, so that the error is visible. 
resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page2_data) self.assertContains(resp, '<div class="form-row errors reverse_id">') def test_create_page_type(self): page = create_page('Test', 'static.html', 'en', published=True, reverse_id="home") for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') page.publish('en') self.assertEqual(Page.objects.count(), 2) self.assertEqual(CMSPlugin.objects.count(), 4) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.get( "%s?copy_target=%s&language=%s" % (admin_reverse("cms_page_add_page_type"), page.pk, 'en')) self.assertEqual(response.status_code, 302) self.assertEqual(Page.objects.count(), 3) self.assertEqual(Page.objects.filter(reverse_id="page_types").count(), 1) page_types = Page.objects.get(reverse_id='page_types') url = response.url if hasattr(response, 'url') else response['Location'] expected_url_params = QueryDict( 'target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en' % (page_types.pk, page.pk)) response_url_params = QueryDict(urlparse(url).query) self.assertDictEqual(expected_url_params, response_url_params) response = self.client.get("%s?copy_target=%s&language=%s" % ( admin_reverse("cms_page_add_page_type"), page.pk, 'en'), follow=True) self.assertEqual(response.status_code, 200) # test no page types if no page types there response = self.client.get(admin_reverse('cms_page_add')) self.assertNotContains(response, "page_type") # create out first page type page_data = { 'title': 'type1', 'slug': 'type1', '_save': 1, 'template': 'static.html', 'site': 1, 'language': 'en' } response = self.client.post( "/en/admin/cms/page/add/?target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en" % ( page_types.pk, page.pk), data=page_data) self.assertEqual(response.status_code, 302) self.assertEqual(Page.objects.count(), 4) self.assertEqual(CMSPlugin.objects.count(), 6) 
response = self.client.get(admin_reverse('cms_page_add')) self.assertContains(response, "page_type") # no page types available if you use the copy_target response = self.client.get("%s?copy_target=%s&language=en" % (admin_reverse('cms_page_add'), page.pk)) self.assertNotContains(response, "page_type") def test_render_edit_mode(self): from django.core.cache import cache cache.clear() create_page('Test', 'static.html', 'en', published=True) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') user = self.get_superuser() self.assertEqual(Placeholder.objects.all().count(), 4) with self.login_user_context(user): with self.assertNumQueries(FuzzyInt(40, 66)): output = force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) self.assertIn('<b>Test</b>', output) self.assertEqual(Placeholder.objects.all().count(), 9) self.assertEqual(StaticPlaceholder.objects.count(), 2) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') with self.assertNumQueries(FuzzyInt(40, 60)): output = force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) self.assertIn('<b>Test</b>', output) with self.assertNumQueries(FuzzyInt(18, 48)): force_unicode(self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')).content) with self.assertNumQueries(FuzzyInt(12, 30)): force_unicode(self.client.get('/en/').content) def test_tree_view_queries(self): from django.core.cache import cache cache.clear() for i in range(10): create_page('Test%s' % i, 'col_two.html', 'en', published=True) for placeholder in Placeholder.objects.all(): add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>') user = self.get_superuser() with self.login_user_context(user): with self.assertNumQueries(FuzzyInt(18, 33)): force_unicode(self.client.get('/en/admin/cms/page/')) def test_smart_link_published_pages(self): admin, staff_guy 
= self._get_guys() page_url = '/en/admin/cms/page/published-pages/' # Not sure how to achieve this with reverse... with self.login_user_context(staff_guy): multi_title_page = create_page('main_title', 'col_two.html', 'en', published=True, overwrite_url='overwritten_url', menu_title='menu_title') title = multi_title_page.get_title_obj() title.page_title = 'page_title' title.save() multi_title_page.save() publish_page(multi_title_page, admin, 'en') # Non ajax call should return a 403 as this page shouldn't be accessed by anything else but ajax queries self.assertEqual(403, self.client.get(page_url).status_code) self.assertEqual(200, self.client.get(page_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest').status_code ) # Test that the query param is working as expected. self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'main_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'menu_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'overwritten_url'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q':'page_title'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8")))) class AdminPageEditContentSizeTests(AdminTestsBase): """ System user count influences the size of the page edit page, but the users are only 2 times present on the page The test relates to extra=0 at PagePermissionInlineAdminForm and ViewRestrictionInlineAdmin """ def test_editpage_contentsize(self): """ Expected a username only 2 times in the content, but a relationship between usercount and pagesize """ with SettingsOverride(CMS_PERMISSION=True): admin_user = self.get_superuser() PAGE_NAME = 'TestPage' USER_NAME = 'test_size_user_0' current_site = Site.objects.get(pk=1) page = create_page(PAGE_NAME, "nav_playground.html", 
"en", site=current_site, created_by=admin_user) page.save() self._page = page with self.login_user_context(admin_user): url = base.URL_CMS_PAGE_PERMISSION_CHANGE % self._page.pk response = self.client.get(url) self.assertEqual(response.status_code, 200) old_response_size = len(response.content) old_user_count = get_user_model().objects.count() # create additionals user and reload the page get_user_model().objects.create_user(username=USER_NAME, email=USER_NAME + '@django-cms.org', password=USER_NAME) user_count = get_user_model().objects.count() more_users_in_db = old_user_count < user_count # we have more users self.assertTrue(more_users_in_db, "New users got NOT created") response = self.client.get(url) new_response_size = len(response.content) page_size_grown = old_response_size < new_response_size # expect that the pagesize gets influenced by the useramount of the system self.assertTrue(page_size_grown, "Page size has not grown after user creation") # usernames are only 2 times in content text = smart_str(response.content, response._charset) foundcount = text.count(USER_NAME) # 2 forms contain usernames as options self.assertEqual(foundcount, 2, "Username %s appeared %s times in response.content, expected 2 times" % ( USER_NAME, foundcount))
./CrossVul/dataset_final_sorted/CWE-352/py/good_1654_4
crossvul-python_data_good_114_1
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""A request interceptor taking care of adblocking and custom headers."""

from PyQt5.QtCore import QUrl
from PyQt5.QtWebEngineCore import (QWebEngineUrlRequestInterceptor,
                                   QWebEngineUrlRequestInfo)

from qutebrowser.config import config
from qutebrowser.browser import shared
from qutebrowser.utils import utils, log, debug


class RequestInterceptor(QWebEngineUrlRequestInterceptor):

    """Handle ad blocking and custom headers."""

    def __init__(self, host_blocker, args, parent=None):
        """Store the host blocker and the parsed command-line arguments."""
        super().__init__(parent)
        self._host_blocker = host_blocker
        self._args = args

    def install(self, profile):
        """Install the interceptor on the given QWebEngineProfile."""
        profile.setRequestInterceptor(self)

    # This runs on the IO thread, so showing a crash window would fail;
    # exceptions are swallowed instead.
    @utils.prevent_exceptions(None)
    def interceptRequest(self, info):
        """Inspect a single URL request and block or modify it.

        Called by QtWebEngine on the IO thread for every request, so long
        work here blocks networking.

        Args:
            info: The QWebEngineUrlRequestInfo describing the request;
                  mutations on it are tracked by Qt.
        """
        # Optional request logging, enabled via --debug-flag log-requests.
        if 'log-requests' in self._args.debug_flags:
            res_type = debug.qenum_key(QWebEngineUrlRequestInfo,
                                       info.resourceType())
            nav_type = debug.qenum_key(QWebEngineUrlRequestInfo,
                                       info.navigationType())
            method = bytes(info.requestMethod()).decode('ascii')
            log.webview.debug("{} {}, first-party {}, resource {}, "
                              "navigation {}".format(
                                  method,
                                  info.requestUrl().toDisplayString(),
                                  info.firstPartyUrl().toDisplayString(),
                                  res_type,
                                  nav_type))

        request_url = info.requestUrl()
        first_party = info.firstPartyUrl()

        # qute://settings/set may only be called via XHR from the settings
        # page itself; anything else is treated as a forged request.
        targets_settings_set = (request_url.scheme() == 'qute' and
                                request_url.host() == 'settings' and
                                request_url.path() == '/set')
        if targets_settings_set:
            from_settings_page = first_party == QUrl('qute://settings/')
            is_xhr = (info.resourceType() ==
                      QWebEngineUrlRequestInfo.ResourceTypeXhr)
            if not (from_settings_page and is_xhr):
                log.webview.warning("Blocking malicious request from {} to {}"
                                    .format(first_party.toDisplayString(),
                                            request_url.toDisplayString()))
                info.block(True)
                return

        # FIXME:qtwebengine only block ads for NavigationTypeOther?
        if self._host_blocker.is_blocked(request_url):
            log.webview.info("Request to {} blocked by host blocker.".format(
                request_url.host()))
            info.block(True)

        # Apply user-configured headers, then the (URL-dependent) user agent.
        for name, value in shared.custom_headers(url=request_url):
            info.setHttpHeader(name, value)

        user_agent = config.instance.get('content.headers.user_agent',
                                         url=request_url)
        if user_agent is not None:
            info.setHttpHeader(b'User-Agent', user_agent.encode('ascii'))
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_1
crossvul-python_data_good_1687_0
"""Base Tornado handlers for the notebook. Authors: * Brian Granger """ #----------------------------------------------------------------------------- # Copyright (C) 2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import functools import json import logging import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses try: from urllib.parse import urlparse # Py 3 except ImportError: from urlparse import urlparse # Py 2 from jinja2 import TemplateNotFound from tornado import web try: from tornado.log import app_log except ImportError: app_log = logging.getLogger() from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" def set_default_headers(self): headers = self.settings.get('headers', {}) if "X-Frame-Options" not in headers: headers["X-Frame-Options"] = "SAMEORIGIN" for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) pass def clear_login_cookie(self): self.clear_cookie(self.cookie_name) def 
get_current_user(self): user_id = self.get_secure_cookie(self.cookie_name) # For now the user_id should not return empty, but it could eventually if user_id == '': user_id = 'anonymous' if user_id is None: # prevent extra Invalid cookie sig warnings: self.clear_login_cookie() if not self.login_available: user_id = 'anonymous' return user_id @property def cookie_name(self): default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def password(self): """our password""" return self.settings.get('password', '') @property def logged_in(self): """Is a user currently logged in? """ user = self.get_current_user() return (user and not user == 'anonymous') @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ return bool(self.settings.get('password', '')) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. 
""" @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def notebook_manager(self): return self.settings['notebook_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def project_dir(self): return self.notebook_manager.notebook_dir #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if 
self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin def check_origin_api(self): """Check Origin for cross-site API requests. Copied from WebSocket with changes: - allow unspecified host/origin (e.g. scripts) """ if self.allow_origin == '*': return True host = self.request.headers.get("Host") origin = self.request.headers.get("Origin") # If no header is provided, assume it comes from a script/curl. # We are only concerned with cross-site browser stuff here. if origin is None or host is None: return True origin = origin.lower() origin_host = urlparse(origin).netloc # OK if origin matches host if origin_host == host: return True # Check CORS headers if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(self.allow_origin_pat.match(origin)) else: # No CORS headers deny the request allow = False if not allow: self.log.warn("Blocking Cross Origin API request. 
Origin: %s, Host: %s", origin, host, ) return allow def prepare(self): if not self.check_origin_api(): raise web.HTTPError(404) return super(IPythonHandler, self).prepare() #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? body = self.request.body.strip().decode(u'utf-8') try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u'Invalid JSON in body of request') return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') message = '' status_message = responses.get(status_code, 'Unknown HTTP Error') if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header('Content-Type', 'text/html') # render the template try: html = self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) 
html = self.render_template('error.html', **ns) self.write(html) class Template404(IPythonHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @web.authenticated def get(self, path): if os.path.splitext(path)[1] == '.ipynb': name = os.path.basename(path) self.set_header('Content-Type', 'application/json') self.set_header('Content-Disposition','attachment; filename="%s"' % name) return web.StaticFileHandler.get(self, path) def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. """ abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. 
""" @functools.wraps(method) def wrapper(self, *args, **kwargs): try: result = method(self, *args, **kwargs) except web.HTTPError as e: status = e.status_code message = e.log_message self.log.warn(message) self.set_status(e.status_code) self.set_header('Content-Type', 'application/json') self.finish(json.dumps(dict(message=message))) except Exception: self.log.error("Unhandled error in API request", exc_info=True) status = 500 message = "Unknown server error" t, value, tb = sys.exc_info() self.set_status(status) tb_text = ''.join(traceback.format_exception(t, value, tb)) reply = dict(message=message, traceback=tb_text) self.set_header('Content-Type', 'application/json') self.finish(json.dumps(reply)) else: return result return wrapper #----------------------------------------------------------------------------- # File handler #----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once _static_paths = {} def initialize(self, path, default_filename=None): if isinstance(path, string_types): path = [path] self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename def compute_etag(self): return None @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return '' cls._static_paths[path] = abspath return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) for root in self.root: if (absolute_path + 
os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ SUPPORTED_METHODS = ['GET'] def get(self): self.redirect(self.request.uri.rstrip('/')) #----------------------------------------------------------------------------- # URL pattern fragments for re-use #----------------------------------------------------------------------------- path_regex = r"(?P<path>(?:/.*)*)" notebook_name_regex = r"(?P<name>[^/]+\.ipynb)" notebook_path_regex = "%s/%s" % (path_regex, notebook_name_regex) #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler) ]
./CrossVul/dataset_final_sorted/CWE-352/py/good_1687_0
crossvul-python_data_bad_1891_4
# -*- coding: utf-8 -*- """Simple security for Flask apps.""" import io import re from setuptools import find_packages, setup with io.open("README.rst", "rt", encoding="utf8") as f: readme = f.read() with io.open("flask_security/__init__.py", "rt", encoding="utf8") as f: version = re.search(r'__version__ = "(.*?)"', f.read()).group(1) tests_require = [ "Flask-Mongoengine>=0.9.5", "peewee>=3.11.2", "Flask-SQLAlchemy>=2.3", "argon2_cffi>=19.1.0", "bcrypt>=3.1.5", "cachetools>=3.1.0", "check-manifest>=0.25", "coverage>=4.5.4", "cryptography>=2.3.1", "isort>=4.2.2", "mock>=1.3.0", "mongoengine>=0.15.3", "mongomock>=3.14.0", "msgcheck>=2.9", "pony>=0.7.11", "phonenumberslite>=8.11.1", "psycopg2>=2.8.4", "pydocstyle>=1.0.0", "pymysql>=0.9.3", "pyqrcode>=1.2", "pytest==4.6.11", "pytest-black>=0.3.8", "pytest-cache>=1.0", "pytest-cov>=2.5.1", "pytest-flake8>=1.0.6", "pytest-mongo>=1.2.1", "pytest>=3.5.1", "sqlalchemy>=1.2.6", "sqlalchemy-utils>=0.33.0", "werkzeug>=0.15.5", "zxcvbn~=4.4.28", ] extras_require = { "docs": ["Pallets-Sphinx-Themes>=1.2.0", "Sphinx>=1.8.5", "sphinx-issues>=1.2.0"], "tests": tests_require, } extras_require["all"] = [] for reqs in extras_require.values(): extras_require["all"].extend(reqs) setup_requires = ["Babel>=1.3", "pytest-runner>=2.6.2", "twine", "wheel"] install_requires = [ "Flask>=1.0.2", "Flask-Login>=0.4.1", "Flask-Mail>=0.9.1", "Flask-Principal>=0.4.0", "Flask-WTF>=0.14.2", "Flask-BabelEx>=0.9.3", "email-validator>=1.0.5", "itsdangerous>=1.1.0", "passlib>=1.7.1", ] packages = find_packages() setup( name="Flask-Security-Too", version=version, description=__doc__, long_description=readme, keywords="flask security", license="MIT", author="Matt Wright & Chris Wagner", author_email="jwag.wagner+github@gmail.com", url="https://github.com/Flask-Middleware/flask-security", project_urls={ "Documentation": "https://flask-security-too.readthedocs.io", "Releases": "https://pypi.org/project/Flask-Security-Too/", "Code": 
"https://github.com/Flask-Middleware/flask-security", "Issue tracker": "https://github.com/Flask-Middleware/flask-security/issues", }, packages=packages, zip_safe=False, include_package_data=True, platforms="any", python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", extras_require=extras_require, install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, classifiers=[ "Environment :: Web Environment", "Framework :: Flask", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Development Status :: 4 - Beta", ], )
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1891_4
crossvul-python_data_bad_1654_5
# -*- coding: utf-8 -*- from __future__ import with_statement from django.contrib.sites.models import Site from cms.utils.urlutils import admin_reverse from djangocms_text_ckeditor.models import Text from django.core.cache import cache from django.core.management.base import CommandError from django.core.management import call_command from django.core.urlresolvers import reverse from cms.api import create_page, add_plugin, create_title from cms.constants import PUBLISHER_STATE_PENDING, PUBLISHER_STATE_DEFAULT, PUBLISHER_STATE_DIRTY from cms.management.commands import publisher_publish from cms.models import CMSPlugin, Title from cms.models.pagemodel import Page from cms.plugin_pool import plugin_pool from cms.test_utils.testcases import SettingsOverrideTestCase as TestCase from cms.test_utils.util.context_managers import StdoutOverride, SettingsOverride from cms.test_utils.util.fuzzy_int import FuzzyInt from cms.utils.conf import get_cms_setting from cms.utils.i18n import force_language from cms.utils.compat.dj import get_user_model class PublisherCommandTests(TestCase): """ Tests for the publish command """ def test_command_line_should_raise_without_superuser(self): with self.assertRaises(CommandError): com = publisher_publish.Command() com.handle_noargs() def test_command_line_publishes_zero_pages_on_empty_db(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 0) self.assertEqual(published_from_output, 0) def 
test_command_line_ignores_draft_page(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=False) pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 0) self.assertEqual(published_from_output, 0) self.assertEqual(Page.objects.public().count(), 0) def test_command_line_publishes_draft_page(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=False) pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', include_unpublished=True) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) def test_command_line_publishes_selected_language(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') page = create_page("en title", "nav_playground.html", "en") title = create_title('de', 'de title', page) title.published = True title.save() title = create_title('fr', 'fr title', page) title.published = True 
title.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', language='de') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de']) def test_command_line_publishes_selected_language_drafts(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') page = create_page("en title", "nav_playground.html", "en") title = create_title('de', 'de title', page) title.published = False title.save() title = create_title('fr', 'fr title', page) title.published = False title.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', language='de', include_unpublished=True) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) self.assertEqual(Page.objects.public().count(), 1) public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de']) def test_table_name_patching(self): """ This tests the plugin models patching when publishing from the command line """ User = get_user_model() 
User.objects.create_superuser('djangocms', 'cms@example.com', '123456') create_page("The page!", "nav_playground.html", "en", published=True) draft = Page.objects.drafts()[0] draft.reverse_id = 'a_test' # we have to change *something* draft.save() add_plugin(draft.placeholders.get(slot=u"body"), u"TextPlugin", u"en", body="Test content") draft.publish('en') add_plugin(draft.placeholders.get(slot=u"body"), u"TextPlugin", u"en", body="Test content") # Manually undoing table name patching Text._meta.db_table = 'djangocms_text_ckeditor_text' plugin_pool.patched = False with StdoutOverride(): # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') not_drafts = len(Page.objects.filter(publisher_is_draft=False)) drafts = len(Page.objects.filter(publisher_is_draft=True)) self.assertEqual(not_drafts, 1) self.assertEqual(drafts, 1) def test_command_line_publishes_one_page(self): """ Publisher always creates two Page objects for every CMS page, one is_draft and one is_public. The public version of the page can be either published or not. This bit of code uses sometimes manager methods and sometimes manual filters on purpose (this helps test the managers) """ # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Now, let's create a page. 
That actually creates 2 Page objects create_page("The page!", "nav_playground.html", "en", published=True) draft = Page.objects.drafts()[0] draft.reverse_id = 'a_test' # we have to change *something* draft.save() pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) # Sanity check the database (we should have one draft and one public) not_drafts = len(Page.objects.filter(publisher_is_draft=False)) drafts = len(Page.objects.filter(publisher_is_draft=True)) self.assertEqual(not_drafts, 1) self.assertEqual(drafts, 1) # Now check that the non-draft has the attribute we set to the draft. 
non_draft = Page.objects.public()[0] self.assertEqual(non_draft.reverse_id, 'a_test') def test_command_line_publish_multiple_languages(self): # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Create a draft page with two published titles page = create_page(u"The page!", "nav_playground.html", "en", published=False) title = create_title('de', 'ja', page) title.published = True title.save() title = create_title('fr', 'non', page) title.published = True title.save() with StdoutOverride(): # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') public = Page.objects.public()[0] languages = sorted(public.title_set.values_list('language', flat=True)) self.assertEqual(languages, ['de', 'fr']) def test_command_line_publish_one_site(self): get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') siteA = Site.objects.create(domain='a.example.com', name='a.example.com') siteB = Site.objects.create(domain='b.example.com', name='b.example.com') #example.com create_page(u"example.com homepage", "nav_playground.html", "en", published=True) #a.example.com create_page(u"a.example.com homepage", "nav_playground.html", "de", site=siteA, published=True) #b.example.com create_page(u"b.example.com homepage", "nav_playground.html", "de", site=siteB, published=True) create_page(u"b.example.com about", "nav_playground.html", "nl", site=siteB, published=True) with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish', site=siteB.id) lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 2) self.assertEqual(published_from_output, 2) def 
test_command_line_publish_multiple_languages_check_count(self): """ Publishing one page with multiple languages still counts as one page. This test case checks whether it works as expected. """ # we need to create a superuser (the db is empty) get_user_model().objects.create_superuser('djangocms', 'cms@example.com', '123456') # Now, let's create a page with 2 languages. page = create_page("en title", "nav_playground.html", "en", published=True) create_title("de", "de title", page) page.publish("de") pages_from_output = 0 published_from_output = 0 with StdoutOverride() as buffer: # Now we don't expect it to raise, but we need to redirect IO call_command('publisher_publish') lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work for line in lines: if 'Total' in line: pages_from_output = int(line.split(':')[1]) elif 'Published' in line: published_from_output = int(line.split(':')[1]) self.assertEqual(pages_from_output, 1) self.assertEqual(published_from_output, 1) def tearDown(self): plugin_pool.patched = False plugin_pool.set_plugin_meta() class PublishingTests(TestCase): def create_page(self, title=None, **kwargs): return create_page(title or self._testMethodName, "nav_playground.html", "en", **kwargs) def test_publish_home(self): name = self._testMethodName page = self.create_page(name, published=False) self.assertFalse(page.publisher_public_id) self.assertEqual(Page.objects.all().count(), 1) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.get(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], "http://testserver/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')) def test_publish_single(self): name = self._testMethodName page = self.create_page(name, published=False) self.assertFalse(page.is_published('en')) drafts = Page.objects.drafts() public = Page.objects.public() published = 
Page.objects.public().published("en") self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) page.publish("en") drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published("en") self.assertTrue(page.is_published('en')) self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT) self.assertIsNotNone(page.publisher_public) self.assertTrue(page.publisher_public_id) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(public, title_set__title=name) self.assertObjectExist(published, title_set__title=name) page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_publisher_state("en"), 0) def test_publish_admin(self): page = self.create_page("test_admin", published=False) superuser = self.get_superuser() with self.login_user_context(superuser): response = self.client.get(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) page = Page.objects.get(pk=page.pk) self.assertEqual(page.get_publisher_state('en'), 0) def test_publish_wrong_lang(self): page = self.create_page("test_admin", published=False) superuser = self.get_superuser() with SettingsOverride( LANGUAGES=(('de', 'de'), ('en', 'en')), CMS_LANGUAGES={1: [{'code': 'en', 'name': 'en', 'fallbacks': ['fr', 'de'], 'public': True}]} ): with self.login_user_context(superuser): with force_language('de'): response = self.client.get(admin_reverse("cms_page_publish_page", args=[page.pk, 'en'])) self.assertEqual(response.status_code, 302) page = Page.objects.get(pk=page.pk) def test_publish_child_first(self): parent = self.create_page('parent', published=False) child = self.create_page('child', published=False, parent=parent) parent = parent.reload() self.assertFalse(parent.is_published('en')) self.assertFalse(child.is_published('en')) drafts = Page.objects.drafts() 
public = Page.objects.public() published = Page.objects.public().published('en') for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) child.publish("en") child = child.reload() self.assertTrue(child.is_published("en")) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) self.assertIsNone(child.publisher_public) # Since we have no parent, the state is otherwise unchanged for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(public, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) parent.publish("en") drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published('en') # Cascade publish for all pending descendants for name in ('parent', 'child'): self.assertObjectExist(drafts, title_set__title=name) page = drafts.get(title_set__title=name) self.assertTrue(page.is_published("en"), name) self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT, name) self.assertIsNotNone(page.publisher_public, name) self.assertTrue(page.publisher_public.is_published('en'), name) self.assertObjectExist(public, title_set__title=name) self.assertObjectExist(published, title_set__title=name) def test_simple_publisher(self): """ Creates the stuff needed for these tests. Please keep this up-to-date (the docstring!) 
A / \ B C """ # Create a simple tree of 3 pages pageA = create_page("Page A", "nav_playground.html", "en", published=True) pageB = create_page("Page B", "nav_playground.html", "en", parent=pageA, published=True) pageC = create_page("Page C", "nav_playground.html", "en", parent=pageA, published=False) # Assert A and B are published, C unpublished self.assertTrue(pageA.publisher_public_id) self.assertTrue(pageB.publisher_public_id) self.assertTrue(not pageC.publisher_public_id) self.assertEqual(len(Page.objects.public().published("en")), 2) # Let's publish C now. pageC.publish("en") # Assert all are published self.assertTrue(pageA.publisher_public_id) self.assertTrue(pageB.publisher_public_id) self.assertTrue(pageC.publisher_public_id) self.assertEqual(len(Page.objects.public().published("en")), 3) def test_i18n_publishing(self): page = self.create_page('parent', published=True) self.assertEqual(Title.objects.all().count(), 2) create_title("de", "vater", page) self.assertEqual(Title.objects.all().count(), 3) self.assertEqual(Title.objects.filter(published=True).count(), 2) page.publish('de') self.assertEqual(Title.objects.all().count(), 4) self.assertEqual(Title.objects.filter(published=True).count(), 4) def test_publish_ordering(self): page = self.create_page('parent', published=True) pageA = self.create_page('pageA', parent=page, published=True) pageC = self.create_page('pageC', parent=page, published=True) pageB = self.create_page('pageB', parent=page, published=True) page = page.reload() pageB.move_page(pageA, 'right') pageB.publish("en") # pageC needs reload since B has swapped places with it pageC.reload().publish("en") pageA.publish('en') drafts = Page.objects.drafts().order_by('tree_id', 'lft') draft_titles = [(p.get_title('en'), p.lft, p.rght) for p in drafts] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], draft_titles) public = Page.objects.public().order_by('tree_id', 'lft') public_titles = [(p.get_title('en'), 
p.lft, p.rght) for p in public] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], public_titles) page.publish('en') drafts = Page.objects.drafts().order_by('tree_id', 'lft') draft_titles = [(p.get_title('en'), p.lft, p.rght) for p in drafts] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], draft_titles) public = Page.objects.public().order_by('tree_id', 'lft') public_titles = [(p.get_title('en'), p.lft, p.rght) for p in public] self.assertEqual([('parent', 1, 8), ('pageA', 2, 3), ('pageB', 4, 5), ('pageC', 6, 7)], public_titles) def test_publish_ordering2(self): page = self.create_page('parent', published=False) pageA = self.create_page('pageA', published=False) pageC = self.create_page('pageC', published=False, parent=pageA) pageB = self.create_page('pageB', published=False, parent=pageA) page = page.reload() pageA.publish('en') pageB.publish('en') pageC.publish('en') page.publish('en') drafts = Page.objects.filter(publisher_is_draft=True).order_by('tree_id', 'lft') publics = Page.objects.filter(publisher_is_draft=False).order_by('tree_id', 'lft') x = 0 for draft in drafts: self.assertEqual(draft.publisher_public_id, publics[x].pk) x += 1 def test_unpublish_unpublish(self): name = self._testMethodName page = self.create_page(name, published=True) drafts = Page.objects.drafts() published = Page.objects.public().published("en") self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(published, title_set__title=name) page.unpublish('en') self.assertFalse(page.is_published('en')) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectDoesNotExist(published, title_set__title=name) page.publish('en') self.assertTrue(page.publisher_public_id) self.assertObjectExist(drafts, title_set__title=name) self.assertObjectExist(published, title_set__title=name) def test_delete_title_unpublish(self): page = self.create_page('test', published=True) sub_page = 
self.create_page('test2', published=True, parent=page) self.assertTrue(sub_page.publisher_public.is_published('en')) page.title_set.all().delete() self.assertFalse(sub_page.publisher_public.is_published('en', force_reload=True)) def test_modify_child_while_pending(self): home = self.create_page("Home", published=True, in_navigation=True) child = self.create_page("Child", published=True, parent=home, in_navigation=False) home = home.reload() home.unpublish('en') self.assertEqual(Title.objects.count(), 4) child = child.reload() self.assertFalse(child.publisher_public.is_published('en')) self.assertFalse(child.in_navigation) self.assertFalse(child.publisher_public.in_navigation) child.in_navigation = True child.save() child.publish('en') child = self.reload(child) self.assertEqual(Title.objects.count(), 4) self.assertTrue(child.is_published('en')) self.assertFalse(child.publisher_public.is_published('en')) self.assertTrue(child.in_navigation) self.assertTrue(child.publisher_public.in_navigation) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) home.publish('en') child = self.reload(child) self.assertTrue(child.is_published('en')) self.assertTrue(child.publisher_public_id) self.assertTrue(child.publisher_public.in_navigation) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) def test_republish_with_descendants(self): home = self.create_page("Home", published=True) child = self.create_page("Child", published=True, parent=home) gc = self.create_page("GC", published=True, parent=child) self.assertTrue(child.is_published("en")) self.assertTrue(gc.is_published('en')) home = home.reload() home.unpublish('en') child = self.reload(child) gc = self.reload(gc) self.assertTrue(child.is_published("en")) self.assertTrue(gc.is_published("en")) self.assertFalse(child.publisher_public.is_published("en")) self.assertFalse(gc.publisher_public.is_published('en')) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING) 
self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_PENDING) home.publish('en') child = self.reload(child) gc = self.reload(gc) self.assertTrue(child.publisher_public_id) self.assertTrue(gc.is_published('en')) self.assertTrue(child.is_published('en')) self.assertTrue(gc.publisher_public_id) self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) def test_republish_with_dirty_children(self): home = self.create_page("Home", published=True) dirty1 = self.create_page("Dirty1", published=True, parent=home) dirty2 = self.create_page("Dirty2", published=True, parent=home) home = self.reload(home) dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) dirty1.in_navigation = True dirty1.save() home.unpublish('en') dirty2.in_navigation = True dirty2.save() dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) self.assertTrue(dirty1.is_published) self.assertTrue(dirty2.publisher_public_id) self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) home = self.reload(home) with self.assertNumQueries(FuzzyInt(0, 100)): home.publish('en') dirty1 = self.reload(dirty1) dirty2 = self.reload(dirty2) self.assertTrue(dirty1.is_published("en")) self.assertTrue(dirty2.is_published("en")) self.assertTrue(dirty1.publisher_public.is_published("en")) self.assertTrue(dirty2.publisher_public.is_published("en")) self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) def test_republish_with_unpublished_child(self): """ Unpub1 was never published, and unpub2 has been unpublished after the fact. None of the grandchildren should become published. 
""" home = self.create_page("Home", published=True) unpub1 = self.create_page("Unpub1", published=False, parent=home) unpub2 = self.create_page("Unpub2", published=True, parent=home) gc1 = self.create_page("GC1", published=True, parent=unpub1) gc2 = self.create_page("GC2", published=True, parent=unpub2) self.assertFalse(gc1.publisher_public_id) self.assertFalse(gc1.publisher_public_id) self.assertTrue(gc1.is_published('en')) self.assertTrue(gc2.is_published('en')) home.unpublish('en') unpub1 = self.reload(unpub1) unpub2.unpublish('en') # Just marks this as not published for page in (unpub1, unpub2): self.assertFalse(page.is_published('en'), page) self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DIRTY) self.assertIsNone(unpub1.publisher_public) self.assertIsNotNone(unpub2.publisher_public) self.assertFalse(unpub2.publisher_public.is_published('en')) gc1 = self.reload(gc1) gc2 = self.reload(gc2) for page in (gc1, gc2): self.assertTrue(page.is_published('en')) self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_PENDING) self.assertIsNone(gc1.publisher_public) self.assertIsNotNone(gc2.publisher_public) self.assertFalse(gc2.publisher_public.is_published('en')) def test_unpublish_with_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=True) self.create_page("Grandchild", parent=child, published=True) page = page.reload() child.reload() drafts = Page.objects.drafts() public = Page.objects.public() published = Page.objects.public().published("en") self.assertEqual(published.count(), 3) self.assertEqual(page.get_descendant_count(), 2) base = reverse('pages-root') for url in (base, base + 'child/', base + 'child/grandchild/'): response = self.client.get(url) self.assertEqual(response.status_code, 200, url) for title in ('Page', 'Child', 'Grandchild'): self.assertObjectExist(drafts, title_set__title=title) self.assertObjectExist(public, title_set__title=title) 
self.assertObjectExist(published, title_set__title=title) item = drafts.get(title_set__title=title) self.assertTrue(item.publisher_public_id) self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT) self.assertTrue(page.unpublish('en'), 'Unpublish was not successful') self.assertFalse(page.is_published('en')) cache.clear() for url in (base, base + 'child/', base + 'child/grandchild/'): response = self.client.get(url) self.assertEqual(response.status_code, 404) for title in ('Page', 'Child', 'Grandchild'): self.assertObjectExist(drafts, title_set__title=title) self.assertObjectExist(public, title_set__title=title) self.assertObjectDoesNotExist(published, title_set__title=title) item = drafts.get(title_set__title=title) if title == 'Page': self.assertFalse(item.is_published("en")) self.assertFalse(item.publisher_public.is_published("en")) # Not sure what the proper state of these are after unpublish #self.assertEqual(page.publisher_state, PUBLISHER_STATE_DEFAULT) self.assertTrue(page.is_dirty('en')) else: # The changes to the published subpages are simply that the # published flag of the PUBLIC instance goes to false, and the # publisher state is set to mark waiting for parent self.assertTrue(item.is_published('en'), title) self.assertFalse(item.publisher_public.is_published('en'), title) self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_PENDING, title) self.assertTrue(item.is_dirty('en'), title) def test_unpublish_with_dirty_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=True) gchild = self.create_page("Grandchild", parent=child, published=True) child.in_navigation = True child.save() self.assertTrue(child.is_dirty("en")) self.assertFalse(gchild.is_dirty('en')) self.assertTrue(child.publisher_public.is_published('en')) self.assertTrue(gchild.publisher_public.is_published('en')) page.unpublish('en') child = self.reload(child) gchild = self.reload(gchild) # 
Descendants become dirty after unpublish self.assertTrue(child.is_dirty('en')) self.assertTrue(gchild.is_dirty('en')) # However, their public version is still removed no matter what self.assertFalse(child.publisher_public.is_published('en')) self.assertFalse(gchild.publisher_public.is_published('en')) def test_prepublish_descendants(self): page = self.create_page("Page", published=True) child = self.create_page("Child", parent=page, published=False) gchild2 = self.create_page("Grandchild2", parent=child, published=False) self.create_page("Grandchild3", parent=child, published=False) gchild = self.create_page("Grandchild", published=True) gchild.move_page(target=child, position='last-child') gchild.publish('en') self.assertFalse(child.is_published('en')) self.assertTrue(gchild.is_published('en')) self.assertEqual(gchild.get_publisher_state('en'), PUBLISHER_STATE_PENDING) child = child.reload() child.publish('en') gchild2 = gchild2.reload() gchild2.publish('en') self.assertTrue(child.is_published("en")) self.assertTrue(gchild.is_published("en")) self.assertEqual(gchild.get_publisher_state('en', force_reload=True), PUBLISHER_STATE_DEFAULT) gchild = gchild.reload() gchild2 = gchild2.reload() self.assertEqual(gchild.lft, gchild.publisher_public.lft) self.assertEqual(gchild.rght, gchild.publisher_public.rght) def test_republish_multiple_root(self): # TODO: The paths do not match expected behaviour home = self.create_page("Page", published=True) other = self.create_page("Another Page", published=True) child = self.create_page("Child", published=True, parent=home) child2 = self.create_page("Child", published=True, parent=other) self.assertTrue(Page.objects.filter(is_home=True).count(), 2) self.assertTrue(home.is_home) home = home.reload() self.assertTrue(home.publisher_public.is_home) root = reverse('pages-root') self.assertEqual(home.get_absolute_url(), root) self.assertEqual(home.get_public_object().get_absolute_url(), root) self.assertEqual(child.get_absolute_url(), 
root + 'child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/') self.assertEqual(other.get_absolute_url(), root + 'another-page/') self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/') self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/') self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/') home = self.reload(home) home.unpublish('en') home = self.reload(home) other = self.reload(other) child = self.reload(child) child2 = self.reload(child2) self.assertFalse(home.is_home) self.assertFalse(home.publisher_public.is_home) self.assertTrue(other.is_home) self.assertTrue(other.publisher_public.is_home) self.assertEqual(other.get_absolute_url(), root) self.assertEqual(other.get_public_object().get_absolute_url(), root) self.assertEqual(home.get_absolute_url(), root + 'page/') self.assertEqual(home.get_public_object().get_absolute_url(), root + 'page/') self.assertEqual(child.get_absolute_url(), root + 'page/child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'page/child/') self.assertEqual(child2.get_absolute_url(), root + 'child/') self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'child/') home.publish('en') home = self.reload(home) other = self.reload(other) child = self.reload(child) child2 = self.reload(child2) self.assertTrue(home.is_home) self.assertTrue(home.publisher_public.is_home) self.assertEqual(home.get_absolute_url(), root) self.assertEqual(home.get_public_object().get_absolute_url(), root) self.assertEqual(child.get_absolute_url(), root + 'child/') self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/') self.assertEqual(other.get_absolute_url(), root + 'another-page/') self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/') self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/') 
self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/') def test_revert_contents(self): user = self.get_superuser() page = create_page("Page", "nav_playground.html", "en", published=True, created_by=user) placeholder = page.placeholders.get(slot=u"body") deleted_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Deleted content") text_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Public content") page.publish('en') # Modify and delete plugins text_plugin.body = "<p>Draft content</p>" text_plugin.save() deleted_plugin.delete() self.assertEqual(CMSPlugin.objects.count(), 3) # Now let's revert and restore page.revert('en') self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT) self.assertEqual(CMSPlugin.objects.count(), 4) plugins = CMSPlugin.objects.filter(placeholder__page=page) self.assertEqual(plugins.count(), 2) plugins = [plugin.get_plugin_instance()[0] for plugin in plugins] self.assertEqual(plugins[0].body, "Deleted content") self.assertEqual(plugins[1].body, "Public content") def test_revert_move(self): parent = create_page("Parent", "nav_playground.html", "en", published=True) parent_url = parent.get_absolute_url() page = create_page("Page", "nav_playground.html", "en", published=True, parent=parent) other = create_page("Other", "nav_playground.html", "en", published=True) other_url = other.get_absolute_url() child = create_page("Child", "nav_playground.html", "en", published=True, parent=page) parent = parent.reload() page = page.reload() self.assertEqual(page.get_absolute_url(), parent_url + "page/") self.assertEqual(child.get_absolute_url(), parent_url + "page/child/") # Now let's move it (and the child) page.move_page(other) page = self.reload(page) child = self.reload(child) self.assertEqual(page.get_absolute_url(), other_url + "page/") self.assertEqual(child.get_absolute_url(), other_url + "page/child/") # Public version changed the url as well 
self.assertEqual(page.publisher_public.get_absolute_url(), other_url + "page/") self.assertEqual(child.publisher_public.get_absolute_url(), other_url + "page/child/") def test_publish_works_with_descendants(self): """ For help understanding what this tests for, see: http://articles.sitepoint.com/print/hierarchical-data-database Creates this published structure: home / \ item1 item2 / \ subitem1 subitem2 """ home_page = create_page("home", "nav_playground.html", "en", published=True, in_navigation=False) create_page("item1", "nav_playground.html", "en", parent=home_page, published=True) item2 = create_page("item2", "nav_playground.html", "en", parent=home_page, published=True) create_page("subitem1", "nav_playground.html", "en", parent=item2, published=True) create_page("subitem2", "nav_playground.html", "en", parent=item2, published=True) item2 = item2.reload() not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('lft')) drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('lft')) self.assertEqual(len(not_drafts), 5) self.assertEqual(len(drafts), 5) for idx, draft in enumerate(drafts): public = not_drafts[idx] # Check that a node doesn't become a root node magically self.assertEqual(bool(public.parent_id), bool(draft.parent_id)) if public.parent: # Let's assert the MPTT tree is consistent self.assertTrue(public.lft > public.parent.lft) self.assertTrue(public.rght < public.parent.rght) self.assertEqual(public.tree_id, public.parent.tree_id) self.assertTrue(public.parent in public.get_ancestors()) self.assertTrue(public in public.parent.get_descendants()) self.assertTrue(public in public.parent.get_children()) if draft.parent: # Same principle for the draft tree self.assertTrue(draft.lft > draft.parent.lft) self.assertTrue(draft.rght < draft.parent.rght) self.assertEqual(draft.tree_id, draft.parent.tree_id) self.assertTrue(draft.parent in draft.get_ancestors()) self.assertTrue(draft in draft.parent.get_descendants()) 
self.assertTrue(draft in draft.parent.get_children()) # Now call publish again. The structure should not change. item2.publish('en') not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('lft')) drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('lft')) self.assertEqual(len(not_drafts), 5) self.assertEqual(len(drafts), 5) for idx, draft in enumerate(drafts): public = not_drafts[idx] # Check that a node doesn't become a root node magically self.assertEqual(bool(public.parent_id), bool(draft.parent_id)) if public.parent: # Let's assert the MPTT tree is consistent self.assertTrue(public.lft > public.parent.lft) self.assertTrue(public.rght < public.parent.rght) self.assertEqual(public.tree_id, public.parent.tree_id) self.assertTrue(public.parent in public.get_ancestors()) self.assertTrue(public in public.parent.get_descendants()) self.assertTrue(public in public.parent.get_children()) if draft.parent: # Same principle for the draft tree self.assertTrue(draft.lft > draft.parent.lft) self.assertTrue(draft.rght < draft.parent.rght) self.assertEqual(draft.tree_id, draft.parent.tree_id) self.assertTrue(draft.parent in draft.get_ancestors()) self.assertTrue(draft in draft.parent.get_descendants()) self.assertTrue(draft in draft.parent.get_children())
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1654_5
crossvul-python_data_bad_114_3
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_3
crossvul-python_data_good_1891_4
# -*- coding: utf-8 -*- """Simple security for Flask apps.""" import io import re from setuptools import find_packages, setup with io.open("README.rst", "rt", encoding="utf8") as f: readme = f.read() with io.open("flask_security/__init__.py", "rt", encoding="utf8") as f: version = re.search(r'__version__ = "(.*?)"', f.read()).group(1) tests_require = [ "Flask-Mongoengine~=0.9.5", "peewee>=3.11.2", "Flask-SQLAlchemy>=2.3", "argon2_cffi>=19.1.0", "bcrypt>=3.1.5", "cachetools>=3.1.0", "check-manifest>=0.25", "coverage>=4.5.4", "cryptography>=2.3.1", "isort>=4.2.2", "mock>=1.3.0", "mongoengine~=0.19.1", "mongomock~=3.19.0", "msgcheck>=2.9", "pony>=0.7.11", "phonenumberslite>=8.11.1", "psycopg2>=2.8.4", "pydocstyle>=1.0.0", "pymysql>=0.9.3", "pyqrcode>=1.2", "pytest==4.6.11", "pytest-black>=0.3.8", "pytest-cache>=1.0", "pytest-cov>=2.5.1", "pytest-flake8>=1.0.6", "pytest-mongo>=1.2.1", "pytest>=3.5.1", "sqlalchemy>=1.2.6", "sqlalchemy-utils>=0.33.0", "werkzeug>=0.15.5", "zxcvbn~=4.4.28", ] extras_require = { "docs": ["Pallets-Sphinx-Themes>=1.2.0", "Sphinx>=1.8.5", "sphinx-issues>=1.2.0"], "tests": tests_require, } extras_require["all"] = [] for reqs in extras_require.values(): extras_require["all"].extend(reqs) setup_requires = ["Babel>=1.3", "pytest-runner>=2.6.2", "twine", "wheel"] install_requires = [ "Flask>=1.0.2", "Flask-Login>=0.4.1", "Flask-Mail>=0.9.1", "Flask-Principal>=0.4.0", "Flask-WTF>=0.14.2", "Flask-BabelEx>=0.9.3", "email-validator>=1.0.5", "itsdangerous>=1.1.0", "passlib>=1.7.1", ] packages = find_packages() setup( name="Flask-Security-Too", version=version, description=__doc__, long_description=readme, keywords="flask security", license="MIT", author="Matt Wright & Chris Wagner", author_email="jwag.wagner+github@gmail.com", url="https://github.com/Flask-Middleware/flask-security", project_urls={ "Documentation": "https://flask-security-too.readthedocs.io", "Releases": "https://pypi.org/project/Flask-Security-Too/", "Code": 
"https://github.com/Flask-Middleware/flask-security", "Issue tracker": "https://github.com/Flask-Middleware/flask-security/issues", }, packages=packages, zip_safe=False, include_package_data=True, platforms="any", python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", extras_require=extras_require, install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, classifiers=[ "Environment :: Web Environment", "Framework :: Flask", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Development Status :: 4 - Beta", ], )
./CrossVul/dataset_final_sorted/CWE-352/py/good_1891_4
crossvul-python_data_bad_1686_0
"""Base Tornado handlers for the notebook server.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import functools import json import logging import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses from jinja2 import TemplateNotFound from tornado import web from tornado import gen from tornado.log import app_log import IPython from IPython.utils.sysinfo import get_sys_info from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden, url_path_join, url_escape from IPython.html.services.security import csp_report_uri #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') sys_info = json.dumps(get_sys_info()) class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @property def content_security_policy(self): """The default Content-Security-Policy header Can be overridden by defining Content-Security-Policy in settings['headers'] """ return '; '.join([ "frame-ancestors 'self'", # Make sure the report-uri is relative to the base_url "report-uri " + url_path_join(self.base_url, csp_report_uri), ]) def set_default_headers(self): headers = self.settings.get('headers', {}) if "Content-Security-Policy" not in headers: headers["Content-Security-Policy"] = self.content_security_policy # Allow for overriding headers for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception as e: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) self.log.debug(e) def clear_login_cookie(self): 
self.clear_cookie(self.cookie_name) def get_current_user(self): if self.login_handler is None: return 'anonymous' return self.login_handler.get_user(self) @property def cookie_name(self): default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() return (user and not user == 'anonymous') @property def login_handler(self): """Return the login handler for this application, if any.""" return self.settings.get('login_handler_class', None) @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ if self.login_handler is None: return False return bool(self.login_handler.login_available(self.settings)) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. 
""" @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get('jinja_template_vars', {}) #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get('version_hash', '') @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') @property def default_url(self): return self.settings.get('default_url', '') @property def ws_url(self): return self.settings.get('websocket_url', '') @property def contents_js_source(self): self.log.debug("Using contents: %s", self.settings.get('contents_js_source', 'services/contents')) return self.settings.get('contents_js_source', 'services/contents') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def contents_manager(self): return self.settings['contents_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def terminal_manager(self): return self.settings['terminal_manager'] @property def kernel_spec_manager(self): return self.settings['kernel_spec_manager'] @property def config_manager(self): return self.settings['config_manager'] #--------------------------------------------------------------- # CORS 
#--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". 
if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, sys_info=sys_info, contents_js_source=self.contents_js_source, version_hash=self.version_hash, **self.jinja_template_vars ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
body = self.request.body.strip().decode(u'utf-8') try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u'Invalid JSON in body of request') return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') message = '' status_message = responses.get(status_code, 'Unknown HTTP Error') if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header('Content-Type', 'text/html') # render the template try: html = self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) html = self.render_template('error.html', **ns) self.write(html) class APIHandler(IPythonHandler): """Base class for API handlers""" @property def content_security_policy(self): csp = '; '.join([ super(APIHandler, self).content_security_policy, "default-src 'none'", ]) return csp def finish(self, *args, **kwargs): self.set_header('Content-Type', 'application/json') return super(APIHandler, self).finish(*args, **kwargs) class Template404(IPythonHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @web.authenticated def get(self, path): if os.path.splitext(path)[1] == '.ipynb': name = path.rsplit('/', 1)[-1] self.set_header('Content-Type', 'application/json') self.set_header('Content-Disposition','attachment; filename="%s"' % name) return 
web.StaticFileHandler.get(self, path) def set_headers(self): super(AuthenticatedFileHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. """ abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. 
""" @functools.wraps(method) @gen.coroutine def wrapper(self, *args, **kwargs): try: result = yield gen.maybe_future(method(self, *args, **kwargs)) except web.HTTPError as e: self.set_header('Content-Type', 'application/json') status = e.status_code message = e.log_message self.log.warn(message) self.set_status(e.status_code) reply = dict(message=message, reason=e.reason) self.finish(json.dumps(reply)) except Exception: self.set_header('Content-Type', 'application/json') self.log.error("Unhandled error in API request", exc_info=True) status = 500 message = "Unknown server error" t, value, tb = sys.exc_info() self.set_status(status) tb_text = ''.join(traceback.format_exception(t, value, tb)) reply = dict(message=message, reason=None, traceback=tb_text) self.finish(json.dumps(reply)) else: # FIXME: can use regular return in generators in py3 raise gen.Return(result) return wrapper #----------------------------------------------------------------------------- # File handler #----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(IPythonHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once _static_paths = {} def set_headers(self): super(FileFindHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments or \ any(self.request.path.startswith(path) for path in self.no_cache_paths): self.set_header("Cache-Control", "no-cache") def initialize(self, path, default_filename=None, no_cache_paths=None): self.no_cache_paths = no_cache_paths or [] if isinstance(path, string_types): path = [path] self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename def compute_etag(self): return None @classmethod def get_absolute_path(cls, roots, 
path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return '' cls._static_paths[path] = abspath return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) for root in self.root: if (absolute_path + os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class APIVersionHandler(APIHandler): @json_errors def get(self): # not authenticated, so give as few info as possible self.finish(json.dumps({"version":IPython.__version__})) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ def get(self): self.redirect(self.request.uri.rstrip('/')) post = put = get class FilesRedirectHandler(IPythonHandler): """Handler for redirecting relative URLs to the /files/ handler""" @staticmethod def redirect_to_files(self, path): """make redirect logic a reusable static method so it can be called from other handlers. 
""" cm = self.contents_manager if cm.dir_exists(path): # it's a *directory*, redirect to /tree url = url_path_join(self.base_url, 'tree', path) else: orig_path = path # otherwise, redirect to /files parts = path.split('/') if not cm.file_exists(path=path) and 'files' in parts: # redirect without files/ iff it would 404 # this preserves pre-2.0-style 'files/' links self.log.warn("Deprecated files/ URL: %s", orig_path) parts.remove('files') path = '/'.join(parts) if not cm.file_exists(path=path): raise web.HTTPError(404) url = url_path_join(self.base_url, 'files', path) url = url_escape(url) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) def get(self, path=''): return self.redirect_to_files(self, path) #----------------------------------------------------------------------------- # URL pattern fragments for re-use #----------------------------------------------------------------------------- # path matches any number of `/foo[/bar...]` or just `/` or '' path_regex = r"(?P<path>(?:(?:/[^/]+)+|/?))" #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler), (r"api", APIVersionHandler) ]
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1686_0
crossvul-python_data_good_1892_0
# -*- coding: utf-8 -*- """ flask_security.views ~~~~~~~~~~~~~~~~~~~~ Flask-Security views module :copyright: (c) 2012 by Matt Wright. :copyright: (c) 2019-2020 by J. Christopher Wagner (jwag). :license: MIT, see LICENSE for more details. CSRF is tricky. By default all our forms have CSRF protection built in via Flask-WTF. This is regardless of authentication method or whether the request is Form or JSON based. Form-based 'just works' since when rendering the form (on GET), the CSRF token is automatically populated. We want to handle: - JSON requests where CSRF token is in a header (e.g. X-CSRF-Token) - Option to skip CSRF when using a token to authenticate (rather than session) (CSRF_PROTECT_MECHANISMS) - Option to skip CSRF for 'login'/unauthenticated requests (CSRF_IGNORE_UNAUTH_ENDPOINTS) This is complicated by the fact that the only way to disable form CSRF is to pass in meta={csrf: false} at form instantiation time. Be aware that for CSRF to work, caller MUST pass in session cookie. So for pure API, and no session cookie - there is no way to support CSRF-Login so app must set CSRF_IGNORE_UNAUTH_ENDPOINTS (or use CSRF/session cookie for logging in then once they have a token, no need for cookie). TODO: two-factor routes such as tf_setup need work. They seem to support both authenticated (via session?) as well as unauthenticated access. 
""" import sys import time from flask import ( Blueprint, abort, after_this_request, current_app, jsonify, request, session, ) from flask_login import current_user from werkzeug.datastructures import MultiDict from werkzeug.local import LocalProxy from .changeable import change_user_password from .confirmable import ( confirm_email_token_status, confirm_user, send_confirmation_instructions, ) from .decorators import anonymous_user_required, auth_required, unauth_csrf from .passwordless import login_token_status, send_login_instructions from .quart_compat import get_quart_status from .unified_signin import ( us_signin, us_signin_send_code, us_qrcode, us_setup, us_setup_validate, us_verify, us_verify_link, us_verify_send_code, ) from .recoverable import ( reset_password_token_status, send_reset_password_instructions, update_password, ) from .registerable import register_user from .twofactor import ( complete_two_factor_process, tf_clean_session, tf_disable, tf_login, ) from .utils import ( base_render_json, config_value, do_flash, get_message, get_post_login_redirect, get_post_logout_redirect, get_post_register_redirect, get_post_verify_redirect, get_url, json_error_response, login_user, logout_user, slash_url_suffix, suppress_form_csrf, url_for_security, ) if get_quart_status(): # pragma: no cover from quart import make_response, redirect else: from flask import make_response, redirect # Convenient references _security = LocalProxy(lambda: current_app.extensions["security"]) _datastore = LocalProxy(lambda: _security.datastore) def default_render_json(payload, code, headers, user): """ Default JSON response handler. """ # Force Content-Type header to json. 
if headers is None: headers = dict() headers["Content-Type"] = "application/json" payload = dict(meta=dict(code=code), response=payload) return make_response(jsonify(payload), code, headers) PY3 = sys.version_info[0] == 3 if PY3 and get_quart_status(): # pragma: no cover from .async_compat import _commit # noqa: F401 else: def _commit(response=None): _datastore.commit() return response def _ctx(endpoint): return _security._run_ctx_processor(endpoint) @unauth_csrf(fall_through=True) def login(): """View function for login view Allow already authenticated users. For GET this is useful for single-page-applications on refresh - session still active but need to access user info and csrf-token. For POST - redirects to POST_LOGIN_VIEW (forms) or returns 400 (json). """ if current_user.is_authenticated and request.method == "POST": # Just redirect current_user to POST_LOGIN_VIEW. # While its tempting to try to logout the current user and login the # new requested user - that simply doesn't work with CSRF. # This does NOT use get_post_login_redirect() so that it doesn't look at # 'next' - which can cause infinite redirect loops # (see test_common::test_authenticated_loop) if _security._want_json(request): payload = json_error_response( errors=get_message("ANONYMOUS_USER_REQUIRED")[0] ) return _security._render_json(payload, 400, None, None) else: return redirect(get_url(_security.post_login_view)) form_class = _security.login_form if request.is_json: # Allow GET so we can return csrf_token for pre-login. 
if request.content_length: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(MultiDict([]), meta=suppress_form_csrf()) else: form = form_class(request.form, meta=suppress_form_csrf()) if form.validate_on_submit(): remember_me = form.remember.data if "remember" in form else None if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login( form.user, remember=remember_me, primary_authn_via="password" ) login_user(form.user, remember=remember_me, authn_via=["password"]) after_this_request(_commit) if _security._want_json(request): return base_render_json(form, include_auth_token=True) return redirect(get_post_login_redirect()) if _security._want_json(request): if current_user.is_authenticated: form.user = current_user return base_render_json(form) if current_user.is_authenticated: return redirect(get_url(_security.post_login_view)) else: return _security.render_template( config_value("LOGIN_USER_TEMPLATE"), login_user_form=form, **_ctx("login") ) @auth_required() def verify(): """View function which handles a authentication verification request. """ form_class = _security.verify_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form may have called verify_and_update_password() after_this_request(_commit) # verified - so set freshness time. 
session["fs_paa"] = time.time() if _security._want_json(request): return base_render_json(form) do_flash(*get_message("REAUTHENTICATION_SUCCESSFUL")) return redirect(get_post_verify_redirect()) if _security._want_json(request): assert form.user == current_user return base_render_json(form) return _security.render_template( config_value("VERIFY_TEMPLATE"), verify_form=form, **_ctx("verify") ) def logout(): """View function which handles a logout request.""" tf_clean_session() if current_user.is_authenticated: logout_user() # No body is required - so if a POST and json - return OK if request.method == "POST" and _security._want_json(request): return _security._render_json({}, 200, headers=None, user=None) return redirect(get_post_logout_redirect()) @anonymous_user_required def register(): """View function which handles a registration request.""" # For some unknown historic reason - if you don't require confirmation # (via email) then you need to type in your password twice. That might # make sense if you can't reset your password but in modern (2020) UX models # don't ask twice. if _security.confirmable or request.is_json: form_class = _security.confirm_register_form else: form_class = _security.register_form if request.is_json: form_data = MultiDict(request.get_json()) else: form_data = request.form form = form_class(form_data, meta=suppress_form_csrf()) if form.validate_on_submit(): did_login = False user = register_user(form) form.user = user # The 'auto-login' feature probably should be removed - I can't imagine # an application that would want random email accounts. It has been like this # since the beginning. Note that we still enforce 2FA - however for unified # signin - we adhere to historic behavior. 
if not _security.confirmable or _security.login_without_confirmation: if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="register") after_this_request(_commit) login_user(user, authn_via=["register"]) did_login = True if not _security._want_json(request): return redirect(get_post_register_redirect()) # Only include auth token if in fact user is permitted to login return base_render_json(form, include_auth_token=did_login) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("REGISTER_USER_TEMPLATE"), register_user_form=form, **_ctx("register") ) @unauth_csrf(fall_through=True) def send_login(): """View function that sends login instructions for passwordless login""" form_class = _security.passwordless_login_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_login_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("LOGIN_EMAIL_SENT", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_LOGIN_TEMPLATE"), send_login_form=form, **_ctx("send_login") ) @anonymous_user_required def token_login(token): """View function that handles passwordless login via a token Like reset-password and confirm - this is usually a GET via an email so from the request we can't differentiate form-based apps from non. 
""" expired, invalid, user = login_token_status(token) if not user or invalid: m, c = get_message("INVALID_LOGIN_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.login_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("login")) if expired: send_login_instructions(user) m, c = get_message( "LOGIN_EXPIRED", email=user.email, within=_security.login_within ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.login_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("login")) login_user(user, authn_via=["token"]) after_this_request(_commit) if _security.redirect_behavior == "spa": return redirect( get_url(_security.post_login_view, qparams=user.get_redirect_qparams()) ) do_flash(*get_message("PASSWORDLESS_LOGIN_SUCCESSFUL")) return redirect(get_post_login_redirect()) @unauth_csrf(fall_through=True) def send_confirmation(): """View function which sends confirmation instructions.""" form_class = _security.send_confirmation_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_confirmation_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("CONFIRMATION_REQUEST", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_CONFIRMATION_TEMPLATE"), send_confirmation_form=form, **_ctx("send_confirmation") ) def confirm_email(token): """View function which handles a email confirmation request.""" expired, invalid, user = confirm_email_token_status(token) if not user or invalid: m, c = get_message("INVALID_CONFIRMATION_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.confirm_error_view, qparams={c: m})) do_flash(m, c) return redirect( 
get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) already_confirmed = user.confirmed_at is not None if expired or already_confirmed: if already_confirmed: m, c = get_message("ALREADY_CONFIRMED") else: send_confirmation_instructions(user) m, c = get_message( "CONFIRMATION_EXPIRED", email=user.email, within=_security.confirm_email_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.confirm_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect( get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) confirm_user(user) after_this_request(_commit) if user != current_user: logout_user() if config_value("AUTO_LOGIN_AFTER_CONFIRM"): # N.B. this is a (small) security risk if email went to wrong place. # and you have the LOGIN_WITH_CONFIRMATION flag since in that case # you can be logged in and doing stuff - but another person could # get the email. if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="confirm") login_user(user, authn_via=["confirm"]) m, c = get_message("EMAIL_CONFIRMED") if _security.redirect_behavior == "spa": return redirect( get_url( _security.post_confirm_view, qparams=user.get_redirect_qparams({c: m}) ) ) do_flash(m, c) return redirect( get_url(_security.post_confirm_view) or get_url( _security.post_login_view if config_value("AUTO_LOGIN_AFTER_CONFIRM") else _security.login_url ) ) @anonymous_user_required @unauth_csrf(fall_through=True) def forgot_password(): """View function that handles a forgotten password request.""" form_class = _security.forgot_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_reset_password_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("PASSWORD_RESET_REQUEST", 
email=form.user.email)) if _security._want_json(request): return base_render_json(form, include_user=False) return _security.render_template( config_value("FORGOT_PASSWORD_TEMPLATE"), forgot_password_form=form, **_ctx("forgot_password") ) @anonymous_user_required @unauth_csrf(fall_through=True) def reset_password(token): """View function that handles a reset password request. This is usually called via GET as part of an email link and redirects to a reset-password form It is called via POST to actually update the password (and then redirects to a post reset/login view) If in either case the token is either invalid or expired it redirects to the 'forgot-password' form. In the case of non-form based configuration: For GET normal case - redirect to RESET_VIEW?token={token}&email={email} For GET invalid case - redirect to RESET_ERROR_VIEW?error={error}&email={email} For POST normal/successful case - return 200 with new authentication token For POST error case return 400 with form.errors """ expired, invalid, user = reset_password_token_status(token) form_class = _security.reset_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) form.user = user if request.method == "GET": if not user or invalid: m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.reset_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("forgot_password")) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.reset_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("forgot_password")) # All good - for SPA - redirect to the ``reset_view`` if 
_security.redirect_behavior == "spa": return redirect( get_url( _security.reset_view, qparams=user.get_redirect_qparams({"token": token}), ) ) # for forms - render the reset password form return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) # This is the POST case. m = None if not user or invalid: invalid = True m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if not _security._want_json(request): do_flash(m, c) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if not _security._want_json(request): do_flash(m, c) if invalid or expired: if _security._want_json(request): return _security._render_json(json_error_response(m), 400, None, None) else: return redirect(url_for_security("forgot_password")) if form.validate_on_submit(): after_this_request(_commit) update_password(user, form.password.data) if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login(user, primary_authn_via="reset") login_user(user, authn_via=["reset"]) if _security._want_json(request): login_form = _security.login_form(MultiDict({"email": user.email})) setattr(login_form, "user", user) return base_render_json(login_form, include_auth_token=True) else: do_flash(*get_message("PASSWORD_RESET")) return redirect( get_url(_security.post_reset_view) or get_url(_security.post_login_view) ) # validation failure case - for forms - we try again including the token # for non-forms - we just return errors and assume caller remembers token. 
if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) @auth_required("basic", "token", "session") def change_password(): """View function which handles a change password request.""" form_class = _security.change_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): after_this_request(_commit) change_user_password(current_user._get_current_object(), form.new_password.data) if _security._want_json(request): form.user = current_user return base_render_json(form, include_auth_token=True) do_flash(*get_message("PASSWORD_CHANGE")) return redirect( get_url(_security.post_change_view) or get_url(_security.post_login_view) ) if _security._want_json(request): form.user = current_user return base_render_json(form) return _security.render_template( config_value("CHANGE_PASSWORD_TEMPLATE"), change_password_form=form, **_ctx("change_password") ) @unauth_csrf(fall_through=True) def two_factor_setup(): """View function for two-factor setup. This is used both for GET to fetch forms and POST to actually set configuration (and send token). There are 3 cases for setting up: 1) initial login and application requires 2FA 2) changing existing 2FA information 3) user wanting to enable or disable 2FA (assuming application doesn't require it) In order to CHANGE/ENABLE/DISABLE a 2FA information, user must be properly logged in AND must perform a fresh password validation by calling POST /tf-confirm (which sets 'tf_confirmed' in the session). For initial login when 2FA required of course user can't be logged in - in this case we need to have been sent some state via the session as part of login to show a) who and b) that they successfully authenticated. 
""" form_class = _security.two_factor_setup_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if not current_user.is_authenticated: # This is the initial login case # We can also get here from setup if they want to change if not all(k in session for k in ["tf_user_id", "tf_state"]) or session[ "tf_state" ] not in ["setup_from_login", "validating_profile"]: # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) else: # all other cases require user to be logged in and have performed # additional password verification as signified by 'tf_confirmed' # in the session. if "tf_confirmed" not in session: tf_clean_session() return _tf_illegal_state(form, _security.two_factor_confirm_url) user = current_user if form.validate_on_submit(): # Before storing in DB and therefore requiring 2FA we need to # make sure it actually works. # Requiring 2FA is triggered by having BOTH tf_totp_secret and # tf_primary_method in the user record (or having the application # global config TWO_FACTOR_REQUIRED) # Until we correctly validate the 2FA - we don't set primary_method in # user model but use the session to store it. pm = form.setup.data if pm == "disable": tf_disable(user) after_this_request(_commit) do_flash(*get_message("TWO_FACTOR_DISABLED")) if not _security._want_json(request): return redirect(get_url(_security.post_login_view)) else: return base_render_json(form) # Regenerate the TOTP secret on every call of 2FA setup unless it is # within the same session and method (e.g. 
upon entering the phone number) if pm != session.get("tf_primary_method", None): session["tf_totp_secret"] = _security._totp_factory.generate_totp_secret() session["tf_primary_method"] = pm session["tf_state"] = "validating_profile" new_phone = form.phone.data if len(form.phone.data) > 0 else None if new_phone: user.tf_phone_number = new_phone _datastore.put(user) after_this_request(_commit) # This form is sort of bizarre - for SMS and authenticator # you select, then get more info, and submit again. # For authenticator of course, we don't actually send anything # and for SMS it is the second time around that we get the phone number if pm == "email" or (pm == "sms" and new_phone): msg = user.tf_send_security_token( method=pm, totp_secret=session["tf_totp_secret"], phone_number=getattr(user, "tf_phone_number", None), ) if msg: # send code didn't work form.setup.errors = list() form.setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) code_form = _security.two_factor_verify_code_form() if not _security._want_json(request): return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), chosen_method=pm, **_ctx("tf_setup") ) # We get here on GET and POST with failed validation. 
# For things like phone number - we've already done one POST # that succeeded and now if failed - so retain the initial info if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() choices = config_value("TWO_FACTOR_ENABLED_METHODS") if not config_value("TWO_FACTOR_REQUIRED"): choices.append("disable") return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=choices, chosen_method=form.setup.data, two_factor_required=config_value("TWO_FACTOR_REQUIRED"), **_ctx("tf_setup") ) @unauth_csrf(fall_through=True) def two_factor_token_validation(): """View function for two-factor token validation Two cases: 1) normal login case - everything setup correctly; normal 2FA validation In this case - user not logged in - but 'tf_state' == 'ready' or 'validating_profile' 2) validating after CHANGE/ENABLE 2FA. In this case user logged in/authenticated they must have 'tf_confirmed' set meaning they re-entered their passwd """ form_class = _security.two_factor_verify_code_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) changing = current_user.is_authenticated if not changing: # This is the normal login case if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] not in ["ready", "validating_profile"] or ( session["tf_state"] == "validating_profile" and "tf_primary_method" not in session ) ): # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) if session["tf_state"] == "ready": pm = user.tf_primary_method totp_secret = user.tf_totp_secret else: pm = session["tf_primary_method"] 
totp_secret = session["tf_totp_secret"] else: if ( not all( k in session for k in ["tf_confirmed", "tf_state", "tf_primary_method"] ) or session["tf_state"] != "validating_profile" ): tf_clean_session() # logout since this seems like attack-ish/logic error logout_user() return _tf_illegal_state(form, _security.login_url) pm = session["tf_primary_method"] totp_secret = session["tf_totp_secret"] form.user = current_user setattr(form, "primary_method", pm) setattr(form, "tf_totp_secret", totp_secret) if form.validate_on_submit(): # Success - log in user and clear all session variables completion_message = complete_two_factor_process( form.user, pm, totp_secret, changing, session.pop("tf_remember_login", None) ) after_this_request(_commit) if not _security._want_json(request): do_flash(*get_message(completion_message)) return redirect(get_post_login_redirect()) # GET or not successful POST if _security._want_json(request): return base_render_json(form) # if we were trying to validate a new method if changing: setup_form = _security.two_factor_setup_form() return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=setup_form, two_factor_verify_code_form=form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), **_ctx("tf_setup") ) # if we were trying to validate an existing method else: rescue_form = _security.two_factor_rescue_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_rescue_form=rescue_form, two_factor_verify_code_form=form, problem=None, **_ctx("tf_token_validation") ) @anonymous_user_required @unauth_csrf(fall_through=True) def two_factor_rescue(): """ Function that handles a situation where user can't enter his two-factor validation code User must have already provided valid username/password. 
User must have already established 2FA """ form_class = _security.two_factor_rescue_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] != "ready" ): tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) rproblem = "" if form.validate_on_submit(): problem = form.data["help_setup"] rproblem = problem # if the problem is that user can't access his device, w # e send him code through mail if problem == "lost_device": msg = form.user.tf_send_security_token( method="email", totp_secret=form.user.tf_totp_secret, phone_number=getattr(form.user, "tf_phone_number", None), ) if msg: rproblem = "" form.help_setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) # send app provider a mail message regarding trouble elif problem == "no_mail_access": _security._send_mail( config_value("EMAIL_SUBJECT_TWO_FACTOR_RESCUE"), config_value("TWO_FACTOR_RESCUE_MAIL"), "two_factor_rescue", user=form.user, ) else: return "", 404 if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_verify_code_form=code_form, two_factor_rescue_form=form, rescue_mail=config_value("TWO_FACTOR_RESCUE_MAIL"), problem=rproblem, **_ctx("tf_token_validation") ) @auth_required("basic", "session", "token") def two_factor_verify_password(): """View function which handles a password verification request.""" form_class = _security.two_factor_verify_password_form if request.is_json: form = 
form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form called verify_and_update_password() after_this_request(_commit) session["tf_confirmed"] = True m, c = get_message("TWO_FACTOR_PASSWORD_CONFIRMATION_DONE") if not _security._want_json(request): do_flash(m, c) return redirect(url_for_security("two_factor_setup")) else: return _security._render_json(json_error_response(m), 400, None, None) if _security._want_json(request): assert form.user == current_user # form.user = current_user return base_render_json(form) return _security.render_template( config_value("TWO_FACTOR_VERIFY_PASSWORD_TEMPLATE"), two_factor_verify_password_form=form, **_ctx("tf_verify_password") ) @unauth_csrf(fall_through=True) def two_factor_qrcode(): if current_user.is_authenticated: user = current_user else: if "tf_user_id" not in session: abort(404) user = _datastore.get_user(session["tf_user_id"]) if not user: # Seems like we should be careful here if user_id is gone. tf_clean_session() abort(404) if "authenticator" not in config_value("TWO_FACTOR_ENABLED_METHODS"): return abort(404) if ( "tf_primary_method" not in session or session["tf_primary_method"] != "authenticator" ): return abort(404) totp = user.tf_totp_secret if "tf_totp_secret" in session: totp = session["tf_totp_secret"] try: import pyqrcode # By convention, the URI should have the username that the user # logs in with. username = user.calc_username() url = pyqrcode.create( _security._totp_factory.get_totp_uri( username if username else "Unknown", totp ) ) except ImportError: # For TWO_FACTOR - this should have been checked at app init. 
raise from io import BytesIO stream = BytesIO() url.svg(stream, scale=3) return ( stream.getvalue(), 200, { "Content-Type": "image/svg+xml", "Cache-Control": "no-cache, no-store, must-revalidate", "Pragma": "no-cache", "Expires": "0", }, ) def _tf_illegal_state(form, redirect_to): m, c = get_message("TWO_FACTOR_PERMISSION_DENIED") if not _security._want_json(request): do_flash(m, c) return redirect(get_url(redirect_to)) else: return _security._render_json(json_error_response(m), 400, None, None) def create_blueprint(app, state, import_name, json_encoder=None): """Creates the security extension blueprint""" bp = Blueprint( state.blueprint_name, import_name, url_prefix=state.url_prefix, subdomain=state.subdomain, template_folder="templates", ) if json_encoder: bp.json_encoder = json_encoder if state.logout_methods is not None: bp.route(state.logout_url, methods=state.logout_methods, endpoint="logout")( logout ) if state.passwordless: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(send_login) bp.route( state.login_url + slash_url_suffix(state.login_url, "<token>"), endpoint="token_login", )(token_login) elif config_value("US_SIGNIN_REPLACES_LOGIN", app=app): bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(us_signin) else: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(login) bp.route(state.verify_url, methods=["GET", "POST"], endpoint="verify")(verify) if state.unified_signin: bp.route(state.us_signin_url, methods=["GET", "POST"], endpoint="us_signin")( us_signin ) bp.route( state.us_signin_send_code_url, methods=["GET", "POST"], endpoint="us_signin_send_code", )(us_signin_send_code) bp.route(state.us_setup_url, methods=["GET", "POST"], endpoint="us_setup")( us_setup ) bp.route( state.us_setup_url + slash_url_suffix(state.us_setup_url, "<token>"), methods=["GET", "POST"], endpoint="us_setup_validate", )(us_setup_validate) # Freshness verification if config_value("FRESHNESS", app=app).total_seconds() >= 
0: bp.route( state.us_verify_url, methods=["GET", "POST"], endpoint="us_verify" )(us_verify) bp.route( state.us_verify_send_code_url, methods=["GET", "POST"], endpoint="us_verify_send_code", )(us_verify_send_code) bp.route(state.us_verify_link_url, methods=["GET"], endpoint="us_verify_link")( us_verify_link ) bp.route( state.us_qrcode_url + slash_url_suffix(state.us_setup_url, "<token>"), endpoint="us_qrcode", )(us_qrcode) if state.two_factor: tf_token_validation = "two_factor_token_validation" tf_qrcode = "two_factor_qrcode" bp.route( state.two_factor_setup_url, methods=["GET", "POST"], endpoint="two_factor_setup", )(two_factor_setup) bp.route( state.two_factor_token_validation_url, methods=["GET", "POST"], endpoint=tf_token_validation, )(two_factor_token_validation) bp.route(state.two_factor_qrcode_url, endpoint=tf_qrcode)(two_factor_qrcode) bp.route( state.two_factor_rescue_url, methods=["GET", "POST"], endpoint="two_factor_rescue", )(two_factor_rescue) bp.route( state.two_factor_confirm_url, methods=["GET", "POST"], endpoint="two_factor_verify_password", )(two_factor_verify_password) if state.registerable: bp.route(state.register_url, methods=["GET", "POST"], endpoint="register")( register ) if state.recoverable: bp.route(state.reset_url, methods=["GET", "POST"], endpoint="forgot_password")( forgot_password ) bp.route( state.reset_url + slash_url_suffix(state.reset_url, "<token>"), methods=["GET", "POST"], endpoint="reset_password", )(reset_password) if state.changeable: bp.route(state.change_url, methods=["GET", "POST"], endpoint="change_password")( change_password ) if state.confirmable: bp.route( state.confirm_url, methods=["GET", "POST"], endpoint="send_confirmation" )(send_confirmation) bp.route( state.confirm_url + slash_url_suffix(state.confirm_url, "<token>"), methods=["GET", "POST"], endpoint="confirm_email", )(confirm_email) return bp
./CrossVul/dataset_final_sorted/CWE-352/py/good_1892_0
crossvul-python_data_good_114_4
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Our own QNetworkAccessManager.""" import collections import html import attr from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QCoreApplication, QUrl, QByteArray) from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket from qutebrowser.config import config from qutebrowser.utils import (message, log, usertypes, utils, objreg, urlutils, debug) from qutebrowser.browser import shared from qutebrowser.browser.webkit import certificateerror from qutebrowser.browser.webkit.network import (webkitqutescheme, networkreply, filescheme) HOSTBLOCK_ERROR_STRING = '%HOSTBLOCK%' _proxy_auth_cache = {} @attr.s(frozen=True) class ProxyId: """Information identifying a proxy server.""" type = attr.ib() hostname = attr.ib() port = attr.ib() def _is_secure_cipher(cipher): """Check if a given SSL cipher (hopefully) isn't broken yet.""" tokens = [e.upper() for e in cipher.name().split('-')] if cipher.usedBits() < 128: # https://codereview.qt-project.org/#/c/75943/ return False # OpenSSL should already protect against this in a better way elif cipher.keyExchangeMethod() == 'DH' and utils.is_windows: # https://weakdh.org/ return False elif 
cipher.encryptionMethod().upper().startswith('RC4'): # http://en.wikipedia.org/wiki/RC4#Security # https://codereview.qt-project.org/#/c/148906/ return False elif cipher.encryptionMethod().upper().startswith('DES'): # http://en.wikipedia.org/wiki/Data_Encryption_Standard#Security_and_cryptanalysis return False elif 'MD5' in tokens: # http://www.win.tue.nl/hashclash/rogue-ca/ return False # OpenSSL should already protect against this in a better way # elif (('CBC3' in tokens or 'CBC' in tokens) and (cipher.protocol() not in # [QSsl.TlsV1_0, QSsl.TlsV1_1, QSsl.TlsV1_2])): # # http://en.wikipedia.org/wiki/POODLE # return False ### These things should never happen as those are already filtered out by ### either the SSL libraries or Qt - but let's be sure. elif cipher.authenticationMethod() in ['aNULL', 'NULL']: # Ciphers without authentication. return False elif cipher.encryptionMethod() in ['eNULL', 'NULL']: # Ciphers without encryption. return False elif 'EXP' in tokens or 'EXPORT' in tokens: # Weak export-grade ciphers return False elif 'ADH' in tokens: # No MITM protection return False ### This *should* happen ;) else: return True def init(): """Disable insecure SSL ciphers on old Qt versions.""" default_ciphers = QSslSocket.defaultCiphers() log.init.debug("Default Qt ciphers: {}".format( ', '.join(c.name() for c in default_ciphers))) good_ciphers = [] bad_ciphers = [] for cipher in default_ciphers: if _is_secure_cipher(cipher): good_ciphers.append(cipher) else: bad_ciphers.append(cipher) log.init.debug("Disabling bad ciphers: {}".format( ', '.join(c.name() for c in bad_ciphers))) QSslSocket.setDefaultCiphers(good_ciphers) class NetworkManager(QNetworkAccessManager): """Our own QNetworkAccessManager. Attributes: adopted_downloads: If downloads are running with this QNAM but the associated tab gets closed already, the NAM gets reparented to the DownloadManager. This counts the still running downloads, so the QNAM can clean itself up when this reaches zero again. 
_scheme_handlers: A dictionary (scheme -> handler) of supported custom schemes. _win_id: The window ID this NetworkManager is associated with. (or None for generic network managers) _tab_id: The tab ID this NetworkManager is associated with. (or None for generic network managers) _rejected_ssl_errors: A {QUrl: [SslError]} dict of rejected errors. _accepted_ssl_errors: A {QUrl: [SslError]} dict of accepted errors. _private: Whether we're in private browsing mode. netrc_used: Whether netrc authentication was performed. Signals: shutting_down: Emitted when the QNAM is shutting down. """ shutting_down = pyqtSignal() def __init__(self, *, win_id, tab_id, private, parent=None): log.init.debug("Initializing NetworkManager") with log.disable_qt_msghandler(): # WORKAROUND for a hang when a message is printed - See: # http://www.riverbankcomputing.com/pipermail/pyqt/2014-November/035045.html super().__init__(parent) log.init.debug("NetworkManager init done") self.adopted_downloads = 0 self._args = objreg.get('args') self._win_id = win_id self._tab_id = tab_id self._private = private self._scheme_handlers = { 'qute': webkitqutescheme.handler, 'file': filescheme.handler, } self._set_cookiejar() self._set_cache() self.sslErrors.connect(self.on_ssl_errors) self._rejected_ssl_errors = collections.defaultdict(list) self._accepted_ssl_errors = collections.defaultdict(list) self.authenticationRequired.connect(self.on_authentication_required) self.proxyAuthenticationRequired.connect( self.on_proxy_authentication_required) self.netrc_used = False def _set_cookiejar(self): """Set the cookie jar of the NetworkManager correctly.""" if self._private: cookie_jar = objreg.get('ram-cookie-jar') else: cookie_jar = objreg.get('cookie-jar') # We have a shared cookie jar - we restore its parent so we don't # take ownership of it. 
self.setCookieJar(cookie_jar) app = QCoreApplication.instance() cookie_jar.setParent(app) def _set_cache(self): """Set the cache of the NetworkManager correctly.""" if self._private: return # We have a shared cache - we restore its parent so we don't take # ownership of it. app = QCoreApplication.instance() cache = objreg.get('cache') self.setCache(cache) cache.setParent(app) def _get_abort_signals(self, owner=None): """Get a list of signals which should abort a question.""" abort_on = [self.shutting_down] if owner is not None: abort_on.append(owner.destroyed) # This might be a generic network manager, e.g. one belonging to a # DownloadManager. In this case, just skip the webview thing. if self._tab_id is not None: assert self._win_id is not None tab = objreg.get('tab', scope='tab', window=self._win_id, tab=self._tab_id) abort_on.append(tab.load_started) return abort_on def shutdown(self): """Abort all running requests.""" self.setNetworkAccessible(QNetworkAccessManager.NotAccessible) self.shutting_down.emit() # No @pyqtSlot here, see # https://github.com/qutebrowser/qutebrowser/issues/2213 def on_ssl_errors(self, reply, errors): # noqa: C901 pragma: no mccabe """Decide if SSL errors should be ignored or not. This slot is called on SSL/TLS errors by the self.sslErrors signal. Args: reply: The QNetworkReply that is encountering the errors. errors: A list of errors. 
""" errors = [certificateerror.CertificateErrorWrapper(e) for e in errors] log.webview.debug("Certificate errors: {!r}".format( ' / '.join(str(err) for err in errors))) try: host_tpl = urlutils.host_tuple(reply.url()) except ValueError: host_tpl = None is_accepted = False is_rejected = False else: is_accepted = set(errors).issubset( self._accepted_ssl_errors[host_tpl]) is_rejected = set(errors).issubset( self._rejected_ssl_errors[host_tpl]) log.webview.debug("Already accepted: {} / " "rejected {}".format(is_accepted, is_rejected)) if is_rejected: return elif is_accepted: reply.ignoreSslErrors() return abort_on = self._get_abort_signals(reply) ignore = shared.ignore_certificate_errors(reply.url(), errors, abort_on=abort_on) if ignore: reply.ignoreSslErrors() err_dict = self._accepted_ssl_errors else: err_dict = self._rejected_ssl_errors if host_tpl is not None: err_dict[host_tpl] += errors def clear_all_ssl_errors(self): """Clear all remembered SSL errors.""" self._accepted_ssl_errors.clear() self._rejected_ssl_errors.clear() @pyqtSlot(QUrl) def clear_rejected_ssl_errors(self, url): """Clear the rejected SSL errors on a reload. Args: url: The URL to remove. 
""" try: del self._rejected_ssl_errors[url] except KeyError: pass @pyqtSlot('QNetworkReply*', 'QAuthenticator*') def on_authentication_required(self, reply, authenticator): """Called when a website needs authentication.""" netrc_success = False if not self.netrc_used: self.netrc_used = True netrc_success = shared.netrc_authentication(reply.url(), authenticator) if not netrc_success: abort_on = self._get_abort_signals(reply) shared.authentication_required(reply.url(), authenticator, abort_on=abort_on) @pyqtSlot('QNetworkProxy', 'QAuthenticator*') def on_proxy_authentication_required(self, proxy, authenticator): """Called when a proxy needs authentication.""" proxy_id = ProxyId(proxy.type(), proxy.hostName(), proxy.port()) if proxy_id in _proxy_auth_cache: user, password = _proxy_auth_cache[proxy_id] authenticator.setUser(user) authenticator.setPassword(password) else: msg = '<b>{}</b> says:<br/>{}'.format( html.escape(proxy.hostName()), html.escape(authenticator.realm())) abort_on = self._get_abort_signals() answer = message.ask( title="Proxy authentication required", text=msg, mode=usertypes.PromptMode.user_pwd, abort_on=abort_on) if answer is not None: authenticator.setUser(answer.user) authenticator.setPassword(answer.password) _proxy_auth_cache[proxy_id] = answer @pyqtSlot() def on_adopted_download_destroyed(self): """Check if we can clean up if an adopted download was destroyed. See the description for adopted_downloads for details. 
""" self.adopted_downloads -= 1 log.downloads.debug("Adopted download destroyed, {} left.".format( self.adopted_downloads)) assert self.adopted_downloads >= 0 if self.adopted_downloads == 0: self.deleteLater() @pyqtSlot(object) # DownloadItem def adopt_download(self, download): """Adopt a new DownloadItem.""" self.adopted_downloads += 1 log.downloads.debug("Adopted download, {} adopted.".format( self.adopted_downloads)) download.destroyed.connect(self.on_adopted_download_destroyed) download.adopt_download.connect(self.adopt_download) def set_referer(self, req, current_url): """Set the referer header.""" referer_header_conf = config.val.content.headers.referer try: if referer_header_conf == 'never': # Note: using ''.encode('ascii') sends a header with no value, # instead of no header at all req.setRawHeader('Referer'.encode('ascii'), QByteArray()) elif (referer_header_conf == 'same-domain' and not urlutils.same_domain(req.url(), current_url)): req.setRawHeader('Referer'.encode('ascii'), QByteArray()) # If refer_header_conf is set to 'always', we leave the header # alone as QtWebKit did set it. except urlutils.InvalidUrlError: # req.url() or current_url can be invalid - this happens on # https://www.playstation.com/ for example. pass # WORKAROUND for: # http://www.riverbankcomputing.com/pipermail/pyqt/2014-September/034806.html # # By returning False, we provoke a TypeError because of a wrong return # type, which does *not* trigger a segfault but invoke our return handler # immediately. @utils.prevent_exceptions(False) def createRequest(self, op, req, outgoing_data): """Return a new QNetworkReply object. Args: op: Operation op req: const QNetworkRequest & req outgoing_data: QIODevice * outgoingData Return: A QNetworkReply. 
""" proxy_factory = objreg.get('proxy-factory', None) if proxy_factory is not None: proxy_error = proxy_factory.get_error() if proxy_error is not None: return networkreply.ErrorNetworkReply( req, proxy_error, QNetworkReply.UnknownProxyError, self) for header, value in shared.custom_headers(url=req.url()): req.setRawHeader(header, value) host_blocker = objreg.get('host-blocker') if host_blocker.is_blocked(req.url()): log.webview.info("Request to {} blocked by host blocker.".format( req.url().host())) return networkreply.ErrorNetworkReply( req, HOSTBLOCK_ERROR_STRING, QNetworkReply.ContentAccessDenied, self) # There are some scenarios where we can't figure out current_url: # - There's a generic NetworkManager, e.g. for downloads # - The download was in a tab which is now closed. current_url = QUrl() if self._tab_id is not None: assert self._win_id is not None try: tab = objreg.get('tab', scope='tab', window=self._win_id, tab=self._tab_id) current_url = tab.url() except (KeyError, RuntimeError): # https://github.com/qutebrowser/qutebrowser/issues/889 # Catching RuntimeError because we could be in the middle of # the webpage shutdown here. current_url = QUrl() if 'log-requests' in self._args.debug_flags: operation = debug.qenum_key(QNetworkAccessManager, op) operation = operation.replace('Operation', '').upper() log.webview.debug("{} {}, first-party {}".format( operation, req.url().toDisplayString(), current_url.toDisplayString())) scheme = req.url().scheme() if scheme in self._scheme_handlers: result = self._scheme_handlers[scheme](req, op, current_url) if result is not None: result.setParent(self) return result self.set_referer(req, current_url) return super().createRequest(op, req, outgoing_data)
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_4
crossvul-python_data_good_114_5
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """QtWebKit specific qute://* handlers and glue code.""" import mimetypes from PyQt5.QtCore import QUrl from PyQt5.QtNetwork import QNetworkReply, QNetworkAccessManager from qutebrowser.browser import pdfjs, qutescheme from qutebrowser.browser.webkit.network import networkreply from qutebrowser.utils import log, usertypes, qtutils def handler(request, operation, current_url): """Scheme handler for qute:// URLs. Args: request: QNetworkRequest to answer to. operation: The HTTP operation being done. current_url: The page we're on currently. Return: A QNetworkReply. 
""" if operation != QNetworkAccessManager.GetOperation: return networkreply.ErrorNetworkReply( request, "Unsupported request type", QNetworkReply.ContentOperationNotPermittedError) url = request.url() if ((url.scheme(), url.host(), url.path()) == ('qute', 'settings', '/set')): if current_url != QUrl('qute://settings/'): log.webview.warning("Blocking malicious request from {} to {}" .format(current_url.toDisplayString(), url.toDisplayString())) return networkreply.ErrorNetworkReply( request, "Invalid qute://settings request", QNetworkReply.ContentAccessDenied) try: mimetype, data = qutescheme.data_for_url(url) except qutescheme.NoHandlerFound: errorstr = "No handler found for {}!".format(url.toDisplayString()) return networkreply.ErrorNetworkReply( request, errorstr, QNetworkReply.ContentNotFoundError) except qutescheme.QuteSchemeOSError as e: return networkreply.ErrorNetworkReply( request, str(e), QNetworkReply.ContentNotFoundError) except qutescheme.QuteSchemeError as e: return networkreply.ErrorNetworkReply(request, e.errorstring, e.error) except qutescheme.Redirect as e: qtutils.ensure_valid(e.url) return networkreply.RedirectNetworkReply(e.url) return networkreply.FixedDataNetworkReply(request, data, mimetype) @qutescheme.add_handler('pdfjs', backend=usertypes.Backend.QtWebKit) def qute_pdfjs(url): """Handler for qute://pdfjs. Return the pdf.js viewer.""" try: data = pdfjs.get_pdfjs_res(url.path()) except pdfjs.PDFJSNotFound as e: # Logging as the error might get lost otherwise since we're not showing # the error page if a single asset is missing. This way we don't lose # information, as the failed pdfjs requests are still in the log. log.misc.warning( "pdfjs resource requested but not found: {}".format(e.path)) raise qutescheme.QuteSchemeError("Can't find pdfjs resource " "'{}'".format(e.path), QNetworkReply.ContentNotFoundError) else: mimetype, _encoding = mimetypes.guess_type(url.fileName()) assert mimetype is not None, url return mimetype, data
./CrossVul/dataset_final_sorted/CWE-352/py/good_114_5
crossvul-python_data_bad_1687_0
"""Base Tornado handlers for the notebook. Authors: * Brian Granger """ #----------------------------------------------------------------------------- # Copyright (C) 2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import functools import json import logging import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses from jinja2 import TemplateNotFound from tornado import web try: from tornado.log import app_log except ImportError: app_log = logging.getLogger() from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" def set_default_headers(self): headers = self.settings.get('headers', {}) if "X-Frame-Options" not in headers: headers["X-Frame-Options"] = "SAMEORIGIN" for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) pass def clear_login_cookie(self): self.clear_cookie(self.cookie_name) def get_current_user(self): user_id = self.get_secure_cookie(self.cookie_name) # For now the user_id should not return 
empty, but it could eventually if user_id == '': user_id = 'anonymous' if user_id is None: # prevent extra Invalid cookie sig warnings: self.clear_login_cookie() if not self.login_available: user_id = 'anonymous' return user_id @property def cookie_name(self): default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def password(self): """our password""" return self.settings.get('password', '') @property def logged_in(self): """Is a user currently logged in? """ user = self.get_current_user() return (user and not user == 'anonymous') @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ return bool(self.settings.get('password', '')) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. 
""" @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def notebook_manager(self): return self.settings['notebook_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def project_dir(self): return self.notebook_manager.notebook_dir #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if 
self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
body = self.request.body.strip().decode(u'utf-8') try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u'Invalid JSON in body of request') return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') message = '' status_message = responses.get(status_code, 'Unknown HTTP Error') if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header('Content-Type', 'text/html') # render the template try: html = self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) html = self.render_template('error.html', **ns) self.write(html) class Template404(IPythonHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @web.authenticated def get(self, path): if os.path.splitext(path)[1] == '.ipynb': name = os.path.basename(path) self.set_header('Content-Type', 'application/json') self.set_header('Content-Disposition','attachment; filename="%s"' % name) return web.StaticFileHandler.get(self, path) def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. 
""" abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): try: result = method(self, *args, **kwargs) except web.HTTPError as e: status = e.status_code message = e.log_message self.log.warn(message) self.set_status(e.status_code) self.set_header('Content-Type', 'application/json') self.finish(json.dumps(dict(message=message))) except Exception: self.log.error("Unhandled error in API request", exc_info=True) status = 500 message = "Unknown server error" t, value, tb = sys.exc_info() self.set_status(status) tb_text = ''.join(traceback.format_exception(t, value, tb)) reply = dict(message=message, traceback=tb_text) self.set_header('Content-Type', 'application/json') self.finish(json.dumps(reply)) else: return result return wrapper #----------------------------------------------------------------------------- # File handler #----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once _static_paths = {} def initialize(self, path, default_filename=None): if isinstance(path, string_types): path = [path] self.root = tuple( 
os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename def compute_etag(self): return None @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return '' cls._static_paths[path] = abspath return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) for root in self.root: if (absolute_path + os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ SUPPORTED_METHODS = ['GET'] def get(self): self.redirect(self.request.uri.rstrip('/')) #----------------------------------------------------------------------------- # URL pattern fragments for re-use #----------------------------------------------------------------------------- path_regex = r"(?P<path>(?:/.*)*)" notebook_name_regex = r"(?P<name>[^/]+\.ipynb)" notebook_path_regex = "%s/%s" % (path_regex, notebook_name_regex) #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler) ]
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1687_0
crossvul-python_data_bad_114_0
404: Not Found
./CrossVul/dataset_final_sorted/CWE-352/py/bad_114_0
crossvul-python_data_good_1891_2
# -*- coding: utf-8 -*- """ flask_security.views ~~~~~~~~~~~~~~~~~~~~ Flask-Security views module :copyright: (c) 2012 by Matt Wright. :copyright: (c) 2019-2020 by J. Christopher Wagner (jwag). :license: MIT, see LICENSE for more details. CSRF is tricky. By default all our forms have CSRF protection built in via Flask-WTF. This is regardless of authentication method or whether the request is Form or JSON based. Form-based 'just works' since when rendering the form (on GET), the CSRF token is automatically populated. We want to handle: - JSON requests where CSRF token is in a header (e.g. X-CSRF-Token) - Option to skip CSRF when using a token to authenticate (rather than session) (CSRF_PROTECT_MECHANISMS) - Option to skip CSRF for 'login'/unauthenticated requests (CSRF_IGNORE_UNAUTH_ENDPOINTS) This is complicated by the fact that the only way to disable form CSRF is to pass in meta={csrf: false} at form instantiation time. Be aware that for CSRF to work, caller MUST pass in session cookie. So for pure API, and no session cookie - there is no way to support CSRF-Login so app must set CSRF_IGNORE_UNAUTH_ENDPOINTS (or use CSRF/session cookie for logging in then once they have a token, no need for cookie). TODO: two-factor routes such as tf_setup need work. They seem to support both authenticated (via session?) as well as unauthenticated access. 
""" import sys import time from flask import ( Blueprint, abort, after_this_request, current_app, jsonify, request, session, ) from flask_login import current_user from werkzeug.datastructures import MultiDict from werkzeug.local import LocalProxy from .changeable import change_user_password from .confirmable import ( confirm_email_token_status, confirm_user, send_confirmation_instructions, ) from .decorators import anonymous_user_required, auth_required, unauth_csrf from .passwordless import login_token_status, send_login_instructions from .quart_compat import get_quart_status from .unified_signin import ( us_signin, us_signin_send_code, us_qrcode, us_setup, us_setup_validate, us_verify, us_verify_link, us_verify_send_code, ) from .recoverable import ( reset_password_token_status, send_reset_password_instructions, update_password, ) from .registerable import register_user from .twofactor import ( complete_two_factor_process, tf_clean_session, tf_disable, tf_login, ) from .utils import ( base_render_json, config_value, do_flash, get_message, get_post_login_redirect, get_post_logout_redirect, get_post_register_redirect, get_post_verify_redirect, get_url, json_error_response, login_user, logout_user, slash_url_suffix, suppress_form_csrf, url_for_security, ) if get_quart_status(): # pragma: no cover from quart import make_response, redirect else: from flask import make_response, redirect # Convenient references _security = LocalProxy(lambda: current_app.extensions["security"]) _datastore = LocalProxy(lambda: _security.datastore) def default_render_json(payload, code, headers, user): """ Default JSON response handler. """ # Force Content-Type header to json. 
if headers is None: headers = dict() headers["Content-Type"] = "application/json" payload = dict(meta=dict(code=code), response=payload) return make_response(jsonify(payload), code, headers) PY3 = sys.version_info[0] == 3 if PY3 and get_quart_status(): # pragma: no cover from .async_compat import _commit # noqa: F401 else: def _commit(response=None): _datastore.commit() return response def _ctx(endpoint): return _security._run_ctx_processor(endpoint) @unauth_csrf(fall_through=True) def login(): """View function for login view Allow already authenticated users. For GET this is useful for single-page-applications on refresh - session still active but need to access user info and csrf-token. For POST - redirects to POST_LOGIN_VIEW (forms) or returns 400 (json). """ if current_user.is_authenticated and request.method == "POST": # Just redirect current_user to POST_LOGIN_VIEW. # While its tempting to try to logout the current user and login the # new requested user - that simply doesn't work with CSRF. # This does NOT use get_post_login_redirect() so that it doesn't look at # 'next' - which can cause infinite redirect loops # (see test_common::test_authenticated_loop) if _security._want_json(request): payload = json_error_response( errors=get_message("ANONYMOUS_USER_REQUIRED")[0] ) return _security._render_json(payload, 400, None, None) else: return redirect(get_url(_security.post_login_view)) form_class = _security.login_form if request.is_json: # Allow GET so we can return csrf_token for pre-login. 
if request.content_length: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(MultiDict([]), meta=suppress_form_csrf()) else: form = form_class(request.form, meta=suppress_form_csrf()) if form.validate_on_submit(): remember_me = form.remember.data if "remember" in form else None if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login( form.user, remember=remember_me, primary_authn_via="password" ) login_user(form.user, remember=remember_me, authn_via=["password"]) after_this_request(_commit) if _security._want_json(request): return base_render_json(form, include_auth_token=True) return redirect(get_post_login_redirect()) if _security._want_json(request): if current_user.is_authenticated: form.user = current_user return base_render_json(form) if current_user.is_authenticated: return redirect(get_url(_security.post_login_view)) else: return _security.render_template( config_value("LOGIN_USER_TEMPLATE"), login_user_form=form, **_ctx("login") ) @auth_required() def verify(): """View function which handles a authentication verification request. """ form_class = _security.verify_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form may have called verify_and_update_password() after_this_request(_commit) # verified - so set freshness time. 
session["fs_paa"] = time.time() if _security._want_json(request): return base_render_json(form) do_flash(*get_message("REAUTHENTICATION_SUCCESSFUL")) return redirect(get_post_verify_redirect()) if _security._want_json(request): assert form.user == current_user return base_render_json(form) return _security.render_template( config_value("VERIFY_TEMPLATE"), verify_form=form, **_ctx("verify") ) def logout(): """View function which handles a logout request.""" tf_clean_session() if current_user.is_authenticated: logout_user() # No body is required - so if a POST and json - return OK if request.method == "POST" and _security._want_json(request): return _security._render_json({}, 200, headers=None, user=None) return redirect(get_post_logout_redirect()) @anonymous_user_required def register(): """View function which handles a registration request.""" # For some unknown historic reason - if you don't require confirmation # (via email) then you need to type in your password twice. That might # make sense if you can't reset your password but in modern (2020) UX models # don't ask twice. if _security.confirmable or request.is_json: form_class = _security.confirm_register_form else: form_class = _security.register_form if request.is_json: form_data = MultiDict(request.get_json()) else: form_data = request.form form = form_class(form_data, meta=suppress_form_csrf()) if form.validate_on_submit(): did_login = False user = register_user(form) form.user = user # The 'auto-login' feature probably should be removed - I can't imagine # an application that would want random email accounts. It has been like this # since the beginning. Note that we still enforce 2FA - however for unified # signin - we adhere to historic behavior. 
if not _security.confirmable or _security.login_without_confirmation: if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="register") after_this_request(_commit) login_user(user, authn_via=["register"]) did_login = True if not _security._want_json(request): return redirect(get_post_register_redirect()) # Only include auth token if in fact user is permitted to login return base_render_json(form, include_auth_token=did_login) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("REGISTER_USER_TEMPLATE"), register_user_form=form, **_ctx("register") ) @unauth_csrf(fall_through=True) def send_login(): """View function that sends login instructions for passwordless login""" form_class = _security.passwordless_login_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_login_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("LOGIN_EMAIL_SENT", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_LOGIN_TEMPLATE"), send_login_form=form, **_ctx("send_login") ) @anonymous_user_required def token_login(token): """View function that handles passwordless login via a token Like reset-password and confirm - this is usually a GET via an email so from the request we can't differentiate form-based apps from non. 
""" expired, invalid, user = login_token_status(token) if not user or invalid: m, c = get_message("INVALID_LOGIN_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.login_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("login")) if expired: send_login_instructions(user) m, c = get_message( "LOGIN_EXPIRED", email=user.email, within=_security.login_within ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.login_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("login")) login_user(user, authn_via=["token"]) after_this_request(_commit) if _security.redirect_behavior == "spa": return redirect( get_url(_security.post_login_view, qparams=user.get_redirect_qparams()) ) do_flash(*get_message("PASSWORDLESS_LOGIN_SUCCESSFUL")) return redirect(get_post_login_redirect()) @unauth_csrf(fall_through=True) def send_confirmation(): """View function which sends confirmation instructions.""" form_class = _security.send_confirmation_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_confirmation_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("CONFIRMATION_REQUEST", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_CONFIRMATION_TEMPLATE"), send_confirmation_form=form, **_ctx("send_confirmation") ) def confirm_email(token): """View function which handles a email confirmation request.""" expired, invalid, user = confirm_email_token_status(token) if not user or invalid: m, c = get_message("INVALID_CONFIRMATION_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.confirm_error_view, qparams={c: m})) do_flash(m, c) return redirect( 
get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) already_confirmed = user.confirmed_at is not None if expired or already_confirmed: if already_confirmed: m, c = get_message("ALREADY_CONFIRMED") else: send_confirmation_instructions(user) m, c = get_message( "CONFIRMATION_EXPIRED", email=user.email, within=_security.confirm_email_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.confirm_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect( get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) confirm_user(user) after_this_request(_commit) if user != current_user: logout_user() if config_value("AUTO_LOGIN_AFTER_CONFIRM"): # N.B. this is a (small) security risk if email went to wrong place. # and you have the LOGIN_WITH_CONFIRMATION flag since in that case # you can be logged in and doing stuff - but another person could # get the email. if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="confirm") login_user(user, authn_via=["confirm"]) m, c = get_message("EMAIL_CONFIRMED") if _security.redirect_behavior == "spa": return redirect( get_url( _security.post_confirm_view, qparams=user.get_redirect_qparams({c: m}) ) ) do_flash(m, c) return redirect( get_url(_security.post_confirm_view) or get_url( _security.post_login_view if config_value("AUTO_LOGIN_AFTER_CONFIRM") else _security.login_url ) ) @anonymous_user_required @unauth_csrf(fall_through=True) def forgot_password(): """View function that handles a forgotten password request.""" form_class = _security.forgot_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_reset_password_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("PASSWORD_RESET_REQUEST", 
email=form.user.email)) if _security._want_json(request): return base_render_json(form, include_user=False) return _security.render_template( config_value("FORGOT_PASSWORD_TEMPLATE"), forgot_password_form=form, **_ctx("forgot_password") ) @anonymous_user_required @unauth_csrf(fall_through=True) def reset_password(token): """View function that handles a reset password request. This is usually called via GET as part of an email link and redirects to a reset-password form It is called via POST to actually update the password (and then redirects to a post reset/login view) If in either case the token is either invalid or expired it redirects to the 'forgot-password' form. In the case of non-form based configuration: For GET normal case - redirect to RESET_VIEW?token={token}&email={email} For GET invalid case - redirect to RESET_ERROR_VIEW?error={error}&email={email} For POST normal/successful case - return 200 with new authentication token For POST error case return 400 with form.errors """ expired, invalid, user = reset_password_token_status(token) form_class = _security.reset_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) form.user = user if request.method == "GET": if not user or invalid: m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.reset_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("forgot_password")) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.reset_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("forgot_password")) # All good - for SPA - redirect to the ``reset_view`` if 
_security.redirect_behavior == "spa": return redirect( get_url( _security.reset_view, qparams=user.get_redirect_qparams({"token": token}), ) ) # for forms - render the reset password form return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) # This is the POST case. m = None if not user or invalid: invalid = True m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if not _security._want_json(request): do_flash(m, c) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if not _security._want_json(request): do_flash(m, c) if invalid or expired: if _security._want_json(request): return _security._render_json(json_error_response(m), 400, None, None) else: return redirect(url_for_security("forgot_password")) if form.validate_on_submit(): after_this_request(_commit) update_password(user, form.password.data) if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login(user, primary_authn_via="reset") login_user(user, authn_via=["reset"]) if _security._want_json(request): login_form = _security.login_form(MultiDict({"email": user.email})) setattr(login_form, "user", user) return base_render_json(login_form, include_auth_token=True) else: do_flash(*get_message("PASSWORD_RESET")) return redirect( get_url(_security.post_reset_view) or get_url(_security.post_login_view) ) # validation failure case - for forms - we try again including the token # for non-forms - we just return errors and assume caller remembers token. 
if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) @auth_required("basic", "token", "session") def change_password(): """View function which handles a change password request.""" form_class = _security.change_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): after_this_request(_commit) change_user_password(current_user._get_current_object(), form.new_password.data) if _security._want_json(request): form.user = current_user return base_render_json(form, include_auth_token=True) do_flash(*get_message("PASSWORD_CHANGE")) return redirect( get_url(_security.post_change_view) or get_url(_security.post_login_view) ) if _security._want_json(request): form.user = current_user return base_render_json(form) return _security.render_template( config_value("CHANGE_PASSWORD_TEMPLATE"), change_password_form=form, **_ctx("change_password") ) @unauth_csrf(fall_through=True) def two_factor_setup(): """View function for two-factor setup. This is used both for GET to fetch forms and POST to actually set configuration (and send token). There are 3 cases for setting up: 1) initial login and application requires 2FA 2) changing existing 2FA information 3) user wanting to enable or disable 2FA (assuming application doesn't require it) In order to CHANGE/ENABLE/DISABLE a 2FA information, user must be properly logged in AND must perform a fresh password validation by calling POST /tf-confirm (which sets 'tf_confirmed' in the session). For initial login when 2FA required of course user can't be logged in - in this case we need to have been sent some state via the session as part of login to show a) who and b) that they successfully authenticated. 
""" form_class = _security.two_factor_setup_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if not current_user.is_authenticated: # This is the initial login case # We can also get here from setup if they want to change if not all(k in session for k in ["tf_user_id", "tf_state"]) or session[ "tf_state" ] not in ["setup_from_login", "validating_profile"]: # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) else: # all other cases require user to be logged in and have performed # additional password verification as signified by 'tf_confirmed' # in the session. if "tf_confirmed" not in session: tf_clean_session() return _tf_illegal_state(form, _security.two_factor_confirm_url) user = current_user if form.validate_on_submit(): # Before storing in DB and therefore requiring 2FA we need to # make sure it actually works. # Requiring 2FA is triggered by having BOTH tf_totp_secret and # tf_primary_method in the user record (or having the application # global config TWO_FACTOR_REQUIRED) # Until we correctly validate the 2FA - we don't set primary_method in # user model but use the session to store it. pm = form.setup.data if pm == "disable": tf_disable(user) after_this_request(_commit) do_flash(*get_message("TWO_FACTOR_DISABLED")) if not _security._want_json(request): return redirect(get_url(_security.post_login_view)) else: return base_render_json(form) # Regenerate the TOTP secret on every call of 2FA setup unless it is # within the same session and method (e.g. 
upon entering the phone number) if pm != session.get("tf_primary_method", None): session["tf_totp_secret"] = _security._totp_factory.generate_totp_secret() session["tf_primary_method"] = pm session["tf_state"] = "validating_profile" new_phone = form.phone.data if len(form.phone.data) > 0 else None if new_phone: user.tf_phone_number = new_phone _datastore.put(user) after_this_request(_commit) # This form is sort of bizarre - for SMS and authenticator # you select, then get more info, and submit again. # For authenticator of course, we don't actually send anything # and for SMS it is the second time around that we get the phone number if pm == "email" or (pm == "sms" and new_phone): msg = user.tf_send_security_token( method=pm, totp_secret=session["tf_totp_secret"], phone_number=getattr(user, "tf_phone_number", None), ) if msg: # send code didn't work form.setup.errors = list() form.setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) code_form = _security.two_factor_verify_code_form() if not _security._want_json(request): return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), chosen_method=pm, **_ctx("tf_setup") ) # We get here on GET and POST with failed validation. 
# For things like phone number - we've already done one POST # that succeeded and now if failed - so retain the initial info if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() choices = config_value("TWO_FACTOR_ENABLED_METHODS") if not config_value("TWO_FACTOR_REQUIRED"): choices.append("disable") return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=choices, chosen_method=form.setup.data, two_factor_required=config_value("TWO_FACTOR_REQUIRED"), **_ctx("tf_setup") ) @unauth_csrf(fall_through=True) def two_factor_token_validation(): """View function for two-factor token validation Two cases: 1) normal login case - everything setup correctly; normal 2FA validation In this case - user not logged in - but 'tf_state' == 'ready' or 'validating_profile' 2) validating after CHANGE/ENABLE 2FA. In this case user logged in/authenticated they must have 'tf_confirmed' set meaning they re-entered their passwd """ form_class = _security.two_factor_verify_code_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) changing = current_user.is_authenticated if not changing: # This is the normal login case if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] not in ["ready", "validating_profile"] or ( session["tf_state"] == "validating_profile" and "tf_primary_method" not in session ) ): # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) if session["tf_state"] == "ready": pm = user.tf_primary_method totp_secret = user.tf_totp_secret else: pm = session["tf_primary_method"] 
totp_secret = session["tf_totp_secret"] else: if ( not all( k in session for k in ["tf_confirmed", "tf_state", "tf_primary_method"] ) or session["tf_state"] != "validating_profile" ): tf_clean_session() # logout since this seems like attack-ish/logic error logout_user() return _tf_illegal_state(form, _security.login_url) pm = session["tf_primary_method"] totp_secret = session["tf_totp_secret"] form.user = current_user setattr(form, "primary_method", pm) setattr(form, "tf_totp_secret", totp_secret) if form.validate_on_submit(): # Success - log in user and clear all session variables completion_message = complete_two_factor_process( form.user, pm, totp_secret, changing, session.pop("tf_remember_login", None) ) after_this_request(_commit) if not _security._want_json(request): do_flash(*get_message(completion_message)) return redirect(get_post_login_redirect()) # GET or not successful POST if _security._want_json(request): return base_render_json(form) # if we were trying to validate a new method if changing: setup_form = _security.two_factor_setup_form() return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=setup_form, two_factor_verify_code_form=form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), **_ctx("tf_setup") ) # if we were trying to validate an existing method else: rescue_form = _security.two_factor_rescue_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_rescue_form=rescue_form, two_factor_verify_code_form=form, problem=None, **_ctx("tf_token_validation") ) @anonymous_user_required @unauth_csrf(fall_through=True) def two_factor_rescue(): """ Function that handles a situation where user can't enter his two-factor validation code User must have already provided valid username/password. 
User must have already established 2FA """ form_class = _security.two_factor_rescue_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] != "ready" ): tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) rproblem = "" if form.validate_on_submit(): problem = form.data["help_setup"] rproblem = problem # if the problem is that user can't access his device, w # e send him code through mail if problem == "lost_device": msg = form.user.tf_send_security_token( method="email", totp_secret=form.user.tf_totp_secret, phone_number=getattr(form.user, "tf_phone_number", None), ) if msg: rproblem = "" form.help_setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) # send app provider a mail message regarding trouble elif problem == "no_mail_access": _security._send_mail( config_value("EMAIL_SUBJECT_TWO_FACTOR_RESCUE"), config_value("TWO_FACTOR_RESCUE_MAIL"), "two_factor_rescue", user=form.user, ) else: return "", 404 if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_verify_code_form=code_form, two_factor_rescue_form=form, rescue_mail=config_value("TWO_FACTOR_RESCUE_MAIL"), problem=rproblem, **_ctx("tf_token_validation") ) @auth_required("basic", "session", "token") def two_factor_verify_password(): """View function which handles a password verification request.""" form_class = _security.two_factor_verify_password_form if request.is_json: form = 
form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form called verify_and_update_password() after_this_request(_commit) session["tf_confirmed"] = True m, c = get_message("TWO_FACTOR_PASSWORD_CONFIRMATION_DONE") if not _security._want_json(request): do_flash(m, c) return redirect(url_for_security("two_factor_setup")) else: return _security._render_json(json_error_response(m), 400, None, None) if _security._want_json(request): assert form.user == current_user # form.user = current_user return base_render_json(form) return _security.render_template( config_value("TWO_FACTOR_VERIFY_PASSWORD_TEMPLATE"), two_factor_verify_password_form=form, **_ctx("tf_verify_password") ) @unauth_csrf(fall_through=True) def two_factor_qrcode(): if current_user.is_authenticated: user = current_user else: if "tf_user_id" not in session: abort(404) user = _datastore.get_user(session["tf_user_id"]) if not user: # Seems like we should be careful here if user_id is gone. tf_clean_session() abort(404) if "authenticator" not in config_value("TWO_FACTOR_ENABLED_METHODS"): return abort(404) if ( "tf_primary_method" not in session or session["tf_primary_method"] != "authenticator" ): return abort(404) totp = user.tf_totp_secret if "tf_totp_secret" in session: totp = session["tf_totp_secret"] try: import pyqrcode # By convention, the URI should have the username that the user # logs in with. username = user.calc_username() url = pyqrcode.create( _security._totp_factory.get_totp_uri( username if username else "Unknown", totp ) ) except ImportError: # For TWO_FACTOR - this should have been checked at app init. 
raise from io import BytesIO stream = BytesIO() url.svg(stream, scale=3) return ( stream.getvalue(), 200, { "Content-Type": "image/svg+xml", "Cache-Control": "no-cache, no-store, must-revalidate", "Pragma": "no-cache", "Expires": "0", }, ) def _tf_illegal_state(form, redirect_to): m, c = get_message("TWO_FACTOR_PERMISSION_DENIED") if not _security._want_json(request): do_flash(m, c) return redirect(get_url(redirect_to)) else: return _security._render_json(json_error_response(m), 400, None, None) def create_blueprint(app, state, import_name, json_encoder=None): """Creates the security extension blueprint""" bp = Blueprint( state.blueprint_name, import_name, url_prefix=state.url_prefix, subdomain=state.subdomain, template_folder="templates", ) if json_encoder: bp.json_encoder = json_encoder if state.logout_methods is not None: bp.route(state.logout_url, methods=state.logout_methods, endpoint="logout")( logout ) if state.passwordless: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(send_login) bp.route( state.login_url + slash_url_suffix(state.login_url, "<token>"), endpoint="token_login", )(token_login) elif config_value("US_SIGNIN_REPLACES_LOGIN", app=app): bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(us_signin) else: bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(login) bp.route(state.verify_url, methods=["GET", "POST"], endpoint="verify")(verify) if state.unified_signin: bp.route(state.us_signin_url, methods=["GET", "POST"], endpoint="us_signin")( us_signin ) bp.route( state.us_signin_send_code_url, methods=["GET", "POST"], endpoint="us_signin_send_code", )(us_signin_send_code) bp.route(state.us_setup_url, methods=["GET", "POST"], endpoint="us_setup")( us_setup ) bp.route( state.us_setup_url + slash_url_suffix(state.us_setup_url, "<token>"), methods=["GET", "POST"], endpoint="us_setup_validate", )(us_setup_validate) # Freshness verification if config_value("FRESHNESS", app=app).total_seconds() >= 
0: bp.route( state.us_verify_url, methods=["GET", "POST"], endpoint="us_verify" )(us_verify) bp.route( state.us_verify_send_code_url, methods=["GET", "POST"], endpoint="us_verify_send_code", )(us_verify_send_code) bp.route(state.us_verify_link_url, methods=["GET"], endpoint="us_verify_link")( us_verify_link ) bp.route( state.us_qrcode_url + slash_url_suffix(state.us_setup_url, "<token>"), endpoint="us_qrcode", )(us_qrcode) if state.two_factor: tf_token_validation = "two_factor_token_validation" tf_qrcode = "two_factor_qrcode" bp.route( state.two_factor_setup_url, methods=["GET", "POST"], endpoint="two_factor_setup", )(two_factor_setup) bp.route( state.two_factor_token_validation_url, methods=["GET", "POST"], endpoint=tf_token_validation, )(two_factor_token_validation) bp.route(state.two_factor_qrcode_url, endpoint=tf_qrcode)(two_factor_qrcode) bp.route( state.two_factor_rescue_url, methods=["GET", "POST"], endpoint="two_factor_rescue", )(two_factor_rescue) bp.route( state.two_factor_confirm_url, methods=["GET", "POST"], endpoint="two_factor_verify_password", )(two_factor_verify_password) if state.registerable: bp.route(state.register_url, methods=["GET", "POST"], endpoint="register")( register ) if state.recoverable: bp.route(state.reset_url, methods=["GET", "POST"], endpoint="forgot_password")( forgot_password ) bp.route( state.reset_url + slash_url_suffix(state.reset_url, "<token>"), methods=["GET", "POST"], endpoint="reset_password", )(reset_password) if state.changeable: bp.route(state.change_url, methods=["GET", "POST"], endpoint="change_password")( change_password ) if state.confirmable: bp.route( state.confirm_url, methods=["GET", "POST"], endpoint="send_confirmation" )(send_confirmation) bp.route( state.confirm_url + slash_url_suffix(state.confirm_url, "<token>"), methods=["GET", "POST"], endpoint="confirm_email", )(confirm_email) return bp
./CrossVul/dataset_final_sorted/CWE-352/py/good_1891_2
crossvul-python_data_bad_1892_0
# -*- coding: utf-8 -*- """ flask_security.views ~~~~~~~~~~~~~~~~~~~~ Flask-Security views module :copyright: (c) 2012 by Matt Wright. :copyright: (c) 2019-2020 by J. Christopher Wagner (jwag). :license: MIT, see LICENSE for more details. CSRF is tricky. By default all our forms have CSRF protection built in via Flask-WTF. This is regardless of authentication method or whether the request is Form or JSON based. Form-based 'just works' since when rendering the form (on GET), the CSRF token is automatically populated. We want to handle: - JSON requests where CSRF token is in a header (e.g. X-CSRF-Token) - Option to skip CSRF when using a token to authenticate (rather than session) (CSRF_PROTECT_MECHANISMS) - Option to skip CSRF for 'login'/unauthenticated requests (CSRF_IGNORE_UNAUTH_ENDPOINTS) This is complicated by the fact that the only way to disable form CSRF is to pass in meta={csrf: false} at form instantiation time. Be aware that for CSRF to work, caller MUST pass in session cookie. So for pure API, and no session cookie - there is no way to support CSRF-Login so app must set CSRF_IGNORE_UNAUTH_ENDPOINTS (or use CSRF/session cookie for logging in then once they have a token, no need for cookie). TODO: two-factor routes such as tf_setup need work. They seem to support both authenticated (via session?) as well as unauthenticated access. 
""" import sys import time from flask import ( Blueprint, abort, after_this_request, current_app, jsonify, request, session, ) from flask_login import current_user from werkzeug.datastructures import MultiDict from werkzeug.local import LocalProxy from .changeable import change_user_password from .confirmable import ( confirm_email_token_status, confirm_user, send_confirmation_instructions, ) from .decorators import anonymous_user_required, auth_required, unauth_csrf from .passwordless import login_token_status, send_login_instructions from .quart_compat import get_quart_status from .unified_signin import ( us_signin, us_signin_send_code, us_qrcode, us_setup, us_setup_validate, us_verify, us_verify_link, us_verify_send_code, ) from .recoverable import ( reset_password_token_status, send_reset_password_instructions, update_password, ) from .registerable import register_user from .twofactor import ( complete_two_factor_process, tf_clean_session, tf_disable, tf_login, ) from .utils import ( base_render_json, config_value, do_flash, get_message, get_post_login_redirect, get_post_logout_redirect, get_post_register_redirect, get_post_verify_redirect, get_url, json_error_response, login_user, logout_user, slash_url_suffix, suppress_form_csrf, url_for_security, ) if get_quart_status(): # pragma: no cover from quart import make_response, redirect else: from flask import make_response, redirect # Convenient references _security = LocalProxy(lambda: current_app.extensions["security"]) _datastore = LocalProxy(lambda: _security.datastore) def default_render_json(payload, code, headers, user): """ Default JSON response handler. """ # Force Content-Type header to json. 
if headers is None: headers = dict() headers["Content-Type"] = "application/json" payload = dict(meta=dict(code=code), response=payload) return make_response(jsonify(payload), code, headers) PY3 = sys.version_info[0] == 3 if PY3 and get_quart_status(): # pragma: no cover from .async_compat import _commit # noqa: F401 else: def _commit(response=None): _datastore.commit() return response def _ctx(endpoint): return _security._run_ctx_processor(endpoint) @unauth_csrf(fall_through=True) def login(): """View function for login view Allow already authenticated users. For GET this is useful for single-page-applications on refresh - session still active but need to access user info and csrf-token. For POST - redirects to POST_LOGIN_VIEW (forms) or returns 400 (json). """ if current_user.is_authenticated and request.method == "POST": # Just redirect current_user to POST_LOGIN_VIEW. # While its tempting to try to logout the current user and login the # new requested user - that simply doesn't work with CSRF. # This does NOT use get_post_login_redirect() so that it doesn't look at # 'next' - which can cause infinite redirect loops # (see test_common::test_authenticated_loop) if _security._want_json(request): payload = json_error_response( errors=get_message("ANONYMOUS_USER_REQUIRED")[0] ) return _security._render_json(payload, 400, None, None) else: return redirect(get_url(_security.post_login_view)) form_class = _security.login_form if request.is_json: # Allow GET so we can return csrf_token for pre-login. 
if request.content_length: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(MultiDict([]), meta=suppress_form_csrf()) else: form = form_class(request.form, meta=suppress_form_csrf()) if form.validate_on_submit(): remember_me = form.remember.data if "remember" in form else None if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login( form.user, remember=remember_me, primary_authn_via="password" ) login_user(form.user, remember=remember_me, authn_via=["password"]) after_this_request(_commit) if not _security._want_json(request): return redirect(get_post_login_redirect()) if _security._want_json(request): if current_user.is_authenticated: form.user = current_user return base_render_json(form, include_auth_token=True) if current_user.is_authenticated: return redirect(get_url(_security.post_login_view)) else: return _security.render_template( config_value("LOGIN_USER_TEMPLATE"), login_user_form=form, **_ctx("login") ) @auth_required() def verify(): """View function which handles a authentication verification request. """ form_class = _security.verify_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form may have called verify_and_update_password() after_this_request(_commit) # verified - so set freshness time. 
session["fs_paa"] = time.time() if _security._want_json(request): return base_render_json(form) do_flash(*get_message("REAUTHENTICATION_SUCCESSFUL")) return redirect(get_post_verify_redirect()) if _security._want_json(request): assert form.user == current_user return base_render_json(form) return _security.render_template( config_value("VERIFY_TEMPLATE"), verify_form=form, **_ctx("verify") ) def logout(): """View function which handles a logout request.""" tf_clean_session() if current_user.is_authenticated: logout_user() # No body is required - so if a POST and json - return OK if request.method == "POST" and _security._want_json(request): return _security._render_json({}, 200, headers=None, user=None) return redirect(get_post_logout_redirect()) @anonymous_user_required def register(): """View function which handles a registration request.""" # For some unknown historic reason - if you don't require confirmation # (via email) then you need to type in your password twice. That might # make sense if you can't reset your password but in modern (2020) UX models # don't ask twice. if _security.confirmable or request.is_json: form_class = _security.confirm_register_form else: form_class = _security.register_form if request.is_json: form_data = MultiDict(request.get_json()) else: form_data = request.form form = form_class(form_data, meta=suppress_form_csrf()) if form.validate_on_submit(): did_login = False user = register_user(form) form.user = user # The 'auto-login' feature probably should be removed - I can't imagine # an application that would want random email accounts. It has been like this # since the beginning. Note that we still enforce 2FA - however for unified # signin - we adhere to historic behavior. 
if not _security.confirmable or _security.login_without_confirmation: if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="register") after_this_request(_commit) login_user(user, authn_via=["register"]) did_login = True if not _security._want_json(request): return redirect(get_post_register_redirect()) # Only include auth token if in fact user is permitted to login return base_render_json(form, include_auth_token=did_login) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("REGISTER_USER_TEMPLATE"), register_user_form=form, **_ctx("register") ) @unauth_csrf(fall_through=True) def send_login(): """View function that sends login instructions for passwordless login""" form_class = _security.passwordless_login_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_login_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("LOGIN_EMAIL_SENT", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_LOGIN_TEMPLATE"), send_login_form=form, **_ctx("send_login") ) @anonymous_user_required def token_login(token): """View function that handles passwordless login via a token Like reset-password and confirm - this is usually a GET via an email so from the request we can't differentiate form-based apps from non. 
""" expired, invalid, user = login_token_status(token) if not user or invalid: m, c = get_message("INVALID_LOGIN_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.login_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("login")) if expired: send_login_instructions(user) m, c = get_message( "LOGIN_EXPIRED", email=user.email, within=_security.login_within ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.login_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("login")) login_user(user, authn_via=["token"]) after_this_request(_commit) if _security.redirect_behavior == "spa": return redirect( get_url(_security.post_login_view, qparams=user.get_redirect_qparams()) ) do_flash(*get_message("PASSWORDLESS_LOGIN_SUCCESSFUL")) return redirect(get_post_login_redirect()) @unauth_csrf(fall_through=True) def send_confirmation(): """View function which sends confirmation instructions.""" form_class = _security.send_confirmation_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_confirmation_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("CONFIRMATION_REQUEST", email=form.user.email)) if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("SEND_CONFIRMATION_TEMPLATE"), send_confirmation_form=form, **_ctx("send_confirmation") ) def confirm_email(token): """View function which handles a email confirmation request.""" expired, invalid, user = confirm_email_token_status(token) if not user or invalid: m, c = get_message("INVALID_CONFIRMATION_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.confirm_error_view, qparams={c: m})) do_flash(m, c) return redirect( 
get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) already_confirmed = user.confirmed_at is not None if expired or already_confirmed: if already_confirmed: m, c = get_message("ALREADY_CONFIRMED") else: send_confirmation_instructions(user) m, c = get_message( "CONFIRMATION_EXPIRED", email=user.email, within=_security.confirm_email_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.confirm_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect( get_url(_security.confirm_error_view) or url_for_security("send_confirmation") ) confirm_user(user) after_this_request(_commit) if user != current_user: logout_user() if config_value("AUTO_LOGIN_AFTER_CONFIRM"): # N.B. this is a (small) security risk if email went to wrong place. # and you have the LOGIN_WITH_CONFIRMATION flag since in that case # you can be logged in and doing stuff - but another person could # get the email. if config_value("TWO_FACTOR") and config_value("TWO_FACTOR_REQUIRED"): return tf_login(user, primary_authn_via="confirm") login_user(user, authn_via=["confirm"]) m, c = get_message("EMAIL_CONFIRMED") if _security.redirect_behavior == "spa": return redirect( get_url( _security.post_confirm_view, qparams=user.get_redirect_qparams({c: m}) ) ) do_flash(m, c) return redirect( get_url(_security.post_confirm_view) or get_url( _security.post_login_view if config_value("AUTO_LOGIN_AFTER_CONFIRM") else _security.login_url ) ) @anonymous_user_required @unauth_csrf(fall_through=True) def forgot_password(): """View function that handles a forgotten password request.""" form_class = _security.forgot_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): send_reset_password_instructions(form.user) if not _security._want_json(request): do_flash(*get_message("PASSWORD_RESET_REQUEST", 
email=form.user.email)) if _security._want_json(request): return base_render_json(form, include_user=False) return _security.render_template( config_value("FORGOT_PASSWORD_TEMPLATE"), forgot_password_form=form, **_ctx("forgot_password") ) @anonymous_user_required @unauth_csrf(fall_through=True) def reset_password(token): """View function that handles a reset password request. This is usually called via GET as part of an email link and redirects to a reset-password form It is called via POST to actually update the password (and then redirects to a post reset/login view) If in either case the token is either invalid or expired it redirects to the 'forgot-password' form. In the case of non-form based configuration: For GET normal case - redirect to RESET_VIEW?token={token}&email={email} For GET invalid case - redirect to RESET_ERROR_VIEW?error={error}&email={email} For POST normal/successful case - return 200 with new authentication token For POST error case return 400 with form.errors """ expired, invalid, user = reset_password_token_status(token) form_class = _security.reset_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) form.user = user if request.method == "GET": if not user or invalid: m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if _security.redirect_behavior == "spa": return redirect(get_url(_security.reset_error_view, qparams={c: m})) do_flash(m, c) return redirect(url_for_security("forgot_password")) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if _security.redirect_behavior == "spa": return redirect( get_url( _security.reset_error_view, qparams=user.get_redirect_qparams({c: m}), ) ) do_flash(m, c) return redirect(url_for_security("forgot_password")) # All good - for SPA - redirect to the ``reset_view`` if 
_security.redirect_behavior == "spa": return redirect( get_url( _security.reset_view, qparams=user.get_redirect_qparams({"token": token}), ) ) # for forms - render the reset password form return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) # This is the POST case. m = None if not user or invalid: invalid = True m, c = get_message("INVALID_RESET_PASSWORD_TOKEN") if not _security._want_json(request): do_flash(m, c) if expired: send_reset_password_instructions(user) m, c = get_message( "PASSWORD_RESET_EXPIRED", email=user.email, within=_security.reset_password_within, ) if not _security._want_json(request): do_flash(m, c) if invalid or expired: if _security._want_json(request): return _security._render_json(json_error_response(m), 400, None, None) else: return redirect(url_for_security("forgot_password")) if form.validate_on_submit(): after_this_request(_commit) update_password(user, form.password.data) if config_value("TWO_FACTOR") and ( config_value("TWO_FACTOR_REQUIRED") or (form.user.tf_totp_secret and form.user.tf_primary_method) ): return tf_login(user, primary_authn_via="reset") login_user(user, authn_via=["reset"]) if _security._want_json(request): login_form = _security.login_form(MultiDict({"email": user.email})) setattr(login_form, "user", user) return base_render_json(login_form, include_auth_token=True) else: do_flash(*get_message("PASSWORD_RESET")) return redirect( get_url(_security.post_reset_view) or get_url(_security.post_login_view) ) # validation failure case - for forms - we try again including the token # for non-forms - we just return errors and assume caller remembers token. 
if _security._want_json(request): return base_render_json(form) return _security.render_template( config_value("RESET_PASSWORD_TEMPLATE"), reset_password_form=form, reset_password_token=token, **_ctx("reset_password") ) @auth_required("basic", "token", "session") def change_password(): """View function which handles a change password request.""" form_class = _security.change_password_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): after_this_request(_commit) change_user_password(current_user._get_current_object(), form.new_password.data) if not _security._want_json(request): do_flash(*get_message("PASSWORD_CHANGE")) return redirect( get_url(_security.post_change_view) or get_url(_security.post_login_view) ) if _security._want_json(request): form.user = current_user return base_render_json(form, include_auth_token=True) return _security.render_template( config_value("CHANGE_PASSWORD_TEMPLATE"), change_password_form=form, **_ctx("change_password") ) @unauth_csrf(fall_through=True) def two_factor_setup(): """View function for two-factor setup. This is used both for GET to fetch forms and POST to actually set configuration (and send token). There are 3 cases for setting up: 1) initial login and application requires 2FA 2) changing existing 2FA information 3) user wanting to enable or disable 2FA (assuming application doesn't require it) In order to CHANGE/ENABLE/DISABLE a 2FA information, user must be properly logged in AND must perform a fresh password validation by calling POST /tf-confirm (which sets 'tf_confirmed' in the session). For initial login when 2FA required of course user can't be logged in - in this case we need to have been sent some state via the session as part of login to show a) who and b) that they successfully authenticated. 
""" form_class = _security.two_factor_setup_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if not current_user.is_authenticated: # This is the initial login case # We can also get here from setup if they want to change if not all(k in session for k in ["tf_user_id", "tf_state"]) or session[ "tf_state" ] not in ["setup_from_login", "validating_profile"]: # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) else: # all other cases require user to be logged in and have performed # additional password verification as signified by 'tf_confirmed' # in the session. if "tf_confirmed" not in session: tf_clean_session() return _tf_illegal_state(form, _security.two_factor_confirm_url) user = current_user if form.validate_on_submit(): # Before storing in DB and therefore requiring 2FA we need to # make sure it actually works. # Requiring 2FA is triggered by having BOTH tf_totp_secret and # tf_primary_method in the user record (or having the application # global config TWO_FACTOR_REQUIRED) # Until we correctly validate the 2FA - we don't set primary_method in # user model but use the session to store it. pm = form.setup.data if pm == "disable": tf_disable(user) after_this_request(_commit) do_flash(*get_message("TWO_FACTOR_DISABLED")) if not _security._want_json(request): return redirect(get_url(_security.post_login_view)) else: return base_render_json(form) # Regenerate the TOTP secret on every call of 2FA setup unless it is # within the same session and method (e.g. 
upon entering the phone number) if pm != session.get("tf_primary_method", None): session["tf_totp_secret"] = _security._totp_factory.generate_totp_secret() session["tf_primary_method"] = pm session["tf_state"] = "validating_profile" new_phone = form.phone.data if len(form.phone.data) > 0 else None if new_phone: user.tf_phone_number = new_phone _datastore.put(user) after_this_request(_commit) # This form is sort of bizarre - for SMS and authenticator # you select, then get more info, and submit again. # For authenticator of course, we don't actually send anything # and for SMS it is the second time around that we get the phone number if pm == "email" or (pm == "sms" and new_phone): msg = user.tf_send_security_token( method=pm, totp_secret=session["tf_totp_secret"], phone_number=getattr(user, "tf_phone_number", None), ) if msg: # send code didn't work form.setup.errors = list() form.setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) code_form = _security.two_factor_verify_code_form() if not _security._want_json(request): return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), chosen_method=pm, **_ctx("tf_setup") ) # We get here on GET and POST with failed validation. 
# For things like phone number - we've already done one POST # that succeeded and now if failed - so retain the initial info if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() choices = config_value("TWO_FACTOR_ENABLED_METHODS") if not config_value("TWO_FACTOR_REQUIRED"): choices.append("disable") return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=form, two_factor_verify_code_form=code_form, choices=choices, chosen_method=form.setup.data, two_factor_required=config_value("TWO_FACTOR_REQUIRED"), **_ctx("tf_setup") ) @unauth_csrf(fall_through=True) def two_factor_token_validation(): """View function for two-factor token validation Two cases: 1) normal login case - everything setup correctly; normal 2FA validation In this case - user not logged in - but 'tf_state' == 'ready' or 'validating_profile' 2) validating after CHANGE/ENABLE 2FA. In this case user logged in/authenticated they must have 'tf_confirmed' set meaning they re-entered their passwd """ form_class = _security.two_factor_verify_code_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) changing = current_user.is_authenticated if not changing: # This is the normal login case if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] not in ["ready", "validating_profile"] or ( session["tf_state"] == "validating_profile" and "tf_primary_method" not in session ) ): # illegal call on this endpoint tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) if session["tf_state"] == "ready": pm = user.tf_primary_method totp_secret = user.tf_totp_secret else: pm = session["tf_primary_method"] 
totp_secret = session["tf_totp_secret"] else: if ( not all( k in session for k in ["tf_confirmed", "tf_state", "tf_primary_method"] ) or session["tf_state"] != "validating_profile" ): tf_clean_session() # logout since this seems like attack-ish/logic error logout_user() return _tf_illegal_state(form, _security.login_url) pm = session["tf_primary_method"] totp_secret = session["tf_totp_secret"] form.user = current_user setattr(form, "primary_method", pm) setattr(form, "tf_totp_secret", totp_secret) if form.validate_on_submit(): # Success - log in user and clear all session variables completion_message = complete_two_factor_process( form.user, pm, totp_secret, changing, session.pop("tf_remember_login", None) ) after_this_request(_commit) if not _security._want_json(request): do_flash(*get_message(completion_message)) return redirect(get_post_login_redirect()) # GET or not successful POST if _security._want_json(request): return base_render_json(form) # if we were trying to validate a new method if changing: setup_form = _security.two_factor_setup_form() return _security.render_template( config_value("TWO_FACTOR_SETUP_TEMPLATE"), two_factor_setup_form=setup_form, two_factor_verify_code_form=form, choices=config_value("TWO_FACTOR_ENABLED_METHODS"), **_ctx("tf_setup") ) # if we were trying to validate an existing method else: rescue_form = _security.two_factor_rescue_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_rescue_form=rescue_form, two_factor_verify_code_form=form, problem=None, **_ctx("tf_token_validation") ) @anonymous_user_required @unauth_csrf(fall_through=True) def two_factor_rescue(): """ Function that handles a situation where user can't enter his two-factor validation code User must have already provided valid username/password. 
User must have already established 2FA """ form_class = _security.two_factor_rescue_form if request.is_json: form = form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if ( not all(k in session for k in ["tf_user_id", "tf_state"]) or session["tf_state"] != "ready" ): tf_clean_session() return _tf_illegal_state(form, _security.login_url) user = _datastore.get_user(session["tf_user_id"]) form.user = user if not user: tf_clean_session() return _tf_illegal_state(form, _security.login_url) rproblem = "" if form.validate_on_submit(): problem = form.data["help_setup"] rproblem = problem # if the problem is that user can't access his device, w # e send him code through mail if problem == "lost_device": msg = form.user.tf_send_security_token( method="email", totp_secret=form.user.tf_totp_secret, phone_number=getattr(form.user, "tf_phone_number", None), ) if msg: rproblem = "" form.help_setup.errors.append(msg) if _security._want_json(request): return base_render_json( form, include_user=False, error_status_code=500 ) # send app provider a mail message regarding trouble elif problem == "no_mail_access": _security._send_mail( config_value("EMAIL_SUBJECT_TWO_FACTOR_RESCUE"), config_value("TWO_FACTOR_RESCUE_MAIL"), "two_factor_rescue", user=form.user, ) else: return "", 404 if _security._want_json(request): return base_render_json(form, include_user=False) code_form = _security.two_factor_verify_code_form() return _security.render_template( config_value("TWO_FACTOR_VERIFY_CODE_TEMPLATE"), two_factor_verify_code_form=code_form, two_factor_rescue_form=form, rescue_mail=config_value("TWO_FACTOR_RESCUE_MAIL"), problem=rproblem, **_ctx("tf_token_validation") ) @auth_required("basic", "session", "token") def two_factor_verify_password(): """View function which handles a password verification request.""" form_class = _security.two_factor_verify_password_form if request.is_json: form = 
form_class(MultiDict(request.get_json()), meta=suppress_form_csrf()) else: form = form_class(meta=suppress_form_csrf()) if form.validate_on_submit(): # form called verify_and_update_password() after_this_request(_commit) session["tf_confirmed"] = True m, c = get_message("TWO_FACTOR_PASSWORD_CONFIRMATION_DONE") if not _security._want_json(request): do_flash(m, c) return redirect(url_for_security("two_factor_setup")) else: return _security._render_json(json_error_response(m), 400, None, None) if _security._want_json(request): assert form.user == current_user # form.user = current_user return base_render_json(form) return _security.render_template( config_value("TWO_FACTOR_VERIFY_PASSWORD_TEMPLATE"), two_factor_verify_password_form=form, **_ctx("tf_verify_password") ) @unauth_csrf(fall_through=True) def two_factor_qrcode(): if current_user.is_authenticated: user = current_user else: if "tf_user_id" not in session: abort(404) user = _datastore.get_user(session["tf_user_id"]) if not user: # Seems like we should be careful here if user_id is gone. tf_clean_session() abort(404) if "authenticator" not in config_value("TWO_FACTOR_ENABLED_METHODS"): return abort(404) if ( "tf_primary_method" not in session or session["tf_primary_method"] != "authenticator" ): return abort(404) totp = user.tf_totp_secret if "tf_totp_secret" in session: totp = session["tf_totp_secret"] try: import pyqrcode # By convention, the URI should have the username that the user # logs in with. username = user.calc_username() url = pyqrcode.create( _security._totp_factory.get_totp_uri( username if username else "Unknown", totp ) ) except ImportError: # For TWO_FACTOR - this should have been checked at app init. 
        raise

    from io import BytesIO

    # Render the QR code as SVG and return it with cache-busting headers so
    # a stale (previous secret's) code is never served.
    stream = BytesIO()
    url.svg(stream, scale=3)
    return (
        stream.getvalue(),
        200,
        {
            "Content-Type": "image/svg+xml",
            "Cache-Control": "no-cache, no-store, must-revalidate",
            "Pragma": "no-cache",
            "Expires": "0",
        },
    )


def _tf_illegal_state(form, redirect_to):
    # Shared error response for two-factor endpoints hit in an invalid
    # session state: flash+redirect for forms, 400 JSON otherwise.
    m, c = get_message("TWO_FACTOR_PERMISSION_DENIED")
    if not _security._want_json(request):
        do_flash(m, c)
        return redirect(get_url(redirect_to))
    else:
        return _security._render_json(json_error_response(m), 400, None, None)


def create_blueprint(app, state, import_name, json_encoder=None):
    """Creates the security extension blueprint.

    Routes are registered conditionally based on which features
    (``passwordless``, ``unified_signin``, ``two_factor``, ``registerable``,
    ``recoverable``, ``changeable``, ``confirmable``) are enabled on
    ``state``. Returns the configured :class:`~flask.Blueprint`.
    """
    bp = Blueprint(
        state.blueprint_name,
        import_name,
        url_prefix=state.url_prefix,
        subdomain=state.subdomain,
        template_folder="templates",
    )
    if json_encoder:
        bp.json_encoder = json_encoder

    # NOTE(review): allowed logout HTTP methods come straight from config;
    # if GET is included, logout is reachable without a CSRF token - confirm
    # this is intended for this deployment.
    if state.logout_methods is not None:
        bp.route(state.logout_url, methods=state.logout_methods, endpoint="logout")(
            logout
        )

    # The "login" endpoint is claimed by exactly one of three flows:
    # passwordless email-token login, unified sign-in, or standard login.
    if state.passwordless:
        bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(send_login)
        bp.route(
            state.login_url + slash_url_suffix(state.login_url, "<token>"),
            endpoint="token_login",
        )(token_login)
    elif config_value("US_SIGNIN_REPLACES_LOGIN", app=app):
        bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(us_signin)
    else:
        bp.route(state.login_url, methods=["GET", "POST"], endpoint="login")(login)

    bp.route(state.verify_url, methods=["GET", "POST"], endpoint="verify")(verify)

    if state.unified_signin:
        bp.route(state.us_signin_url, methods=["GET", "POST"], endpoint="us_signin")(
            us_signin
        )
        bp.route(
            state.us_signin_send_code_url,
            methods=["GET", "POST"],
            endpoint="us_signin_send_code",
        )(us_signin_send_code)
        bp.route(state.us_setup_url, methods=["GET", "POST"], endpoint="us_setup")(
            us_setup
        )
        bp.route(
            state.us_setup_url + slash_url_suffix(state.us_setup_url, "<token>"),
            methods=["GET", "POST"],
            endpoint="us_setup_validate",
        )(us_setup_validate)

        # Freshness verification
        # A negative FRESHNESS timedelta disables the verify endpoints.
        if config_value("FRESHNESS", app=app).total_seconds() >= 0:
            bp.route(
                state.us_verify_url, methods=["GET", "POST"], endpoint="us_verify"
            )(us_verify)
            bp.route(
                state.us_verify_send_code_url,
                methods=["GET", "POST"],
                endpoint="us_verify_send_code",
            )(us_verify_send_code)

        bp.route(state.us_verify_link_url, methods=["GET"], endpoint="us_verify_link")(
            us_verify_link
        )

        # NOTE(review): the token suffix here is derived from us_setup_url,
        # not us_qrcode_url - looks intentional (QR is part of setup) but
        # worth confirming.
        bp.route(
            state.us_qrcode_url + slash_url_suffix(state.us_setup_url, "<token>"),
            endpoint="us_qrcode",
        )(us_qrcode)

    if state.two_factor:
        tf_token_validation = "two_factor_token_validation"
        tf_qrcode = "two_factor_qrcode"
        bp.route(
            state.two_factor_setup_url,
            methods=["GET", "POST"],
            endpoint="two_factor_setup",
        )(two_factor_setup)
        bp.route(
            state.two_factor_token_validation_url,
            methods=["GET", "POST"],
            endpoint=tf_token_validation,
        )(two_factor_token_validation)
        bp.route(state.two_factor_qrcode_url, endpoint=tf_qrcode)(two_factor_qrcode)
        bp.route(
            state.two_factor_rescue_url,
            methods=["GET", "POST"],
            endpoint="two_factor_rescue",
        )(two_factor_rescue)
        bp.route(
            state.two_factor_confirm_url,
            methods=["GET", "POST"],
            endpoint="two_factor_verify_password",
        )(two_factor_verify_password)

    if state.registerable:
        bp.route(state.register_url, methods=["GET", "POST"], endpoint="register")(
            register
        )

    if state.recoverable:
        bp.route(state.reset_url, methods=["GET", "POST"], endpoint="forgot_password")(
            forgot_password
        )
        bp.route(
            state.reset_url + slash_url_suffix(state.reset_url, "<token>"),
            methods=["GET", "POST"],
            endpoint="reset_password",
        )(reset_password)

    if state.changeable:
        bp.route(state.change_url, methods=["GET", "POST"], endpoint="change_password")(
            change_password
        )

    if state.confirmable:
        bp.route(
            state.confirm_url, methods=["GET", "POST"], endpoint="send_confirmation"
        )(send_confirmation)
        bp.route(
            state.confirm_url + slash_url_suffix(state.confirm_url, "<token>"),
            methods=["GET", "POST"],
            endpoint="confirm_email",
        )(confirm_email)

    return bp
./CrossVul/dataset_final_sorted/CWE-352/py/bad_1892_0
crossvul-python_data_bad_40_0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""Backend-independent qute://* code.

Module attributes:
    pyeval_output: The output of the last :pyeval command.
    _HANDLERS: The handlers registered via decorators.
"""

import json
import os
import time
import textwrap
import mimetypes
import urllib
import collections

import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl

import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
                               objreg, urlutils)
from qutebrowser.misc import objects


# Output captured from the most recent :pyeval / :spawn commands, shown by
# the corresponding qute:// pages.
pyeval_output = ":pyeval was never called"
spawn_output = ":spawn was never called"

# Maps qute:// host name -> handler callable; filled by @add_handler.
_HANDLERS = {}


class NoHandlerFound(Exception):

    """Raised when no handler was found for the given URL."""

    pass


class QuteSchemeOSError(Exception):

    """Called when there was an OSError inside a handler."""

    pass


class QuteSchemeError(Exception):

    """Exception to signal that a handler should return an ErrorReply.

    Attributes correspond to the arguments in
    networkreply.ErrorNetworkReply.

    Attributes:
        errorstring: Error string to print.
        error: Numerical error value.
    """

    def __init__(self, errorstring, error):
        self.errorstring = errorstring
        self.error = error
        super().__init__(errorstring)


class Redirect(Exception):

    """Exception to signal a redirect should happen.

    Attributes:
        url: The URL to redirect to, as a QUrl.
    """

    def __init__(self, url):
        super().__init__(url.toDisplayString())
        self.url = url


class add_handler:  # noqa: N801,N806 pylint: disable=invalid-name

    """Decorator to register a qute://* URL handler.

    Attributes:
        _name: The 'foo' part of qute://foo
        backend: Limit which backends the handler can run with.
    """

    def __init__(self, name, backend=None):
        self._name = name
        self._backend = backend
        self._function = None

    def __call__(self, function):
        # Register the wrapper (not the bare function) so the backend check
        # runs on every dispatch; return the original function unchanged.
        self._function = function
        _HANDLERS[self._name] = self.wrapper
        return function

    def wrapper(self, *args, **kwargs):
        """Call the underlying function."""
        if self._backend is not None and objects.backend != self._backend:
            return self.wrong_backend_handler(*args, **kwargs)
        else:
            return self._function(*args, **kwargs)

    def wrong_backend_handler(self, url):
        """Show an error page about using the invalid backend."""
        html = jinja.render('error.html',
                            title="Error while opening qute://url",
                            url=url.toDisplayString(),
                            error='{} is not available with this '
                                  'backend'.format(url.toDisplayString()))
        return 'text/html', html


def data_for_url(url):
    """Get the data to show for the given URL.

    Args:
        url: The QUrl to show.

    Return:
        A (mimetype, data) tuple.

    Raises:
        Redirect: if the URL should be normalized/rewritten first.
        NoHandlerFound: if no handler is registered for the URL's host.
        QuteSchemeOSError: if the handler hit an OSError.
    """
    # Canonicalize first; a changed URL means the caller should redirect and
    # call us again with the normalized form.
    norm_url = url.adjusted(QUrl.NormalizePathSegments |
                            QUrl.StripTrailingSlash)
    if norm_url != url:
        raise Redirect(norm_url)

    path = url.path()
    host = url.host()
    query = urlutils.query_string(url)
    # A url like "qute:foo" is split as "scheme:path", not "scheme:host".
    log.misc.debug("url: {}, path: {}, host {}".format(
        url.toDisplayString(), path, host))

    if not path or not host:
        # Rewrite one-part forms (qute://help, qute:help) into the canonical
        # qute://<host>/ shape, preserving the query string.
        new_url = QUrl()
        new_url.setScheme('qute')
        # When path is absent, e.g. qute://help (with no trailing slash)
        if host:
            new_url.setHost(host)
        # When host is absent, e.g. qute:help
        else:
            new_url.setHost(path)

        new_url.setPath('/')
        if query:
            new_url.setQuery(query)
        if new_url.host():  # path was a valid host
            raise Redirect(new_url)

    try:
        handler = _HANDLERS[host]
    except KeyError:
        raise NoHandlerFound(url)

    try:
        mimetype, data = handler(url)
    except OSError as e:
        # FIXME:qtwebengine how to handle this?
        raise QuteSchemeOSError(e)
    except QuteSchemeError as e:
        # NOTE(review): `e` is unused and this bare re-raise is a no-op;
        # presumably kept as a documentation marker that handlers may raise
        # QuteSchemeError - could be dropped.
        raise

    assert mimetype is not None, url
    if mimetype == 'text/html' and isinstance(data, str):
        # We let handlers return HTML as text
        data = data.encode('utf-8', errors='xmlcharrefreplace')

    return mimetype, data


@add_handler('bookmarks')
def qute_bookmarks(_url):
    """Handler for qute://bookmarks. Display all quickmarks / bookmarks."""
    bookmarks = sorted(objreg.get('bookmark-manager').marks.items(),
                       key=lambda x: x[1])  # Sort by title
    quickmarks = sorted(objreg.get('quickmark-manager').marks.items(),
                        key=lambda x: x[0])  # Sort by name

    html = jinja.render('bookmarks.html',
                        title='Bookmarks',
                        bookmarks=bookmarks,
                        quickmarks=quickmarks)
    return 'text/html', html


@add_handler('tabs')
def qute_tabs(_url):
    """Handler for qute://tabs. Display information about all open tabs."""
    tabs = collections.defaultdict(list)
    for win_id, window in objreg.window_registry.items():
        # Skip windows whose C++ object was already destroyed.
        if sip.isdeleted(window):
            continue
        tabbed_browser = objreg.get('tabbed-browser',
                                    scope='window',
                                    window=win_id)
        for tab in tabbed_browser.widgets():
            # Don't list the qute://tabs page itself.
            if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]:
                urlstr = tab.url().toDisplayString()
                tabs[str(win_id)].append((tab.title(), urlstr))

    html = jinja.render('tabs.html',
                        title='Tabs',
                        tab_list_by_window=tabs)
    return 'text/html', html


def history_data(start_time, offset=None):
    """Return history data.

    Arguments:
        start_time: select history starting from this timestamp.
offset: number of items to skip """ # history atimes are stored as ints, ensure start_time is not a float start_time = int(start_time) hist = objreg.get('web-history') if offset is not None: entries = hist.entries_before(start_time, limit=1000, offset=offset) else: # end is 24hrs earlier than start end_time = start_time - 24*60*60 entries = hist.entries_between(end_time, start_time) return [{"url": e.url, "title": e.title or e.url, "time": e.atime} for e in entries] @add_handler('history') def qute_history(url): """Handler for qute://history. Display and serve history.""" if url.path() == '/data': try: offset = QUrlQuery(url).queryItemValue("offset") offset = int(offset) if offset else None except ValueError as e: raise QuteSchemeError("Query parameter offset is invalid", e) # Use start_time in query or current time. try: start_time = QUrlQuery(url).queryItemValue("start_time") start_time = float(start_time) if start_time else time.time() except ValueError as e: raise QuteSchemeError("Query parameter start_time is invalid", e) return 'text/html', json.dumps(history_data(start_time, offset)) else: return 'text/html', jinja.render( 'history.html', title='History', gap_interval=config.val.history_gap_interval ) @add_handler('javascript') def qute_javascript(url): """Handler for qute://javascript. Return content of file given as query parameter. 
""" path = url.path() if path: path = "javascript" + os.sep.join(path.split('/')) return 'text/html', utils.read_file(path, binary=False) else: raise QuteSchemeError("No file specified", ValueError()) @add_handler('pyeval') def qute_pyeval(_url): """Handler for qute://pyeval.""" html = jinja.render('pre.html', title='pyeval', content=pyeval_output) return 'text/html', html @add_handler('spawn-output') def qute_spawn_output(_url): """Handler for qute://spawn-output.""" html = jinja.render('pre.html', title='spawn output', content=spawn_output) return 'text/html', html @add_handler('version') @add_handler('verizon') def qute_version(_url): """Handler for qute://version.""" html = jinja.render('version.html', title='Version info', version=version.version(), copyright=qutebrowser.__copyright__) return 'text/html', html @add_handler('plainlog') def qute_plainlog(url): """Handler for qute://plainlog. An optional query parameter specifies the minimum log level to print. For example, qute://log?level=warning prints warnings and errors. Level can be one of: vdebug, debug, info, warning, error, critical. """ if log.ram_handler is None: text = "Log output was disabled." else: level = QUrlQuery(url).queryItemValue('level') if not level: level = 'vdebug' text = log.ram_handler.dump_log(html=False, level=level) html = jinja.render('pre.html', title='log', content=text) return 'text/html', html @add_handler('log') def qute_log(url): """Handler for qute://log. An optional query parameter specifies the minimum log level to print. For example, qute://log?level=warning prints warnings and errors. Level can be one of: vdebug, debug, info, warning, error, critical. 
""" if log.ram_handler is None: html_log = None else: level = QUrlQuery(url).queryItemValue('level') if not level: level = 'vdebug' html_log = log.ram_handler.dump_log(html=True, level=level) html = jinja.render('log.html', title='log', content=html_log) return 'text/html', html @add_handler('gpl') def qute_gpl(_url): """Handler for qute://gpl. Return HTML content as string.""" return 'text/html', utils.read_file('html/license.html') @add_handler('help') def qute_help(url): """Handler for qute://help.""" urlpath = url.path() if not urlpath or urlpath == '/': urlpath = 'index.html' else: urlpath = urlpath.lstrip('/') if not docutils.docs_up_to_date(urlpath): message.error("Your documentation is outdated! Please re-run " "scripts/asciidoc2html.py.") path = 'html/doc/{}'.format(urlpath) if not urlpath.endswith('.html'): try: bdata = utils.read_file(path, binary=True) except OSError as e: raise QuteSchemeOSError(e) mimetype, _encoding = mimetypes.guess_type(urlpath) assert mimetype is not None, url return mimetype, bdata try: data = utils.read_file(path) except OSError: # No .html around, let's see if we find the asciidoc asciidoc_path = path.replace('.html', '.asciidoc') if asciidoc_path.startswith('html/doc/'): asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/') try: asciidoc = utils.read_file(asciidoc_path) except OSError: asciidoc = None if asciidoc is None: raise preamble = textwrap.dedent(""" There was an error loading the documentation! This most likely means the documentation was not generated properly. If you are running qutebrowser from the git repository, please (re)run scripts/asciidoc2html.py and reload this page. If you're running a released version this is a bug, please use :report to report it. Falling back to the plaintext version. 
            ---------------------------------------------------------------

        """)
        return 'text/plain', (preamble + asciidoc).encode('utf-8')
    else:
        return 'text/html', data


@add_handler('backend-warning')
def qute_backend_warning(_url):
    """Handler for qute://backend-warning.

    Renders a warning page about running with the legacy (QtWebKit) backend.
    """
    html = jinja.render('backend-warning.html',
                        distribution=version.distribution(),
                        Distribution=version.Distribution,
                        version=pkg_resources.parse_version,
                        title="Legacy backend warning")
    return 'text/html', html


# SECURITY NOTE(review): this endpoint mutates persistent configuration
# (save_yaml=True) based solely on URL query parameters, with no CSRF token
# or origin check. If any untrusted content can cause the browser to load a
# qute://settings/set?option=...&value=... URL (e.g. via a redirect), a
# malicious page could change arbitrary settings. Upstream qutebrowser later
# added a per-session CSRF token for exactly this handler — confirm whether
# this code path is reachable from web content and add an equivalent check.
def _qute_settings_set(url):
    """Handler for qute://settings/set.

    Reads the 'option' and 'value' query parameters and applies them to the
    running config; returns b'ok' or an error message as the page body.
    """
    query = QUrlQuery(url)
    option = query.queryItemValue('option', QUrl.FullyDecoded)
    value = query.queryItemValue('value', QUrl.FullyDecoded)

    # Refuse to turn off JavaScript from this page, since the settings page
    # itself needs JavaScript to work.
    # https://github.com/qutebrowser/qutebrowser/issues/727
    if option == 'content.javascript.enabled' and value == 'false':
        msg = ("Refusing to disable javascript via qute://settings "
               "as it needs javascript support.")
        message.error(msg)
        return 'text/html', b'error: ' + msg.encode('utf-8')

    try:
        config.instance.set_str(option, value, save_yaml=True)
        return 'text/html', b'ok'
    except configexc.Error as e:
        # Surface config validation errors both as a UI message and in the
        # response body so the settings page can display them.
        message.error(str(e))
        return 'text/html', b'error: ' + str(e).encode('utf-8')


@add_handler('settings')
def qute_settings(url):
    """Handler for qute://settings. View/change qute configuration."""
    # The /set sub-path performs the actual mutation; see the security note
    # on _qute_settings_set above.
    if url.path() == '/set':
        return _qute_settings_set(url)

    html = jinja.render('settings.html', title='settings',
                        configdata=configdata,
                        confget=config.instance.get_str)
    return 'text/html', html


@add_handler('bindings')
def qute_bindings(_url):
    """Handler for qute://bindings.
View keybindings.""" bindings = {} defaults = config.val.bindings.default modes = set(defaults.keys()).union(config.val.bindings.commands) modes.remove('normal') modes = ['normal'] + sorted(list(modes)) for mode in modes: bindings[mode] = config.key_instance.get_bindings_for(mode) html = jinja.render('bindings.html', title='Bindings', bindings=bindings) return 'text/html', html @add_handler('back') def qute_back(url): """Handler for qute://back. Simple page to free ram / lazy load a site, goes back on focusing the tab. """ html = jinja.render( 'back.html', title='Suspended: ' + urllib.parse.unquote(url.fragment())) return 'text/html', html @add_handler('configdiff') def qute_configdiff(url): """Handler for qute://configdiff.""" if url.path() == '/old': try: return 'text/html', configdiff.get_diff() except OSError as e: error = (b'Failed to read old config: ' + str(e.strerror).encode('utf-8')) return 'text/plain', error else: data = config.instance.dump_userconfig().encode('utf-8') return 'text/plain', data @add_handler('pastebin-version') def qute_pastebin_version(_url): """Handler that pastebins the version string.""" version.pastebin_version() return 'text/plain', b'Paste called.'
./CrossVul/dataset_final_sorted/CWE-79/py/bad_40_0
crossvul-python_data_good_1644_3
"""Tornado handlers for the contents web service.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json from tornado import gen, web from IPython.html.utils import url_path_join, url_escape from IPython.utils.jsonutil import date_default from IPython.html.base.handlers import ( IPythonHandler, APIHandler, json_errors, path_regex, ) def sort_key(model): """key function for case-insensitive sort by name and type""" iname = model['name'].lower() type_key = { 'directory' : '0', 'notebook' : '1', 'file' : '2', }.get(model['type'], '9') return u'%s%s' % (type_key, iname) def validate_model(model, expect_content): """ Validate a model returned by a ContentsManager method. If expect_content is True, then we expect non-null entries for 'content' and 'format'. """ required_keys = { "name", "path", "type", "writable", "created", "last_modified", "mimetype", "content", "format", } missing = required_keys - set(model.keys()) if missing: raise web.HTTPError( 500, u"Missing Model Keys: {missing}".format(missing=missing), ) maybe_none_keys = ['content', 'format'] if model['type'] == 'file': # mimetype should be populated only for file models maybe_none_keys.append('mimetype') if expect_content: errors = [key for key in maybe_none_keys if model[key] is None] if errors: raise web.HTTPError( 500, u"Keys unexpectedly None: {keys}".format(keys=errors), ) else: errors = { key: model[key] for key in maybe_none_keys if model[key] is not None } if errors: raise web.HTTPError( 500, u"Keys unexpectedly not None: {keys}".format(keys=errors), ) class ContentsHandler(APIHandler): SUPPORTED_METHODS = (u'GET', u'PUT', u'PATCH', u'POST', u'DELETE') def location_url(self, path): """Return the full URL location of a file. Parameters ---------- path : unicode The API path of the file, such as "foo/bar.txt". 
""" return url_escape(url_path_join( self.base_url, 'api', 'contents', path )) def _finish_model(self, model, location=True): """Finish a JSON request with a model, setting relevant headers, etc.""" if location: location = self.location_url(model['path']) self.set_header('Location', location) self.set_header('Last-Modified', model['last_modified']) self.set_header('Content-Type', 'application/json') self.finish(json.dumps(model, default=date_default)) @web.authenticated @json_errors @gen.coroutine def get(self, path=''): """Return a model for a file or directory. A directory model contains a list of models (without content) of the files and directories it contains. """ path = path or '' type = self.get_query_argument('type', default=None) if type not in {None, 'directory', 'file', 'notebook'}: raise web.HTTPError(400, u'Type %r is invalid' % type) format = self.get_query_argument('format', default=None) if format not in {None, 'text', 'base64'}: raise web.HTTPError(400, u'Format %r is invalid' % format) content = self.get_query_argument('content', default='1') if content not in {'0', '1'}: raise web.HTTPError(400, u'Content %r is invalid' % content) content = int(content) model = yield gen.maybe_future(self.contents_manager.get( path=path, type=type, format=format, content=content, )) if model['type'] == 'directory' and content: # group listing by type, then by name (case-insensitive) # FIXME: sorting should be done in the frontends model['content'].sort(key=sort_key) validate_model(model, expect_content=content) self._finish_model(model, location=False) @web.authenticated @json_errors @gen.coroutine def patch(self, path=''): """PATCH renames a file or directory without re-uploading content.""" cm = self.contents_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, u'JSON body missing') model = yield gen.maybe_future(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @gen.coroutine def 
_copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" self.log.info(u"Copying {copy_from} to {copy_to}".format( copy_from=copy_from, copy_to=copy_to or '', )) model = yield gen.maybe_future(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) @gen.coroutine def _upload(self, model, path): """Handle upload of a new file to path""" self.log.info(u"Uploading file to %s", path) model = yield gen.maybe_future(self.contents_manager.new(model, path)) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) @gen.coroutine def _new_untitled(self, path, type='', ext=''): """Create a new, empty untitled entity""" self.log.info(u"Creating new %s in %s", type or 'file', path) model = yield gen.maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext)) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) @gen.coroutine def _save(self, model, path): """Save an existing file.""" self.log.info(u"Saving file at %s", path) model = yield gen.maybe_future(self.contents_manager.save(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @web.authenticated @json_errors @gen.coroutine def post(self, path=''): """Create a new file in the specified path. POST creates new files. The server always decides on the name. POST /api/contents/path New untitled, empty file or directory. 
        POST /api/contents/path
          with body {"copy_from" : "/path/to/OtherNotebook.ipynb"}
          New copy of OtherNotebook in path
        """

        cm = self.contents_manager

        # POST targets a directory; uploading to an existing file path is a
        # client error (files are addressed with PUT instead).
        if cm.file_exists(path):
            raise web.HTTPError(400, "Cannot POST to files, use PUT instead.")

        if not cm.dir_exists(path):
            raise web.HTTPError(404, "No such directory: %s" % path)

        model = self.get_json_body()

        if model is not None:
            # An optional JSON body can request a copy of another file, or
            # hint the type/extension of the new untitled entity.
            copy_from = model.get('copy_from')
            ext = model.get('ext', '')
            type = model.get('type', '')
            if copy_from:
                yield self._copy(copy_from, path)
            else:
                yield self._new_untitled(path, type=type, ext=ext)
        else:
            # No body at all: create a new untitled file with defaults.
            yield self._new_untitled(path)

    @web.authenticated
    @json_errors
    @gen.coroutine
    def put(self, path=''):
        """Saves the file in the location specified by name and path.

        PUT is very similar to POST, but the requester specifies the name,
        whereas with POST, the server picks the name.

        PUT /api/contents/path/Name.ipynb
          Save notebook at ``path/Name.ipynb``. Notebook structure is specified
          in `content` key of JSON request body. If content is not specified,
          create a new empty notebook.
""" model = self.get_json_body() if model: if model.get('copy_from'): raise web.HTTPError(400, "Cannot copy with PUT, only POST") exists = yield gen.maybe_future(self.contents_manager.file_exists(path)) if exists: yield gen.maybe_future(self._save(model, path)) else: yield gen.maybe_future(self._upload(model, path)) else: yield gen.maybe_future(self._new_untitled(path)) @web.authenticated @json_errors @gen.coroutine def delete(self, path=''): """delete a file in the given path""" cm = self.contents_manager self.log.warn('delete %s', path) yield gen.maybe_future(cm.delete(path)) self.set_status(204) self.finish() class CheckpointsHandler(APIHandler): SUPPORTED_METHODS = ('GET', 'POST') @web.authenticated @json_errors @gen.coroutine def get(self, path=''): """get lists checkpoints for a file""" cm = self.contents_manager checkpoints = yield gen.maybe_future(cm.list_checkpoints(path)) data = json.dumps(checkpoints, default=date_default) self.finish(data) @web.authenticated @json_errors @gen.coroutine def post(self, path=''): """post creates a new checkpoint""" cm = self.contents_manager checkpoint = yield gen.maybe_future(cm.create_checkpoint(path)) data = json.dumps(checkpoint, default=date_default) location = url_path_join(self.base_url, 'api/contents', path, 'checkpoints', checkpoint['id']) self.set_header('Location', url_escape(location)) self.set_status(201) self.finish(data) class ModifyCheckpointsHandler(APIHandler): SUPPORTED_METHODS = ('POST', 'DELETE') @web.authenticated @json_errors @gen.coroutine def post(self, path, checkpoint_id): """post restores a file from a checkpoint""" cm = self.contents_manager yield gen.maybe_future(cm.restore_checkpoint(checkpoint_id, path)) self.set_status(204) self.finish() @web.authenticated @json_errors @gen.coroutine def delete(self, path, checkpoint_id): """delete clears a checkpoint for a given file""" cm = self.contents_manager yield gen.maybe_future(cm.delete_checkpoint(checkpoint_id, path)) self.set_status(204) 
        self.finish()


class NotebooksRedirectHandler(IPythonHandler):
    """Redirect the deprecated /api/notebooks API to /api/contents."""

    SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE')

    def get(self, path):
        # Log the deprecation, then redirect while preserving the trailing
        # path so deep links keep working.
        self.log.warn("/api/notebooks is deprecated, use /api/contents")
        self.redirect(url_path_join(
            self.base_url,
            'api/contents',
            path
        ))

    # Every verb behaves identically: warn + redirect to /api/contents.
    put = patch = post = delete = get


#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------

# Checkpoint ids are word characters and dashes (e.g. UUID-style ids).
_checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"

default_handlers = [
    (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler),
    (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex),
        ModifyCheckpointsHandler),
    (r"/api/contents%s" % path_regex, ContentsHandler),
    (r"/api/notebooks/?(.*)", NotebooksRedirectHandler),
]
./CrossVul/dataset_final_sorted/CWE-79/py/good_1644_3
crossvul-python_data_bad_1729_0
# coding: utf-8 """A tornado based IPython notebook server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import absolute_import, print_function import base64 import datetime import errno import importlib import io import json import logging import os import random import re import select import signal import socket import ssl import sys import threading import webbrowser from jinja2 import Environment, FileSystemLoader # Install the pyzmq ioloop. This has to be done before anything else from # tornado is imported. from zmq.eventloop import ioloop ioloop.install() # check for tornado 3.1.0 msg = "The IPython Notebook requires tornado >= 4.0" try: import tornado except ImportError: raise ImportError(msg) try: version_info = tornado.version_info except AttributeError: raise ImportError(msg + ", but you have < 1.1.0") if version_info < (4,0): raise ImportError(msg + ", but you have %s" % tornado.version) from tornado import httpserver from tornado import web from tornado.log import LogFormatter, app_log, access_log, gen_log from notebook import ( DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, __version__, ) from .base.handlers import Template404 from .log import log_request from .services.kernels.kernelmanager import MappingKernelManager from .services.config import ConfigManager from .services.contents.manager import ContentsManager from .services.contents.filemanager import FileContentsManager from .services.sessions.sessionmanager import SessionManager from .auth.login import LoginHandler from .auth.logout import LogoutHandler from .base.handlers import FileFindHandler, IPythonHandler from traitlets.config import Config from traitlets.config.application import catch_config_error, boolean_flag from jupyter_core.application import ( JupyterApp, base_flags, base_aliases, ) from jupyter_client import KernelManager from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, 
    NATIVE_KERNEL_NAME
from jupyter_client.session import Session
from nbformat.sign import NotebookNotary
from traitlets import (
    Dict, Unicode, Integer, List, Bool, Bytes, Instance,
    TraitError, Type,
)
from ipython_genutils import py3compat
from IPython.paths import get_ipython_dir
from jupyter_core.paths import jupyter_runtime_dir, jupyter_path
from notebook._sysinfo import get_sys_info

from .utils import url_path_join, check_pid

#-----------------------------------------------------------------------------
# Module globals
#-----------------------------------------------------------------------------

_examples = """
ipython notebook                       # start the notebook
ipython notebook --profile=sympy       # use the sympy profile
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
"""

#-----------------------------------------------------------------------------
# Helper functions
#-----------------------------------------------------------------------------

def random_ports(port, n):
    """Generate n candidate ports near the given port (lazily, as a generator).

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].

    NOTE(review): candidates are clamped to >= 1 but not to <= 65535, so for
    large ``port``/``n`` an invalid port could be yielded — callers
    presumably just skip ports that fail to bind; confirm.
    """
    for i in range(min(5, n)):
        yield port + i
    for i in range(n-5):
        yield max(1, port + random.randint(-2*n, 2*n))

def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component.

    ``name`` is a module path relative to the ``notebook`` package; the
    module's ``default_handlers`` list is returned.
    """
    name = 'notebook.' + name
    mod = __import__(name, fromlist=['default_handlers'])
    return mod.default_handlers


class DeprecationHandler(IPythonHandler):
    # Serves a shim for the old /static/widgets/js/* URLs: logs a warning and
    # re-exports the module from its new nbextensions location.
    def get(self, url_path):
        self.set_header("Content-Type", 'text/javascript')
        self.finish("""
            console.warn('`/static/widgets/js` is deprecated.
Use `/nbextensions/widgets/widgets/js` instead.'); define(['%s'], function(x) { return x; }); """ % url_path_join('nbextensions', 'widgets', 'widgets', url_path.rstrip('.js'))) self.log.warn('Deprecated widget Javascript path /static/widgets/js/*.js was used') #----------------------------------------------------------------------------- # The Tornado web application #----------------------------------------------------------------------------- class NotebookWebApplication(web.Application): def __init__(self, ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options): settings = self.init_settings( ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options) handlers = self.init_handlers(settings) super(NotebookWebApplication, self).__init__(handlers, **settings) def init_settings(self, ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options=None): _template_path = settings_overrides.get( "template_path", ipython_app.template_file_path, ) if isinstance(_template_path, py3compat.string_types): _template_path = (_template_path,) template_path = [os.path.expanduser(path) for path in _template_path] jenv_opt = jinja_env_options if jinja_env_options else {} env = Environment(loader=FileSystemLoader(template_path), **jenv_opt) sys_info = get_sys_info() if sys_info['commit_source'] == 'repository': # don't cache (rely on 304) when working from master version_hash = '' else: # reset the cache on server restart version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") settings = dict( # basics log_function=log_request, base_url=base_url, default_url=default_url, template_path=template_path, static_path=ipython_app.static_file_path, 
static_custom_path=ipython_app.static_custom_path, static_handler_class = FileFindHandler, static_url_prefix = url_path_join(base_url,'/static/'), static_handler_args = { # don't cache custom.js 'no_cache_paths': [url_path_join(base_url, 'static', 'custom')], }, version_hash=version_hash, # authentication cookie_secret=ipython_app.cookie_secret, login_url=url_path_join(base_url,'/login'), login_handler_class=ipython_app.login_handler_class, logout_handler_class=ipython_app.logout_handler_class, password=ipython_app.password, # managers kernel_manager=kernel_manager, contents_manager=contents_manager, session_manager=session_manager, kernel_spec_manager=kernel_spec_manager, config_manager=config_manager, # IPython stuff jinja_template_vars=ipython_app.jinja_template_vars, nbextensions_path=ipython_app.nbextensions_path, websocket_url=ipython_app.websocket_url, mathjax_url=ipython_app.mathjax_url, config=ipython_app.config, config_dir=ipython_app.config_dir, jinja2_env=env, terminals_available=False, # Set later if terminals are available ) # allow custom overrides for the tornado web app. settings.update(settings_overrides) return settings def init_handlers(self, settings): """Load the (URL pattern, handler) tuples for each component.""" # Order matters. The first handler to match the URL will handle the request. 
handlers = [] handlers.append((r'/deprecatedwidgets/(.*)', DeprecationHandler)) handlers.extend(load_handlers('tree.handlers')) handlers.extend([(r"/login", settings['login_handler_class'])]) handlers.extend([(r"/logout", settings['logout_handler_class'])]) handlers.extend(load_handlers('files.handlers')) handlers.extend(load_handlers('notebook.handlers')) handlers.extend(load_handlers('nbconvert.handlers')) handlers.extend(load_handlers('kernelspecs.handlers')) handlers.extend(load_handlers('edit.handlers')) handlers.extend(load_handlers('services.api.handlers')) handlers.extend(load_handlers('services.config.handlers')) handlers.extend(load_handlers('services.kernels.handlers')) handlers.extend(load_handlers('services.contents.handlers')) handlers.extend(load_handlers('services.sessions.handlers')) handlers.extend(load_handlers('services.nbconvert.handlers')) handlers.extend(load_handlers('services.kernelspecs.handlers')) handlers.extend(load_handlers('services.security.handlers')) # BEGIN HARDCODED WIDGETS HACK try: import ipywidgets handlers.append( (r"/nbextensions/widgets/(.*)", FileFindHandler, { 'path': ipywidgets.find_static_assets(), 'no_cache_paths': ['/'], # don't cache anything in nbextensions }), ) except: app_log.warn('ipywidgets package not installed. 
Widgets are unavailable.') # END HARDCODED WIDGETS HACK handlers.append( (r"/nbextensions/(.*)", FileFindHandler, { 'path': settings['nbextensions_path'], 'no_cache_paths': ['/'], # don't cache anything in nbextensions }), ) handlers.append( (r"/custom/(.*)", FileFindHandler, { 'path': settings['static_custom_path'], 'no_cache_paths': ['/'], # don't cache anything in custom }) ) # register base handlers last handlers.extend(load_handlers('base.handlers')) # set the URL that will be redirected from `/` handlers.append( (r'/?', web.RedirectHandler, { 'url' : settings['default_url'], 'permanent': False, # want 302, not 301 }) ) # prepend base_url onto the patterns that we match new_handlers = [] for handler in handlers: pattern = url_path_join(settings['base_url'], handler[0]) new_handler = tuple([pattern] + list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through new_handlers.append((r'(.*)', Template404)) return new_handlers class NbserverListApp(JupyterApp): version = __version__ description="List currently running notebook servers in this profile." flags = dict( json=({'NbserverListApp': {'json': True}}, "Produce machine-readable JSON output."), ) json = Bool(False, config=True, help="If True, each line of output will be a JSON object with the " "details from the server info file.") def start(self): if not self.json: print("Currently running servers:") for serverinfo in list_running_servers(self.runtime_dir): if self.json: print(json.dumps(serverinfo)) else: print(serverinfo['url'], "::", serverinfo['notebook_dir']) #----------------------------------------------------------------------------- # Aliases and Flags #----------------------------------------------------------------------------- flags = dict(base_flags) flags['no-browser']=( {'NotebookApp' : {'open_browser' : False}}, "Don't open the notebook in a browser after startup." 
) flags['pylab']=( {'NotebookApp' : {'pylab' : 'warn'}}, "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." ) flags['no-mathjax']=( {'NotebookApp' : {'enable_mathjax' : False}}, """Disable MathJax MathJax is the javascript library IPython uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """ ) # Add notebook manager flags flags.update(boolean_flag('script', 'FileContentsManager.save_script', 'DEPRECATED, IGNORED', 'DEPRECATED, IGNORED')) aliases = dict(base_aliases) aliases.update({ 'ip': 'NotebookApp.ip', 'port': 'NotebookApp.port', 'port-retries': 'NotebookApp.port_retries', 'transport': 'KernelManager.transport', 'keyfile': 'NotebookApp.keyfile', 'certfile': 'NotebookApp.certfile', 'notebook-dir': 'NotebookApp.notebook_dir', 'browser': 'NotebookApp.browser', 'pylab': 'NotebookApp.pylab', }) #----------------------------------------------------------------------------- # NotebookApp #----------------------------------------------------------------------------- class NotebookApp(JupyterApp): name = 'jupyter-notebook' version = __version__ description = """ The Jupyter HTML Notebook. This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client. 
""" examples = _examples aliases = aliases flags = flags classes = [ KernelManager, Session, MappingKernelManager, ContentsManager, FileContentsManager, NotebookNotary, KernelSpecManager, ] flags = Dict(flags) aliases = Dict(aliases) subcommands = dict( list=(NbserverListApp, NbserverListApp.description.splitlines()[0]), ) _log_formatter_cls = LogFormatter def _log_level_default(self): return logging.INFO def _log_datefmt_default(self): """Exclude date from default date format""" return "%H:%M:%S" def _log_format_default(self): """override default log format to include time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # create requested profiles by default, if they don't exist: auto_create = Bool(True) # file to be opened in the notebook server file_to_run = Unicode('', config=True) # Network related information allow_origin = Unicode('', config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. """ ) allow_origin_pat = Unicode('', config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. """ ) allow_credentials = Bool(False, config=True, help="Set the Access-Control-Allow-Credentials: true header" ) default_url = Unicode('/tree', config=True, help="The default URL to redirect to from `/`" ) ip = Unicode('localhost', config=True, help="The IP address the notebook server will listen on." ) def _ip_default(self): """Return localhost if available, 127.0.0.1 otherwise. On some (horribly broken) systems, localhost cannot be bound. 
""" s = socket.socket() try: s.bind(('localhost', 0)) except socket.error as e: self.log.warn("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s", e) return '127.0.0.1' else: s.close() return 'localhost' def _ip_changed(self, name, old, new): if new == u'*': self.ip = u'' port = Integer(8888, config=True, help="The port the notebook server will listen on." ) port_retries = Integer(50, config=True, help="The number of additional ports to try if the specified port is not available." ) certfile = Unicode(u'', config=True, help="""The full path to an SSL/TLS certificate file.""" ) keyfile = Unicode(u'', config=True, help="""The full path to a private key file for usage with SSL/TLS.""" ) cookie_secret_file = Unicode(config=True, help="""The file where the cookie secret is stored.""" ) def _cookie_secret_file_default(self): return os.path.join(self.runtime_dir, 'notebook_cookie_secret') cookie_secret = Bytes(b'', config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the Notebook. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). 
""" ) def _cookie_secret_default(self): if os.path.exists(self.cookie_secret_file): with io.open(self.cookie_secret_file, 'rb') as f: return f.read() else: secret = base64.encodestring(os.urandom(1024)) self._write_cookie_secret_file(secret) return secret def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) with io.open(self.cookie_secret_file, 'wb') as f: f.write(secret) try: os.chmod(self.cookie_secret_file, 0o600) except OSError: self.log.warn( "Could not set permissions on %s", self.cookie_secret_file ) password = Unicode(u'', config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from notebook.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. """ ) open_browser = Bool(True, config=True, help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (NotebookApp.browser) configuration option. """) browser = Unicode(u'', config=True, help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. """) webapp_settings = Dict(config=True, help="DEPRECATED, use tornado_settings" ) def _webapp_settings_changed(self, name, old, new): self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n") self.tornado_settings = new tornado_settings = Dict(config=True, help="Supply overrides for the tornado.web.Application that the " "IPython notebook uses.") ssl_options = Dict(config=True, help="""Supply SSL options for the tornado HTTPServer. 
See the tornado docs for details.""") jinja_environment_options = Dict(config=True, help="Supply extra arguments that will be passed to Jinja environment.") jinja_template_vars = Dict( config=True, help="Extra variables to supply to jinja templates when rendering.", ) enable_mathjax = Bool(True, config=True, help="""Whether to enable MathJax for typesetting math/TeX MathJax is the javascript library IPython uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """ ) def _enable_mathjax_changed(self, name, old, new): """set mathjax url to empty if mathjax is disabled""" if not new: self.mathjax_url = u'' base_url = Unicode('/', config=True, help='''The base URL for the notebook server. Leading and trailing slashes can be omitted, and will automatically be added. ''') def _base_url_changed(self, name, old, new): if not new.startswith('/'): self.base_url = '/'+new elif not new.endswith('/'): self.base_url = new+'/' base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""") def _base_project_url_changed(self, name, old, new): self.log.warn("base_project_url is deprecated, use base_url") self.base_url = new extra_static_paths = List(Unicode(), config=True, help="""Extra paths to search for serving static files. 
This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython""" ) @property def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help="""Path to search for custom.js, css""" ) def _static_custom_path_default(self): return [ os.path.join(d, 'custom') for d in ( self.config_dir, # FIXME: serve IPython profile while we don't have `jupyter migrate` os.path.join(get_ipython_dir(), 'profile_default', 'static'), DEFAULT_STATIC_FILES_PATH) ] extra_template_paths = List(Unicode(), config=True, help="""Extra paths to search for serving jinja templates. Can be used to override templates from notebook.templates.""" ) @property def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST extra_nbextensions_path = List(Unicode(), config=True, help="""extra paths to look for Javascript notebook extensions""" ) @property def nbextensions_path(self): """The path to look for Javascript notebook extensions""" path = self.extra_nbextensions_path + jupyter_path('nbextensions') # FIXME: remove IPython nbextensions path once migration is setup path.append(os.path.join(get_ipython_dir(), 'nbextensions')) return path websocket_url = Unicode("", config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
Should be in the form of an HTTP origin: ws[s]://hostname[:port] """ ) mathjax_url = Unicode("", config=True, help="""The url for MathJax.js.""" ) def _mathjax_url_default(self): if not self.enable_mathjax: return u'' static_url_prefix = self.tornado_settings.get("static_url_prefix", url_path_join(self.base_url, "static") ) return url_path_join(static_url_prefix, 'components', 'MathJax', 'MathJax.js') def _mathjax_url_changed(self, name, old, new): if new and not self.enable_mathjax: # enable_mathjax=False overrides mathjax_url self.mathjax_url = u'' else: self.log.info("Using MathJax: %s", new) contents_manager_class = Type( default_value=FileContentsManager, klass=ContentsManager, config=True, help='The notebook manager class to use.' ) kernel_manager_class = Type( default_value=MappingKernelManager, config=True, help='The kernel manager class to use.' ) session_manager_class = Type( default_value=SessionManager, config=True, help='The session manager class to use.' ) config_manager_class = Type( default_value=ConfigManager, config = True, help='The config manager class to use' ) kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) kernel_spec_manager_class = Type( default_value=KernelSpecManager, config=True, help=""" The kernel spec manager class to use. Should be a subclass of `jupyter_client.kernelspec.KernelSpecManager`. The Api of KernelSpecManager is provisional and might change without warning between this version of IPython and the next stable one. """ ) login_handler_class = Type( default_value=LoginHandler, klass=web.RequestHandler, config=True, help='The login handler class to use.', ) logout_handler_class = Type( default_value=LogoutHandler, klass=web.RequestHandler, config=True, help='The logout handler class to use.', ) trust_xheaders = Bool(False, config=True, help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" "sent by the upstream reverse proxy. 
Necessary if the proxy handles SSL") ) info_file = Unicode() def _info_file_default(self): info_file = "nbserver-%s.json" % os.getpid() return os.path.join(self.runtime_dir, info_file) pylab = Unicode('disabled', config=True, help=""" DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. """ ) def _pylab_changed(self, name, old, new): """when --pylab is specified, display a warning and exit""" if new != 'warn': backend = ' %s' % new else: backend = '' self.log.error("Support for specifying --pylab on the command line has been removed.") self.log.error( "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend) ) self.exit(1) notebook_dir = Unicode(config=True, help="The directory to use for notebooks and kernels." ) def _notebook_dir_default(self): if self.file_to_run: return os.path.dirname(os.path.abspath(self.file_to_run)) else: return py3compat.getcwd() def _notebook_dir_changed(self, name, old, new): """Do a bit of validation of the notebook dir.""" if not os.path.isabs(new): # If we receive a non-absolute path, make it absolute. self.notebook_dir = os.path.abspath(new) return if not os.path.isdir(new): raise TraitError("No such notebook dir: %r" % new) # setting App.notebook_dir implies setting notebook and kernel dirs as well self.config.FileContentsManager.root_dir = new self.config.MappingKernelManager.root_dir = new server_extensions = List(Unicode(), config=True, help=("Python modules to load as notebook server extensions. 
" "This is an experimental API, and may change in future releases.") ) reraise_server_extension_failures = Bool( False, config=True, help="Reraise exceptions encountered loading server extensions?", ) def parse_command_line(self, argv=None): super(NotebookApp, self).parse_command_line(argv) if self.extra_args: arg0 = self.extra_args[0] f = os.path.abspath(arg0) self.argv.remove(arg0) if not os.path.exists(f): self.log.critical("No such file or directory: %s", f) self.exit(1) # Use config here, to ensure that it takes higher priority than # anything that comes from the profile. c = Config() if os.path.isdir(f): c.NotebookApp.notebook_dir = f elif os.path.isfile(f): c.NotebookApp.file_to_run = f self.update_config(c) def init_configurables(self): self.kernel_spec_manager = self.kernel_spec_manager_class( parent=self, ) self.kernel_manager = self.kernel_manager_class( parent=self, log=self.log, connection_dir=self.runtime_dir, kernel_spec_manager=self.kernel_spec_manager, ) self.contents_manager = self.contents_manager_class( parent=self, log=self.log, ) self.session_manager = self.session_manager_class( parent=self, log=self.log, kernel_manager=self.kernel_manager, contents_manager=self.contents_manager, ) self.config_manager = self.config_manager_class( parent=self, log=self.log, config_dir=os.path.join(self.config_dir, 'nbconfig'), ) def init_logging(self): # This prevents double log messages because tornado use a root logger that # self.log is a child of. The logging module dipatches log messages to a log # and all of its ancenstors until propagate is set to False. self.log.propagate = False for log in app_log, access_log, gen_log: # consistent log output name (NotebookApp instead of tornado.access, etc.) 
log.name = self.log.name # hook up tornado 3's loggers to our app handlers logger = logging.getLogger('tornado') logger.propagate = True logger.parent = self.log logger.setLevel(self.log.level) def init_webapp(self): """initialize tornado webapp and httpserver""" self.tornado_settings['allow_origin'] = self.allow_origin if self.allow_origin_pat: self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat) self.tornado_settings['allow_credentials'] = self.allow_credentials # ensure default_url starts with base_url if not self.default_url.startswith(self.base_url): self.default_url = url_path_join(self.base_url, self.default_url) self.web_app = NotebookWebApplication( self, self.kernel_manager, self.contents_manager, self.session_manager, self.kernel_spec_manager, self.config_manager, self.log, self.base_url, self.default_url, self.tornado_settings, self.jinja_environment_options ) ssl_options = self.ssl_options if self.certfile: ssl_options['certfile'] = self.certfile if self.keyfile: ssl_options['keyfile'] = self.keyfile if not ssl_options: # None indicates no SSL config ssl_options = None else: # Disable SSLv3, since its use is discouraged. ssl_options['ssl_version']=ssl.PROTOCOL_TLSv1 self.login_handler_class.validate_security(self, ssl_options=ssl_options) self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, xheaders=self.trust_xheaders) success = None for port in random_ports(self.port, self.port_retries+1): try: self.http_server.listen(port, self.ip) except socket.error as e: if e.errno == errno.EADDRINUSE: self.log.info('The port %i is already in use, trying another random port.' 
% port) continue elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): self.log.warn("Permission to listen on port %i denied" % port) continue else: raise else: self.port = port success = True break if not success: self.log.critical('ERROR: the notebook server could not be started because ' 'no available port could be found.') self.exit(1) @property def display_url(self): ip = self.ip if self.ip else '[all ip addresses on your system]' return self._url(ip) @property def connection_url(self): ip = self.ip if self.ip else 'localhost' return self._url(ip) def _url(self, ip): proto = 'https' if self.certfile else 'http' return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url) def init_terminals(self): try: from .terminal import initialize initialize(self.web_app, self.notebook_dir, self.connection_url) self.web_app.settings['terminals_available'] = True except ImportError as e: log = self.log.debug if sys.platform == 'win32' else self.log.warn log("Terminals not available (error was %s)", e) def init_signal(self): if not sys.platform.startswith('win') and sys.stdin.isatty(): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) if hasattr(signal, 'SIGUSR1'): # Windows doesn't support SIGUSR1 signal.signal(signal.SIGUSR1, self._signal_info) if hasattr(signal, 'SIGINFO'): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) def _handle_sigint(self, sig, frame): """SIGINT handler spawns confirmation dialog""" # register more forceful signal handler for ^C^C case signal.signal(signal.SIGINT, self._signal_stop) # request confirmation dialog in bg thread, to avoid # blocking the App thread = threading.Thread(target=self._confirm_exit) thread.daemon = True thread.start() def _restore_sigint_handler(self): """callback for restoring original SIGINT handler""" signal.signal(signal.SIGINT, self._handle_sigint) def _confirm_exit(self): """confirm shutdown on ^C A second ^C, or answering 
'y' within 5s will cause shutdown, otherwise original SIGINT handler will be restored. This doesn't work on Windows. """ info = self.log.info info('interrupted') print(self.notebook_info()) sys.stdout.write("Shutdown this notebook server (y/[n])? ") sys.stdout.flush() r,w,x = select.select([sys.stdin], [], [], 5) if r: line = sys.stdin.readline() if line.lower().startswith('y') and 'n' not in line.lower(): self.log.critical("Shutdown confirmed") ioloop.IOLoop.current().stop() return else: print("No answer for 5s:", end=' ') print("resuming operation...") # no answer, or answer is no: # set it back to original SIGINT handler # use IOLoop.add_callback because signal.signal must be called # from main thread ioloop.IOLoop.current().add_callback(self._restore_sigint_handler) def _signal_stop(self, sig, frame): self.log.critical("received signal %s, stopping", sig) ioloop.IOLoop.current().stop() def _signal_info(self, sig, frame): print(self.notebook_info()) def init_components(self): """Check the components submodule, and warn if it's unclean""" # TODO: this should still check, but now we use bower, not git submodule pass def init_server_extensions(self): """Load any extensions specified by config. Import the module, then call the load_jupyter_server_extension function, if one exists. The extension API is experimental, and may change in future releases. 
""" for modulename in self.server_extensions: try: mod = importlib.import_module(modulename) func = getattr(mod, 'load_jupyter_server_extension', None) if func is not None: func(self) except Exception: if self.reraise_server_extension_failures: raise self.log.warn("Error loading server extension %s", modulename, exc_info=True) @catch_config_error def initialize(self, argv=None): super(NotebookApp, self).initialize(argv) self.init_logging() self.init_configurables() self.init_components() self.init_webapp() self.init_terminals() self.init_signal() self.init_server_extensions() def cleanup_kernels(self): """Shutdown all kernels. The kernels will shutdown themselves when this process no longer exists, but explicit shutdown allows the KernelManagers to cleanup the connection files. """ self.log.info('Shutting down kernels') self.kernel_manager.shutdown_all() def notebook_info(self): "Return the current working directory and the server url information" info = self.contents_manager.info_string() + "\n" info += "%d active kernels \n" % len(self.kernel_manager._kernels) return info + "The IPython Notebook is running at: %s" % self.display_url def server_info(self): """Return a JSONable dict of information about this server.""" return {'url': self.connection_url, 'hostname': self.ip if self.ip else 'localhost', 'port': self.port, 'secure': bool(self.certfile), 'base_url': self.base_url, 'notebook_dir': os.path.abspath(self.notebook_dir), 'pid': os.getpid() } def write_server_info_file(self): """Write the result of server_info() to the JSON file info_file.""" with open(self.info_file, 'w') as f: json.dump(self.server_info(), f, indent=2) def remove_server_info_file(self): """Remove the nbserver-<pid>.json file created for this server. Ignores the error raised when the file has already been removed. 
""" try: os.unlink(self.info_file) except OSError as e: if e.errno != errno.ENOENT: raise def start(self): """ Start the IPython Notebook server app, after initialization This method takes no arguments so all configuration and initialization must be done prior to calling this method.""" super(NotebookApp, self).start() info = self.log.info for line in self.notebook_info().split("\n"): info(line) info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") self.write_server_info_file() if self.open_browser or self.file_to_run: try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: self.log.warn('No web browser found: %s.' % e) browser = None if self.file_to_run: if not os.path.exists(self.file_to_run): self.log.critical("%s does not exist" % self.file_to_run) self.exit(1) relpath = os.path.relpath(self.file_to_run, self.notebook_dir) uri = url_path_join('notebooks', *relpath.split(os.sep)) else: uri = 'tree' if browser: b = lambda : browser.open(url_path_join(self.connection_url, uri), new=2) threading.Thread(target=b).start() self.io_loop = ioloop.IOLoop.current() if sys.platform.startswith('win'): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop pc = ioloop.PeriodicCallback(lambda : None, 5000) pc.start() try: self.io_loop.start() except KeyboardInterrupt: info("Interrupted...") finally: self.cleanup_kernels() self.remove_server_info_file() def stop(self): def _stop(): self.http_server.stop() self.io_loop.stop() self.io_loop.add_callback(_stop) def list_running_servers(runtime_dir=None): """Iterate over the server info files of running notebook servers. Given a profile name, find nbserver-* files in the security directory of that profile, and yield dicts of their information, each one pertaining to a currently running notebook server instance. 
""" if runtime_dir is None: runtime_dir = jupyter_runtime_dir() # The runtime dir might not exist if not os.path.isdir(runtime_dir): return for file in os.listdir(runtime_dir): if file.startswith('nbserver-'): with io.open(os.path.join(runtime_dir, file), encoding='utf-8') as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field if ('pid' in info) and check_pid(info['pid']): yield info else: # If the process has died, try to delete its info file try: os.unlink(file) except OSError: pass # TODO: This should warn or log or something #----------------------------------------------------------------------------- # Main entry point #----------------------------------------------------------------------------- main = launch_new_instance = NotebookApp.launch_instance
./CrossVul/dataset_final_sorted/CWE-79/py/bad_1729_0
crossvul-python_data_bad_4208_1
# -*- coding: iso-8859-1 -*- """ MoinMoin - Multiple configuration handler and Configuration defaults class @copyright: 2000-2004 Juergen Hermann <jh@web.de>, 2005-2008 MoinMoin:ThomasWaldmann. 2008 MoinMoin:JohannesBerg @license: GNU GPL, see COPYING for details. """ import hashlib import re import os import sys import time from MoinMoin import log logging = log.getLogger(__name__) from MoinMoin import config, error, util, wikiutil, web from MoinMoin import datastruct from MoinMoin.auth import MoinAuth import MoinMoin.auth as authmodule import MoinMoin.events as events from MoinMoin.events import PageChangedEvent, PageRenamedEvent from MoinMoin.events import PageDeletedEvent, PageCopiedEvent from MoinMoin.events import PageRevertedEvent, FileAttachedEvent import MoinMoin.web.session from MoinMoin.packages import packLine from MoinMoin.security import AccessControlList _url_re_cache = None _farmconfig_mtime = None _config_cache = {} def _importConfigModule(name): """ Import and return configuration module and its modification time Handle all errors except ImportError, because missing file is not always an error. @param name: module name @rtype: tuple @return: module, modification time """ try: module = __import__(name, globals(), {}) mtime = os.path.getmtime(module.__file__) except ImportError: raise except IndentationError, err: logging.exception('Your source code / config file is not correctly indented!') msg = """IndentationError: %(err)s The configuration files are Python modules. Therefore, whitespace is important. Make sure that you use only spaces, no tabs are allowed here! You have to use four spaces at the beginning of the line mostly. 
""" % { 'err': err, } raise error.ConfigurationError(msg) except Exception, err: logging.exception('An exception happened.') msg = '%s: %s' % (err.__class__.__name__, str(err)) raise error.ConfigurationError(msg) return module, mtime def _url_re_list(): """ Return url matching regular expression Import wikis list from farmconfig on the first call and compile the regexes. Later just return the cached regex list. @rtype: list of tuples of (name, compiled re object) @return: url to wiki config name matching list """ global _url_re_cache, _farmconfig_mtime if _url_re_cache is None: try: farmconfig, _farmconfig_mtime = _importConfigModule('farmconfig') except ImportError, err: if 'farmconfig' in str(err): # we failed importing farmconfig logging.debug("could not import farmconfig, mapping all URLs to wikiconfig") _farmconfig_mtime = 0 _url_re_cache = [('wikiconfig', re.compile(r'.')), ] # matches everything else: # maybe there was a failing import statement inside farmconfig raise else: logging.info("using farm config: %s" % os.path.abspath(farmconfig.__file__)) try: cache = [] for name, regex in farmconfig.wikis: cache.append((name, re.compile(regex))) _url_re_cache = cache except AttributeError: logging.error("required 'wikis' list missing in farmconfig") msg = """ Missing required 'wikis' list in 'farmconfig.py'. If you run a single wiki you do not need farmconfig.py. Delete it and use wikiconfig.py. """ raise error.ConfigurationError(msg) return _url_re_cache def _makeConfig(name): """ Create and return a config instance Timestamp config with either module mtime or farmconfig mtime. This mtime can be used later to invalidate older caches. 
        @param name: module name
        @rtype: DefaultConfig sub class instance
        @return: new configuration instance
    """
    global _farmconfig_mtime
    try:
        module, mtime = _importConfigModule(name)
        configClass = getattr(module, 'Config')
        cfg = configClass(name)
        # the newer of wiki config / farm config mtime invalidates caches
        cfg.cfg_mtime = max(mtime, _farmconfig_mtime)
        logging.info("using wiki config: %s" % os.path.abspath(module.__file__))
    except ImportError, err:
        logging.exception('Could not import.')
        msg = """ImportError: %(err)s

Check that the file is in the same directory as the server script. If
it is not, you must add the path of the directory where the file is
located to the python path in the server script. See the comments at
the top of the server script.

Check that the configuration file name is either "wikiconfig.py" or the
module name specified in the wikis list in farmconfig.py. Note that the
module name does not include the ".py" suffix.
""" % {
    'err': err,
}
        raise error.ConfigurationError(msg)
    except AttributeError, err:
        logging.exception('An exception occurred.')
        msg = """AttributeError: %(err)s

Could not find required "Config" class in "%(name)s.py".

This might happen if you are trying to use a pre 1.3 configuration file, or
made a syntax or spelling error.

Another reason for this could be a name clash. It is not possible to have
config names like e.g. stats.py - because that collides with MoinMoin/stats/ -
have a look into your MoinMoin code directory what other names are NOT
possible.

Please check your configuration file. As an example for correct syntax,
use the wikiconfig.py file from the distribution.
""" % {
    'name': name,
    'err': err,
}
        raise error.ConfigurationError(msg)

    return cfg


def _getConfigName(url):
    """ Return config name for url or raise """
    for name, regex in _url_re_list():
        match = regex.match(url)
        if match:
            return name
    # no farmconfig regex matched this url
    raise error.NoConfigMatchedError


def getConfig(url):
    """ Return cached config instance for url or create new one

        If called by many threads in the same time multiple config
        instances might be created. The first created item will be
        returned, using dict.setdefault.

        @param url: the url from request, possibly matching specific wiki
        @rtype: DefaultConfig subclass instance
        @return: config object for specific wiki
    """
    cfgName = _getConfigName(url)
    try:
        cfg = _config_cache[cfgName]
    except KeyError:
        cfg = _makeConfig(cfgName)
        # setdefault makes the first stored instance win under concurrency
        cfg = _config_cache.setdefault(cfgName, cfg)
    return cfg


# This is a way to mark some text for the gettext tools so that they don't
# get orphaned. See http://www.python.org/doc/current/lib/node278.html.
def _(text):
    return text


class CacheClass:
    """ just a container for stuff we cache """
    pass


class ConfigFunctionality(object):
    """ Configuration base class with config class behaviour.

        This class contains the functionality for the DefaultConfig
        class for the benefit of the WikiConfig macro.
    """
    # attributes of this class that should not be shown
    # in the WikiConfig() macro.
    cfg_mtime = None
    siteid = None
    cache = None
    mail_enabled = None
    jabber_enabled = None
    auth_can_logout = None
    auth_have_login = None
    auth_login_inputs = None
    _site_plugin_lists = None
    _iwid = None
    _iwid_full = None
    xapian_searchers = None
    moinmoin_dir = None
    # will be lazily loaded by interwiki code when needed (?)
    shared_intermap_files = None

    def __init__(self, siteid):
        """ Init Config instance

            Validates and normalizes the settings the subclass (the admin's
            wikiconfig) provided: directories, unicode decoding, precompiled
            regexes, legacy option migration.
        """
        self.siteid = siteid
        self.cache = CacheClass()

        from MoinMoin.Page import ItemCache
        self.cache.meta = ItemCache('meta')
        self.cache.pagelists = ItemCache('pagelists')

        if self.config_check_enabled:
            self._config_check()

        # define directories
        self.moinmoin_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
        data_dir = os.path.normpath(self.data_dir)
        self.data_dir = data_dir
        # derive user_dir / cache_dir / plugin_dir from data_dir unless set explicitly
        for dirname in ('user', 'cache', 'plugin'):
            name = dirname + '_dir'
            if not getattr(self, name, None):
                setattr(self, name, os.path.abspath(os.path.join(data_dir, dirname)))
        # directories below cache_dir (using __dirname__ to avoid conflicts)
        for dirname in ('session', ):
            name = dirname + '_dir'
            if not getattr(self, name, None):
                setattr(self, name, os.path.abspath(os.path.join(self.cache_dir, '__%s__' % dirname)))

        # Try to decode certain names which allow unicode
        self._decode()

        # After that, pre-compile some regexes
        self.cache.page_category_regex = re.compile(self.page_category_regex, re.UNICODE)
        self.cache.page_dict_regex = re.compile(self.page_dict_regex, re.UNICODE)
        self.cache.page_group_regex = re.compile(self.page_group_regex, re.UNICODE)
        self.cache.page_template_regex = re.compile(self.page_template_regex, re.UNICODE)
        # the ..._regexact versions only match if nothing is left (exact match)
        self.cache.page_category_regexact = re.compile(u'^%s$' % self.page_category_regex, re.UNICODE)
        self.cache.page_dict_regexact = re.compile(u'^%s$' % self.page_dict_regex, re.UNICODE)
        self.cache.page_group_regexact = re.compile(u'^%s$' % self.page_group_regex, re.UNICODE)
        self.cache.page_template_regexact = re.compile(u'^%s$' % self.page_template_regex, re.UNICODE)

        # ua_spiders may be empty/None -> no spider matching at all
        self.cache.ua_spiders = self.ua_spiders and re.compile(self.ua_spiders, re.IGNORECASE)

        self._check_directories()

        if not isinstance(self.superuser, list):
            msg = """The superuser setting in your wiki configuration is not a list (e.g.
    ['Sample User', 'AnotherUser']).
    Please change it in your wiki configuration and try again."""
            raise error.ConfigurationError(msg)

        if not isinstance(self.actions_superuser, list):
            msg = """The actions_superuser setting in your wiki configuration is not a list (e.g.
    ['newaccount', 'some_other_action']).
    Please change it in your wiki configuration and try again."""
            raise error.ConfigurationError(msg)

        # moin < 1.9 used cookie_lifetime = <float> (but converted it to int) for logged-in users and
        # anonymous_session_lifetime = <float> or None for anon users
        # moin >= 1.9 uses cookie_lifetime = (<float>, <float>) - first is anon, second is logged-in
        if not (isinstance(self.cookie_lifetime, tuple) and len(self.cookie_lifetime) == 2):
            logging.error("wiki configuration has an invalid setting: " +
                          "cookie_lifetime = %r" % (self.cookie_lifetime, ))
            try:
                anon_lifetime = self.anonymous_session_lifetime
                logging.warning("wiki configuration has an unsupported setting: " +
                                "anonymous_session_lifetime = %r - " % anon_lifetime +
                                "please remove it.")
                if anon_lifetime is None:
                    anon_lifetime = 0
                anon_lifetime = float(anon_lifetime)
            except:
                # if anything goes wrong, use default value
                anon_lifetime = 0
            try:
                logged_in_lifetime = int(self.cookie_lifetime)
            except:
                # if anything goes wrong, use default value
                logged_in_lifetime = 12
            # migrate the legacy pair of settings to the 1.9 tuple form
            self.cookie_lifetime = (anon_lifetime, logged_in_lifetime)
            logging.warning("using cookie_lifetime = %r - " % (self.cookie_lifetime, ) +
                            "please fix your wiki configuration.")

        self._loadPluginModule()

        # Preparse user dicts
        self._fillDicts()

        # Normalize values
        self.language_default = self.language_default.lower()

        # Use site name as default name-logo
        if self.logo_string is None:
            self.logo_string = self.sitename

        # Check for needed modules

        # FIXME: maybe we should do this check later, just before a
        # chart is needed, maybe in the chart module, instead doing it
        # for each request. But this require a large refactoring of
        # current code.
        if self.chart_options:
            try:
                import gdchart
            except ImportError:
                # silently disable charts if gdchart is not installed
                self.chart_options = None

        # 'setuid' special auth method auth method can log out
        self.auth_can_logout = ['setuid']
        self.auth_login_inputs = []
        found_names = []
        for auth in self.auth:
            if not auth.name:
                raise error.ConfigurationError("Auth methods must have a name.")
            if auth.name in found_names:
                raise error.ConfigurationError("Auth method names must be unique.")
            found_names.append(auth.name)
            if auth.logout_possible and auth.name:
                self.auth_can_logout.append(auth.name)
            for input in auth.login_inputs:
                if not input in self.auth_login_inputs:
                    self.auth_login_inputs.append(input)
        self.auth_have_login = len(self.auth_login_inputs) > 0
        self.auth_methods = found_names

        # internal dict for plugin `modules' lists
        self._site_plugin_lists = {}

        # we replace any string placeholders with config values
        # e.g u'%(page_front_page)s' % self
        self.navi_bar = [elem % self for elem in self.navi_bar]

        # check if python-xapian is installed
        if self.xapian_search:
            try:
                import xapian
            except ImportError, err:
                self.xapian_search = False
                logging.error("xapian_search was auto-disabled because python-xapian is not installed [%s]." % str(err))

        # list to cache xapian searcher objects
        self.xapian_searchers = []

        # check if mail is possible and set flag:
        self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from
        # normalize to a plain True/False value
        self.mail_enabled = self.mail_enabled and True or False

        # check if jabber bot is available and set flag:
        self.jabber_enabled = self.notification_bot_uri is not None

        # if we are to use the jabber bot, instantiate a server object for future use
        if self.jabber_enabled:
            from xmlrpclib import Server
            self.notification_server = Server(self.notification_bot_uri, )

        # Cache variables for the properties below
        self._iwid = self._iwid_full = self._meta_dict = None

        self.cache.acl_rights_before = AccessControlList(self, [self.acl_rights_before])
        self.cache.acl_rights_default = AccessControlList(self, [self.acl_rights_default])
        self.cache.acl_rights_after = AccessControlList(self, [self.acl_rights_after])

        action_prefix = self.url_prefix_action
        if action_prefix is not None and action_prefix.endswith('/'): # make sure there is no trailing '/'
            self.url_prefix_action = action_prefix[:-1]

        if self.url_prefix_local is None:
            self.url_prefix_local = self.url_prefix_static

        if self.url_prefix_fckeditor is None:
            self.url_prefix_fckeditor = self.url_prefix_local + '/applets/FCKeditor'

        if self.secrets is None:  # admin did not setup a real secret, so make up something
            self.secrets = self.calc_secrets()

        secret_key_names = ['action/cache', 'wikiutil/tickets', 'xmlrpc/ProcessMail', 'xmlrpc/RemoteScript', ]
        if self.jabber_enabled:
            secret_key_names.append('jabberbot')
        if self.textchas:
            secret_key_names.append('security/textcha')

        secret_min_length = 10
        if isinstance(self.secrets, str):
            if len(self.secrets) < secret_min_length:
                raise error.ConfigurationError("The secrets = '...' wiki config setting is a way too short string (minimum length is %d chars)!" % (
                    secret_min_length))
            # for lazy people: set all required secrets to same value
            secrets = {}
            for key in secret_key_names:
                secrets[key] = self.secrets
            self.secrets = secrets

        # we check if we have all secrets we need and that they have minimum length
        for secret_key_name in secret_key_names:
            try:
                secret = self.secrets[secret_key_name]
                if len(secret) < secret_min_length:
                    raise ValueError
            except (KeyError, ValueError):
                raise error.ConfigurationError("You must set a (at least %d chars long) secret string for secrets['%s']!" % (
                    secret_min_length, secret_key_name))

        if self.password_scheme not in config.password_schemes_configurable:
            raise error.ConfigurationError("not supported: password_scheme = %r" % self.password_scheme)

        if self.passlib_support:
            try:
                from passlib.context import CryptContext
            except ImportError, err:
                raise error.ConfigurationError("Wiki is configured to use passlib, but importing passlib failed [%s]!" % str(err))
            try:
                self.cache.pwd_context = CryptContext(**self.passlib_crypt_context)
            except (ValueError, KeyError, TypeError, UserWarning), err:
                # ValueError: wrong configuration values
                # KeyError: unsupported hash (seen with passlib 1.3)
                # TypeError: configuration value has wrong type
                raise error.ConfigurationError("passlib_crypt_context configuration is invalid [%s]." % str(err))
        elif self.password_scheme == '{PASSLIB}':
            raise error.ConfigurationError("passlib_support is switched off, thus you can't use password_scheme = '{PASSLIB}'.")

    def calc_secrets(self):
        """ make up some 'secret' using some config values """
        varnames = ['data_dir', 'data_underlay_dir', 'language_default',
                    'mail_smarthost', 'mail_from', 'page_front_page',
                    'theme_default', 'sitename', 'logo_string',
                    'interwikiname', 'user_homewiki', 'acl_rights_before', ]
        secret = ''
        for varname in varnames:
            var = getattr(self, varname, None)
            if isinstance(var, (str, unicode)):
                secret += repr(var)
        return secret

    # backing attribute for the lazy meta_dict property below
    _meta_dict = None
    def load_meta_dict(self):
        """ The meta_dict contains meta data about the wiki instance. """
        if self._meta_dict is None:
            self._meta_dict = wikiutil.MetaDict(os.path.join(self.data_dir, 'meta'), self.cache_dir)
        return self._meta_dict
    meta_dict = property(load_meta_dict)

    # lazily load iwid(_full)
    def make_iwid_property(attr):
        def getter(self):
            if getattr(self, attr, None) is None:
                self.load_IWID()
            return getattr(self, attr)
        return property(getter)
    iwid = make_iwid_property("_iwid")
    iwid_full = make_iwid_property("_iwid_full")

    # lazily create a list of event handlers
    _event_handlers = None
    def make_event_handlers_prop():
        def getter(self):
            if self._event_handlers is None:
                self._event_handlers = events.get_handlers(self)
            return self._event_handlers

        def setter(self, new_handlers):
            self._event_handlers = new_handlers

        return property(getter, setter)
    event_handlers = make_event_handlers_prop()

    def load_IWID(self):
        """ Loads the InterWikiID of this instance. It is used to identify the instance
            globally.
            The IWID is available as cfg.iwid
            The full IWID containing the interwiki name is available as cfg.iwid_full
            This method is called by the property.
""" try: iwid = self.meta_dict['IWID'] except KeyError: iwid = util.random_string(16).encode("hex") + "-" + str(int(time.time())) self.meta_dict['IWID'] = iwid self.meta_dict.sync() self._iwid = iwid if self.interwikiname is not None: self._iwid_full = packLine([iwid, self.interwikiname]) else: self._iwid_full = packLine([iwid]) def _config_check(self): """ Check namespace and warn about unknown names Warn about names which are not used by DefaultConfig, except modules, classes, _private or __magic__ names. This check is disabled by default, when enabled, it will show an error message with unknown names. """ unknown = ['"%s"' % name for name in dir(self) if not name.startswith('_') and name not in DefaultConfig.__dict__ and not isinstance(getattr(self, name), (type(sys), type(DefaultConfig)))] if unknown: msg = """ Unknown configuration options: %s. For more information, visit HelpOnConfiguration. Please check your configuration for typos before requesting support or reporting a bug. """ % ', '.join(unknown) raise error.ConfigurationError(msg) def _decode(self): """ Try to decode certain names, ignore unicode values Try to decode str using utf-8. If the decode fail, raise FatalError. Certain config variables should contain unicode values, and should be defined with u'text' syntax. Python decode these if the file have a 'coding' line. This will allow utf-8 users to use simple strings using, without using u'string'. Other users will have to use u'string' for these names, because we don't know what is the charset of the config files. """ charset = 'utf-8' message = u""" "%(name)s" configuration variable is a string, but should be unicode. Use %(name)s = u"value" syntax for unicode variables. Also check your "-*- coding -*-" line at the top of your configuration file. It should match the actual charset of the configuration file. 
""" decode_names = ( 'sitename', 'interwikiname', 'user_homewiki', 'logo_string', 'navi_bar', 'page_front_page', 'page_category_regex', 'page_dict_regex', 'page_group_regex', 'page_template_regex', 'page_license_page', 'page_local_spelling_words', 'acl_rights_default', 'acl_rights_before', 'acl_rights_after', 'mail_from', 'quicklinks_default', 'subscribed_pages_default', ) for name in decode_names: attr = getattr(self, name, None) if attr: # Try to decode strings if isinstance(attr, str): try: setattr(self, name, unicode(attr, charset)) except UnicodeError: raise error.ConfigurationError(message % {'name': name}) # Look into lists and try to decode strings inside them elif isinstance(attr, list): for i in xrange(len(attr)): item = attr[i] if isinstance(item, str): try: attr[i] = unicode(item, charset) except UnicodeError: raise error.ConfigurationError(message % {'name': name}) def _check_directories(self): """ Make sure directories are accessible Both data and underlay should exists and allow read, write and execute. """ mode = os.F_OK | os.R_OK | os.W_OK | os.X_OK for attr in ('data_dir', 'data_underlay_dir'): path = getattr(self, attr) # allow an empty underlay path or None if attr == 'data_underlay_dir' and not path: continue path_pages = os.path.join(path, "pages") if not (os.path.isdir(path_pages) and os.access(path_pages, mode)): msg = """ %(attr)s "%(path)s" does not exist, or has incorrect ownership or permissions. Make sure the directory and the subdirectory "pages" are owned by the web server and are readable, writable and executable by the web server user and group. It is recommended to use absolute paths and not relative paths. Check also the spelling of the directory name. """ % {'attr': attr, 'path': path, } raise error.ConfigurationError(msg) def _loadPluginModule(self): """ import all plugin modules To be able to import plugin from arbitrary path, we have to load the base package once using imp.load_module. 
Later, we can use standard __import__ call to load plugins in this package. Since each configured plugin path has unique plugins, we load the plugin packages as "moin_plugin_<sha1(path)>.plugin". """ import imp plugin_dirs = [self.plugin_dir] + self.plugin_dirs self._plugin_modules = [] try: # Lock other threads while we check and import imp.acquire_lock() try: for pdir in plugin_dirs: csum = 'p_%s' % hashlib.new('sha1', pdir).hexdigest() modname = '%s.%s' % (self.siteid, csum) # If the module is not loaded, try to load it if not modname in sys.modules: # Find module on disk and try to load - slow! abspath = os.path.abspath(pdir) parent_dir, pname = os.path.split(abspath) fp, path, info = imp.find_module(pname, [parent_dir]) try: # Load the module and set in sys.modules module = imp.load_module(modname, fp, path, info) setattr(sys.modules[self.siteid], 'csum', module) finally: # Make sure fp is closed properly if fp: fp.close() if modname not in self._plugin_modules: self._plugin_modules.append(modname) finally: imp.release_lock() except ImportError, err: msg = """ Could not import plugin package "%(path)s" because of ImportError: %(err)s. Make sure your data directory path is correct, check permissions, and that the data/plugin directory has an __init__.py file. """ % { 'path': pdir, 'err': str(err), } raise error.ConfigurationError(msg) def _fillDicts(self): """ fill config dicts Fills in missing dict keys of derived user config by copying them from this base class. """ # user checkbox defaults for key, value in DefaultConfig.user_checkbox_defaults.items(): if key not in self.user_checkbox_defaults: self.user_checkbox_defaults[key] = value def __getitem__(self, item): """ Make it possible to access a config object like a dict """ return getattr(self, item) class DefaultConfig(ConfigFunctionality): """ Configuration base class with default config values (added below) """ # Do not add anything into this class. 
Functionality must # be added above to avoid having the methods show up in # the WikiConfig macro. Settings must be added below to # the options dictionary. _default_backlink_method = lambda cfg, req: 'backlink' if req.user.valid else 'pagelink' def _default_password_checker(cfg, request, username, password, min_length=6, min_different=4): """ Check if a password is secure enough. We use a built-in check to get rid of the worst passwords. We do NOT use cracklib / python-crack here any more because it is not thread-safe (we experienced segmentation faults when using it). If you don't want to check passwords, use password_checker = None. @return: None if there is no problem with the password, some unicode object with an error msg, if the password is problematic. """ _ = request.getText # in any case, do a very simple built-in check to avoid the worst passwords if len(password) < min_length: return _("Password is too short.") if len(set(password)) < min_different: return _("Password has not enough different characters.") username_lower = username.lower() password_lower = password.lower() if username in password or password in username or \ username_lower in password_lower or password_lower in username_lower: return _("Password is too easy (password contains name or name contains password).") keyboards = (ur"`1234567890-=qwertyuiop[]\asdfghjkl;'zxcvbnm,./", # US kbd ur"^1234567890ߴqwertzuiop�+asdfghjkl��#yxcvbnm,.-", # german kbd ) # add more keyboards! for kbd in keyboards: rev_kbd = kbd[::-1] if password in kbd or password in rev_kbd or \ password_lower in kbd or password_lower in rev_kbd: return _("Password is too easy (keyboard sequence).") return None class DefaultExpression(object): def __init__(self, exprstr): self.text = exprstr self.value = eval(exprstr) # # Options that are not prefixed automatically with their # group name, see below (at the options dict) for more # information on the layout of this structure. 
# options_no_group_name = { # ========================================================================= 'attachment_extension': ("Mapping of attachment extensions to actions", None, ( ('extensions_mapping', {'.tdraw': {'modify': 'twikidraw'}, '.adraw': {'modify': 'anywikidraw'}, }, "file extension -> do -> action"), )), # ========================================================================== 'datastruct': ('Datastruct settings', None, ( ('dicts', lambda cfg, request: datastruct.WikiDicts(request), "function f(cfg, request) that returns a backend which is used to access dicts definitions."), ('groups', lambda cfg, request: datastruct.WikiGroups(request), "function f(cfg, request) that returns a backend which is used to access groups definitions."), )), # ========================================================================== 'session': ('Session settings', "Session-related settings, see HelpOnSessions.", ( ('session_service', DefaultExpression('web.session.FileSessionService()'), "The session service."), ('cookie_name', None, 'The variable part of the session cookie name. (None = determine from URL, siteidmagic = use siteid, any other string = use that)'), ('cookie_secure', None, 'Use secure cookie. (None = auto-enable secure cookie for https, True = ever use secure cookie, False = never use secure cookie).'), ('cookie_httponly', False, 'Use a httponly cookie that can only be used by the server, not by clientside scripts.'), ('cookie_domain', None, 'Domain used in the session cookie. (None = do not specify domain).'), ('cookie_path', None, 'Path used in the session cookie (None = auto-detect). 
Please only set if you know exactly what you are doing.'), ('cookie_lifetime', (0, 12), 'Session lifetime [h] of (anonymous, logged-in) users (see HelpOnSessions for details).'), )), # ========================================================================== 'auth': ('Authentication / Authorization / Security settings', None, ( ('superuser', [], "List of trusted user names with wiki system administration super powers (not to be confused with ACL admin rights!). Used for e.g. software installation, language installation via SystemPagesSetup and more. See also HelpOnSuperUser."), ('auth', DefaultExpression('[MoinAuth()]'), "list of auth objects, to be called in this order (see HelpOnAuthentication)"), ('auth_methods_trusted', ['http', 'given', 'xmlrpc_applytoken'], # Note: 'http' auth method is currently just a redirect to 'given' 'authentication methods for which users should be included in the special "Trusted" ACL group.'), ('secrets', None, """Either a long shared secret string used for multiple purposes or a dict {"purpose": "longsecretstring", ...} for setting up different shared secrets for different purposes. If you don't setup own secret(s), a secret string will be auto-generated from other config settings."""), ('DesktopEdition', False, "if True, give all local users special powers - ''only use this for a local desktop wiki!''"), ('SecurityPolicy', None, "Class object hook for implementing security restrictions or relaxations"), ('actions_superuser', ['newaccount', # spam bots create tons of user accounts, so better allow it only for superuser ], "Restrict actions to superuser only (list of strings)"), ('actions_excluded', ['xmlrpc', # we do not want wiki admins unknowingly offering xmlrpc service 'MyPages', # only works when used with a non-default SecurityPolicy (e.g. 
autoadmin) 'CopyPage', # has questionable behaviour regarding subpages a user can't read, but can copy ], "Exclude unwanted actions (list of strings)"), ('allow_xslt', False, "if True, enables XSLT processing via 4Suite (Note that this is DANGEROUS. It enables anyone who can edit the wiki to get '''read/write access to your filesystem as the moin process uid/gid''' and to insert '''arbitrary HTML''' into your wiki pages, which is why this setting defaults to `False` (XSLT disabled). Do not set it to other values, except if you know what you do and if you have very trusted editors only)."), ('password_checker', DefaultExpression('_default_password_checker'), 'checks whether a password is acceptable (default check is length >= 6, at least 4 different chars, no keyboard sequence, not username used somehow (you can switch this off by using `None`)'), ('password_scheme', '{PASSLIB}', 'Either "{PASSLIB}" (default) to use passlib for creating and upgrading password hashes (see also passlib_crypt_context for passlib configuration), ' 'or "{SSHA}" (or any other of the builtin password schemes) to not use passlib (not recommended).'), ('passlib_support', True, 'If True (default), import passlib and support password hashes offered by it.'), ('passlib_crypt_context', dict( # schemes we want to support (or deprecated schemes for which we still have # hashes in our storage). # note: bcrypt: we did not include it as it needs additional code (that is not pure python # and thus either needs compiling or installing platform-specific binaries) and # also there was some bcrypt issue in passlib < 1.5.3. 
# pbkdf2_sha512: not included as it needs at least passlib 1.4.0 # sha512_crypt: supported since passlib 1.3.0 (first public release) schemes=["sha512_crypt", ], # default scheme for creating new pw hashes (if not given, passlib uses first from schemes) #default="sha512_crypt", # deprecated schemes get auto-upgraded to the default scheme at login # time or when setting a password (including doing a moin account pwreset). # for passlib >= 1.6, giving ["auto"] means that all schemes except the default are deprecated: #deprecated=["auto"], # to support also older passlib versions, rather give a explicit list: #deprecated=[], # vary rounds parameter randomly when creating new hashes... #all__vary_rounds=0.1, ), "passlib CryptContext arguments, see passlib docs"), ('recovery_token_lifetime', 12, 'how long the password recovery token is valid [h]'), )), # ========================================================================== 'spam_leech_dos': ('Anti-Spam/Leech/DOS', 'These settings help limiting ressource usage and avoiding abuse.', ( ('hosts_deny', [], "List of denied IPs; if an IP ends with a dot, it denies a whole subnet (class A, B or C)"), ('surge_action_limits', {# allow max. <count> <action> requests per <dt> secs # action: (count, dt) 'all': (30, 30), # all requests (except cache/AttachFile action) count for this limit 'default': (30, 60), # default limit for actions without a specific limit 'show': (30, 60), 'recall': (10, 120), 'raw': (20, 40), # some people use this for css 'diff': (30, 60), 'fullsearch': (10, 120), 'edit': (30, 300), # can be lowered after making preview different from edit 'rss_rc': (1, 60), # The following actions are often used for images - to avoid pages with lots of images # (like photo galleries) triggering surge protection, we assign rather high limits: 'AttachFile': (300, 30), 'cache': (600, 30), # cache action is very cheap/efficient # special stuff to prevent someone trying lots of usernames / passwords to log in. 
# we keep this commented / disabled so that this feature does not get activated by default # (if somebody does not override surge_action_limits with own values): #'auth-ip': (10, 3600), # same remote ip (any name) #'auth-name': (10, 3600), # same name (any remote ip) }, "Surge protection tries to deny clients causing too much load/traffic, see HelpOnConfiguration/SurgeProtection."), ('surge_lockout_time', 3600, "time [s] someone gets locked out when ignoring the warnings"), ('textchas', None, "Spam protection setup using site-specific questions/answers, see HelpOnSpam."), ('textchas_disabled_group', None, "Name of a group of trusted users who do not get asked !TextCha questions."), ('textchas_expiry_time', 600, "Time [s] for a !TextCha to expire."), ('antispam_master_url', "http://master.moinmo.in/?action=xmlrpc2", "where antispam security policy fetches spam pattern updates (if it is enabled)"), # a regex of HTTP_USER_AGENTS that should be excluded from logging # and receive a FORBIDDEN for anything except viewing a page # list must not contain 'java' because of twikidraw wanting to save drawing uses this useragent ('ua_spiders', ('archiver|bingbot|cfetch|charlotte|crawler|gigabot|googlebot|heritrix|holmes|htdig|httrack|httpunit|' 'intelix|jeeves|larbin|leech|libwww-perl|linkbot|linkmap|linkwalk|litefinder|mercator|' 'microsoft.url.control|mirror| mj12bot|msnbot|msrbot|neomo|nutbot|omniexplorer|puf|robot|scooter|seekbot|' 'sherlock|slurp|sitecheck|snoopy|spider|teleport|twiceler|voilabot|voyager|webreaper|wget|yeti'), "A regex of HTTP_USER_AGENTs that should be excluded from logging and are not allowed to use actions."), ('unzip_single_file_size', 2.0 * 1000 ** 2, "max. size of a single file in the archive which will be extracted [bytes]"), ('unzip_attachments_space', 200.0 * 1000 ** 2, "max. total amount of bytes can be used to unzip files [bytes]"), ('unzip_attachments_count', 101, "max. 
number of files which are extracted from the zip file"), )), # ========================================================================== 'style': ('Style / Theme / UI related', 'These settings control how the wiki user interface will look like.', ( ('sitename', u'Untitled Wiki', "Short description of your wiki site, displayed below the logo on each page, and used in RSS documents as the channel title [Unicode]"), ('interwikiname', None, "unique and stable InterWiki name (prefix, moniker) of the site [Unicode], or None"), ('logo_string', None, "The wiki logo top of page, HTML is allowed (`<img>` is possible as well) [Unicode]"), ('html_pagetitle', None, "Allows you to set a specific HTML page title (if None, it defaults to the value of `sitename`)"), ('navi_bar', [u'RecentChanges', u'FindPage', u'HelpContents', ], 'Most important page names. Users can add more names in their quick links in user preferences. To link to URL, use `u"[[url|link title]]"`, to use a shortened name for long page name, use `u"[[LongLongPageName|title]]"`. 
[list of Unicode strings]'), ('theme_default', 'modernized', "the name of the theme that is used by default (see HelpOnThemes)"), ('theme_force', False, "if True, do not allow to change the theme"), ('stylesheets', [], "List of tuples (media, csshref) to insert after theme css, before user css, see HelpOnThemes."), ('supplementation_page', False, "if True, show a link to the supplementation page in the theme"), ('supplementation_page_name', u'Discussion', "default name of the supplementation (sub)page [unicode]"), ('supplementation_page_template', u'DiscussionTemplate', "default template used for creation of the supplementation page [unicode]"), ('interwiki_preferred', [], "In dialogues, show those wikis at the top of the list."), ('sistersites', [], "list of tuples `('WikiName', 'sisterpagelist_fetch_url')`"), ('trail_size', 5, "Number of pages in the trail of visited pages"), ('page_footer1', '', "Custom HTML markup sent ''before'' the system footer."), ('page_footer2', '', "Custom HTML markup sent ''after'' the system footer."), ('page_header1', '', "Custom HTML markup sent ''before'' the system header / title area but after the body tag."), ('page_header2', '', "Custom HTML markup sent ''after'' the system header / title area (and body tag)."), ('changed_time_fmt', '%H:%M', "Time format used on Recent``Changes for page edits within the last 24 hours"), ('date_fmt', '%Y-%m-%d', "System date format, used mostly in Recent``Changes"), ('datetime_fmt', '%Y-%m-%d %H:%M:%S', 'Default format for dates and times (when the user has no preferences or chose the "default" date format)'), ('chart_options', None, "If you have gdchart, use something like chart_options = {'width': 720, 'height': 540}"), ('edit_bar', ['Edit', 'Comments', 'Discussion', 'Info', 'Subscribe', 'Quicklink', 'Attachments', 'ActionsMenu'], 'list of edit bar entries'), ('history_count', (100, 200, 5, 10, 25, 50), "Number of revisions shown for info/history action (default_count_shown, max_count_shown, 
[other values shown as page size choices]). At least first two values (default and maximum) should be provided. If additional values are provided, user will be able to change number of items per page in the UI."), ('history_paging', True, "Enable paging functionality for info action's history display."), ('show_hosts', True, "if True, show host names and IPs. Set to False to hide them."), ('show_interwiki', False, "if True, let the theme display your interwiki name"), ('show_names', True, "if True, show user names in the revision history and on Recent``Changes. Set to False to hide them."), ('show_section_numbers', False, 'show section numbers in headings by default'), ('show_timings', False, "show some timing values at bottom of a page"), ('show_version', False, "show moin's version at the bottom of a page"), ('show_rename_redirect', False, "if True, offer creation of redirect pages when renaming wiki pages"), ('backlink_method', DefaultExpression('_default_backlink_method'), "function determining how the (last part of the) pagename should be rendered in the title area"), ('packagepages_actions_excluded', ['setthemename', # related to questionable theme stuff, see below 'copythemefile', # maybe does not work, e.g. if no fs write permissions or real theme file path is unknown to moin 'installplugin', # code installation, potentially dangerous 'renamepage', # dangerous with hierarchical acls 'deletepage', # dangerous with hierarchical acls 'delattachment', # dangerous, no revisioning ], 'list with excluded package actions (e.g. 
because they are dangerous / questionable)'), ('page_credits', [ '<a href="http://moinmo.in/" title="This site uses the MoinMoin Wiki software.">MoinMoin Powered</a>', '<a href="http://moinmo.in/Python" title="MoinMoin is written in Python.">Python Powered</a>', '<a href="http://moinmo.in/GPL" title="MoinMoin is GPL licensed.">GPL licensed</a>', '<a href="http://validator.w3.org/check?uri=referer" title="Click here to validate this page.">Valid HTML 4.01</a>', ], 'list with html fragments with logos or strings for crediting.'), # These icons will show in this order in the iconbar, unless they # are not relevant, e.g email icon when the wiki is not configured # for email. ('page_iconbar', ["up", "edit", "view", "diff", "info", "subscribe", "raw", "print", ], 'list of icons to show in iconbar, valid values are only those in page_icons_table. Available only in classic theme.'), # Standard buttons in the iconbar ('page_icons_table', { # key pagekey, querystr dict, title, icon-key 'diff': ('page', {'action': 'diff'}, _("Diffs"), "diff"), 'info': ('page', {'action': 'info'}, _("Info"), "info"), 'edit': ('page', {'action': 'edit'}, _("Edit"), "edit"), 'unsubscribe': ('page', {'action': 'unsubscribe'}, _("UnSubscribe"), "unsubscribe"), 'subscribe': ('page', {'action': 'subscribe'}, _("Subscribe"), "subscribe"), 'raw': ('page', {'action': 'raw'}, _("Raw"), "raw"), 'xml': ('page', {'action': 'show', 'mimetype': 'text/xml'}, _("XML"), "xml"), 'print': ('page', {'action': 'print'}, _("Print"), "print"), 'view': ('page', {}, _("View"), "view"), 'up': ('page_parent_page', {}, _("Up"), "up"), }, "dict of {'iconname': (url, title, icon-img-key), ...}. 
Available only in classic theme."), ('show_highlight_msg', False, "Show message that page has highlighted text " "and provide link to non-highlighted " "version."), )), # ========================================================================== 'editor': ('Editor related', None, ( ('editor_default', 'text', "Editor to use by default, 'text' or 'gui'"), ('editor_force', True, "if True, force using the default editor"), ('editor_ui', 'theonepreferred', "Editor choice shown on the user interface, 'freechoice' or 'theonepreferred'"), ('page_license_enabled', False, 'if True, show a license hint in page editor.'), ('page_license_page', u'WikiLicense', 'Page linked from the license hint. [Unicode]'), ('edit_locking', 'warn 10', "Editor locking policy: `None`, `'warn <timeout in minutes>'`, or `'lock <timeout in minutes>'`"), ('edit_ticketing', True, None), ('edit_rows', 20, "Default height of the edit box"), ('comment_required', False, "if True, only allow saving if a comment is filled in"), )), # ========================================================================== 'paths': ('Paths', None, ( ('data_dir', './data/', "Path to the data directory containing your (locally made) wiki pages."), ('data_underlay_dir', './underlay/', "Path to the underlay directory containing distribution system and help pages."), ('cache_dir', None, "Directory for caching, by default computed from `data_dir`/cache."), ('session_dir', None, "Directory for session storage, by default computed to be `cache_dir`/__session__."), ('user_dir', None, "Directory for user storage, by default computed to be `data_dir`/user."), ('plugin_dir', None, "Plugin directory, by default computed to be `data_dir`/plugin."), ('plugin_dirs', [], "Additional plugin directories."), ('docbook_html_dir', r"/usr/share/xml/docbook/stylesheet/nwalsh/html/", 'Path to the directory with the Docbook to HTML XSLT files (optional, used by the docbook parser). 
The default value is correct for Debian Etch.'), ('shared_intermap', None, "Path to a file containing global InterWiki definitions (or a list of such filenames)"), )), # ========================================================================== 'urls': ('URLs', None, ( # includes the moin version number, so we can have a unlimited cache lifetime # for the static stuff. if stuff changes on version upgrade, url will change # immediately and we have no problem with stale caches. ('url_prefix_static', config.url_prefix_static, "used as the base URL for icons, css, etc. - includes the moin version number and changes on every release. This replaces the deprecated and sometimes confusing `url_prefix = '/wiki'` setting."), ('url_prefix_local', None, "used as the base URL for some Javascript - set this to a URL on same server as the wiki if your url_prefix_static points to a different server."), ('url_prefix_fckeditor', None, "used as the base URL for FCKeditor - similar to url_prefix_local, but just for FCKeditor."), ('url_prefix_action', None, "Use 'action' to enable action URL generation to be compatible with robots.txt. It will generate .../action/info/PageName?action=info then. Recommended for internet wikis."), ('notification_bot_uri', None, "URI of the Jabber notification bot."), ('url_mappings', {}, "lookup table to remap URL prefixes (dict of {{{'prefix': 'replacement'}}}); especially useful in intranets, when whole trees of externally hosted documents move around"), )), # ========================================================================== 'pages': ('Special page names', None, ( ('page_front_page', u'LanguageSetup', "Name of the front page. We don't expect you to keep the default. Just read LanguageSetup in case you're wondering... [Unicode]"), # the following regexes should match the complete name when used in free text # the group 'all' shall match all, while the group 'key' shall match the key only # e.g. 
CategoryFoo -> group 'all' == CategoryFoo, group 'key' == Foo # moin's code will add ^ / $ at beginning / end when needed ('page_category_regex', ur'(?P<all>Category(?P<key>(?!Template)\S+))', 'Pagenames exactly matching this regex are regarded as Wiki categories [Unicode]'), ('page_dict_regex', ur'(?P<all>(?P<key>\S+)Dict)', 'Pagenames exactly matching this regex are regarded as pages containing variable dictionary definitions [Unicode]'), ('page_group_regex', ur'(?P<all>(?P<key>\S+)Group)', 'Pagenames exactly matching this regex are regarded as pages containing group definitions [Unicode]'), ('page_template_regex', ur'(?P<all>(?P<key>\S+)Template)', 'Pagenames exactly matching this regex are regarded as pages containing templates for new pages [Unicode]'), ('page_local_spelling_words', u'LocalSpellingWords', 'Name of the page containing user-provided spellchecker words [Unicode]'), )), # ========================================================================== 'user': ('User Preferences related', None, ( ('quicklinks_default', [], 'List of preset quicklinks for a newly created user accounts. Existing accounts are not affected by this option whereas changes in navi_bar do always affect existing accounts. 
Preset quicklinks can be removed by the user in the user preferences menu, navi_bar settings not.'), ('subscribed_pages_default', [], "List of pagenames used for presetting page subscriptions for newly created user accounts."), ('email_subscribed_events_default', [ PageChangedEvent.__name__, PageRenamedEvent.__name__, PageDeletedEvent.__name__, PageCopiedEvent.__name__, PageRevertedEvent.__name__, FileAttachedEvent.__name__, ], None), ('jabber_subscribed_events_default', [], None), ('tz_offset', 0.0, "default time zone offset in hours from UTC"), ('userprefs_disabled', [], "Disable the listed user preferences plugins."), )), # ========================================================================== 'various': ('Various', None, ( ('bang_meta', True, 'if True, enable {{{!NoWikiName}}} markup'), ('caching_formats', ['text_html'], "output formats that are cached; set to [] to turn off caching (useful for development)"), ('config_check_enabled', False, "if True, check configuration for unknown settings."), ('default_markup', 'wiki', 'Default page parser / format (name of module in `MoinMoin.parser`)'), ('html_head', '', "Additional <HEAD> tags, see HelpOnThemes."), ('html_head_queries', '<meta name="robots" content="noindex,nofollow">\n', "Additional <HEAD> tags for requests with query strings, like actions."), ('html_head_posts', '<meta name="robots" content="noindex,nofollow">\n', "Additional <HEAD> tags for POST requests."), ('html_head_index', '<meta name="robots" content="index,follow">\n', "Additional <HEAD> tags for some few index pages."), ('html_head_normal', '<meta name="robots" content="index,nofollow">\n', "Additional <HEAD> tags for most normal pages."), ('language_default', 'en', "Default language for user interface and page content, see HelpOnLanguages."), ('language_ignore_browser', False, "if True, ignore user's browser language settings, see HelpOnLanguages."), ('log_remote_addr', True, "if True, log the remote IP address (and maybe hostname)."), 
('log_reverse_dns_lookups', False, "if True, do a reverse DNS lookup on page SAVE."), ('log_timing', False, "if True, add timing infos to the log output to analyse load conditions"), ('log_events_format', 1, "0 = no events logging, 1 = standard format (like <= 1.9.7) [default], 2 = extended format"), # some dangerous mimetypes (we don't use "content-disposition: inline" for them when a user # downloads such attachments, because the browser might execute e.g. Javascript contained # in the HTML and steal your moin session cookie or do other nasty stuff) ('mimetypes_xss_protect', [ 'text/html', 'application/x-shockwave-flash', 'application/xhtml+xml', ], '"content-disposition: inline" isn\'t used for them when a user downloads such attachments'), ('mimetypes_embed', [ 'application/x-dvi', 'application/postscript', 'application/pdf', 'application/ogg', 'application/vnd.visio', 'image/x-ms-bmp', 'image/svg+xml', 'image/tiff', 'image/x-photoshop', 'audio/mpeg', 'audio/midi', 'audio/x-wav', 'video/fli', 'video/mpeg', 'video/quicktime', 'video/x-msvideo', 'chemical/x-pdb', 'x-world/x-vrml', ], 'mimetypes that can be embedded by the [[HelpOnMacros/EmbedObject|EmbedObject macro]]'), ('refresh', None, "refresh = (minimum_delay_s, targets_allowed) enables use of `#refresh 5 PageName` processing instruction, targets_allowed must be either `'internal'` or `'external'`"), ('rss_cache', 60, "suggested caching time for Recent''''''Changes RSS, in second"), ('search_results_per_page', 25, "Number of hits shown per page in the search results"), ('siteid', 'default', None), ('xmlrpc_overwrite_user', True, "Overwrite authenticated user at start of xmlrpc code"), )), } # # The 'options' dict carries default MoinMoin options. The dict is a # group name to tuple mapping. 
# Each group tuple consists of the following items:
#    group section heading, group help text, option list
#
# where each 'option list' is a tuple or list of option tuples
#
# each option tuple consists of
#    option name, default value, help text
#
# All the help texts will be displayed by the WikiConfigHelp() macro.
#
# Unlike the options_no_group_name dict, option names in this dict
# are automatically prefixed with "group name '_'" (i.e. the name of
# the group they are in and an underscore), e.g. the 'hierarchic'
# below creates an option called "acl_hierarchic".
#
# If you need to add a complex default expression that results in an
# object and should not be shown in the __repr__ form in WikiConfigHelp(),
# you can use the DefaultExpression class, see 'auth' above for example.
#
#
options = {
    'acl': ('Access control lists',
        'ACLs control who may do what, see HelpOnAccessControlLists.', (
        ('hierarchic', False, 'True to use hierarchical ACLs'),
        ('rights_default', u"Trusted:read,write,delete,revert Known:read All:read",
         "ACL used if no ACL is specified on the page"),
        ('rights_before', u"",
         "ACL that is processed before the on-page/default ACL"),
        ('rights_after', u"",
         "ACL that is processed after the on-page/default ACL"),
        ('rights_valid', ['read', 'write', 'delete', 'revert', 'admin'],
         "Valid tokens for right sides of ACL entries."),
    )),

    'xapian': ('Xapian search', "Configuration of the Xapian based indexed search, see HelpOnXapian.", (
        ('search', False,
         "True to enable the fast, indexed search (based on the Xapian search library)"),
        ('index_dir', None,
         "Directory where the Xapian search index is stored (None = auto-configure wiki local storage)"),
        ('stemming', False,
         "True to enable Xapian word stemmer usage for indexing / searching."),
        ('index_history', False,
         "True to enable indexing of non-current page revisions."),
    )),

    'user': ('Users / User settings', None, (
        ('email_unique', True,
         "if True, check email addresses for uniqueness and don't accept duplicates."),
        ('jid_unique', True,
         "if True, check Jabber IDs for uniqueness and don't accept duplicates."),
        ('homewiki', u'Self',
         "interwiki name of the wiki where the user home pages are located [Unicode] - useful if you have ''many'' users. You could even link to nonwiki \"user pages\" if the wiki username is in the target URL."),
        ('checkbox_fields', [
            ('mailto_author', lambda _: _('Publish my email (not my wiki homepage) in author info')),
            ('edit_on_doubleclick', lambda _: _('Open editor on double click')),
            ('remember_last_visit', lambda _: _('After login, jump to last visited page')),
            ('show_comments', lambda _: _('Show comment sections')),
            ('show_nonexist_qm', lambda _: _('Show question mark for non-existing pagelinks')),
            ('show_page_trail', lambda _: _('Show page trail')),
            ('show_toolbar', lambda _: _('Show icon toolbar')),
            ('show_topbottom', lambda _: _('Show top/bottom links in headings')),
            ('show_fancy_diff', lambda _: _('Show fancy diffs')),
            ('wikiname_add_spaces', lambda _: _('Add spaces to displayed wiki names')),
            ('remember_me', lambda _: _('Remember login information')),
            ('disabled', lambda _: _('Disable this account forever')),
            # if an account is disabled, it may be used for looking up
            # id -> username for page info and recent changes, but it
            # is not usable for the user any more:
        ], "Describes user preferences, see HelpOnConfiguration/UserPreferences."),
        ('checkbox_defaults', {
            'mailto_author': 0,
            'edit_on_doubleclick': 1,
            'remember_last_visit': 0,
            'show_comments': 0,
            'show_nonexist_qm': False,
            'show_page_trail': 1,
            'show_toolbar': 1,
            'show_topbottom': 0,
            'show_fancy_diff': 1,
            'wikiname_add_spaces': 0,
            'remember_me': 1,
        }, "Defaults for user preferences, see HelpOnConfiguration/UserPreferences."),
        ('checkbox_disable', [],
         "Disable user preferences, see HelpOnConfiguration/UserPreferences."),
        ('checkbox_remove', [],
         "Remove user preferences, see HelpOnConfiguration/UserPreferences."),
        ('form_fields', [
            ('name', _('Name'), "text", "36", _("(Use FirstnameLastname)")),
            ('aliasname', _('Alias-Name'), "text", "36", ''),
            ('email', _('Email'), "text", "36", ''),
            ('jid', _('Jabber ID'), "text", "36", ''),
            ('css_url', _('User CSS URL'), "text", "40", _('(Leave it empty for disabling user CSS)')),
            ('edit_rows', _('Editor size'), "text", "3", ''),
        ], None),
        ('form_defaults', {# key: default - do NOT remove keys from here!
            'name': '',
            'aliasname': '',
            'password': '',
            'password2': '',
            'email': '',
            'jid': '',
            'css_url': '',
            'edit_rows': "20",
        }, None),
        ('form_disable', [],
         "list of field names used to disable user preferences form fields"),
        ('form_remove', [],
         "list of field names used to remove user preferences form fields"),
        ('transient_fields', ['id', 'valid', 'may', 'auth_username', 'password', 'password2', 'auth_method', 'auth_attribs', ],
         "User object attributes that are not persisted to permanent storage (internal use)."),
    )),

    'openidrp': ('OpenID Relying Party',
        'These settings control the built-in OpenID Relying Party (client).', (
        ('allowed_op', [], "List of forced providers"),
    )),

    'openid_server': ('OpenID Server',
        'These settings control the built-in OpenID Identity Provider (server).', (
        ('enabled', False, "True to enable the built-in OpenID server."),
        ('restricted_users_group', None,
         "If set to a group name, the group members are allowed to use the wiki as an OpenID provider. (None = allow for all users)"),
        ('enable_user', False,
         "If True, the OpenIDUser processing instruction is allowed."),
    )),

    'mail': ('Mail settings',
        'These settings control outgoing and incoming email from and to the wiki.', (
        ('from', None, "Used as From: address for generated mail."),
        ('login', None,
         "'username userpass' for SMTP server authentication (None = don't use auth)."),
        ('smarthost', None,
         "Address of SMTP server to use for sending mail (None = don't use SMTP server)."),
        ('sendmail', None,
         "sendmail command to use for sending mail (None = don't use sendmail)"),
        ('import_subpage_template', u"$from-$date-$subject",
         "Create subpages using this template when importing mail."),
        ('import_pagename_search', ['subject', 'to', ],
         "Where to look for target pagename specification."),
        ('import_pagename_envelope', u"%s",
         "Use this to add some fixed prefix/postfix to the generated target pagename."),
        ('import_pagename_regex', r'\[\[([^\]]*)\]\]',
         "Regular expression used to search for target pagename specification."),
        ('import_wiki_addrs', [],
         "Target mail addresses to consider when importing mail"),
        ('notify_page_text', '%(intro)s%(difflink)s\n\n%(comment)s%(diff)s',
         "Template for putting together the pieces for the page changed/deleted/renamed notification mail text body"),
        ('notify_page_changed_subject',
         _('[%(sitename)s] %(trivial)sUpdate of "%(pagename)s" by %(username)s'),
         "Template for the page changed notification mail subject header"),
        ('notify_page_changed_intro',
         _("Dear Wiki user,\n\n"
           'You have subscribed to a wiki page or wiki category on "%(sitename)s" for change notification.\n\n'
           'The "%(pagename)s" page has been changed by %(editor)s:\n'),
         "Template for the page changed notification mail intro text"),
        ('notify_page_deleted_subject',
         _('[%(sitename)s] %(trivial)sUpdate of "%(pagename)s" by %(username)s'),
         "Template for the page deleted notification mail subject header"),
        ('notify_page_deleted_intro',
         _("Dear wiki user,\n\n"
           'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
           'The page "%(pagename)s" has been deleted by %(editor)s:\n\n'),
         "Template for the page deleted notification mail intro text"),
        ('notify_page_renamed_subject',
         _('[%(sitename)s] %(trivial)sUpdate of "%(pagename)s" by %(username)s'),
         "Template for the page renamed notification mail subject header"),
        ('notify_page_renamed_intro',
         _("Dear wiki user,\n\n"
           'You have subscribed to a wiki page "%(sitename)s" for change notification.\n\n'
           'The page "%(pagename)s" has been renamed from "%(oldname)s" by %(editor)s:\n'),
         "Template for the page renamed notification mail intro text"),
        ('notify_att_added_subject',
         _('[%(sitename)s] New attachment added to page %(pagename)s'),
         "Template for the attachment added notification mail subject header"),
        ('notify_att_added_intro',
         _("Dear Wiki user,\n\n"
           'You have subscribed to a wiki page "%(page_name)s" for change notification. '
           "An attachment has been added to that page by %(editor)s. "
           "Following detailed information is available:\n\n"
           "Attachment name: %(attach_name)s\n"
           "Attachment size: %(attach_size)s\n"),
         "Template for the attachment added notification mail intro text"),
        ('notify_att_removed_subject',
         _('[%(sitename)s] Removed attachment from page %(pagename)s'),
         "Template for the attachment removed notification mail subject header"),
        ('notify_att_removed_intro',
         _("Dear Wiki user,\n\n"
           'You have subscribed to a wiki page "%(page_name)s" for change notification. '
           "An attachment has been removed from that page by %(editor)s. "
           "Following detailed information is available:\n\n"
           "Attachment name: %(attach_name)s\n"
           "Attachment size: %(attach_size)s\n"),
         "Template for the attachment removed notification mail intro text"),
        ('notify_user_created_subject',
         _("[%(sitename)s] New user account created"),
         "Template for the user created notification mail subject header"),
        ('notify_user_created_intro',
         _('Dear Superuser, a new user has just been created on "%(sitename)s". Details follow:\n\n'
           ' User name: %(username)s\n'
           ' Email address: %(useremail)s'),
         "Template for the user created notification mail intro text"),
    )),

    'backup': ('Backup settings',
        'These settings control how the backup action works and who is allowed to use it.', (
        ('compression', 'gz', 'What compression to use for the backup ("gz" or "bz2").'),
        ('users', [], 'List of trusted user names who are allowed to get a backup.'),
        ('include', [], 'List of pathes to backup.'),
        ('exclude', lambda self, filename: False,
         'Function f(self, filename) that tells whether a file should be excluded from backup. By default, nothing is excluded.'),
    )),

    'rss': ('RSS settings', 'These settings control RSS behaviour.', (
        ('items_default', 15, "Default maximum items value for RSS feed. Can be "
                              "changed via items URL query parameter of rss_rc "
                              "action."),
        ('items_limit', 100, "Limit for item count got via RSS (i. e. user "
                             "can't get more than items_limit items even via "
                             "changing items URL query parameter)."),
        ('unique', 0, "If set to 1, for each page name only one RSS item would "
                      "be shown. Can be changed via unique rss_rc action URL "
                      "query parameter."),
        ('diffs', 0, "Add diffs in RSS item descriptions by default. Can be "
                     "changed via diffs URL query parameter of rss_rc action."),
        ('ddiffs', 0, "If set to 1, links to diff view instead of page itself "
                      "would be generated by default. Can be changed via ddiffs "
                      "URL query parameter of rss_rc action."),
        ('lines_default', 20, "Default line count limit for diffs added as item "
                              "descriptions for RSS items. Can be changed via "
                              "lines URL query parameter of rss_rc action."),
        ('lines_limit', 100, "Limit for possible line count for diffs added as "
                             "item descriptions in RSS."),
        ('show_attachment_entries', 0, "If set to 1, items, related to "
                                       "attachment management, would be added to "
                                       "RSS feed. Can be changed via show_att "
                                       "URL query parameter of rss_rc action."),
        ('page_filter_pattern', "", "Default page filter pattern for RSS feed. "
                                    "Empty pattern matches to any page. Pattern "
                                    "beginning with circumflex is interpreted as "
                                    "regular expression. Pattern ending with "
                                    "slash matches page and all its subpages. "
                                    "Otherwise pattern sets specific pagename. "
                                    "Can be changed via page URL query parameter "
                                    "of rss_rc action."),
        ('show_page_history_link', True, "Add link to page change history "
                                         "RSS feed in theme."),
    )),

    'search_macro': ('Search macro settings',
        'Settings related to behaviour of search macros (such as FullSearch, '
        'FullSearchCached, PageList)', (
        ('parse_args', False, "Do search macro parameter parsing. In previous "
                              "versions of MoinMoin, whole search macro "
                              "parameter string had been interpreted as needle. "
                              "Now, to provide ability to pass additional "
                              "parameters, this behaviour should be changed."),
        ('highlight_titles', 1, "Perform title matches highlighting by default "
                                "in search results generated by macro."),
        ('highlight_pages', 1, "Add highlight parameter to links in search "
                               "results generated by search macros by default."),
    )),
}


def _add_options_to_defconfig(opts, addgroup=True):
    """Copy every option's default value onto DefaultConfig as an attribute.

    opts: a dict mapping group name -> (heading, helptext, option tuples).
    addgroup: if True, attribute names are prefixed with "<groupname>_"
              (the `options` dict); if False they are used verbatim
              (the `options_no_group_name` dict).

    DefaultExpression wrappers are unwrapped to their computed value before
    being set, so DefaultConfig always carries plain values.
    """
    for groupname in opts:
        group_short, group_doc, group_opts = opts[groupname]
        for name, default, doc in group_opts:
            if addgroup:
                name = groupname + '_' + name
            if isinstance(default, DefaultExpression):
                default = default.value
            setattr(DefaultConfig, name, default)

_add_options_to_defconfig(options)
_add_options_to_defconfig(options_no_group_name, False)

# remove the gettext pseudo function
del _
./CrossVul/dataset_final_sorted/CWE-79/py/bad_4208_1
crossvul-python_data_good_41_0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.

"""Backend-independent qute://* code.

Module attributes:
    pyeval_output: The output of the last :pyeval command.
    _HANDLERS: The handlers registered via decorators.
"""

import html
import json
import os
import time
import textwrap
import mimetypes
import urllib
import collections

import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl

import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
                               objreg, urlutils)
from qutebrowser.misc import objects


# Outputs of the most recent :pyeval / :spawn commands; served via the
# qute://pyeval and qute://spawn-output handlers below.
pyeval_output = ":pyeval was never called"
spawn_output = ":spawn was never called"


# Maps the "host" part of a qute:// URL to its handler callable,
# populated by the add_handler decorator.
_HANDLERS = {}


class NoHandlerFound(Exception):

    """Raised when no handler was found for the given URL."""

    pass


class QuteSchemeOSError(Exception):

    """Called when there was an OSError inside a handler."""

    pass


class QuteSchemeError(Exception):

    """Exception to signal that a handler should return an ErrorReply.

    Attributes correspond to the arguments in
    networkreply.ErrorNetworkReply.

    Attributes:
        errorstring: Error string to print.
        error: Numerical error value.
    """

    def __init__(self, errorstring, error):
        self.errorstring = errorstring
        self.error = error
        super().__init__(errorstring)


class Redirect(Exception):

    """Exception to signal a redirect should happen.

    Attributes:
        url: The URL to redirect to, as a QUrl.
    """

    def __init__(self, url):
        super().__init__(url.toDisplayString())
        self.url = url


class add_handler:  # noqa: N801,N806 pylint: disable=invalid-name

    """Decorator to register a qute://* URL handler.

    Attributes:
        _name: The 'foo' part of qute://foo
        backend: Limit which backends the handler can run with.
    """

    def __init__(self, name, backend=None):
        self._name = name
        self._backend = backend
        self._function = None

    def __call__(self, function):
        self._function = function
        # Register the bound wrapper (not the raw function) so the backend
        # check in wrapper() runs on every call.
        _HANDLERS[self._name] = self.wrapper
        return function

    def wrapper(self, *args, **kwargs):
        """Call the underlying function."""
        if self._backend is not None and objects.backend != self._backend:
            return self.wrong_backend_handler(*args, **kwargs)
        else:
            return self._function(*args, **kwargs)

    def wrong_backend_handler(self, url):
        """Show an error page about using the invalid backend."""
        html = jinja.render('error.html',
                            title="Error while opening qute://url",
                            url=url.toDisplayString(),
                            error='{} is not available with this '
                                  'backend'.format(url.toDisplayString()))
        return 'text/html', html


def data_for_url(url):
    """Get the data to show for the given URL.

    Args:
        url: The QUrl to show.

    Return:
        A (mimetype, data) tuple.

    Raises:
        Redirect: if the URL needs normalizing first.
        NoHandlerFound: if no handler is registered for the URL's host.
        QuteSchemeOSError: if the handler hit an OSError.
    """
    norm_url = url.adjusted(QUrl.NormalizePathSegments |
                            QUrl.StripTrailingSlash)
    if norm_url != url:
        raise Redirect(norm_url)

    path = url.path()
    host = url.host()
    query = urlutils.query_string(url)
    # A url like "qute:foo" is split as "scheme:path", not "scheme:host".
    log.misc.debug("url: {}, path: {}, host {}".format(
        url.toDisplayString(), path, host))

    if not path or not host:
        # Normalize qute://help and qute:help forms to qute://<host>/
        # and redirect, so handlers only ever see the canonical shape.
        new_url = QUrl()
        new_url.setScheme('qute')
        # When path is absent, e.g. qute://help (with no trailing slash)
        if host:
            new_url.setHost(host)
        # When host is absent, e.g. qute:help
        else:
            new_url.setHost(path)

        new_url.setPath('/')
        if query:
            new_url.setQuery(query)
        if new_url.host():  # path was a valid host
            raise Redirect(new_url)

    try:
        handler = _HANDLERS[host]
    except KeyError:
        raise NoHandlerFound(url)

    try:
        mimetype, data = handler(url)
    except OSError as e:
        # FIXME:qtwebengine how to handle this?
        raise QuteSchemeOSError(e)
    except QuteSchemeError:
        raise

    assert mimetype is not None, url
    if mimetype == 'text/html' and isinstance(data, str):
        # We let handlers return HTML as text
        data = data.encode('utf-8', errors='xmlcharrefreplace')

    return mimetype, data


@add_handler('bookmarks')
def qute_bookmarks(_url):
    """Handler for qute://bookmarks. Display all quickmarks / bookmarks."""
    bookmarks = sorted(objreg.get('bookmark-manager').marks.items(),
                       key=lambda x: x[1])  # Sort by title
    quickmarks = sorted(objreg.get('quickmark-manager').marks.items(),
                        key=lambda x: x[0])  # Sort by name

    html = jinja.render('bookmarks.html',
                        title='Bookmarks',
                        bookmarks=bookmarks,
                        quickmarks=quickmarks)
    return 'text/html', html


@add_handler('tabs')
def qute_tabs(_url):
    """Handler for qute://tabs. Display information about all open tabs."""
    tabs = collections.defaultdict(list)
    for win_id, window in objreg.window_registry.items():
        # Skip windows whose C++ object has already been destroyed.
        if sip.isdeleted(window):
            continue
        tabbed_browser = objreg.get('tabbed-browser',
                                    scope='window',
                                    window=win_id)
        for tab in tabbed_browser.widgets():
            # Don't list the qute://tabs page itself.
            if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]:
                urlstr = tab.url().toDisplayString()
                tabs[str(win_id)].append((tab.title(), urlstr))

    html = jinja.render('tabs.html',
                        title='Tabs',
                        tab_list_by_window=tabs)
    return 'text/html', html


def history_data(start_time, offset=None):
    """Return history data.

    Arguments:
        start_time: select history starting from this timestamp.
        offset: number of items to skip
    """
    # history atimes are stored as ints, ensure start_time is not a float
    start_time = int(start_time)
    hist = objreg.get('web-history')
    if offset is not None:
        entries = hist.entries_before(start_time, limit=1000, offset=offset)
    else:
        # end is 24hrs earlier than start
        end_time = start_time - 24*60*60
        entries = hist.entries_between(end_time, start_time)

    # url/title are escaped here because they end up embedded in the
    # qute://history page (keeps stored page titles from injecting markup).
    return [{"url": html.escape(e.url),
             "title": html.escape(e.title) or html.escape(e.url),
             "time": e.atime} for e in entries]


@add_handler('history')
def qute_history(url):
    """Handler for qute://history. Display and serve history."""
    if url.path() == '/data':
        try:
            offset = QUrlQuery(url).queryItemValue("offset")
            offset = int(offset) if offset else None
        except ValueError as e:
            raise QuteSchemeError("Query parameter offset is invalid", e)
        # Use start_time in query or current time.
        try:
            start_time = QUrlQuery(url).queryItemValue("start_time")
            start_time = float(start_time) if start_time else time.time()
        except ValueError as e:
            raise QuteSchemeError("Query parameter start_time is invalid", e)

        # NOTE(review): the JSON payload is served as text/html here —
        # presumably for the in-page XHR consumer; confirm before changing.
        return 'text/html', json.dumps(history_data(start_time, offset))
    else:
        return 'text/html', jinja.render(
            'history.html',
            title='History',
            gap_interval=config.val.history_gap_interval
        )


@add_handler('javascript')
def qute_javascript(url):
    """Handler for qute://javascript.

    Return content of file given as query parameter.
    """
    path = url.path()
    if path:
        path = "javascript" + os.sep.join(path.split('/'))
        return 'text/html', utils.read_file(path, binary=False)
    else:
        raise QuteSchemeError("No file specified", ValueError())


@add_handler('pyeval')
def qute_pyeval(_url):
    """Handler for qute://pyeval."""
    html = jinja.render('pre.html', title='pyeval', content=pyeval_output)
    return 'text/html', html


@add_handler('spawn-output')
def qute_spawn_output(_url):
    """Handler for qute://spawn-output."""
    html = jinja.render('pre.html', title='spawn output', content=spawn_output)
    return 'text/html', html


@add_handler('version')
@add_handler('verizon')
def qute_version(_url):
    """Handler for qute://version."""
    html = jinja.render('version.html', title='Version info',
                        version=version.version(),
                        copyright=qutebrowser.__copyright__)
    return 'text/html', html


@add_handler('plainlog')
def qute_plainlog(url):
    """Handler for qute://plainlog.

    An optional query parameter specifies the minimum log level to print.
    For example, qute://log?level=warning prints warnings and errors.
    Level can be one of: vdebug, debug, info, warning, error, critical.
    """
    if log.ram_handler is None:
        text = "Log output was disabled."
    else:
        level = QUrlQuery(url).queryItemValue('level')
        if not level:
            level = 'vdebug'
        text = log.ram_handler.dump_log(html=False, level=level)
    html = jinja.render('pre.html', title='log', content=text)
    return 'text/html', html


@add_handler('log')
def qute_log(url):
    """Handler for qute://log.

    An optional query parameter specifies the minimum log level to print.
    For example, qute://log?level=warning prints warnings and errors.
    Level can be one of: vdebug, debug, info, warning, error, critical.
    """
    if log.ram_handler is None:
        html_log = None
    else:
        level = QUrlQuery(url).queryItemValue('level')
        if not level:
            level = 'vdebug'
        html_log = log.ram_handler.dump_log(html=True, level=level)
    html = jinja.render('log.html', title='log', content=html_log)
    return 'text/html', html


@add_handler('gpl')
def qute_gpl(_url):
    """Handler for qute://gpl. Return HTML content as string."""
    return 'text/html', utils.read_file('html/license.html')


@add_handler('help')
def qute_help(url):
    """Handler for qute://help."""
    urlpath = url.path()
    if not urlpath or urlpath == '/':
        urlpath = 'index.html'
    else:
        urlpath = urlpath.lstrip('/')
    if not docutils.docs_up_to_date(urlpath):
        message.error("Your documentation is outdated! Please re-run "
                      "scripts/asciidoc2html.py.")

    path = 'html/doc/{}'.format(urlpath)
    if not urlpath.endswith('.html'):
        try:
            bdata = utils.read_file(path, binary=True)
        except OSError as e:
            raise QuteSchemeOSError(e)
        mimetype, _encoding = mimetypes.guess_type(urlpath)
        assert mimetype is not None, url
        return mimetype, bdata

    try:
        data = utils.read_file(path)
    except OSError:
        # No .html around, let's see if we find the asciidoc
        asciidoc_path = path.replace('.html', '.asciidoc')
        if asciidoc_path.startswith('html/doc/'):
            asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/')

        try:
            asciidoc = utils.read_file(asciidoc_path)
        except OSError:
            asciidoc = None

        if asciidoc is None:
            raise

        preamble = textwrap.dedent("""
            There was an error loading the documentation!

            This most likely means the documentation was not generated
            properly. If you are running qutebrowser from the git repository,
            please (re)run scripts/asciidoc2html.py and reload this page.

            If you're running a released version this is a bug, please use
            :report to report it.

            Falling back to the plaintext version.
--------------------------------------------------------------- """) return 'text/plain', (preamble + asciidoc).encode('utf-8') else: return 'text/html', data @add_handler('backend-warning') def qute_backend_warning(_url): """Handler for qute://backend-warning.""" html = jinja.render('backend-warning.html', distribution=version.distribution(), Distribution=version.Distribution, version=pkg_resources.parse_version, title="Legacy backend warning") return 'text/html', html def _qute_settings_set(url): """Handler for qute://settings/set.""" query = QUrlQuery(url) option = query.queryItemValue('option', QUrl.FullyDecoded) value = query.queryItemValue('value', QUrl.FullyDecoded) # https://github.com/qutebrowser/qutebrowser/issues/727 if option == 'content.javascript.enabled' and value == 'false': msg = ("Refusing to disable javascript via qute://settings " "as it needs javascript support.") message.error(msg) return 'text/html', b'error: ' + msg.encode('utf-8') try: config.instance.set_str(option, value, save_yaml=True) return 'text/html', b'ok' except configexc.Error as e: message.error(str(e)) return 'text/html', b'error: ' + str(e).encode('utf-8') @add_handler('settings') def qute_settings(url): """Handler for qute://settings. View/change qute configuration.""" if url.path() == '/set': return _qute_settings_set(url) html = jinja.render('settings.html', title='settings', configdata=configdata, confget=config.instance.get_str) return 'text/html', html @add_handler('bindings') def qute_bindings(_url): """Handler for qute://bindings. 
View keybindings.""" bindings = {} defaults = config.val.bindings.default modes = set(defaults.keys()).union(config.val.bindings.commands) modes.remove('normal') modes = ['normal'] + sorted(list(modes)) for mode in modes: bindings[mode] = config.key_instance.get_bindings_for(mode) html = jinja.render('bindings.html', title='Bindings', bindings=bindings) return 'text/html', html @add_handler('back') def qute_back(url): """Handler for qute://back. Simple page to free ram / lazy load a site, goes back on focusing the tab. """ html = jinja.render( 'back.html', title='Suspended: ' + urllib.parse.unquote(url.fragment())) return 'text/html', html @add_handler('configdiff') def qute_configdiff(url): """Handler for qute://configdiff.""" if url.path() == '/old': try: return 'text/html', configdiff.get_diff() except OSError as e: error = (b'Failed to read old config: ' + str(e.strerror).encode('utf-8')) return 'text/plain', error else: data = config.instance.dump_userconfig().encode('utf-8') return 'text/plain', data @add_handler('pastebin-version') def qute_pastebin_version(_url): """Handler that pastebins the version string.""" version.pastebin_version() return 'text/plain', b'Paste called.'
./CrossVul/dataset_final_sorted/CWE-79/py/good_41_0
crossvul-python_data_good_4208_0
# -*- coding: iso-8859-1 -*- """ MoinMoin - site-wide configuration defaults (NOT per single wiki!) @copyright: 2005-2006 MoinMoin:ThomasWaldmann @license: GNU GPL, see COPYING for details. """ import re from MoinMoin import version # unicode: set the char types (upper, lower, digits, spaces) from MoinMoin.util.chartypes import * # List of image types browser do support regulary browser_supported_images = ('gif', 'jpg', 'jpeg', 'png', 'bmp', 'ico', 'svg+xml') # Parser to use mimetype text parser_text_mimetype = ('plain', 'csv', 'rst', 'docbook', 'latex', 'tex', 'html', 'css', 'xml', 'python', 'perl', 'php', 'ruby', 'javascript', 'cplusplus', 'java', 'pascal', 'diff', 'gettext', 'xslt', 'creole', ) # When creating files, we use e.g. 0666 & config.umask for the mode: umask = 0770 # list of acceptable password hashing schemes for cfg.password_scheme, # here we only give reasonably good schemes, which is passlib (if we # have passlib) and ssha (if we only have builtin stuff): password_schemes_configurable = ['{PASSLIB}', '{SSHA}', ] # ordered list of supported password hashing schemes, best (passlib) should be # first, best builtin one should be second. this is what we support if we # encounter it in user profiles: password_schemes_supported = password_schemes_configurable + ['{SHA}', '{APR1}', '{MD5}', '{DES}', ] # Default value for the static stuff URL prefix (css, img, js). # Caution: # * do NOT use this directly, it is only the DEFAULT value to be used by # server Config classes and by multiconfig.py for request.cfg. # * must NOT end with '/'! # * some servers expect '/' at beginning and only 1 level deep. url_prefix_static = '/moin_static' + version.release_short # Threads flag - if you write a moin server that use threads, import # config in the server and set this flag to True. use_threads = False # Charset - we support only 'utf-8'. While older encodings might work, # we don't have the resources to test them, and there is no real # benefit for the user. 
IMPORTANT: use only lowercase 'utf-8'! charset = 'utf-8' # Regex to find lower->upper transitions (word boundaries in WikiNames), used by split_title split_regex = re.compile('([%s])([%s])' % (chars_lower, chars_upper), re.UNICODE) # Invalid characters - invisible characters that should not be in page # names. Prevent user confusion and wiki abuse, e.g u'\u202aFrontPage'. page_invalid_chars_regex = re.compile( ur""" \u0000 | # NULL # Bidi control characters \u202A | # LRE \u202B | # RLE \u202C | # PDF \u202D | # LRM \u202E # RLM """, re.UNICODE | re.VERBOSE ) # used for wikiutil.clean_input clean_input_translation_map = { # these chars will be replaced by blanks ord(u'\t'): u' ', ord(u'\r'): u' ', ord(u'\n'): u' ', } for c in u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f' \ '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f': # these chars will be removed clean_input_translation_map[ord(c)] = None del c # Other stuff url_schemas = ['http', 'https', 'ftp', 'file', 'mailto', 'nntp', 'news', 'ssh', 'telnet', 'irc', 'ircs', 'xmpp', 'mumble', 'webcal', 'ed2k', 'apt', 'rootz', 'gopher', 'notes', 'rtp', 'rtsp', 'rtcp', 'tel', ] smileys = (r"X-( :D <:( :o :( :) B) :)) ;) /!\ <!> (!) :-? :\ >:> |) " + r":-( :-) B-) :-)) ;-) |-) (./) {OK} {X} {i} {1} {2} {3} {*} {o}").split()
./CrossVul/dataset_final_sorted/CWE-79/py/good_4208_0
crossvul-python_data_bad_5730_0
""" Form Widget classes specific to the Django admin site. """ from __future__ import unicode_literals import copy from django import forms from django.contrib.admin.templatetags.admin_static import static from django.core.urlresolvers import reverse from django.forms.widgets import RadioFieldRenderer from django.forms.util import flatatt from django.utils.html import escape, format_html, format_html_join, smart_urlquote from django.utils.text import Truncator from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.utils.encoding import force_text from django.utils import six class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ @property def media(self): js = ["core.js", "SelectBox.js", "SelectFilter2.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super(FilteredSelectMultiple, self).__init__(attrs, choices) def render(self, name, value, attrs=None, choices=()): if attrs is None: attrs = {} attrs['class'] = 'selectfilter' if self.is_stacked: attrs['class'] += 'stacked' output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)] output.append('<script type="text/javascript">addEvent(window, "load", function(e) {') # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. 
output.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/'))) return mark_safe(''.join(output)) class AdminDateWidget(forms.DateInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vDateField', 'size': '10'} if attrs is not None: final_attrs.update(attrs) super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format) class AdminTimeWidget(forms.TimeInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vTimeField', 'size': '8'} if attrs is not None: final_attrs.update(attrs) super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. 
forms.MultiWidget.__init__(self, widgets, attrs) def format_output(self, rendered_widgets): return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>', _('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]) class AdminRadioFieldRenderer(RadioFieldRenderer): def render(self): """Outputs a <ul> for this set of radio fields.""" return format_html('<ul{0}>\n{1}\n</ul>', flatatt(self.attrs), format_html_join('\n', '<li>{0}</li>', ((force_text(w),) for w in self))) class AdminRadioSelect(forms.RadioSelect): renderer = AdminRadioFieldRenderer class AdminFileWidget(forms.ClearableFileInput): template_with_initial = ('<p class="file-upload">%s</p>' % forms.ClearableFileInput.template_with_initial) template_with_clear = ('<span class="clearable-file-input">%s</span>' % forms.ClearableFileInput.template_with_clear) def url_params_from_lookup_dict(lookups): """ Converts the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): items = [] for k, v in lookups.items(): if callable(v): v = v() if isinstance(v, (tuple, list)): v = ','.join([str(x) for x in v]) elif isinstance(v, bool): # See django.db.fields.BooleanField.get_prep_lookup v = ('0', '1')[v] else: v = six.text_type(v) items.append((k, v)) params.update(dict(items)) return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. 
""" def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super(ForeignKeyRawIdWidget, self).__init__(attrs) def render(self, name, value, attrs=None): rel_to = self.rel.to if attrs is None: attrs = {} extra = [] if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse('admin:%s_%s_changelist' % (rel_to._meta.app_label, rel_to._meta.model_name), current_app=self.admin_site.name) params = self.url_parameters() if params: url = '?' + '&amp;'.join(['%s=%s' % (k, v) for k, v in params.items()]) else: url = '' if "class" not in attrs: attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook. # TODO: "lookup_id_" is hard-coded here. This should instead use # the correct API to determine the ID dynamically. extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % (related_url, url, name)) extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>' % (static('admin/img/selector-search.gif'), _('Lookup'))) output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra if value: output.append(self.label_for_value(value)) return mark_safe(''.join(output)) def base_url_parameters(self): return url_params_from_lookup_dict(self.rel.limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.to._default_manager.using(self.db).get(**{key: value}) return '&nbsp;<strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...')) except (ValueError, self.rel.to.DoesNotExist): return '' class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids 
in the "raw_id" interface rather than in a <select multiple> box. """ def render(self, name, value, attrs=None): if attrs is None: attrs = {} if self.rel.to in self.admin_site._registry: # The related object is registered with the same AdminSite attrs['class'] = 'vManyToManyRawIdAdminField' if value: value = ','.join([force_text(v) for v in value]) else: value = '' return super(ManyToManyRawIdWidget, self).render(name, value, attrs) def url_parameters(self): return self.base_url_parameters() def label_for_value(self, value): return '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ def __init__(self, widget, rel, admin_site, can_add_related=None): self.is_hidden = widget.is_hidden self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. if can_add_related is None: can_add_related = rel.to in admin_site._registry self.can_add_related = can_add_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def media(self): return self.widget.media def render(self, name, value, *args, **kwargs): rel_to = self.rel.to info = (rel_to._meta.app_label, rel_to._meta.model_name) self.widget.choices = self.choices output = [self.widget.render(name, value, *args, **kwargs)] if self.can_add_related: related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name) # TODO: "add_id_" is hard-coded here. This should instead use the # correct API to determine the ID dynamically. 
output.append('<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % (related_url, name)) output.append('<img src="%s" width="10" height="10" alt="%s"/></a>' % (static('admin/img/icon_addlink.gif'), _('Add Another'))) return mark_safe(''.join(output)) def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): final_attrs = {'class': 'vLargeTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextareaWidget, self).__init__(attrs=final_attrs) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextInputWidget, self).__init__(attrs=final_attrs) class AdminEmailInputWidget(forms.EmailInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminEmailInputWidget, self).__init__(attrs=final_attrs) class AdminURLFieldWidget(forms.URLInput): def __init__(self, attrs=None): final_attrs = {'class': 'vURLField'} if attrs is not None: final_attrs.update(attrs) super(AdminURLFieldWidget, self).__init__(attrs=final_attrs) def render(self, name, value, attrs=None): html = super(AdminURLFieldWidget, self).render(name, value, attrs) if value: value = force_text(self._format_value(value)) final_attrs = {'href': mark_safe(smart_urlquote(value))} html = format_html( '<p class="url">{0} <a {1}>{2}</a><br />{3} {4}</p>', _('Currently:'), flatatt(final_attrs), value, _('Change:'), html ) return html class AdminIntegerFieldWidget(forms.TextInput): class_name = 
'vIntegerField' def __init__(self, attrs=None): final_attrs = {'class': self.class_name} if attrs is not None: final_attrs.update(attrs) super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs) class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField' class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vCommaSeparatedIntegerField'} if attrs is not None: final_attrs.update(attrs) super(AdminCommaSeparatedIntegerFieldWidget, self).__init__(attrs=final_attrs)
./CrossVul/dataset_final_sorted/CWE-79/py/bad_5730_0
crossvul-python_data_bad_1645_0
"""Base Tornado handlers for the notebook. Authors: * Brian Granger """ #----------------------------------------------------------------------------- # Copyright (C) 2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import functools import json import logging import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses from jinja2 import TemplateNotFound from tornado import web try: from tornado.log import app_log except ImportError: app_log = logging.getLogger() from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" def set_default_headers(self): headers = self.settings.get('headers', {}) if "X-Frame-Options" not in headers: headers["X-Frame-Options"] = "SAMEORIGIN" for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) pass def clear_login_cookie(self): self.clear_cookie(self.cookie_name) def get_current_user(self): user_id = self.get_secure_cookie(self.cookie_name) # For now the user_id should not return 
empty, but it could eventually if user_id == '': user_id = 'anonymous' if user_id is None: # prevent extra Invalid cookie sig warnings: self.clear_login_cookie() if not self.login_available: user_id = 'anonymous' return user_id @property def cookie_name(self): default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def password(self): """our password""" return self.settings.get('password', '') @property def logged_in(self): """Is a user currently logged in? """ user = self.get_current_user() return (user and not user == 'anonymous') @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ return bool(self.settings.get('password', '')) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. 
""" @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def notebook_manager(self): return self.settings['notebook_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def project_dir(self): return self.notebook_manager.notebook_dir #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if 
self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
body = self.request.body.strip().decode(u'utf-8') try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u'Invalid JSON in body of request') return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') message = '' status_message = responses.get(status_code, 'Unknown HTTP Error') if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header('Content-Type', 'text/html') # render the template try: html = self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) html = self.render_template('error.html', **ns) self.write(html) class Template404(IPythonHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @web.authenticated def get(self, path): if os.path.splitext(path)[1] == '.ipynb': name = os.path.basename(path) self.set_header('Content-Type', 'application/json') self.set_header('Content-Disposition','attachment; filename="%s"' % name) return web.StaticFileHandler.get(self, path) def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. 
""" abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): try: result = method(self, *args, **kwargs) except web.HTTPError as e: status = e.status_code message = e.log_message self.log.warn(message) self.set_status(e.status_code) self.finish(json.dumps(dict(message=message))) except Exception: self.log.error("Unhandled error in API request", exc_info=True) status = 500 message = "Unknown server error" t, value, tb = sys.exc_info() self.set_status(status) tb_text = ''.join(traceback.format_exception(t, value, tb)) reply = dict(message=message, traceback=tb_text) self.finish(json.dumps(reply)) else: return result return wrapper #----------------------------------------------------------------------------- # File handler #----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once _static_paths = {} def initialize(self, path, default_filename=None): if isinstance(path, string_types): path = [path] self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename def compute_etag(self): return 
None @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return '' cls._static_paths[path] = abspath return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) for root in self.root: if (absolute_path + os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ SUPPORTED_METHODS = ['GET'] def get(self): self.redirect(self.request.uri.rstrip('/')) #----------------------------------------------------------------------------- # URL pattern fragments for re-use #----------------------------------------------------------------------------- path_regex = r"(?P<path>(?:/.*)*)" notebook_name_regex = r"(?P<name>[^/]+\.ipynb)" notebook_path_regex = "%s/%s" % (path_regex, notebook_name_regex) #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler) ]
./CrossVul/dataset_final_sorted/CWE-79/py/bad_1645_0
crossvul-python_data_bad_3890_2
404: Not Found
./CrossVul/dataset_final_sorted/CWE-79/py/bad_3890_2
crossvul-python_data_good_5729_0
""" Form Widget classes specific to the Django admin site. """ from __future__ import unicode_literals import copy from django import forms from django.contrib.admin.templatetags.admin_static import static from django.core.urlresolvers import reverse from django.forms.widgets import RadioFieldRenderer from django.forms.util import flatatt from django.utils.html import escape, format_html, format_html_join, smart_urlquote from django.utils.text import Truncator from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.utils.encoding import force_text from django.utils import six class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ @property def media(self): js = ["core.js", "SelectBox.js", "SelectFilter2.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super(FilteredSelectMultiple, self).__init__(attrs, choices) def render(self, name, value, attrs=None, choices=()): if attrs is None: attrs = {} attrs['class'] = 'selectfilter' if self.is_stacked: attrs['class'] += 'stacked' output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)] output.append('<script type="text/javascript">addEvent(window, "load", function(e) {') # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. 
output.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/'))) return mark_safe(''.join(output)) class AdminDateWidget(forms.DateInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vDateField', 'size': '10'} if attrs is not None: final_attrs.update(attrs) super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format) class AdminTimeWidget(forms.TimeInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vTimeField', 'size': '8'} if attrs is not None: final_attrs.update(attrs) super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. 
forms.MultiWidget.__init__(self, widgets, attrs) def format_output(self, rendered_widgets): return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>', _('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]) class AdminRadioFieldRenderer(RadioFieldRenderer): def render(self): """Outputs a <ul> for this set of radio fields.""" return format_html('<ul{0}>\n{1}\n</ul>', flatatt(self.attrs), format_html_join('\n', '<li>{0}</li>', ((force_text(w),) for w in self))) class AdminRadioSelect(forms.RadioSelect): renderer = AdminRadioFieldRenderer class AdminFileWidget(forms.ClearableFileInput): template_with_initial = ('<p class="file-upload">%s</p>' % forms.ClearableFileInput.template_with_initial) template_with_clear = ('<span class="clearable-file-input">%s</span>' % forms.ClearableFileInput.template_with_clear) def url_params_from_lookup_dict(lookups): """ Converts the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): items = [] for k, v in lookups.items(): if isinstance(v, (tuple, list)): v = ','.join([str(x) for x in v]) elif isinstance(v, bool): # See django.db.fields.BooleanField.get_prep_lookup v = ('0', '1')[v] else: v = six.text_type(v) items.append((k, v)) params.update(dict(items)) return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. 
""" def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super(ForeignKeyRawIdWidget, self).__init__(attrs) def render(self, name, value, attrs=None): rel_to = self.rel.to if attrs is None: attrs = {} extra = [] if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse('admin:%s_%s_changelist' % (rel_to._meta.app_label, rel_to._meta.module_name), current_app=self.admin_site.name) params = self.url_parameters() if params: url = '?' + '&amp;'.join(['%s=%s' % (k, v) for k, v in params.items()]) else: url = '' if "class" not in attrs: attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook. # TODO: "lookup_id_" is hard-coded here. This should instead use # the correct API to determine the ID dynamically. extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % (related_url, url, name)) extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>' % (static('admin/img/selector-search.gif'), _('Lookup'))) output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra if value: output.append(self.label_for_value(value)) return mark_safe(''.join(output)) def base_url_parameters(self): return url_params_from_lookup_dict(self.rel.limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.to._default_manager.using(self.db).get(**{key: value}) return '&nbsp;<strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...')) except (ValueError, self.rel.to.DoesNotExist): return '' class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids 
in the "raw_id" interface rather than in a <select multiple> box. """ def render(self, name, value, attrs=None): if attrs is None: attrs = {} if self.rel.to in self.admin_site._registry: # The related object is registered with the same AdminSite attrs['class'] = 'vManyToManyRawIdAdminField' if value: value = ','.join([force_text(v) for v in value]) else: value = '' return super(ManyToManyRawIdWidget, self).render(name, value, attrs) def url_parameters(self): return self.base_url_parameters() def label_for_value(self, value): return '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') def _has_changed(self, initial, data): if initial is None: initial = [] if data is None: data = [] if len(initial) != len(data): return True for pk1, pk2 in zip(initial, data): if force_text(pk1) != force_text(pk2): return True return False class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ def __init__(self, widget, rel, admin_site, can_add_related=None): self.is_hidden = widget.is_hidden self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. 
if can_add_related is None: can_add_related = rel.to in admin_site._registry self.can_add_related = can_add_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def media(self): return self.widget.media def render(self, name, value, *args, **kwargs): rel_to = self.rel.to info = (rel_to._meta.app_label, rel_to._meta.object_name.lower()) self.widget.choices = self.choices output = [self.widget.render(name, value, *args, **kwargs)] if self.can_add_related: related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name) # TODO: "add_id_" is hard-coded here. This should instead use the # correct API to determine the ID dynamically. output.append('<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % (related_url, name)) output.append('<img src="%s" width="10" height="10" alt="%s"/></a>' % (static('admin/img/icon_addlink.gif'), _('Add Another'))) return mark_safe(''.join(output)) def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." 
self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def _has_changed(self, initial, data): return self.widget._has_changed(initial, data) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): final_attrs = {'class': 'vLargeTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextareaWidget, self).__init__(attrs=final_attrs) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextInputWidget, self).__init__(attrs=final_attrs) class AdminURLFieldWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vURLField'} if attrs is not None: final_attrs.update(attrs) super(AdminURLFieldWidget, self).__init__(attrs=final_attrs) def render(self, name, value, attrs=None): html = super(AdminURLFieldWidget, self).render(name, value, attrs) if value: value = force_text(self._format_value(value)) final_attrs = {'href': smart_urlquote(value)} html = format_html( '<p class="url">{0} <a{1}>{2}</a><br />{3} {4}</p>', _('Currently:'), flatatt(final_attrs), value, _('Change:'), html ) return html class AdminIntegerFieldWidget(forms.TextInput): class_name = 'vIntegerField' def __init__(self, attrs=None): final_attrs = {'class': self.class_name} if attrs is not None: final_attrs.update(attrs) super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs) class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField' class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vCommaSeparatedIntegerField'} if attrs is not None: final_attrs.update(attrs) super(AdminCommaSeparatedIntegerFieldWidget, 
self).__init__(attrs=final_attrs)
./CrossVul/dataset_final_sorted/CWE-79/py/good_5729_0
crossvul-python_data_bad_4208_0
# -*- coding: iso-8859-1 -*- """ MoinMoin - site-wide configuration defaults (NOT per single wiki!) @copyright: 2005-2006 MoinMoin:ThomasWaldmann @license: GNU GPL, see COPYING for details. """ import re from MoinMoin import version # unicode: set the char types (upper, lower, digits, spaces) from MoinMoin.util.chartypes import * # List of image types browser do support regulary browser_supported_images = ('gif', 'jpg', 'jpeg', 'png', 'bmp', 'ico', ) # Parser to use mimetype text parser_text_mimetype = ('plain', 'csv', 'rst', 'docbook', 'latex', 'tex', 'html', 'css', 'xml', 'python', 'perl', 'php', 'ruby', 'javascript', 'cplusplus', 'java', 'pascal', 'diff', 'gettext', 'xslt', 'creole', ) # When creating files, we use e.g. 0666 & config.umask for the mode: umask = 0770 # list of acceptable password hashing schemes for cfg.password_scheme, # here we only give reasonably good schemes, which is passlib (if we # have passlib) and ssha (if we only have builtin stuff): password_schemes_configurable = ['{PASSLIB}', '{SSHA}', ] # ordered list of supported password hashing schemes, best (passlib) should be # first, best builtin one should be second. this is what we support if we # encounter it in user profiles: password_schemes_supported = password_schemes_configurable + ['{SHA}', '{APR1}', '{MD5}', '{DES}', ] # Default value for the static stuff URL prefix (css, img, js). # Caution: # * do NOT use this directly, it is only the DEFAULT value to be used by # server Config classes and by multiconfig.py for request.cfg. # * must NOT end with '/'! # * some servers expect '/' at beginning and only 1 level deep. url_prefix_static = '/moin_static' + version.release_short # Threads flag - if you write a moin server that use threads, import # config in the server and set this flag to True. use_threads = False # Charset - we support only 'utf-8'. While older encodings might work, # we don't have the resources to test them, and there is no real # benefit for the user. 
IMPORTANT: use only lowercase 'utf-8'! charset = 'utf-8' # Regex to find lower->upper transitions (word boundaries in WikiNames), used by split_title split_regex = re.compile('([%s])([%s])' % (chars_lower, chars_upper), re.UNICODE) # Invalid characters - invisible characters that should not be in page # names. Prevent user confusion and wiki abuse, e.g u'\u202aFrontPage'. page_invalid_chars_regex = re.compile( ur""" \u0000 | # NULL # Bidi control characters \u202A | # LRE \u202B | # RLE \u202C | # PDF \u202D | # LRM \u202E # RLM """, re.UNICODE | re.VERBOSE ) # used for wikiutil.clean_input clean_input_translation_map = { # these chars will be replaced by blanks ord(u'\t'): u' ', ord(u'\r'): u' ', ord(u'\n'): u' ', } for c in u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f' \ '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f': # these chars will be removed clean_input_translation_map[ord(c)] = None del c # Other stuff url_schemas = ['http', 'https', 'ftp', 'file', 'mailto', 'nntp', 'news', 'ssh', 'telnet', 'irc', 'ircs', 'xmpp', 'mumble', 'webcal', 'ed2k', 'apt', 'rootz', 'gopher', 'notes', 'rtp', 'rtsp', 'rtcp', 'tel', ] smileys = (r"X-( :D <:( :o :( :) B) :)) ;) /!\ <!> (!) :-? :\ >:> |) " + r":-( :-) B-) :-)) ;-) |-) (./) {OK} {X} {i} {1} {2} {3} {*} {o}").split()
./CrossVul/dataset_final_sorted/CWE-79/py/bad_4208_0
crossvul-python_data_bad_1644_1
"""Tornado handlers for cluster web service.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json from tornado import web from ...base.handlers import IPythonHandler #----------------------------------------------------------------------------- # Cluster handlers #----------------------------------------------------------------------------- class MainClusterHandler(IPythonHandler): @web.authenticated def get(self): self.finish(json.dumps(self.cluster_manager.list_profiles())) class ClusterProfileHandler(IPythonHandler): @web.authenticated def get(self, profile): self.finish(json.dumps(self.cluster_manager.profile_info(profile))) class ClusterActionHandler(IPythonHandler): @web.authenticated def post(self, profile, action): cm = self.cluster_manager if action == 'start': n = self.get_argument('n', default=None) if not n: data = cm.start_cluster(profile) else: data = cm.start_cluster(profile, int(n)) if action == 'stop': data = cm.stop_cluster(profile) self.finish(json.dumps(data)) #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- _cluster_action_regex = r"(?P<action>start|stop)" _profile_regex = r"(?P<profile>[^\/]+)" # there is almost no text that is invalid default_handlers = [ (r"/clusters", MainClusterHandler), (r"/clusters/%s/%s" % (_profile_regex, _cluster_action_regex), ClusterActionHandler), (r"/clusters/%s" % _profile_regex, ClusterProfileHandler), ]
./CrossVul/dataset_final_sorted/CWE-79/py/bad_1644_1
crossvul-python_data_bad_5191_3
# -*- coding: utf-8 -*- from __future__ import unicode_literals import os import tempfile from django import forms from django.conf.urls import url from django.contrib import admin from django.contrib.admin import BooleanFieldListFilter from django.contrib.admin.views.main import ChangeList from django.contrib.auth.admin import GroupAdmin, UserAdmin # Register core models we need in our tests from django.contrib.auth.models import Group, User from django.core.exceptions import ValidationError from django.core.files.storage import FileSystemStorage from django.core.mail import EmailMessage from django.core.servers.basehttp import FileWrapper from django.forms.models import BaseModelFormSet from django.http import HttpResponse, StreamingHttpResponse from django.utils.safestring import mark_safe from django.utils.six import StringIO from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Article, BarAccount, Book, Category, Chapter, ChapterXtra1, Child, ChildOfReferer, Choice, City, Collector, Color, Color2, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DependentChild, DooHickey, EmptyModel, EmptyModelHidden, EmptyModelMixin, EmptyModelVisible, ExplicitlyProvidedPK, ExternalSubscriber, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gadget, Gallery, GenRelReference, Grommet, ImplicitlyGeneratedPK, Ingredient, InlineReference, InlineReferer, Inquisition, Language, Link, MainPrepopulated, ModelWithStringPrimaryKey, NotReferenced, OldSubscriber, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PlotProxy, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, PrePopulatedPostLargeSlug, PrePopulatedSubPost, Promo, Question, Recipe, Recommendation, Recommender, ReferencedByGenRel, ReferencedByInline, ReferencedByParent, RelatedPrepopulated, 
RelatedWithUUIDPKModel, Report, Reservation, Restaurant, RowLevelChangePermissionModel, Section, ShortMessage, Simple, Sketch, State, Story, StumpJoke, Subscriber, SuperVillain, Telegram, Thing, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserMessenger, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) def callable_year(dt_value): try: return dt_value.year except AttributeError: return None callable_year.admin_order_field = 'date' class ArticleInline(admin.TabularInline): model = Article fk_name = 'section' prepopulated_fields = { 'title': ('content',) } fieldsets = ( ('Some fields', { 'classes': ('collapse',), 'fields': ('title', 'content') }), ('Some other fields', { 'classes': ('wide',), 'fields': ('date', 'section') }) ) class ChapterInline(admin.TabularInline): model = Chapter class ChapterXtra1Admin(admin.ModelAdmin): list_filter = ('chap', 'chap__title', 'chap__book', 'chap__book__name', 'chap__book__promo', 'chap__book__promo__name',) class ArticleAdmin(admin.ModelAdmin): list_display = ('content', 'date', callable_year, 'model_year', 'modeladmin_year', 'model_year_reversed') list_filter = ('date', 'section') view_on_site = False fieldsets = ( ('Some fields', { 'classes': ('collapse',), 'fields': ('title', 'content') }), ('Some other fields', { 'classes': ('wide',), 'fields': ('date', 'section', 'sub_section') }) ) def changelist_view(self, request): "Test that extra_context works" return super(ArticleAdmin, self).changelist_view( request, extra_context={ 'extra_var': 'Hello!' 
} ) def modeladmin_year(self, obj): return obj.date.year modeladmin_year.admin_order_field = 'date' modeladmin_year.short_description = None def delete_model(self, request, obj): EmailMessage( 'Greetings from a deleted object', 'I hereby inform you that some user deleted me', 'from@example.com', ['to@example.com'] ).send() return super(ArticleAdmin, self).delete_model(request, obj) def save_model(self, request, obj, form, change=True): EmailMessage( 'Greetings from a created object', 'I hereby inform you that some user created me', 'from@example.com', ['to@example.com'] ).send() return super(ArticleAdmin, self).save_model(request, obj, form, change) class ArticleAdmin2(admin.ModelAdmin): def has_module_permission(self, request): return False class RowLevelChangePermissionModelAdmin(admin.ModelAdmin): def has_change_permission(self, request, obj=None): """ Only allow changing objects with even id number """ return request.user.is_staff and (obj is not None) and (obj.id % 2 == 0) class CustomArticleAdmin(admin.ModelAdmin): """ Tests various hooks for using custom templates and contexts. """ change_list_template = 'custom_admin/change_list.html' change_form_template = 'custom_admin/change_form.html' add_form_template = 'custom_admin/add_form.html' object_history_template = 'custom_admin/object_history.html' delete_confirmation_template = 'custom_admin/delete_confirmation.html' delete_selected_confirmation_template = 'custom_admin/delete_selected_confirmation.html' def changelist_view(self, request): "Test that extra_context works" return super(CustomArticleAdmin, self).changelist_view( request, extra_context={ 'extra_var': 'Hello!' } ) class ThingAdmin(admin.ModelAdmin): list_filter = ('color__warm', 'color__value', 'pub_date',) class InquisitionAdmin(admin.ModelAdmin): list_display = ('leader', 'country', 'expected', 'sketch') def sketch(self, obj): # A method with the same name as a reverse accessor. 
return 'list-display-sketch' class SketchAdmin(admin.ModelAdmin): raw_id_fields = ('inquisition', 'defendant0', 'defendant1') class FabricAdmin(admin.ModelAdmin): list_display = ('surface',) list_filter = ('surface',) class BasePersonModelFormSet(BaseModelFormSet): def clean(self): for person_dict in self.cleaned_data: person = person_dict.get('id') alive = person_dict.get('alive') if person and alive and person.name == "Grace Hopper": raise forms.ValidationError("Grace is not a Zombie") class PersonAdmin(admin.ModelAdmin): list_display = ('name', 'gender', 'alive') list_editable = ('gender', 'alive') list_filter = ('gender',) search_fields = ('^name',) save_as = True def get_changelist_formset(self, request, **kwargs): return super(PersonAdmin, self).get_changelist_formset(request, formset=BasePersonModelFormSet, **kwargs) def get_queryset(self, request): # Order by a field that isn't in list display, to be able to test # whether ordering is preserved. return super(PersonAdmin, self).get_queryset(request).order_by('age') class FooAccountAdmin(admin.StackedInline): model = FooAccount extra = 1 class BarAccountAdmin(admin.StackedInline): model = BarAccount extra = 1 class PersonaAdmin(admin.ModelAdmin): inlines = ( FooAccountAdmin, BarAccountAdmin ) class SubscriberAdmin(admin.ModelAdmin): actions = ['mail_admin'] def mail_admin(self, request, selected): EmailMessage( 'Greetings from a ModelAdmin action', 'This is the test email from an admin action', 'from@example.com', ['to@example.com'] ).send() def external_mail(modeladmin, request, selected): EmailMessage( 'Greetings from a function action', 'This is the test email from a function action', 'from@example.com', ['to@example.com'] ).send() external_mail.short_description = 'External mail (Another awesome action)' def redirect_to(modeladmin, request, selected): from django.http import HttpResponseRedirect return HttpResponseRedirect('/some-where-else/') redirect_to.short_description = 'Redirect to (Awesome action)' 
def download(modeladmin, request, selected): buf = StringIO('This is the content of the file') return StreamingHttpResponse(FileWrapper(buf)) download.short_description = 'Download subscription' def no_perm(modeladmin, request, selected): return HttpResponse(content='No permission to perform this action', status=403) no_perm.short_description = 'No permission to run' class ExternalSubscriberAdmin(admin.ModelAdmin): actions = [redirect_to, external_mail, download, no_perm] class PodcastAdmin(admin.ModelAdmin): list_display = ('name', 'release_date') list_editable = ('release_date',) date_hierarchy = 'release_date' ordering = ('name',) class VodcastAdmin(admin.ModelAdmin): list_display = ('name', 'released') list_editable = ('released',) ordering = ('name',) class ChildInline(admin.StackedInline): model = Child class ParentAdmin(admin.ModelAdmin): model = Parent inlines = [ChildInline] list_editable = ('name',) def save_related(self, request, form, formsets, change): super(ParentAdmin, self).save_related(request, form, formsets, change) first_name, last_name = form.instance.name.split() for child in form.instance.child_set.all(): if len(child.name.split()) < 2: child.name = child.name + ' ' + last_name child.save() class EmptyModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return super(EmptyModelAdmin, self).get_queryset(request).filter(pk__gt=1) class OldSubscriberAdmin(admin.ModelAdmin): actions = None temp_storage = FileSystemStorage(tempfile.mkdtemp()) UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload') class PictureInline(admin.TabularInline): model = Picture extra = 1 class GalleryAdmin(admin.ModelAdmin): inlines = [PictureInline] class PictureAdmin(admin.ModelAdmin): pass class LanguageAdmin(admin.ModelAdmin): list_display = ['iso', 'shortlist', 'english_name', 'name'] list_editable = ['shortlist'] class RecommendationAdmin(admin.ModelAdmin): show_full_result_count = False search_fields = ('=titletranslation__text', 
'=recommender__titletranslation__text',) class WidgetInline(admin.StackedInline): model = Widget class DooHickeyInline(admin.StackedInline): model = DooHickey class GrommetInline(admin.StackedInline): model = Grommet class WhatsitInline(admin.StackedInline): model = Whatsit class FancyDoodadInline(admin.StackedInline): model = FancyDoodad class CategoryAdmin(admin.ModelAdmin): list_display = ('id', 'collector', 'order') list_editable = ('order',) class CategoryInline(admin.StackedInline): model = Category class CollectorAdmin(admin.ModelAdmin): inlines = [ WidgetInline, DooHickeyInline, GrommetInline, WhatsitInline, FancyDoodadInline, CategoryInline ] class LinkInline(admin.TabularInline): model = Link extra = 1 readonly_fields = ("posted", "multiline", "readonly_link_content") def multiline(self, instance): return "InlineMultiline\ntest\nstring" class SubPostInline(admin.TabularInline): model = PrePopulatedSubPost prepopulated_fields = { 'subslug': ('subtitle',) } def get_readonly_fields(self, request, obj=None): if obj and obj.published: return ('subslug',) return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): if obj and obj.published: return {} return self.prepopulated_fields class PrePopulatedPostAdmin(admin.ModelAdmin): list_display = ['title', 'slug'] prepopulated_fields = { 'slug': ('title',) } inlines = [SubPostInline] def get_readonly_fields(self, request, obj=None): if obj and obj.published: return ('slug',) return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): if obj and obj.published: return {} return self.prepopulated_fields class PostAdmin(admin.ModelAdmin): list_display = ['title', 'public'] readonly_fields = ( 'posted', 'awesomeness_level', 'coolness', 'value', 'multiline', 'multiline_html', lambda obj: "foo", 'readonly_content', ) inlines = [ LinkInline ] def coolness(self, instance): if instance.pk: return "%d amount of cool." % instance.pk else: return "Unknown coolness." 
class CustomChangeList(ChangeList):
    """ChangeList whose queryset intentionally never matches any row."""

    def get_queryset(self, request):
        # Filter on a primary key that is known not to exist so views can
        # be tested against a guaranteed-empty result set.
        return self.root_queryset.filter(pk=9999)  # Does not exist
class PluggableSearchPersonAdmin(admin.ModelAdmin):
    """
    Admin demonstrating a pluggable get_search_results(): in addition to the
    default name search, a purely numeric search term also matches on exact
    age.
    """
    list_display = ('name', 'age')
    search_fields = ('name',)

    def get_search_results(self, request, queryset, search_term):
        queryset, use_distinct = super(PluggableSearchPersonAdmin, self).get_search_results(
            request, queryset, search_term)
        try:
            search_term_as_int = int(search_term)
        except ValueError:
            # Non-numeric terms simply skip the age lookup. The previous
            # bare ``except:`` swallowed *every* exception here — including
            # KeyboardInterrupt and real database errors from the filter()
            # call below — hiding genuine bugs.
            pass
        else:
            queryset |= self.model.objects.filter(age=search_term_as_int)
        return queryset, use_distinct
def callable_on_unknown(obj):
    """
    list_display callable that dereferences a (normally) missing attribute.

    Used to verify that the admin surfaces the resulting AttributeError
    instead of masking it.
    """
    value = obj.unknown
    return value
class WorkerAdmin(admin.ModelAdmin):
    """Admin providing a callable view_on_site built from the worker's name."""

    def view_on_site(self, obj):
        # Build the public URL from the object's fields instead of relying
        # on a get_absolute_url() method.
        return '/worker/%s/%s/' % (obj.surname, obj.name)
class GetFormsetsArgumentCheckingAdmin(admin.ModelAdmin):
    """Asserts the ``obj`` argument passed to get_formsets_with_inlines()."""
    fields = ['name']

    def add_view(self, request, *args, **kwargs):
        # Tag the request so get_formsets_with_inlines() knows which view
        # invoked it.
        request.is_add_view = True
        return super(GetFormsetsArgumentCheckingAdmin, self).add_view(request, *args, **kwargs)

    def change_view(self, request, *args, **kwargs):
        request.is_add_view = False
        return super(GetFormsetsArgumentCheckingAdmin, self).change_view(request, *args, **kwargs)

    def get_formsets_with_inlines(self, request, obj=None):
        # The contract under test: add_view must pass obj=None and
        # change_view must pass the existing instance.
        if request.is_add_view and obj is not None:
            raise Exception("'obj' passed to get_formsets_with_inlines wasn't None during add_view")
        if not request.is_add_view and obj is None:
            raise Exception("'obj' passed to get_formsets_with_inlines was None during change_view")
        return super(GetFormsetsArgumentCheckingAdmin, self).get_formsets_with_inlines(request, obj)
VodcastAdmin) site.register(Parent, ParentAdmin) site.register(EmptyModel, EmptyModelAdmin) site.register(Fabric, FabricAdmin) site.register(Gallery, GalleryAdmin) site.register(Picture, PictureAdmin) site.register(Language, LanguageAdmin) site.register(Recommendation, RecommendationAdmin) site.register(Recommender) site.register(Collector, CollectorAdmin) site.register(Category, CategoryAdmin) site.register(Post, PostAdmin) site.register(FieldOverridePost, FieldOverridePostAdmin) site.register(Gadget, GadgetAdmin) site.register(Villain) site.register(SuperVillain) site.register(Plot) site.register(PlotDetails) site.register(PlotProxy, PlotReadonlyAdmin) site.register(CyclicOne) site.register(CyclicTwo) site.register(WorkHour, WorkHourAdmin) site.register(Reservation) site.register(FoodDelivery, FoodDeliveryAdmin) site.register(RowLevelChangePermissionModel, RowLevelChangePermissionModelAdmin) site.register(Paper, PaperAdmin) site.register(CoverLetter, CoverLetterAdmin) site.register(ShortMessage, ShortMessageAdmin) site.register(Telegram, TelegramAdmin) site.register(Story, StoryAdmin) site.register(OtherStory, OtherStoryAdmin) site.register(Report, ReportAdmin) site.register(MainPrepopulated, MainPrepopulatedAdmin) site.register(UnorderedObject, UnorderedObjectAdmin) site.register(UndeletableObject, UndeletableObjectAdmin) site.register(UnchangeableObject, UnchangeableObjectAdmin) site.register(State, StateAdmin) site.register(City, CityAdmin) site.register(Restaurant, RestaurantAdmin) site.register(Worker, WorkerAdmin) site.register(FunkyTag, FunkyTagAdmin) site.register(ReferencedByParent) site.register(ChildOfReferer) site.register(ReferencedByInline) site.register(InlineReferer, InlineRefererAdmin) site.register(ReferencedByGenRel) site.register(GenRelReference) # We intentionally register Promo and ChapterXtra1 but not Chapter nor ChapterXtra2. 
# That way we cover all four cases: # related ForeignKey object registered in admin # related ForeignKey object not registered in admin # related OneToOne object registered in admin # related OneToOne object not registered in admin # when deleting Book so as exercise all four troublesome (w.r.t escaping # and calling force_text to avoid problems on Python 2.3) paths through # contrib.admin.utils's get_deleted_objects function. site.register(Book, inlines=[ChapterInline]) site.register(Promo) site.register(ChapterXtra1, ChapterXtra1Admin) site.register(Pizza, PizzaAdmin) site.register(Topping, ToppingAdmin) site.register(Album, AlbumAdmin) site.register(Question) site.register(Answer) site.register(PrePopulatedPost, PrePopulatedPostAdmin) site.register(ComplexSortedPerson, ComplexSortedPersonAdmin) site.register(FilteredManager, CustomManagerAdmin) site.register(PluggableSearchPerson, PluggableSearchPersonAdmin) site.register(PrePopulatedPostLargeSlug, PrePopulatedPostLargeSlugAdmin) site.register(AdminOrderedField, AdminOrderedFieldAdmin) site.register(AdminOrderedModelMethod, AdminOrderedModelMethodAdmin) site.register(AdminOrderedAdminMethod, AdminOrderedAdminMethodAdmin) site.register(AdminOrderedCallable, AdminOrderedCallableAdmin) site.register(Color2, CustomTemplateFilterColorAdmin) site.register(Simple, AttributeErrorRaisingAdmin) site.register(UserMessenger, MessageTestingAdmin) site.register(Choice, ChoiceList) site.register(ParentWithDependentChildren, ParentWithDependentChildrenAdmin) site.register(EmptyModelHidden, EmptyModelHiddenAdmin) site.register(EmptyModelVisible, EmptyModelVisibleAdmin) site.register(EmptyModelMixin, EmptyModelMixinAdmin) site.register(StumpJoke) site.register(Recipe) site.register(Ingredient) site.register(NotReferenced) site.register(ExplicitlyProvidedPK, GetFormsetsArgumentCheckingAdmin) site.register(ImplicitlyGeneratedPK, GetFormsetsArgumentCheckingAdmin) site.register(User, UserAdmin) site.register(Group, GroupAdmin) # Used 
to test URL namespaces site2 = admin.AdminSite(name="namespaced_admin") site2.register(User, UserAdmin) site2.register(Group, GroupAdmin) site2.register(ParentWithUUIDPK) site2.register( RelatedWithUUIDPKModel, list_display=['pk', 'parent'], list_editable=['parent'], raw_id_fields=['parent'], ) site7 = admin.AdminSite(name="admin7") site7.register(Article, ArticleAdmin2)
./CrossVul/dataset_final_sorted/CWE-79/py/bad_5191_3
crossvul-python_data_good_5788_1
def urlquote(url, safe='/'):
    """
    A version of Python's urllib.quote() function that can operate on
    unicode strings. The url is first UTF-8 encoded before quoting. The
    returned string can safely be used as part of an argument to a
    subsequent iri_to_uri() call without double-quoting occurring.
    """
    return force_text(urllib_parse.quote(force_str(url), force_str(safe)))
# Wrapped so that lazy (e.g. translated) strings are quoted lazily too.
urlquote = allow_lazy(urlquote, six.text_type)
def cookie_date(epoch_seconds=None):
    """
    Formats the time to ensure compatibility with Netscape's cookie standard.

    Accepts a floating point number expressed in seconds since the epoch, in
    UTC - such as that outputted by time.time(). If set to None, defaults to
    the current time.

    Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
    """
    stamp = formatdate(epoch_seconds)
    # Splice the RFC 2822 form ("Wdy, DD Mon YYYY HH:MM:SS -0000") into the
    # Netscape cookie form ("Wdy, DD-Mon-YYYY HH:MM:SS GMT").
    weekday_and_day = stamp[:7]
    month = stamp[8:11]
    year_and_time = stamp[12:25]
    return '%s-%s-%s GMT' % (weekday_and_day, month, year_and_time)
def parse_http_date(date):
    """
    Parses a date format as specified by HTTP RFC2616 section 3.3.1.

    The three formats allowed by the RFC are accepted, even if only the first
    one is still in widespread use.

    Returns an integer expressed in seconds since the epoch, in UTC.
    """
    # emails.Util.parsedate does the job for RFC1123 dates; unfortunately
    # RFC2616 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
        m = regex.match(date)
        if m is not None:
            break
    else:
        # No pattern matched: the for/else falls through to here.
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        year = int(m.group('year'))
        if year < 100:
            # RFC850 uses two-digit years; apply the POSIX pivot:
            # 00-69 -> 2000-2069, 70-99 -> 1970-1999.
            if year < 70:
                year += 2000
            else:
                year += 1900
        month = MONTHS.index(m.group('mon').lower()) + 1
        day = int(m.group('day'))
        hour = int(m.group('hour'))
        # NOTE: 'min' shadows the builtin, but only within this scope.
        min = int(m.group('min'))
        sec = int(m.group('sec'))
        result = datetime.datetime(year, month, day, hour, min, sec)
        # timegm() interprets the struct_time as UTC (no local-tz shift).
        return calendar.timegm(result.utctimetuple())
    except Exception:
        # Matched the shape but holds impossible values (e.g. month 13).
        raise ValueError("%r is not a valid date" % date)
def is_safe_url(url, host=None):
    """
    Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
    a different host and uses a safe scheme).

    Always returns ``False`` on an empty url.
    """
    if url is not None:
        # Leading/trailing whitespace can smuggle an otherwise-absolute URL
        # past naive checks; strip before inspecting.
        url = url.strip()
    if not url:
        return False
    # Chrome treats \ completely as / in paths, so a backslash-written host
    # (e.g. "\\evil.com") must also be checked in its forward-slash form.
    return _is_safe_url(url, host) and _is_safe_url(url.replace('\\', '/'), host)


def _is_safe_url(url, host):
    # Single-form safety check; see is_safe_url() for the public contract.
    # Chrome considers any URL with more than two leading slashes to be
    # absolute, but urlparse is not so flexible. Treat such urls as unsafe.
    if url.startswith('///'):
        return False
    url_info = urllib_parse.urlparse(url)
    # Forbid URLs like http:///example.com - with a scheme, but without a
    # hostname: some browsers resolve the path as the host.
    if not url_info.netloc and url_info.scheme:
        return False
    return (not url_info.netloc or url_info.netloc == host) and \
        (not url_info.scheme or url_info.scheme in ['http', 'https'])
./CrossVul/dataset_final_sorted/CWE-79/py/good_5788_1
crossvul-python_data_bad_5190_4
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import os import tempfile import uuid from django.contrib.auth.models import User from django.contrib.contenttypes.fields import ( GenericForeignKey, GenericRelation, ) from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.core.files.storage import FileSystemStorage from django.db import models from django.utils.encoding import python_2_unicode_compatible class Section(models.Model): """ A simple section that links to articles, to test linking to related items in admin views. """ name = models.CharField(max_length=100) @property def name_property(self): """ A property that simply returns the name. Used to test #24461 """ return self.name @python_2_unicode_compatible class Article(models.Model): """ A simple article to test admin views. Test backwards compatibility. """ title = models.CharField(max_length=100) content = models.TextField() date = models.DateTimeField() section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True) another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+') sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+') def __str__(self): return self.title def model_year(self): return self.date.year model_year.admin_order_field = 'date' model_year.short_description = '' def model_year_reversed(self): return self.date.year model_year_reversed.admin_order_field = '-date' model_year_reversed.short_description = '' @python_2_unicode_compatible class Book(models.Model): """ A simple book that has chapters. 
@python_2_unicode_compatible
class ModelWithStringPrimaryKey(models.Model):
    """Model with a CharField primary key, for testing string-pk admin URLs."""
    string_pk = models.CharField(max_length=255, primary_key=True)

    def __str__(self):
        return self.string_pk

    def get_absolute_url(self):
        # Consumed by the admin's "view on site" link.
        return '/dummy/%s/' % self.string_pk
def today_callable_dict():
    """
    Callable limit_choices_to returning a dict; evaluated lazily each time
    the form field's choices are built.
    """
    cutoff = datetime.datetime.today()
    return {"last_action__gte": cutoff}
@python_2_unicode_compatible
class Account(models.Model):
    """
    A simple, generic account encapsulating the information shared by all
    types of accounts.
    """
    username = models.CharField(blank=False, max_length=80)
    persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
    # Subclasses (FooAccount, BarAccount) override this class-level label.
    servicename = 'generic service'

    def __str__(self):
        return "%s: %s" % (self.servicename, self.username)
class Child(models.Model):
    """Inline child of Parent; the FK is not editable in forms."""
    parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
    name = models.CharField(max_length=30, blank=True)

    def clean(self):
        # Sentinel value used by tests to force a model validation error.
        if self.name == '_invalid':
            raise ValidationError('invalid')
def link_posted_default():
    """Default value for Link.posted: exactly one week before today."""
    one_week = datetime.timedelta(days=7)
    return datetime.date.today() - one_week
class FieldOverridePost(Post): class Meta: proxy = True @python_2_unicode_compatible class Gadget(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name @python_2_unicode_compatible class Villain(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class SuperVillain(Villain): pass @python_2_unicode_compatible class FunkyTag(models.Model): "Because we all know there's only one real use case for GFKs." name = models.CharField(max_length=25) content_type = models.ForeignKey(ContentType, models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') def __str__(self): return self.name @python_2_unicode_compatible class Plot(models.Model): name = models.CharField(max_length=100) team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots') contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots') tags = GenericRelation(FunkyTag) def __str__(self): return self.name @python_2_unicode_compatible class PlotDetails(models.Model): details = models.CharField(max_length=100) plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True) def __str__(self): return self.details class PlotProxy(Plot): class Meta: proxy = True @python_2_unicode_compatible class SecretHideout(models.Model): """ Secret! Not registered with the admin! """ location = models.CharField(max_length=100) villain = models.ForeignKey(Villain, models.CASCADE) def __str__(self): return self.location @python_2_unicode_compatible class SuperSecretHideout(models.Model): """ Secret! Not registered with the admin! 
""" location = models.CharField(max_length=100) supervillain = models.ForeignKey(SuperVillain, models.CASCADE) def __str__(self): return self.location @python_2_unicode_compatible class Bookmark(models.Model): name = models.CharField(max_length=60) tag = GenericRelation(FunkyTag, related_query_name='bookmark') def __str__(self): return self.name @python_2_unicode_compatible class CyclicOne(models.Model): name = models.CharField(max_length=25) two = models.ForeignKey('CyclicTwo', models.CASCADE) def __str__(self): return self.name @python_2_unicode_compatible class CyclicTwo(models.Model): name = models.CharField(max_length=25) one = models.ForeignKey(CyclicOne, models.CASCADE) def __str__(self): return self.name class Topping(models.Model): name = models.CharField(max_length=20) class Pizza(models.Model): name = models.CharField(max_length=20) toppings = models.ManyToManyField('Topping', related_name='pizzas') class Album(models.Model): owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True) title = models.CharField(max_length=30) class Employee(Person): code = models.CharField(max_length=20) class WorkHour(models.Model): datum = models.DateField() employee = models.ForeignKey(Employee, models.CASCADE) class Question(models.Model): question = models.CharField(max_length=20) @python_2_unicode_compatible class Answer(models.Model): question = models.ForeignKey(Question, models.PROTECT) answer = models.CharField(max_length=20) def __str__(self): return self.answer class Reservation(models.Model): start_date = models.DateTimeField() price = models.IntegerField() DRIVER_CHOICES = ( ('bill', 'Bill G'), ('steve', 'Steve J'), ) RESTAURANT_CHOICES = ( ('indian', 'A Taste of India'), ('thai', 'Thai Pography'), ('pizza', 'Pizza Mama'), ) class FoodDelivery(models.Model): reference = models.CharField(max_length=100) driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True) restaurant = models.CharField(max_length=100, 
choices=RESTAURANT_CHOICES, blank=True) class Meta: unique_together = (("driver", "restaurant"),) @python_2_unicode_compatible class CoverLetter(models.Model): author = models.CharField(max_length=30) date_written = models.DateField(null=True, blank=True) def __str__(self): return self.author class Paper(models.Model): title = models.CharField(max_length=30) author = models.CharField(max_length=30, blank=True, null=True) class ShortMessage(models.Model): content = models.CharField(max_length=140) timestamp = models.DateTimeField(null=True, blank=True) @python_2_unicode_compatible class Telegram(models.Model): title = models.CharField(max_length=30) date_sent = models.DateField(null=True, blank=True) def __str__(self): return self.title class Story(models.Model): title = models.CharField(max_length=100) content = models.TextField() class OtherStory(models.Model): title = models.CharField(max_length=100) content = models.TextField() class ComplexSortedPerson(models.Model): name = models.CharField(max_length=100) age = models.PositiveIntegerField() is_employee = models.NullBooleanField() class PluggableSearchPerson(models.Model): name = models.CharField(max_length=100) age = models.PositiveIntegerField() class PrePopulatedPostLargeSlug(models.Model): """ Regression test for #15938: a large max_length for the slugfield must not be localized in prepopulated_fields_js.html or it might end up breaking the javascript (ie, using THOUSAND_SEPARATOR ends up with maxLength=1,000) """ title = models.CharField(max_length=100) published = models.BooleanField(default=False) # `db_index=False` because MySQL cannot index large CharField (#21196). 
slug = models.SlugField(max_length=1000, db_index=False) class AdminOrderedField(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) class AdminOrderedModelMethod(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) def some_order(self): return self.order some_order.admin_order_field = 'order' class AdminOrderedAdminMethod(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) class AdminOrderedCallable(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) @python_2_unicode_compatible class Report(models.Model): title = models.CharField(max_length=100) def __str__(self): return self.title class MainPrepopulated(models.Model): name = models.CharField(max_length=100) pubdate = models.DateField() status = models.CharField( max_length=20, choices=(('option one', 'Option One'), ('option two', 'Option Two'))) slug1 = models.SlugField(blank=True) slug2 = models.SlugField(blank=True) slug3 = models.SlugField(blank=True, allow_unicode=True) class RelatedPrepopulated(models.Model): parent = models.ForeignKey(MainPrepopulated, models.CASCADE) name = models.CharField(max_length=75) pubdate = models.DateField() status = models.CharField( max_length=20, choices=(('option one', 'Option One'), ('option two', 'Option Two'))) slug1 = models.SlugField(max_length=50) slug2 = models.SlugField(max_length=60) class UnorderedObject(models.Model): """ Model without any defined `Meta.ordering`. Refs #16819. """ name = models.CharField(max_length=255) bool = models.BooleanField(default=True) class UndeletableObject(models.Model): """ Model whose show_delete in admin change_view has been disabled Refs #10057. """ name = models.CharField(max_length=255) class UnchangeableObject(models.Model): """ Model whose change_view is disabled in admin Refs #20640. 
""" class UserMessenger(models.Model): """ Dummy class for testing message_user functions on ModelAdmin """ class Simple(models.Model): """ Simple model with nothing on it for use in testing """ class Choice(models.Model): choice = models.IntegerField(blank=True, null=True, choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion'))) class ParentWithDependentChildren(models.Model): """ Issue #20522 Model where the validation of child foreign-key relationships depends on validation of the parent """ some_required_info = models.PositiveIntegerField() family_name = models.CharField(max_length=255, blank=False) class DependentChild(models.Model): """ Issue #20522 Model that depends on validation of the parent class for one of its fields to validate during clean """ parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE) family_name = models.CharField(max_length=255) class _Manager(models.Manager): def get_queryset(self): return super(_Manager, self).get_queryset().filter(pk__gt=1) class FilteredManager(models.Model): def __str__(self): return "PK=%d" % self.pk pk_gt_1 = _Manager() objects = models.Manager() class EmptyModelVisible(models.Model): """ See ticket #11277. """ class EmptyModelHidden(models.Model): """ See ticket #11277. """ class EmptyModelMixin(models.Model): """ See ticket #11277. 
""" class State(models.Model): name = models.CharField(max_length=100) class City(models.Model): state = models.ForeignKey(State, models.CASCADE) name = models.CharField(max_length=100) def get_absolute_url(self): return '/dummy/%s/' % self.pk class Restaurant(models.Model): city = models.ForeignKey(City, models.CASCADE) name = models.CharField(max_length=100) def get_absolute_url(self): return '/dummy/%s/' % self.pk class Worker(models.Model): work_at = models.ForeignKey(Restaurant, models.CASCADE) name = models.CharField(max_length=50) surname = models.CharField(max_length=50) # Models for #23329 class ReferencedByParent(models.Model): name = models.CharField(max_length=20, unique=True) class ParentWithFK(models.Model): fk = models.ForeignKey( ReferencedByParent, models.CASCADE, to_field='name', related_name='hidden+', ) class ChildOfReferer(ParentWithFK): pass # Models for #23431 class ReferencedByInline(models.Model): name = models.CharField(max_length=20, unique=True) class InlineReference(models.Model): fk = models.ForeignKey( ReferencedByInline, models.CASCADE, to_field='name', related_name='hidden+', ) class InlineReferer(models.Model): refs = models.ManyToManyField(InlineReference) # Models for #23604 and #23915 class Recipe(models.Model): rname = models.CharField(max_length=20, unique=True) class Ingredient(models.Model): iname = models.CharField(max_length=20, unique=True) recipes = models.ManyToManyField(Recipe, through='RecipeIngredient') class RecipeIngredient(models.Model): ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname') recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname') # Model for #23839 class NotReferenced(models.Model): # Don't point any FK at this model. 
pass # Models for #23934 class ExplicitlyProvidedPK(models.Model): name = models.IntegerField(primary_key=True) class ImplicitlyGeneratedPK(models.Model): name = models.IntegerField(unique=True) # Models for #25622 class ReferencedByGenRel(models.Model): content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') class GenRelReference(models.Model): references = GenericRelation(ReferencedByGenRel) class ParentWithUUIDPK(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) title = models.CharField(max_length=100) def __str__(self): return str(self.id) class RelatedWithUUIDPKModel(models.Model): parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.CASCADE)
./CrossVul/dataset_final_sorted/CWE-79/py/bad_5190_4
crossvul-python_data_good_1091_1
# -*- coding: utf-8 -*- from django.http import HttpRequest, HttpResponse, HttpResponseForbidden, \ HttpResponseNotFound from django.shortcuts import redirect from django.utils.translation import ugettext as _ from zerver.lib.response import json_success, json_error from zerver.lib.upload import upload_message_image_from_request, get_local_file_path, \ get_signed_upload_url, check_upload_within_quota, INLINE_MIME_TYPES from zerver.models import UserProfile, validate_attachment_request from django.conf import settings from sendfile import sendfile from mimetypes import guess_type def serve_s3(request: HttpRequest, url_path: str) -> HttpResponse: uri = get_signed_upload_url(url_path) return redirect(uri) def serve_local(request: HttpRequest, path_id: str) -> HttpResponse: local_path = get_local_file_path(path_id) if local_path is None: return HttpResponseNotFound('<p>File not found</p>') # Here we determine whether a browser should treat the file like # an attachment (and thus clicking a link to it should download) # or like a link (and thus clicking a link to it should display it # in a browser tab). This is controlled by the # Content-Disposition header; `django-sendfile` sends the # attachment-style version of that header if and only if the # attachment argument is passed to it. For attachments, # django-sendfile sets the response['Content-disposition'] like # this: `attachment; filename="b'zulip.txt'"; filename*=UTF-8''zulip.txt`. # # The "filename" field (used to name the file when downloaded) is # unreliable because it doesn't have a well-defined encoding; the # newer filename* field takes precedence, since it uses a # consistent format (urlquoted). 
For more details on filename* # and filename, see the below docs: # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition mimetype, encoding = guess_type(local_path) attachment = mimetype not in INLINE_MIME_TYPES return sendfile(request, local_path, attachment=attachment, mimetype=mimetype, encoding=encoding) def serve_file_backend(request: HttpRequest, user_profile: UserProfile, realm_id_str: str, filename: str) -> HttpResponse: path_id = "%s/%s" % (realm_id_str, filename) is_authorized = validate_attachment_request(user_profile, path_id) if is_authorized is None: return HttpResponseNotFound(_("<p>File not found.</p>")) if not is_authorized: return HttpResponseForbidden(_("<p>You are not authorized to view this file.</p>")) if settings.LOCAL_UPLOADS_DIR is not None: return serve_local(request, path_id) return serve_s3(request, path_id) def upload_file_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: if len(request.FILES) == 0: return json_error(_("You must specify a file to upload")) if len(request.FILES) != 1: return json_error(_("You may only upload one file at a time")) user_file = list(request.FILES.values())[0] file_size = user_file._get_size() if settings.MAX_FILE_UPLOAD_SIZE * 1024 * 1024 < file_size: return json_error(_("Uploaded file is larger than the allowed limit of %s MB") % ( settings.MAX_FILE_UPLOAD_SIZE)) check_upload_within_quota(user_profile.realm, file_size) if not isinstance(user_file.name, str): # It seems that in Python 2 unicode strings containing bytes are # rendered differently than ascii strings containing same bytes. # # Example: # >>> print('\xd3\x92') # Ӓ # >>> print(u'\xd3\x92') # Ó # # This is the cause of the problem as user_file.name variable # is received as a unicode which is converted into unicode # strings containing bytes and is rendered incorrectly. 
# # Example: # >>> import urllib.parse # >>> name = u'%D0%97%D0%B4%D1%80%D0%B0%D0%B2%D0%B5%D0%B8%CC%86%D1%82%D0%B5.txt' # >>> print(urllib.parse.unquote(name)) # Здравейте # This is wrong # # >>> name = '%D0%97%D0%B4%D1%80%D0%B0%D0%B2%D0%B5%D0%B8%CC%86%D1%82%D0%B5.txt' # >>> print(urllib.parse.unquote(name)) # Здравейте.txt # This is correct user_file.name = user_file.name.encode('ascii') uri = upload_message_image_from_request(request, user_file, user_profile) return json_success({'uri': uri})
./CrossVul/dataset_final_sorted/CWE-79/py/good_1091_1
crossvul-python_data_good_1729_0
# coding: utf-8 """A tornado based IPython notebook server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import absolute_import, print_function import base64 import datetime import errno import importlib import io import json import logging import os import random import re import select import signal import socket import ssl import sys import threading import webbrowser from jinja2 import Environment, FileSystemLoader # Install the pyzmq ioloop. This has to be done before anything else from # tornado is imported. from zmq.eventloop import ioloop ioloop.install() # check for tornado 3.1.0 msg = "The IPython Notebook requires tornado >= 4.0" try: import tornado except ImportError: raise ImportError(msg) try: version_info = tornado.version_info except AttributeError: raise ImportError(msg + ", but you have < 1.1.0") if version_info < (4,0): raise ImportError(msg + ", but you have %s" % tornado.version) from tornado import httpserver from tornado import web from tornado.log import LogFormatter, app_log, access_log, gen_log from notebook import ( DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, __version__, ) from .base.handlers import Template404 from .log import log_request from .services.kernels.kernelmanager import MappingKernelManager from .services.config import ConfigManager from .services.contents.manager import ContentsManager from .services.contents.filemanager import FileContentsManager from .services.sessions.sessionmanager import SessionManager from .auth.login import LoginHandler from .auth.logout import LogoutHandler from .base.handlers import FileFindHandler, IPythonHandler from traitlets.config import Config from traitlets.config.application import catch_config_error, boolean_flag from jupyter_core.application import ( JupyterApp, base_flags, base_aliases, ) from jupyter_client import KernelManager from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, 
NATIVE_KERNEL_NAME from jupyter_client.session import Session from nbformat.sign import NotebookNotary from traitlets import ( Dict, Unicode, Integer, List, Bool, Bytes, Instance, TraitError, Type, ) from ipython_genutils import py3compat from IPython.paths import get_ipython_dir from jupyter_core.paths import jupyter_runtime_dir, jupyter_path from notebook._sysinfo import get_sys_info from .utils import url_path_join, check_pid #----------------------------------------------------------------------------- # Module globals #----------------------------------------------------------------------------- _examples = """ ipython notebook # start the notebook ipython notebook --profile=sympy # use the sympy profile ipython notebook --certfile=mycert.pem # use SSL/TLS certificate """ #----------------------------------------------------------------------------- # Helper functions #----------------------------------------------------------------------------- def random_ports(port, n): """Generate a list of n random ports near the given port. The first 5 ports will be sequential, and the remaining n-5 will be randomly selected in the range [port-2*n, port+2*n]. """ for i in range(min(5, n)): yield port + i for i in range(n-5): yield max(1, port + random.randint(-2*n, 2*n)) def load_handlers(name): """Load the (URL pattern, handler) tuples for each component.""" name = 'notebook.' + name mod = __import__(name, fromlist=['default_handlers']) return mod.default_handlers class DeprecationHandler(IPythonHandler): def get(self, url_path): self.set_header("Content-Type", 'text/javascript') self.finish(""" console.warn('`/static/widgets/js` is deprecated. 
Use `/nbextensions/widgets/widgets/js` instead.'); define(['%s'], function(x) { return x; }); """ % url_path_join('nbextensions', 'widgets', 'widgets', url_path.rstrip('.js'))) self.log.warn('Deprecated widget Javascript path /static/widgets/js/*.js was used') #----------------------------------------------------------------------------- # The Tornado web application #----------------------------------------------------------------------------- class NotebookWebApplication(web.Application): def __init__(self, ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options): settings = self.init_settings( ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options) handlers = self.init_handlers(settings) super(NotebookWebApplication, self).__init__(handlers, **settings) def init_settings(self, ipython_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, log, base_url, default_url, settings_overrides, jinja_env_options=None): _template_path = settings_overrides.get( "template_path", ipython_app.template_file_path, ) if isinstance(_template_path, py3compat.string_types): _template_path = (_template_path,) template_path = [os.path.expanduser(path) for path in _template_path] jenv_opt = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) env = Environment(loader=FileSystemLoader(template_path), **jenv_opt) sys_info = get_sys_info() if sys_info['commit_source'] == 'repository': # don't cache (rely on 304) when working from master version_hash = '' else: # reset the cache on server restart version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") settings = dict( # basics log_function=log_request, base_url=base_url, default_url=default_url, template_path=template_path, 
static_path=ipython_app.static_file_path, static_custom_path=ipython_app.static_custom_path, static_handler_class = FileFindHandler, static_url_prefix = url_path_join(base_url,'/static/'), static_handler_args = { # don't cache custom.js 'no_cache_paths': [url_path_join(base_url, 'static', 'custom')], }, version_hash=version_hash, # authentication cookie_secret=ipython_app.cookie_secret, login_url=url_path_join(base_url,'/login'), login_handler_class=ipython_app.login_handler_class, logout_handler_class=ipython_app.logout_handler_class, password=ipython_app.password, # managers kernel_manager=kernel_manager, contents_manager=contents_manager, session_manager=session_manager, kernel_spec_manager=kernel_spec_manager, config_manager=config_manager, # IPython stuff jinja_template_vars=ipython_app.jinja_template_vars, nbextensions_path=ipython_app.nbextensions_path, websocket_url=ipython_app.websocket_url, mathjax_url=ipython_app.mathjax_url, config=ipython_app.config, config_dir=ipython_app.config_dir, jinja2_env=env, terminals_available=False, # Set later if terminals are available ) # allow custom overrides for the tornado web app. settings.update(settings_overrides) return settings def init_handlers(self, settings): """Load the (URL pattern, handler) tuples for each component.""" # Order matters. The first handler to match the URL will handle the request. 
handlers = [] handlers.append((r'/deprecatedwidgets/(.*)', DeprecationHandler)) handlers.extend(load_handlers('tree.handlers')) handlers.extend([(r"/login", settings['login_handler_class'])]) handlers.extend([(r"/logout", settings['logout_handler_class'])]) handlers.extend(load_handlers('files.handlers')) handlers.extend(load_handlers('notebook.handlers')) handlers.extend(load_handlers('nbconvert.handlers')) handlers.extend(load_handlers('kernelspecs.handlers')) handlers.extend(load_handlers('edit.handlers')) handlers.extend(load_handlers('services.api.handlers')) handlers.extend(load_handlers('services.config.handlers')) handlers.extend(load_handlers('services.kernels.handlers')) handlers.extend(load_handlers('services.contents.handlers')) handlers.extend(load_handlers('services.sessions.handlers')) handlers.extend(load_handlers('services.nbconvert.handlers')) handlers.extend(load_handlers('services.kernelspecs.handlers')) handlers.extend(load_handlers('services.security.handlers')) # BEGIN HARDCODED WIDGETS HACK try: import ipywidgets handlers.append( (r"/nbextensions/widgets/(.*)", FileFindHandler, { 'path': ipywidgets.find_static_assets(), 'no_cache_paths': ['/'], # don't cache anything in nbextensions }), ) except: app_log.warn('ipywidgets package not installed. 
Widgets are unavailable.') # END HARDCODED WIDGETS HACK handlers.append( (r"/nbextensions/(.*)", FileFindHandler, { 'path': settings['nbextensions_path'], 'no_cache_paths': ['/'], # don't cache anything in nbextensions }), ) handlers.append( (r"/custom/(.*)", FileFindHandler, { 'path': settings['static_custom_path'], 'no_cache_paths': ['/'], # don't cache anything in custom }) ) # register base handlers last handlers.extend(load_handlers('base.handlers')) # set the URL that will be redirected from `/` handlers.append( (r'/?', web.RedirectHandler, { 'url' : settings['default_url'], 'permanent': False, # want 302, not 301 }) ) # prepend base_url onto the patterns that we match new_handlers = [] for handler in handlers: pattern = url_path_join(settings['base_url'], handler[0]) new_handler = tuple([pattern] + list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through new_handlers.append((r'(.*)', Template404)) return new_handlers class NbserverListApp(JupyterApp): version = __version__ description="List currently running notebook servers in this profile." flags = dict( json=({'NbserverListApp': {'json': True}}, "Produce machine-readable JSON output."), ) json = Bool(False, config=True, help="If True, each line of output will be a JSON object with the " "details from the server info file.") def start(self): if not self.json: print("Currently running servers:") for serverinfo in list_running_servers(self.runtime_dir): if self.json: print(json.dumps(serverinfo)) else: print(serverinfo['url'], "::", serverinfo['notebook_dir']) #----------------------------------------------------------------------------- # Aliases and Flags #----------------------------------------------------------------------------- flags = dict(base_flags) flags['no-browser']=( {'NotebookApp' : {'open_browser' : False}}, "Don't open the notebook in a browser after startup." 
) flags['pylab']=( {'NotebookApp' : {'pylab' : 'warn'}}, "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." ) flags['no-mathjax']=( {'NotebookApp' : {'enable_mathjax' : False}}, """Disable MathJax MathJax is the javascript library IPython uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """ ) # Add notebook manager flags flags.update(boolean_flag('script', 'FileContentsManager.save_script', 'DEPRECATED, IGNORED', 'DEPRECATED, IGNORED')) aliases = dict(base_aliases) aliases.update({ 'ip': 'NotebookApp.ip', 'port': 'NotebookApp.port', 'port-retries': 'NotebookApp.port_retries', 'transport': 'KernelManager.transport', 'keyfile': 'NotebookApp.keyfile', 'certfile': 'NotebookApp.certfile', 'notebook-dir': 'NotebookApp.notebook_dir', 'browser': 'NotebookApp.browser', 'pylab': 'NotebookApp.pylab', }) #----------------------------------------------------------------------------- # NotebookApp #----------------------------------------------------------------------------- class NotebookApp(JupyterApp): name = 'jupyter-notebook' version = __version__ description = """ The Jupyter HTML Notebook. This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client. 
""" examples = _examples aliases = aliases flags = flags classes = [ KernelManager, Session, MappingKernelManager, ContentsManager, FileContentsManager, NotebookNotary, KernelSpecManager, ] flags = Dict(flags) aliases = Dict(aliases) subcommands = dict( list=(NbserverListApp, NbserverListApp.description.splitlines()[0]), ) _log_formatter_cls = LogFormatter def _log_level_default(self): return logging.INFO def _log_datefmt_default(self): """Exclude date from default date format""" return "%H:%M:%S" def _log_format_default(self): """override default log format to include time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # create requested profiles by default, if they don't exist: auto_create = Bool(True) # file to be opened in the notebook server file_to_run = Unicode('', config=True) # Network related information allow_origin = Unicode('', config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. """ ) allow_origin_pat = Unicode('', config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. """ ) allow_credentials = Bool(False, config=True, help="Set the Access-Control-Allow-Credentials: true header" ) default_url = Unicode('/tree', config=True, help="The default URL to redirect to from `/`" ) ip = Unicode('localhost', config=True, help="The IP address the notebook server will listen on." ) def _ip_default(self): """Return localhost if available, 127.0.0.1 otherwise. On some (horribly broken) systems, localhost cannot be bound. 
""" s = socket.socket() try: s.bind(('localhost', 0)) except socket.error as e: self.log.warn("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s", e) return '127.0.0.1' else: s.close() return 'localhost' def _ip_changed(self, name, old, new): if new == u'*': self.ip = u'' port = Integer(8888, config=True, help="The port the notebook server will listen on." ) port_retries = Integer(50, config=True, help="The number of additional ports to try if the specified port is not available." ) certfile = Unicode(u'', config=True, help="""The full path to an SSL/TLS certificate file.""" ) keyfile = Unicode(u'', config=True, help="""The full path to a private key file for usage with SSL/TLS.""" ) cookie_secret_file = Unicode(config=True, help="""The file where the cookie secret is stored.""" ) def _cookie_secret_file_default(self): return os.path.join(self.runtime_dir, 'notebook_cookie_secret') cookie_secret = Bytes(b'', config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the Notebook. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). 
""" ) def _cookie_secret_default(self): if os.path.exists(self.cookie_secret_file): with io.open(self.cookie_secret_file, 'rb') as f: return f.read() else: secret = base64.encodestring(os.urandom(1024)) self._write_cookie_secret_file(secret) return secret def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) with io.open(self.cookie_secret_file, 'wb') as f: f.write(secret) try: os.chmod(self.cookie_secret_file, 0o600) except OSError: self.log.warn( "Could not set permissions on %s", self.cookie_secret_file ) password = Unicode(u'', config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from notebook.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. """ ) open_browser = Bool(True, config=True, help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (NotebookApp.browser) configuration option. """) browser = Unicode(u'', config=True, help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. """) webapp_settings = Dict(config=True, help="DEPRECATED, use tornado_settings" ) def _webapp_settings_changed(self, name, old, new): self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n") self.tornado_settings = new tornado_settings = Dict(config=True, help="Supply overrides for the tornado.web.Application that the " "IPython notebook uses.") ssl_options = Dict(config=True, help="""Supply SSL options for the tornado HTTPServer. 
See the tornado docs for details.""") jinja_environment_options = Dict(config=True, help="Supply extra arguments that will be passed to Jinja environment.") jinja_template_vars = Dict( config=True, help="Extra variables to supply to jinja templates when rendering.", ) enable_mathjax = Bool(True, config=True, help="""Whether to enable MathJax for typesetting math/TeX MathJax is the javascript library IPython uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """ ) def _enable_mathjax_changed(self, name, old, new): """set mathjax url to empty if mathjax is disabled""" if not new: self.mathjax_url = u'' base_url = Unicode('/', config=True, help='''The base URL for the notebook server. Leading and trailing slashes can be omitted, and will automatically be added. ''') def _base_url_changed(self, name, old, new): if not new.startswith('/'): self.base_url = '/'+new elif not new.endswith('/'): self.base_url = new+'/' base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""") def _base_project_url_changed(self, name, old, new): self.log.warn("base_project_url is deprecated, use base_url") self.base_url = new extra_static_paths = List(Unicode(), config=True, help="""Extra paths to search for serving static files. 
This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython""" ) @property def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help="""Path to search for custom.js, css""" ) def _static_custom_path_default(self): return [ os.path.join(d, 'custom') for d in ( self.config_dir, # FIXME: serve IPython profile while we don't have `jupyter migrate` os.path.join(get_ipython_dir(), 'profile_default', 'static'), DEFAULT_STATIC_FILES_PATH) ] extra_template_paths = List(Unicode(), config=True, help="""Extra paths to search for serving jinja templates. Can be used to override templates from notebook.templates.""" ) @property def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST extra_nbextensions_path = List(Unicode(), config=True, help="""extra paths to look for Javascript notebook extensions""" ) @property def nbextensions_path(self): """The path to look for Javascript notebook extensions""" path = self.extra_nbextensions_path + jupyter_path('nbextensions') # FIXME: remove IPython nbextensions path once migration is setup path.append(os.path.join(get_ipython_dir(), 'nbextensions')) return path websocket_url = Unicode("", config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
Should be in the form of an HTTP origin: ws[s]://hostname[:port] """ ) mathjax_url = Unicode("", config=True, help="""The url for MathJax.js.""" ) def _mathjax_url_default(self): if not self.enable_mathjax: return u'' static_url_prefix = self.tornado_settings.get("static_url_prefix", url_path_join(self.base_url, "static") ) return url_path_join(static_url_prefix, 'components', 'MathJax', 'MathJax.js') def _mathjax_url_changed(self, name, old, new): if new and not self.enable_mathjax: # enable_mathjax=False overrides mathjax_url self.mathjax_url = u'' else: self.log.info("Using MathJax: %s", new) contents_manager_class = Type( default_value=FileContentsManager, klass=ContentsManager, config=True, help='The notebook manager class to use.' ) kernel_manager_class = Type( default_value=MappingKernelManager, config=True, help='The kernel manager class to use.' ) session_manager_class = Type( default_value=SessionManager, config=True, help='The session manager class to use.' ) config_manager_class = Type( default_value=ConfigManager, config = True, help='The config manager class to use' ) kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) kernel_spec_manager_class = Type( default_value=KernelSpecManager, config=True, help=""" The kernel spec manager class to use. Should be a subclass of `jupyter_client.kernelspec.KernelSpecManager`. The Api of KernelSpecManager is provisional and might change without warning between this version of IPython and the next stable one. """ ) login_handler_class = Type( default_value=LoginHandler, klass=web.RequestHandler, config=True, help='The login handler class to use.', ) logout_handler_class = Type( default_value=LogoutHandler, klass=web.RequestHandler, config=True, help='The logout handler class to use.', ) trust_xheaders = Bool(False, config=True, help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" "sent by the upstream reverse proxy. 
Necessary if the proxy handles SSL") ) info_file = Unicode() def _info_file_default(self): info_file = "nbserver-%s.json" % os.getpid() return os.path.join(self.runtime_dir, info_file) pylab = Unicode('disabled', config=True, help=""" DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. """ ) def _pylab_changed(self, name, old, new): """when --pylab is specified, display a warning and exit""" if new != 'warn': backend = ' %s' % new else: backend = '' self.log.error("Support for specifying --pylab on the command line has been removed.") self.log.error( "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend) ) self.exit(1) notebook_dir = Unicode(config=True, help="The directory to use for notebooks and kernels." ) def _notebook_dir_default(self): if self.file_to_run: return os.path.dirname(os.path.abspath(self.file_to_run)) else: return py3compat.getcwd() def _notebook_dir_changed(self, name, old, new): """Do a bit of validation of the notebook dir.""" if not os.path.isabs(new): # If we receive a non-absolute path, make it absolute. self.notebook_dir = os.path.abspath(new) return if not os.path.isdir(new): raise TraitError("No such notebook dir: %r" % new) # setting App.notebook_dir implies setting notebook and kernel dirs as well self.config.FileContentsManager.root_dir = new self.config.MappingKernelManager.root_dir = new server_extensions = List(Unicode(), config=True, help=("Python modules to load as notebook server extensions. 
" "This is an experimental API, and may change in future releases.") ) reraise_server_extension_failures = Bool( False, config=True, help="Reraise exceptions encountered loading server extensions?", ) def parse_command_line(self, argv=None): super(NotebookApp, self).parse_command_line(argv) if self.extra_args: arg0 = self.extra_args[0] f = os.path.abspath(arg0) self.argv.remove(arg0) if not os.path.exists(f): self.log.critical("No such file or directory: %s", f) self.exit(1) # Use config here, to ensure that it takes higher priority than # anything that comes from the profile. c = Config() if os.path.isdir(f): c.NotebookApp.notebook_dir = f elif os.path.isfile(f): c.NotebookApp.file_to_run = f self.update_config(c) def init_configurables(self): self.kernel_spec_manager = self.kernel_spec_manager_class( parent=self, ) self.kernel_manager = self.kernel_manager_class( parent=self, log=self.log, connection_dir=self.runtime_dir, kernel_spec_manager=self.kernel_spec_manager, ) self.contents_manager = self.contents_manager_class( parent=self, log=self.log, ) self.session_manager = self.session_manager_class( parent=self, log=self.log, kernel_manager=self.kernel_manager, contents_manager=self.contents_manager, ) self.config_manager = self.config_manager_class( parent=self, log=self.log, config_dir=os.path.join(self.config_dir, 'nbconfig'), ) def init_logging(self): # This prevents double log messages because tornado use a root logger that # self.log is a child of. The logging module dipatches log messages to a log # and all of its ancenstors until propagate is set to False. self.log.propagate = False for log in app_log, access_log, gen_log: # consistent log output name (NotebookApp instead of tornado.access, etc.) 
log.name = self.log.name # hook up tornado 3's loggers to our app handlers logger = logging.getLogger('tornado') logger.propagate = True logger.parent = self.log logger.setLevel(self.log.level) def init_webapp(self): """initialize tornado webapp and httpserver""" self.tornado_settings['allow_origin'] = self.allow_origin if self.allow_origin_pat: self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat) self.tornado_settings['allow_credentials'] = self.allow_credentials # ensure default_url starts with base_url if not self.default_url.startswith(self.base_url): self.default_url = url_path_join(self.base_url, self.default_url) self.web_app = NotebookWebApplication( self, self.kernel_manager, self.contents_manager, self.session_manager, self.kernel_spec_manager, self.config_manager, self.log, self.base_url, self.default_url, self.tornado_settings, self.jinja_environment_options ) ssl_options = self.ssl_options if self.certfile: ssl_options['certfile'] = self.certfile if self.keyfile: ssl_options['keyfile'] = self.keyfile if not ssl_options: # None indicates no SSL config ssl_options = None else: # Disable SSLv3, since its use is discouraged. ssl_options['ssl_version']=ssl.PROTOCOL_TLSv1 self.login_handler_class.validate_security(self, ssl_options=ssl_options) self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, xheaders=self.trust_xheaders) success = None for port in random_ports(self.port, self.port_retries+1): try: self.http_server.listen(port, self.ip) except socket.error as e: if e.errno == errno.EADDRINUSE: self.log.info('The port %i is already in use, trying another random port.' 
% port) continue elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): self.log.warn("Permission to listen on port %i denied" % port) continue else: raise else: self.port = port success = True break if not success: self.log.critical('ERROR: the notebook server could not be started because ' 'no available port could be found.') self.exit(1) @property def display_url(self): ip = self.ip if self.ip else '[all ip addresses on your system]' return self._url(ip) @property def connection_url(self): ip = self.ip if self.ip else 'localhost' return self._url(ip) def _url(self, ip): proto = 'https' if self.certfile else 'http' return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url) def init_terminals(self): try: from .terminal import initialize initialize(self.web_app, self.notebook_dir, self.connection_url) self.web_app.settings['terminals_available'] = True except ImportError as e: log = self.log.debug if sys.platform == 'win32' else self.log.warn log("Terminals not available (error was %s)", e) def init_signal(self): if not sys.platform.startswith('win') and sys.stdin.isatty(): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) if hasattr(signal, 'SIGUSR1'): # Windows doesn't support SIGUSR1 signal.signal(signal.SIGUSR1, self._signal_info) if hasattr(signal, 'SIGINFO'): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) def _handle_sigint(self, sig, frame): """SIGINT handler spawns confirmation dialog""" # register more forceful signal handler for ^C^C case signal.signal(signal.SIGINT, self._signal_stop) # request confirmation dialog in bg thread, to avoid # blocking the App thread = threading.Thread(target=self._confirm_exit) thread.daemon = True thread.start() def _restore_sigint_handler(self): """callback for restoring original SIGINT handler""" signal.signal(signal.SIGINT, self._handle_sigint) def _confirm_exit(self): """confirm shutdown on ^C A second ^C, or answering 
'y' within 5s will cause shutdown, otherwise original SIGINT handler will be restored. This doesn't work on Windows. """ info = self.log.info info('interrupted') print(self.notebook_info()) sys.stdout.write("Shutdown this notebook server (y/[n])? ") sys.stdout.flush() r,w,x = select.select([sys.stdin], [], [], 5) if r: line = sys.stdin.readline() if line.lower().startswith('y') and 'n' not in line.lower(): self.log.critical("Shutdown confirmed") ioloop.IOLoop.current().stop() return else: print("No answer for 5s:", end=' ') print("resuming operation...") # no answer, or answer is no: # set it back to original SIGINT handler # use IOLoop.add_callback because signal.signal must be called # from main thread ioloop.IOLoop.current().add_callback(self._restore_sigint_handler) def _signal_stop(self, sig, frame): self.log.critical("received signal %s, stopping", sig) ioloop.IOLoop.current().stop() def _signal_info(self, sig, frame): print(self.notebook_info()) def init_components(self): """Check the components submodule, and warn if it's unclean""" # TODO: this should still check, but now we use bower, not git submodule pass def init_server_extensions(self): """Load any extensions specified by config. Import the module, then call the load_jupyter_server_extension function, if one exists. The extension API is experimental, and may change in future releases. 
""" for modulename in self.server_extensions: try: mod = importlib.import_module(modulename) func = getattr(mod, 'load_jupyter_server_extension', None) if func is not None: func(self) except Exception: if self.reraise_server_extension_failures: raise self.log.warn("Error loading server extension %s", modulename, exc_info=True) @catch_config_error def initialize(self, argv=None): super(NotebookApp, self).initialize(argv) self.init_logging() self.init_configurables() self.init_components() self.init_webapp() self.init_terminals() self.init_signal() self.init_server_extensions() def cleanup_kernels(self): """Shutdown all kernels. The kernels will shutdown themselves when this process no longer exists, but explicit shutdown allows the KernelManagers to cleanup the connection files. """ self.log.info('Shutting down kernels') self.kernel_manager.shutdown_all() def notebook_info(self): "Return the current working directory and the server url information" info = self.contents_manager.info_string() + "\n" info += "%d active kernels \n" % len(self.kernel_manager._kernels) return info + "The IPython Notebook is running at: %s" % self.display_url def server_info(self): """Return a JSONable dict of information about this server.""" return {'url': self.connection_url, 'hostname': self.ip if self.ip else 'localhost', 'port': self.port, 'secure': bool(self.certfile), 'base_url': self.base_url, 'notebook_dir': os.path.abspath(self.notebook_dir), 'pid': os.getpid() } def write_server_info_file(self): """Write the result of server_info() to the JSON file info_file.""" with open(self.info_file, 'w') as f: json.dump(self.server_info(), f, indent=2) def remove_server_info_file(self): """Remove the nbserver-<pid>.json file created for this server. Ignores the error raised when the file has already been removed. 
""" try: os.unlink(self.info_file) except OSError as e: if e.errno != errno.ENOENT: raise def start(self): """ Start the IPython Notebook server app, after initialization This method takes no arguments so all configuration and initialization must be done prior to calling this method.""" super(NotebookApp, self).start() info = self.log.info for line in self.notebook_info().split("\n"): info(line) info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") self.write_server_info_file() if self.open_browser or self.file_to_run: try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: self.log.warn('No web browser found: %s.' % e) browser = None if self.file_to_run: if not os.path.exists(self.file_to_run): self.log.critical("%s does not exist" % self.file_to_run) self.exit(1) relpath = os.path.relpath(self.file_to_run, self.notebook_dir) uri = url_path_join('notebooks', *relpath.split(os.sep)) else: uri = 'tree' if browser: b = lambda : browser.open(url_path_join(self.connection_url, uri), new=2) threading.Thread(target=b).start() self.io_loop = ioloop.IOLoop.current() if sys.platform.startswith('win'): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop pc = ioloop.PeriodicCallback(lambda : None, 5000) pc.start() try: self.io_loop.start() except KeyboardInterrupt: info("Interrupted...") finally: self.cleanup_kernels() self.remove_server_info_file() def stop(self): def _stop(): self.http_server.stop() self.io_loop.stop() self.io_loop.add_callback(_stop) def list_running_servers(runtime_dir=None): """Iterate over the server info files of running notebook servers. Given a profile name, find nbserver-* files in the security directory of that profile, and yield dicts of their information, each one pertaining to a currently running notebook server instance. 
""" if runtime_dir is None: runtime_dir = jupyter_runtime_dir() # The runtime dir might not exist if not os.path.isdir(runtime_dir): return for file in os.listdir(runtime_dir): if file.startswith('nbserver-'): with io.open(os.path.join(runtime_dir, file), encoding='utf-8') as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field if ('pid' in info) and check_pid(info['pid']): yield info else: # If the process has died, try to delete its info file try: os.unlink(file) except OSError: pass # TODO: This should warn or log or something #----------------------------------------------------------------------------- # Main entry point #----------------------------------------------------------------------------- main = launch_new_instance = NotebookApp.launch_instance
./CrossVul/dataset_final_sorted/CWE-79/py/good_1729_0
crossvul-python_data_good_5190_4
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import os import tempfile import uuid from django.contrib.auth.models import User from django.contrib.contenttypes.fields import ( GenericForeignKey, GenericRelation, ) from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.core.files.storage import FileSystemStorage from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Section(models.Model): """ A simple section that links to articles, to test linking to related items in admin views. """ name = models.CharField(max_length=100) def __str__(self): return self.name @property def name_property(self): """ A property that simply returns the name. Used to test #24461 """ return self.name @python_2_unicode_compatible class Article(models.Model): """ A simple article to test admin views. Test backwards compatibility. """ title = models.CharField(max_length=100) content = models.TextField() date = models.DateTimeField() section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True) another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+') sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+') def __str__(self): return self.title def model_year(self): return self.date.year model_year.admin_order_field = 'date' model_year.short_description = '' def model_year_reversed(self): return self.date.year model_year_reversed.admin_order_field = '-date' model_year_reversed.short_description = '' @python_2_unicode_compatible class Book(models.Model): """ A simple book that has chapters. 
""" name = models.CharField(max_length=100, verbose_name='¿Name?') def __str__(self): return self.name @python_2_unicode_compatible class Promo(models.Model): name = models.CharField(max_length=100, verbose_name='¿Name?') book = models.ForeignKey(Book, models.CASCADE) def __str__(self): return self.name @python_2_unicode_compatible class Chapter(models.Model): title = models.CharField(max_length=100, verbose_name='¿Title?') content = models.TextField() book = models.ForeignKey(Book, models.CASCADE) def __str__(self): return self.title class Meta: # Use a utf-8 bytestring to ensure it works (see #11710) verbose_name = '¿Chapter?' @python_2_unicode_compatible class ChapterXtra1(models.Model): chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?') xtra = models.CharField(max_length=100, verbose_name='¿Xtra?') def __str__(self): return '¿Xtra1: %s' % self.xtra @python_2_unicode_compatible class ChapterXtra2(models.Model): chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?') xtra = models.CharField(max_length=100, verbose_name='¿Xtra?') def __str__(self): return '¿Xtra2: %s' % self.xtra class RowLevelChangePermissionModel(models.Model): name = models.CharField(max_length=100, blank=True) class CustomArticle(models.Model): content = models.TextField() date = models.DateTimeField() @python_2_unicode_compatible class ModelWithStringPrimaryKey(models.Model): string_pk = models.CharField(max_length=255, primary_key=True) def __str__(self): return self.string_pk def get_absolute_url(self): return '/dummy/%s/' % self.string_pk @python_2_unicode_compatible class Color(models.Model): value = models.CharField(max_length=10) warm = models.BooleanField(default=False) def __str__(self): return self.value # we replicate Color to register with another ModelAdmin class Color2(Color): class Meta: proxy = True @python_2_unicode_compatible class Thing(models.Model): title = models.CharField(max_length=20) color = models.ForeignKey(Color, 
models.CASCADE, limit_choices_to={'warm': True}) pub_date = models.DateField(blank=True, null=True) def __str__(self): return self.title @python_2_unicode_compatible class Actor(models.Model): name = models.CharField(max_length=50) age = models.IntegerField() title = models.CharField(max_length=50, null=True, blank=True) def __str__(self): return self.name @python_2_unicode_compatible class Inquisition(models.Model): expected = models.BooleanField(default=False) leader = models.ForeignKey(Actor, models.CASCADE) country = models.CharField(max_length=20) def __str__(self): return "by %s from %s" % (self.leader, self.country) @python_2_unicode_compatible class Sketch(models.Model): title = models.CharField(max_length=100) inquisition = models.ForeignKey( Inquisition, models.CASCADE, limit_choices_to={ 'leader__name': 'Palin', 'leader__age': 27, 'expected': False, }, ) defendant0 = models.ForeignKey( Actor, models.CASCADE, limit_choices_to={'title__isnull': False}, related_name='as_defendant0', ) defendant1 = models.ForeignKey( Actor, models.CASCADE, limit_choices_to={'title__isnull': True}, related_name='as_defendant1', ) def __str__(self): return self.title def today_callable_dict(): return {"last_action__gte": datetime.datetime.today()} def today_callable_q(): return models.Q(last_action__gte=datetime.datetime.today()) @python_2_unicode_compatible class Character(models.Model): username = models.CharField(max_length=100) last_action = models.DateTimeField() def __str__(self): return self.username @python_2_unicode_compatible class StumpJoke(models.Model): variation = models.CharField(max_length=100) most_recently_fooled = models.ForeignKey( Character, models.CASCADE, limit_choices_to=today_callable_dict, related_name="+", ) has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+") def __str__(self): return self.variation class Fabric(models.Model): NG_CHOICES = ( ('Textured', ( ('x', 'Horizontal'), ('y', 'Vertical'), 
)), ('plain', 'Smooth'), ) surface = models.CharField(max_length=20, choices=NG_CHOICES) @python_2_unicode_compatible class Person(models.Model): GENDER_CHOICES = ( (1, "Male"), (2, "Female"), ) name = models.CharField(max_length=100) gender = models.IntegerField(choices=GENDER_CHOICES) age = models.IntegerField(default=21) alive = models.BooleanField(default=True) def __str__(self): return self.name @python_2_unicode_compatible class Persona(models.Model): """ A simple persona associated with accounts, to test inlining of related accounts which inherit from a common accounts class. """ name = models.CharField(blank=False, max_length=80) def __str__(self): return self.name @python_2_unicode_compatible class Account(models.Model): """ A simple, generic account encapsulating the information shared by all types of accounts. """ username = models.CharField(blank=False, max_length=80) persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts") servicename = 'generic service' def __str__(self): return "%s: %s" % (self.servicename, self.username) class FooAccount(Account): """A service-specific account of type Foo.""" servicename = 'foo' class BarAccount(Account): """A service-specific account of type Bar.""" servicename = 'bar' @python_2_unicode_compatible class Subscriber(models.Model): name = models.CharField(blank=False, max_length=80) email = models.EmailField(blank=False, max_length=175) def __str__(self): return "%s (%s)" % (self.name, self.email) class ExternalSubscriber(Subscriber): pass class OldSubscriber(Subscriber): pass class Media(models.Model): name = models.CharField(max_length=60) class Podcast(Media): release_date = models.DateField() class Meta: ordering = ('release_date',) # overridden in PodcastAdmin class Vodcast(Media): media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True) released = models.BooleanField(default=False) class Parent(models.Model): name = models.CharField(max_length=128) def 
clean(self):
        # NOTE(review): this chunk begins mid-definition — the `def ` prefix of
        # this validation method (and its enclosing Parent class) lies before
        # the visible chunk. The sentinel name '_invalid' forces a validation
        # failure on demand.
        if self.name == '_invalid':
            raise ValidationError('invalid')


# The classes below appear to be fixture models exercising the Django admin
# (many docstrings reference admin tickets / change_view behavior) —
# TODO confirm against the enclosing test suite.

class Child(models.Model):
    # editable=False: the FK is set programmatically, not via admin forms.
    parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
    name = models.CharField(max_length=30, blank=True)

    def clean(self):
        # Same sentinel-driven validation failure as the parent model.
        if self.name == '_invalid':
            raise ValidationError('invalid')


@python_2_unicode_compatible
class EmptyModel(models.Model):
    def __str__(self):
        return "Primary key = %s" % self.id


# Module-level upload fixture: a throwaway temp directory backs FileField
# storage for Picture below.
temp_storage = FileSystemStorage(tempfile.mkdtemp())
UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload')


class Gallery(models.Model):
    name = models.CharField(max_length=100)


class Picture(models.Model):
    name = models.CharField(max_length=100)
    image = models.FileField(storage=temp_storage, upload_to='test_upload')
    gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")


class Language(models.Model):
    iso = models.CharField(max_length=5, primary_key=True)
    name = models.CharField(max_length=50)
    english_name = models.CharField(max_length=50)
    shortlist = models.BooleanField(default=False)

    class Meta:
        ordering = ('iso',)


# a base class for Recommender and Recommendation
class Title(models.Model):
    pass


class TitleTranslation(models.Model):
    title = models.ForeignKey(Title, models.CASCADE)
    text = models.CharField(max_length=100)


class Recommender(Title):
    pass


class Recommendation(Title):
    recommender = models.ForeignKey(Recommender, models.CASCADE)


# Collector and its related models give one parent with several kinds of
# child FK (plain, char PK, auto PK, integer PK, inherited).
class Collector(models.Model):
    name = models.CharField(max_length=100)


class Widget(models.Model):
    owner = models.ForeignKey(Collector, models.CASCADE)
    name = models.CharField(max_length=100)


class DooHickey(models.Model):
    code = models.CharField(max_length=10, primary_key=True)
    owner = models.ForeignKey(Collector, models.CASCADE)
    name = models.CharField(max_length=100)


class Grommet(models.Model):
    code = models.AutoField(primary_key=True)
    owner = models.ForeignKey(Collector, models.CASCADE)
    name = models.CharField(max_length=100)


class Whatsit(models.Model):
    index = models.IntegerField(primary_key=True)
    owner = models.ForeignKey(Collector, models.CASCADE)
    name = models.CharField(max_length=100)


class Doodad(models.Model):
    name = models.CharField(max_length=100)


class FancyDoodad(Doodad):
    owner = models.ForeignKey(Collector, models.CASCADE)
    expensive = models.BooleanField(default=True)


@python_2_unicode_compatible
class Category(models.Model):
    collector = models.ForeignKey(Collector, models.CASCADE)
    order = models.PositiveIntegerField()

    class Meta:
        ordering = ('order',)

    def __str__(self):
        return '%s:o%s' % (self.id, self.order)


def link_posted_default():
    """Default `posted` date for Link: one week before today."""
    return datetime.date.today() - datetime.timedelta(days=7)


class Link(models.Model):
    posted = models.DateField(default=link_posted_default)
    url = models.URLField()
    post = models.ForeignKey("Post", models.CASCADE)
    readonly_link_content = models.TextField()


class PrePopulatedPost(models.Model):
    title = models.CharField(max_length=100)
    published = models.BooleanField(default=False)
    slug = models.SlugField()


class PrePopulatedSubPost(models.Model):
    post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
    subtitle = models.CharField(max_length=100)
    subslug = models.SlugField()


class Post(models.Model):
    title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
    content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
    readonly_content = models.TextField()
    posted = models.DateField(
        default=datetime.date.today,
        help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
    )
    public = models.NullBooleanField()

    def awesomeness_level(self):
        return "Very awesome."


# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
    class Meta:
        proxy = True


@python_2_unicode_compatible
class Gadget(models.Model):
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Villain(models.Model):
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name


class SuperVillain(Villain):
    pass


@python_2_unicode_compatible
class FunkyTag(models.Model):
    "Because we all know there's only one real use case for GFKs."
    name = models.CharField(max_length=25)
    content_type = models.ForeignKey(ContentType, models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Plot(models.Model):
    name = models.CharField(max_length=100)
    # Two FKs to the same model, disambiguated via related_name.
    team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots')
    contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots')
    tags = GenericRelation(FunkyTag)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class PlotDetails(models.Model):
    details = models.CharField(max_length=100)
    plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)

    def __str__(self):
        return self.details


class PlotProxy(Plot):
    class Meta:
        proxy = True


@python_2_unicode_compatible
class SecretHideout(models.Model):
    """
    Secret! Not registered with the admin!
    """
    location = models.CharField(max_length=100)
    villain = models.ForeignKey(Villain, models.CASCADE)

    def __str__(self):
        return self.location


@python_2_unicode_compatible
class SuperSecretHideout(models.Model):
    """
    Secret! Not registered with the admin!
    """
    location = models.CharField(max_length=100)
    supervillain = models.ForeignKey(SuperVillain, models.CASCADE)

    def __str__(self):
        return self.location


@python_2_unicode_compatible
class Bookmark(models.Model):
    name = models.CharField(max_length=60)
    tag = GenericRelation(FunkyTag, related_query_name='bookmark')

    def __str__(self):
        return self.name


# CyclicOne / CyclicTwo form a deliberate FK cycle (one references the other
# by string, the other directly).
@python_2_unicode_compatible
class CyclicOne(models.Model):
    name = models.CharField(max_length=25)
    two = models.ForeignKey('CyclicTwo', models.CASCADE)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class CyclicTwo(models.Model):
    name = models.CharField(max_length=25)
    one = models.ForeignKey(CyclicOne, models.CASCADE)

    def __str__(self):
        return self.name


class Topping(models.Model):
    name = models.CharField(max_length=20)


class Pizza(models.Model):
    name = models.CharField(max_length=20)
    toppings = models.ManyToManyField('Topping', related_name='pizzas')


class Album(models.Model):
    owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
    title = models.CharField(max_length=30)


class Employee(Person):
    code = models.CharField(max_length=20)


class WorkHour(models.Model):
    datum = models.DateField()
    employee = models.ForeignKey(Employee, models.CASCADE)


class Question(models.Model):
    question = models.CharField(max_length=20)


@python_2_unicode_compatible
class Answer(models.Model):
    # PROTECT (not CASCADE): deleting a Question with Answers raises.
    question = models.ForeignKey(Question, models.PROTECT)
    answer = models.CharField(max_length=20)

    def __str__(self):
        return self.answer


class Reservation(models.Model):
    start_date = models.DateTimeField()
    price = models.IntegerField()


DRIVER_CHOICES = (
    ('bill', 'Bill G'),
    ('steve', 'Steve J'),
)

RESTAURANT_CHOICES = (
    ('indian', 'A Taste of India'),
    ('thai', 'Thai Pography'),
    ('pizza', 'Pizza Mama'),
)


class FoodDelivery(models.Model):
    reference = models.CharField(max_length=100)
    driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
    restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True)

    class Meta:
        unique_together = (("driver", "restaurant"),)


@python_2_unicode_compatible
class CoverLetter(models.Model):
    author = models.CharField(max_length=30)
    date_written = models.DateField(null=True, blank=True)

    def __str__(self):
        return self.author


class Paper(models.Model):
    title = models.CharField(max_length=30)
    author = models.CharField(max_length=30, blank=True, null=True)


class ShortMessage(models.Model):
    content = models.CharField(max_length=140)
    timestamp = models.DateTimeField(null=True, blank=True)


@python_2_unicode_compatible
class Telegram(models.Model):
    title = models.CharField(max_length=30)
    date_sent = models.DateField(null=True, blank=True)

    def __str__(self):
        return self.title


class Story(models.Model):
    title = models.CharField(max_length=100)
    content = models.TextField()


class OtherStory(models.Model):
    title = models.CharField(max_length=100)
    content = models.TextField()


class ComplexSortedPerson(models.Model):
    name = models.CharField(max_length=100)
    age = models.PositiveIntegerField()
    is_employee = models.NullBooleanField()


class PluggableSearchPerson(models.Model):
    name = models.CharField(max_length=100)
    age = models.PositiveIntegerField()


class PrePopulatedPostLargeSlug(models.Model):
    """
    Regression test for #15938: a large max_length for the slugfield
    must not be localized in prepopulated_fields_js.html or it might end up
    breaking the javascript (ie, using THOUSAND_SEPARATOR ends up with
    maxLength=1,000)
    """
    title = models.CharField(max_length=100)
    published = models.BooleanField(default=False)
    # `db_index=False` because MySQL cannot index large CharField (#21196).
    slug = models.SlugField(max_length=1000, db_index=False)


class AdminOrderedField(models.Model):
    order = models.IntegerField()
    stuff = models.CharField(max_length=200)


class AdminOrderedModelMethod(models.Model):
    order = models.IntegerField()
    stuff = models.CharField(max_length=200)

    def some_order(self):
        return self.order
    # Lets the admin sort the method column by the underlying field.
    some_order.admin_order_field = 'order'


class AdminOrderedAdminMethod(models.Model):
    order = models.IntegerField()
    stuff = models.CharField(max_length=200)


class AdminOrderedCallable(models.Model):
    order = models.IntegerField()
    stuff = models.CharField(max_length=200)


@python_2_unicode_compatible
class Report(models.Model):
    title = models.CharField(max_length=100)

    def __str__(self):
        return self.title


class MainPrepopulated(models.Model):
    name = models.CharField(max_length=100)
    pubdate = models.DateField()
    status = models.CharField(
        max_length=20,
        choices=(('option one', 'Option One'),
                 ('option two', 'Option Two')))
    slug1 = models.SlugField(blank=True)
    slug2 = models.SlugField(blank=True)
    slug3 = models.SlugField(blank=True, allow_unicode=True)


class RelatedPrepopulated(models.Model):
    parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
    name = models.CharField(max_length=75)
    pubdate = models.DateField()
    status = models.CharField(
        max_length=20,
        choices=(('option one', 'Option One'),
                 ('option two', 'Option Two')))
    slug1 = models.SlugField(max_length=50)
    slug2 = models.SlugField(max_length=60)


class UnorderedObject(models.Model):
    """
    Model without any defined `Meta.ordering`.
    Refs #16819.
    """
    name = models.CharField(max_length=255)
    bool = models.BooleanField(default=True)


class UndeletableObject(models.Model):
    """
    Model whose show_delete in admin change_view has been disabled
    Refs #10057.
    """
    name = models.CharField(max_length=255)


class UnchangeableObject(models.Model):
    """
    Model whose change_view is disabled in admin
    Refs #20640.
    """


class UserMessenger(models.Model):
    """
    Dummy class for testing message_user functions on ModelAdmin
    """


class Simple(models.Model):
    """
    Simple model with nothing on it for use in testing
    """


class Choice(models.Model):
    choice = models.IntegerField(blank=True, null=True,
                                 choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')))


class ParentWithDependentChildren(models.Model):
    """
    Issue #20522
    Model where the validation of child foreign-key relationships depends
    on validation of the parent
    """
    some_required_info = models.PositiveIntegerField()
    family_name = models.CharField(max_length=255, blank=False)


class DependentChild(models.Model):
    """
    Issue #20522
    Model that depends on validation of the parent class for one of its
    fields to validate during clean
    """
    parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
    family_name = models.CharField(max_length=255)


class _Manager(models.Manager):
    # Custom manager that hides the row with pk == 1.
    def get_queryset(self):
        return super(_Manager, self).get_queryset().filter(pk__gt=1)


class FilteredManager(models.Model):
    def __str__(self):
        return "PK=%d" % self.pk

    # First manager declared becomes the default; objects is kept as the
    # unfiltered escape hatch.
    pk_gt_1 = _Manager()
    objects = models.Manager()


class EmptyModelVisible(models.Model):
    """ See ticket #11277. """


class EmptyModelHidden(models.Model):
    """ See ticket #11277. """


class EmptyModelMixin(models.Model):
    """ See ticket #11277. """


class State(models.Model):
    name = models.CharField(max_length=100)


class City(models.Model):
    state = models.ForeignKey(State, models.CASCADE)
    name = models.CharField(max_length=100)

    def get_absolute_url(self):
        return '/dummy/%s/' % self.pk


class Restaurant(models.Model):
    city = models.ForeignKey(City, models.CASCADE)
    name = models.CharField(max_length=100)

    def get_absolute_url(self):
        return '/dummy/%s/' % self.pk


class Worker(models.Model):
    work_at = models.ForeignKey(Restaurant, models.CASCADE)
    name = models.CharField(max_length=50)
    surname = models.CharField(max_length=50)


# Models for #23329
class ReferencedByParent(models.Model):
    name = models.CharField(max_length=20, unique=True)


class ParentWithFK(models.Model):
    fk = models.ForeignKey(
        ReferencedByParent,
        models.CASCADE,
        to_field='name',
        related_name='hidden+',
    )


class ChildOfReferer(ParentWithFK):
    pass


# Models for #23431
class ReferencedByInline(models.Model):
    name = models.CharField(max_length=20, unique=True)


class InlineReference(models.Model):
    fk = models.ForeignKey(
        ReferencedByInline,
        models.CASCADE,
        to_field='name',
        related_name='hidden+',
    )


class InlineReferer(models.Model):
    refs = models.ManyToManyField(InlineReference)


# Models for #23604 and #23915
class Recipe(models.Model):
    rname = models.CharField(max_length=20, unique=True)


class Ingredient(models.Model):
    iname = models.CharField(max_length=20, unique=True)
    recipes = models.ManyToManyField(Recipe, through='RecipeIngredient')


class RecipeIngredient(models.Model):
    ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
    recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')


# Model for #23839
class NotReferenced(models.Model):
    # Don't point any FK at this model.
    pass


# Models for #23934
class ExplicitlyProvidedPK(models.Model):
    name = models.IntegerField(primary_key=True)


class ImplicitlyGeneratedPK(models.Model):
    name = models.IntegerField(unique=True)


# Models for #25622
class ReferencedByGenRel(models.Model):
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')


class GenRelReference(models.Model):
    references = GenericRelation(ReferencedByGenRel)


class ParentWithUUIDPK(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    title = models.CharField(max_length=100)

    def __str__(self):
        return str(self.id)


class RelatedWithUUIDPKModel(models.Model):
    parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.CASCADE)
./CrossVul/dataset_final_sorted/CWE-79/py/good_5190_4
crossvul-python_data_good_1644_4
"""Tornado handlers for kernels.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json import logging from tornado import gen, web from tornado.concurrent import Future from tornado.ioloop import IOLoop from IPython.utils.jsonutil import date_default from IPython.utils.py3compat import cast_unicode from IPython.html.utils import url_path_join, url_escape from ...base.handlers import IPythonHandler, APIHandler, json_errors from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message from IPython.core.release import kernel_protocol_version class MainKernelHandler(APIHandler): @web.authenticated @json_errors def get(self): km = self.kernel_manager self.finish(json.dumps(km.list_kernels())) @web.authenticated @json_errors def post(self): km = self.kernel_manager model = self.get_json_body() if model is None: model = { 'name': km.default_kernel_name } else: model.setdefault('name', km.default_kernel_name) kernel_id = km.start_kernel(kernel_name=model['name']) model = km.kernel_model(kernel_id) location = url_path_join(self.base_url, 'api', 'kernels', kernel_id) self.set_header('Location', url_escape(location)) self.set_status(201) self.finish(json.dumps(model)) class KernelHandler(APIHandler): SUPPORTED_METHODS = ('DELETE', 'GET') @web.authenticated @json_errors def get(self, kernel_id): km = self.kernel_manager km._check_kernel_id(kernel_id) model = km.kernel_model(kernel_id) self.finish(json.dumps(model)) @web.authenticated @json_errors def delete(self, kernel_id): km = self.kernel_manager km.shutdown_kernel(kernel_id) self.set_status(204) self.finish() class KernelActionHandler(APIHandler): @web.authenticated @json_errors def post(self, kernel_id, action): km = self.kernel_manager if action == 'interrupt': km.interrupt_kernel(kernel_id) self.set_status(204) if action == 'restart': km.restart_kernel(kernel_id) model = km.kernel_model(kernel_id) self.set_header('Location', 
'{0}api/kernels/{1}'.format(self.base_url, kernel_id)) self.write(json.dumps(model)) self.finish() class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): @property def kernel_info_timeout(self): return self.settings.get('kernel_info_timeout', 10) def __repr__(self): return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized')) def create_stream(self): km = self.kernel_manager identity = self.session.bsession for channel in ('shell', 'iopub', 'stdin'): meth = getattr(km, 'connect_' + channel) self.channels[channel] = stream = meth(self.kernel_id, identity=identity) stream.channel = channel km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead') def request_kernel_info(self): """send a request for kernel_info""" km = self.kernel_manager kernel = km.get_kernel(self.kernel_id) try: # check for previous request future = kernel._kernel_info_future except AttributeError: self.log.debug("Requesting kernel info from %s", self.kernel_id) # Create a kernel_info channel to query the kernel protocol version. # This channel will be closed after the kernel_info reply is received. 
if self.kernel_info_channel is None: self.kernel_info_channel = km.connect_shell(self.kernel_id) self.kernel_info_channel.on_recv(self._handle_kernel_info_reply) self.session.send(self.kernel_info_channel, "kernel_info_request") # store the future on the kernel, so only one request is sent kernel._kernel_info_future = self._kernel_info_future else: if not future.done(): self.log.debug("Waiting for pending kernel_info request") future.add_done_callback(lambda f: self._finish_kernel_info(f.result())) return self._kernel_info_future def _handle_kernel_info_reply(self, msg): """process the kernel_info_reply enabling msg spec adaptation, if necessary """ idents,msg = self.session.feed_identities(msg) try: msg = self.session.deserialize(msg) except: self.log.error("Bad kernel_info reply", exc_info=True) self._kernel_info_future.set_result({}) return else: info = msg['content'] self.log.debug("Received kernel info: %s", info) if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info: self.log.error("Kernel info request failed, assuming current %s", info) info = {} self._finish_kernel_info(info) # close the kernel_info channel, we don't need it anymore if self.kernel_info_channel: self.kernel_info_channel.close() self.kernel_info_channel = None def _finish_kernel_info(self, info): """Finish handling kernel_info reply Set up protocol adaptation, if needed, and signal that connection can continue. 
""" protocol_version = info.get('protocol_version', kernel_protocol_version) if protocol_version != kernel_protocol_version: self.session.adapt_version = int(protocol_version.split('.')[0]) self.log.info("Adapting to protocol v%s for kernel %s", protocol_version, self.kernel_id) if not self._kernel_info_future.done(): self._kernel_info_future.set_result(info) def initialize(self): super(ZMQChannelsHandler, self).initialize() self.zmq_stream = None self.channels = {} self.kernel_id = None self.kernel_info_channel = None self._kernel_info_future = Future() @gen.coroutine def pre_get(self): # authenticate first super(ZMQChannelsHandler, self).pre_get() # then request kernel info, waiting up to a certain time before giving up. # We don't want to wait forever, because browsers don't take it well when # servers never respond to websocket connection requests. kernel = self.kernel_manager.get_kernel(self.kernel_id) self.session.key = kernel.session.key future = self.request_kernel_info() def give_up(): """Don't wait forever for the kernel to reply""" if future.done(): return self.log.warn("Timeout waiting for kernel_info reply from %s", self.kernel_id) future.set_result({}) loop = IOLoop.current() loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) # actually wait for it yield future @gen.coroutine def get(self, kernel_id): self.kernel_id = cast_unicode(kernel_id, 'ascii') yield super(ZMQChannelsHandler, self).get(kernel_id=kernel_id) def open(self, kernel_id): super(ZMQChannelsHandler, self).open() try: self.create_stream() except web.HTTPError as e: self.log.error("Error opening stream: %s", e) # WebSockets don't response to traditional error codes so we # close the connection. 
for channel, stream in self.channels.items(): if not stream.closed(): stream.close() self.close() else: for channel, stream in self.channels.items(): stream.on_recv_stream(self._on_zmq_reply) def on_message(self, msg): if not self.channels: # already closed, ignore the message self.log.debug("Received message on closed websocket %r", msg) return if isinstance(msg, bytes): msg = deserialize_binary_message(msg) else: msg = json.loads(msg) channel = msg.pop('channel', None) if channel is None: self.log.warn("No channel specified, assuming shell: %s", msg) channel = 'shell' if channel not in self.channels: self.log.warn("No such channel: %r", channel) return stream = self.channels[channel] self.session.send(stream, msg) def on_close(self): km = self.kernel_manager if self.kernel_id in km: km.remove_restart_callback( self.kernel_id, self.on_kernel_restarted, ) km.remove_restart_callback( self.kernel_id, self.on_restart_failed, 'dead', ) # This method can be called twice, once by self.kernel_died and once # from the WebSocket close event. If the WebSocket connection is # closed before the ZMQ streams are setup, they could be None. 
for channel, stream in self.channels.items(): if stream is not None and not stream.closed(): stream.on_recv(None) # close the socket directly, don't wait for the stream socket = stream.socket stream.close() socket.close() self.channels = {} def _send_status_message(self, status): msg = self.session.msg("status", {'execution_state': status} ) msg['channel'] = 'iopub' self.write_message(json.dumps(msg, default=date_default)) def on_kernel_restarted(self): logging.warn("kernel %s restarted", self.kernel_id) self._send_status_message('restarting') def on_restart_failed(self): logging.error("kernel %s restarted failed!", self.kernel_id) self._send_status_message('dead') #----------------------------------------------------------------------------- # URL to handler mappings #----------------------------------------------------------------------------- _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)" _kernel_action_regex = r"(?P<action>restart|interrupt)" default_handlers = [ (r"/api/kernels", MainKernelHandler), (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler), (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler), (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler), ]
./CrossVul/dataset_final_sorted/CWE-79/py/good_1644_4
crossvul-python_data_good_1644_10
import json

from tornado import web, gen

from ..base.handlers import APIHandler, json_errors
from ..utils import url_path_join


class TerminalRootHandler(APIHandler):
    """REST handler for the terminal collection (/api/terminals)."""

    @web.authenticated
    @json_errors
    def get(self):
        """List every running terminal as ``[{'name': ...}, ...]``."""
        manager = self.terminal_manager
        listing = [{'name': term_name} for term_name in manager.terminals]
        self.finish(json.dumps(listing))

    @web.authenticated
    @json_errors
    def post(self):
        """POST /terminals creates a new terminal and redirects to it"""
        name, _ = self.terminal_manager.new_named_terminal()
        self.finish(json.dumps({'name': name}))


class TerminalHandler(APIHandler):
    """REST handler for a single named terminal."""

    SUPPORTED_METHODS = ('GET', 'DELETE')

    @web.authenticated
    @json_errors
    def get(self, name):
        """Return the terminal's model, or 404 if it is not running."""
        manager = self.terminal_manager
        if name not in manager.terminals:
            raise web.HTTPError(404, "Terminal not found: %r" % name)
        self.finish(json.dumps({'name': name}))

    @web.authenticated
    @json_errors
    @gen.coroutine
    def delete(self, name):
        """Forcefully terminate the named terminal; 204 on success, 404 if unknown."""
        manager = self.terminal_manager
        if name not in manager.terminals:
            raise web.HTTPError(404, "Terminal not found: %r" % name)
        yield manager.terminate(name, force=True)
        self.set_status(204)
        self.finish()
./CrossVul/dataset_final_sorted/CWE-79/py/good_1644_10
crossvul-python_data_bad_1644_0
"""Base Tornado handlers for the notebook server.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import functools import json import logging import os import re import sys import traceback try: # py3 from http.client import responses except ImportError: from httplib import responses from jinja2 import TemplateNotFound from tornado import web from tornado import gen from tornado.log import app_log import IPython from IPython.utils.sysinfo import get_sys_info from IPython.config import Application from IPython.utils.path import filefind from IPython.utils.py3compat import string_types from IPython.html.utils import is_hidden, url_path_join, url_escape from IPython.html.services.security import csp_report_uri #----------------------------------------------------------------------------- # Top-level handlers #----------------------------------------------------------------------------- non_alphanum = re.compile(r'[^A-Za-z0-9]') sys_info = json.dumps(get_sys_info()) class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" def set_default_headers(self): headers = self.settings.get('headers', {}) if "Content-Security-Policy" not in headers: headers["Content-Security-Policy"] = ( "frame-ancestors 'self'; " # Make sure the report-uri is relative to the base_url "report-uri " + url_path_join(self.base_url, csp_report_uri) + ";" ) # Allow for overriding headers for header_name,value in headers.items() : try: self.set_header(header_name, value) except Exception as e: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) self.log.debug(e) def clear_login_cookie(self): self.clear_cookie(self.cookie_name) def get_current_user(self): if self.login_handler is None: return 'anonymous' return self.login_handler.get_user(self) @property def cookie_name(self): default_cookie_name = 
non_alphanum.sub('-', 'username-{}'.format( self.request.host )) return self.settings.get('cookie_name', default_cookie_name) @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() return (user and not user == 'anonymous') @property def login_handler(self): """Return the login handler for this application, if any.""" return self.settings.get('login_handler_class', None) @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ if self.login_handler is None: return False return bool(self.login_handler.login_available(self.settings)) class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling Mostly property shortcuts to IPython-specific settings. """ @property def config(self): return self.settings.get('config', None) @property def log(self): """use the IPython log by default, falling back on tornado's logger""" if Application.initialized(): return Application.instance().log else: return app_log @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get('jinja_template_vars', {}) #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get('version_hash', '') @property def mathjax_url(self): return self.settings.get('mathjax_url', '') @property def base_url(self): return self.settings.get('base_url', '/') @property def default_url(self): return self.settings.get('default_url', '') @property def ws_url(self): return self.settings.get('websocket_url', '') @property def contents_js_source(self): self.log.debug("Using contents: %s", self.settings.get('contents_js_source', 'services/contents')) return 
self.settings.get('contents_js_source', 'services/contents') #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- @property def kernel_manager(self): return self.settings['kernel_manager'] @property def contents_manager(self): return self.settings['contents_manager'] @property def cluster_manager(self): return self.settings['cluster_manager'] @property def session_manager(self): return self.settings['session_manager'] @property def terminal_manager(self): return self.settings['terminal_manager'] @property def kernel_spec_manager(self): return self.settings['kernel_spec_manager'] @property def config_manager(self): return self.settings['config_manager'] #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) def set_default_headers(self): """Add CORS headers, if defined""" super(IPythonHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". 
if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, login_available=self.login_available, static_url=self.static_url, sys_info=sys_info, contents_js_source=self.contents_js_source, version_hash=self.version_hash, **self.jinja_template_vars ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
        # --- tail of a JSON-body-parsing method; its `def` line precedes this
        # chunk.  Decodes the raw request body as UTF-8 and parses it as JSON,
        # mapping any decode/parse failure to HTTP 400. ---
        body = self.request.body.strip().decode(u'utf-8')
        try:
            model = json.loads(body)
        except Exception:
            self.log.debug("Bad JSON: %r", body)
            self.log.error("Couldn't parse JSON", exc_info=True)
            raise web.HTTPError(400, u'Invalid JSON in body of request')
        return model

    def write_error(self, status_code, **kwargs):
        """render custom error pages"""
        exc_info = kwargs.get('exc_info')
        message = ''
        status_message = responses.get(status_code, 'Unknown HTTP Error')
        if exc_info:
            exception = exc_info[1]
            # get the custom message, if defined
            try:
                message = exception.log_message % exception.args
            except Exception:
                pass

            # construct the custom reason, if defined
            reason = getattr(exception, 'reason', '')
            if reason:
                status_message = reason

        # build template namespace
        # NOTE(review): `exception` is only bound inside the `if exc_info:`
        # branch above; if this method is ever invoked without exc_info the
        # dict() below raises NameError. Consider defaulting `exception`
        # before the branch — TODO confirm against upstream.
        ns = dict(
            status_code=status_code,
            status_message=status_message,
            message=message,
            exception=exception,
        )

        self.set_header('Content-Type', 'text/html')
        # render the template
        try:
            html = self.render_template('%s.html' % status_code, **ns)
        except TemplateNotFound:
            # fall back to the generic error page when no status-specific
            # template (e.g. 404.html) exists
            self.log.debug("No template for %d", status_code)
            html = self.render_template('error.html', **ns)

        self.write(html)


class Template404(IPythonHandler):
    """Render our 404 template"""
    def prepare(self):
        raise web.HTTPError(404)


class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler):
    """static files should only be accessible when logged in"""

    @web.authenticated
    def get(self, path):
        # Force notebook files to download as JSON attachments rather than
        # render inline.
        if os.path.splitext(path)[1] == '.ipynb':
            name = path.rsplit('/', 1)[-1]
            self.set_header('Content-Type', 'application/json')
            # NOTE(review): `name` is interpolated into the header unescaped;
            # a filename containing `"` (or control characters) can corrupt
            # the Content-Disposition header. Consider escaping/encoding per
            # RFC 6266 — TODO confirm against upstream fix.
            self.set_header('Content-Disposition','attachment; filename="%s"' % name)

        return web.StaticFileHandler.get(self, path)

    def set_headers(self):
        super(AuthenticatedFileHandler, self).set_headers()
        # disable browser caching, rely on 304 replies for savings
        if "v" not in self.request.arguments:
            self.add_header("Cache-Control", "no-cache")

    def compute_etag(self):
        # disable tornado's etag handling; caching is governed by the
        # Cache-Control header set above
        return None

    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.

        Requires tornado 3.1

        Adding to tornado's own handling, forbids the serving of hidden files.
        """
        abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path)
        abs_root = os.path.abspath(root)
        if is_hidden(abs_path, abs_root):
            self.log.info("Refusing to serve hidden file, via 404 Error")
            raise web.HTTPError(404)
        return abs_path


def json_errors(method):
    """Decorate methods with this to return GitHub style JSON errors.

    This should be used on any JSON API on any handler method that can
    raise HTTPErrors.

    This will grab the latest HTTPError exception using sys.exc_info
    and then:

    1. Set the HTTP status code based on the HTTPError
    2. Create and return a JSON body with a message field describing
       the error in a human readable form.
    """
    @functools.wraps(method)
    @gen.coroutine
    def wrapper(self, *args, **kwargs):
        try:
            result = yield gen.maybe_future(method(self, *args, **kwargs))
        except web.HTTPError as e:
            # expected error: report its own status and message as JSON
            status = e.status_code
            message = e.log_message
            self.log.warn(message)
            self.set_status(e.status_code)
            reply = dict(message=message, reason=e.reason)
            self.finish(json.dumps(reply))
        except Exception:
            # unexpected error: 500 with a traceback in the JSON body
            self.log.error("Unhandled error in API request", exc_info=True)
            status = 500
            message = "Unknown server error"
            t, value, tb = sys.exc_info()
            self.set_status(status)
            tb_text = ''.join(traceback.format_exception(t, value, tb))
            reply = dict(message=message, reason=None, traceback=tb_text)
            self.finish(json.dumps(reply))
        else:
            # FIXME: can use regular return in generators in py3
            raise gen.Return(result)
    return wrapper


#-----------------------------------------------------------------------------
# File handler
#-----------------------------------------------------------------------------

# to minimize subclass changes:
HTTPError = web.HTTPError


class FileFindHandler(web.StaticFileHandler):
    """subclass of StaticFileHandler for serving files from a search path"""

    # cache search results, don't search for files more than once
    _static_paths = {}

    def set_headers(self):
        super(FileFindHandler, self).set_headers()
        # disable browser caching, rely on 304 replies for savings
        if "v" not in self.request.arguments or \
                any(self.request.path.startswith(path) for path in self.no_cache_paths):
            self.set_header("Cache-Control", "no-cache")

    def initialize(self, path, default_filename=None, no_cache_paths=None):
        self.no_cache_paths = no_cache_paths or []

        # accept a single root or a list of roots
        if isinstance(path, string_types):
            path = [path]

        self.root = tuple(
            os.path.abspath(os.path.expanduser(p)) + os.sep for p in path
        )
        self.default_filename = default_filename

    def compute_etag(self):
        return None

    @classmethod
    def get_absolute_path(cls, roots, path):
        """locate a file to serve on our static file search path"""
        # NOTE(review): `cls._lock` is referenced but not defined in this
        # visible chunk — presumably a threading.Lock class attribute
        # declared elsewhere; verify it exists.
        with cls._lock:
            if path in cls._static_paths:
                return cls._static_paths[path]
            try:
                abspath = os.path.abspath(filefind(path, roots))
            except IOError:
                # IOError means not found
                return ''

            cls._static_paths[path] = abspath
            return abspath

    def validate_absolute_path(self, root, absolute_path):
        """check if the file should be served (raises 404, 403, etc.)"""
        if absolute_path == '':
            raise web.HTTPError(404)

        # pick the search root that actually contains the resolved file, so
        # the superclass check below runs against the right root
        for root in self.root:
            if (absolute_path + os.sep).startswith(root):
                break

        return super(FileFindHandler, self).validate_absolute_path(root, absolute_path)


class ApiVersionHandler(IPythonHandler):

    @json_errors
    def get(self):
        # not authenticated, so give as few info as possible
        self.finish(json.dumps({"version":IPython.__version__}))


class TrailingSlashHandler(web.RequestHandler):
    """Simple redirect handler that strips trailing slashes

    This should be the first, highest priority handler.
    """

    def get(self):
        self.redirect(self.request.uri.rstrip('/'))

    post = put = get


class FilesRedirectHandler(IPythonHandler):
    """Handler for redirecting relative URLs to the /files/ handler"""

    @staticmethod
    def redirect_to_files(self, path):
        """make redirect logic a reusable static method

        so it can be called from other handlers.
        """
        cm = self.contents_manager
        if cm.dir_exists(path):
            # it's a *directory*, redirect to /tree
            url = url_path_join(self.base_url, 'tree', path)
        else:
            orig_path = path
            # otherwise, redirect to /files
            parts = path.split('/')

            if not cm.file_exists(path=path) and 'files' in parts:
                # redirect without files/ iff it would 404
                # this preserves pre-2.0-style 'files/' links
                self.log.warn("Deprecated files/ URL: %s", orig_path)
                parts.remove('files')
                path = '/'.join(parts)

            if not cm.file_exists(path=path):
                raise web.HTTPError(404)

            url = url_path_join(self.base_url, 'files', path)

        url = url_escape(url)
        self.log.debug("Redirecting %s to %s", self.request.path, url)
        self.redirect(url)

    def get(self, path=''):
        return self.redirect_to_files(self, path)


#-----------------------------------------------------------------------------
# URL pattern fragments for re-use
#-----------------------------------------------------------------------------

# path matches any number of `/foo[/bar...]` or just `/` or ''
path_regex = r"(?P<path>(?:(?:/[^/]+)+|/?))"

#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------

default_handlers = [
    (r".*/", TrailingSlashHandler),
    (r"api", ApiVersionHandler)
]
./CrossVul/dataset_final_sorted/CWE-79/py/bad_1644_0
crossvul-python_data_good_5730_0
""" Form Widget classes specific to the Django admin site. """ from __future__ import unicode_literals import copy from django import forms from django.contrib.admin.templatetags.admin_static import static from django.core.urlresolvers import reverse from django.forms.widgets import RadioFieldRenderer from django.forms.util import flatatt from django.utils.html import escape, format_html, format_html_join, smart_urlquote from django.utils.text import Truncator from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.utils.encoding import force_text from django.utils import six class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ @property def media(self): js = ["core.js", "SelectBox.js", "SelectFilter2.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super(FilteredSelectMultiple, self).__init__(attrs, choices) def render(self, name, value, attrs=None, choices=()): if attrs is None: attrs = {} attrs['class'] = 'selectfilter' if self.is_stacked: attrs['class'] += 'stacked' output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)] output.append('<script type="text/javascript">addEvent(window, "load", function(e) {') # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. 
output.append('SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), static('admin/'))) return mark_safe(''.join(output)) class AdminDateWidget(forms.DateInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vDateField', 'size': '10'} if attrs is not None: final_attrs.update(attrs) super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format) class AdminTimeWidget(forms.TimeInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=[static("admin/js/%s" % path) for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vTimeField', 'size': '8'} if attrs is not None: final_attrs.update(attrs) super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. 
forms.MultiWidget.__init__(self, widgets, attrs) def format_output(self, rendered_widgets): return format_html('<p class="datetime">{0} {1}<br />{2} {3}</p>', _('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]) class AdminRadioFieldRenderer(RadioFieldRenderer): def render(self): """Outputs a <ul> for this set of radio fields.""" return format_html('<ul{0}>\n{1}\n</ul>', flatatt(self.attrs), format_html_join('\n', '<li>{0}</li>', ((force_text(w),) for w in self))) class AdminRadioSelect(forms.RadioSelect): renderer = AdminRadioFieldRenderer class AdminFileWidget(forms.ClearableFileInput): template_with_initial = ('<p class="file-upload">%s</p>' % forms.ClearableFileInput.template_with_initial) template_with_clear = ('<span class="clearable-file-input">%s</span>' % forms.ClearableFileInput.template_with_clear) def url_params_from_lookup_dict(lookups): """ Converts the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): items = [] for k, v in lookups.items(): if callable(v): v = v() if isinstance(v, (tuple, list)): v = ','.join([str(x) for x in v]) elif isinstance(v, bool): # See django.db.fields.BooleanField.get_prep_lookup v = ('0', '1')[v] else: v = six.text_type(v) items.append((k, v)) params.update(dict(items)) return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. 
""" def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super(ForeignKeyRawIdWidget, self).__init__(attrs) def render(self, name, value, attrs=None): rel_to = self.rel.to if attrs is None: attrs = {} extra = [] if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse('admin:%s_%s_changelist' % (rel_to._meta.app_label, rel_to._meta.model_name), current_app=self.admin_site.name) params = self.url_parameters() if params: url = '?' + '&amp;'.join(['%s=%s' % (k, v) for k, v in params.items()]) else: url = '' if "class" not in attrs: attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook. # TODO: "lookup_id_" is hard-coded here. This should instead use # the correct API to determine the ID dynamically. extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % (related_url, url, name)) extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>' % (static('admin/img/selector-search.gif'), _('Lookup'))) output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra if value: output.append(self.label_for_value(value)) return mark_safe(''.join(output)) def base_url_parameters(self): return url_params_from_lookup_dict(self.rel.limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.to._default_manager.using(self.db).get(**{key: value}) return '&nbsp;<strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...')) except (ValueError, self.rel.to.DoesNotExist): return '' class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids 
in the "raw_id" interface rather than in a <select multiple> box. """ def render(self, name, value, attrs=None): if attrs is None: attrs = {} if self.rel.to in self.admin_site._registry: # The related object is registered with the same AdminSite attrs['class'] = 'vManyToManyRawIdAdminField' if value: value = ','.join([force_text(v) for v in value]) else: value = '' return super(ManyToManyRawIdWidget, self).render(name, value, attrs) def url_parameters(self): return self.base_url_parameters() def label_for_value(self, value): return '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ def __init__(self, widget, rel, admin_site, can_add_related=None): self.is_hidden = widget.is_hidden self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. if can_add_related is None: can_add_related = rel.to in admin_site._registry self.can_add_related = can_add_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def media(self): return self.widget.media def render(self, name, value, *args, **kwargs): rel_to = self.rel.to info = (rel_to._meta.app_label, rel_to._meta.model_name) self.widget.choices = self.choices output = [self.widget.render(name, value, *args, **kwargs)] if self.can_add_related: related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name) # TODO: "add_id_" is hard-coded here. This should instead use the # correct API to determine the ID dynamically. 
output.append('<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % (related_url, name)) output.append('<img src="%s" width="10" height="10" alt="%s"/></a>' % (static('admin/img/icon_addlink.gif'), _('Add Another'))) return mark_safe(''.join(output)) def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): final_attrs = {'class': 'vLargeTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextareaWidget, self).__init__(attrs=final_attrs) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextInputWidget, self).__init__(attrs=final_attrs) class AdminEmailInputWidget(forms.EmailInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminEmailInputWidget, self).__init__(attrs=final_attrs) class AdminURLFieldWidget(forms.URLInput): def __init__(self, attrs=None): final_attrs = {'class': 'vURLField'} if attrs is not None: final_attrs.update(attrs) super(AdminURLFieldWidget, self).__init__(attrs=final_attrs) def render(self, name, value, attrs=None): html = super(AdminURLFieldWidget, self).render(name, value, attrs) if value: value = force_text(self._format_value(value)) final_attrs = {'href': smart_urlquote(value)} html = format_html( '<p class="url">{0} <a{1}>{2}</a><br />{3} {4}</p>', _('Currently:'), flatatt(final_attrs), value, _('Change:'), html ) return html class AdminIntegerFieldWidget(forms.TextInput): class_name = 
'vIntegerField' def __init__(self, attrs=None): final_attrs = {'class': self.class_name} if attrs is not None: final_attrs.update(attrs) super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs) class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField' class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vCommaSeparatedIntegerField'} if attrs is not None: final_attrs.update(attrs) super(AdminCommaSeparatedIntegerFieldWidget, self).__init__(attrs=final_attrs)
./CrossVul/dataset_final_sorted/CWE-79/py/good_5730_0
crossvul-python_data_bad_3890_3
import hashlib import json import os import uuid from django import forms from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator from django.core.serializers.json import DjangoJSONEncoder from django.db import models from django.shortcuts import redirect, render from modelcluster.contrib.taggit import ClusterTaggableManager from modelcluster.fields import ParentalKey, ParentalManyToManyField from modelcluster.models import ClusterableModel from taggit.managers import TaggableManager from taggit.models import TaggedItemBase from wagtail.admin.edit_handlers import ( FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel, TabbedInterface) from wagtail.admin.forms import WagtailAdminPageForm from wagtail.admin.mail import send_mail from wagtail.contrib.forms.forms import FormBuilder from wagtail.contrib.forms.models import ( FORM_FIELD_CHOICES, AbstractEmailForm, AbstractFormField, AbstractFormSubmission) from wagtail.contrib.settings.models import BaseSetting, register_setting from wagtail.contrib.sitemaps import Sitemap from wagtail.contrib.table_block.blocks import TableBlock from wagtail.core.blocks import CharBlock, RichTextBlock, StructBlock from wagtail.core.fields import RichTextField, StreamField from wagtail.core.models import Orderable, Page, PageManager, PageQuerySet from wagtail.documents.edit_handlers import DocumentChooserPanel from wagtail.documents.models import AbstractDocument, Document from wagtail.images.blocks import ImageChooserBlock from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.images.models import AbstractImage, AbstractRendition, Image from wagtail.search import index from wagtail.snippets.edit_handlers import SnippetChooserPanel from wagtail.snippets.models 
import register_snippet from wagtail.utils.decorators import cached_classmethod from .forms import FormClassAdditionalFieldPageForm, ValidatedPageForm from .views import CustomSubmissionsListView EVENT_AUDIENCE_CHOICES = ( ('public', "Public"), ('private', "Private"), ) COMMON_PANELS = ( FieldPanel('slug'), FieldPanel('seo_title'), FieldPanel('show_in_menus'), FieldPanel('search_description'), ) # Link fields class LinkFields(models.Model): link_external = models.URLField("External link", blank=True) link_page = models.ForeignKey( 'wagtailcore.Page', null=True, blank=True, related_name='+', on_delete=models.CASCADE ) link_document = models.ForeignKey( 'wagtaildocs.Document', null=True, blank=True, related_name='+', on_delete=models.CASCADE ) @property def link(self): if self.link_page: return self.link_page.url elif self.link_document: return self.link_document.url else: return self.link_external panels = [ FieldPanel('link_external'), PageChooserPanel('link_page'), DocumentChooserPanel('link_document'), ] class Meta: abstract = True # Carousel items class CarouselItem(LinkFields): image = models.ForeignKey( 'wagtailimages.Image', null=True, blank=True, on_delete=models.SET_NULL, related_name='+' ) embed_url = models.URLField("Embed URL", blank=True) caption = models.CharField(max_length=255, blank=True) panels = [ ImageChooserPanel('image'), FieldPanel('embed_url'), FieldPanel('caption'), MultiFieldPanel(LinkFields.panels, "Link"), ] class Meta: abstract = True # Related links class RelatedLink(LinkFields): title = models.CharField(max_length=255, help_text="Link title") panels = [ FieldPanel('title'), MultiFieldPanel(LinkFields.panels, "Link"), ] class Meta: abstract = True # Simple page class SimplePage(Page): content = models.TextField() content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('content'), ] def get_admin_display_title(self): return "%s (simple page)" % super().get_admin_display_title() # Page with Excluded Fields when copied 
class PageWithExcludedCopyField(Page): content = models.TextField() # Exclude this field from being copied special_field = models.CharField( blank=True, max_length=255, default='Very Special') exclude_fields_in_copy = ['special_field'] content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('special_field'), FieldPanel('content'), ] class PageWithOldStyleRouteMethod(Page): """ Prior to Wagtail 0.4, the route() method on Page returned an HttpResponse rather than a Page instance. As subclasses of Page may override route, we need to continue accepting this convention (albeit as a deprecated API). """ content = models.TextField() template = 'tests/simple_page.html' def route(self, request, path_components): return self.serve(request) # File page class FilePage(Page): file_field = models.FileField() FilePage.content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('file_field'), ] # Event page class EventPageCarouselItem(Orderable, CarouselItem): page = ParentalKey('tests.EventPage', related_name='carousel_items', on_delete=models.CASCADE) class EventPageRelatedLink(Orderable, RelatedLink): page = ParentalKey('tests.EventPage', related_name='related_links', on_delete=models.CASCADE) class EventPageSpeakerAward(Orderable, models.Model): speaker = ParentalKey('tests.EventPageSpeaker', related_name='awards', on_delete=models.CASCADE) name = models.CharField("Award name", max_length=255) date_awarded = models.DateField(null=True, blank=True) panels = [ FieldPanel('name'), FieldPanel('date_awarded'), ] class EventPageSpeaker(Orderable, LinkFields, ClusterableModel): page = ParentalKey('tests.EventPage', related_name='speakers', related_query_name='speaker', on_delete=models.CASCADE) first_name = models.CharField("Name", max_length=255, blank=True) last_name = models.CharField("Surname", max_length=255, blank=True) image = models.ForeignKey( 'wagtailimages.Image', null=True, blank=True, on_delete=models.SET_NULL, related_name='+' ) 
@property def name_display(self): return self.first_name + " " + self.last_name panels = [ FieldPanel('first_name'), FieldPanel('last_name'), ImageChooserPanel('image'), MultiFieldPanel(LinkFields.panels, "Link"), InlinePanel('awards', label="Awards"), ] class EventCategory(models.Model): name = models.CharField("Name", max_length=255) def __str__(self): return self.name # Override the standard WagtailAdminPageForm to add validation on start/end dates # that appears as a non-field error class EventPageForm(WagtailAdminPageForm): def clean(self): cleaned_data = super().clean() # Make sure that the event starts before it ends start_date = cleaned_data['date_from'] end_date = cleaned_data['date_to'] if start_date and end_date and start_date > end_date: raise ValidationError('The end date must be after the start date') return cleaned_data class EventPage(Page): date_from = models.DateField("Start date", null=True) date_to = models.DateField( "End date", null=True, blank=True, help_text="Not required if event is on a single day" ) time_from = models.TimeField("Start time", null=True, blank=True) time_to = models.TimeField("End time", null=True, blank=True) audience = models.CharField(max_length=255, choices=EVENT_AUDIENCE_CHOICES) location = models.CharField(max_length=255) body = RichTextField(blank=True) cost = models.CharField(max_length=255) signup_link = models.URLField(blank=True) feed_image = models.ForeignKey( 'wagtailimages.Image', null=True, blank=True, on_delete=models.SET_NULL, related_name='+' ) categories = ParentalManyToManyField(EventCategory, blank=True) search_fields = [ index.SearchField('get_audience_display'), index.SearchField('location'), index.SearchField('body'), index.FilterField('url_path'), ] password_required_template = 'tests/event_page_password_required.html' base_form_class = EventPageForm EventPage.content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('date_from'), FieldPanel('date_to'), FieldPanel('time_from'), 
FieldPanel('time_to'), FieldPanel('location'), FieldPanel('audience'), FieldPanel('cost'), FieldPanel('signup_link'), InlinePanel('carousel_items', label="Carousel items"), FieldPanel('body', classname="full"), InlinePanel('speakers', label="Speakers", heading="Speaker lineup"), InlinePanel('related_links', label="Related links"), FieldPanel('categories'), # InlinePanel related model uses `pk` not `id` InlinePanel('head_counts', label='Head Counts'), ] EventPage.promote_panels = [ MultiFieldPanel(COMMON_PANELS, "Common page configuration"), ImageChooserPanel('feed_image'), ] class HeadCountRelatedModelUsingPK(models.Model): """Related model that uses a custom primary key (pk) not id""" custom_id = models.AutoField(primary_key=True) event_page = ParentalKey( EventPage, on_delete=models.CASCADE, related_name='head_counts' ) head_count = models.IntegerField() panels = [FieldPanel('head_count')] # Override the standard WagtailAdminPageForm to add field that is not in model # so that we can test additional potential issues like comparing versions class FormClassAdditionalFieldPage(Page): location = models.CharField(max_length=255) body = RichTextField(blank=True) content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('location'), FieldPanel('body'), FieldPanel('code'), # not in model, see set base_form_class ] base_form_class = FormClassAdditionalFieldPageForm # Just to be able to test multi table inheritance class SingleEventPage(EventPage): excerpt = models.TextField( max_length=255, blank=True, null=True, help_text="Short text to describe what is this action about" ) # Give this page model a custom URL routing scheme def get_url_parts(self, request=None): url_parts = super().get_url_parts(request=request) if url_parts is None: return None else: site_id, root_url, page_path = url_parts return (site_id, root_url, page_path + 'pointless-suffix/') def route(self, request, path_components): if path_components == ['pointless-suffix']: # treat this as 
equivalent to a request for this page return super().route(request, []) else: # fall back to default routing rules return super().route(request, path_components) def get_admin_display_title(self): return "%s (single event)" % super().get_admin_display_title() SingleEventPage.content_panels = [FieldPanel('excerpt')] + EventPage.content_panels # "custom" sitemap object class EventSitemap(Sitemap): pass # Event index (has a separate AJAX template, and a custom template context) class EventIndex(Page): intro = RichTextField(blank=True) ajax_template = 'tests/includes/event_listing.html' def get_events(self): return self.get_children().live().type(EventPage) def get_paginator(self): return Paginator(self.get_events(), 4) def get_context(self, request, page=1): # Pagination paginator = self.get_paginator() try: events = paginator.page(page) except PageNotAnInteger: events = paginator.page(1) except EmptyPage: events = paginator.page(paginator.num_pages) # Update context context = super().get_context(request) context['events'] = events return context def route(self, request, path_components): if self.live and len(path_components) == 1: try: return self.serve(request, page=int(path_components[0])) except (TypeError, ValueError): pass return super().route(request, path_components) def get_static_site_paths(self): # Get page count page_count = self.get_paginator().num_pages # Yield a path for each page for page in range(page_count): yield '/%d/' % (page + 1) # Yield from superclass for path in super().get_static_site_paths(): yield path def get_sitemap_urls(self, request=None): # Add past events url to sitemap return super().get_sitemap_urls(request=request) + [ { 'location': self.full_url + 'past/', 'lastmod': self.latest_revision_created_at } ] def get_cached_paths(self): return super().get_cached_paths() + [ '/past/' ] EventIndex.content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('intro', classname="full"), ] class FormField(AbstractFormField): 
page = ParentalKey('FormPage', related_name='form_fields', on_delete=models.CASCADE) class FormPage(AbstractEmailForm): def get_context(self, request): context = super().get_context(request) context['greeting'] = "hello world" return context FormPage.content_panels = [ FieldPanel('title', classname="full title"), InlinePanel('form_fields', label="Form fields"), MultiFieldPanel([ FieldPanel('to_address', classname="full"), FieldPanel('from_address', classname="full"), FieldPanel('subject', classname="full"), ], "Email") ] # FormPage with a non-HTML extension class JadeFormField(AbstractFormField): page = ParentalKey('JadeFormPage', related_name='form_fields', on_delete=models.CASCADE) class JadeFormPage(AbstractEmailForm): template = "tests/form_page.jade" JadeFormPage.content_panels = [ FieldPanel('title', classname="full title"), InlinePanel('form_fields', label="Form fields"), MultiFieldPanel([ FieldPanel('to_address', classname="full"), FieldPanel('from_address', classname="full"), FieldPanel('subject', classname="full"), ], "Email") ] # Form page that redirects to a different page class RedirectFormField(AbstractFormField): page = ParentalKey('FormPageWithRedirect', related_name='form_fields', on_delete=models.CASCADE) class FormPageWithRedirect(AbstractEmailForm): thank_you_redirect_page = models.ForeignKey( 'wagtailcore.Page', null=True, blank=True, on_delete=models.SET_NULL, related_name='+', ) def get_context(self, request): context = super(FormPageWithRedirect, self).get_context(request) context['greeting'] = "hello world" return context def render_landing_page(self, request, form_submission=None, *args, **kwargs): """ Renders the landing page OR if a receipt_page_redirect is chosen redirects to this page. 
""" if self.thank_you_redirect_page: return redirect(self.thank_you_redirect_page.url, permanent=False) return super(FormPageWithRedirect, self).render_landing_page(request, form_submission, *args, **kwargs) FormPageWithRedirect.content_panels = [ FieldPanel('title', classname="full title"), PageChooserPanel('thank_you_redirect_page'), InlinePanel('form_fields', label="Form fields"), MultiFieldPanel([ FieldPanel('to_address', classname="full"), FieldPanel('from_address', classname="full"), FieldPanel('subject', classname="full"), ], "Email") ] # FormPage with a custom FormSubmission class FormPageWithCustomSubmission(AbstractEmailForm): """ This Form page: * Have custom submission model * Have custom related_name (see `FormFieldWithCustomSubmission.page`) * Saves reference to a user * Doesn't render html form, if submission for current user is present """ intro = RichTextField(blank=True) thank_you_text = RichTextField(blank=True) def get_context(self, request, *args, **kwargs): context = super().get_context(request) context['greeting'] = "hello world" return context def get_form_fields(self): return self.custom_form_fields.all() def get_data_fields(self): data_fields = [ ('username', 'Username'), ] data_fields += super().get_data_fields() return data_fields def get_submission_class(self): return CustomFormPageSubmission def process_form_submission(self, form): form_submission = self.get_submission_class().objects.create( form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder), page=self, user=form.user ) if self.to_address: addresses = [x.strip() for x in self.to_address.split(',')] content = '\n'.join([x[1].label + ': ' + str(form.data.get(x[0])) for x in form.fields.items()]) send_mail(self.subject, content, addresses, self.from_address,) # process_form_submission should now return the created form_submission return form_submission def serve(self, request, *args, **kwargs): if self.get_submission_class().objects.filter(page=self, 
user__pk=request.user.pk).exists(): return render( request, self.template, self.get_context(request) ) return super().serve(request, *args, **kwargs) FormPageWithCustomSubmission.content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('intro', classname="full"), InlinePanel('custom_form_fields', label="Form fields"), FieldPanel('thank_you_text', classname="full"), MultiFieldPanel([ FieldPanel('to_address', classname="full"), FieldPanel('from_address', classname="full"), FieldPanel('subject', classname="full"), ], "Email") ] class FormFieldWithCustomSubmission(AbstractFormField): page = ParentalKey(FormPageWithCustomSubmission, on_delete=models.CASCADE, related_name='custom_form_fields') class CustomFormPageSubmission(AbstractFormSubmission): user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) def get_data(self): form_data = super().get_data() form_data.update({ 'username': self.user.username, }) return form_data # Custom form page with custom submission listing view and form submission class FormFieldForCustomListViewPage(AbstractFormField): page = ParentalKey( 'FormPageWithCustomSubmissionListView', related_name='form_fields', on_delete=models.CASCADE ) class FormPageWithCustomSubmissionListView(AbstractEmailForm): """Form Page with customised submissions listing view""" intro = RichTextField(blank=True) thank_you_text = RichTextField(blank=True) submissions_list_view_class = CustomSubmissionsListView def get_submission_class(self): return CustomFormPageSubmission def get_data_fields(self): data_fields = [ ('username', 'Username'), ] data_fields += super().get_data_fields() return data_fields content_panels = [ FieldPanel('title', classname="full title"), FieldPanel('intro', classname="full"), InlinePanel('form_fields', label="Form fields"), FieldPanel('thank_you_text', classname="full"), MultiFieldPanel([ FieldPanel('to_address', classname="full"), FieldPanel('from_address', classname="full"), FieldPanel('subject', 
                       classname="full"),
        ], "Email")
    ]


# FormPage with custom FormBuilder

# Extend the stock field-type choices with an 'ipaddress' type.
EXTENDED_CHOICES = FORM_FIELD_CHOICES + (('ipaddress', 'IP Address'),)


class ExtendedFormField(AbstractFormField):
    """Override the field_type field with extended choices."""
    page = ParentalKey(
        'FormPageWithCustomFormBuilder',
        related_name='form_fields',
        on_delete=models.CASCADE)
    field_type = models.CharField(
        verbose_name='field type',
        max_length=16,
        choices=EXTENDED_CHOICES)


class CustomFormBuilder(FormBuilder):
    """
    A custom FormBuilder that has an 'ipaddress' field with
    customised create_singleline_field with shorter max_length
    """

    def create_singleline_field(self, field, options):
        # Shorten the single-line field limit.
        options['max_length'] = 120  # usual default is 255
        return forms.CharField(**options)

    def create_ipaddress_field(self, field, options):
        # Handler for the extra 'ipaddress' field_type choice above.
        return forms.GenericIPAddressField(**options)


class FormPageWithCustomFormBuilder(AbstractEmailForm):
    """
    A Form page that has a custom form builder and uses a custom
    form field model with additional field_type choices.
    """
    form_builder = CustomFormBuilder
    content_panels = [
        FieldPanel('title', classname="full title"),
        InlinePanel('form_fields', label="Form fields"),
        MultiFieldPanel([
            FieldPanel('to_address', classname="full"),
            FieldPanel('from_address', classname="full"),
            FieldPanel('subject', classname="full"),
        ], "Email")
    ]


# Snippets

class AdvertPlacement(models.Model):
    page = ParentalKey('wagtailcore.Page', related_name='advert_placements', on_delete=models.CASCADE)
    advert = models.ForeignKey('tests.Advert', related_name='+', on_delete=models.CASCADE)
    colour = models.CharField(max_length=255)


class AdvertTag(TaggedItemBase):
    content_object = ParentalKey('Advert', related_name='tagged_items', on_delete=models.CASCADE)


class Advert(ClusterableModel):
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)

    tags = TaggableManager(through=AdvertTag, blank=True)

    panels = [
        FieldPanel('url'),
        FieldPanel('text'),
        FieldPanel('tags'),
    ]

    def __str__(self):
        return self.text


register_snippet(Advert)


class AdvertWithCustomPrimaryKey(ClusterableModel):
    # Natural (string) primary key instead of the default auto id.
    advert_id = models.CharField(max_length=255, primary_key=True)
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)

    panels = [
        FieldPanel('url'),
        FieldPanel('text'),
    ]

    def __str__(self):
        return self.text


register_snippet(AdvertWithCustomPrimaryKey)


class AdvertWithCustomUUIDPrimaryKey(ClusterableModel):
    # UUID primary key variant.
    advert_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)

    panels = [
        FieldPanel('url'),
        FieldPanel('text'),
    ]

    def __str__(self):
        return self.text


register_snippet(AdvertWithCustomUUIDPrimaryKey)


class AdvertWithTabbedInterface(models.Model):
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)
    something_else = models.CharField(max_length=255)

    advert_panels = [
        FieldPanel('url'),
        FieldPanel('text'),
    ]

    other_panels = [
        FieldPanel('something_else'),
    ]

    edit_handler = TabbedInterface([
        ObjectList(advert_panels, heading='Advert'),
        ObjectList(other_panels, heading='Other'),
    ])

    def __str__(self):
        return self.text

    class Meta:
        ordering = ('text',)


register_snippet(AdvertWithTabbedInterface)


class StandardIndex(Page):
    """ Index for the site """
    parent_page_types = [Page]


# A custom panel setup where all Promote fields are placed in the Content tab instead;
# we use this to test that the 'promote' tab is left out of the output when empty
StandardIndex.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('seo_title'),
    FieldPanel('slug'),
    InlinePanel('advert_placements', label="Adverts"),
]

StandardIndex.promote_panels = []


class StandardChild(Page):
    pass


# Test overriding edit_handler with a custom one
StandardChild.edit_handler = TabbedInterface([
    ObjectList(StandardChild.content_panels, heading='Content'),
    ObjectList(StandardChild.promote_panels, heading='Promote'),
    ObjectList(StandardChild.settings_panels, heading='Settings', classname='settings'),
    ObjectList([], heading='Dinosaurs'),
], base_form_class=WagtailAdminPageForm)


class BusinessIndex(Page):
    """ Can be placed anywhere, can only have Business children """
    subpage_types = ['tests.BusinessChild', 'tests.BusinessSubIndex']


class BusinessSubIndex(Page):
    """ Can be placed under BusinessIndex, and have BusinessChild children """

    # BusinessNowherePage is 'incorrectly' added here as a possible child.
    # The rules on BusinessNowherePage prevent it from being a child here though.
    subpage_types = ['tests.BusinessChild', 'tests.BusinessNowherePage']
    parent_page_types = ['tests.BusinessIndex', 'tests.BusinessChild']


class BusinessChild(Page):
    """ Can only be placed under Business indexes, no children allowed """
    subpage_types = []
    # Mixes a string reference with a direct class reference on purpose.
    parent_page_types = ['tests.BusinessIndex', BusinessSubIndex]


class BusinessNowherePage(Page):
    """ Not allowed to be placed anywhere """
    parent_page_types = []


class TaggedPageTag(TaggedItemBase):
    content_object = ParentalKey('tests.TaggedPage', related_name='tagged_items', on_delete=models.CASCADE)


class TaggedPage(Page):
    tags = ClusterTaggableManager(through=TaggedPageTag, blank=True)


TaggedPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('tags'),
]


class SingletonPage(Page):
    @classmethod
    def can_create_at(cls, parent):
        # You can only create one of these!
        return super(SingletonPage, cls).can_create_at(parent) \
            and not cls.objects.exists()


class SingletonPageViaMaxCount(Page):
    # Same singleton behaviour as above, but via the declarative max_count.
    max_count = 1


class PageChooserModel(models.Model):
    page = models.ForeignKey('wagtailcore.Page', help_text='help text', on_delete=models.CASCADE)


class EventPageChooserModel(models.Model):
    page = models.ForeignKey('tests.EventPage', help_text='more help text', on_delete=models.CASCADE)


class SnippetChooserModel(models.Model):
    advert = models.ForeignKey(Advert, help_text='help text', on_delete=models.CASCADE)

    panels = [
        SnippetChooserPanel('advert'),
    ]


class SnippetChooserModelWithCustomPrimaryKey(models.Model):
    advertwithcustomprimarykey = models.ForeignKey(AdvertWithCustomPrimaryKey, help_text='help text', on_delete=models.CASCADE)

    panels = [
        SnippetChooserPanel('advertwithcustomprimarykey'),
    ]


class CustomImage(AbstractImage):
    caption = models.CharField(max_length=255, blank=True)
    fancy_caption = RichTextField(blank=True)
    # Intentionally absent from admin_form_fields below.
    not_editable_field = models.CharField(max_length=255, blank=True)

    admin_form_fields = Image.admin_form_fields + (
        'caption',
        'fancy_caption',
    )


class CustomRendition(AbstractRendition):
    image = models.ForeignKey(CustomImage, related_name='renditions', on_delete=models.CASCADE)

    class Meta:
        unique_together = (
            ('image', 'filter_spec', 'focal_point_key'),
        )


class CustomDocument(AbstractDocument):
    description = models.TextField(blank=True)
    fancy_description = RichTextField(blank=True)
    admin_form_fields = Document.admin_form_fields + (
        'description',
        'fancy_description'
    )


class StreamModel(models.Model):
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])


class ExtendedImageChooserBlock(ImageChooserBlock):
    """
    Example of Block with custom get_api_representation method.
    If the request has an 'extended' query param, it returns a dict of id and title,
    otherwise, it returns the default value.
    """
    def get_api_representation(self, value, context=None):
        image_id = super().get_api_representation(value, context=context)
        if 'request' in context and context['request'].query_params.get('extended', False):
            return {
                'id': image_id,
                'title': value.title
            }
        return image_id


class StreamPage(Page):
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ExtendedImageChooserBlock()),
        ('product', StructBlock([
            ('name', CharBlock()),
            ('price', CharBlock()),
        ])),
    ])

    api_fields = ('body',)

    content_panels = [
        FieldPanel('title'),
        StreamFieldPanel('body'),
    ]


class DefaultStreamPage(Page):
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ImageChooserBlock()),
    ], default='')

    content_panels = [
        FieldPanel('title'),
        StreamFieldPanel('body'),
    ]


class MTIBasePage(Page):
    # Base of a multi-table-inheritance pair; not directly creatable.
    is_creatable = False

    class Meta:
        verbose_name = "MTI Base page"


class MTIChildPage(MTIBasePage):
    # Should be creatable by default, no need to set anything
    pass


class AbstractPage(Page):
    class Meta:
        abstract = True


@register_setting
class TestSetting(BaseSetting):
    title = models.CharField(max_length=100)
    email = models.EmailField(max_length=50)


@register_setting(icon="tag")
class IconSetting(BaseSetting):
    pass


class NotYetRegisteredSetting(BaseSetting):
    pass


@register_setting
class FileUploadSetting(BaseSetting):
    file = models.FileField()


class BlogCategory(models.Model):
    name = models.CharField(unique=True, max_length=80)


class BlogCategoryBlogPage(models.Model):
    # Through model for the explicit M2M below.
    category = models.ForeignKey(BlogCategory, related_name="+", on_delete=models.CASCADE)
    page = ParentalKey('ManyToManyBlogPage', related_name='categories', on_delete=models.CASCADE)
    panels = [
        FieldPanel('category'),
    ]


class ManyToManyBlogPage(Page):
    """
    A page type with two different kinds of M2M relation.
    We don't formally support these, but we don't want them to cause
    hard breakages either.
    """
    body = RichTextField(blank=True)
    adverts = models.ManyToManyField(Advert, blank=True)
    blog_categories = models.ManyToManyField(
        BlogCategory, through=BlogCategoryBlogPage, blank=True)

    # make first_published_at editable on this page model
    settings_panels = Page.settings_panels + [
        FieldPanel('first_published_at'),
    ]


class OneToOnePage(Page):
    """
    A Page containing a O2O relation.
    """
    body = RichTextBlock(blank=True)
    page_ptr = models.OneToOneField(Page, parent_link=True,
                                    related_name='+', on_delete=models.CASCADE)


class GenericSnippetPage(Page):
    """
    A page containing a reference to an arbitrary snippet (or any model for that matter)
    linked by a GenericForeignKey
    """
    snippet_content_type = models.ForeignKey(ContentType, on_delete=models.SET_NULL, null=True)
    snippet_object_id = models.PositiveIntegerField(null=True)
    snippet_content_object = GenericForeignKey('snippet_content_type', 'snippet_object_id')


class CustomImageFilePath(AbstractImage):
    def get_upload_to(self, filename):
        """Create a path that's file-system friendly.

        By hashing the file's contents we guarantee an equal distribution
        of files within our root directories.

        This also gives us a better chance of uploading images with the same
        filename, but different contents - this isn't guaranteed as we're only
        using the first three characters of the checksum.
        """
        original_filepath = super().get_upload_to(filename)
        folder_name, filename = original_filepath.split(os.path.sep)

        # Ensure that we consume the entire file, we can't guarantee that
        # the stream has not be partially (or entirely) consumed by
        # another process
        original_position = self.file.tell()
        self.file.seek(0)
        hash256 = hashlib.sha256()

        # Hash in small chunks to avoid loading the whole file in memory.
        while True:
            data = self.file.read(256)
            if not data:
                break
            hash256.update(data)
        checksum = hash256.hexdigest()

        # Restore the stream position for any subsequent readers.
        self.file.seek(original_position)
        return os.path.join(folder_name, checksum[:3], filename)


class CustomPageQuerySet(PageQuerySet):
    def about_spam(self):
        return self.filter(title__contains='spam')


CustomManager = PageManager.from_queryset(CustomPageQuerySet)


class CustomManagerPage(Page):
    objects = CustomManager()


class MyBasePage(Page):
    """
    A base Page model, used to set site-wide defaults and overrides.
    """
    objects = CustomManager()

    class Meta:
        abstract = True


class MyCustomPage(MyBasePage):
    pass


class ValidatedPage(Page):
    foo = models.CharField(max_length=255)

    base_form_class = ValidatedPageForm
    content_panels = Page.content_panels + [
        FieldPanel('foo'),
    ]


class DefaultRichTextFieldPage(Page):
    body = RichTextField()

    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('body'),
    ]


class DefaultRichBlockFieldPage(Page):
    body = StreamField([
        ('rich_text', RichTextBlock()),
    ])

    content_panels = Page.content_panels + [
        StreamFieldPanel('body')
    ]


class CustomRichTextFieldPage(Page):
    body = RichTextField(editor='custom')

    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('body'),
    ]


class CustomRichBlockFieldPage(Page):
    body = StreamField([
        ('rich_text', RichTextBlock(editor='custom')),
    ])

    content_panels = [
        FieldPanel('title', classname="full title"),
        StreamFieldPanel('body'),
    ]


class RichTextFieldWithFeaturesPage(Page):
    body = RichTextField(features=['quotation', 'embed', 'made-up-feature'])

    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('body'),
    ]


# a page that only contains RichTextField within an InlinePanel,
# to test that the inline child's form media gets pulled through
class SectionedRichTextPageSection(Orderable):
    page = ParentalKey('tests.SectionedRichTextPage', related_name='sections', on_delete=models.CASCADE)
    body = RichTextField()

    panels = [
        FieldPanel('body')
    ]


class SectionedRichTextPage(Page):
    content_panels = [
        FieldPanel('title', classname="full title"),
        InlinePanel('sections')
    ]


class InlineStreamPageSection(Orderable):
    page = ParentalKey('tests.InlineStreamPage', related_name='sections', on_delete=models.CASCADE)
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])
    panels = [
        StreamFieldPanel('body')
    ]


class InlineStreamPage(Page):
    content_panels = [
        FieldPanel('title', classname="full title"),
        InlinePanel('sections')
    ]
class TableBlockStreamPage(Page):
    table = StreamField([('table', TableBlock())])

    content_panels = [StreamFieldPanel('table')]


class UserProfile(models.Model):
    # Wagtail's schema must be able to coexist alongside a custom UserProfile model
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    favourite_colour = models.CharField(max_length=255)


class PanelSettings(TestSetting):
    panels = [
        FieldPanel('title')
    ]


class TabbedSettings(TestSetting):
    edit_handler = TabbedInterface([
        ObjectList([
            FieldPanel('title')
        ], heading='First tab'),
        ObjectList([
            FieldPanel('email')
        ], heading='Second tab'),
    ])


class AlwaysShowInMenusPage(Page):
    show_in_menus_default = True


# test for AddField migrations on StreamFields using various default values
class AddedStreamFieldWithoutDefaultPage(Page):
    body = StreamField([
        ('title', CharBlock())
    ])


class AddedStreamFieldWithEmptyStringDefaultPage(Page):
    body = StreamField([
        ('title', CharBlock())
    ], default='')


class AddedStreamFieldWithEmptyListDefaultPage(Page):
    body = StreamField([
        ('title', CharBlock())
    ], default=[])


# test customising edit handler definitions on a per-request basis
class PerUserContentPanels(ObjectList):
    def _replace_children_with_per_user_config(self):
        # Pick the panel list based on the current user, then re-bind every
        # child panel to this handler's model/instance/request/form.
        self.children = self.instance.basic_content_panels
        if self.request.user.is_superuser:
            self.children = self.instance.superuser_content_panels
        self.children = [
            child.bind_to(model=self.model, instance=self.instance,
                          request=self.request, form=self.form)
            for child in self.children]

    def on_instance_bound(self):
        # replace list of children when both instance and request are available
        if self.request:
            self._replace_children_with_per_user_config()
        else:
            super().on_instance_bound()

    def on_request_bound(self):
        # replace list of children when both instance and request are available
        if self.instance:
            self._replace_children_with_per_user_config()
        else:
            super().on_request_bound()


class PerUserPageMixin:
    # Per-class panel lists; concrete subclasses override both.
    basic_content_panels = []
    superuser_content_panels = []

    @cached_classmethod
    def get_edit_handler(cls):
        # Build the tabbed interface lazily (cached per class); the Content
        # tab uses PerUserContentPanels so its children depend on the user.
        tabs = []

        if cls.basic_content_panels and cls.superuser_content_panels:
            tabs.append(PerUserContentPanels(heading='Content'))
        if cls.promote_panels:
            tabs.append(ObjectList(cls.promote_panels, heading='Promote'))
        if cls.settings_panels:
            tabs.append(ObjectList(cls.settings_panels, heading='Settings', classname='settings'))

        edit_handler = TabbedInterface(tabs, base_form_class=cls.base_form_class)

        return edit_handler.bind_to(model=cls)


class SecretPage(PerUserPageMixin, Page):
    boring_data = models.TextField()
    secret_data = models.TextField()

    basic_content_panels = Page.content_panels + [
        FieldPanel('boring_data'),
    ]
    # Superusers additionally see the secret field.
    superuser_content_panels = basic_content_panels + [
        FieldPanel('secret_data'),
    ]


class SimpleParentPage(Page):
    # `BusinessIndex` has been added to bring it in line with other tests
    subpage_types = ['tests.SimpleChildPage', BusinessIndex]


class SimpleChildPage(Page):
    # `Page` has been added to bring it in line with other tests
    parent_page_types = ['tests.SimpleParentPage', Page]

    max_count_per_parent = 1


class PersonPage(Page):
    first_name = models.CharField(
        max_length=255,
        verbose_name='First Name',
    )
    last_name = models.CharField(
        max_length=255,
        verbose_name='Last Name',
    )

    content_panels = Page.content_panels + [
        MultiFieldPanel([
            FieldPanel('first_name'),
            FieldPanel('last_name'),
        ], 'Person'),
        InlinePanel('addresses', label='Address'),
    ]

    class Meta:
        verbose_name = 'Person'
        verbose_name_plural = 'Persons'


class Address(index.Indexed, ClusterableModel, Orderable):
    address = models.CharField(
        max_length=255,
        verbose_name='Address',
    )
    tags = ClusterTaggableManager(
        through='tests.AddressTag',
        blank=True,
    )
    person = ParentalKey(
        to='tests.PersonPage',
        related_name='addresses',
        verbose_name='Person'
    )

    panels = [
        FieldPanel('address'),
        FieldPanel('tags'),
    ]

    class Meta:
        verbose_name = 'Address'
        verbose_name_plural = 'Addresses'


class AddressTag(TaggedItemBase):
    content_object = ParentalKey(
        to='tests.Address',
        on_delete=models.CASCADE,
        related_name='tagged_items'
    )
# --- dataset artifact: path markers separating concatenated source files ---
# ./CrossVul/dataset_final_sorted/CWE-79/py/bad_3890_3
# crossvul-python_data_bad_1727_0
# coding: utf-8
"""A tornado based IPython notebook server."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from __future__ import print_function

import base64
import datetime
import errno
import importlib
import io
import json
import logging
import os
import random
import re
import select
import signal
import socket
import ssl
import sys
import threading
import webbrowser


# check for pyzmq
from IPython.utils.zmqrelated import check_for_zmq
check_for_zmq('13', 'IPython.html')

from jinja2 import Environment, FileSystemLoader

# Install the pyzmq ioloop. This has to be done before anything else from
# tornado is imported.
from zmq.eventloop import ioloop
ioloop.install()

# check for tornado >= 4.0 (the message string below is what users see)
msg = "The IPython Notebook requires tornado >= 4.0"
try:
    import tornado
except ImportError:
    raise ImportError(msg)
try:
    version_info = tornado.version_info
except AttributeError:
    # tornado < 1.1.0 had no version_info attribute at all
    raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (4,0):
    raise ImportError(msg + ", but you have %s" % tornado.version)

from tornado import httpserver
from tornado import web
from tornado.log import LogFormatter, app_log, access_log, gen_log

from IPython.html import (
    DEFAULT_STATIC_FILES_PATH,
    DEFAULT_TEMPLATE_PATH_LIST,
)
from .base.handlers import Template404
from .log import log_request
from .services.kernels.kernelmanager import MappingKernelManager
from .services.config import ConfigManager
from .services.contents.manager import ContentsManager
from .services.contents.filemanager import FileContentsManager
from .services.clusters.clustermanager import ClusterManager
from .services.sessions.sessionmanager import SessionManager

from .auth.login import LoginHandler
from .auth.logout import LogoutHandler
from .base.handlers import IPythonHandler, FileFindHandler

from IPython.config import Config
from IPython.config.application import catch_config_error, boolean_flag
from IPython.core.application import (
    BaseIPythonApplication, base_flags, base_aliases,
)
from IPython.core.profiledir import ProfileDir
from IPython.kernel import KernelManager
from IPython.kernel.kernelspec import KernelSpecManager
from IPython.kernel.zmq.session import Session
from IPython.nbformat.sign import NotebookNotary
from IPython.utils.importstring import import_item
from IPython.utils import submodule
from IPython.utils.process import check_pid
from IPython.utils.traitlets import (
    Dict, Unicode, Integer, List, Bool, Bytes, Instance,
    TraitError, Type,
)
from IPython.utils import py3compat
from IPython.utils.path import filefind, get_ipython_dir
from IPython.utils.sysinfo import get_sys_info

from .nbextensions import SYSTEM_NBEXTENSIONS_DIRS
from .utils import url_path_join

#-----------------------------------------------------------------------------
# Module globals
#-----------------------------------------------------------------------------

_examples = """
ipython notebook                       # start the notebook
ipython notebook --profile=sympy       # use the sympy profile
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
"""

#-----------------------------------------------------------------------------
# Helper functions
#-----------------------------------------------------------------------------

def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    for i in range(min(5, n)):
        yield port + i
    for i in range(n-5):
        # max(1, ...) keeps the port number positive/valid.
        yield max(1, port + random.randint(-2*n, 2*n))

def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    name = 'IPython.html.' + name
    mod = __import__(name, fromlist=['default_handlers'])
    return mod.default_handlers

#-----------------------------------------------------------------------------
# The Tornado web application
#-----------------------------------------------------------------------------

class NotebookWebApplication(web.Application):
    # Assembles tornado settings and the full handler table for the server.

    def __init__(self, ipython_app, kernel_manager, contents_manager,
                 cluster_manager, session_manager, kernel_spec_manager,
                 config_manager, log,
                 base_url, default_url, settings_overrides, jinja_env_options):

        settings = self.init_settings(
            ipython_app, kernel_manager, contents_manager, cluster_manager,
            session_manager, kernel_spec_manager, config_manager, log, base_url,
            default_url, settings_overrides, jinja_env_options)
        handlers = self.init_handlers(settings)

        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, contents_manager,
                      cluster_manager, session_manager, kernel_spec_manager,
                      config_manager,
                      log, base_url, default_url, settings_overrides,
                      jinja_env_options=None):
        # Build the tornado settings dict from the app's configuration;
        # settings_overrides wins over everything computed here.

        _template_path = settings_overrides.get(
            "template_path",
            ipython_app.template_file_path,
        )
        # Accept a single path string as well as a sequence of paths.
        if isinstance(_template_path, py3compat.string_types):
            _template_path = (_template_path,)
        template_path = [os.path.expanduser(path) for path in _template_path]

        jenv_opt = jinja_env_options if jinja_env_options else {}
        env = Environment(loader=FileSystemLoader(template_path), **jenv_opt)

        sys_info = get_sys_info()
        if sys_info['commit_source'] == 'repository':
            # don't cache (rely on 304) when working from master
            version_hash = ''
        else:
            # reset the cache on server restart
            version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        settings = dict(
            # basics
            log_function=log_request,
            base_url=base_url,
            default_url=default_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class = FileFindHandler,
            static_url_prefix = url_path_join(base_url,'/static/'),
            static_handler_args = {
                # don't cache custom.js
                'no_cache_paths': [url_path_join(base_url, 'static', 'custom')],
            },
            version_hash=version_hash,

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_url,'/login'),
            login_handler_class=ipython_app.login_handler_class,
            logout_handler_class=ipython_app.logout_handler_class,
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            contents_manager=contents_manager,
            cluster_manager=cluster_manager,
            session_manager=session_manager,
            kernel_spec_manager=kernel_spec_manager,
            config_manager=config_manager,

            # IPython stuff
            jinja_template_vars=ipython_app.jinja_template_vars,
            nbextensions_path=ipython_app.nbextensions_path,
            websocket_url=ipython_app.websocket_url,
            mathjax_url=ipython_app.mathjax_url,
            config=ipython_app.config,
            jinja2_env=env,
            terminals_available=False,  # Set later if terminals are available
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        """Load the (URL pattern, handler) tuples for each component."""

        # Order matters. The first handler to match the URL will handle the request.
        handlers = []
        handlers.extend(load_handlers('tree.handlers'))
        handlers.extend([(r"/login", settings['login_handler_class'])])
        handlers.extend([(r"/logout", settings['logout_handler_class'])])
        handlers.extend(load_handlers('files.handlers'))
        handlers.extend(load_handlers('notebook.handlers'))
        handlers.extend(load_handlers('nbconvert.handlers'))
        handlers.extend(load_handlers('kernelspecs.handlers'))
        handlers.extend(load_handlers('edit.handlers'))
        handlers.extend(load_handlers('services.config.handlers'))
        handlers.extend(load_handlers('services.kernels.handlers'))
        handlers.extend(load_handlers('services.contents.handlers'))
        handlers.extend(load_handlers('services.clusters.handlers'))
        handlers.extend(load_handlers('services.sessions.handlers'))
        handlers.extend(load_handlers('services.nbconvert.handlers'))
        handlers.extend(load_handlers('services.kernelspecs.handlers'))
        handlers.extend(load_handlers('services.security.handlers'))
        handlers.append(
            (r"/nbextensions/(.*)", FileFindHandler, {
                'path': settings['nbextensions_path'],
                'no_cache_paths': ['/'],  # don't cache anything in nbextensions
            }),
        )
        # register base handlers last
        handlers.extend(load_handlers('base.handlers'))
        # set the URL that will be redirected from `/`
        handlers.append(
            (r'/?', web.RedirectHandler, {
                'url' : settings['default_url'],
                'permanent': False,  # want 302, not 301
            })
        )

        # prepend base_url onto the patterns that we match
        new_handlers = []
        for handler in handlers:
            pattern = url_path_join(settings['base_url'], handler[0])
            new_handler = tuple([pattern] + list(handler[1:]))
            new_handlers.append(new_handler)
        # add 404 on the end, which will catch everything that falls through
        new_handlers.append((r'(.*)', Template404))
        return new_handlers


class NbserverListApp(BaseIPythonApplication):
    # Subcommand: list running notebook servers (body continues below).

    description="List currently running notebook servers in this profile."
    flags = dict(
        json=({'NbserverListApp': {'json': True}},
              "Produce machine-readable JSON output."),
    )

    json = Bool(False, config=True,
          help="If True, each line of output will be a JSON object with the "
                  "details from the server info file.")

    def start(self):
        # Print one line per running server, either human-readable or JSON.
        if not self.json:
            print("Currently running servers:")
        for serverinfo in list_running_servers(self.profile):
            if self.json:
                print(json.dumps(serverinfo))
            else:
                print(serverinfo['url'], "::", serverinfo['notebook_dir'])

#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------

# Start from the base application flags and add notebook-specific ones.
flags = dict(base_flags)
flags['no-browser']=(
    {'NotebookApp' : {'open_browser' : False}},
    "Don't open the notebook in a browser after startup."
)
flags['pylab']=(
    {'NotebookApp' : {'pylab' : 'warn'}},
    "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib."
)
flags['no-mathjax']=(
    {'NotebookApp' : {'enable_mathjax' : False}},
    """Disable MathJax

    MathJax is the javascript library IPython uses to render math/LaTeX. It is
    very large, so you may want to disable it if you have a slow internet
    connection, or for offline use of the notebook.

    When disabled, equations etc. will appear as their untransformed TeX source.
""" ) # Add notebook manager flags flags.update(boolean_flag('script', 'FileContentsManager.save_script', 'DEPRECATED, IGNORED', 'DEPRECATED, IGNORED')) aliases = dict(base_aliases) aliases.update({ 'ip': 'NotebookApp.ip', 'port': 'NotebookApp.port', 'port-retries': 'NotebookApp.port_retries', 'transport': 'KernelManager.transport', 'keyfile': 'NotebookApp.keyfile', 'certfile': 'NotebookApp.certfile', 'notebook-dir': 'NotebookApp.notebook_dir', 'browser': 'NotebookApp.browser', 'pylab': 'NotebookApp.pylab', }) #----------------------------------------------------------------------------- # NotebookApp #----------------------------------------------------------------------------- class NotebookApp(BaseIPythonApplication): name = 'ipython-notebook' description = """ The IPython HTML Notebook. This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client. """ examples = _examples aliases = aliases flags = flags classes = [ KernelManager, ProfileDir, Session, MappingKernelManager, ContentsManager, FileContentsManager, NotebookNotary, KernelSpecManager, ] flags = Dict(flags) aliases = Dict(aliases) subcommands = dict( list=(NbserverListApp, NbserverListApp.description.splitlines()[0]), ) ipython_kernel_argv = List(Unicode) _log_formatter_cls = LogFormatter def _log_level_default(self): return logging.INFO def _log_datefmt_default(self): """Exclude date from default date format""" return "%H:%M:%S" def _log_format_default(self): """override default log format to include time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # create requested profiles by default, if they don't exist: auto_create = Bool(True) # file to be opened in the notebook server file_to_run = Unicode('', config=True) # Network related information allow_origin = Unicode('', config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. 
Takes precedence over allow_origin_pat. """ ) allow_origin_pat = Unicode('', config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. """ ) allow_credentials = Bool(False, config=True, help="Set the Access-Control-Allow-Credentials: true header" ) default_url = Unicode('/tree', config=True, help="The default URL to redirect to from `/`" ) ip = Unicode('localhost', config=True, help="The IP address the notebook server will listen on." ) def _ip_default(self): """Return localhost if available, 127.0.0.1 otherwise. On some (horribly broken) systems, localhost cannot be bound. """ s = socket.socket() try: s.bind(('localhost', 0)) except socket.error as e: self.log.warn("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s", e) return '127.0.0.1' else: s.close() return 'localhost' def _ip_changed(self, name, old, new): if new == u'*': self.ip = u'' port = Integer(8888, config=True, help="The port the notebook server will listen on." ) port_retries = Integer(50, config=True, help="The number of additional ports to try if the specified port is not available." ) certfile = Unicode(u'', config=True, help="""The full path to an SSL/TLS certificate file.""" ) keyfile = Unicode(u'', config=True, help="""The full path to a private key file for usage with SSL/TLS.""" ) cookie_secret_file = Unicode(config=True, help="""The file where the cookie secret is stored.""" ) def _cookie_secret_file_default(self): if self.profile_dir is None: return '' return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret') cookie_secret = Bytes(b'', config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the Notebook. 
Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). """ ) def _cookie_secret_default(self): if os.path.exists(self.cookie_secret_file): with io.open(self.cookie_secret_file, 'rb') as f: return f.read() else: secret = base64.encodestring(os.urandom(1024)) self._write_cookie_secret_file(secret) return secret def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) with io.open(self.cookie_secret_file, 'wb') as f: f.write(secret) try: os.chmod(self.cookie_secret_file, 0o600) except OSError: self.log.warn( "Could not set permissions on %s", self.cookie_secret_file ) password = Unicode(u'', config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from IPython.lib import passwd; passwd() The string should be of the form type:salt:hashed-password. """ ) open_browser = Bool(True, config=True, help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (NotebookApp.browser) configuration option. """) browser = Unicode(u'', config=True, help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. 
""") webapp_settings = Dict(config=True, help="DEPRECATED, use tornado_settings" ) def _webapp_settings_changed(self, name, old, new): self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n") self.tornado_settings = new tornado_settings = Dict(config=True, help="Supply overrides for the tornado.web.Application that the " "IPython notebook uses.") ssl_options = Dict(config=True, help="""Supply SSL options for the tornado HTTPServer. See the tornado docs for details.""") jinja_environment_options = Dict(config=True, help="Supply extra arguments that will be passed to Jinja environment.") jinja_template_vars = Dict( config=True, help="Extra variables to supply to jinja templates when rendering.", ) enable_mathjax = Bool(True, config=True, help="""Whether to enable MathJax for typesetting math/TeX MathJax is the javascript library IPython uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """ ) def _enable_mathjax_changed(self, name, old, new): """set mathjax url to empty if mathjax is disabled""" if not new: self.mathjax_url = u'' base_url = Unicode('/', config=True, help='''The base URL for the notebook server. Leading and trailing slashes can be omitted, and will automatically be added. ''') def _base_url_changed(self, name, old, new): if not new.startswith('/'): self.base_url = '/'+new elif not new.endswith('/'): self.base_url = new+'/' base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""") def _base_project_url_changed(self, name, old, new): self.log.warn("base_project_url is deprecated, use base_url") self.base_url = new extra_static_paths = List(Unicode, config=True, help="""Extra paths to search for serving static files. 
This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython""" ) def _extra_static_paths_default(self): return [os.path.join(self.profile_dir.location, 'static')] @property def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] extra_template_paths = List(Unicode, config=True, help="""Extra paths to search for serving jinja templates. Can be used to override templates from IPython.html.templates.""" ) def _extra_template_paths_default(self): return [] @property def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST extra_nbextensions_path = List(Unicode, config=True, help="""extra paths to look for Javascript notebook extensions""" ) @property def nbextensions_path(self): """The path to look for Javascript notebook extensions""" return self.extra_nbextensions_path + [os.path.join(get_ipython_dir(), 'nbextensions')] + SYSTEM_NBEXTENSIONS_DIRS websocket_url = Unicode("", config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
        Should be in the form of an HTTP origin: ws[s]://hostname[:port]
        """
    )
    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js."""
    )
    def _mathjax_url_default(self):
        """Pick a MathJax URL: prefer a locally installed copy, else fall back to the CDN."""
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.tornado_settings.get("static_url_prefix",
                         url_path_join(self.base_url, "static")
        )
        
        # try local mathjax, either in nbextensions/mathjax or static/mathjax
        for (url_prefix, search_path) in [
            (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path),
            (static_url_prefix, self.static_file_path),
        ]:
            self.log.debug("searching for local mathjax in %s", search_path)
            try:
                mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path)
            except IOError:
                # Not found under this prefix; try the next location.
                continue
            else:
                url = url_path_join(url_prefix, u"mathjax/MathJax.js")
                self.log.info("Serving local MathJax from %s at %s", mathjax, url)
                return url
        
        # no local mathjax, serve from CDN
        url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
        self.log.info("Using MathJax from CDN: %s", url)
        return url
    
    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    contents_manager_class = Type(
        default_value=FileContentsManager,
        klass=ContentsManager,
        config=True,
        help='The notebook manager class to use.'
    )
    kernel_manager_class = Type(
        default_value=MappingKernelManager,
        config=True,
        help='The kernel manager class to use.'
    )
    session_manager_class = Type(
        default_value=SessionManager,
        config=True,
        help='The session manager class to use.'
    )
    cluster_manager_class = Type(
        default_value=ClusterManager,
        config=True,
        help='The cluster manager class to use.'
    )
    config_manager_class = Type(
        default_value=ConfigManager,
        config = True,
        help='The config manager class to use'
    )

    kernel_spec_manager = Instance(KernelSpecManager)

    kernel_spec_manager_class = Type(
        default_value=KernelSpecManager,
        config=True,
        help="""
        The kernel spec manager class to use. Should be a subclass
        of `IPython.kernel.kernelspec.KernelSpecManager`.

        The Api of KernelSpecManager is provisional and might change
        without warning between this version of IPython and the next stable one.
        """
    )

    login_handler_class = Type(
        default_value=LoginHandler,
        klass=web.RequestHandler,
        config=True,
        help='The login handler class to use.',
    )

    logout_handler_class = Type(
        default_value=LogoutHandler,
        klass=web.RequestHandler,
        config=True,
        help='The logout handler class to use.',
    )

    # NOTE(review): the two help fragments below concatenate without a
    # separating space ("headerssent" in the rendered help) — candidate
    # string fix, deliberately not changed here.
    trust_xheaders = Bool(False, config=True, 
        help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
              "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")
    )

    info_file = Unicode()

    def _info_file_default(self):
        # One JSON info file per server process, keyed by pid.
        info_file = "nbserver-%s.json"%os.getpid()
        return os.path.join(self.profile_dir.security_dir, info_file)
    
    pylab = Unicode('disabled', config=True,
        help="""
        DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
        """
    )
    def _pylab_changed(self, name, old, new):
        """when --pylab is specified, display a warning and exit"""
        if new != 'warn':
            backend = ' %s' % new
        else:
            backend = ''
        self.log.error("Support for specifying --pylab on the command line has been removed.")
        self.log.error(
            "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend)
        )
        self.exit(1)

    notebook_dir = Unicode(config=True,
        help="The directory to use for notebooks and kernels."
    )
    
    def _notebook_dir_default(self):
        # When opening a single file, serve from its directory; else the CWD.
        if self.file_to_run:
            return os.path.dirname(os.path.abspath(self.file_to_run))
        else:
            return py3compat.getcwd()

    def _notebook_dir_changed(self, name, old, new):
        """Do a bit of validation of the notebook dir."""
        if not os.path.isabs(new):
            # If we receive a non-absolute path, make it absolute.
            self.notebook_dir = os.path.abspath(new)
            return
        if not os.path.isdir(new):
            raise TraitError("No such notebook dir: %r" % new)
        
        # setting App.notebook_dir implies setting notebook and kernel dirs as well
        self.config.FileContentsManager.root_dir = new
        self.config.MappingKernelManager.root_dir = new

    server_extensions = List(Unicode(), config=True,
        help=("Python modules to load as notebook server extensions. "
              "This is an experimental API, and may change in future releases.")
    )

    reraise_server_extension_failures = Bool(
        False,
        config=True,
        help="Reraise exceptions encountered loading server extensions?",
    )

    def parse_command_line(self, argv=None):
        """Parse argv; a single extra positional argument is treated as a
        notebook directory or a notebook file to open."""
        super(NotebookApp, self).parse_command_line(argv)

        if self.extra_args:
            arg0 = self.extra_args[0]
            f = os.path.abspath(arg0)
            self.argv.remove(arg0)
            if not os.path.exists(f):
                self.log.critical("No such file or directory: %s", f)
                self.exit(1)
            
            # Use config here, to ensure that it takes higher priority than
            # anything that comes from the profile.
            c = Config()
            if os.path.isdir(f):
                c.NotebookApp.notebook_dir = f
            elif os.path.isfile(f):
                c.NotebookApp.file_to_run = f
            self.update_config(c)

    def init_kernel_argv(self):
        """add the profile-dir to arguments to be passed to IPython kernels"""
        # FIXME: remove special treatment of IPython kernels
        # Kernel should get *absolute* path to profile directory
        self.ipython_kernel_argv = ["--profile-dir", self.profile_dir.location]

    def init_configurables(self):
        """Instantiate the pluggable managers (kernel specs, kernels, contents,
        sessions, clusters, frontend config) from their configured classes."""
        self.kernel_spec_manager = self.kernel_spec_manager_class(
            parent=self,
            ipython_dir=self.ipython_dir,
        )
        self.kernel_manager = self.kernel_manager_class(
            parent=self,
            log=self.log,
            ipython_kernel_argv=self.ipython_kernel_argv,
            connection_dir=self.profile_dir.security_dir,
        )
        self.contents_manager = self.contents_manager_class(
            parent=self,
            log=self.log,
        )
        self.session_manager = self.session_manager_class(
            parent=self,
            log=self.log,
            kernel_manager=self.kernel_manager,
            contents_manager=self.contents_manager,
        )
        self.cluster_manager = self.cluster_manager_class(
            parent=self,
            log=self.log,
        )
        self.config_manager = self.config_manager_class(
            parent=self,
            log=self.log,
            profile_dir=self.profile_dir.location,
        )

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a log
        # and all of its ancestors until propagate is set to False.
        self.log.propagate = False
        
        for log in app_log, access_log, gen_log:
            # consistent log output name (NotebookApp instead of tornado.access, etc.)
            log.name = self.log.name
        
        # hook up tornado 3's loggers to our app handlers
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)
    
    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        # Push CORS-related config into the tornado settings dict so the
        # request handlers can enforce it.
        self.tornado_settings['allow_origin'] = self.allow_origin
        if self.allow_origin_pat:
            self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat)
        self.tornado_settings['allow_credentials'] = self.allow_credentials

        # ensure default_url starts with base_url
        if not self.default_url.startswith(self.base_url):
            self.default_url = url_path_join(self.base_url, self.default_url)
        
        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.contents_manager,
            self.cluster_manager, self.session_manager, self.kernel_spec_manager,
            self.config_manager,
            self.log, self.base_url, self.default_url, self.tornado_settings,
            self.jinja_environment_options
        )
        ssl_options = self.ssl_options
        if self.certfile:
            ssl_options['certfile'] = self.certfile
        if self.keyfile:
            ssl_options['keyfile'] = self.keyfile
        if not ssl_options:
            # None indicates no SSL config
            ssl_options = None
        else:
            # Disable SSLv3, since its use is discouraged.
            # NOTE(review): pinning PROTOCOL_TLSv1 forces exactly TLS 1.0;
            # newer stdlib recommends a negotiated protocol with SSLv3 disabled
            # via options — candidate hardening, not changed here.
            ssl_options['ssl_version']=ssl.PROTOCOL_TLSv1
        self.login_handler_class.validate_security(self, ssl_options=ssl_options)
        self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)

        success = None
        for port in random_ports(self.port, self.port_retries+1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    self.log.info('The port %i is already in use, trying another random port.' % port)
                    continue
                elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)):
                    # WSAEACCES is the Windows equivalent of EACCES.
                    self.log.warn("Permission to listen on port %i denied" % port)
                    continue
                else:
                    raise
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical('ERROR: the notebook server could not be started because '
                              'no available port could be found.')
            self.exit(1)
    
    @property
    def display_url(self):
        # URL shown to the user in log output; '' means all interfaces.
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        return self._url(ip)

    @property
    def connection_url(self):
        # URL suitable for actually connecting (e.g. opening a browser).
        ip = self.ip if self.ip else 'localhost'
        return self._url(ip)

    def _url(self, ip):
        """Build the server URL for the given host/IP string."""
        proto = 'https' if self.certfile else 'http'
        return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)

    def init_terminals(self):
        try:
            from .terminal import initialize
            initialize(self.web_app)
            self.web_app.settings['terminals_available'] = True
        except ImportError as e:
            # Terminals are optional; only warn where they are expected to work.
            log = self.log.debug if sys.platform == 'win32' else self.log.warn
            log("Terminals not available (error was %s)", e)

    def init_signal(self):
        if not sys.platform.startswith('win'):
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)
    
    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()
    
    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)
    
    def _confirm_exit(self):
        """confirm shutdown on ^C
        
        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.
        
        This doesn't work on Windows.
        """
        info = self.log.info
        info('interrupted')
        print(self.notebook_info())
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        r,w,x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y') and 'n' not in line.lower():
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.current().stop()
                return
        else:
            print("No answer for 5s:", end=' ')
        print("resuming operation...")
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)
    
    def _signal_stop(self, sig, frame):
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.current().stop()

    def _signal_info(self, sig, frame):
        print(self.notebook_info())
    
    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        status = submodule.check_submodule_status()
        if status == 'missing':
            self.log.warn("components submodule missing, running `git submodule update`")
            submodule.update_submodules(submodule.ipython_parent())
        elif status == 'unclean':
            self.log.warn("components submodule unclean, you may see 404s on static/components")
            self.log.warn("run `setup.py submodule` or `git submodule update` to update")
    
    def init_server_extensions(self):
        """Load any extensions specified by config.
        Import the module, then call the load_jupyter_server_extension function,
        if one exists.
        
        The extension API is experimental, and may change in future releases.
        """
        for modulename in self.server_extensions:
            try:
                mod = importlib.import_module(modulename)
                func = getattr(mod, 'load_jupyter_server_extension', None)
                if func is not None:
                    func(self)
            except Exception:
                # By default a broken extension must not prevent server startup;
                # reraise_server_extension_failures opts into failing fast.
                if self.reraise_server_extension_failures:
                    raise
                self.log.warn("Error loading server extension %s", modulename,
                              exc_info=True)
    
    @catch_config_error
    def initialize(self, argv=None):
        # Order matters: logging first, then configurables, then the web app
        # (which wires the configurables into tornado), then signals/extensions.
        super(NotebookApp, self).initialize(argv)
        self.init_logging()
        self.init_kernel_argv()
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_terminals()
        self.init_signal()
        self.init_server_extensions()
    
    def cleanup_kernels(self):
        """Shutdown all kernels.
        
        The kernels will shutdown themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to cleanup the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        info = self.contents_manager.info_string() + "\n"
        info += "%d active kernels \n" % len(self.kernel_manager._kernels)
        return info + "The IPython Notebook is running at: %s" % self.display_url

    def server_info(self):
        """Return a JSONable dict of information about this server."""
        return {'url': self.connection_url,
                'hostname': self.ip if self.ip else 'localhost',
                'port': self.port,
                'secure': bool(self.certfile),
                'base_url': self.base_url,
                'notebook_dir': os.path.abspath(self.notebook_dir),
                'pid': os.getpid()
               }

    def write_server_info_file(self):
        """Write the result of server_info() to the JSON file info_file."""
        with open(self.info_file, 'w') as f:
            json.dump(self.server_info(), f, indent=2)

    def remove_server_info_file(self):
        """Remove the nbserver-<pid>.json file created for this server.
Ignores the error raised when the file has already been removed. """ try: os.unlink(self.info_file) except OSError as e: if e.errno != errno.ENOENT: raise def start(self): """ Start the IPython Notebook server app, after initialization This method takes no arguments so all configuration and initialization must be done prior to calling this method.""" if self.subapp is not None: return self.subapp.start() info = self.log.info for line in self.notebook_info().split("\n"): info(line) info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") self.write_server_info_file() if self.open_browser or self.file_to_run: try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: self.log.warn('No web browser found: %s.' % e) browser = None if self.file_to_run: if not os.path.exists(self.file_to_run): self.log.critical("%s does not exist" % self.file_to_run) self.exit(1) relpath = os.path.relpath(self.file_to_run, self.notebook_dir) uri = url_path_join('notebooks', *relpath.split(os.sep)) else: uri = 'tree' if browser: b = lambda : browser.open(url_path_join(self.connection_url, uri), new=2) threading.Thread(target=b).start() self.io_loop = ioloop.IOLoop.current() if sys.platform.startswith('win'): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop pc = ioloop.PeriodicCallback(lambda : None, 5000) pc.start() try: self.io_loop.start() except KeyboardInterrupt: info("Interrupted...") finally: self.cleanup_kernels() self.remove_server_info_file() def stop(self): def _stop(): self.http_server.stop() self.io_loop.stop() self.io_loop.add_callback(_stop) def list_running_servers(profile='default'): """Iterate over the server info files of running notebook servers. Given a profile name, find nbserver-* files in the security directory of that profile, and yield dicts of their information, each one pertaining to a currently running notebook server instance. 
""" pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile) for file in os.listdir(pd.security_dir): if file.startswith('nbserver-'): with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field if ('pid' in info) and check_pid(info['pid']): yield info else: # If the process has died, try to delete its info file try: os.unlink(file) except OSError: pass # TODO: This should warn or log or something #----------------------------------------------------------------------------- # Main entry point #----------------------------------------------------------------------------- launch_new_instance = NotebookApp.launch_instance
./CrossVul/dataset_final_sorted/CWE-79/py/bad_1727_0