repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
onitake/ansible
refs/heads/devel
contrib/inventory/docker.py
11
#!/usr/bin/env python # # (c) 2016 Paul Durivage <paul.durivage@gmail.com> # Chris Houseknecht <house@redhat.com> # James Tanner <jtanner@redhat.com> # # This file is part of Ansible. # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # DOCUMENTATION = ''' Docker Inventory Script ======================= The inventory script generates dynamic inventory by making API requests to one or more Docker APIs. It's dynamic because the inventory is generated at run-time rather than being read from a static file. The script generates the inventory by connecting to one or many Docker APIs and inspecting the containers it finds at each API. Which APIs the script contacts can be defined using environment variables or a configuration file. Requirements ------------ Using the docker modules requires having docker-py <https://docker-py.readthedocs.io/en/stable/> installed on the host running Ansible. 
To install docker-py: pip install docker-py Run for Specific Host --------------------- When run for a specific container using the --host option this script returns the following hostvars: { "ansible_ssh_host": "", "ansible_ssh_port": 0, "docker_apparmorprofile": "", "docker_args": [], "docker_config": { "AttachStderr": false, "AttachStdin": false, "AttachStdout": false, "Cmd": [ "/hello" ], "Domainname": "", "Entrypoint": null, "Env": null, "Hostname": "9f2f80b0a702", "Image": "hello-world", "Labels": {}, "OnBuild": null, "OpenStdin": false, "StdinOnce": false, "Tty": false, "User": "", "Volumes": null, "WorkingDir": "" }, "docker_created": "2016-04-18T02:05:59.659599249Z", "docker_driver": "aufs", "docker_execdriver": "native-0.2", "docker_execids": null, "docker_graphdriver": { "Data": null, "Name": "aufs" }, "docker_hostconfig": { "Binds": null, "BlkioWeight": 0, "CapAdd": null, "CapDrop": null, "CgroupParent": "", "ConsoleSize": [ 0, 0 ], "ContainerIDFile": "", "CpuPeriod": 0, "CpuQuota": 0, "CpuShares": 0, "CpusetCpus": "", "CpusetMems": "", "Devices": null, "Dns": null, "DnsOptions": null, "DnsSearch": null, "ExtraHosts": null, "GroupAdd": null, "IpcMode": "", "KernelMemory": 0, "Links": null, "LogConfig": { "Config": {}, "Type": "json-file" }, "LxcConf": null, "Memory": 0, "MemoryReservation": 0, "MemorySwap": 0, "MemorySwappiness": null, "NetworkMode": "default", "OomKillDisable": false, "PidMode": "host", "PortBindings": null, "Privileged": false, "PublishAllPorts": false, "ReadonlyRootfs": false, "RestartPolicy": { "MaximumRetryCount": 0, "Name": "" }, "SecurityOpt": [ "label:disable" ], "UTSMode": "", "Ulimits": null, "VolumeDriver": "", "VolumesFrom": null }, "docker_hostnamepath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/hostname", "docker_hostspath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/hosts", "docker_id": 
"9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14", "docker_image": "0a6ba66e537a53a5ea94f7c6a99c534c6adb12e3ed09326d4bf3b38f7c3ba4e7", "docker_logpath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/9f2f80b0a702361d1ac432e6a-json.log", "docker_mountlabel": "", "docker_mounts": [], "docker_name": "/hello-world", "docker_networksettings": { "Bridge": "", "EndpointID": "", "Gateway": "", "GlobalIPv6Address": "", "GlobalIPv6PrefixLen": 0, "HairpinMode": false, "IPAddress": "", "IPPrefixLen": 0, "IPv6Gateway": "", "LinkLocalIPv6Address": "", "LinkLocalIPv6PrefixLen": 0, "MacAddress": "", "Networks": { "bridge": { "EndpointID": "", "Gateway": "", "GlobalIPv6Address": "", "GlobalIPv6PrefixLen": 0, "IPAddress": "", "IPPrefixLen": 0, "IPv6Gateway": "", "MacAddress": "" } }, "Ports": null, "SandboxID": "", "SandboxKey": "", "SecondaryIPAddresses": null, "SecondaryIPv6Addresses": null }, "docker_path": "/hello", "docker_processlabel": "", "docker_resolvconfpath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/resolv.conf", "docker_restartcount": 0, "docker_short_id": "9f2f80b0a7023", "docker_state": { "Dead": false, "Error": "", "ExitCode": 0, "FinishedAt": "2016-04-18T02:06:00.296619369Z", "OOMKilled": false, "Paused": false, "Pid": 0, "Restarting": false, "Running": false, "StartedAt": "2016-04-18T02:06:00.272065041Z", "Status": "exited" } } Groups ------ When run in --list mode (the default), container instances are grouped by: - container id - container name - container short id - image_name (image_<image name>) - docker_host - running - stopped Configuration: -------------- You can control the behavior of the inventory script by passing arguments, defining environment variables, or creating a configuration file named docker.yml (sample provided in ansible/contrib/inventory). 
The order of precedence is command line args, then the docker.yml file and finally environment variables. Environment variables: ...................... To connect to a single Docker API the following variables can be defined in the environment to control the connection options. These are the same environment variables used by the Docker modules. DOCKER_HOST The URL or Unix socket path used to connect to the Docker API. Defaults to unix://var/run/docker.sock. DOCKER_API_VERSION: The version of the Docker API running on the Docker Host. Defaults to the latest version of the API supported by docker-py. DOCKER_TIMEOUT: The maximum amount of time in seconds to wait on a response fromm the API. Defaults to 60 seconds. DOCKER_TLS: Secure the connection to the API by using TLS without verifying the authenticity of the Docker host server. Defaults to False. DOCKER_TLS_VERIFY: Secure the connection to the API by using TLS and verifying the authenticity of the Docker host server. Default is False DOCKER_TLS_HOSTNAME: When verifying the authenticity of the Docker Host server, provide the expected name of the server. Defaults to localhost. DOCKER_CERT_PATH: Path to the directory containing the client certificate, client key and CA certificate. DOCKER_SSL_VERSION: Provide a valid SSL version number. Default value determined by docker-py, which at the time of this writing was 1.0 In addition to the connection variables there are a couple variables used to control the execution and output of the script: DOCKER_CONFIG_FILE Path to the configuration file. Defaults to ./docker.yml. DOCKER_PRIVATE_SSH_PORT: The private port (container port) on which SSH is listening for connections. Defaults to 22. DOCKER_DEFAULT_IP: The IP address to assign to ansible_host when the container's SSH port is mapped to interface '0.0.0.0'. Configuration File .................. Using a configuration file provides a means for defining a set of Docker APIs from which to build an inventory. 
The default name of the file is derived from the name of the inventory script. By default the script will look for basename of the script (i.e. docker) with an extension of '.yml'. You can also override the default name of the script by defining DOCKER_CONFIG_FILE in the environment. Here's what you can define in docker_inventory.yml: defaults Defines a default connection. Defaults will be taken from this and applied to any values not provided for a host defined in the hosts list. hosts If you wish to get inventory from more than one Docker host, define a hosts list. For the default host and each host in the hosts list define the following attributes: host: description: The URL or Unix socket path used to connect to the Docker API. required: yes tls: description: Connect using TLS without verifying the authenticity of the Docker host server. default: false required: false tls_verify: description: Connect using TLS without verifying the authenticity of the Docker host server. default: false required: false cert_path: description: Path to the client's TLS certificate file. default: null required: false cacert_path: description: Use a CA certificate when performing server verification by providing the path to a CA certificate file. default: null required: false key_path: description: Path to the client's TLS key file. default: null required: false version: description: The Docker API version. required: false default: will be supplied by the docker-py module. timeout: description: The amount of time in seconds to wait on an API response. required: false default: 60 default_ip: description: The IP address to assign to ansible_host when the container's SSH port is mapped to interface '0.0.0.0'. 
required: false default: 127.0.0.1 private_ssh_port: description: The port containers use for SSH required: false default: 22 Examples -------- # Connect to the Docker API on localhost port 4243 and format the JSON output DOCKER_HOST=tcp://localhost:4243 ./docker.py --pretty # Any container's ssh port exposed on 0.0.0.0 will be mapped to # another IP address (where Ansible will attempt to connect via SSH) DOCKER_DEFAULT_IP=1.2.3.4 ./docker.py --pretty # Run as input to a playbook: ansible-playbook -i ~/projects/ansible/contrib/inventory/docker.py docker_inventory_test.yml # Simple playbook to invoke with the above example: - name: Test docker_inventory hosts: all connection: local gather_facts: no tasks: - debug: msg="Container - {{ inventory_hostname }}" ''' import os import sys import json import argparse import re import yaml from collections import defaultdict # Manipulation of the path is needed because the docker-py # module is imported by the name docker, and because this file # is also named docker for path in [os.getcwd(), '', os.path.dirname(os.path.abspath(__file__))]: try: del sys.path[sys.path.index(path)] except: pass HAS_DOCKER_PY = True HAS_DOCKER_ERROR = False try: from docker.errors import APIError, TLSParameterError from docker.tls import TLSConfig from docker.constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_DOCKER_API_VERSION except ImportError as exc: HAS_DOCKER_ERROR = str(exc) HAS_DOCKER_PY = False # Client has recently been split into DockerClient and APIClient try: from docker import Client except ImportError as exc: try: from docker import APIClient as Client except ImportError as exc: HAS_DOCKER_ERROR = str(exc) HAS_DOCKER_PY = False class Client: pass DEFAULT_DOCKER_CONFIG_FILE = os.path.splitext(os.path.basename(__file__))[0] + '.yml' DEFAULT_DOCKER_HOST = 'unix://var/run/docker.sock' DEFAULT_TLS = False DEFAULT_TLS_VERIFY = False DEFAULT_TLS_HOSTNAME = "localhost" DEFAULT_IP = '127.0.0.1' DEFAULT_SSH_PORT = '22' BOOLEANS_TRUE = 
['yes', 'on', '1', 'true', 1, True] BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0, False] DOCKER_ENV_ARGS = dict( config_file='DOCKER_CONFIG_FILE', docker_host='DOCKER_HOST', api_version='DOCKER_API_VERSION', cert_path='DOCKER_CERT_PATH', ssl_version='DOCKER_SSL_VERSION', tls='DOCKER_TLS', tls_verify='DOCKER_TLS_VERIFY', tls_hostname='DOCKER_TLS_HOSTNAME', timeout='DOCKER_TIMEOUT', private_ssh_port='DOCKER_DEFAULT_SSH_PORT', default_ip='DOCKER_DEFAULT_IP', ) def fail(msg): sys.stderr.write("%s\n" % msg) sys.exit(1) def log(msg, pretty_print=False): if pretty_print: print(json.dumps(msg, sort_keys=True, indent=2)) else: print(msg + u'\n') class AnsibleDockerClient(Client): def __init__(self, auth_params, debug): self.auth_params = auth_params self.debug = debug self._connect_params = self._get_connect_params() try: super(AnsibleDockerClient, self).__init__(**self._connect_params) except APIError as exc: self.fail("Docker API error: %s" % exc) except Exception as exc: self.fail("Error connecting: %s" % exc) def fail(self, msg): fail(msg) def log(self, msg, pretty_print=False): if self.debug: log(msg, pretty_print) def _get_tls_config(self, **kwargs): self.log("get_tls_config:") for key in kwargs: self.log(" %s: %s" % (key, kwargs[key])) try: tls_config = TLSConfig(**kwargs) return tls_config except TLSParameterError as exc: self.fail("TLS config error: %s" % exc) def _get_connect_params(self): auth = self.auth_params self.log("auth params:") for key in auth: self.log(" %s: %s" % (key, auth[key])) if auth['tls'] or auth['tls_verify']: auth['docker_host'] = auth['docker_host'].replace('tcp://', 'https://') if auth['tls'] and auth['cert_path'] and auth['key_path']: # TLS with certs and no host verification tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']), verify=False, ssl_version=auth['ssl_version']) return dict(base_url=auth['docker_host'], tls=tls_config, version=auth['api_version'], timeout=auth['timeout']) if auth['tls']: # TLS 
with no certs and not host verification tls_config = self._get_tls_config(verify=False, ssl_version=auth['ssl_version']) return dict(base_url=auth['docker_host'], tls=tls_config, version=auth['api_version'], timeout=auth['timeout']) if auth['tls_verify'] and auth['cert_path'] and auth['key_path']: # TLS with certs and host verification if auth['cacert_path']: tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']), ca_cert=auth['cacert_path'], verify=True, assert_hostname=auth['tls_hostname'], ssl_version=auth['ssl_version']) else: tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']), verify=True, assert_hostname=auth['tls_hostname'], ssl_version=auth['ssl_version']) return dict(base_url=auth['docker_host'], tls=tls_config, version=auth['api_version'], timeout=auth['timeout']) if auth['tls_verify'] and auth['cacert_path']: # TLS with cacert only tls_config = self._get_tls_config(ca_cert=auth['cacert_path'], assert_hostname=auth['tls_hostname'], verify=True, ssl_version=auth['ssl_version']) return dict(base_url=auth['docker_host'], tls=tls_config, version=auth['api_version'], timeout=auth['timeout']) if auth['tls_verify']: # TLS with verify and no certs tls_config = self._get_tls_config(verify=True, assert_hostname=auth['tls_hostname'], ssl_version=auth['ssl_version']) return dict(base_url=auth['docker_host'], tls=tls_config, version=auth['api_version'], timeout=auth['timeout']) # No TLS return dict(base_url=auth['docker_host'], version=auth['api_version'], timeout=auth['timeout']) def _handle_ssl_error(self, error): match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error)) if match: msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \ "Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \ "You may also use TLS without verification by setting the tls parameter to true." 
\ % (self.auth_params['tls_hostname'], match.group(1), match.group(1)) self.fail(msg) self.fail("SSL Exception: %s" % (error)) class EnvArgs(object): def __init__(self): self.config_file = None self.docker_host = None self.api_version = None self.cert_path = None self.ssl_version = None self.tls = None self.tls_verify = None self.tls_hostname = None self.timeout = None self.default_ssh_port = None self.default_ip = None class DockerInventory(object): def __init__(self): self._args = self._parse_cli_args() self._env_args = self._parse_env_args() self.groups = defaultdict(list) self.hostvars = defaultdict(dict) def run(self): config_from_file = self._parse_config_file() if not config_from_file: config_from_file = dict() docker_hosts = self.get_hosts(config_from_file) for host in docker_hosts: client = AnsibleDockerClient(host, self._args.debug) self.get_inventory(client, host) if not self._args.host: self.groups['docker_hosts'] = [host.get('docker_host') for host in docker_hosts] self.groups['_meta'] = dict( hostvars=self.hostvars ) print(self._json_format_dict(self.groups, pretty_print=self._args.pretty)) else: print(self._json_format_dict(self.hostvars.get(self._args.host, dict()), pretty_print=self._args.pretty)) sys.exit(0) def get_inventory(self, client, host): ssh_port = host.get('default_ssh_port') default_ip = host.get('default_ip') hostname = host.get('docker_host') try: containers = client.containers(all=True) except Exception as exc: self.fail("Error fetching containers for host %s - %s" % (hostname, str(exc))) for container in containers: id = container.get('Id') short_id = id[:13] try: name = container.get('Names', list()).pop(0).lstrip('/') except IndexError: name = short_id if not self._args.host or (self._args.host and self._args.host in [name, id, short_id]): try: inspect = client.inspect_container(id) except Exception as exc: self.fail("Error inspecting container %s - %s" % (name, str(exc))) running = inspect.get('State', dict()).get('Running') # 
Add container to groups image_name = inspect.get('Config', dict()).get('Image') if image_name: self.groups["image_%s" % (image_name)].append(name) self.groups[id].append(name) self.groups[name].append(name) if short_id not in self.groups: self.groups[short_id].append(name) self.groups[hostname].append(name) if running is True: self.groups['running'].append(name) else: self.groups['stopped'].append(name) # Figure ous ssh IP and Port try: # Lookup the public facing port Nat'ed to ssh port. port = client.port(container, ssh_port)[0] except (IndexError, AttributeError, TypeError): port = dict() try: ip = default_ip if port['HostIp'] == '0.0.0.0' else port['HostIp'] except KeyError: ip = '' facts = dict( ansible_ssh_host=ip, ansible_ssh_port=port.get('HostPort', int()), docker_name=name, docker_short_id=short_id ) for key in inspect: fact_key = self._slugify(key) facts[fact_key] = inspect.get(key) self.hostvars[name].update(facts) def _slugify(self, value): return 'docker_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_')) def get_hosts(self, config): ''' Determine the list of docker hosts we need to talk to. :param config: dictionary read from config file. can be empty. 
:return: list of connection dictionaries ''' hosts = list() hosts_list = config.get('hosts') defaults = config.get('defaults', dict()) self.log('defaults:') self.log(defaults, pretty_print=True) def_host = defaults.get('host') def_tls = defaults.get('tls') def_tls_verify = defaults.get('tls_verify') def_tls_hostname = defaults.get('tls_hostname') def_ssl_version = defaults.get('ssl_version') def_cert_path = defaults.get('cert_path') def_cacert_path = defaults.get('cacert_path') def_key_path = defaults.get('key_path') def_version = defaults.get('version') def_timeout = defaults.get('timeout') def_ip = defaults.get('default_ip') def_ssh_port = defaults.get('private_ssh_port') if hosts_list: # use hosts from config file for host in hosts_list: docker_host = host.get('host') or def_host or self._args.docker_host or \ self._env_args.docker_host or DEFAULT_DOCKER_HOST api_version = host.get('version') or def_version or self._args.api_version or \ self._env_args.api_version or DEFAULT_DOCKER_API_VERSION tls_hostname = host.get('tls_hostname') or def_tls_hostname or self._args.tls_hostname or \ self._env_args.tls_hostname or DEFAULT_TLS_HOSTNAME tls_verify = host.get('tls_verify') or def_tls_verify or self._args.tls_verify or \ self._env_args.tls_verify or DEFAULT_TLS_VERIFY tls = host.get('tls') or def_tls or self._args.tls or self._env_args.tls or DEFAULT_TLS ssl_version = host.get('ssl_version') or def_ssl_version or self._args.ssl_version or \ self._env_args.ssl_version cert_path = host.get('cert_path') or def_cert_path or self._args.cert_path or \ self._env_args.cert_path if cert_path and cert_path == self._env_args.cert_path: cert_path = os.path.join(cert_path, 'cert.pem') cacert_path = host.get('cacert_path') or def_cacert_path or self._args.cacert_path or \ self._env_args.cert_path if cacert_path and cacert_path == self._env_args.cert_path: cacert_path = os.path.join(cacert_path, 'ca.pem') key_path = host.get('key_path') or def_key_path or self._args.key_path or \ 
self._env_args.cert_path if key_path and key_path == self._env_args.cert_path: key_path = os.path.join(key_path, 'key.pem') timeout = host.get('timeout') or def_timeout or self._args.timeout or self._env_args.timeout or \ DEFAULT_TIMEOUT_SECONDS default_ip = host.get('default_ip') or def_ip or self._env_args.default_ip or \ self._args.default_ip_address or DEFAULT_IP default_ssh_port = host.get('private_ssh_port') or def_ssh_port or self._args.private_ssh_port or \ DEFAULT_SSH_PORT host_dict = dict( docker_host=docker_host, api_version=api_version, tls=tls, tls_verify=tls_verify, tls_hostname=tls_hostname, cert_path=cert_path, cacert_path=cacert_path, key_path=key_path, ssl_version=ssl_version, timeout=timeout, default_ip=default_ip, default_ssh_port=default_ssh_port, ) hosts.append(host_dict) else: # use default definition docker_host = def_host or self._args.docker_host or self._env_args.docker_host or DEFAULT_DOCKER_HOST api_version = def_version or self._args.api_version or self._env_args.api_version or \ DEFAULT_DOCKER_API_VERSION tls_hostname = def_tls_hostname or self._args.tls_hostname or self._env_args.tls_hostname or \ DEFAULT_TLS_HOSTNAME tls_verify = def_tls_verify or self._args.tls_verify or self._env_args.tls_verify or DEFAULT_TLS_VERIFY tls = def_tls or self._args.tls or self._env_args.tls or DEFAULT_TLS ssl_version = def_ssl_version or self._args.ssl_version or self._env_args.ssl_version cert_path = def_cert_path or self._args.cert_path or self._env_args.cert_path if cert_path and cert_path == self._env_args.cert_path: cert_path = os.path.join(cert_path, 'cert.pem') cacert_path = def_cacert_path or self._args.cacert_path or self._env_args.cert_path if cacert_path and cacert_path == self._env_args.cert_path: cacert_path = os.path.join(cacert_path, 'ca.pem') key_path = def_key_path or self._args.key_path or self._env_args.cert_path if key_path and key_path == self._env_args.cert_path: key_path = os.path.join(key_path, 'key.pem') timeout = def_timeout 
or self._args.timeout or self._env_args.timeout or DEFAULT_TIMEOUT_SECONDS default_ip = def_ip or self._env_args.default_ip or self._args.default_ip_address or DEFAULT_IP default_ssh_port = def_ssh_port or self._args.private_ssh_port or DEFAULT_SSH_PORT host_dict = dict( docker_host=docker_host, api_version=api_version, tls=tls, tls_verify=tls_verify, tls_hostname=tls_hostname, cert_path=cert_path, cacert_path=cacert_path, key_path=key_path, ssl_version=ssl_version, timeout=timeout, default_ip=default_ip, default_ssh_port=default_ssh_port, ) hosts.append(host_dict) self.log("hosts: ") self.log(hosts, pretty_print=True) return hosts def _parse_config_file(self): config = dict() config_file = DEFAULT_DOCKER_CONFIG_FILE if self._args.config_file: config_file = self._args.config_file elif self._env_args.config_file: config_file = self._env_args.config_file config_file = os.path.abspath(config_file) if os.path.isfile(config_file): with open(config_file) as f: try: config = yaml.safe_load(f.read()) except Exception as exc: self.fail("Error: parsing %s - %s" % (config_file, str(exc))) else: msg = "Error: config file given by {} does not exist - " + config_file if self._args.config_file: self.fail(msg.format('command line argument')) elif self._env_args.config_file: self.fail(msg.format(DOCKER_ENV_ARGS.get('config_file'))) else: self.log(msg.format('DEFAULT_DOCKER_CONFIG_FILE')) return config def log(self, msg, pretty_print=False): if self._args.debug: log(msg, pretty_print) def fail(self, msg): fail(msg) def _parse_env_args(self): args = EnvArgs() for key, value in DOCKER_ENV_ARGS.items(): if os.environ.get(value): val = os.environ.get(value) if val in BOOLEANS_TRUE: val = True if val in BOOLEANS_FALSE: val = False setattr(args, key, val) return args def _parse_cli_args(self): # Parse command line arguments parser = argparse.ArgumentParser( description='Return Ansible inventory for one or more Docker hosts.') parser.add_argument('--list', action='store_true', 
default=True, help='List all containers (default: True)') parser.add_argument('--debug', action='store_true', default=False, help='Send debug messages to STDOUT') parser.add_argument('--host', action='store', help='Only get information for a specific container.') parser.add_argument('--pretty', action='store_true', default=False, help='Pretty print JSON output(default: False)') parser.add_argument('--config-file', action='store', default=None, help="Name of the config file to use. Default is %s" % (DEFAULT_DOCKER_CONFIG_FILE)) parser.add_argument('--docker-host', action='store', default=None, help="The base url or Unix sock path to connect to the docker daemon. Defaults to %s" % (DEFAULT_DOCKER_HOST)) parser.add_argument('--tls-hostname', action='store', default=None, help="Host name to expect in TLS certs. Defaults to %s" % DEFAULT_TLS_HOSTNAME) parser.add_argument('--api-version', action='store', default=None, help="Docker daemon API version. Defaults to %s" % (DEFAULT_DOCKER_API_VERSION)) parser.add_argument('--timeout', action='store', default=None, help="Docker connection timeout in seconds. Defaults to %s" % (DEFAULT_TIMEOUT_SECONDS)) parser.add_argument('--cacert-path', action='store', default=None, help="Path to the TLS certificate authority pem file.") parser.add_argument('--cert-path', action='store', default=None, help="Path to the TLS certificate pem file.") parser.add_argument('--key-path', action='store', default=None, help="Path to the TLS encryption key pem file.") parser.add_argument('--ssl-version', action='store', default=None, help="TLS version number") parser.add_argument('--tls', action='store_true', default=None, help="Use TLS. Defaults to %s" % (DEFAULT_TLS)) parser.add_argument('--tls-verify', action='store_true', default=None, help="Verify TLS certificates. Defaults to %s" % (DEFAULT_TLS_VERIFY)) parser.add_argument('--private-ssh-port', action='store', default=None, help="Default private container SSH Port. 
Defaults to %s" % (DEFAULT_SSH_PORT)) parser.add_argument('--default-ip-address', action='store', default=None, help="Default container SSH IP address. Defaults to %s" % (DEFAULT_IP)) return parser.parse_args() def _json_format_dict(self, data, pretty_print=False): # format inventory data for output if pretty_print: return json.dumps(data, sort_keys=True, indent=4) else: return json.dumps(data) def main(): if not HAS_DOCKER_PY: fail("Failed to import docker-py. Try `pip install docker-py` - %s" % (HAS_DOCKER_ERROR)) DockerInventory().run() main()
crackerhead/nemio-flask-old
refs/heads/master
lib/python2.7/site-packages/flask/signals.py
783
# -*- coding: utf-8 -*- """ flask.signals ~~~~~~~~~~~~~ Implements signals based on blinker if available, otherwise falls silently back to a noop :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ signals_available = False try: from blinker import Namespace signals_available = True except ImportError: class Namespace(object): def signal(self, name, doc=None): return _FakeSignal(name, doc) class _FakeSignal(object): """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. Instead of doing anything on send, it will just ignore the arguments and do nothing instead. """ def __init__(self, name, doc=None): self.name = name self.__doc__ = doc def _fail(self, *args, **kwargs): raise RuntimeError('signalling support is unavailable ' 'because the blinker library is ' 'not installed.') send = lambda *a, **kw: None connect = disconnect = has_receivers_for = receivers_for = \ temporarily_connected_to = connected_to = _fail del _fail # the namespace for code signals. If you are not flask code, do # not put signals in here. Create your own namespace instead. _signals = Namespace() # core signals. For usage examples grep the sourcecode or consult # the API documentation in docs/api.rst as well as docs/signals.rst template_rendered = _signals.signal('template-rendered') request_started = _signals.signal('request-started') request_finished = _signals.signal('request-finished') request_tearing_down = _signals.signal('request-tearing-down') got_request_exception = _signals.signal('got-request-exception') appcontext_tearing_down = _signals.signal('appcontext-tearing-down') appcontext_pushed = _signals.signal('appcontext-pushed') appcontext_popped = _signals.signal('appcontext-popped') message_flashed = _signals.signal('message-flashed')
pbeeler/android_external_skia
refs/heads/sm-jb-mr1
bench/bench_compare.py
27
'''
Compare two Skia bench output files and report per-benchmark timing diffs.

Created on May 16, 2011

@author: bungeman
'''
# NOTE(review): this script uses Python 2 syntax (print statements,
# ``except E, err``) and must be run with a Python 2 interpreter.
import sys
import getopt
import bench_util


def usage():
    """Print simple command-line usage information to stdout."""
    print '-o <file> the old bench output file.'
    print '-n <file> the new bench output file.'
    print '-h causes headers to be output.'
    print '-f <fieldSpec> which fields to output and in what order.'
    print ' Not specifying is the same as -f "bctondp".'
    print ' b: bench'
    print ' c: config'
    print ' t: time type'
    print ' o: old time'
    print ' n: new time'
    print ' d: diff'
    print ' p: percent diff'


class BenchDiff:
    """Comparison between two corresponding bench data points.

    Wraps an (old BenchDataPoint, new BenchDataPoint) pair for the same
    bench/config/time_type and precomputes the absolute and relative
    timing difference.
    """
    def __init__(self, old, new):
        self.old = old
        self.new = new
        # Positive diff means the new run took less time than the old one.
        self.diff = old.time - new.time
        diffp = 0
        # Guard against division by zero when the old time is 0.
        # NOTE(review): assumes .time is a float so ``/`` is true division
        # even under Python 2 -- TODO confirm against bench_util.
        if old.time != 0:
            diffp = self.diff / old.time
        self.diffp = diffp

    def __repr__(self):
        return "BenchDiff(%s, %s)" % (
                   str(self.new),
                   str(self.old),
               )


def main():
    """Parse the command line, match old/new bench results and print diffs."""
    try:
        opts, _ = getopt.getopt(sys.argv[1:], "f:o:n:h")
    except getopt.GetoptError, err:
        # Bad flags: show the error plus usage and exit with status 2.
        print str(err)
        usage()
        sys.exit(2)

    # One format fragment per field character accepted by -f.
    column_formats = {
        'b' : '{bench: >28} ',
        'c' : '{config: <4} ',
        't' : '{time_type: <4} ',
        'o' : '{old_time: >10.2f} ',
        'n' : '{new_time: >10.2f} ',
        'd' : '{diff: >+10.2f} ',
        'p' : '{diffp: >+8.1%} ',
    }
    # Header fragments mirror the columns but format plain strings.
    header_formats = {
        'b' : '{bench: >28} ',
        'c' : '{config: <4} ',
        't' : '{time_type: <4} ',
        'o' : '{old_time: >10} ',
        'n' : '{new_time: >10} ',
        'd' : '{diff: >10} ',
        'p' : '{diffp: >8} ',
    }

    old = None
    new = None
    column_format = ""
    header_format = ""
    columns = 'bctondp'  # default field order when -f is not given
    header = False

    for option, value in opts:
        if option == "-o":
            old = value
        elif option == "-n":
            new = value
        elif option == "-h":
            header = True
        elif option == "-f":
            columns = value
        else:
            usage()
            assert False, "unhandled option"

    # Both input files are mandatory.
    if old is None or new is None:
        usage()
        sys.exit(2)

    # Build the row and header format strings from the field spec.
    for column_char in columns:
        if column_formats[column_char]:
            column_format += column_formats[column_char]
            header_format += header_formats[column_char]
        else:
            usage()
            sys.exit(2)

    if header:
        print header_format.format(
            bench='bench'
            , config='conf'
            , time_type='time'
            , old_time='old'
            , new_time='new'
            , diff='diff'
            , diffp='diffP'
        )

    # NOTE(review): files opened here are never explicitly closed;
    # relies on process exit / GC.
    old_benches = bench_util.parse({}, open(old, 'r'))
    new_benches = bench_util.parse({}, open(new, 'r'))

    bench_diffs = []
    for old_bench in old_benches:
        # Filter new_benches for the data point matching old_bench on
        # bench name, config and time type.
        new_bench_match = [bench for bench in new_benches
            if old_bench.bench == bench.bench and
               old_bench.config == bench.config and
               old_bench.time_type == bench.time_type
        ]
        # Old benches with no counterpart in the new run are skipped.
        if (len(new_bench_match) < 1):
            continue
        bench_diffs.append(BenchDiff(old_bench, new_bench_match[0]))

    # Sort by relative diff first so the biggest changes group together.
    bench_diffs.sort(key=lambda d : [d.diffp,
                                     d.old.bench,
                                     d.old.config,
                                     d.old.time_type,
                                    ])

    for bench_diff in bench_diffs:
        print column_format.format(
            bench=bench_diff.old.bench.strip()
            , config=bench_diff.old.config.strip()
            , time_type=bench_diff.old.time_type
            , old_time=bench_diff.old.time
            , new_time=bench_diff.new.time
            , diff=bench_diff.diff
            , diffp=bench_diff.diffp
        )


if __name__ == "__main__":
    main()
tonylixu/python-tools
refs/heads/master
python3/pysysadmin/setup.py
1
""" setup.py contains the module/package that will be installed has been packaged and distributed with Distutils """ from setuptools import setup setup( name = 'pysysadmin', description = 'Python system admin scripts', author = 'Tony Li Xu', author_email = 'tonylixu@gmail.com', license = 'Apache', packages=['pysysadmin'], setup_requires = ( 'pytest-runner', ), tests_require = ( 'pytest', 'pytest-datafiles', ), )
bitcraft/pyglet
refs/heads/master
tests/interactive/window/_window_caption.py
1
"""Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. """ import time import unittest from pyglet import window class WindowCaption(unittest.TestCase): def test_caption(self): print(__doc__) w1 = window.Window(400, 200, resizable=True) w2 = window.Window(400, 200, resizable=True) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption('\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close()
douban/code
refs/heads/master
dispatches/notifications/new_commits.py
3
#!/usr/bin/env python # -*- coding: utf-8 -*- from dispatches.notifications import NotificationDispatcher from vilya.libs.mailer import Mail, MailContext from vilya.models.mute import Mute #from vilya.models.actions.pull_commit import PullCommit # feed will need EMAIL_TITLE = "[%s] %s (#%s)" IN_REPLY_TO = '<%s-pull-%s@code>' COMMIT_LIMIT = 3 class Dispatcher(NotificationDispatcher): def __init__(self, data): NotificationDispatcher.__init__(self, data) self._deltacommits = data.get('deltacommits') self._pullreq = data.get('pullreq') self._ticket = data.get('ticket') self._sender = self._pullreq.from_proj.owner_name self._target = self._pullreq.to_proj @property def msgs(self): return [self.mail] @property def mail_receivers(self): ticket = self._ticket ticket_id = ticket.ticket_id target = self._target to_receivers = Mute.filter('ticket', target.name, ticket_id, [ticket.author, ]) cc_recievers = Mute.filter('ticket', target.name, ticket_id, ticket.participants) toaddr = Mail.addrs_by_usernames(to_receivers, target) ccaddr = Mail.addrs_by_usernames(cc_recievers, target) return toaddr, ccaddr @property def mail(self): ticket = self._ticket pullreq = self._pullreq target = self._target deltacommits = self._deltacommits sender = self._sender if len(deltacommits) > COMMIT_LIMIT: more = self.domain("/%s/compare/%s...%s" % ( pullreq.to_proj.name, deltacommits[-COMMIT_LIMIT-1].sha, deltacommits[0].sha,)) more_count = len(deltacommits) - COMMIT_LIMIT delta_commits = [(c.author.name, c.shortlog, self.domain(c.url), c.shortsha) for c in reversed(deltacommits[-COMMIT_LIMIT:])] hook_url = self.hook_url ticket_url = self.domain(ticket.url) in_reply_to = IN_REPLY_TO % (target.name, ticket.ticket_id) subject = EMAIL_TITLE % (target.name, ticket.title, ticket.ticket_id) fromaddr = Mail.customize_sender(sender, target.name) toaddr, ccaddr = self.mail_receivers return Mail(subject, to=toaddr, cc=ccaddr, from_=fromaddr, in_reply_to=in_reply_to, context=MailContext('new_commits', 
data=locals()))
pigeonflight/strider-plone
refs/heads/master
docker/appengine/lib/django-0.96/django/contrib/admin/templatetags/__init__.py
12133432
computersalat/ansible
refs/heads/devel
test/units/ansible_test/ci/__init__.py
12133432
CSC301H-Fall2013/JuakStore
refs/heads/master
site-packages/tests/regressiontests/signals_regress/__init__.py
12133432
manassolanki/erpnext
refs/heads/develop
erpnext/hotels/doctype/__init__.py
12133432
OpenWinCon/OpenWinNet
refs/heads/master
web-gui/myvenv/lib/python3.4/site-packages/django/conf/locale/sk/__init__.py
12133432
shubhdev/openedx
refs/heads/master
common/test/acceptance/pages/lms/account_settings.py
128
""" Base class for account settings page. """ from . import BASE_URL from bok_choy.page_object import PageObject from bok_choy.promise import EmptyPromise from .fields import FieldsMixin class AccountSettingsPage(FieldsMixin, PageObject): """ Tests for Account Settings Page. """ url = "{base}/{settings}".format(base=BASE_URL, settings='account/settings') def is_browser_on_page(self): return self.q(css='.account-settings-container').present def sections_structure(self): """ Return list of section titles and field titles for each section. Example: [ { 'title': 'Section Title' 'fields': ['Field 1 title', 'Field 2 title',...] }, ... ] """ structure = [] sections = self.q(css='.section') for section in sections: section_title_element = section.find_element_by_class_name('section-header') field_title_elements = section.find_elements_by_class_name('u-field-title') structure.append({ 'title': section_title_element.text, 'fields': [element.text for element in field_title_elements], }) return structure def _is_loading_in_progress(self): """ Check if loading indicator is visible. """ query = self.q(css='.ui-loading-indicator') return query.present and 'is-hidden' not in query.attrs('class')[0].split() def wait_for_loading_indicator(self): """ Wait for loading indicator to become visible. """ EmptyPromise(self._is_loading_in_progress, "Loading is in progress.").fulfill()
IvanGavran/scrapy
refs/heads/master
tests/test_spidermiddleware_depth.py
136
from unittest import TestCase from scrapy.spidermiddlewares.depth import DepthMiddleware from scrapy.http import Response, Request from scrapy.spiders import Spider from scrapy.statscollectors import StatsCollector from scrapy.utils.test import get_crawler class TestDepthMiddleware(TestCase): def setUp(self): crawler = get_crawler(Spider) self.spider = crawler._create_spider('scrapytest.org') self.stats = StatsCollector(crawler) self.stats.open_spider(self.spider) self.mw = DepthMiddleware(1, self.stats, True) def test_process_spider_output(self): req = Request('http://scrapytest.org') resp = Response('http://scrapytest.org') resp.request = req result = [Request('http://scrapytest.org')] out = list(self.mw.process_spider_output(resp, result, self.spider)) self.assertEquals(out, result) rdc = self.stats.get_value('request_depth_count/1', spider=self.spider) self.assertEquals(rdc, 1) req.meta['depth'] = 1 out2 = list(self.mw.process_spider_output(resp, result, self.spider)) self.assertEquals(out2, []) rdm = self.stats.get_value('request_depth_max', spider=self.spider) self.assertEquals(rdm, 1) def tearDown(self): self.stats.close_spider(self.spider, '')
Sarah-Alsinan/muypicky
refs/heads/master
lib/python3.6/site-packages/django/core/cache/backends/locmem.py
586
"Thread-safe in-memory cache backend." import time from contextlib import contextmanager from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache from django.utils.synch import RWLock try: from django.utils.six.moves import cPickle as pickle except ImportError: import pickle # Global in-memory store of cache data. Keyed by name, to provide # multiple named local memory caches. _caches = {} _expire_info = {} _locks = {} @contextmanager def dummy(): """A context manager that does nothing special.""" yield class LocMemCache(BaseCache): def __init__(self, name, params): BaseCache.__init__(self, params) self._cache = _caches.setdefault(name, {}) self._expire_info = _expire_info.setdefault(name, {}) self._lock = _locks.setdefault(name, RWLock()) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) with self._lock.writer(): if self._has_expired(key): self._set(key, pickled, timeout) return True return False def get(self, key, default=None, version=None, acquire_lock=True): key = self.make_key(key, version=version) self.validate_key(key) pickled = None with (self._lock.reader() if acquire_lock else dummy()): if not self._has_expired(key): pickled = self._cache[key] if pickled is not None: try: return pickle.loads(pickled) except pickle.PickleError: return default with (self._lock.writer() if acquire_lock else dummy()): try: del self._cache[key] del self._expire_info[key] except KeyError: pass return default def _set(self, key, value, timeout=DEFAULT_TIMEOUT): if len(self._cache) >= self._max_entries: self._cull() self._cache[key] = value self._expire_info[key] = self.get_backend_timeout(timeout) def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) with self._lock.writer(): self._set(key, pickled, 
timeout) def incr(self, key, delta=1, version=None): with self._lock.writer(): value = self.get(key, version=version, acquire_lock=False) if value is None: raise ValueError("Key '%s' not found" % key) new_value = value + delta key = self.make_key(key, version=version) pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL) self._cache[key] = pickled return new_value def has_key(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) with self._lock.reader(): if not self._has_expired(key): return True with self._lock.writer(): try: del self._cache[key] del self._expire_info[key] except KeyError: pass return False def _has_expired(self, key): exp = self._expire_info.get(key, -1) if exp is None or exp > time.time(): return False return True def _cull(self): if self._cull_frequency == 0: self.clear() else: doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0] for k in doomed: self._delete(k) def _delete(self, key): try: del self._cache[key] except KeyError: pass try: del self._expire_info[key] except KeyError: pass def delete(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) with self._lock.writer(): self._delete(key) def clear(self): self._cache.clear() self._expire_info.clear()
Azure/azure-sdk-for-python
refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline
sdk/keyvault/azure-keyvault-keys/tests/test_keys_async.py
1
# ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ import asyncio import codecs from dateutil import parser as date_parse import functools import json import logging from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError from azure.core.pipeline.policies import SansIOHTTPPolicy from azure.keyvault.keys import JsonWebKey from azure.keyvault.keys.aio import KeyClient from six import byte2int from _shared.test_case_async import KeyVaultTestCase from _test_case import client_setup, get_decorator, KeysTestCase all_api_versions = get_decorator(is_async=True) hsm_only = get_decorator(hsm_only=True, is_async=True) logging_enabled = get_decorator(is_async=True, logging_enable=True) logging_disabled = get_decorator(is_async=True, logging_enable=False) # used for logging tests class MockHandler(logging.Handler): def __init__(self): super(MockHandler, self).__init__() self.messages = [] def emit(self, record): self.messages.append(record) class KeyVaultKeyTest(KeysTestCase, KeyVaultTestCase): def _assert_jwks_equal(self, jwk1, jwk2): assert jwk1.kid == jwk2.kid assert jwk1.kty == jwk2.kty assert sorted(jwk1.key_ops) == sorted(jwk2.key_ops) assert jwk1.n == jwk2.n assert jwk1.e == jwk2.e assert jwk1.d == jwk2.d assert jwk1.dp == jwk2.dp assert jwk1.dq == jwk2.dq assert jwk1.qi == jwk2.qi assert jwk1.p == jwk2.p assert jwk1.q == jwk2.q assert jwk1.k == jwk2.k assert jwk1.t == jwk2.t assert jwk1.crv == jwk2.crv assert jwk1.x == jwk2.x assert jwk1.y == jwk2.y def _assert_key_attributes_equal(self, k1, k2): self.assertEqual(k1.name, k2.name) self.assertEqual(k1.vault_url, k2.vault_url) self.assertEqual(k1.enabled, k2.enabled) self.assertEqual(k1.not_before, k2.not_before) self.assertEqual(k1.expires_on, k2.expires_on) self.assertEqual(k1.created_on, k2.created_on) self.assertEqual(k1.updated_on, k2.updated_on) self.assertEqual(k1.tags, k2.tags) 
self.assertEqual(k1.recovery_level, k2.recovery_level) async def _create_rsa_key(self, client, key_name, **kwargs): key_ops = ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"] hsm = kwargs.get("hardware_protected") or False if self.is_live: await asyncio.sleep(2) # to avoid throttling by the service created_key = await client.create_rsa_key(key_name, **kwargs) key_type = "RSA-HSM" if hsm else "RSA" self._validate_rsa_key_bundle(created_key, client.vault_url, key_name, key_type, key_ops) return created_key async def _create_ec_key(self, client, key_name, **kwargs): key_curve = kwargs.get("curve") or "P-256" hsm = kwargs.get("hardware_protected") or False if self.is_live: await asyncio.sleep(2) # to avoid throttling by the service created_key = await client.create_ec_key(key_name, **kwargs) key_type = "EC-HSM" if hsm else "EC" self._validate_ec_key_bundle(key_curve, created_key, client.vault_url, key_name, key_type) return created_key def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) key = key_attributes.key kid = key_attributes.id self.assertEqual(key_curve, key.crv) self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid)) self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty)) self.assertTrue( key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes.", ) def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops): prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name]) key = key_attributes.key kid = key_attributes.id self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid)) self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty)) self.assertTrue(key.n and key.e, "Bad RSA public material.") 
self.assertEqual( sorted(key_ops), sorted(key.key_ops), "keyOps should be '{}', but is '{}'".format(key_ops, key.key_ops) ) self.assertTrue( key_attributes.properties.created_on and key_attributes.properties.updated_on, "Missing required date attributes.", ) async def _update_key_properties(self, client, key): expires = date_parse.parse("2050-01-02T08:00:00.000Z") tags = {"foo": "updated tag"} key_bundle = await client.update_key_properties(key.name, expires_on=expires, tags=tags) self.assertEqual(tags, key_bundle.properties.tags) self.assertEqual(key.id, key_bundle.id) self.assertNotEqual(key.properties.updated_on, key_bundle.properties.updated_on) return key_bundle async def _validate_key_list(self, keys, expected): async for key in keys: if key.name in expected.keys(): self._assert_key_attributes_equal(expected[key.name].properties, key) del expected[key.name] self.assertEqual(len(expected), 0) async def _import_test_key(self, client, name, hardware_protected=False): def _to_bytes(hex): if len(hex) % 2: hex = "0{}".format(hex) return codecs.decode(hex, "hex_codec") key = JsonWebKey( kty="RSA-HSM" if hardware_protected else "RSA", key_ops=["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"], n=_to_bytes( "00a0914d00234ac683b21b4c15d5bed887bdc959c2e57af54ae734e8f00720d775d275e455207e3784ceeb60a50a4655dd72a7a94d271e8ee8f7959a669ca6e775bf0e23badae991b4529d978528b4bd90521d32dd2656796ba82b6bbfc7668c8f5eeb5053747fd199319d29a8440d08f4412d527ff9311eda71825920b47b1c46b11ab3e91d7316407e89c7f340f7b85a34042ce51743b27d4718403d34c7b438af6181be05e4d11eb985d38253d7fe9bf53fc2f1b002d22d2d793fa79a504b6ab42d0492804d7071d727a06cf3a8893aa542b1503f832b296371b6707d4dc6e372f8fe67d8ded1c908fde45ce03bc086a71487fa75e43aa0e0679aa0d20efe35" ), e=_to_bytes("10001"), d=_to_bytes( 
"627c7d24668148fe2252c7fa649ea8a5a9ed44d75c766cda42b29b660e99404f0e862d4561a6c95af6a83d213e0a2244b03cd28576473215073785fb067f015da19084ade9f475e08b040a9a2c7ba00253bb8125508c9df140b75161d266be347a5e0f6900fe1d8bbf78ccc25eeb37e0c9d188d6e1fc15169ba4fe12276193d77790d2326928bd60d0d01d6ead8d6ac4861abadceec95358fd6689c50a1671a4a936d2376440a41445501da4e74bfb98f823bd19c45b94eb01d98fc0d2f284507f018ebd929b8180dbe6381fdd434bffb7800aaabdd973d55f9eaf9bb88a6ea7b28c2a80231e72de1ad244826d665582c2362761019de2e9f10cb8bcc2625649" ), p=_to_bytes( "00d1deac8d68ddd2c1fd52d5999655b2cf1565260de5269e43fd2a85f39280e1708ffff0682166cb6106ee5ea5e9ffd9f98d0becc9ff2cda2febc97259215ad84b9051e563e14a051dce438bc6541a24ac4f014cf9732d36ebfc1e61a00d82cbe412090f7793cfbd4b7605be133dfc3991f7e1bed5786f337de5036fc1e2df4cf3" ), q=_to_bytes( "00c3dc66b641a9b73cd833bc439cd34fc6574465ab5b7e8a92d32595a224d56d911e74624225b48c15a670282a51c40d1dad4bc2e9a3c8dab0c76f10052dfb053bc6ed42c65288a8e8bace7a8881184323f94d7db17ea6dfba651218f931a93b8f738f3d8fd3f6ba218d35b96861a0f584b0ab88ddcf446b9815f4d287d83a3237" ), dp=_to_bytes( "00c9a159be7265cbbabc9afcc4967eb74fe58a4c4945431902d1142da599b760e03838f8cbd26b64324fea6bdc9338503f459793636e59b5361d1e6951e08ddb089e1b507be952a81fbeaf7e76890ea4f536e25505c3f648b1e88377dfc19b4c304e738dfca07211b792286a392a704d0f444c0a802539110b7f1f121c00cff0a9" ), dq=_to_bytes( "00a0bd4c0a3d9f64436a082374b5caf2488bac1568696153a6a5e4cd85d186db31e2f58f024c617d29f37b4e6b54c97a1e25efec59c4d1fd3061ac33509ce8cae5c11f4cd2e83f41a8264f785e78dc0996076ee23dfdfc43d67c463afaa0180c4a718357f9a6f270d542479a0f213870e661fb950abca4a14ca290570ba7983347" ), qi=_to_bytes( "009fe7ae42e92bc04fcd5780464bd21d0c8ac0c599f9af020fde6ab0a7e7d1d39902f5d8fb6c614184c4c1b103fb46e94cd10a6c8a40f9991a1f28269f326435b6c50276fda6493353c650a833f724d80c7d522ba16c79f0eb61f672736b68fb8be3243d10943c4ab7028d09e76cfb5892222e38bc4d35585bf35a88cd68c73b07" ), ) imported_key = await client.import_key(name, key) 
self._validate_rsa_key_bundle(imported_key, client.vault_url, name, key.kty, key.key_ops) return imported_key @all_api_versions() @client_setup async def test_key_crud_operations(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) # create ec key ec_key_name = self.get_resource_name("crud-ec-key") tags = {"purpose": "unit test", "test name": "CreateECKeyTest"} ec_key = await self._create_ec_key( client, enabled=True, key_name=ec_key_name, hardware_protected=True, tags=tags ) assert ec_key.properties.enabled assert tags == ec_key.properties.tags # create ec with curve ec_key_curve_name = self.get_resource_name("crud-P-256-ec-key") created_ec_key_curve = await self._create_ec_key( client, key_name=ec_key_curve_name, curve="P-256", hardware_protected=is_hsm ) self.assertEqual("P-256", created_ec_key_curve.key.crv) # import key import_test_key_name = self.get_resource_name("import-test-key") await self._import_test_key(client, import_test_key_name, hardware_protected=is_hsm) # create rsa key rsa_key_name = self.get_resource_name("crud-rsa-key") tags = {"purpose": "unit test", "test name ": "CreateRSAKeyTest"} key_ops = ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"] rsa_key = await self._create_rsa_key( client, key_name=rsa_key_name, key_operations=key_ops, size=2048, tags=tags, hardware_protected=is_hsm ) assert tags == rsa_key.properties.tags # get the created key with version key = await client.get_key(rsa_key.name, rsa_key.properties.version) self.assertEqual(key.properties.version, rsa_key.properties.version) self._assert_key_attributes_equal(rsa_key.properties, key.properties) # get key without version self._assert_key_attributes_equal( rsa_key.properties, (await client.get_key(rsa_key.name)).properties ) # update key with version if self.is_live: # wait to ensure the key's update time won't equal its creation time await asyncio.sleep(1) await self._update_key_properties(client, rsa_key) # delete the new key deleted_key = await 
client.delete_key(rsa_key.name) self.assertIsNotNone(deleted_key) self._assert_jwks_equal(rsa_key.key, deleted_key.key) self.assertEqual(deleted_key.id, rsa_key.id) self.assertTrue( deleted_key.recovery_id and deleted_key.deleted_date and deleted_key.scheduled_purge_date, "Missing required deleted key attributes.", ) # get the deleted key when soft deleted enabled deleted_key = await client.get_deleted_key(rsa_key.name) self.assertIsNotNone(deleted_key) self.assertEqual(rsa_key.id, deleted_key.id) @hsm_only() @client_setup async def test_rsa_public_exponent(self, client, **kwargs): """The public exponent of a Managed HSM RSA key can be specified during creation""" self.assertIsNotNone(client) key_name = self.get_resource_name("rsa-key") key = await self._create_rsa_key(client, key_name, hardware_protected=True, public_exponent=17) public_exponent = byte2int(key.key.e) assert public_exponent == 17 @all_api_versions() @client_setup async def test_backup_restore(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) key_name = self.get_resource_name("keybak") # create key created_bundle = await self._create_rsa_key(client, key_name, hardware_protected=is_hsm) # backup key key_backup = await client.backup_key(created_bundle.name) self.assertIsNotNone(key_backup, "key_backup") # delete key await client.delete_key(created_bundle.name) # purge key await client.purge_deleted_key(created_bundle.name) # restore key restore_function = functools.partial(client.restore_key_backup, key_backup) restored_key = await self._poll_until_no_exception(restore_function, expected_exception=ResourceExistsError) self._assert_key_attributes_equal(created_bundle.properties, restored_key.properties) @all_api_versions() @client_setup async def test_key_list(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) max_keys = self.list_test_size expected = {} # create many keys for x in range(max_keys): key_name = self.get_resource_name("key{}".format(x)) key = await 
self._create_rsa_key(client, key_name, hardware_protected=is_hsm) expected[key.name] = key # list keys result = client.list_properties_of_keys(max_page_size=max_keys - 1) async for key in result: if key.name in expected.keys(): self._assert_key_attributes_equal(expected[key.name].properties, key) del expected[key.name] self.assertEqual(len(expected), 0) @all_api_versions() @client_setup async def test_list_versions(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) key_name = self.get_resource_name("testKey") max_keys = self.list_test_size expected = {} # create many key versions for _ in range(max_keys): key = await self._create_rsa_key(client, key_name, hardware_protected=is_hsm) expected[key.id] = key result = client.list_properties_of_key_versions(key_name, max_page_size=max_keys - 1) # validate list key versions with attributes async for key in result: if key.id in expected.keys(): expected_key = expected[key.id] del expected[key.id] self._assert_key_attributes_equal(expected_key.properties, key) self.assertEqual(0, len(expected)) @all_api_versions() @client_setup async def test_list_deleted_keys(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) expected = {} # create keys to delete for i in range(self.list_test_size): key_name = self.get_resource_name("key{}".format(i)) expected[key_name] = await self._create_rsa_key(client, key_name, hardware_protected=is_hsm) # delete all keys for key_name in expected.keys(): await client.delete_key(key_name) # validate list deleted keys with attributes async for deleted_key in client.list_deleted_keys(): self.assertIsNotNone(deleted_key.deleted_date) self.assertIsNotNone(deleted_key.scheduled_purge_date) self.assertIsNotNone(deleted_key.recovery_id) # validate all our deleted keys are returned by list_deleted_keys result = client.list_deleted_keys() async for key in result: if key.name in expected.keys(): self._assert_key_attributes_equal(expected[key.name].properties, key.properties) del 
expected[key.name] self.assertEqual(len(expected), 0) @all_api_versions() @client_setup async def test_recover(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) # create keys keys = {} for i in range(self.list_test_size): key_name = self.get_resource_name("key{}".format(i)) keys[key_name] = await self._create_rsa_key(client, key_name, hardware_protected=is_hsm) # delete them for key_name in keys.keys(): await client.delete_key(key_name) # recover them for key_name in keys.keys(): recovered_key = await client.recover_deleted_key(key_name) expected_key = keys[key_name] self._assert_key_attributes_equal(expected_key.properties, recovered_key.properties) # validate the recovered keys expected = {k: v for k, v in keys.items()} actual = {} for k in expected.keys(): actual[k] = await client.get_key(k) self.assertEqual(len(set(expected.keys()) & set(actual.keys())), len(expected)) @all_api_versions() @client_setup async def test_purge(self, client, is_hsm, **kwargs): self.assertIsNotNone(client) # create keys key_names = [self.get_resource_name("key{}".format(i)) for i in range(self.list_test_size)] for key_name in key_names: await self._create_rsa_key(client, key_name, hardware_protected=is_hsm) # delete them for key_name in key_names: await client.delete_key(key_name) # purge them for key_name in key_names: await client.purge_deleted_key(key_name) for key_name in key_names: await self._poll_until_exception( functools.partial(client.get_deleted_key, key_name), expected_exception=ResourceNotFoundError ) # validate none are returned by list_deleted_keys async for deleted_key in client.list_deleted_keys(): assert deleted_key.name not in key_names @logging_enabled() @client_setup async def test_logging_enabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() logger = logging.getLogger("azure") logger.addHandler(mock_handler) logger.setLevel(logging.DEBUG) rsa_key_name = self.get_resource_name("rsa-key-name") await self._create_rsa_key(client, 
rsa_key_name, size=2048, hardware_protected=is_hsm) for message in mock_handler.messages: if message.levelname == "DEBUG" and message.funcName == "on_request": try: body = json.loads(message.message) expected_kty = "RSA-HSM" if is_hsm else "RSA" if body["kty"] == expected_kty: return except (ValueError, KeyError): # this means the message is not JSON or has no kty property pass assert False, "Expected request body wasn't logged" @logging_disabled() @client_setup async def test_logging_disabled(self, client, is_hsm, **kwargs): mock_handler = MockHandler() logger = logging.getLogger("azure") logger.addHandler(mock_handler) logger.setLevel(logging.DEBUG) rsa_key_name = self.get_resource_name("rsa-key-name") await self._create_rsa_key(client, rsa_key_name, size=2048, hardware_protected=is_hsm) for message in mock_handler.messages: if message.levelname == "DEBUG" and message.funcName == "on_request": try: body = json.loads(message.message) expected_kty = "RSA-HSM" if is_hsm else "RSA" assert body["kty"] != expected_kty, "Client request body was logged" except (ValueError, KeyError): # this means the message is not JSON or has no kty property pass def test_service_headers_allowed_in_logs(): service_headers = {"x-ms-keyvault-network-info", "x-ms-keyvault-region", "x-ms-keyvault-service-version"} client = KeyClient("...", object()) assert service_headers.issubset(client._client._config.http_logging_policy.allowed_header_names) def test_custom_hook_policy(): class CustomHookPolicy(SansIOHTTPPolicy): pass client = KeyClient("...", object(), custom_hook_policy=CustomHookPolicy()) assert isinstance(client._client._config.custom_hook_policy, CustomHookPolicy)
vidgizmo/sublime-filterlines-with-invert
refs/heads/master
fold.py
1
import re import sublime import sublime_plugin from .filter import PromptFilterToLinesCommand from .filter import FilterToLinesCommand class PromptFoldToLinesCommand(PromptFilterToLinesCommand): def run(self, search_type='string'): self._run(search_type, "fold_to_lines", "Fold") class FoldToLinesCommand(FilterToLinesCommand): def show_filtered_lines(self, edit, lines): source_lines = self.view.lines(sublime.Region(0, self.view.size())) filtered_line_numbers = [self.view.rowcol(line.begin())[0] for line, _ in lines] regions = [] region = None for line in source_lines: matched = (self.view.rowcol(line.begin())[0] in filtered_line_numbers) ^ self.invert_search if matched: if region: regions.append(region) region = None else: if region: region = region.cover(line) else: region = sublime.Region(line.begin(), line.end()) if region: regions.append(region) if regions: self.view.fold(regions) def prepare_output_line(self, line, matches): pass
zhouzhenghui/python-for-android
refs/heads/master
python3-alpha/python3-src/Lib/test/test_isinstance.py
59
# Tests some corner cases with isinstance() and issubclass().  While these
# tests use new style classes and properties, they actually do whitebox
# testing of error conditions uncovered when using extension types.

import unittest
from test import support
import sys


class TestIsInstanceExceptions(unittest.TestCase):
    # Test to make sure that an AttributeError when accessing the instance's
    # class's bases is masked.  This was actually a bug in Python 2.2 and
    # 2.2.1 where the exception wasn't caught but it also wasn't being cleared
    # (leading to an "undetected error" in the debug build).  Set up is,
    # isinstance(inst, cls) where:
    #
    #  - cls isn't a a type, or a tuple
    #  - cls has a __bases__ attribute
    #  - inst has a __class__ attribute
    #  - inst.__class__ as no __bases__ attribute
    #
    # Sounds complicated, I know, but this mimics a situation where an
    # extension type raises an AttributeError when its __bases__ attribute is
    # gotten.  In that case, isinstance() should return False.
    def test_class_has_no_bases(self):
        class I(object):
            def getclass(self):
                # This must return an object that has no __bases__ attribute
                return None
            __class__ = property(getclass)

        class C(object):
            def getbases(self):
                return ()
            __bases__ = property(getbases)

        self.assertEqual(False, isinstance(I(), C()))

    # Like above except that inst.__class__.__bases__ raises an exception
    # other than AttributeError
    def test_bases_raises_other_than_attribute_error(self):
        class E(object):
            def getbases(self):
                raise RuntimeError
            __bases__ = property(getbases)

        class I(object):
            def getclass(self):
                return E()
            __class__ = property(getclass)

        class C(object):
            def getbases(self):
                return ()
            __bases__ = property(getbases)

        self.assertRaises(RuntimeError, isinstance, I(), C())

    # Here's a situation where getattr(cls, '__bases__') raises an exception.
    # If that exception is not AttributeError, it should not get masked
    def test_dont_mask_non_attribute_error(self):
        class I: pass

        class C(object):
            def getbases(self):
                raise RuntimeError
            __bases__ = property(getbases)

        self.assertRaises(RuntimeError, isinstance, I(), C())

    # Like above, except that getattr(cls, '__bases__') raises an
    # AttributeError, which /should/ get masked as a TypeError
    def test_mask_attribute_error(self):
        class I: pass

        class C(object):
            def getbases(self):
                raise AttributeError
            __bases__ = property(getbases)

        self.assertRaises(TypeError, isinstance, I(), C())

    # check that we don't mask non AttributeErrors
    # see: http://bugs.python.org/issue1574217
    def test_isinstance_dont_mask_non_attribute_error(self):
        class C(object):
            def getclass(self):
                raise RuntimeError
            __class__ = property(getclass)

        c = C()
        self.assertRaises(RuntimeError, isinstance, c, bool)

        # test another code path
        class D: pass
        self.assertRaises(RuntimeError, isinstance, c, D)


# These tests are similar to above, but tickle certain code paths in
# issubclass() instead of isinstance() -- really PyObject_IsSubclass()
# vs. PyObject_IsInstance().
class TestIsSubclassExceptions(unittest.TestCase):
    def test_dont_mask_non_attribute_error(self):
        class C(object):
            def getbases(self):
                raise RuntimeError
            __bases__ = property(getbases)

        class S(C): pass

        self.assertRaises(RuntimeError, issubclass, C(), S())

    def test_mask_attribute_error(self):
        class C(object):
            def getbases(self):
                raise AttributeError
            __bases__ = property(getbases)

        class S(C): pass

        self.assertRaises(TypeError, issubclass, C(), S())

    # Like above, but test the second branch, where the __bases__ of the
    # second arg (the cls arg) is tested.  This means the first arg must
    # return a valid __bases__, and it's okay for it to be a normal --
    # unrelated by inheritance -- class.
    def test_dont_mask_non_attribute_error_in_cls_arg(self):
        class B: pass

        class C(object):
            def getbases(self):
                raise RuntimeError
            __bases__ = property(getbases)

        self.assertRaises(RuntimeError, issubclass, B, C())

    def test_mask_attribute_error_in_cls_arg(self):
        class B: pass

        class C(object):
            def getbases(self):
                raise AttributeError
            __bases__ = property(getbases)

        self.assertRaises(TypeError, issubclass, B, C())


# meta classes for creating abstract classes and instances
class AbstractClass(object):
    def __init__(self, bases):
        self.bases = bases

    def getbases(self):
        return self.bases
    __bases__ = property(getbases)

    def __call__(self):
        return AbstractInstance(self)

class AbstractInstance(object):
    def __init__(self, klass):
        self.klass = klass

    def getclass(self):
        return self.klass
    __class__ = property(getclass)

# abstract classes
AbstractSuper = AbstractClass(bases=())

AbstractChild = AbstractClass(bases=(AbstractSuper,))

# normal classes
class Super:
    pass

class Child(Super):
    pass

# new-style classes
class NewSuper(object):
    pass

class NewChild(NewSuper):
    pass


class TestIsInstanceIsSubclass(unittest.TestCase):
    # Tests to ensure that isinstance and issubclass work on abstract
    # classes and instances.  Before the 2.2 release, TypeErrors were
    # raised when boolean values should have been returned.  The bug was
    # triggered by mixing 'normal' classes and instances were with
    # 'abstract' classes and instances.  This case tries to test all
    # combinations.

    def test_isinstance_normal(self):
        # normal instances
        self.assertEqual(True, isinstance(Super(), Super))
        self.assertEqual(False, isinstance(Super(), Child))
        self.assertEqual(False, isinstance(Super(), AbstractSuper))
        self.assertEqual(False, isinstance(Super(), AbstractChild))

        self.assertEqual(True, isinstance(Child(), Super))
        self.assertEqual(False, isinstance(Child(), AbstractSuper))

    def test_isinstance_abstract(self):
        # abstract instances
        self.assertEqual(True, isinstance(AbstractSuper(), AbstractSuper))
        self.assertEqual(False, isinstance(AbstractSuper(), AbstractChild))
        self.assertEqual(False, isinstance(AbstractSuper(), Super))
        self.assertEqual(False, isinstance(AbstractSuper(), Child))

        self.assertEqual(True, isinstance(AbstractChild(), AbstractChild))
        self.assertEqual(True, isinstance(AbstractChild(), AbstractSuper))
        self.assertEqual(False, isinstance(AbstractChild(), Super))
        self.assertEqual(False, isinstance(AbstractChild(), Child))

    def test_subclass_normal(self):
        # normal classes
        self.assertEqual(True, issubclass(Super, Super))
        self.assertEqual(False, issubclass(Super, AbstractSuper))
        self.assertEqual(False, issubclass(Super, Child))

        self.assertEqual(True, issubclass(Child, Child))
        self.assertEqual(True, issubclass(Child, Super))
        self.assertEqual(False, issubclass(Child, AbstractSuper))

    def test_subclass_abstract(self):
        # abstract classes
        self.assertEqual(True, issubclass(AbstractSuper, AbstractSuper))
        self.assertEqual(False, issubclass(AbstractSuper, AbstractChild))
        self.assertEqual(False, issubclass(AbstractSuper, Child))

        self.assertEqual(True, issubclass(AbstractChild, AbstractChild))
        self.assertEqual(True, issubclass(AbstractChild, AbstractSuper))
        self.assertEqual(False, issubclass(AbstractChild, Super))
        self.assertEqual(False, issubclass(AbstractChild, Child))

    def test_subclass_tuple(self):
        # test with a tuple as the second argument classes
        self.assertEqual(True, issubclass(Child, (Child,)))
        self.assertEqual(True, issubclass(Child, (Super,)))
        self.assertEqual(False, issubclass(Super, (Child,)))
        self.assertEqual(True, issubclass(Super, (Child, Super)))
        self.assertEqual(False, issubclass(Child, ()))
        self.assertEqual(True, issubclass(Super, (Child, (Super,))))

        self.assertEqual(True, issubclass(NewChild, (NewChild,)))
        self.assertEqual(True, issubclass(NewChild, (NewSuper,)))
        self.assertEqual(False, issubclass(NewSuper, (NewChild,)))
        self.assertEqual(True, issubclass(NewSuper, (NewChild, NewSuper)))
        self.assertEqual(False, issubclass(NewChild, ()))
        self.assertEqual(True, issubclass(NewSuper, (NewChild, (NewSuper,))))

        self.assertEqual(True, issubclass(int, (int, (float, int))))
        self.assertEqual(True, issubclass(str, (str, (Child, NewChild, str))))

    def test_subclass_recursion_limit(self):
        # make sure that issubclass raises RuntimeError before the C stack is
        # blown
        self.assertRaises(RuntimeError, blowstack, issubclass, str, str)

    def test_isinstance_recursion_limit(self):
        # make sure that isinstance raises RuntimeError before the C stack is
        # blown
        self.assertRaises(RuntimeError, blowstack, isinstance, '', str)

def blowstack(fxn, arg, compare_to):
    # Make sure that calling isinstance with a deeply nested tuple for its
    # argument will raise RuntimeError eventually.
    tuple_arg = (compare_to,)
    for cnt in range(sys.getrecursionlimit()+5):
        tuple_arg = (tuple_arg,)
    fxn(arg, tuple_arg)


def test_main():
    support.run_unittest(
        TestIsInstanceExceptions,
        TestIsSubclassExceptions,
        TestIsInstanceIsSubclass
    )

if __name__ == '__main__':
    test_main()
47lining/ansible
refs/heads/devel
lib/ansible/runner/action_plugins/win_template.py
9
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

import os
import pipes
from ansible.utils import template
from ansible import utils
from ansible import errors
from ansible.runner.return_data import ReturnData
import base64

class ActionModule(object):

    # This action plugin transfers the rendered template to the target.
    TRANSFERS_FILES = True

    def __init__(self, runner):
        # runner: the Runner instance driving this task; used for module
        # args merging, remote checksums and module execution helpers.
        self.runner = runner

    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for template operations

        Renders a local Jinja2 template and pushes the result to a Windows
        host via the win_copy module; when the remote content already
        matches, delegates to win_file so permission/state reporting still
        happens.  Returns a ReturnData describing the outcome.
        '''

        if not self.runner.is_playbook:
            raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

        # load up options: complex_args (YAML-structured) first, then k=v
        # pairs from module_args override them
        options  = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        source   = options.get('src', None)
        dest     = options.get('dest', None)

        if (source is None and 'first_available_file' not in inject) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in self.runner.module_vars.get('first_available_file'):
                fn_orig = fn
                # template the candidate filename itself before testing it
                fnt = template.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                if not os.path.exists(fnd) and '_original_file' in inject:
                    # fall back to a templates/ dir relative to the file
                    # (role/include) that defined this task
                    fnd = utils.path_dwim_relative(inject['_original_file'], 'templates', fnt, self.runner.basedir, check=False)
                if os.path.exists(fnd):
                    source = fnd
                    found = True
                    break
            if not found:
                result = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, comm_ok=False, result=result)
        else:
            # resolve src: template any variables in the path, then locate it
            # relative to the original task file (templates/ dir) or basedir
            source = template.template(self.runner.basedir, source, inject)

            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'templates', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # a trailing slash on dest means "directory": append the basename
        if conn.shell.path_has_trailing_slash(dest):
            base = os.path.basename(source)
            dest = conn.shell.join_path(dest, base)

        # template the source data locally & get ready to transfer
        try:
            resultant = template.template_from_file(self.runner.basedir, source, inject, vault_password=self.runner.vault_pass)
        except Exception, e:
            result = dict(failed=True, msg=type(e).__name__ + ": " + str(e))
            return ReturnData(conn=conn, comm_ok=False, result=result)

        # compare rendered content against what is already on the target
        local_checksum = utils.checksum_s(resultant)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)

        if local_checksum != remote_checksum:

            # template is different from the remote value

            # if showing diffs, we need to get the remote value
            dest_contents = ''

            if self.runner.diff:
                # using persist_files to keep the temp directory around to avoid needing to grab another
                dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" % dest_result.result)

            xfered = self.runner._transfer_str(conn, tmp, 'source', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.sudo and self.runner.sudo_user != 'root' or self.runner.su and self.runner.su_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', xfered, tmp)

            # run the copy module
            new_module_args = dict(
               src=xfered,
               dest=dest,
               original_basename=os.path.basename(source),
               follow=True,
            )
            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            if self.runner.noop_on_check(inject):
                # check mode: report the would-be change (and diff) only
                return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=source, before=dest_contents, after=resultant))
            else:
                res = self.runner._execute_module(conn, tmp, 'win_copy', module_args_tmp, inject=inject, complex_args=complex_args)
                if res.result.get('changed', False):
                    res.diff = dict(before=dest_contents, after=resultant)
                return res
        else:
            # when running the file module based on the template data, we do
            # not want the source filename (the name of the template) to be used,
            # since this would mess up links, so we clear the src param and tell
            # the module to follow links
            new_module_args = dict(
                src=None,
                follow=True,
            )
            # be sure to inject the check mode param into the module args and
            # rely on the file module to report its changed status
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True
            module_args = utils.merge_module_args(module_args, new_module_args)
            return self.runner._execute_module(conn, tmp, 'win_file', module_args, inject=inject, complex_args=complex_args)
DivineHime/seishirou
refs/heads/master
lib/pip/_vendor/distlib/_backport/shutil.py
395
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.

"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
from . import tarfile

try:
    import bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]

class Error(EnvironmentError):
    pass

class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""

class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""

class ReadError(EnvironmentError):
    """Raised when an archive cannot be read"""

class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""


try:
    WindowsError
except NameError:
    WindowsError = None

def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)

def _samefile(src, dst):
    """Return True if src and dst reference the same file."""
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))

def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)

def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)

def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            # chflags is best-effort: some filesystems don't support it
            if (not hasattr(errno, 'EOPNOTSUPP') or
                why.errno != errno.EOPNOTSUPP):
                raise

def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)

def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)

def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns

def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occurs. copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            # BUGFIX: was errors.extend((src, dst, str(why))), which spread
            # the tuple into three separate list items; each recorded error
            # must be a single (src, dst, reason) tuple, as in the except
            # EnvironmentError branch above (matches the upstream CPython fix).
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)

def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())


def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    return os.path.basename(path.rstrip(os.path.sep))

def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.

    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        # cross-filesystem move: copy then delete the source
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)

def _destinsrc(src, dst):
    # True when dst lies inside the src tree (moving would recurse forever).
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _get_uid(name):
    """Returns an uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' +  ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile filter: override ownership on every member when requested
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name

def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        # BUGFIX: the '% zip_filename' was previously applied OUTSIDE the
        # ExecError(...) call, i.e. to the exception instance, which raised
        # a TypeError instead of the intended ExecError message.
        raise ExecError("unable to create zip file '%s': "
            "could neither import the 'zipfile' module nor "
            "find a standalone zip utility" % zip_filename)

def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises ExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
            zip.close()

    return zip_filename

_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                "bzip2'ed tar-file")

def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats

def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not isinstance(function, collections.Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)

def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]

def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # always restore the cwd, even if the archiver raised
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename


def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats

def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    if not isinstance(function, collections.Callable):
        raise TypeError('The registered function must be a callable')


def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format. `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description

def unregister_unpack_format(name):
    """Removes the pack format from the registry."""
    del _UNPACK_FORMATS[name]

def _ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)

def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`
    """
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # file
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        zip.close()

def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()

_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")

def _find_unpack_format(filename):
    # Map a filename to a registered unpack format via its extension.
    for name, info in _UNPACK_FORMATS.items():
        for extension in info[0]:
            if filename.endswith(extension):
                return name
    return None

def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
charlesvdv/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/py/py/_path/common.py
171
""" """ import os, sys, posixpath import py # Moved from local.py. iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt') class Checkers: _depend_on_existence = 'exists', 'link', 'dir', 'file' def __init__(self, path): self.path = path def dir(self): raise NotImplementedError def file(self): raise NotImplementedError def dotfile(self): return self.path.basename.startswith('.') def ext(self, arg): if not arg.startswith('.'): arg = '.' + arg return self.path.ext == arg def exists(self): raise NotImplementedError def basename(self, arg): return self.path.basename == arg def basestarts(self, arg): return self.path.basename.startswith(arg) def relto(self, arg): return self.path.relto(arg) def fnmatch(self, arg): return self.path.fnmatch(arg) def endswith(self, arg): return str(self.path).endswith(arg) def _evaluate(self, kw): for name, value in kw.items(): invert = False meth = None try: meth = getattr(self, name) except AttributeError: if name[:3] == 'not': invert = True try: meth = getattr(self, name[3:]) except AttributeError: pass if meth is None: raise TypeError( "no %r checker available for %r" % (name, self.path)) try: if py.code.getrawcode(meth).co_argcount > 1: if (not meth(value)) ^ invert: return False else: if bool(value) ^ bool(meth()) ^ invert: return False except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY): # EBUSY feels not entirely correct, # but its kind of necessary since ENOMEDIUM # is not accessible in python for name in self._depend_on_existence: if name in kw: if kw.get(name): return False name = 'not' + name if name in kw: if not kw.get(name): return False return True class NeverRaised(Exception): pass class PathBase(object): """ shared implementation for filesystem path objects.""" Checkers = Checkers def __div__(self, other): return self.join(str(other)) __truediv__ = __div__ # py3k def basename(self): """ basename part of path. 
""" return self._getbyspec('basename')[0] basename = property(basename, None, None, basename.__doc__) def dirname(self): """ dirname part of path. """ return self._getbyspec('dirname')[0] dirname = property(dirname, None, None, dirname.__doc__) def purebasename(self): """ pure base name of the path.""" return self._getbyspec('purebasename')[0] purebasename = property(purebasename, None, None, purebasename.__doc__) def ext(self): """ extension of the path (including the '.').""" return self._getbyspec('ext')[0] ext = property(ext, None, None, ext.__doc__) def dirpath(self, *args, **kwargs): """ return the directory path joined with any given path arguments. """ return self.new(basename='').join(*args, **kwargs) def read_binary(self): """ read and return a bytestring from reading the path. """ with self.open('rb') as f: return f.read() def read_text(self, encoding): """ read and return a Unicode string from reading the path. """ with self.open("r", encoding=encoding) as f: return f.read() def read(self, mode='r'): """ read and return a bytestring from reading the path. """ with self.open(mode) as f: return f.read() def readlines(self, cr=1): """ read and return a list of lines from the path. if cr is False, the newline will be removed from the end of each line. """ if not cr: content = self.read('rU') return content.split('\n') else: f = self.open('rU') try: return f.readlines() finally: f.close() def load(self): """ (deprecated) return object unpickled from self.read() """ f = self.open('rb') try: return py.error.checked_call(py.std.pickle.load, f) finally: f.close() def move(self, target): """ move this path to target. """ if target.relto(self): raise py.error.EINVAL(target, "cannot move path into a subdirectory of itself") try: self.rename(target) except py.error.EXDEV: # invalid cross-device link self.copy(target) self.remove() def __repr__(self): """ return a string representation of this path. 
""" return repr(str(self)) def check(self, **kw): """ check a path for existence and properties. Without arguments, return True if the path exists, otherwise False. valid checkers:: file=1 # is a file file=0 # is not a file (may not even exist) dir=1 # is a dir link=1 # is a link exists=1 # exists You can specify multiple checker definitions, for example:: path.check(file=1, link=1) # a link pointing to a file """ if not kw: kw = {'exists' : 1} return self.Checkers(self)._evaluate(kw) def fnmatch(self, pattern): """return true if the basename/fullname matches the glob-'pattern'. valid pattern characters:: * matches everything ? matches any single character [seq] matches any character in seq [!seq] matches any char not in seq If the pattern contains a path-separator then the full path is used for pattern matching and a '*' is prepended to the pattern. if the pattern doesn't contain a path-separator the pattern is only matched against the basename. """ return FNMatcher(pattern)(self) def relto(self, relpath): """ return a string which is the relative part of the path to the given 'relpath'. """ if not isinstance(relpath, (str, PathBase)): raise TypeError("%r: not a string or path object" %(relpath,)) strrelpath = str(relpath) if strrelpath and strrelpath[-1] != self.sep: strrelpath += self.sep #assert strrelpath[-1] == self.sep #assert strrelpath[-2] != self.sep strself = self.strpath if sys.platform == "win32" or getattr(os, '_name', None) == 'nt': if os.path.normcase(strself).startswith( os.path.normcase(strrelpath)): return strself[len(strrelpath):] elif strself.startswith(strrelpath): return strself[len(strrelpath):] return "" def ensure_dir(self, *args): """ ensure the path joined with args is a directory. """ return self.ensure(*args, **{"dir": True}) def bestrelpath(self, dest): """ return a string which is a relative path from self (assumed to be a directory) to dest such that self.join(bestrelpath) == dest and if not such path can be determined return dest. 
""" try: if self == dest: return os.curdir base = self.common(dest) if not base: # can be the case on windows return str(dest) self2base = self.relto(base) reldest = dest.relto(base) if self2base: n = self2base.count(self.sep) + 1 else: n = 0 l = [os.pardir] * n if reldest: l.append(reldest) target = dest.sep.join(l) return target except AttributeError: return str(dest) def exists(self): return self.check() def isdir(self): return self.check(dir=1) def isfile(self): return self.check(file=1) def parts(self, reverse=False): """ return a root-first list of all ancestor directories plus the path itself. """ current = self l = [self] while 1: last = current current = current.dirpath() if last == current: break l.append(current) if not reverse: l.reverse() return l def common(self, other): """ return the common part shared with the other path or None if there is no common part. """ last = None for x, y in zip(self.parts(), other.parts()): if x != y: return last last = x return last def __add__(self, other): """ return new path object with 'other' added to the basename""" return self.new(basename=self.basename+str(other)) def __cmp__(self, other): """ return sort value (-1, 0, +1). 
""" try: return cmp(self.strpath, other.strpath) except AttributeError: return cmp(str(self), str(other)) # self.path, other.path) def __lt__(self, other): try: return self.strpath < other.strpath except AttributeError: return str(self) < str(other) def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): """ yields all paths below the current one fil is a filter (glob pattern or callable), if not matching the path will not be yielded, defaulting to None (everything is returned) rec is a filter (glob pattern or callable) that controls whether a node is descended, defaulting to None ignore is an Exception class that is ignoredwhen calling dirlist() on any of the paths (by default, all exceptions are reported) bf if True will cause a breadthfirst search instead of the default depthfirst. Default: False sort if True will sort entries within each directory level. """ for x in Visitor(fil, rec, ignore, bf, sort).gen(self): yield x def _sortlist(self, res, sort): if sort: if hasattr(sort, '__call__'): res.sort(sort) else: res.sort() def samefile(self, other): """ return True if other refers to the same stat object as self. 
""" return self.strpath == str(other) class Visitor: def __init__(self, fil, rec, ignore, bf, sort): if isinstance(fil, str): fil = FNMatcher(fil) if isinstance(rec, str): self.rec = FNMatcher(rec) elif not hasattr(rec, '__call__') and rec: self.rec = lambda path: True else: self.rec = rec self.fil = fil self.ignore = ignore self.breadthfirst = bf self.optsort = sort and sorted or (lambda x: x) def gen(self, path): try: entries = path.listdir() except self.ignore: return rec = self.rec dirs = self.optsort([p for p in entries if p.check(dir=1) and (rec is None or rec(p))]) if not self.breadthfirst: for subdir in dirs: for p in self.gen(subdir): yield p for p in self.optsort(entries): if self.fil is None or self.fil(p): yield p if self.breadthfirst: for subdir in dirs: for p in self.gen(subdir): yield p class FNMatcher: def __init__(self, pattern): self.pattern = pattern def __call__(self, path): pattern = self.pattern if (pattern.find(path.sep) == -1 and iswin32 and pattern.find(posixpath.sep) != -1): # Running on Windows, the pattern has no Windows path separators, # and the pattern has one or more Posix path separators. Replace # the Posix path separators with the Windows path separator. pattern = pattern.replace(posixpath.sep, path.sep) if pattern.find(path.sep) == -1: name = path.basename else: name = str(path) # path.strpath # XXX svn? if not os.path.isabs(pattern): pattern = '*' + path.sep + pattern return py.std.fnmatch.fnmatch(name, pattern)
allotria/intellij-community
refs/heads/master
plugins/hg4idea/testData/bin/mercurial/verify.py
93
# verify.py - repository integrity checking for Mercurial # # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from node import nullid, short from i18n import _ import os import revlog, util, error def verify(repo): lock = repo.lock() try: return _verify(repo) finally: lock.release() def _normpath(f): # under hg < 2.4, convert didn't sanitize paths properly, so a # converted repo may contain repeated slashes while '//' in f: f = f.replace('//', '/') return f def _verify(repo): repo = repo.unfiltered() mflinkrevs = {} filelinkrevs = {} filenodes = {} revisions = 0 badrevs = set() errors = [0] warnings = [0] ui = repo.ui cl = repo.changelog mf = repo.manifest lrugetctx = util.lrucachefunc(repo.changectx) if not repo.cancopy(): raise util.Abort(_("cannot verify bundle or remote repos")) def err(linkrev, msg, filename=None): if linkrev is not None: badrevs.add(linkrev) else: linkrev = '?' 
msg = "%s: %s" % (linkrev, msg) if filename: msg = "%s@%s" % (filename, msg) ui.warn(" " + msg + "\n") errors[0] += 1 def exc(linkrev, msg, inst, filename=None): if isinstance(inst, KeyboardInterrupt): ui.warn(_("interrupted")) raise if not str(inst): inst = repr(inst) err(linkrev, "%s: %s" % (msg, inst), filename) def warn(msg): ui.warn(msg + "\n") warnings[0] += 1 def checklog(obj, name, linkrev): if not len(obj) and (havecl or havemf): err(linkrev, _("empty or missing %s") % name) return d = obj.checksize() if d[0]: err(None, _("data length off by %d bytes") % d[0], name) if d[1]: err(None, _("index contains %d extra bytes") % d[1], name) if obj.version != revlog.REVLOGV0: if not revlogv1: warn(_("warning: `%s' uses revlog format 1") % name) elif revlogv1: warn(_("warning: `%s' uses revlog format 0") % name) def checkentry(obj, i, node, seen, linkrevs, f): lr = obj.linkrev(obj.rev(node)) if lr < 0 or (havecl and lr not in linkrevs): if lr < 0 or lr >= len(cl): msg = _("rev %d points to nonexistent changeset %d") else: msg = _("rev %d points to unexpected changeset %d") err(None, msg % (i, lr), f) if linkrevs: if f and len(linkrevs) > 1: try: # attempt to filter down to real linkrevs linkrevs = [l for l in linkrevs if lrugetctx(l)[f].filenode() == node] except Exception: pass warn(_(" (expected %s)") % " ".join(map(str, linkrevs))) lr = None # can't be trusted try: p1, p2 = obj.parents(node) if p1 not in seen and p1 != nullid: err(lr, _("unknown parent 1 %s of %s") % (short(p1), short(node)), f) if p2 not in seen and p2 != nullid: err(lr, _("unknown parent 2 %s of %s") % (short(p2), short(node)), f) except Exception, inst: exc(lr, _("checking parents of %s") % short(node), inst, f) if node in seen: err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f) seen[node] = i return lr if os.path.exists(repo.sjoin("journal")): ui.warn(_("abandoned transaction found - run hg recover\n")) revlogv1 = cl.version != revlog.REVLOGV0 if ui.verbose or not revlogv1: 
ui.status(_("repository uses revlog format %d\n") % (revlogv1 and 1 or 0)) havecl = len(cl) > 0 havemf = len(mf) > 0 ui.status(_("checking changesets\n")) refersmf = False seen = {} checklog(cl, "changelog", 0) total = len(repo) for i in repo: ui.progress(_('checking'), i, total=total, unit=_('changesets')) n = cl.node(i) checkentry(cl, i, n, seen, [i], "changelog") try: changes = cl.read(n) if changes[0] != nullid: mflinkrevs.setdefault(changes[0], []).append(i) refersmf = True for f in changes[3]: filelinkrevs.setdefault(_normpath(f), []).append(i) except Exception, inst: refersmf = True exc(i, _("unpacking changeset %s") % short(n), inst) ui.progress(_('checking'), None) ui.status(_("checking manifests\n")) seen = {} if refersmf: # Do not check manifest if there are only changelog entries with # null manifests. checklog(mf, "manifest", 0) total = len(mf) for i in mf: ui.progress(_('checking'), i, total=total, unit=_('manifests')) n = mf.node(i) lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest") if n in mflinkrevs: del mflinkrevs[n] else: err(lr, _("%s not in changesets") % short(n), "manifest") try: for f, fn in mf.readdelta(n).iteritems(): if not f: err(lr, _("file without name in manifest")) elif f != "/dev/null": filenodes.setdefault(_normpath(f), {}).setdefault(fn, lr) except Exception, inst: exc(lr, _("reading manifest delta %s") % short(n), inst) ui.progress(_('checking'), None) ui.status(_("crosschecking files in changesets and manifests\n")) total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes) count = 0 if havemf: for c, m in sorted([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]): count += 1 if m == nullid: continue ui.progress(_('crosschecking'), count, total=total) err(c, _("changeset refers to unknown manifest %s") % short(m)) mflinkrevs = None # del is bad here due to scope issues for f in sorted(filelinkrevs): count += 1 ui.progress(_('crosschecking'), count, total=total) if f not in filenodes: lr = filelinkrevs[f][0] 
err(lr, _("in changeset but not in manifest"), f) if havecl: for f in sorted(filenodes): count += 1 ui.progress(_('crosschecking'), count, total=total) if f not in filelinkrevs: try: fl = repo.file(f) lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]]) except Exception: lr = None err(lr, _("in manifest but not in changeset"), f) ui.progress(_('crosschecking'), None) ui.status(_("checking files\n")) storefiles = set() for f, f2, size in repo.store.datafiles(): if not f: err(None, _("cannot decode filename '%s'") % f2) elif size > 0 or not revlogv1: storefiles.add(_normpath(f)) files = sorted(set(filenodes) | set(filelinkrevs)) total = len(files) for i, f in enumerate(files): ui.progress(_('checking'), i, item=f, total=total) try: linkrevs = filelinkrevs[f] except KeyError: # in manifest but not in changelog linkrevs = [] if linkrevs: lr = linkrevs[0] else: lr = None try: fl = repo.file(f) except error.RevlogError, e: err(lr, _("broken revlog! (%s)") % e, f) continue for ff in fl.files(): try: storefiles.remove(ff) except KeyError: err(lr, _("missing revlog!"), ff) checklog(fl, f, lr) seen = {} rp = None for i in fl: revisions += 1 n = fl.node(i) lr = checkentry(fl, i, n, seen, linkrevs, f) if f in filenodes: if havemf and n not in filenodes[f]: err(lr, _("%s not in manifests") % (short(n)), f) else: del filenodes[f][n] # verify contents try: l = len(fl.read(n)) rp = fl.renamed(n) if l != fl.size(i): if len(fl.revision(n)) != fl.size(i): err(lr, _("unpacked size is %s, %s expected") % (l, fl.size(i)), f) except Exception, inst: exc(lr, _("unpacking %s") % short(n), inst, f) # check renames try: if rp: if lr is not None and ui.verbose: ctx = lrugetctx(lr) found = False for pctx in ctx.parents(): if rp[0] in pctx: found = True break if not found: warn(_("warning: copy source of '%s' not" " in parents of %s") % (f, ctx)) fl2 = repo.file(rp[0]) if not len(fl2): err(lr, _("empty or missing copy source revlog %s:%s") % (rp[0], short(rp[1])), f) elif rp[1] == nullid: 
ui.note(_("warning: %s@%s: copy source" " revision is nullid %s:%s\n") % (f, lr, rp[0], short(rp[1]))) else: fl2.rev(rp[1]) except Exception, inst: exc(lr, _("checking rename of %s") % short(n), inst, f) # cross-check if f in filenodes: fns = [(lr, n) for n, lr in filenodes[f].iteritems()] for lr, node in sorted(fns): err(lr, _("%s in manifests not found") % short(node), f) ui.progress(_('checking'), None) for f in storefiles: warn(_("warning: orphan revlog '%s'") % f) ui.status(_("%d files, %d changesets, %d total revisions\n") % (len(files), len(cl), revisions)) if warnings[0]: ui.warn(_("%d warnings encountered!\n") % warnings[0]) if errors[0]: ui.warn(_("%d integrity errors encountered!\n") % errors[0]) if badrevs: ui.warn(_("(first damaged changeset appears to be %d)\n") % min(badrevs)) return 1
jorik041/plaso
refs/heads/master
plaso/formatters/firefox.py
3
# -*- coding: utf-8 -*- """The Mozilla Firefox history event formatter.""" from plaso.formatters import interface from plaso.formatters import manager from plaso.lib import errors class FirefoxBookmarkAnnotationFormatter(interface.ConditionalEventFormatter): """The Firefox bookmark annotation event formatter.""" DATA_TYPE = u'firefox:places:bookmark_annotation' FORMAT_STRING_PIECES = [ u'Bookmark Annotation: [{content}]', u'to bookmark [{title}]', u'({url})'] FORMAT_STRING_SHORT_PIECES = [u'Bookmark Annotation: {title}'] SOURCE_LONG = u'Firefox History' SOURCE_SHORT = u'WEBHIST' class FirefoxBookmarkFolderFormatter(interface.EventFormatter): """The Firefox bookmark folder event formatter.""" DATA_TYPE = u'firefox:places:bookmark_folder' FORMAT_STRING = u'{title}' SOURCE_LONG = u'Firefox History' SOURCE_SHORT = u'WEBHIST' class FirefoxBookmarkFormatter(interface.ConditionalEventFormatter): """The Firefox URL bookmark event formatter.""" DATA_TYPE = u'firefox:places:bookmark' FORMAT_STRING_PIECES = [ u'Bookmark {type}', u'{title}', u'({url})', u'[{places_title}]', u'visit count {visit_count}'] FORMAT_STRING_SHORT_PIECES = [ u'Bookmarked {title}', u'({url})'] SOURCE_LONG = u'Firefox History' SOURCE_SHORT = u'WEBHIST' class FirefoxPageVisitFormatter(interface.ConditionalEventFormatter): """The Firefox page visited event formatter.""" DATA_TYPE = u'firefox:places:page_visited' # Transitions defined in the source file: # src/toolkit/components/places/nsINavHistoryService.idl # Also contains further explanation into what each of these settings mean. _URL_TRANSITIONS = { 1: u'LINK', 2: u'TYPED', 3: u'BOOKMARK', 4: u'EMBED', 5: u'REDIRECT_PERMANENT', 6: u'REDIRECT_TEMPORARY', 7: u'DOWNLOAD', 8: u'FRAMED_LINK', } _URL_TRANSITIONS.setdefault(u'UNKOWN') # TODO: Make extra conditional formatting. 
FORMAT_STRING_PIECES = [ u'{url}', u'({title})', u'[count: {visit_count}]', u'Host: {host}', u'{extra_string}'] FORMAT_STRING_SHORT_PIECES = [u'URL: {url}'] SOURCE_LONG = u'Firefox History' SOURCE_SHORT = u'WEBHIST' def GetMessages(self, unused_formatter_mediator, event_object): """Determines the formatted message strings for an event object. Args: formatter_mediator: the formatter mediator object (instance of FormatterMediator). event_object: the event object (instance of EventObject). Returns: A tuple containing the formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event_object.data_type: raise errors.WrongFormatter(u'Unsupported data type: {0:s}.'.format( event_object.data_type)) event_values = event_object.GetValues() visit_type = event_values.get(u'visit_type', 0) transition = self._URL_TRANSITIONS.get(visit_type, None) if transition: transition_str = u'Transition: {0!s}'.format(transition) extra = event_values.get(u'extra', None) if extra: if transition: extra.append(transition_str) event_values[u'extra_string'] = u' '.join(extra) elif transition: event_values[u'extra_string'] = transition_str return self._ConditionalFormatMessages(event_values) class FirefoxDowloadFormatter(interface.EventFormatter): """The Firefox download event formatter.""" DATA_TYPE = u'firefox:downloads:download' FORMAT_STRING = ( u'{url} ({full_path}). Received: {received_bytes} bytes ' u'out of: {total_bytes} bytes.') FORMAT_STRING_SHORT = u'{full_path} downloaded ({received_bytes} bytes)' SOURCE_LONG = u'Firefox History' SOURCE_SHORT = u'WEBHIST' manager.FormattersManager.RegisterFormatters([ FirefoxBookmarkAnnotationFormatter, FirefoxBookmarkFolderFormatter, FirefoxBookmarkFormatter, FirefoxPageVisitFormatter, FirefoxDowloadFormatter])
bufferapp/buffer-django-nonrel
refs/heads/master
django/db/backends/postgresql/__init__.py
12133432
TechBK/horizon-dev
refs/heads/master
openstack_dashboard/dashboards/techbk_head/newpanel/__init__.py
12133432
ademuk/django-oscar
refs/heads/master
tests/_site/apps/customer/migrations/__init__.py
12133432
alrusdi/lettuce
refs/heads/master
tests/integration/lib/Django-1.3/django/bin/__init__.py
12133432
cpodlesny/lisbon
refs/heads/master
src/offer/__init__.py
12133432
mbauskar/internal-hr
refs/heads/develop
erpnext/hr/doctype/hr_settings/__init__.py
12133432
hamzehd/edx-platform
refs/heads/master
lms/djangoapps/survey/views.py
79
""" View endpoints for Survey """ import logging import json from django.contrib.auth.decorators import login_required from django.http import ( HttpResponse, HttpResponseRedirect, HttpResponseNotFound ) from django.core.urlresolvers import reverse from django.views.decorators.http import require_POST from django.conf import settings from django.utils.html import escape from opaque_keys.edx.keys import CourseKey from edxmako.shortcuts import render_to_response from survey.models import SurveyForm from microsite_configuration import microsite log = logging.getLogger("edx.survey") @login_required def view_survey(request, survey_name): """ View to render the survey to the end user """ redirect_url = request.GET.get('redirect_url') return view_student_survey(request.user, survey_name, redirect_url=redirect_url) def view_student_survey(user, survey_name, course=None, redirect_url=None, is_required=False, skip_redirect_url=None): """ Shared utility method to render a survey form NOTE: This method is shared between the Survey and Courseware Djangoapps """ redirect_url = redirect_url if redirect_url else reverse('dashboard') dashboard_redirect_url = reverse('dashboard') skip_redirect_url = skip_redirect_url if skip_redirect_url else dashboard_redirect_url survey = SurveyForm.get(survey_name, throw_if_not_found=False) if not survey: return HttpResponseRedirect(redirect_url) # the result set from get_answers, has an outer key with the user_id # just remove that outer key to make the JSON payload simplier existing_answers = survey.get_answers(user=user).get(user.id, {}) platform_name = microsite.get_value('platform_name', settings.PLATFORM_NAME) context = { 'existing_data_json': json.dumps(existing_answers), 'postback_url': reverse('submit_answers', args=[survey_name]), 'redirect_url': redirect_url, 'skip_redirect_url': skip_redirect_url, 'dashboard_redirect_url': dashboard_redirect_url, 'survey_form': survey.form, 'is_required': is_required, 'mail_to_link': 
microsite.get_value('email_from_address', settings.CONTACT_EMAIL), 'platform_name': platform_name, 'course': course, } return render_to_response("survey/survey.html", context) @require_POST @login_required def submit_answers(request, survey_name): """ Form submission post-back endpoint. NOTE: We do not have a formal definition of a Survey Form, it's just some authored HTML form fields (via Django Admin site). Therefore we do not do any validation of the submission server side. It is assumed that all validation is done via JavaScript in the survey.html file """ survey = SurveyForm.get(survey_name, throw_if_not_found=False) if not survey: return HttpResponseNotFound() answers = {} for key in request.POST.keys(): # support multi-SELECT form values, by string concatenating them with a comma separator array_val = request.POST.getlist(key) answers[key] = request.POST[key] if len(array_val) == 0 else ','.join(array_val) # the URL we are supposed to redirect to is # in a hidden form field redirect_url = answers['_redirect_url'] if '_redirect_url' in answers else reverse('dashboard') course_key = CourseKey.from_string(answers['course_id']) if 'course_id' in answers else None allowed_field_names = survey.get_field_names() # scrub the answers to make sure nothing malicious from the user gets stored in # our database, e.g. JavaScript filtered_answers = {} for answer_key in answers.keys(): # only allow known input fields if answer_key in allowed_field_names: filtered_answers[answer_key] = escape(answers[answer_key]) survey.save_user_answers(request.user, filtered_answers, course_key) response_params = json.dumps({ # The HTTP end-point for the payment processor. "redirect_url": redirect_url, }) return HttpResponse(response_params, content_type="text/json")
palerdot/calibre
refs/heads/master
src/calibre/utils/socket_inheritance.py
5
#!/usr/bin/env python # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai from __future__ import (unicode_literals, division, absolute_import, print_function) ''' Created on 29 Nov 2013 @author: charles Code taken from https://mail.python.org/pipermail/python-dev/2007-June/073745.html modified to make it work ''' from calibre.constants import iswindows def get_socket_inherit(socket): ''' Returns True if the socket has been set to allow inheritance across forks and execs to child processes, otherwise False ''' try: if iswindows: import win32api, win32con flags = win32api.GetHandleInformation(socket.fileno()) return bool(flags & win32con.HANDLE_FLAG_INHERIT) else: import fcntl flags = fcntl.fcntl(socket.fileno(), fcntl.F_GETFD) return not bool(flags & fcntl.FD_CLOEXEC) except: import traceback traceback.print_exc() def set_socket_inherit(sock, inherit): ''' Mark a socket as inheritable or non-inheritable to child processes. This should be called right after socket creation if you want to prevent the socket from being inherited by child processes. Note that for sockets, a new socket returned from accept() will be inheritable even if the listener socket was not; so you should call set_socket_inherit for the new socket as well. ''' try: if iswindows: import win32api, win32con if inherit: flags = win32con.HANDLE_FLAG_INHERIT else: flags = 0 win32api.SetHandleInformation(sock.fileno(), win32con.HANDLE_FLAG_INHERIT, flags) else: import fcntl fd = sock.fileno() flags = fcntl.fcntl(fd, fcntl.F_GETFD) & ~fcntl.FD_CLOEXEC if not inherit: flags = flags | fcntl.FD_CLOEXEC fcntl.fcntl(fd, fcntl.F_SETFD, flags) except: import traceback traceback.print_exc() def test(): import socket s = socket.socket() orig = get_socket_inherit(s) set_socket_inherit(s, orig ^ True) if orig == get_socket_inherit(s): raise RuntimeError('Failed to change socket inheritance status') print ('OK!') if __name__ == '__main__': test()
karolciba/playground
refs/heads/master
neural/trex.py
1
width = 1200 height_down = 25 height_up = 200 # 250 # with scores
Evervolv/android_external_chromium_org
refs/heads/kitkat
chrome/android/host_driven_tests/DummyTest.py
23
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from pylib.host_driven import test_case from pylib.host_driven import tests_annotations class DummyTest(test_case.HostDrivenTestCase): """Dummy host-driven test for testing the framework itself.""" @tests_annotations.Smoke def testPass(self): return self._RunJavaTests( 'org.chromium.chrome.browser.test', ['DummyTest.testPass'])
googleads/google-ads-python
refs/heads/master
google/ads/googleads/v7/services/services/ad_service/transports/base.py
1
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import abc import typing import pkg_resources from google import auth from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.ads.googleads.v7.resources.types import ad from google.ads.googleads.v7.services.types import ad_service try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-ads",).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class AdServiceTransport(metaclass=abc.ABCMeta): """Abstract transport class for AdService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",) def __init__( self, *, host: str = "googleads.googleapis.com", credentials: credentials.Credentials = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host # If no credentials are provided, then determine the appropriate # defaults. if credentials is None: credentials, _ = auth.default(scopes=self.AUTH_SCOPES) # Save the credentials. self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. self._prep_wrapped_messages(client_info) def _prep_wrapped_messages(self, client_info): # Precomputed wrapped methods self._wrapped_methods = { self.get_ad: gapic_v1.method.wrap_method( self.get_ad, default_timeout=None, client_info=client_info, ), self.mutate_ads: gapic_v1.method.wrap_method( self.mutate_ads, default_timeout=None, client_info=client_info, ), } @property def get_ad(self) -> typing.Callable[[ad_service.GetAdRequest], ad.Ad]: raise NotImplementedError @property def mutate_ads( self, ) -> typing.Callable[ [ad_service.MutateAdsRequest], ad_service.MutateAdsResponse ]: raise NotImplementedError __all__ = ("AdServiceTransport",)
mhils/mitmproxy
refs/heads/master
examples/contrib/remote-debug.py
4
""" This script enables remote debugging of the mitmproxy console *UI* with PyCharm. For general debugging purposes, it is easier to just debug mitmdump within PyCharm. Usage: - pip install pydevd on the mitmproxy machine - Open the Run/Debug Configuration dialog box in PyCharm, and select the Python Remote Debug configuration type. - Debugging works in the way that mitmproxy connects to the debug server on startup. Specify host and port that mitmproxy can use to reach your PyCharm instance on startup. - Adjust this inline script accordingly. - Start debug server in PyCharm - Set breakpoints - Start mitmproxy -s remote_debug.py """ def load(l): import pydevd_pycharm pydevd_pycharm.settrace("localhost", port=5678, stdoutToServer=True, stderrToServer=True, suspend=False)
sergiofasilva/WebRealTimeEngine
refs/heads/master
webrealtimeengine/controllers/items.py
1
# -*- coding: utf-8 -*- ''' Created on 4 de Set de 2013 @author: Sergio Silva ''' import logging import traceback import time import json import handler from random import randint from handler import Handler from collections import namedtuple class Items(Handler): @handler.response_page_format() def get(self): t0 = time.clock() data = '' try: kw = {} item = namedtuple('id', 'content') rnd = randint(1, 200) #only if rnd <= 100 if ( rnd <= 100): content = "Item: {}".format(str(rnd)) item.id = rnd item.content = content kw['item'] = item item_html = self.render_str('item.html', **kw) d = {} d['id'] = item.id d['item'] = item_html data = json.dumps(d) #self.write(data) except Exception: logging.error("Items.get: {}".format(traceback.format_exc())) self.http_error(500) finally: logging.debug('COMPLETE RENDER Items IN {} SECONDS'.format( str(time.clock() - t0))) return data
hurricup/intellij-community
refs/heads/master
python/testData/dotNet/import_class_from_module_alias.py
80
import clr clr.AddReferenceByPartialName("PythonLibs") from c<caret>om.just.like.java import LikeJavaClass as MyClass print MyClass
igor-rangel7l/igorrangelteste.repository
refs/heads/master
script.module.urlresolver/lib/urlresolver/plugins/videowood.py
1
""" urlresolver XBMC Addon Copyright (C) 2015 tknorris This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re from t0mm0.common.net import Net from lib import jsunpack from urlresolver.plugnplay.interfaces import UrlResolver from urlresolver.plugnplay.interfaces import PluginSettings from urlresolver.plugnplay import Plugin class VideowoodResolver(Plugin, UrlResolver, PluginSettings): implements = [UrlResolver, PluginSettings] name = "videowood" domains = ['videowood.tv'] pattern = '(?://|\.)(videowood\.tv)/(?:embed/|video/)([0-9a-z]+)' def __init__(self): p = self.get_setting('priority') or 100 self.priority = int(p) self.net = Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) stream_url = None headers = {'Referer': web_url} html = self.net.http_GET(web_url, headers=headers).content if "This video doesn't exist." 
in html: raise UrlResolver.ResolverError('The requested video was not found.') packed = re.search('(eval\(function\(p,a,c,k,e,d\)\{.+\))', html) unpacked = None if packed: # change radix before trying to unpack, 58-61 seen in testing, 62 worked for all packed = re.sub(r"(.+}\('.*', *)\d+(, *\d+, *'.*?'\.split\('\|'\))", "\g<01>62\g<02>", packed.group(1)) unpacked = jsunpack.unpack(packed) if unpacked: r = re.search('.+["\']file["\']\s*:\s*["\'](.+?/video\\\.+?)["\']', unpacked) if r: stream_url = r.group(1).replace('\\', '') if stream_url: return stream_url else: raise UrlResolver.ResolverError('File not found') def get_url(self, host, media_id): return 'http://videowood.tv/embed/%s' % media_id def get_host_and_id(self, url): r = re.search(self.pattern, url) if r: return r.groups() else: return False def valid_url(self, url, host): return re.search(self.pattern, url) or self.name in host
davidcusatis/horizon
refs/heads/master
openstack_dashboard/dashboards/project/vpn/workflows.py
11
# Copyright 2013, Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import forms
from horizon import workflows

from openstack_dashboard import api


class AddVPNServiceAction(workflows.Action):
    """Workflow action collecting the attributes of a new VPN service."""
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    router_id = forms.ChoiceField(label=_("Router"))
    subnet_id = forms.ChoiceField(label=_("Subnet"))
    admin_state_up = forms.ChoiceField(
        choices=[(True, _('UP')), (False, _('DOWN'))],
        label=_("Admin State"),
        help_text=_("The state of VPN service to start in. "
                    "If DOWN (False) VPN service does not forward packets."),
        required=False)

    def __init__(self, request, *args, **kwargs):
        super(AddVPNServiceAction, self).__init__(request, *args, **kwargs)

    def populate_subnet_id_choices(self, request, context):
        """Offer every subnet of every network visible to the tenant."""
        subnet_id_choices = [('', _("Select a Subnet"))]
        try:
            tenant_id = request.user.tenant_id
            networks = api.neutron.network_list_for_tenant(request, tenant_id)
        except Exception:
            # On API failure show an error toast and leave the list empty.
            exceptions.handle(request,
                              _('Unable to retrieve networks list.'))
            networks = []
        for n in networks:
            for s in n['subnets']:
                subnet_id_choices.append((s.id, s.cidr))
        self.fields['subnet_id'].choices = subnet_id_choices
        return subnet_id_choices

    def populate_router_id_choices(self, request, context):
        """Offer every router owned by the current tenant."""
        router_id_choices = [('', _("Select a Router"))]
        try:
            tenant_id = request.user.tenant_id
            routers = api.neutron.router_list(request, tenant_id=tenant_id)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve routers list.'))
            routers = []
        for r in routers:
            router_id_choices.append((r.id, r.name))
        self.fields['router_id'].choices = router_id_choices
        return router_id_choices

    class Meta(object):
        name = _("Add New VPN Service")
        permissions = ('openstack.services.network',)
        help_text = _("Create VPN Service for current project.\n\n"
                      "The VPN service is attached to a router "
                      "and references to a single subnet "
                      "to push to a remote site.\n"
                      "Specify a name, description, router, and subnet "
                      "for the VPN Service. "
                      "Admin State is UP (True) by default.\n\n"
                      "The router, subnet and admin state "
                      "fields are required, "
                      "all others are optional.")


class AddVPNServiceStep(workflows.Step):
    action_class = AddVPNServiceAction
    contributes = ("name", "description", "subnet_id",
                   "router_id", "admin_state_up")

    def contribute(self, data, context):
        context = super(AddVPNServiceStep, self).contribute(data, context)
        # NOTE(review): when ``data`` is falsy this implicitly returns None
        # instead of ``context`` -- presumably never hit in practice, but
        # confirm against the workflows.Step.contribute contract.
        if data:
            return context


class AddVPNService(workflows.Workflow):
    """Workflow wiring the VPN-service step to the neutron create call."""
    slug = "addvpnservice"
    name = _("Add VPN Service")
    finalize_button_name = _("Add")
    success_message = _('Added VPN Service "%s".')
    failure_message = _('Unable to add VPN Service "%s".')
    success_url = "horizon:project:vpn:index"
    default_steps = (AddVPNServiceStep,)

    def format_status_message(self, message):
        return message % self.context.get('name')

    def handle(self, request, context):
        # Returning False makes the workflow display failure_message.
        try:
            api.vpn.vpnservice_create(request, **context)
            return True
        except Exception:
            return False


class AddIKEPolicyAction(workflows.Action):
    """Workflow action collecting the attributes of a new IKE policy."""
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    auth_algorithm = forms.ChoiceField(label=_("Authorization algorithm"),
                                       required=False)
    encryption_algorithm = forms.ChoiceField(label=_("Encryption algorithm"),
                                             required=False)
    ike_version = forms.ChoiceField(label=_("IKE version"), required=False)
    lifetime_units = forms.ChoiceField(label=_("Lifetime units for IKE keys"),
                                       required=False)
    lifetime_value = forms.IntegerField(
        min_value=60, label=_("Lifetime value for IKE keys"),
        initial=3600,
        help_text=_("Equal to or greater than 60"),
        required=False)
    pfs = forms.ChoiceField(label=_("Perfect Forward Secrecy"),
                            required=False)
    phase1_negotiation_mode = forms.ChoiceField(
        label=_("IKE Phase1 negotiation mode"), required=False)

    def __init__(self, request, *args, **kwargs):
        super(AddIKEPolicyAction, self).__init__(request, *args, **kwargs)

        auth_algorithm_choices = [("sha1", "sha1")]
        self.fields['auth_algorithm'].choices = auth_algorithm_choices
        # Currently this field has only one choice, so mark it as readonly.
        self.fields['auth_algorithm'].widget.attrs['readonly'] = True

        encryption_algorithm_choices = [("3des", "3des"),
                                        ("aes-128", "aes-128"),
                                        ("aes-192", "aes-192"),
                                        ("aes-256", "aes-256")]
        self.fields[
            'encryption_algorithm'].choices = encryption_algorithm_choices
        self.fields['encryption_algorithm'].initial = "aes-128"

        ike_version_choices = [("v1", "v1"),
                               ("v2", "v2")]
        self.fields['ike_version'].choices = ike_version_choices

        lifetime_units_choices = [("seconds", "seconds")]
        self.fields['lifetime_units'].choices = lifetime_units_choices
        # Currently this field has only one choice, so mark it as readonly.
        self.fields['lifetime_units'].widget.attrs['readonly'] = True

        pfs_choices = [("group2", "group2"),
                       ("group5", "group5"),
                       ("group14", "group14")]
        self.fields['pfs'].choices = pfs_choices
        self.fields['pfs'].initial = "group5"

        phase1_neg_mode_choices = [("main", "main")]
        self.fields[
            'phase1_negotiation_mode'].choices = phase1_neg_mode_choices
        # Currently this field has only one choice, so mark it as readonly.
        self.fields['phase1_negotiation_mode'].widget.attrs['readonly'] = True

    class Meta(object):
        name = _("Add New IKE Policy")
        permissions = ('openstack.services.network',)
        help_text = _("Create IKE Policy for current project.\n\n"
                      "An IKE policy is an association "
                      "of the following attributes:\n\n"
                      "<li>Authorization algorithm: "
                      "Auth algorithm limited to SHA1 only.</li>"
                      "<li>Encryption algorithm: "
                      "The type of algorithm "
                      "(3des, aes-128, aes-192, aes-256) "
                      "used in the IKE Policy.</li>"
                      "<li>IKE version: The type of version (v1/v2) "
                      "that needs to be filtered.</li>"
                      "<li>Lifetime: Life time consists of units and value. "
                      "Units in 'seconds' "
                      "and the default value is 3600.</li>"
                      "<li>Perfect Forward Secrecy: "
                      "PFS limited to using Diffie-Hellman "
                      "groups 2, 5(default) and 14.</li>"
                      "<li>IKE Phase 1 negotiation mode: "
                      "Limited to 'main' mode only.</li>\n"
                      "All fields are optional.")


class AddIKEPolicyStep(workflows.Step):
    action_class = AddIKEPolicyAction
    contributes = ("name", "description",
                   "auth_algorithm", "encryption_algorithm",
                   "ike_version", "lifetime_units",
                   "lifetime_value", "pfs",
                   "phase1_negotiation_mode")

    def contribute(self, data, context):
        context = super(AddIKEPolicyStep, self).contribute(data, context)
        # Fold the two lifetime form fields into the single nested dict the
        # neutron API expects.
        # NOTE(review): ``data`` is indexed before the falsy check below --
        # assumes the workflow always passes a populated dict; confirm.
        context['lifetime'] = {'units': data['lifetime_units'],
                               'value': data['lifetime_value']}
        context.pop('lifetime_units')
        context.pop('lifetime_value')
        if data:
            return context


class AddIKEPolicy(workflows.Workflow):
    slug = "addikepolicy"
    name = _("Add IKE Policy")
    finalize_button_name = _("Add")
    success_message = _('Added IKE Policy "%s".')
    failure_message = _('Unable to add IKE Policy "%s".')
    success_url = "horizon:project:vpn:index"
    default_steps = (AddIKEPolicyStep,)

    def format_status_message(self, message):
        return message % self.context.get('name')

    def handle(self, request, context):
        try:
            api.vpn.ikepolicy_create(request, **context)
            return True
        except Exception:
            return False


class AddIPSecPolicyAction(workflows.Action):
    """Workflow action collecting the attributes of a new IPSec policy."""
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    auth_algorithm = forms.ChoiceField(label=_("Authorization algorithm"),
                                       required=False)
    encapsulation_mode = forms.ChoiceField(label=_("Encapsulation mode"),
                                           required=False)
    encryption_algorithm = forms.ChoiceField(label=_("Encryption algorithm"),
                                             required=False)
    lifetime_units = forms.ChoiceField(label=_("Lifetime units"),
                                       required=False)
    lifetime_value = forms.IntegerField(
        min_value=60, label=_("Lifetime value for IKE keys "),
        initial=3600,
        help_text=_("Equal to or greater than 60"),
        required=False)
    pfs = forms.ChoiceField(label=_("Perfect Forward Secrecy"),
                            required=False)
    transform_protocol = forms.ChoiceField(label=_("Transform Protocol"),
                                           required=False)

    def __init__(self, request, *args, **kwargs):
        super(AddIPSecPolicyAction, self).__init__(request, *args, **kwargs)

        auth_algorithm_choices = [("sha1", "sha1")]
        self.fields['auth_algorithm'].choices = auth_algorithm_choices
        # Currently this field has only one choice, so mark it as readonly.
        self.fields['auth_algorithm'].widget.attrs['readonly'] = True

        encapsulation_mode_choices = [("tunnel", "tunnel"),
                                      ("transport", "transport")]
        self.fields['encapsulation_mode'].choices = encapsulation_mode_choices

        encryption_algorithm_choices = [("3des", "3des"),
                                        ("aes-128", "aes-128"),
                                        ("aes-192", "aes-192"),
                                        ("aes-256", "aes-256")]
        self.fields[
            'encryption_algorithm'].choices = encryption_algorithm_choices
        self.fields['encryption_algorithm'].initial = "aes-128"

        lifetime_units_choices = [("seconds", "seconds")]
        self.fields['lifetime_units'].choices = lifetime_units_choices
        # Currently this field has only one choice, so mark it as readonly.
        self.fields['lifetime_units'].widget.attrs['readonly'] = True

        pfs_choices = [("group2", "group2"),
                       ("group5", "group5"),
                       ("group14", "group14")]
        self.fields['pfs'].choices = pfs_choices
        self.fields['pfs'].initial = "group5"

        transform_protocol_choices = [("esp", "esp"),
                                      ("ah", "ah"),
                                      ("ah-esp", "ah-esp")]
        self.fields['transform_protocol'].choices = transform_protocol_choices

    class Meta(object):
        name = _("Add New IPSec Policy")
        permissions = ('openstack.services.network',)
        help_text = _("Create IPSec Policy for current project.\n\n"
                      "An IPSec policy is an association "
                      "of the following attributes:\n\n"
                      "<li>Authorization algorithm: "
                      "Auth_algorithm limited to SHA1 only.</li>"
                      "<li>Encapsulation mode: "
                      "The type of IPsec tunnel (tunnel/transport) "
                      "to be used.</li>"
                      "<li>Encryption algorithm: "
                      "The type of algorithm "
                      "(3des, aes-128, aes-192, aes-256) "
                      "used in the IPSec Policy.</li>"
                      "<li>Lifetime: Life time consists of units and value. "
                      "Units in 'seconds' "
                      "and the default value is 3600.</li>"
                      "<li>Perfect Forward Secrecy: "
                      "PFS limited to using Diffie-Hellman "
                      "groups 2, 5(default) and 14.</li>"
                      "<li>Transform Protocol: "
                      "The type of protocol "
                      "(esp, ah, ah-esp) used in IPSec Policy.</li>\n"
                      "All fields are optional.")


class AddIPSecPolicyStep(workflows.Step):
    action_class = AddIPSecPolicyAction
    contributes = ("name", "description",
                   "auth_algorithm", "encapsulation_mode",
                   "encryption_algorithm", "lifetime_units",
                   "lifetime_value", "pfs",
                   "transform_protocol")

    def contribute(self, data, context):
        context = super(AddIPSecPolicyStep, self).contribute(data, context)
        # Fold the two lifetime form fields into the nested dict the
        # neutron API expects (same shape as in AddIKEPolicyStep).
        context['lifetime'] = {'units': data['lifetime_units'],
                               'value': data['lifetime_value']}
        context.pop('lifetime_units')
        context.pop('lifetime_value')
        if data:
            return context


class AddIPSecPolicy(workflows.Workflow):
    slug = "addipsecpolicy"
    name = _("Add IPSec Policy")
    finalize_button_name = _("Add")
    success_message = _('Added IPSec Policy "%s".')
    failure_message = _('Unable to add IPSec Policy "%s".')
    success_url = "horizon:project:vpn:index"
    default_steps = (AddIPSecPolicyStep,)

    def format_status_message(self, message):
        return message % self.context.get('name')

    def handle(self, request, context):
        try:
            api.vpn.ipsecpolicy_create(request, **context)
            return True
        except Exception:
            return False


class AddIPSecSiteConnectionAction(workflows.Action):
    """Mandatory attributes of a new IPSec site connection."""
    name = forms.CharField(max_length=80, label=_("Name"), required=False)
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    vpnservice_id = forms.ChoiceField(
        label=_("VPN Service associated with this connection"))
    ikepolicy_id = forms.ChoiceField(
        label=_("IKE Policy associated with this connection"))
    ipsecpolicy_id = forms.ChoiceField(
        label=_("IPSec Policy associated with this connection"))
    peer_address = forms.IPField(
        label=_("Peer gateway public IPv4/IPv6 Address or FQDN"),
        help_text=_("Peer gateway public IPv4/IPv6 address or FQDN for "
                    "the VPN Connection"),
        version=forms.IPv4 | forms.IPv6,
        mask=False)
    peer_id = forms.IPField(
        label=_("Peer router identity for authentication (Peer ID)"),
        help_text=_("Peer router identity for authentication. "
                    "Can be IPv4/IPv6 address, e-mail, key ID, or FQDN"),
        version=forms.IPv4 | forms.IPv6,
        mask=False)
    peer_cidrs = forms.MultiIPField(
        label=_("Remote peer subnet(s)"),
        help_text=_("Remote peer subnet(s) address(es) "
                    "with mask(s) in CIDR format "
                    "separated with commas if needed "
                    "(e.g. 20.1.0.0/24, 21.1.0.0/24)"),
        version=forms.IPv4 | forms.IPv6,
        mask=True)
    psk = forms.CharField(
        max_length=80,
        label=_("Pre-Shared Key (PSK) string"),
        help_text=_("The pre-defined key string "
                    "between the two peers of the VPN connection"))

    def populate_ikepolicy_id_choices(self, request, context):
        ikepolicy_id_choices = [('', _("Select IKE Policy"))]
        try:
            tenant_id = self.request.user.tenant_id
            ikepolicies = api.vpn.ikepolicy_list(request,
                                                 tenant_id=tenant_id)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve IKE Policies list.'))
            ikepolicies = []
        for p in ikepolicies:
            ikepolicy_id_choices.append((p.id, p.name))
        self.fields['ikepolicy_id'].choices = ikepolicy_id_choices
        return ikepolicy_id_choices

    def populate_ipsecpolicy_id_choices(self, request, context):
        ipsecpolicy_id_choices = [('', _("Select IPSec Policy"))]
        try:
            tenant_id = self.request.user.tenant_id
            ipsecpolicies = api.vpn.ipsecpolicy_list(request,
                                                     tenant_id=tenant_id)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve IPSec Policies list.'))
            ipsecpolicies = []
        for p in ipsecpolicies:
            ipsecpolicy_id_choices.append((p.id, p.name))
        self.fields['ipsecpolicy_id'].choices = ipsecpolicy_id_choices
        return ipsecpolicy_id_choices

    def populate_vpnservice_id_choices(self, request, context):
        vpnservice_id_choices = [('', _("Select VPN Service"))]
        try:
            tenant_id = self.request.user.tenant_id
            vpnservices = api.vpn.vpnservice_list(request,
                                                  tenant_id=tenant_id)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve VPN Services list.'))
            vpnservices = []
        for s in vpnservices:
            vpnservice_id_choices.append((s.id, s.name))
        self.fields['vpnservice_id'].choices = vpnservice_id_choices
        return vpnservice_id_choices

    class Meta(object):
        name = _("Add New IPSec Site Connection")
        permissions = ('openstack.services.network',)
        help_text = _("Create IPSec Site Connection for current project.\n\n"
                      "Assign a name and description for the "
                      "IPSec Site Connection. "
                      "All fields in this tab are required.")


class AddIPSecSiteConnectionStep(workflows.Step):
    action_class = AddIPSecSiteConnectionAction
    contributes = ("name", "description",
                   "vpnservice_id", "ikepolicy_id", "ipsecpolicy_id",
                   "peer_address", "peer_id", "peer_cidrs", "psk")


class AddIPSecSiteConnectionOptionalAction(workflows.Action):
    """Optional tuning parameters (MTU, DPD, initiator, admin state)."""
    mtu = forms.IntegerField(
        min_value=68,
        label=_("Maximum Transmission Unit size for the connection"),
        initial=1500,
        required=False,
        help_text=_("Equal to or greater than 68 if the local subnet is IPv4. "
                    "Equal to or greater than 1280 if the local subnet "
                    "is IPv6."))
    dpd_action = forms.ChoiceField(label=_("Dead peer detection actions"),
                                   required=False)
    dpd_interval = forms.IntegerField(
        min_value=1, label=_("Dead peer detection interval"),
        initial=30,
        required=False,
        help_text=_("Valid integer lesser than DPD timeout"))
    dpd_timeout = forms.IntegerField(
        min_value=1, label=_("Dead peer detection timeout"),
        initial=120,
        required=False,
        help_text=_("Valid integer greater than the DPD interval"))
    initiator = forms.ChoiceField(label=_("Initiator state"), required=False)
    admin_state_up = forms.ChoiceField(
        choices=[(True, _('UP')), (False, _('DOWN'))],
        label=_("Admin State"),
        required=False,
        help_text=_("The state of IPSec site connection to start in. "
                    "If DOWN (False), IPSec site connection "
                    "does not forward packets."))

    def __init__(self, request, *args, **kwargs):
        super(AddIPSecSiteConnectionOptionalAction, self).__init__(
            request, *args, **kwargs)
        initiator_choices = [("bi-directional", "bi-directional"),
                             ("response-only", "response-only")]
        self.fields['initiator'].choices = initiator_choices

    def populate_dpd_action_choices(self, request, context):
        dpd_action_choices = [("hold", "hold"),
                              ("clear", "clear"),
                              ("disabled", "disabled"),
                              ("restart", "restart"),
                              ("restart-by-peer", "restart-by-peer")]
        self.fields['dpd_action'].choices = dpd_action_choices
        return dpd_action_choices

    def clean(self):
        """Cross-field validation: DPD timeout must exceed DPD interval."""
        cleaned_data = super(AddIPSecSiteConnectionOptionalAction,
                             self).clean()
        interval = cleaned_data.get('dpd_interval')
        timeout = cleaned_data.get('dpd_timeout')
        # NOTE(review): both fields are required=False, so interval/timeout
        # may be None here, which makes this comparison raise on Python 3 --
        # confirm the form always supplies the initial values.
        if not interval < timeout:
            msg = _("DPD Timeout must be greater than DPD Interval")
            self._errors['dpd_timeout'] = self.error_class([msg])
        return cleaned_data

    class Meta(object):
        name = _("Optional Parameters")
        permissions = ('openstack.services.network',)
        help_text = _("Fields in this tab are optional. "
                      "You can configure the detail of "
                      "IPSec site connection created.")


class AddIPSecSiteConnectionOptionalStep(workflows.Step):
    action_class = AddIPSecSiteConnectionOptionalAction
    contributes = ("dpd_action", "dpd_interval", "dpd_timeout",
                   "initiator", "mtu", "admin_state_up")

    def contribute(self, data, context):
        context = super(
            AddIPSecSiteConnectionOptionalStep, self).contribute(data, context)
        # Collapse the three DPD form fields into the nested dict the
        # neutron API expects.
        context['dpd'] = {'action': data['dpd_action'],
                          'interval': data['dpd_interval'],
                          'timeout': data['dpd_timeout']}
        context.pop('dpd_action')
        context.pop('dpd_interval')
        context.pop('dpd_timeout')
        # Normalize the comma-separated CIDR string (contributed by the
        # previous step) into a list of CIDRs.
        cidrs = context['peer_cidrs']
        context['peer_cidrs'] = cidrs.replace(" ", "").split(",")
        if data:
            return context


class AddIPSecSiteConnection(workflows.Workflow):
    slug = "addipsecsiteconnection"
    name = _("Add IPSec Site Connection")
    finalize_button_name = _("Add")
    success_message = _('Added IPSec Site Connection "%s".')
    failure_message = _('Unable to add IPSec Site Connection "%s".')
    success_url = "horizon:project:vpn:index"
    default_steps = (AddIPSecSiteConnectionStep,
                     AddIPSecSiteConnectionOptionalStep)

    def format_status_message(self, message):
        return message % self.context.get('name')

    def handle(self, request, context):
        try:
            api.vpn.ipsecsiteconnection_create(request, **context)
            return True
        except Exception:
            return False
xionzz/earthquake
refs/heads/master
venv/lib/python2.7/site-packages/numpy/distutils/command/build.py
53
from __future__ import division, absolute_import, print_function import os import sys from distutils.command.build import build as old_build from distutils.util import get_platform from numpy.distutils.command.config_compiler import show_fortran_compilers class build(old_build): sub_commands = [('config_cc', lambda *args: True), ('config_fc', lambda *args: True), ('build_src', old_build.has_ext_modules), ] + old_build.sub_commands user_options = old_build.user_options + [ ('fcompiler=', None, "specify the Fortran compiler type"), ] help_options = old_build.help_options + [ ('help-fcompiler', None, "list available Fortran compilers", show_fortran_compilers), ] def initialize_options(self): old_build.initialize_options(self) self.fcompiler = None def finalize_options(self): build_scripts = self.build_scripts old_build.finalize_options(self) plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3]) if build_scripts is None: self.build_scripts = os.path.join(self.build_base, 'scripts' + plat_specifier) def run(self): old_build.run(self)
ebukoz/thrive
refs/heads/develop
erpnext/accounts/doctype/cash_flow_mapping/__init__.py
12133432
retomerz/intellij-community
refs/heads/master
python/testData/psi/Ellipsis.py
83
d[...] = 1
xeBuz/Ordbogen
refs/heads/master
app/models/tokens.py
1
import time

from app import db, app
from datetime import datetime, timedelta
from werkzeug.security import gen_salt
from app.models.users import Users
from .base import BaseModel


def generate_key():
    """Return a fresh random 60-character access key."""
    return gen_salt(60)


def generate_expiration():
    """Return the token expiration as a unix-timestamp string,
    TOKEN_TTL_HOURS from now."""
    expiration = datetime.now() + timedelta(hours=app.config['TOKEN_TTL_HOURS'])
    # strftime('%s') is a non-portable glibc extension (it fails on e.g.
    # Windows); compute the epoch value explicitly instead.
    return str(int(time.mktime(expiration.timetuple())))


class Tokens(BaseModel):
    """API access token bound to a user, expiring TOKEN_TTL_HOURS after
    creation."""
    __tablename__ = 'ob_tokens'

    id = db.Column(db.Integer, primary_key=True)
    # Pass the *callables* (not their results) as column defaults: the
    # original ``default=generate_key()`` was evaluated once at import time,
    # so every token row would share the same key and the same (stale)
    # expiration timestamp.  With the callable, SQLAlchemy invokes it per
    # insert.
    key = db.Column(db.String(60), default=generate_key)
    user_id = db.Column(db.Integer(), db.ForeignKey(Users.id))
    user = db.relation(Users)
    expiration = db.Column(db.Integer, default=generate_expiration)

    def __repr__(self):
        return self.key

    @property
    def serialize(self):
        """
        Serialize the Model for the JSON responses
        :return:
        """
        return {
            'access_key': self.key,
            'expiration': self.readable_expiration,
            'user': {
                'name': self.user.name,
                'e-mail': self.user.mail
            }
        }

    @property
    def readable_expiration(self):
        """
        Convert the timestamp in a human readable value
        :return:
        """
        value = datetime.fromtimestamp(self.expiration)
        return value.strftime('%Y-%m-%d %H:%M:%S')

    def expired(self):
        """
        Validate if the current time is greater than the expiration date.
        If the time is greater, the token has expired and is deleted.
        :return: True when expired (the token row is removed), else False.
        """
        now = time.time()
        if float(now) > float(self.expiration):
            self.delete()
            return True
        else:
            return False
Vvucinic/Wander
refs/heads/master
venv_2_7/lib/python2.7/site-packages/Django-1.9-py2.7.egg/django/contrib/gis/gdal/error.py
535
""" This module houses the GDAL & SRS Exception objects, and the check_err() routine which checks the status code returned by GDAL/OGR methods. """ # #### GDAL & SRS Exceptions #### class GDALException(Exception): pass # Legacy name OGRException = GDALException class SRSException(Exception): pass class OGRIndexError(GDALException, KeyError): """ This exception is raised when an invalid index is encountered, and has the 'silent_variable_feature' attribute set to true. This ensures that django's templates proceed to use the next lookup type gracefully when an Exception is raised. Fixes ticket #4740. """ silent_variable_failure = True # #### GDAL/OGR error checking codes and routine #### # OGR Error Codes OGRERR_DICT = { 1: (GDALException, 'Not enough data.'), 2: (GDALException, 'Not enough memory.'), 3: (GDALException, 'Unsupported geometry type.'), 4: (GDALException, 'Unsupported operation.'), 5: (GDALException, 'Corrupt data.'), 6: (GDALException, 'OGR failure.'), 7: (SRSException, 'Unsupported SRS.'), 8: (GDALException, 'Invalid handle.'), } # CPL Error Codes # http://www.gdal.org/cpl__error_8h.html CPLERR_DICT = { 1: (GDALException, 'AppDefined'), 2: (GDALException, 'OutOfMemory'), 3: (GDALException, 'FileIO'), 4: (GDALException, 'OpenFailed'), 5: (GDALException, 'IllegalArg'), 6: (GDALException, 'NotSupported'), 7: (GDALException, 'AssertionFailed'), 8: (GDALException, 'NoWriteAccess'), 9: (GDALException, 'UserInterrupt'), 10: (GDALException, 'ObjectNull'), } ERR_NONE = 0 def check_err(code, cpl=False): """ Checks the given CPL/OGRERR, and raises an exception where appropriate. """ err_dict = CPLERR_DICT if cpl else OGRERR_DICT if code == ERR_NONE: return elif code in err_dict: e, msg = err_dict[code] raise e(msg) else: raise GDALException('Unknown error code: "%s"' % code)
ingokegel/intellij-community
refs/heads/master
python/testData/formatter/fStringFragmentWrappingSplitInsideExpression_after.py
12
s = f'aaaaaa' \ f'{oct(42)}'
hippyvm/hippyvm
refs/heads/master
testing/test_refcount.py
2
import py from hippy.objects.base import W_Root from testing.test_interpreter import BaseTestInterpreter class TestRefCount(BaseTestInterpreter): @py.test.yield_fixture(autouse=True) def note_copies(self): def note_making_a_copy(array): self.space.ec.interpreter.writestr(array.var_dump(self.space, indent='', recursion={})) old_func = W_Root.__dict__['_note_making_a_copy'] W_Root._note_making_a_copy = note_making_a_copy yield W_Root._note_making_a_copy = old_func def test_no_copies(self): output = self.run("$a = array(5, 6, 7);") assert output == [] def test_no_copies_2(self): output = self.run("$x = 7; $a = array(5, 6, $x);") assert output == [] def test_one_copy(self): output = self.run("$a = array(5, 6, 7); $b = $a; $b[] = 8;") assert ''.join(output) == """\ array(3) { [0]=> int(5) [1]=> int(6) [2]=> int(7) } """ def test_inplace_append_from_immut(self): output = self.run("$a = array(5, 6, 7); $a[] = 8; $a[] = 9;") assert ''.join(output) == """\ array(3) { [0]=> int(5) [1]=> int(6) [2]=> int(7) } """ def test_inplace_append(self): output = self.run("$x = 7; $a = array(5, 6, $x); $a[] = 8;") assert output == [] def test_append_needs_copy(self): output = self.run("$x = 7; $a = array(5, 6, $x); $b = $a; $a[] = 8;") assert ''.join(output) == """\ array(3) { [0]=> int(5) [1]=> int(6) [2]=> int(7) } """ def test_array_updates_in_function(self): output = self.run(''' function f($a) { $a[0] = $a[1]; // copy $a[1] = $a[2]; // no more copies afterward $a[2] = $a[3]; $a[3] = $a[4]; } f(array(5, 6, 7, 8, 9)); ''') assert ''.join(output) == """\ array(5) { [0]=> int(5) [1]=> int(6) [2]=> int(7) [3]=> int(8) [4]=> int(9) } """ def test_string_copies(self): output = self.run(''' $a = "abc"; $b = $a; $c = $b; $c[0] = 1; $a[0] = 5; ''') assert ''.join(output) == """\ string(3) "abc" string(3) "abc" """ def test_string_copies2(self): output = self.run(''' $a = "abc"; $a[0] = "3"; $b = $a; $c = $b[0]; ''') assert ''.join(output) == """\ string(3) "abc" """ def 
test_string_copies3(self): output = self.run(''' $a = "abc"; $a[0] = "3"; $b = $a; $a[0] = "4"; ''') assert ''.join(output) == """\ string(3) "abc" string(3) "3bc" """ def test_instance_array(self): output = self.run(''' class A { } $a = new A; $n = 7; $a->x = array(9, 8, $n); $a->x[] = 6; # does a copy the first time $a->x[] = 5; # not a copy any more afterwards ''') assert ''.join(output) == """\ array(3) { [0]=> int(9) [1]=> int(8) [2]=> int(7) } """ def test_instance_array_2(self): output = self.run(''' class A { } $a = new A; $n = 7; $a->x = array(9, 8, $n); $a->x[] = 6; # does a copy the first time $b = $a->x; $a->x[] = 5; # another copy ''') assert ''.join(output) == """\ array(3) { [0]=> int(9) [1]=> int(8) [2]=> int(7) } array(4) { [0]=> int(9) [1]=> int(8) [2]=> int(7) [3]=> int(6) } """ def test_instance_array_3(self): output = self.run(''' class A { } $a = new A; $a->x = array(9, 8, 7); $a->x[] = 6; # does a copy the first time $n = 6; $a->x = array($n, $n, $n); # replaces the W_Reference $a->x[] = 5; # not a copy any more afterwards ''') assert ''.join(output) == """\ array(3) { [0]=> int(9) [1]=> int(8) [2]=> int(7) } """ def test_function_call(self): output = self.run(''' function f($a) { } $a = array(5, 6, 7); f($a); ''') assert ''.join(output) == "" def test_function_call_by_ref(self): output = self.run(''' function f(&$a) { $a[] = 8; } $x = 7; $a = array(5, 6, $x); f($a); ''') assert ''.join(output) == "" def test_call_builtin_count(self): output = self.run(''' $a = array(5, 6, 7); count($a); ''') assert ''.join(output) == ""
open-switch/ops-quagga
refs/heads/master
ops-tests/feature/bgp/test_bgp_ft_ip_community_list.py
2
# -*- coding: utf-8 -*- # (C) Copyright 2015 Hewlett Packard Enterprise Development LP # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # ########################################################################## """ OpenSwitch Test for vlan related configurations. """ from bgp_config import BgpConfig from vtysh_utils import SwitchVtyshUtils from interface_utils import verify_turn_on_interfaces TOPOLOGY = """ # Nodes [type=openswitch name="OpenSwitch 1"] ops1 [type=openswitch name="Openswitch 2"] ops2 [type=openswitch name="Openswitch 3"] ops3 # Links ops1:if01 -- ops2:if01 ops1:if02 -- ops3:if01 """ num_of_switches = 3 num_hosts_per_switch = 0 switch_prefix = "s" default_pl = "8" default_netmask = "255.0.0.0" switches = [] bgpconfigarr = [] bgp_config1 = [] bgp_config2 = [] bgp_config3 = [] all_cfg_array = [] def configure_switch_ips(step): step("\n########## Configuring switch IPs.. 
##########\n") bgp_router_id = ['9.0.0.1', '9.0.0.2', '10.0.0.2'] i = 0 for switch in switches: if switch.name == "ops1": switch("configure terminal") switch("interface %s" % switch.ports["if02"]) switch("no shutdown") switch("ip address 10.0.0.1/8") switch("end") # Configure the IPs between the switches switch("configure terminal") switch("interface %s" % switch.ports["if01"]) switch("no shutdown") switch("ip address %s/%s" % (bgp_router_id[i], default_pl)) switch("end") i += 1 def verify_interface_on(step): step("\n########## Verifying interface are up ########## \n") for switch in switches: int1 = switch.ports["if01"] if switch.name == "ops1": int2 = switch.ports["if02"] ports = [int1, int2] else: ports = [int1] verify_turn_on_interfaces(switch, ports) def setup_bgp_config(step): global bgpconfigarr, bgp_config1, bgp_config2, bgp_config3 step("\n########## Setup of BGP configurations... ##########\n") # Create BGP configurations bgp_config1 = BgpConfig("1", "9.0.0.1", "15.0.0.0") bgp_config2 = BgpConfig("2", "9.0.0.2", "12.0.0.0") bgp_config3 = BgpConfig("3", "10.0.0.2", "20.0.0.0") # Add the neighbors for each BGP config bgp_config1.add_neighbor(bgp_config2) bgp_config2.add_neighbor(bgp_config1) bgpconfigarr = [bgp_config1, bgp_config2, bgp_config3] def apply_bgp_config(step): global all_cfg_array step("\n########## Applying BGP configurations... 
##########\n") all_cfg_array = [] i = 0 for bgp_cfg in bgpconfigarr: step("### Applying configurations for BGP: %s ###\n" % bgp_cfg.routerid) cfg_array = [] if i == 0: step("### Applying community list configurations for BGP1") cfg_array.append("ip community-list BGP_IN permit 2:0") cfg_array.append("ip community-list BGP_IN deny 3:0") step("### Applying route-map configurations for BGP1") cfg_array.append("route-map BGP_RMAP permit 10") cfg_array.append("match community BGP_IN") if i == 1: cfg_array.append("route-map BGP_2 permit 10") cfg_array.append("set community 2:0") if i == 2: cfg_array.append("route-map BGP_3 permit 10") cfg_array.append("set community 3:0") SwitchVtyshUtils.vtysh_cfg_cmd(switches[i], cfg_array) del cfg_array[:] # Initiate BGP configuration cfg_array.append("router bgp %s" % bgp_cfg.asn) cfg_array.append("bgp router-id %s" % bgp_cfg.routerid) # Add the networks this bgp will be advertising for network in bgp_cfg.networks: cfg_array.append("network %s/%s" % (network, default_pl)) if i == 0: cfg_array.append("neighbor 10.0.0.2 remote-as 3") cfg_array.append("neighbor 10.0.0.2 route-map BGP_RMAP in") cfg_array.append("neighbor 9.0.0.2 remote-as 2") cfg_array.append("neighbor 9.0.0.2 route-map BGP_RMAP in") if i == 1: cfg_array.append("neighbor 9.0.0.1 remote-as 1") cfg_array.append("neighbor 9.0.0.1 route-map BGP_2 out") if i == 2: cfg_array.append("neighbor 10.0.0.1 remote-as 1") cfg_array.append("neighbor 10.0.0.1 route-map BGP_3 out") SwitchVtyshUtils.vtysh_cfg_cmd(switches[i], cfg_array) # Add the configuration arrays to an array so that it can be used # for verification later. all_cfg_array.append(cfg_array) i += 1 def verify_bgp_running(step): step("\n########## Verifying bgp processes.. 
##########\n") for switch in switches: pid = switch("pgrep -f bgpd", shell="bash").strip() assert (pid != ""), "bgpd process not running on switch %s" % \ switch.name step("### bgpd process exists on switch %s ###\n" % switch.name) def verify_bgp_configs(step): step("\n########## Verifying all configurations.. ##########\n") i = 0 for switch in switches: bgp_cfg_array = all_cfg_array[i] for cfg in bgp_cfg_array: res = SwitchVtyshUtils.verify_cfg_exist(switch, [cfg]) assert res, "Config \"%s\" was not correctly configured!" % cfg i += 1 step("### All configurations were verified successfully ###\n") def verify_bgp_routes(step): step("\n########## Verifying routes... ##########\n") # For each bgp, verify that it is indeed advertising itstep verify_advertised_routes(step) # For each switch, verify the number of routes received verify_routes_received(step) def verify_advertised_routes(step): step("### Verifying advertised routes... ###\n") i = 0 for bgp_cfg in bgpconfigarr: switch = switches[i] next_hop = "0.0.0.0" for network in bgp_cfg.networks: found = SwitchVtyshUtils.wait_for_route(switch, network, next_hop) assert found, "Could not find route (%s -> %s) on %s" % \ (network, next_hop, switch.name) i += 1 def verify_routes_received(step): step("### Verifying routes received... ###\n") switch = switches[0] # Network of BGP2 should be permitted by BGP1 network = "12.0.0.0/8" next_hop = "9.0.0.2" step("### Verifying route for switch %s ###\n" % switch.name) step("### Network: %s, Next-hop: %s - Should exist... ###\n" % (network, next_hop)) found = SwitchVtyshUtils.wait_for_route(switch, network, next_hop) assert found, "Route %s -> %s exists on %s" % \ (network, next_hop, switch.name) # Network of BGP3 should NOT be permitted by BGP1 network = "20.0.0.0/8" next_hop = "10.0.0.2" verify_route_exists = False step("### Verifying routes for switch %s ###\n" % switch.name) step("### Network: %s, Next-hop: %s - Should NOT exist... 
###\n" % (network, next_hop)) found = SwitchVtyshUtils.wait_for_route(switch, network, next_hop, verify_route_exists) assert not found, "Route %s -> %s does not exist on %s" % \ (network, next_hop, switch.name) # Network of BGP1 should be permitted by BGP2 switch = switches[1] network = "15.0.0.0/8" next_hop = "9.0.0.1" step("### Verifying route for switch %s ###\n" % switch.name) step("### Network: %s, Next-hop: %s - Should exist... ###\n" % (network, next_hop)) found = SwitchVtyshUtils.wait_for_route(switch, network, next_hop) assert found, "Route %s -> %s exists on %s" % \ (network, next_hop, switch.name) # Network of BGP1 should be permitted by BGP3 switch = switches[2] network = "15.0.0.0/8" next_hop = "10.0.0.1" step("### Verifying route for switch %s ###\n" % switch.name) step("### Network: %s, Next-hop: %s - Should exist... ###\n" % (network, next_hop)) found = SwitchVtyshUtils.wait_for_route(switch, network, next_hop) assert found, "Route %s -> %s exists on %s" % \ (network, next_hop, switch.name) def verify_no_ip_community(step): step("\n########## Verifying no ip prefix-list ##########\n") switch = switches[0] cmd = "no ip community-list BGP_IN" step("### Unconfiguring ip community-list BGP_IN ###\n") SwitchVtyshUtils.vtysh_cfg_cmd(switch, [cmd]) cfg = "ip community-list BGP_IN permit 2:0" step("### Checking ip community-list config ###\n") exists = SwitchVtyshUtils.verify_cfg_exist(switch, []) assert not exists, "Config \"%s\" was not removed" % cfg cfg = "ip community-list BGP_IN permit 3:0" step("### Checking ip community-list config ###\n") exists = SwitchVtyshUtils.verify_cfg_exist(switch, []) assert not exists, "Config \"%s\" was not removed" % cfg step("### ip community-list configs were successfully removed ###\n") def test_bgp_ft_ip_community_list(topology, step): global switches ops1 = topology.get('ops1') ops2 = topology.get('ops2') ops3 = topology.get('ops3') assert ops1 is not None assert ops2 is not None assert ops3 is not None switches = 
[ops1, ops2, ops3] ops1.name = "ops1" ops2.name = "ops2" ops3.name = "ops3" configure_switch_ips(step) verify_interface_on(step) setup_bgp_config(step) verify_bgp_running(step) apply_bgp_config(step) verify_bgp_configs(step) verify_bgp_routes(step) verify_no_ip_community(step)
nox/servo
refs/heads/master
components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py
149
import WebIDL def WebIDLTest(parser, harness): parser.parse(""" [NoInterfaceObject] interface TestExtendedAttr { [Unforgeable] readonly attribute byte b; }; """) results = parser.finish() parser = parser.reset() parser.parse(""" [Pref="foo.bar",Pref=flop] interface TestExtendedAttr { [Pref="foo.bar"] attribute byte b; }; """) results = parser.finish() parser = parser.reset() parser.parse(""" interface TestLenientThis { [LenientThis] attribute byte b; }; """) results = parser.finish() harness.ok(results[0].members[0].hasLenientThis(), "Should have a lenient this") parser = parser.reset() threw = False try: parser.parse(""" interface TestLenientThis2 { [LenientThis=something] attribute byte b; }; """) results = parser.finish() except: threw = True harness.ok(threw, "[LenientThis] must take no arguments") parser = parser.reset() parser.parse(""" interface TestClamp { void testClamp([Clamp] long foo); void testNotClamp(long foo); }; """) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) signature. harness.ok(results[0].members[0].signatures()[0][1][0].clamp, "Should be clamped") harness.ok(not results[0].members[1].signatures()[0][1][0].clamp, "Should not be clamped") parser = parser.reset() threw = False try: parser.parse(""" interface TestClamp2 { void testClamp([Clamp=something] long foo); }; """) results = parser.finish() except: threw = True harness.ok(threw, "[Clamp] must take no arguments") parser = parser.reset() parser.parse(""" interface TestEnforceRange { void testEnforceRange([EnforceRange] long foo); void testNotEnforceRange(long foo); }; """) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) signature. 
harness.ok(results[0].members[0].signatures()[0][1][0].enforceRange, "Should be enforceRange") harness.ok(not results[0].members[1].signatures()[0][1][0].enforceRange, "Should not be enforceRange") parser = parser.reset() threw = False try: parser.parse(""" interface TestEnforceRange2 { void testEnforceRange([EnforceRange=something] long foo); }; """) results = parser.finish() except: threw = True harness.ok(threw, "[EnforceRange] must take no arguments")
zhengjue/mytornado
refs/heads/master
omserver/OMserverweb/manage.py
1
#!/usr/bin/env python import os import sys if __name__ == "__main__": <<<<<<< HEAD # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OMserverweb.settings") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") ======= os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OMserverweb.settings") # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") >>>>>>> 94a4bbcaa2a9b06f28fe9deea9e23b30711c484d from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
mixmar91/whatsapp
refs/heads/master
yowsup/layers/protocol_contacts/protocolentities/test_notification_contact_update.py
68
from yowsup.layers.protocol_contacts.protocolentities import UpdateContactNotificationProtocolEntity from yowsup.structs.protocolentity import ProtocolEntityTest import time import unittest entity = UpdateContactNotificationProtocolEntity("1234", "jid@s.whatsapp.net", int(time.time()), "notify", False,"contactjid@s.whatsapp.net") class UpdateContactNotificationProtocolEntityTest(ProtocolEntityTest, unittest.TestCase): def setUp(self): self.ProtocolEntity = UpdateContactNotificationProtocolEntity self.node = entity.toProtocolTreeNode()
yamila-moreno/django
refs/heads/master
tests/test_discovery_sample/pattern_tests.py
641
from unittest import TestCase class Test(TestCase): def test_sample(self): self.assertEqual(1, 1)
kybriainfotech/iSocioCRM
refs/heads/8.0
addons/website_livechat/models/__init__.py
427
import website
ettm2012/MissionPlanner
refs/heads/master
Lib/site-packages/scipy/stats/tests/test_continuous_extra.py
53
# contains additional tests for continuous distributions # # NOTE: one test, _est_cont_skip, that is renamed so that nose doesn't # run it, # 6 distributions return nan for entropy # truncnorm fails by design for private method _ppf test import numpy.testing as npt import numpy as np from scipy import stats from test_continuous_basic import distcont DECIMAL = 5 @npt.dec.slow def test_cont_extra(): for distname, arg in distcont[:]: distfn = getattr(stats, distname) yield check_ppf_limits, distfn, arg, distname + \ ' ppf limit test' yield check_isf_limits, distfn, arg, distname + \ ' isf limit test' yield check_loc_scale, distfn, arg, distname + \ ' loc, scale test' @npt.dec.slow def _est_cont_skip(): for distname, arg in distcont: distfn = getattr(stats, distname) #entropy test checks only for isnan, currently 6 isnan left yield check_entropy, distfn, arg, distname + \ ' entropy nan test' # _ppf test has 1 failure be design yield check_ppf_private, distfn, arg, distname + \ ' _ppf private test' def test_540_567(): # test for nan returned in tickets 540, 567 npt.assert_almost_equal(stats.norm.cdf(-1.7624320982),0.03899815971089126, decimal=10, err_msg = 'test_540_567') npt.assert_almost_equal(stats.norm.cdf(-1.7624320983),0.038998159702449846, decimal=10, err_msg = 'test_540_567') npt.assert_almost_equal(stats.norm.cdf(1.38629436112, loc=0.950273420309, scale=0.204423758009),0.98353464004309321, decimal=10, err_msg = 'test_540_567') def check_ppf_limits(distfn,arg,msg): below,low,upp,above = distfn.ppf([-1,0,1,2], *arg) #print distfn.name, distfn.a, low, distfn.b, upp #print distfn.name,below,low,upp,above assert_equal_inf_nan(distfn.a,low, msg + 'ppf lower bound') assert_equal_inf_nan(distfn.b,upp, msg + 'ppf upper bound') npt.assert_(np.isnan(below), msg + 'ppf out of bounds - below') npt.assert_(np.isnan(above), msg + 'ppf out of bounds - above') def check_ppf_private(distfn,arg,msg): #fails by design for trunk norm self.nb not defined ppfs = 
distfn._ppf(np.array([0.1,0.5,0.9]), *arg) npt.assert_(not np.any(np.isnan(ppfs)), msg + 'ppf private is nan') def check_isf_limits(distfn,arg,msg): below,low,upp,above = distfn.isf([-1,0,1,2], *arg) #print distfn.name, distfn.a, low, distfn.b, upp #print distfn.name,below,low,upp,above assert_equal_inf_nan(distfn.a,upp, msg + 'isf lower bound') assert_equal_inf_nan(distfn.b,low, msg + 'isf upper bound') npt.assert_(np.isnan(below), msg + 'isf out of bounds - below') npt.assert_(np.isnan(above), msg + 'isf out of bounds - above') def check_loc_scale(distfn,arg,msg): m,v = distfn.stats(*arg) loc, scale = 10.0, 10.0 mt,vt = distfn.stats(loc=loc, scale=scale, *arg) assert_equal_inf_nan(m*scale+loc,mt,msg + 'mean') assert_equal_inf_nan(v*scale*scale,vt,msg + 'var') def check_entropy(distfn,arg,msg): ent = distfn.entropy(*arg) #print 'Entropy =', ent npt.assert_(not np.isnan(ent), msg + 'test Entropy is nan') def assert_equal_inf_nan(v1,v2,msg): npt.assert_(not np.isnan(v1)) if not np.isinf(v1): npt.assert_almost_equal(v1, v2, decimal=DECIMAL, err_msg = msg + \ ' - finite') else: npt.assert_(np.isinf(v2) or np.isnan(v2), msg + ' - infinite, v2=%s' % str(v2)) if __name__ == "__main__": import nose #nose.run(argv=['', __file__]) nose.runmodule(argv=[__file__,'-s'], exit=False)
marc-white/ads
refs/heads/master
ads/sandbox.py
4
""" Sandbox environment that wraps relevant classes so that they receive mock responses rather than contact the live API """ import re from ads import search from .search import SearchQuery as _SearchQuery, Article as _Article from .metrics import MetricsQuery as _MetricsQuery from .export import ExportQuery as _ExportQuery from .tests.mocks import MockSolrResponse, MockMetricsResponse, \ MockExportResponse class Article(_Article): """ Wrapper for ads.search.Article """ def _get_field(self, field): with MockSolrResponse(_SearchQuery.HTTP_ENDPOINT): return super(Article, self)._get_field(field) class SearchQuery(_SearchQuery): """ Wrapper for ads.SearchQuery """ def execute(self): with MockSolrResponse(SearchQuery.HTTP_ENDPOINT): super(SearchQuery, self).execute() class MetricsQuery(_MetricsQuery): """ Wrapper for ads.SearchQuery """ def execute(self): with MockMetricsResponse(MetricsQuery.HTTP_ENDPOINT): return super(MetricsQuery, self).execute() class ExportQuery(_ExportQuery): """ Wrapper for ads.SearchQuery """ def execute(self): with MockExportResponse(re.compile(ExportQuery.HTTP_ENDPOINT)): return super(ExportQuery, self).execute() # Monkey patch relevant classes that are called in ads.search search.Article = Article search.MetricsQuery = MetricsQuery search.ExportQuery = ExportQuery
andris210296/andris-projeto
refs/heads/master
backend/venv/lib/python2.7/site-packages/unidecode/x022.py
165
data = ( '[?]', # 0x00 '[?]', # 0x01 '[?]', # 0x02 '[?]', # 0x03 '[?]', # 0x04 '[?]', # 0x05 '[?]', # 0x06 '[?]', # 0x07 '[?]', # 0x08 '[?]', # 0x09 '[?]', # 0x0a '[?]', # 0x0b '[?]', # 0x0c '[?]', # 0x0d '[?]', # 0x0e '[?]', # 0x0f '[?]', # 0x10 '[?]', # 0x11 '-', # 0x12 '[?]', # 0x13 '[?]', # 0x14 '/', # 0x15 '\\', # 0x16 '*', # 0x17 '[?]', # 0x18 '[?]', # 0x19 '[?]', # 0x1a '[?]', # 0x1b '[?]', # 0x1c '[?]', # 0x1d '[?]', # 0x1e '[?]', # 0x1f '[?]', # 0x20 '[?]', # 0x21 '[?]', # 0x22 '|', # 0x23 '[?]', # 0x24 '[?]', # 0x25 '[?]', # 0x26 '[?]', # 0x27 '[?]', # 0x28 '[?]', # 0x29 '[?]', # 0x2a '[?]', # 0x2b '[?]', # 0x2c '[?]', # 0x2d '[?]', # 0x2e '[?]', # 0x2f '[?]', # 0x30 '[?]', # 0x31 '[?]', # 0x32 '[?]', # 0x33 '[?]', # 0x34 '[?]', # 0x35 ':', # 0x36 '[?]', # 0x37 '[?]', # 0x38 '[?]', # 0x39 '[?]', # 0x3a '[?]', # 0x3b '~', # 0x3c '[?]', # 0x3d '[?]', # 0x3e '[?]', # 0x3f '[?]', # 0x40 '[?]', # 0x41 '[?]', # 0x42 '[?]', # 0x43 '[?]', # 0x44 '[?]', # 0x45 '[?]', # 0x46 '[?]', # 0x47 '[?]', # 0x48 '[?]', # 0x49 '[?]', # 0x4a '[?]', # 0x4b '[?]', # 0x4c '[?]', # 0x4d '[?]', # 0x4e '[?]', # 0x4f '[?]', # 0x50 '[?]', # 0x51 '[?]', # 0x52 '[?]', # 0x53 '[?]', # 0x54 '[?]', # 0x55 '[?]', # 0x56 '[?]', # 0x57 '[?]', # 0x58 '[?]', # 0x59 '[?]', # 0x5a '[?]', # 0x5b '[?]', # 0x5c '[?]', # 0x5d '[?]', # 0x5e '[?]', # 0x5f '[?]', # 0x60 '[?]', # 0x61 '[?]', # 0x62 '[?]', # 0x63 '<=', # 0x64 '>=', # 0x65 '<=', # 0x66 '>=', # 0x67 '[?]', # 0x68 '[?]', # 0x69 '[?]', # 0x6a '[?]', # 0x6b '[?]', # 0x6c '[?]', # 0x6d '[?]', # 0x6e '[?]', # 0x6f '[?]', # 0x70 '[?]', # 0x71 '[?]', # 0x72 '[?]', # 0x73 '[?]', # 0x74 '[?]', # 0x75 '[?]', # 0x76 '[?]', # 0x77 '[?]', # 0x78 '[?]', # 0x79 '[?]', # 0x7a '[?]', # 0x7b '[?]', # 0x7c '[?]', # 0x7d '[?]', # 0x7e '[?]', # 0x7f '[?]', # 0x80 '[?]', # 0x81 '[?]', # 0x82 '[?]', # 0x83 '[?]', # 0x84 '[?]', # 0x85 '[?]', # 0x86 '[?]', # 0x87 '[?]', # 0x88 '[?]', # 0x89 '[?]', # 0x8a '[?]', # 0x8b '[?]', # 0x8c '[?]', # 0x8d '[?]', # 0x8e 
'[?]', # 0x8f '[?]', # 0x90 '[?]', # 0x91 '[?]', # 0x92 '[?]', # 0x93 '[?]', # 0x94 '[?]', # 0x95 '[?]', # 0x96 '[?]', # 0x97 '[?]', # 0x98 '[?]', # 0x99 '[?]', # 0x9a '[?]', # 0x9b '[?]', # 0x9c '[?]', # 0x9d '[?]', # 0x9e '[?]', # 0x9f '[?]', # 0xa0 '[?]', # 0xa1 '[?]', # 0xa2 '[?]', # 0xa3 '[?]', # 0xa4 '[?]', # 0xa5 '[?]', # 0xa6 '[?]', # 0xa7 '[?]', # 0xa8 '[?]', # 0xa9 '[?]', # 0xaa '[?]', # 0xab '[?]', # 0xac '[?]', # 0xad '[?]', # 0xae '[?]', # 0xaf '[?]', # 0xb0 '[?]', # 0xb1 '[?]', # 0xb2 '[?]', # 0xb3 '[?]', # 0xb4 '[?]', # 0xb5 '[?]', # 0xb6 '[?]', # 0xb7 '[?]', # 0xb8 '[?]', # 0xb9 '[?]', # 0xba '[?]', # 0xbb '[?]', # 0xbc '[?]', # 0xbd '[?]', # 0xbe '[?]', # 0xbf '[?]', # 0xc0 '[?]', # 0xc1 '[?]', # 0xc2 '[?]', # 0xc3 '[?]', # 0xc4 '[?]', # 0xc5 '[?]', # 0xc6 '[?]', # 0xc7 '[?]', # 0xc8 '[?]', # 0xc9 '[?]', # 0xca '[?]', # 0xcb '[?]', # 0xcc '[?]', # 0xcd '[?]', # 0xce '[?]', # 0xcf '[?]', # 0xd0 '[?]', # 0xd1 '[?]', # 0xd2 '[?]', # 0xd3 '[?]', # 0xd4 '[?]', # 0xd5 '[?]', # 0xd6 '[?]', # 0xd7 '[?]', # 0xd8 '[?]', # 0xd9 '[?]', # 0xda '[?]', # 0xdb '[?]', # 0xdc '[?]', # 0xdd '[?]', # 0xde '[?]', # 0xdf '[?]', # 0xe0 '[?]', # 0xe1 '[?]', # 0xe2 '[?]', # 0xe3 '[?]', # 0xe4 '[?]', # 0xe5 '[?]', # 0xe6 '[?]', # 0xe7 '[?]', # 0xe8 '[?]', # 0xe9 '[?]', # 0xea '[?]', # 0xeb '[?]', # 0xec '[?]', # 0xed '[?]', # 0xee '[?]', # 0xef '[?]', # 0xf0 '[?]', # 0xf1 '[?]', # 0xf2 '[?]', # 0xf3 '[?]', # 0xf4 '[?]', # 0xf5 '[?]', # 0xf6 '[?]', # 0xf7 '[?]', # 0xf8 '[?]', # 0xf9 '[?]', # 0xfa '[?]', # 0xfb '[?]', # 0xfc '[?]', # 0xfd '[?]', # 0xfe )
OpenAcademy-OpenStack/nova-scheduler
refs/heads/master
nova/virt/images.py
15
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright (c) 2010 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Handling of VM disk images. """ import os from oslo.config import cfg from nova import exception from nova.image import glance from nova.openstack.common import fileutils from nova.openstack.common.gettextutils import _ from nova.openstack.common import imageutils from nova.openstack.common import log as logging from nova import utils LOG = logging.getLogger(__name__) image_opts = [ cfg.BoolOpt('force_raw_images', default=True, help='Force backing images to raw format'), ] CONF = cfg.CONF CONF.register_opts(image_opts) def qemu_img_info(path): """Return an object containing the parsed output from qemu-img info.""" # TODO(mikal): this code should not be referring to a libvirt specific # flag. 
if not os.path.exists(path) and CONF.libvirt.images_type != 'rbd': return imageutils.QemuImgInfo() out, err = utils.execute('env', 'LC_ALL=C', 'LANG=C', 'qemu-img', 'info', path) return imageutils.QemuImgInfo(out) def convert_image(source, dest, out_format, run_as_root=False): """Convert image to other format.""" cmd = ('qemu-img', 'convert', '-O', out_format, source, dest) utils.execute(*cmd, run_as_root=run_as_root) def fetch(context, image_href, path, _user_id, _project_id, max_size=0): # TODO(vish): Improve context handling and add owner and auth data # when it is added to glance. Right now there is no # auth checking in glance, so we assume that access was # checked before we got here. (image_service, image_id) = glance.get_remote_image_service(context, image_href) with fileutils.remove_path_on_error(path): image_service.download(context, image_id, dst_path=path) def fetch_to_raw(context, image_href, path, user_id, project_id, max_size=0): path_tmp = "%s.part" % path fetch(context, image_href, path_tmp, user_id, project_id, max_size=max_size) with fileutils.remove_path_on_error(path_tmp): data = qemu_img_info(path_tmp) fmt = data.file_format if fmt is None: raise exception.ImageUnacceptable( reason=_("'qemu-img info' parsing failed."), image_id=image_href) backing_file = data.backing_file if backing_file is not None: raise exception.ImageUnacceptable(image_id=image_href, reason=(_("fmt=%(fmt)s backed by: %(backing_file)s") % {'fmt': fmt, 'backing_file': backing_file})) # We can't generally shrink incoming images, so disallow # images > size of the flavor we're booting. Checking here avoids # an immediate DoS where we convert large qcow images to raw # (which may compress well but not be sparse). # TODO(p-draigbrady): loop through all flavor sizes, so that # we might continue here and not discard the download. # If we did that we'd have to do the higher level size checks # irrespective of whether the base image was prepared or not. 
disk_size = data.virtual_size if max_size and max_size < disk_size: msg = _('%(base)s virtual size %(disk_size)s ' 'larger than flavor root disk size %(size)s') LOG.error(msg % {'base': path, 'disk_size': disk_size, 'size': max_size}) raise exception.FlavorDiskTooSmall() if fmt != "raw" and CONF.force_raw_images: staged = "%s.converted" % path LOG.debug("%s was %s, converting to raw" % (image_href, fmt)) with fileutils.remove_path_on_error(staged): convert_image(path_tmp, staged, 'raw') os.unlink(path_tmp) data = qemu_img_info(staged) if data.file_format != "raw": raise exception.ImageUnacceptable(image_id=image_href, reason=_("Converted to raw, but format is now %s") % data.file_format) os.rename(staged, path) else: os.rename(path_tmp, path)
LarryHillyer/PoolHost
refs/heads/master
PoolHost/env/Lib/site-packages/django/db/models/fields/__init__.py
33
# -*- coding: utf-8 -*- from __future__ import unicode_literals import collections import copy import datetime import decimal import itertools import uuid import warnings from base64 import b64decode, b64encode from functools import total_ordering from django import forms from django.apps import apps from django.conf import settings from django.core import checks, exceptions, validators # When the _meta object was formalized, this exception was moved to # django.core.exceptions. It is retained here for backwards compatibility # purposes. from django.core.exceptions import FieldDoesNotExist # NOQA from django.db import connection, connections, router from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin from django.utils import six, timezone from django.utils.datastructures import DictWrapper from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.deprecation import ( RemovedInDjango20Warning, warn_about_renamed_method, ) from django.utils.duration import duration_string from django.utils.encoding import ( force_bytes, force_text, python_2_unicode_compatible, smart_text, ) from django.utils.functional import Promise, cached_property, curry from django.utils.ipv6 import clean_ipv6_address from django.utils.itercompat import is_iterable from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ # Avoid "TypeError: Item in ``from list'' not a string" -- unicode_literals # makes these strings unicode __all__ = [str(x) for x in ( 'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField', 'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField', 'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty', 'Field', 'FieldDoesNotExist', 'FilePathField', 'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED', 'NullBooleanField', 'PositiveIntegerField', 
'PositiveSmallIntegerField', 'SlugField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField', )] class Empty(object): pass class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] def _load_field(app_label, model_name, field_name): return apps.get_model(app_label, model_name)._meta.get_field(field_name) # A guide to Field parameters: # # * name: The name of the field specified in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) def _empty(of_cls): new = Empty() new.__class__ = of_cls return new @total_ordering @python_2_unicode_compatible class Field(RegisterLookupMixin): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True empty_values = list(validators.EMPTY_VALUES) # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. 
creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _('Value %(value)r is not a valid choice.'), 'null': _('This field cannot be null.'), 'blank': _('This field cannot be blank.'), 'unique': _('%(model_name)s with this %(field_label)s ' 'already exists.'), # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. # Eg: "Title must be unique for pub_date year" 'unique_for_date': _("%(field_label)s must be unique for " "%(date_field_label)s %(lookup_type)s."), } system_check_deprecated_details = None system_check_removed_details = None # Field flags hidden = False many_to_many = None many_to_one = None one_to_many = None one_to_one = None related_model = None # Generic field type description, usually overridden by subclasses def _description(self): return _('Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=[], error_messages=None): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null self.remote_field = rel self.is_relation = self.remote_field is not None self.default = default self.editable = editable self.serialize = serialize self.unique_for_date = unique_for_date self.unique_for_month = unique_for_month self.unique_for_year = unique_for_year if isinstance(choices, collections.Iterator): choices = list(choices) 
self.choices = choices or [] self.help_text = help_text self.db_index = db_index self.db_column = db_column self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE self.auto_created = auto_created # Adjust the appropriate creation counter, and save our local copy. if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self._validators = validators # Store for deconstruction later messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages def __str__(self): """ Return "app_label.model_label.field_name". """ model = self.model app = model._meta.app_label return '%s.%s.%s' % (app, model._meta.object_name, self.name) def __repr__(self): """ Displays the module, class and name of the field. """ path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path def check(self, **kwargs): errors = [] errors.extend(self._check_field_name()) errors.extend(self._check_choices()) errors.extend(self._check_db_index()) errors.extend(self._check_null_allowed_for_primary_keys()) errors.extend(self._check_backend_specific_checks(**kwargs)) errors.extend(self._check_deprecation_details()) return errors def _check_field_name(self): """ Check if field name is valid, i.e. 1) does not end with an underscore, 2) does not contain "__" and 3) is not "pk". 
""" if self.name.endswith('_'): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] elif '__' in self.name: return [ checks.Error( 'Field names must not contain "__".', obj=self, id='fields.E002', ) ] elif self.name == 'pk': return [ checks.Error( "'pk' is a reserved word that cannot be used as a field name.", obj=self, id='fields.E003', ) ] else: return [] @property def rel(self): warnings.warn( "Usage of field.rel has been deprecated. Use field.remote_field instead.", RemovedInDjango20Warning, 2) return self.remote_field def _check_choices(self): if self.choices: if (isinstance(self.choices, six.string_types) or not is_iterable(self.choices)): return [ checks.Error( "'choices' must be an iterable (e.g., a list or tuple).", obj=self, id='fields.E004', ) ] elif any(isinstance(choice, six.string_types) or not is_iterable(choice) or len(choice) != 2 for choice in self.choices): return [ checks.Error( "'choices' must be an iterable containing " "(actual value, human readable name) tuples.", obj=self, id='fields.E005', ) ] else: return [] else: return [] def _check_db_index(self): if self.db_index not in (None, True, False): return [ checks.Error( "'db_index' must be None, True or False.", obj=self, id='fields.E006', ) ] else: return [] def _check_null_allowed_for_primary_keys(self): if (self.primary_key and self.null and not connection.features.interprets_empty_strings_as_nulls): # We cannot reliably check this for backends like Oracle which # consider NULL and '' to be equal (and thus set up # character-based fields a little differently). 
return [ checks.Error( 'Primary keys must not have null=True.', hint=('Set null=False on the field, or ' 'remove primary_key=True argument.'), obj=self, id='fields.E007', ) ] else: return [] def _check_backend_specific_checks(self, **kwargs): app_label = self.model._meta.app_label for db in connections: if router.allow_migrate(db, app_label, model_name=self.model._meta.model_name): return connections[db].validation.check_field(self, **kwargs) return [] def _check_deprecation_details(self): if self.system_check_removed_details is not None: return [ checks.Error( self.system_check_removed_details.get( 'msg', '%s has been removed except for support in historical ' 'migrations.' % self.__class__.__name__ ), hint=self.system_check_removed_details.get('hint'), obj=self, id=self.system_check_removed_details.get('id', 'fields.EXXX'), ) ] elif self.system_check_deprecated_details is not None: return [ checks.Warning( self.system_check_deprecated_details.get( 'msg', '%s has been deprecated.' % self.__class__.__name__ ), hint=self.system_check_deprecated_details.get('hint'), obj=self, id=self.system_check_deprecated_details.get('id', 'fields.WXXX'), ) ] return [] def get_col(self, alias, output_field=None): if output_field is None: output_field = self if alias != self.model._meta.db_table or output_field != self: from django.db.models.expressions import Col return Col(alias, self, output_field) else: return self.cached_col @cached_property def cached_col(self): from django.db.models.expressions import Col return Col(self.model._meta.db_table, self) def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be selected as AsText(table.col) on MySQL as the table.col data can't be used by Django. 
""" return sql, params def deconstruct(self): """ Returns enough information to recreate the field as a 4-tuple: * The name of the field on the model, if contribute_to_class has been run * The import path of the field, including the class: django.db.models.IntegerField This should be the most portable version, so less specific may be better. * A list of positional arguments * A dict of keyword arguments Note that the positional or keyword arguments must contain values of the following types (including inner values of collection types): * None, bool, str, unicode, int, long, float, complex, set, frozenset, list, tuple, dict * UUID * datetime.datetime (naive), datetime.date * top-level classes, top-level functions - will be referenced by their full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. There's no need to return the exact way the field was instantiated this time, just ensure that the resulting field is the same - prefer keyword arguments over positional ones, and omit parameters with their default values. 
""" # Short-form way of fetching all the default parameters keywords = {} possibles = { "verbose_name": None, "primary_key": False, "max_length": None, "unique": False, "blank": False, "null": False, "db_index": False, "default": NOT_PROVIDED, "editable": True, "serialize": True, "unique_for_date": None, "unique_for_month": None, "unique_for_year": None, "choices": [], "help_text": '', "db_column": None, "db_tablespace": settings.DEFAULT_INDEX_TABLESPACE, "auto_created": False, "validators": [], "error_messages": None, } attr_overrides = { "unique": "_unique", "error_messages": "_error_messages", "validators": "_validators", "verbose_name": "_verbose_name", } equals_comparison = {"choices", "validators", "db_tablespace"} for name, default in possibles.items(): value = getattr(self, attr_overrides.get(name, name)) # Unroll anything iterable for choices into a concrete list if name == "choices" and isinstance(value, collections.Iterable): value = list(value) # Do correct kind of comparison if name in equals_comparison: if value != default: keywords[name] = value else: if value is not default: keywords[name] = value # Work out path - we shorten it for known Django core fields path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__) if path.startswith("django.db.models.fields.related"): path = path.replace("django.db.models.fields.related", "django.db.models") if path.startswith("django.db.models.fields.files"): path = path.replace("django.db.models.fields.files", "django.db.models") if path.startswith("django.db.models.fields.proxy"): path = path.replace("django.db.models.fields.proxy", "django.db.models") if path.startswith("django.db.models.fields"): path = path.replace("django.db.models.fields", "django.db.models") # Return basic info - other fields should override this. return ( force_text(self.name, strings_only=True), path, [], keywords, ) def clone(self): """ Uses deconstruct() to clone a new copy of this Field. 
Will not preserve any class attachments/attribute names. """ name, path, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): return self.creation_counter == other.creation_counter return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, Field): return self.creation_counter < other.creation_counter return NotImplemented def __hash__(self): return hash(self.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.remote_field: obj.remote_field = copy.copy(self.remote_field) if hasattr(self.remote_field, 'field') and self.remote_field.field is self: obj.remote_field.field = obj memodict[id(self)] = obj return obj def __copy__(self): # We need to avoid hitting __reduce__, so define this # slightly weird copy construct. obj = Empty() obj.__class__ = self.__class__ obj.__dict__ = self.__dict__.copy() return obj def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, not a new copy of that field. So, we use the app registry to load the model and then the field back. """ if not hasattr(self, 'model'): # Fields are sometimes used without attaching them to models (for # example in aggregation). In this case give back a plain field # instance. The code below will create a new empty instance of # class self.__class__, then update its dict with self.__dict__ # values - so, this is very close to normal pickle. return _empty, (self.__class__,), self.__dict__ return _load_field, (self.model._meta.app_label, self.model._meta.object_name, self.name) def get_pk_value_on_save(self, instance): """ Hook to generate new PK values on save. This method is called when saving instances with no primary key value set. 
If this method returns something else than None, then the returned value is used when saving the new instance. """ if self.default: return self.get_default() return None def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ return value @cached_property def validators(self): """ Some validators can't be created at field initialization time. This method provides a way to delay their creation until required. """ return list(itertools.chain(self.default_validators, self._validators)) def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self.choices and value not in self.empty_values: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. 
for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null'], code='null') if not self.blank and value in self.empty_values: raise exceptions.ValidationError(self.error_messages['blank'], code='blank') def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_check(self, connection): """ Return the database column check constraint for this field, for the provided connection. Works the same way as db_type() for the case that get_internal_type() does not map to a preexisting model field. """ data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_") try: return connection.data_type_check_constraints[self.get_internal_type()] % data except KeyError: return None def db_type(self, connection): """ Return the database column data type for this field, for the provided connection. """ # The default implementation of this method looks at the # backend-specific data_types dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. 
# # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_") try: return connection.data_types[self.get_internal_type()] % data except KeyError: return None def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. For example, this method is called by ForeignKey and OneToOneField to determine its data type. """ return self.db_type(connection) def db_parameters(self, connection): """ Extension of db_type(), providing a range of different return values (type, checks). This will look at db_type(), allowing custom model fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) return { "type": type_string, "check": check_string, } def db_type_suffix(self, connection): return connection.data_types_suffix.get(self.get_internal_type()) def get_db_converters(self, connection): if hasattr(self, 'from_db_value'): return [self.from_db_value] return [] @property def unique(self): return self._unique or self.primary_key def set_attributes_from_name(self, name): if not self.name: self.name = name self.attname, self.column = self.get_attname_column() self.concrete = self.column is not None if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name, private_only=False, virtual_only=NOT_PROVIDED): """ Register the field with the model class it belongs to. If private_only is True, a separate instance of this field will be created for every subclass of cls, even if cls is not an abstract model. 
""" if virtual_only is not NOT_PROVIDED: warnings.warn( "The `virtual_only` argument of Field.contribute_to_class() " "has been renamed to `private_only`.", RemovedInDjango20Warning, stacklevel=2 ) private_only = virtual_only self.set_attributes_from_name(name) self.model = cls if private_only: cls._meta.add_field(self, private=True) else: cls._meta.add_field(self) if self.column: # Don't override classmethods with the descriptor. This means that # if you have a classmethod and a field with the same name, then # such fields can't be deferred (we don't have a check for this). if not getattr(cls, self.attname, None): setattr(cls, self.attname, DeferredAttribute(self.attname, cls)) if self.choices: setattr(cls, 'get_%s_display' % self.name, curry(cls._get_FIELD_display, field=self)) def get_filter_kwargs_for_object(self, obj): """ Return a dict that when passed as kwargs to self.model.filter(), would yield all instances having the same value for this field as obj has. """ return {self.name: getattr(obj, self.attname)} def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_cache_name(self): return '_%s_cache' % self.name def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """ Returns field's value just before saving. """ return getattr(model_instance, self.attname) def get_prep_value(self, value): """ Perform preliminary non-db specific value checks and conversions. """ if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): """Returns field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). 
""" if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """ Returns field's value prepared for saving into a database. """ return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): """ Returns a boolean of whether this field has a default value. """ return self.default is not NOT_PROVIDED def get_default(self): """ Returns the default value for this field. """ if self.has_default(): if callable(self.default): return self.default() return self.default if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return None return "" def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None): """Returns choices with a default blank choices included, for use as SelectField choices for this field.""" blank_defined = False choices = list(self.choices) if self.choices else [] named_groups = choices and isinstance(choices[0][1], (list, tuple)) if not named_groups: for choice, __ in choices: if choice in ('', None): blank_defined = True break first_choice = (blank_choice if include_blank and not blank_defined else []) if self.choices: return first_choice + choices rel_model = self.remote_field.model limit_choices_to = limit_choices_to or self.get_limit_choices_to() if hasattr(self.remote_field, 'get_related_field'): lst = [(getattr(x, self.remote_field.get_related_field().attname), smart_text(x)) for x in rel_model._default_manager.complex_filter( limit_choices_to)] else: lst = [(x._get_pk_val(), smart_text(x)) for x in rel_model._default_manager.complex_filter( limit_choices_to)] return first_choice + lst @warn_about_renamed_method( 'Field', '_get_val_from_obj', 'value_from_object', RemovedInDjango20Warning ) def _get_val_from_obj(self, obj): if obj is not None: return getattr(obj, self.attname) else: return self.get_default() def value_to_string(self, obj): """ Returns a string value of 
this field from the passed obj. This is used by the serialization framework. """ return smart_text(self.value_from_object(obj)) def _get_flatchoices(self): """Flattened version of choices tuple.""" flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice, value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): """ Returns a django.forms.Field instance for this database Field. """ defaults = {'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None if choices_form_class is not None: form_class = choices_form_class else: form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in list(kwargs): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial'): del kwargs[k] defaults.update(kwargs) if form_class is None: form_class = forms.CharField return form_class(**defaults) def value_from_object(self, obj): """ Returns the value of this field in the given model instance. 
""" return getattr(obj, self.attname) class AutoField(Field): description = _("Integer") empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be an integer."), } def __init__(self, *args, **kwargs): kwargs['blank'] = True super(AutoField, self).__init__(*args, **kwargs) def check(self, **kwargs): errors = super(AutoField, self).check(**kwargs) errors.extend(self._check_primary_key()) return errors def _check_primary_key(self): if not self.primary_key: return [ checks.Error( 'AutoFields must set primary_key=True.', obj=self, id='fields.E100', ), ] else: return [] def deconstruct(self): name, path, args, kwargs = super(AutoField, self).deconstruct() del kwargs['blank'] kwargs['primary_key'] = True return name, path, args, kwargs def get_internal_type(self): return "AutoField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def rel_db_type(self, connection): return IntegerField().db_type(connection=connection) def validate(self, value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) value = connection.ops.validate_autopk_value(value) return value def get_prep_value(self, value): value = super(AutoField, self).get_prep_value(value) if value is None: return None return int(value) def contribute_to_class(self, cls, name, **kwargs): assert not cls._meta.has_auto_field, \ "A model can't have more than one AutoField." 
super(AutoField, self).contribute_to_class(cls, name, **kwargs) cls._meta.has_auto_field = True cls._meta.auto_field = self def formfield(self, **kwargs): return None class BigAutoField(AutoField): description = _("Big (8 byte) integer") def get_internal_type(self): return "BigAutoField" def rel_db_type(self, connection): return BigIntegerField().db_type(connection=connection) class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be either True or False."), } description = _("Boolean (Either True or False)") def __init__(self, *args, **kwargs): kwargs['blank'] = True super(BooleanField, self).__init__(*args, **kwargs) def check(self, **kwargs): errors = super(BooleanField, self).check(**kwargs) errors.extend(self._check_null(**kwargs)) return errors def _check_null(self, **kwargs): if getattr(self, 'null', False): return [ checks.Error( 'BooleanFields do not accept null values.', hint='Use a NullBooleanField instead.', obj=self, id='fields.E110', ) ] else: return [] def deconstruct(self): name, path, args, kwargs = super(BooleanField, self).deconstruct() del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "BooleanField" def to_python(self, value): if value in (True, False): # if value is 1 or 0 than it's equal to True or False, but we want # to return a true bool for semantic reasons. return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super(BooleanField, self).get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): # Unlike most fields, BooleanField figures out include_blank from # self.null instead of self.blank. 
        if self.choices:
            # Respect explicit choices; only offer a blank option when the
            # field has neither a default nor an initial form value.
            include_blank = not (self.has_default() or 'initial' in kwargs)
            defaults = {'choices': self.get_choices(include_blank=include_blank)}
        else:
            defaults = {'form_class': forms.BooleanField}
        defaults.update(kwargs)
        return super(BooleanField, self).formfield(**defaults)


class CharField(Field):
    description = _("String (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        super(CharField, self).__init__(*args, **kwargs)
        self.validators.append(validators.MaxLengthValidator(self.max_length))

    def check(self, **kwargs):
        errors = super(CharField, self).check(**kwargs)
        errors.extend(self._check_max_length_attribute(**kwargs))
        return errors

    def _check_max_length_attribute(self, **kwargs):
        # System checks fields.E120/E121: max_length is required and must be
        # a positive integer.
        if self.max_length is None:
            return [
                checks.Error(
                    "CharFields must define a 'max_length' attribute.",
                    obj=self,
                    id='fields.E120',
                )
            ]
        elif not isinstance(self.max_length, six.integer_types) or self.max_length <= 0:
            return [
                checks.Error(
                    "'max_length' must be a positive integer.",
                    obj=self,
                    id='fields.E121',
                )
            ]
        else:
            return []

    def get_internal_type(self):
        return "CharField"

    def to_python(self, value):
        # Strings and None pass through unchanged; anything else is coerced
        # to text.
        if isinstance(value, six.string_types) or value is None:
            return value
        return smart_text(value)

    def get_prep_value(self, value):
        value = super(CharField, self).get_prep_value(value)
        return self.to_python(value)

    def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into widget for example).
        defaults = {'max_length': self.max_length}
        defaults.update(kwargs)
        return super(CharField, self).formfield(**defaults)


class CommaSeparatedIntegerField(CharField):
    default_validators = [validators.validate_comma_separated_integer_list]
    description = _("Comma-separated integers")
    # Deprecated; emits system check warning fields.W901.
    system_check_deprecated_details = {
        'msg': (
            'CommaSeparatedIntegerField has been deprecated. Support '
            'for it (except in historical migrations) will be removed '
            'in Django 2.0.'
        ),
        'hint': (
            'Use CharField(validators=[validate_comma_separated_integer_list]) instead.'
        ),
        'id': 'fields.W901',
    }

    def formfield(self, **kwargs):
        defaults = {
            'error_messages': {
                'invalid': _('Enter only digits separated by commas.'),
            }
        }
        defaults.update(kwargs)
        return super(CommaSeparatedIntegerField, self).formfield(**defaults)


class DateTimeCheckMixin(object):
    # Shared check() plumbing for DateField/DateTimeField/TimeField.

    def check(self, **kwargs):
        errors = super(DateTimeCheckMixin, self).check(**kwargs)
        errors.extend(self._check_mutually_exclusive_options())
        errors.extend(self._check_fix_default_value())
        return errors

    def _check_mutually_exclusive_options(self):
        # auto_now, auto_now_add, and default are mutually exclusive
        # options. The use of more than one of these options together
        # will trigger an Error
        mutually_exclusive_options = [self.auto_now_add, self.auto_now,
                                      self.has_default()]
        enabled_options = [option not in (None, False)
                           for option in mutually_exclusive_options].count(True)
        if enabled_options > 1:
            return [
                checks.Error(
                    "The options auto_now, auto_now_add, and default "
                    "are mutually exclusive. Only one of these options "
                    "may be present.",
                    obj=self,
                    id='fields.E160',
                )
            ]
        else:
            return []

    def _check_fix_default_value(self):
        # Hook overridden by the concrete date/time fields below.
        return []


class DateField(DateTimeCheckMixin, Field):
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid date format. 
It must be " "in YYYY-MM-DD format."), 'invalid_date': _("'%(value)s' value has the correct format (YYYY-MM-DD) " "but it is an invalid date."), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super(DateField, self).__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Adds a warning to the checks framework stating, that using an actual date or datetime value is probably wrong; it's only being evaluated on server start-up. For details see ticket #21905 """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): if not timezone.is_naive(value): value = timezone.make_naive(value, timezone.utc) value = value.date() elif isinstance(value, datetime.date): # Nothing to do, as dates don't have tz information pass else: # No explicit date / datetime value -- no checks necessary return [] offset = datetime.timedelta(days=1) lower = (now - offset).date() upper = (now + offset).date() if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super(DateField, self).deconstruct() if self.auto_now: kwargs['auto_now'] = True if self.auto_now_add: kwargs['auto_now_add'] = True if self.auto_now or self.auto_now_add: del kwargs['editable'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): if settings.USE_TZ and timezone.is_aware(value): # Convert aware datetimes to the default time zone # before casting them to dates (#17742). default_timezone = timezone.get_default_timezone() value = timezone.make_naive(value, default_timezone) return value.date() if isinstance(value, datetime.date): return value try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super(DateField, self).pre_save(model_instance, add) def contribute_to_class(self, cls, name, **kwargs): super(DateField, self).contribute_to_class(cls, name, **kwargs) if not self.null: setattr( cls, 'get_next_by_%s' % self.name, curry(cls._get_next_or_previous_by_FIELD, field=self, is_next=True) ) setattr( cls, 'get_previous_by_%s' % self.name, curry(cls._get_next_or_previous_by_FIELD, field=self, is_next=False) ) def get_prep_value(self, value): value = super(DateField, self).get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # 
Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.DateField} defaults.update(kwargs) return super(DateField, self).formfield(**defaults) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value has an invalid format. It must be in " "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."), 'invalid_date': _("'%(value)s' value has the correct format " "(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _("'%(value)s' value has the correct format " "(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) " "but it is an invalid date/time."), } description = _("Date (with time)") # __init__ is inherited from DateField def _check_fix_default_value(self): """ Adds a warning to the checks framework stating, that using an actual date or datetime value is probably wrong; it's only being evaluated on server start-up. 
For details see ticket #21905 """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.date): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset lower = datetime.datetime(lower.year, lower.month, lower.day) upper = now + second_offset upper = datetime.datetime(upper.year, upper.month, upper.day) value = datetime.datetime(value.year, value.month, value.day) else: # No explicit date / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. warnings.warn("DateTimeField %s.%s received a naive datetime " "(%s) while time zone support is active." 
% (self.model.__name__, self.name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_datetime'], code='invalid_datetime', params={'value': value}, ) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super(DateTimeField, self).pre_save(model_instance, add) # contribute_to_class is inherited from DateField, it registers # get_next_by_FOO and get_prev_by_FOO def get_prep_value(self, value): value = super(DateTimeField, self).get_prep_value(value) value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. try: name = '%s.%s' % (self.model.__name__, self.name) except AttributeError: name = '(unbound)' warnings.warn("DateTimeField %s received a naive datetime (%s)" " while time zone support is active." 
% (name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datetimefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.DateTimeField} defaults.update(kwargs) return super(DateTimeField, self).formfield(**defaults) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be a decimal number."), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super(DecimalField, self).__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super(DecimalField, self).check(**kwargs) digits_errors = self._check_decimal_places() digits_errors.extend(self._check_max_digits()) if not digits_errors: errors.extend(self._check_decimal_places_and_max_digits(**kwargs)) else: errors.extend(digits_errors) return errors def _check_decimal_places(self): try: decimal_places = int(self.decimal_places) if decimal_places < 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'decimal_places' attribute.", obj=self, id='fields.E130', ) ] except ValueError: return [ checks.Error( "'decimal_places' must be a non-negative integer.", obj=self, id='fields.E131', ) ] else: return [] def _check_max_digits(self): try: max_digits = int(self.max_digits) if max_digits <= 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'max_digits' attribute.", obj=self, id='fields.E132', ) ] 
except ValueError: return [ checks.Error( "'max_digits' must be a positive integer.", obj=self, id='fields.E133', ) ] else: return [] def _check_decimal_places_and_max_digits(self, **kwargs): if int(self.decimal_places) > int(self.max_digits): return [ checks.Error( "'max_digits' must be greater or equal to 'decimal_places'.", obj=self, id='fields.E134', ) ] return [] @cached_property def validators(self): return super(DecimalField, self).validators + [ validators.DecimalValidator(self.max_digits, self.decimal_places) ] def deconstruct(self): name, path, args, kwargs = super(DecimalField, self).deconstruct() if self.max_digits is not None: kwargs['max_digits'] = self.max_digits if self.decimal_places is not None: kwargs['decimal_places'] = self.decimal_places return name, path, args, kwargs def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value try: return decimal.Decimal(value) except decimal.InvalidOperation: raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def _format(self, value): if isinstance(value, six.string_types): return value else: return self.format_number(value) def format_number(self, value): """ Formats a number into a string with the requisite number of digits and decimal places. """ # Method moved to django.db.backends.utils. # # It is preserved because it is used by the oracle backend # (django.db.backends.oracle.query), and also for # backwards-compatibility with any external code which may have used # this method. 
from django.db.backends import utils return utils.format_number(value, self.max_digits, self.decimal_places) def get_db_prep_save(self, value, connection): return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): value = super(DecimalField, self).get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): defaults = { 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': forms.DecimalField, } defaults.update(kwargs) return super(DecimalField, self).formfield(**defaults) class DurationField(Field): """Stores timedelta objects. Uses interval on postgres, INVERAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value has an invalid format. It must be in " "[DD] [HH:[MM:]]ss[.uuuuuu] format.") } description = _("Duration") def get_internal_type(self): return "DurationField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.timedelta): return value try: parsed = parse_duration(value) except ValueError: pass else: if parsed is not None: return parsed raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_value(self, value, connection, prepared=False): if connection.features.has_native_duration_field: return value if value is None: return None # Discard any fractional microseconds due to floating point arithmetic. 
return int(round(value.total_seconds() * 1000000)) def get_db_converters(self, connection): converters = [] if not connection.features.has_native_duration_field: converters.append(connection.ops.convert_durationfield_value) return converters + super(DurationField, self).get_db_converters(connection) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else duration_string(val) def formfield(self, **kwargs): defaults = { 'form_class': forms.DurationField, } defaults.update(kwargs) return super(DurationField, self).formfield(**defaults) class EmailField(CharField): default_validators = [validators.validate_email] description = _("Email address") def __init__(self, *args, **kwargs): # max_length=254 to be compliant with RFCs 3696 and 5321 kwargs['max_length'] = kwargs.get('max_length', 254) super(EmailField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(EmailField, self).deconstruct() # We do not exclude max_length if it matches default as we want to change # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. 
defaults = { 'form_class': forms.EmailField, } defaults.update(kwargs) return super(EmailField, self).formfield(**defaults) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders kwargs['max_length'] = kwargs.get('max_length', 100) super(FilePathField, self).__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super(FilePathField, self).check(**kwargs) errors.extend(self._check_allowing_files_or_folders(**kwargs)) return errors def _check_allowing_files_or_folders(self, **kwargs): if not self.allow_files and not self.allow_folders: return [ checks.Error( "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.", obj=self, id='fields.E140', ) ] return [] def deconstruct(self): name, path, args, kwargs = super(FilePathField, self).deconstruct() if self.path != '': kwargs['path'] = self.path if self.match is not None: kwargs['match'] = self.match if self.recursive is not False: kwargs['recursive'] = self.recursive if self.allow_files is not True: kwargs['allow_files'] = self.allow_files if self.allow_folders is not False: kwargs['allow_folders'] = self.allow_folders if kwargs.get("max_length") == 100: del kwargs["max_length"] return name, path, args, kwargs def get_prep_value(self, value): value = super(FilePathField, self).get_prep_value(value) if value is None: return None return six.text_type(value) def formfield(self, **kwargs): defaults = { 'path': self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, 'allow_files': self.allow_files, 'allow_folders': self.allow_folders, } defaults.update(kwargs) return super(FilePathField, self).formfield(**defaults) def get_internal_type(self): return "FilePathField" class 
FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be a float."), } description = _("Floating point number") def get_prep_value(self, value): value = super(FloatField, self).get_prep_value(value) if value is None: return None return float(value) def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): defaults = {'form_class': forms.FloatField} defaults.update(kwargs) return super(FloatField, self).formfield(**defaults) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be an integer."), } description = _("Integer") def check(self, **kwargs): errors = super(IntegerField, self).check(**kwargs) errors.extend(self._check_max_length_warning()) return errors def _check_max_length_warning(self): if self.max_length is not None: return [ checks.Warning( "'max_length' is ignored when used with IntegerField", hint="Remove 'max_length' from field", obj=self, id='fields.W122', ) ] return [] @cached_property def validators(self): # These validators can't be added at field initialization time since # they're based on values retrieved from `connection`. 
validators_ = super(IntegerField, self).validators internal_type = self.get_internal_type() min_value, max_value = connection.ops.integer_field_range(internal_type) if min_value is not None: for validator in validators_: if isinstance(validator, validators.MinValueValidator) and validator.limit_value >= min_value: break else: validators_.append(validators.MinValueValidator(min_value)) if max_value is not None: for validator in validators_: if isinstance(validator, validators.MaxValueValidator) and validator.limit_value <= max_value: break else: validators_.append(validators.MaxValueValidator(max_value)) return validators_ def get_prep_value(self, value): value = super(IntegerField, self).get_prep_value(value) if value is None: return None return int(value) def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): defaults = {'form_class': forms.IntegerField} defaults.update(kwargs) return super(IntegerField, self).formfield(**defaults) class BigIntegerField(IntegerField): empty_strings_allowed = False description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT} defaults.update(kwargs) return super(BigIntegerField, self).formfield(**defaults) class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") system_check_removed_details = { 'msg': ( 'IPAddressField has been removed except for support in ' 'historical migrations.' 
), 'hint': 'Use GenericIPAddressField instead.', 'id': 'fields.E900', } def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 super(IPAddressField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(IPAddressField, self).deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_prep_value(self, value): value = super(IPAddressField, self).get_prep_value(value) if value is None: return None return six.text_type(value) def get_internal_type(self): return "IPAddressField" class GenericIPAddressField(Field): empty_strings_allowed = False description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 super(GenericIPAddressField, self).__init__(verbose_name, name, *args, **kwargs) def check(self, **kwargs): errors = super(GenericIPAddressField, self).check(**kwargs) errors.extend(self._check_blank_and_null_values(**kwargs)) return errors def _check_blank_and_null_values(self, **kwargs): if not getattr(self, 'null', False) and getattr(self, 'blank', False): return [ checks.Error( 'GenericIPAddressFields cannot have blank=True if null=False, ' 'as blank values are stored as nulls.', obj=self, id='fields.E150', ) ] return [] def deconstruct(self): name, path, args, kwargs = super(GenericIPAddressField, self).deconstruct() if self.unpack_ipv4 is not False: kwargs['unpack_ipv4'] = self.unpack_ipv4 if self.protocol != "both": kwargs['protocol'] = self.protocol if kwargs.get("max_length") == 39: del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value is None: return None if 
not isinstance(value, six.string_types): value = force_text(value) value = value.strip() if ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_ipaddressfield_value(value) def get_prep_value(self, value): value = super(GenericIPAddressField, self).get_prep_value(value) if value is None: return None if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return six.text_type(value) def formfield(self, **kwargs): defaults = { 'protocol': self.protocol, 'form_class': forms.GenericIPAddressField, } defaults.update(kwargs) return super(GenericIPAddressField, self).formfield(**defaults) class NullBooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value must be either None, True or False."), } description = _("Boolean (Either True, False or None)") def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True super(NullBooleanField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(NullBooleanField, self).deconstruct() del kwargs['null'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "NullBooleanField" def to_python(self, value): if value is None: return None if value in (True, False): return bool(value) if value in ('None',): return None if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super(NullBooleanField, self).get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): defaults = { 'form_class': 
forms.NullBooleanField, 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} defaults.update(kwargs) return super(NullBooleanField, self).formfield(**defaults) class PositiveIntegerRelDbTypeMixin(object): def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. In that case (related_fields_match_type=True), the primary key should return its db_type. """ if connection.features.related_fields_match_type: return self.db_type(connection) else: return IntegerField().db_type(connection=connection) class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): defaults = {'min_value': 0} defaults.update(kwargs) return super(PositiveIntegerField, self).formfield(**defaults) class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): defaults = {'min_value': 0} defaults.update(kwargs) return super(PositiveSmallIntegerField, self).formfield(**defaults) class SlugField(CharField): default_validators = [validators.validate_slug] description = _("Slug (up to %(max_length)s)") def __init__(self, *args, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 50) # Set db_index=True unless it's been set manually. 
if 'db_index' not in kwargs: kwargs['db_index'] = True self.allow_unicode = kwargs.pop('allow_unicode', False) if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super(SlugField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(SlugField, self).deconstruct() if kwargs.get("max_length") == 50: del kwargs['max_length'] if self.db_index is False: kwargs['db_index'] = False else: del kwargs['db_index'] if self.allow_unicode is not False: kwargs['allow_unicode'] = self.allow_unicode return name, path, args, kwargs def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): defaults = {'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode} defaults.update(kwargs) return super(SlugField, self).formfield(**defaults) class SmallIntegerField(IntegerField): description = _("Small integer") def get_internal_type(self): return "SmallIntegerField" class TextField(Field): description = _("Text") def get_internal_type(self): return "TextField" def to_python(self, value): if isinstance(value, six.string_types) or value is None: return value return smart_text(value) def get_prep_value(self, value): value = super(TextField, self).get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length, 'widget': forms.Textarea} defaults.update(kwargs) return super(TextField, self).formfield(**defaults) class TimeField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%(value)s' value has an invalid format. 
It must be in " "HH:MM[:ss[.uuuuuu]] format."), 'invalid_time': _("'%(value)s' value has the correct format " "(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super(TimeField, self).__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Adds a warning to the checks framework stating, that using an actual time or datetime value is probably wrong; it's only being evaluated on server start-up. For details see ticket #21905 """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.time): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset value = datetime.datetime.combine(now.date(), value) if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc).time() else: # No explicit time / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super(TimeField, self).deconstruct() if self.auto_now is not False: kwargs["auto_now"] = self.auto_now if self.auto_now_add is not False: kwargs["auto_now_add"] = self.auto_now_add if self.auto_now or self.auto_now_add: del kwargs['blank'] del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. Oracle), so we'll be accommodating. return value.time() try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_time'], code='invalid_time', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super(TimeField, self).pre_save(model_instance, add) def get_prep_value(self, value): value = super(TimeField, self).get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_timefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.TimeField} defaults.update(kwargs) 
return super(TimeField, self).formfield(**defaults) class URLField(CharField): default_validators = [validators.URLValidator()] description = _("URL") def __init__(self, verbose_name=None, name=None, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 200) super(URLField, self).__init__(verbose_name, name, **kwargs) def deconstruct(self): name, path, args, kwargs = super(URLField, self).deconstruct() if kwargs.get("max_length") == 200: del kwargs['max_length'] return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. defaults = { 'form_class': forms.URLField, } defaults.update(kwargs) return super(URLField, self).formfield(**defaults) class BinaryField(Field): description = _("Raw binary data") empty_values = [None, b''] def __init__(self, *args, **kwargs): kwargs['editable'] = False super(BinaryField, self).__init__(*args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) def deconstruct(self): name, path, args, kwargs = super(BinaryField, self).deconstruct() del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "BinaryField" def get_placeholder(self, value, compiler, connection): return connection.ops.binary_placeholder_sql(value) def get_default(self): if self.has_default() and not callable(self.default): return self.default default = super(BinaryField, self).get_default() if default == '': return b'' return default def get_db_prep_value(self, value, connection, prepared=False): value = super(BinaryField, self).get_db_prep_value(value, connection, prepared) if value is not None: return connection.Database.Binary(value) return value def value_to_string(self, obj): """Binary data is serialized as base64""" return b64encode(force_bytes(self.value_from_object(obj))).decode('ascii') def to_python(self, value): # If it's a string, it should be base64-encoded data if 
isinstance(value, six.text_type): return six.memoryview(b64decode(force_bytes(value))) return value class UUIDField(Field): default_error_messages = { 'invalid': _("'%(value)s' is not a valid UUID."), } description = 'Universally unique identifier' empty_strings_allowed = False def __init__(self, verbose_name=None, **kwargs): kwargs['max_length'] = 32 super(UUIDField, self).__init__(verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super(UUIDField, self).deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "UUIDField" def get_db_prep_value(self, value, connection, prepared=False): if value is None: return None if not isinstance(value, uuid.UUID): try: value = uuid.UUID(value) except AttributeError: raise TypeError(self.error_messages['invalid'] % {'value': value}) if connection.features.has_native_uuid_field: return value return value.hex def to_python(self, value): if value and not isinstance(value, uuid.UUID): try: return uuid.UUID(value) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) return value def formfield(self, **kwargs): defaults = { 'form_class': forms.UUIDField, } defaults.update(kwargs) return super(UUIDField, self).formfield(**defaults)
kidchang/compassv2-api
refs/heads/master
compass/tasks/__init__.py
48
# Copyright 2014 Huawei Technologies Co. Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
JohnLZeller/jenkinsapi
refs/heads/master
jenkinsapi/executor.py
10
""" Module for jenkinsapi Executer class """ from jenkinsapi.jenkinsbase import JenkinsBase import logging log = logging.getLogger(__name__) class Executor(JenkinsBase): """ Class to hold information on nodes that are attached as slaves to the master jenkins instance """ def __init__(self, baseurl, nodename, jenkins_obj, number): """ Init a node object by providing all relevant pointers to it :param baseurl: basic url for querying information on a node :param nodename: hostname of the node :param jenkins_obj: ref to the jenkins obj :return: Node obj """ self.nodename = nodename self.number = number self.jenkins = jenkins_obj self.baseurl = baseurl JenkinsBase.__init__(self, baseurl) def __str__(self): return '%s %s' % (self.nodename, self.number) def get_jenkins_obj(self): return self.jenkins def get_progress(self): """Returns percentage""" return self.poll(tree='progress')['progress'] def get_number(self): """ Get Executor number. """ return self.poll(tree='number')['number'] def is_idle(self): """ Returns Boolean: whether Executor is idle or not. """ return self.poll(tree='idle')['idle'] def likely_stuck(self): """ Returns Boolean: whether Executor is likely stuck or not. """ return self.poll(tree='likelyStuck')['likelyStuck'] def get_current_executable(self): """ Returns the current Queue.Task this executor is running. """ return self.poll(tree='currentExecutable')['currentExecutable']
NiclasEriksen/py-towerwars
refs/heads/master
src/numpy/lib/polynomial.py
35
""" Functions to operate on polynomials. """ from __future__ import division, absolute_import, print_function __all__ = ['poly', 'roots', 'polyint', 'polyder', 'polyadd', 'polysub', 'polymul', 'polydiv', 'polyval', 'poly1d', 'polyfit', 'RankWarning'] import re import warnings import numpy.core.numeric as NX from numpy.core import isscalar, abs, finfo, atleast_1d, hstack, dot from numpy.lib.twodim_base import diag, vander from numpy.lib.function_base import trim_zeros, sort_complex from numpy.lib.type_check import iscomplex, real, imag from numpy.linalg import eigvals, lstsq, inv class RankWarning(UserWarning): """ Issued by `polyfit` when the Vandermonde matrix is rank deficient. For more information, a way to suppress the warning, and an example of `RankWarning` being issued, see `polyfit`. """ pass def poly(seq_of_zeros): """ Find the coefficients of a polynomial with the given sequence of roots. Returns the coefficients of the polynomial whose leading coefficient is one for the given sequence of zeros (multiple roots must be included in the sequence as many times as their multiplicity; see Examples). A square matrix (or array, which will be treated as a matrix) can also be given, in which case the coefficients of the characteristic polynomial of the matrix are returned. Parameters ---------- seq_of_zeros : array_like, shape (N,) or (N, N) A sequence of polynomial roots, or a square array or matrix object. Returns ------- c : ndarray 1D array of polynomial coefficients from highest to lowest degree: ``c[0] * x**(N) + c[1] * x**(N-1) + ... + c[N-1] * x + c[N]`` where c[0] always equals 1. Raises ------ ValueError If input is the wrong shape (the input must be a 1-D or square 2-D array). See Also -------- polyval : Evaluate a polynomial at a point. roots : Return the roots of a polynomial. polyfit : Least squares polynomial fit. poly1d : A one-dimensional polynomial class. 
Notes ----- Specifying the roots of a polynomial still leaves one degree of freedom, typically represented by an undetermined leading coefficient. [1]_ In the case of this function, that coefficient - the first one in the returned array - is always taken as one. (If for some reason you have one other point, the only automatic way presently to leverage that information is to use ``polyfit``.) The characteristic polynomial, :math:`p_a(t)`, of an `n`-by-`n` matrix **A** is given by :math:`p_a(t) = \\mathrm{det}(t\\, \\mathbf{I} - \\mathbf{A})`, where **I** is the `n`-by-`n` identity matrix. [2]_ References ---------- .. [1] M. Sullivan and M. Sullivan, III, "Algebra and Trignometry, Enhanced With Graphing Utilities," Prentice-Hall, pg. 318, 1996. .. [2] G. Strang, "Linear Algebra and Its Applications, 2nd Edition," Academic Press, pg. 182, 1980. Examples -------- Given a sequence of a polynomial's zeros: >>> np.poly((0, 0, 0)) # Multiple root example array([1, 0, 0, 0]) The line above represents z**3 + 0*z**2 + 0*z + 0. >>> np.poly((-1./2, 0, 1./2)) array([ 1. , 0. , -0.25, 0. ]) The line above represents z**3 - z/4 >>> np.poly((np.random.random(1.)[0], 0, np.random.random(1.)[0])) array([ 1. , -0.77086955, 0.08618131, 0. ]) #random Given a square array object: >>> P = np.array([[0, 1./3], [-1./2, 0]]) >>> np.poly(P) array([ 1. , 0. , 0.16666667]) Or a square matrix object: >>> np.poly(np.matrix(P)) array([ 1. , 0. , 0.16666667]) Note how in all cases the leading coefficient is always 1. 
""" seq_of_zeros = atleast_1d(seq_of_zeros) sh = seq_of_zeros.shape if len(sh) == 2 and sh[0] == sh[1] and sh[0] != 0: seq_of_zeros = eigvals(seq_of_zeros) elif len(sh) == 1: pass else: raise ValueError("input must be 1d or non-empty square 2d array.") if len(seq_of_zeros) == 0: return 1.0 a = [1] for k in range(len(seq_of_zeros)): a = NX.convolve(a, [1, -seq_of_zeros[k]], mode='full') if issubclass(a.dtype.type, NX.complexfloating): # if complex roots are all complex conjugates, the roots are real. roots = NX.asarray(seq_of_zeros, complex) pos_roots = sort_complex(NX.compress(roots.imag > 0, roots)) neg_roots = NX.conjugate(sort_complex( NX.compress(roots.imag < 0, roots))) if (len(pos_roots) == len(neg_roots) and NX.alltrue(neg_roots == pos_roots)): a = a.real.copy() return a def roots(p): """ Return the roots of a polynomial with coefficients given in p. The values in the rank-1 array `p` are coefficients of a polynomial. If the length of `p` is n+1 then the polynomial is described by:: p[0] * x**n + p[1] * x**(n-1) + ... + p[n-1]*x + p[n] Parameters ---------- p : array_like Rank-1 array of polynomial coefficients. Returns ------- out : ndarray An array containing the complex roots of the polynomial. Raises ------ ValueError When `p` cannot be converted to a rank-1 array. See also -------- poly : Find the coefficients of a polynomial with a given sequence of roots. polyval : Evaluate a polynomial at a point. polyfit : Least squares polynomial fit. poly1d : A one-dimensional polynomial class. Notes ----- The algorithm relies on computing the eigenvalues of the companion matrix [1]_. References ---------- .. [1] R. A. Horn & C. R. Johnson, *Matrix Analysis*. Cambridge, UK: Cambridge University Press, 1999, pp. 146-7. 
Examples -------- >>> coeff = [3.2, 2, 1] >>> np.roots(coeff) array([-0.3125+0.46351241j, -0.3125-0.46351241j]) """ # If input is scalar, this makes it an array p = atleast_1d(p) if len(p.shape) != 1: raise ValueError("Input must be a rank-1 array.") # find non-zero array entries non_zero = NX.nonzero(NX.ravel(p))[0] # Return an empty array if polynomial is all zeros if len(non_zero) == 0: return NX.array([]) # find the number of trailing zeros -- this is the number of roots at 0. trailing_zeros = len(p) - non_zero[-1] - 1 # strip leading and trailing zeros p = p[int(non_zero[0]):int(non_zero[-1])+1] # casting: if incoming array isn't floating point, make it floating point. if not issubclass(p.dtype.type, (NX.floating, NX.complexfloating)): p = p.astype(float) N = len(p) if N > 1: # build companion matrix and find its eigenvalues (the roots) A = diag(NX.ones((N-2,), p.dtype), -1) A[0,:] = -p[1:] / p[0] roots = eigvals(A) else: roots = NX.array([]) # tack any zeros onto the back of the array roots = hstack((roots, NX.zeros(trailing_zeros, roots.dtype))) return roots def polyint(p, m=1, k=None): """ Return an antiderivative (indefinite integral) of a polynomial. The returned order `m` antiderivative `P` of polynomial `p` satisfies :math:`\\frac{d^m}{dx^m}P(x) = p(x)` and is defined up to `m - 1` integration constants `k`. The constants determine the low-order polynomial part .. math:: \\frac{k_{m-1}}{0!} x^0 + \\ldots + \\frac{k_0}{(m-1)!}x^{m-1} of `P` so that :math:`P^{(j)}(0) = k_{m-j-1}`. Parameters ---------- p : {array_like, poly1d} Polynomial to differentiate. A sequence is interpreted as polynomial coefficients, see `poly1d`. m : int, optional Order of the antiderivative. (Default: 1) k : {None, list of `m` scalars, scalar}, optional Integration constants. They are given in the order of integration: those corresponding to highest-order terms come first. If ``None`` (default), all constants are assumed to be zero. 
If `m = 1`, a single scalar can be given instead of a list. See Also -------- polyder : derivative of a polynomial poly1d.integ : equivalent method Examples -------- The defining property of the antiderivative: >>> p = np.poly1d([1,1,1]) >>> P = np.polyint(p) >>> P poly1d([ 0.33333333, 0.5 , 1. , 0. ]) >>> np.polyder(P) == p True The integration constants default to zero, but can be specified: >>> P = np.polyint(p, 3) >>> P(0) 0.0 >>> np.polyder(P)(0) 0.0 >>> np.polyder(P, 2)(0) 0.0 >>> P = np.polyint(p, 3, k=[6,5,3]) >>> P poly1d([ 0.01666667, 0.04166667, 0.16666667, 3. , 5. , 3. ]) Note that 3 = 6 / 2!, and that the constants are given in the order of integrations. Constant of the highest-order polynomial term comes first: >>> np.polyder(P, 2)(0) 6.0 >>> np.polyder(P, 1)(0) 5.0 >>> P(0) 3.0 """ m = int(m) if m < 0: raise ValueError("Order of integral must be positive (see polyder)") if k is None: k = NX.zeros(m, float) k = atleast_1d(k) if len(k) == 1 and m > 1: k = k[0]*NX.ones(m, float) if len(k) < m: raise ValueError( "k must be a scalar or a rank-1 array of length 1 or >m.") truepoly = isinstance(p, poly1d) p = NX.asarray(p) if m == 0: if truepoly: return poly1d(p) return p else: # Note: this must work also with object and integer arrays y = NX.concatenate((p.__truediv__(NX.arange(len(p), 0, -1)), [k[0]])) val = polyint(y, m - 1, k=k[1:]) if truepoly: return poly1d(val) return val def polyder(p, m=1): """ Return the derivative of the specified order of a polynomial. Parameters ---------- p : poly1d or sequence Polynomial to differentiate. A sequence is interpreted as polynomial coefficients, see `poly1d`. m : int, optional Order of differentiation (default: 1) Returns ------- der : poly1d A new polynomial representing the derivative. See Also -------- polyint : Anti-derivative of a polynomial. poly1d : Class for one-dimensional polynomials. 
Examples -------- The derivative of the polynomial :math:`x^3 + x^2 + x^1 + 1` is: >>> p = np.poly1d([1,1,1,1]) >>> p2 = np.polyder(p) >>> p2 poly1d([3, 2, 1]) which evaluates to: >>> p2(2.) 17.0 We can verify this, approximating the derivative with ``(f(x + h) - f(x))/h``: >>> (p(2. + 0.001) - p(2.)) / 0.001 17.007000999997857 The fourth-order derivative of a 3rd-order polynomial is zero: >>> np.polyder(p, 2) poly1d([6, 2]) >>> np.polyder(p, 3) poly1d([6]) >>> np.polyder(p, 4) poly1d([ 0.]) """ m = int(m) if m < 0: raise ValueError("Order of derivative must be positive (see polyint)") truepoly = isinstance(p, poly1d) p = NX.asarray(p) n = len(p) - 1 y = p[:-1] * NX.arange(n, 0, -1) if m == 0: val = p else: val = polyder(y, m - 1) if truepoly: val = poly1d(val) return val def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False): """ Least squares polynomial fit. Fit a polynomial ``p(x) = p[0] * x**deg + ... + p[deg]`` of degree `deg` to points `(x, y)`. Returns a vector of coefficients `p` that minimises the squared error. Parameters ---------- x : array_like, shape (M,) x-coordinates of the M sample points ``(x[i], y[i])``. y : array_like, shape (M,) or (M, K) y-coordinates of the sample points. Several data sets of sample points sharing the same x-coordinates can be fitted at once by passing in a 2D-array that contains one dataset per column. deg : int Degree of the fitting polynomial rcond : float, optional Relative condition number of the fit. Singular values smaller than this relative to the largest singular value will be ignored. The default value is len(x)*eps, where eps is the relative precision of the float type, about 2e-16 in most cases. full : bool, optional Switch determining nature of return value. When it is False (the default) just the coefficients are returned, when True diagnostic information from the singular value decomposition is also returned. 
w : array_like, shape (M,), optional weights to apply to the y-coordinates of the sample points. cov : bool, optional Return the estimate and the covariance matrix of the estimate If full is True, then cov is not returned. Returns ------- p : ndarray, shape (M,) or (M, K) Polynomial coefficients, highest power first. If `y` was 2-D, the coefficients for `k`-th data set are in ``p[:,k]``. residuals, rank, singular_values, rcond : Present only if `full` = True. Residuals of the least-squares fit, the effective rank of the scaled Vandermonde coefficient matrix, its singular values, and the specified value of `rcond`. For more details, see `linalg.lstsq`. V : ndarray, shape (M,M) or (M,M,K) Present only if `full` = False and `cov`=True. The covariance matrix of the polynomial coefficient estimates. The diagonal of this matrix are the variance estimates for each coefficient. If y is a 2-D array, then the covariance matrix for the `k`-th data set are in ``V[:,:,k]`` Warns ----- RankWarning The rank of the coefficient matrix in the least-squares fit is deficient. The warning is only raised if `full` = False. The warnings can be turned off by >>> import warnings >>> warnings.simplefilter('ignore', np.RankWarning) See Also -------- polyval : Computes polynomial values. linalg.lstsq : Computes a least-squares fit. scipy.interpolate.UnivariateSpline : Computes spline fits. Notes ----- The solution minimizes the squared error .. math :: E = \\sum_{j=0}^k |p(x_j) - y_j|^2 in the equations:: x[0]**n * p[0] + ... + x[0] * p[n-1] + p[n] = y[0] x[1]**n * p[0] + ... + x[1] * p[n-1] + p[n] = y[1] ... x[k]**n * p[0] + ... + x[k] * p[n-1] + p[n] = y[k] The coefficient matrix of the coefficients `p` is a Vandermonde matrix. `polyfit` issues a `RankWarning` when the least-squares fit is badly conditioned. This implies that the best fit is not well-defined due to numerical error. The results may be improved by lowering the polynomial degree or by replacing `x` by `x` - `x`.mean(). 
The `rcond` parameter can also be set to a value smaller than its default, but the resulting fit may be spurious: including contributions from the small singular values can add numerical noise to the result. Note that fitting polynomial coefficients is inherently badly conditioned when the degree of the polynomial is large or the interval of sample points is badly centered. The quality of the fit should always be checked in these cases. When polynomial fits are not satisfactory, splines may be a good alternative. References ---------- .. [1] Wikipedia, "Curve fitting", http://en.wikipedia.org/wiki/Curve_fitting .. [2] Wikipedia, "Polynomial interpolation", http://en.wikipedia.org/wiki/Polynomial_interpolation Examples -------- >>> x = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) >>> y = np.array([0.0, 0.8, 0.9, 0.1, -0.8, -1.0]) >>> z = np.polyfit(x, y, 3) >>> z array([ 0.08703704, -0.81349206, 1.69312169, -0.03968254]) It is convenient to use `poly1d` objects for dealing with polynomials: >>> p = np.poly1d(z) >>> p(0.5) 0.6143849206349179 >>> p(3.5) -0.34732142857143039 >>> p(10) 22.579365079365115 High-order polynomials may oscillate wildly: >>> p30 = np.poly1d(np.polyfit(x, y, 30)) /... RankWarning: Polyfit may be poorly conditioned... >>> p30(4) -0.80000000000000204 >>> p30(5) -0.99999999999999445 >>> p30(4.5) -0.10547061179440398 Illustration: >>> import matplotlib.pyplot as plt >>> xp = np.linspace(-2, 6, 100) >>> _ = plt.plot(x, y, '.', xp, p(xp), '-', xp, p30(xp), '--') >>> plt.ylim(-2,2) (-2, 2) >>> plt.show() """ order = int(deg) + 1 x = NX.asarray(x) + 0.0 y = NX.asarray(y) + 0.0 # check arguments. 
if deg < 0: raise ValueError("expected deg >= 0") if x.ndim != 1: raise TypeError("expected 1D vector for x") if x.size == 0: raise TypeError("expected non-empty vector for x") if y.ndim < 1 or y.ndim > 2: raise TypeError("expected 1D or 2D array for y") if x.shape[0] != y.shape[0]: raise TypeError("expected x and y to have same length") # set rcond if rcond is None: rcond = len(x)*finfo(x.dtype).eps # set up least squares equation for powers of x lhs = vander(x, order) rhs = y # apply weighting if w is not None: w = NX.asarray(w) + 0.0 if w.ndim != 1: raise TypeError("expected a 1-d array for weights") if w.shape[0] != y.shape[0]: raise TypeError("expected w and y to have the same length") lhs *= w[:, NX.newaxis] if rhs.ndim == 2: rhs *= w[:, NX.newaxis] else: rhs *= w # scale lhs to improve condition number and solve scale = NX.sqrt((lhs*lhs).sum(axis=0)) lhs /= scale c, resids, rank, s = lstsq(lhs, rhs, rcond) c = (c.T/scale).T # broadcast scale coefficients # warn on rank reduction, which indicates an ill conditioned matrix if rank != order and not full: msg = "Polyfit may be poorly conditioned" warnings.warn(msg, RankWarning) if full: return c, resids, rank, s, rcond elif cov: Vbase = inv(dot(lhs.T, lhs)) Vbase /= NX.outer(scale, scale) # Some literature ignores the extra -2.0 factor in the denominator, but # it is included here because the covariance of Multivariate Student-T # (which is implied by a Bayesian uncertainty analysis) includes it. # Plus, it gives a slightly more conservative estimate of uncertainty. fac = resids / (len(x) - order - 2.0) if y.ndim == 1: return c, Vbase * fac else: return c, Vbase[:,:, NX.newaxis] * fac else: return c def polyval(p, x): """ Evaluate a polynomial at specific values. If `p` is of length N, this function returns the value: ``p[0]*x**(N-1) + p[1]*x**(N-2) + ... + p[N-2]*x + p[N-1]`` If `x` is a sequence, then `p(x)` is returned for each element of `x`. 
If `x` is another polynomial then the composite polynomial `p(x(t))` is returned. Parameters ---------- p : array_like or poly1d object 1D array of polynomial coefficients (including coefficients equal to zero) from highest degree to the constant term, or an instance of poly1d. x : array_like or poly1d object A number, a 1D array of numbers, or an instance of poly1d, "at" which to evaluate `p`. Returns ------- values : ndarray or poly1d If `x` is a poly1d instance, the result is the composition of the two polynomials, i.e., `x` is "substituted" in `p` and the simplified result is returned. In addition, the type of `x` - array_like or poly1d - governs the type of the output: `x` array_like => `values` array_like, `x` a poly1d object => `values` is also. See Also -------- poly1d: A polynomial class. Notes ----- Horner's scheme [1]_ is used to evaluate the polynomial. Even so, for polynomials of high degree the values may be inaccurate due to rounding errors. Use carefully. References ---------- .. [1] I. N. Bronshtein, K. A. Semendyayev, and K. A. Hirsch (Eng. trans. Ed.), *Handbook of Mathematics*, New York, Van Nostrand Reinhold Co., 1985, pg. 720. Examples -------- >>> np.polyval([3,0,1], 5) # 3 * 5**2 + 0 * 5**1 + 1 76 >>> np.polyval([3,0,1], np.poly1d(5)) poly1d([ 76.]) >>> np.polyval(np.poly1d([3,0,1]), 5) 76 >>> np.polyval(np.poly1d([3,0,1]), np.poly1d(5)) poly1d([ 76.]) """ p = NX.asarray(p) if isinstance(x, poly1d): y = 0 else: x = NX.asarray(x) y = NX.zeros_like(x) for i in range(len(p)): y = x * y + p[i] return y def polyadd(a1, a2): """ Find the sum of two polynomials. Returns the polynomial resulting from the sum of two input polynomials. Each input must be either a poly1d object or a 1D sequence of polynomial coefficients, from highest to lowest degree. Parameters ---------- a1, a2 : array_like or poly1d object Input polynomials. Returns ------- out : ndarray or poly1d object The sum of the inputs. 
If either input is a poly1d object, then the output is also a poly1d object. Otherwise, it is a 1D array of polynomial coefficients from highest to lowest degree. See Also -------- poly1d : A one-dimensional polynomial class. poly, polyadd, polyder, polydiv, polyfit, polyint, polysub, polyval Examples -------- >>> np.polyadd([1, 2], [9, 5, 4]) array([9, 6, 6]) Using poly1d objects: >>> p1 = np.poly1d([1, 2]) >>> p2 = np.poly1d([9, 5, 4]) >>> print p1 1 x + 2 >>> print p2 2 9 x + 5 x + 4 >>> print np.polyadd(p1, p2) 2 9 x + 6 x + 6 """ truepoly = (isinstance(a1, poly1d) or isinstance(a2, poly1d)) a1 = atleast_1d(a1) a2 = atleast_1d(a2) diff = len(a2) - len(a1) if diff == 0: val = a1 + a2 elif diff > 0: zr = NX.zeros(diff, a1.dtype) val = NX.concatenate((zr, a1)) + a2 else: zr = NX.zeros(abs(diff), a2.dtype) val = a1 + NX.concatenate((zr, a2)) if truepoly: val = poly1d(val) return val def polysub(a1, a2): """ Difference (subtraction) of two polynomials. Given two polynomials `a1` and `a2`, returns ``a1 - a2``. `a1` and `a2` can be either array_like sequences of the polynomials' coefficients (including coefficients equal to zero), or `poly1d` objects. Parameters ---------- a1, a2 : array_like or poly1d Minuend and subtrahend polynomials, respectively. Returns ------- out : ndarray or poly1d Array or `poly1d` object of the difference polynomial's coefficients. See Also -------- polyval, polydiv, polymul, polyadd Examples -------- .. math:: (2 x^2 + 10 x - 2) - (3 x^2 + 10 x -4) = (-x^2 + 2) >>> np.polysub([2, 10, -2], [3, 10, -4]) array([-1, 0, 2]) """ truepoly = (isinstance(a1, poly1d) or isinstance(a2, poly1d)) a1 = atleast_1d(a1) a2 = atleast_1d(a2) diff = len(a2) - len(a1) if diff == 0: val = a1 - a2 elif diff > 0: zr = NX.zeros(diff, a1.dtype) val = NX.concatenate((zr, a1)) - a2 else: zr = NX.zeros(abs(diff), a2.dtype) val = a1 - NX.concatenate((zr, a2)) if truepoly: val = poly1d(val) return val def polymul(a1, a2): """ Find the product of two polynomials. 
Finds the polynomial resulting from the multiplication of the two input polynomials. Each input must be either a poly1d object or a 1D sequence of polynomial coefficients, from highest to lowest degree. Parameters ---------- a1, a2 : array_like or poly1d object Input polynomials. Returns ------- out : ndarray or poly1d object The polynomial resulting from the multiplication of the inputs. If either inputs is a poly1d object, then the output is also a poly1d object. Otherwise, it is a 1D array of polynomial coefficients from highest to lowest degree. See Also -------- poly1d : A one-dimensional polynomial class. poly, polyadd, polyder, polydiv, polyfit, polyint, polysub, polyval convolve : Array convolution. Same output as polymul, but has parameter for overlap mode. Examples -------- >>> np.polymul([1, 2, 3], [9, 5, 1]) array([ 9, 23, 38, 17, 3]) Using poly1d objects: >>> p1 = np.poly1d([1, 2, 3]) >>> p2 = np.poly1d([9, 5, 1]) >>> print p1 2 1 x + 2 x + 3 >>> print p2 2 9 x + 5 x + 1 >>> print np.polymul(p1, p2) 4 3 2 9 x + 23 x + 38 x + 17 x + 3 """ truepoly = (isinstance(a1, poly1d) or isinstance(a2, poly1d)) a1, a2 = poly1d(a1), poly1d(a2) val = NX.convolve(a1, a2) if truepoly: val = poly1d(val) return val def polydiv(u, v): """ Returns the quotient and remainder of polynomial division. The input arrays are the coefficients (including any coefficients equal to zero) of the "numerator" (dividend) and "denominator" (divisor) polynomials, respectively. Parameters ---------- u : array_like or poly1d Dividend polynomial's coefficients. v : array_like or poly1d Divisor polynomial's coefficients. Returns ------- q : ndarray Coefficients, including those equal to zero, of the quotient. r : ndarray Coefficients, including those equal to zero, of the remainder. See Also -------- poly, polyadd, polyder, polydiv, polyfit, polyint, polymul, polysub, polyval Notes ----- Both `u` and `v` must be 0-d or 1-d (ndim = 0 or 1), but `u.ndim` need not equal `v.ndim`. 
In other words, all four possible combinations - ``u.ndim = v.ndim = 0``, ``u.ndim = v.ndim = 1``, ``u.ndim = 1, v.ndim = 0``, and ``u.ndim = 0, v.ndim = 1`` - work. Examples -------- .. math:: \\frac{3x^2 + 5x + 2}{2x + 1} = 1.5x + 1.75, remainder 0.25 >>> x = np.array([3.0, 5.0, 2.0]) >>> y = np.array([2.0, 1.0]) >>> np.polydiv(x, y) (array([ 1.5 , 1.75]), array([ 0.25])) """ truepoly = (isinstance(u, poly1d) or isinstance(u, poly1d)) u = atleast_1d(u) + 0.0 v = atleast_1d(v) + 0.0 # w has the common type w = u[0] + v[0] m = len(u) - 1 n = len(v) - 1 scale = 1. / v[0] q = NX.zeros((max(m - n + 1, 1),), w.dtype) r = u.copy() for k in range(0, m-n+1): d = scale * r[k] q[k] = d r[k:k+n+1] -= d*v while NX.allclose(r[0], 0, rtol=1e-14) and (r.shape[-1] > 1): r = r[1:] if truepoly: return poly1d(q), poly1d(r) return q, r _poly_mat = re.compile(r"[*][*]([0-9]*)") def _raise_power(astr, wrap=70): n = 0 line1 = '' line2 = '' output = ' ' while True: mat = _poly_mat.search(astr, n) if mat is None: break span = mat.span() power = mat.groups()[0] partstr = astr[n:span[0]] n = span[1] toadd2 = partstr + ' '*(len(power)-1) toadd1 = ' '*(len(partstr)-1) + power if ((len(line2) + len(toadd2) > wrap) or (len(line1) + len(toadd1) > wrap)): output += line1 + "\n" + line2 + "\n " line1 = toadd1 line2 = toadd2 else: line2 += partstr + ' '*(len(power)-1) line1 += ' '*(len(partstr)-1) + power output += line1 + "\n" + line2 return output + astr[n:] class poly1d(object): """ A one-dimensional polynomial class. A convenience class, used to encapsulate "natural" operations on polynomials so that said operations may take on their customary form in code (see Examples). Parameters ---------- c_or_r : array_like The polynomial's coefficients, in decreasing powers, or if the value of the second parameter is True, the polynomial's roots (values where the polynomial evaluates to 0). 
For example, ``poly1d([1, 2, 3])`` returns an object that represents :math:`x^2 + 2x + 3`, whereas ``poly1d([1, 2, 3], True)`` returns one that represents :math:`(x-1)(x-2)(x-3) = x^3 - 6x^2 + 11x -6`. r : bool, optional If True, `c_or_r` specifies the polynomial's roots; the default is False. variable : str, optional Changes the variable used when printing `p` from `x` to `variable` (see Examples). Examples -------- Construct the polynomial :math:`x^2 + 2x + 3`: >>> p = np.poly1d([1, 2, 3]) >>> print np.poly1d(p) 2 1 x + 2 x + 3 Evaluate the polynomial at :math:`x = 0.5`: >>> p(0.5) 4.25 Find the roots: >>> p.r array([-1.+1.41421356j, -1.-1.41421356j]) >>> p(p.r) array([ -4.44089210e-16+0.j, -4.44089210e-16+0.j]) These numbers in the previous line represent (0, 0) to machine precision Show the coefficients: >>> p.c array([1, 2, 3]) Display the order (the leading zero-coefficients are removed): >>> p.order 2 Show the coefficient of the k-th power in the polynomial (which is equivalent to ``p.c[-(i+1)]``): >>> p[1] 2 Polynomials can be added, subtracted, multiplied, and divided (returns quotient and remainder): >>> p * p poly1d([ 1, 4, 10, 12, 9]) >>> (p**3 + 4) / p (poly1d([ 1., 4., 10., 12., 9.]), poly1d([ 4.])) ``asarray(p)`` gives the coefficient array, so polynomials can be used in all functions that accept arrays: >>> p**2 # square of polynomial poly1d([ 1, 4, 10, 12, 9]) >>> np.square(p) # square of individual coefficients array([1, 4, 9]) The variable used in the string representation of `p` can be modified, using the `variable` parameter: >>> p = np.poly1d([1,2,3], variable='z') >>> print p 2 1 z + 2 z + 3 Construct a polynomial from its roots: >>> np.poly1d([1, 2], True) poly1d([ 1, -3, 2]) This is the same polynomial as obtained by: >>> np.poly1d([1, -1]) * np.poly1d([1, -2]) poly1d([ 1, -3, 2]) """ coeffs = None order = None variable = None __hash__ = None def __init__(self, c_or_r, r=0, variable=None): if isinstance(c_or_r, poly1d): for key in 
c_or_r.__dict__.keys(): self.__dict__[key] = c_or_r.__dict__[key] if variable is not None: self.__dict__['variable'] = variable return if r: c_or_r = poly(c_or_r) c_or_r = atleast_1d(c_or_r) if len(c_or_r.shape) > 1: raise ValueError("Polynomial must be 1d only.") c_or_r = trim_zeros(c_or_r, trim='f') if len(c_or_r) == 0: c_or_r = NX.array([0.]) self.__dict__['coeffs'] = c_or_r self.__dict__['order'] = len(c_or_r) - 1 if variable is None: variable = 'x' self.__dict__['variable'] = variable def __array__(self, t=None): if t: return NX.asarray(self.coeffs, t) else: return NX.asarray(self.coeffs) def __repr__(self): vals = repr(self.coeffs) vals = vals[6:-1] return "poly1d(%s)" % vals def __len__(self): return self.order def __str__(self): thestr = "0" var = self.variable # Remove leading zeros coeffs = self.coeffs[NX.logical_or.accumulate(self.coeffs != 0)] N = len(coeffs)-1 def fmt_float(q): s = '%.4g' % q if s.endswith('.0000'): s = s[:-5] return s for k in range(len(coeffs)): if not iscomplex(coeffs[k]): coefstr = fmt_float(real(coeffs[k])) elif real(coeffs[k]) == 0: coefstr = '%sj' % fmt_float(imag(coeffs[k])) else: coefstr = '(%s + %sj)' % (fmt_float(real(coeffs[k])), fmt_float(imag(coeffs[k]))) power = (N-k) if power == 0: if coefstr != '0': newstr = '%s' % (coefstr,) else: if k == 0: newstr = '0' else: newstr = '' elif power == 1: if coefstr == '0': newstr = '' elif coefstr == 'b': newstr = var else: newstr = '%s %s' % (coefstr, var) else: if coefstr == '0': newstr = '' elif coefstr == 'b': newstr = '%s**%d' % (var, power,) else: newstr = '%s %s**%d' % (coefstr, var, power) if k > 0: if newstr != '': if newstr.startswith('-'): thestr = "%s - %s" % (thestr, newstr[1:]) else: thestr = "%s + %s" % (thestr, newstr) else: thestr = newstr return _raise_power(thestr) def __call__(self, val): return polyval(self.coeffs, val) def __neg__(self): return poly1d(-self.coeffs) def __pos__(self): return self def __mul__(self, other): if isscalar(other): return 
poly1d(self.coeffs * other) else: other = poly1d(other) return poly1d(polymul(self.coeffs, other.coeffs)) def __rmul__(self, other): if isscalar(other): return poly1d(other * self.coeffs) else: other = poly1d(other) return poly1d(polymul(self.coeffs, other.coeffs)) def __add__(self, other): other = poly1d(other) return poly1d(polyadd(self.coeffs, other.coeffs)) def __radd__(self, other): other = poly1d(other) return poly1d(polyadd(self.coeffs, other.coeffs)) def __pow__(self, val): if not isscalar(val) or int(val) != val or val < 0: raise ValueError("Power to non-negative integers only.") res = [1] for _ in range(val): res = polymul(self.coeffs, res) return poly1d(res) def __sub__(self, other): other = poly1d(other) return poly1d(polysub(self.coeffs, other.coeffs)) def __rsub__(self, other): other = poly1d(other) return poly1d(polysub(other.coeffs, self.coeffs)) def __div__(self, other): if isscalar(other): return poly1d(self.coeffs/other) else: other = poly1d(other) return polydiv(self, other) __truediv__ = __div__ def __rdiv__(self, other): if isscalar(other): return poly1d(other/self.coeffs) else: other = poly1d(other) return polydiv(other, self) __rtruediv__ = __rdiv__ def __eq__(self, other): if self.coeffs.shape != other.coeffs.shape: return False return (self.coeffs == other.coeffs).all() def __ne__(self, other): return not self.__eq__(other) def __setattr__(self, key, val): raise ValueError("Attributes cannot be changed this way.") def __getattr__(self, key): if key in ['r', 'roots']: return roots(self.coeffs) elif key in ['c', 'coef', 'coefficients']: return self.coeffs elif key in ['o']: return self.order else: try: return self.__dict__[key] except KeyError: raise AttributeError( "'%s' has no attribute '%s'" % (self.__class__, key)) def __getitem__(self, val): ind = self.order - val if val > self.order: return 0 if val < 0: return 0 return self.coeffs[ind] def __setitem__(self, key, val): ind = self.order - key if key < 0: raise ValueError("Does not 
support negative powers.") if key > self.order: zr = NX.zeros(key-self.order, self.coeffs.dtype) self.__dict__['coeffs'] = NX.concatenate((zr, self.coeffs)) self.__dict__['order'] = key ind = 0 self.__dict__['coeffs'][ind] = val return def __iter__(self): return iter(self.coeffs) def integ(self, m=1, k=0): """ Return an antiderivative (indefinite integral) of this polynomial. Refer to `polyint` for full documentation. See Also -------- polyint : equivalent function """ return poly1d(polyint(self.coeffs, m=m, k=k)) def deriv(self, m=1): """ Return a derivative of this polynomial. Refer to `polyder` for full documentation. See Also -------- polyder : equivalent function """ return poly1d(polyder(self.coeffs, m=m)) # Stuff to do on module import warnings.simplefilter('always', RankWarning)
PRIMEDesigner15/PRIMEDesigner15
refs/heads/master
dependencies/Lib/test/unittests/test_codeop.py
118
""" Test cases for codeop.py Nick Mathewson """ import unittest from test.support import run_unittest, is_jython from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT import io if is_jython: import sys def unify_callables(d): for n,v in d.items(): if hasattr(v, '__call__'): d[n] = True return d class CodeopTests(unittest.TestCase): def assertValid(self, str, symbol='single'): '''succeed iff str is a valid piece of code''' if is_jython: code = compile_command(str, "<input>", symbol) self.assertTrue(code) if symbol == "single": d,r = {},{} saved_stdout = sys.stdout sys.stdout = io.StringIO() try: exec(code, d) exec(compile(str,"<input>","single"), r) finally: sys.stdout = saved_stdout elif symbol == 'eval': ctx = {'a': 2} d = { 'value': eval(code,ctx) } r = { 'value': eval(str,ctx) } self.assertEqual(unify_callables(r),unify_callables(d)) else: expected = compile(str, "<input>", symbol, PyCF_DONT_IMPLY_DEDENT) self.assertEqual(compile_command(str, "<input>", symbol), expected) def assertIncomplete(self, str, symbol='single'): '''succeed iff str is the start of a valid piece of code''' self.assertEqual(compile_command(str, symbol=symbol), None) def assertInvalid(self, str, symbol='single', is_syntax=1): '''succeed iff str is the start of an invalid piece of code''' try: compile_command(str,symbol=symbol) self.fail("No exception raised for invalid code") except SyntaxError: self.assertTrue(is_syntax) except OverflowError: self.assertTrue(not is_syntax) def test_valid(self): av = self.assertValid # special case if not is_jython: self.assertEqual(compile_command(""), compile("pass", "<input>", 'single', PyCF_DONT_IMPLY_DEDENT)) self.assertEqual(compile_command("\n"), compile("pass", "<input>", 'single', PyCF_DONT_IMPLY_DEDENT)) else: av("") av("\n") av("a = 1") av("\na = 1") av("a = 1\n") av("a = 1\n\n") av("\n\na = 1\n\n") av("def x():\n pass\n") av("if 1:\n pass\n") av("\n\nif 1: pass\n") av("\n\nif 1: pass\n\n") av("def x():\n\n pass\n") av("def x():\n pass\n 
\n") av("def x():\n pass\n \n") av("pass\n") av("3**3\n") av("if 9==3:\n pass\nelse:\n pass\n") av("if 1:\n pass\n if 1:\n pass\n else:\n pass\n") av("#a\n#b\na = 3\n") av("#a\n\n \na=3\n") av("a=3\n\n") av("a = 9+ \\\n3") av("3**3","eval") av("(lambda z: \n z**3)","eval") av("9+ \\\n3","eval") av("9+ \\\n3\n","eval") av("\n\na**3","eval") av("\n \na**3","eval") av("#a\n#b\na**3","eval") av("\n\na = 1\n\n") av("\n\nif 1: a=1\n\n") av("if 1:\n pass\n if 1:\n pass\n else:\n pass\n") av("#a\n\n \na=3\n\n") av("\n\na**3","eval") av("\n \na**3","eval") av("#a\n#b\na**3","eval") av("def f():\n try: pass\n finally: [x for x in (1,2)]\n") av("def f():\n pass\n#foo\n") av("@a.b.c\ndef f():\n pass\n") def test_incomplete(self): ai = self.assertIncomplete ai("(a **") ai("(a,b,") ai("(a,b,(") ai("(a,b,(") ai("a = (") ai("a = {") ai("b + {") ai("if 9==3:\n pass\nelse:") ai("if 9==3:\n pass\nelse:\n") ai("if 9==3:\n pass\nelse:\n pass") ai("if 1:") ai("if 1:\n") ai("if 1:\n pass\n if 1:\n pass\n else:") ai("if 1:\n pass\n if 1:\n pass\n else:\n") ai("if 1:\n pass\n if 1:\n pass\n else:\n pass") ai("def x():") ai("def x():\n") ai("def x():\n\n") ai("def x():\n pass") ai("def x():\n pass\n ") ai("def x():\n pass\n ") ai("\n\ndef x():\n pass") ai("a = 9+ \\") ai("a = 'a\\") ai("a = '''xy") ai("","eval") ai("\n","eval") ai("(","eval") ai("(\n\n\n","eval") ai("(9+","eval") ai("9+ \\","eval") ai("lambda z: \\","eval") ai("if True:\n if True:\n if True: \n") ai("@a(") ai("@a(b") ai("@a(b,") ai("@a(b,c") ai("@a(b,c,") ai("from a import (") ai("from a import (b") ai("from a import (b,") ai("from a import (b,c") ai("from a import (b,c,") ai("["); ai("[a"); ai("[a,"); ai("[a,b"); ai("[a,b,"); ai("{"); ai("{a"); ai("{a:"); ai("{a:b"); ai("{a:b,"); ai("{a:b,c"); ai("{a:b,c:"); ai("{a:b,c:d"); ai("{a:b,c:d,"); ai("a(") ai("a(b") ai("a(b,") ai("a(b,c") ai("a(b,c,") ai("a[") ai("a[b") ai("a[b,") ai("a[b:") ai("a[b:c") ai("a[b:c:") ai("a[b:c:d") ai("def a(") ai("def a(b") ai("def a(b,") ai("def 
a(b,c") ai("def a(b,c,") ai("(") ai("(a") ai("(a,") ai("(a,b") ai("(a,b,") ai("if a:\n pass\nelif b:") ai("if a:\n pass\nelif b:\n pass\nelse:") ai("while a:") ai("while a:\n pass\nelse:") ai("for a in b:") ai("for a in b:\n pass\nelse:") ai("try:") ai("try:\n pass\nexcept:") ai("try:\n pass\nfinally:") ai("try:\n pass\nexcept:\n pass\nfinally:") ai("with a:") ai("with a as b:") ai("class a:") ai("class a(") ai("class a(b") ai("class a(b,") ai("class a():") ai("[x for") ai("[x for x in") ai("[x for x in (") ai("(x for") ai("(x for x in") ai("(x for x in (") def test_invalid(self): ai = self.assertInvalid ai("a b") ai("a @") ai("a b @") ai("a ** @") ai("a = ") ai("a = 9 +") ai("def x():\n\npass\n") ai("\n\n if 1: pass\n\npass") ai("a = 9+ \\\n") ai("a = 'a\\ ") ai("a = 'a\\\n") ai("a = 1","eval") ai("a = (","eval") ai("]","eval") ai("())","eval") ai("[}","eval") ai("9+","eval") ai("lambda z:","eval") ai("a b","eval") ai("return 2.3") ai("if (a == 1 and b = 2): pass") ai("del 1") ai("del ()") ai("del (1,)") ai("del [1]") ai("del '1'") ai("[i for i in range(10)] = (1, 2, 3)") def test_filename(self): self.assertEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "abc", 'single').co_filename) self.assertNotEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "def", 'single').co_filename) def test_main(): run_unittest(CodeopTests) if __name__ == "__main__": test_main()
palerdot/calibre
refs/heads/master
src/calibre/gui2/convert/pdf_input.py
24
# -*- coding: utf-8 -*-

__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'

from calibre.gui2.convert.pdf_input_ui import Ui_Form
from calibre.gui2.convert import Widget, QDoubleSpinBox


class PluginWidget(Widget, Ui_Form):
    """Conversion-options widget for PDF input files."""

    TITLE = _('PDF Input')
    HELP = _('Options specific to')+' PDF '+_('input')
    COMMIT_NAME = 'pdf_input'
    ICON = I('mimetypes/pdf.png')

    def __init__(self, parent, get_option, get_help, db=None, book_id=None):
        # Register the two PDF-specific conversion options this widget owns.
        Widget.__init__(self, parent, ['no_images', 'unwrap_factor'])
        self.db = db
        self.book_id = book_id
        self.initialize_options(get_option, get_help, db, book_id)

    def set_value_handler(self, g, val):
        # Only a missing (None) unwrap factor shown in a double spin box is
        # handled here: display it as 0.0 and claim the event by returning
        # True. Everything else falls through (implicit None) to the base
        # class handling.
        if val is not None or not isinstance(g, QDoubleSpinBox):
            return
        g.setValue(0.0)
        return True
oorestisime/debsources
refs/heads/master
debsources/plugins/hook_sloccount.py
5
# Copyright (C) 2013-2015 The Debsources developers <info@sources.debian.net>.
# See the AUTHORS file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=AUTHORS;hb=HEAD
#
# This file is part of Debsources. Debsources is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.  For more information
# see the COPYING file at the top-level directory of this distribution and at
# https://anonscm.debian.org/gitweb/?p=qa/debsources.git;a=blob;f=COPYING;hb=HEAD

from __future__ import absolute_import

import logging
import os
import re
import subprocess

import six

from debsources import db_storage
from debsources.models import SlocCount

# Plugin configuration, injected by init_plugin().
conf = None

SLOCCOUNT_FLAGS = ['--addlangall']

MY_NAME = 'sloccount'
MY_EXT = '.' + MY_NAME


def slocfile_path(pkgdir):
    """Return the path of the cached sloccount output file for *pkgdir*."""
    return pkgdir + MY_EXT


def grep(args):
    """Boolean wrapper around GREP(1): True iff grep exits with a match."""
    with open(os.devnull, 'w') as null:
        rc = subprocess.call(['grep'] + args, stdout=null, stderr=null)
    return rc == 0


# Raw strings: '\s' and '\d' are regex escapes, not Python string escapes.
SLOC_TBL_HEADER = re.compile(r'^Totals grouped by language')
SLOC_TBL_FOOTER = re.compile(r'^\s*$')
SLOC_TBL_LINE = re.compile(r'^(?P<lang>[^:]+):\s+(?P<locs>\d+)')


def parse_sloccount(path):
    """Parse SLOCCOUNT(1) output and return a mapping from languages to locs.

    Language names are the same returned by sloccount, normalized to
    lowercase.
    """
    slocs = {}
    in_table = False
    with open(path) as sloccount:
        for line in sloccount:
            if in_table:
                if SLOC_TBL_FOOTER.match(line):
                    break  # a blank line terminates the totals table
                m = SLOC_TBL_LINE.match(line)
                if m:
                    slocs[m.group('lang')] = int(m.group('locs'))
            elif SLOC_TBL_HEADER.match(line):
                in_table = True
    return slocs


def add_package(session, pkg, pkgdir, file_table):
    """add-package hook: run sloccount on *pkgdir* and store its results.

    With the 'hooks.fs' backend the raw sloccount output is cached next to
    the package directory; with 'hooks.db' the parsed per-language counts
    are stored as SlocCount rows.
    """
    global conf
    logging.debug('add-package %s', pkg)  # lazy %-args: no work if disabled
    slocfile = slocfile_path(pkgdir)
    slocfile_tmp = slocfile + '.new'

    if 'hooks.fs' in conf['backends']:
        if not os.path.exists(slocfile):  # run sloccount only if needed
            try:
                cmd = ['sloccount'] + SLOCCOUNT_FLAGS + [pkgdir]
                with open(slocfile_tmp, 'w') as out:
                    subprocess.check_call(cmd, stdout=out,
                                          stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                if not grep(['^SLOC total is zero,', slocfile_tmp]):
                    # rationale: sloccount fails when it can't find source code
                    raise
            finally:
                os.rename(slocfile_tmp, slocfile)

    if 'hooks.db' in conf['backends']:
        slocs = parse_sloccount(slocfile)
        db_package = db_storage.lookup_package(session, pkg['package'],
                                               pkg['version'])
        if not session.query(SlocCount).filter_by(package_id=db_package.id)\
                .first():
            # ASSUMPTION: if *a* loc count of this package has already been
            # added to the db in the past, then *all* of them have, as
            # additions are part of the same transaction
            for (lang, locs) in six.iteritems(slocs):
                sloccount = SlocCount(db_package, lang, locs)
                session.add(sloccount)


def rm_package(session, pkg, pkgdir, file_table):
    """rm-package hook: drop the cached sloccount file and/or its db rows."""
    global conf
    logging.debug('rm-package %s', pkg)

    if 'hooks.fs' in conf['backends']:
        slocfile = slocfile_path(pkgdir)
        if os.path.exists(slocfile):
            os.unlink(slocfile)

    if 'hooks.db' in conf['backends']:
        db_package = db_storage.lookup_package(session, pkg['package'],
                                               pkg['version'])
        session.query(SlocCount) \
               .filter_by(package_id=db_package.id) \
               .delete()


def init_plugin(debsources):
    """Register the sloccount hooks and file extension with Debsources."""
    global conf
    conf = debsources['config']
    debsources['subscribe']('add-package', add_package, title='sloccount')
    debsources['subscribe']('rm-package', rm_package, title='sloccount')
    debsources['declare_ext'](MY_EXT, MY_NAME)
shipci/boto
refs/heads/develop
boto/mashups/server.py
153
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
""" High-level abstraction of an EC2 server """ import boto import boto.utils from boto.compat import StringIO from boto.mashups.iobject import IObject from boto.pyami.config import Config, BotoConfigPath from boto.mashups.interactive import interactive_shell from boto.sdb.db.model import Model from boto.sdb.db.property import StringProperty import os class ServerSet(list): def __getattr__(self, name): results = [] is_callable = False for server in self: try: val = getattr(server, name) if callable(val): is_callable = True results.append(val) except: results.append(None) if is_callable: self.map_list = results return self.map return results def map(self, *args): results = [] for fn in self.map_list: results.append(fn(*args)) return results class Server(Model): @property def ec2(self): if self._ec2 is None: self._ec2 = boto.connect_ec2() return self._ec2 @classmethod def Inventory(cls): """ Returns a list of Server instances, one for each Server object persisted in the db """ l = ServerSet() rs = cls.find() for server in rs: l.append(server) return l @classmethod def Register(cls, name, instance_id, description=''): s = cls() s.name = name s.instance_id = instance_id s.description = description s.save() return s def __init__(self, id=None, **kw): super(Server, self).__init__(id, **kw) self._reservation = None self._instance = None self._ssh_client = None self._pkey = None self._config = None self._ec2 = None name = StringProperty(unique=True, verbose_name="Name") instance_id = StringProperty(verbose_name="Instance ID") config_uri = StringProperty() ami_id = StringProperty(verbose_name="AMI ID") zone = StringProperty(verbose_name="Availability Zone") security_group = StringProperty(verbose_name="Security Group", default="default") key_name = StringProperty(verbose_name="Key Name") elastic_ip = StringProperty(verbose_name="Elastic IP") instance_type = StringProperty(verbose_name="Instance Type") description = StringProperty(verbose_name="Description") log = 
StringProperty() def setReadOnly(self, value): raise AttributeError def getInstance(self): if not self._instance: if self.instance_id: try: rs = self.ec2.get_all_reservations([self.instance_id]) except: return None if len(rs) > 0: self._reservation = rs[0] self._instance = self._reservation.instances[0] return self._instance instance = property(getInstance, setReadOnly, None, 'The Instance for the server') def getAMI(self): if self.instance: return self.instance.image_id ami = property(getAMI, setReadOnly, None, 'The AMI for the server') def getStatus(self): if self.instance: self.instance.update() return self.instance.state status = property(getStatus, setReadOnly, None, 'The status of the server') def getHostname(self): if self.instance: return self.instance.public_dns_name hostname = property(getHostname, setReadOnly, None, 'The public DNS name of the server') def getPrivateHostname(self): if self.instance: return self.instance.private_dns_name private_hostname = property(getPrivateHostname, setReadOnly, None, 'The private DNS name of the server') def getLaunchTime(self): if self.instance: return self.instance.launch_time launch_time = property(getLaunchTime, setReadOnly, None, 'The time the Server was started') def getConsoleOutput(self): if self.instance: return self.instance.get_console_output() console_output = property(getConsoleOutput, setReadOnly, None, 'Retrieve the console output for server') def getGroups(self): if self._reservation: return self._reservation.groups else: return None groups = property(getGroups, setReadOnly, None, 'The Security Groups controlling access to this server') def getConfig(self): if not self._config: remote_file = BotoConfigPath local_file = '%s.ini' % self.instance.id self.get_file(remote_file, local_file) self._config = Config(local_file) return self._config def setConfig(self, config): local_file = '%s.ini' % self.instance.id fp = open(local_file) config.write(fp) fp.close() self.put_file(local_file, BotoConfigPath) 
self._config = config config = property(getConfig, setConfig, None, 'The instance data for this server') def set_config(self, config): """ Set SDB based config """ self._config = config self._config.dump_to_sdb("botoConfigs", self.id) def load_config(self): self._config = Config(do_load=False) self._config.load_from_sdb("botoConfigs", self.id) def stop(self): if self.instance: self.instance.stop() def start(self): self.stop() ec2 = boto.connect_ec2() ami = ec2.get_all_images(image_ids = [str(self.ami_id)])[0] groups = ec2.get_all_security_groups(groupnames=[str(self.security_group)]) if not self._config: self.load_config() if not self._config.has_section("Credentials"): self._config.add_section("Credentials") self._config.set("Credentials", "aws_access_key_id", ec2.aws_access_key_id) self._config.set("Credentials", "aws_secret_access_key", ec2.aws_secret_access_key) if not self._config.has_section("Pyami"): self._config.add_section("Pyami") if self._manager.domain: self._config.set('Pyami', 'server_sdb_domain', self._manager.domain.name) self._config.set("Pyami", 'server_sdb_name', self.name) cfg = StringIO() self._config.write(cfg) cfg = cfg.getvalue() r = ami.run(min_count=1, max_count=1, key_name=self.key_name, security_groups = groups, instance_type = self.instance_type, placement = self.zone, user_data = cfg) i = r.instances[0] self.instance_id = i.id self.put() if self.elastic_ip: ec2.associate_address(self.instance_id, self.elastic_ip) def reboot(self): if self.instance: self.instance.reboot() def get_ssh_client(self, key_file=None, host_key_file='~/.ssh/known_hosts', uname='root'): import paramiko if not self.instance: print('No instance yet!') return if not self._ssh_client: if not key_file: iobject = IObject() key_file = iobject.get_filename('Path to OpenSSH Key file') self._pkey = paramiko.RSAKey.from_private_key_file(key_file) self._ssh_client = paramiko.SSHClient() self._ssh_client.load_system_host_keys() 
self._ssh_client.load_host_keys(os.path.expanduser(host_key_file)) self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self._ssh_client.connect(self.instance.public_dns_name, username=uname, pkey=self._pkey) return self._ssh_client def get_file(self, remotepath, localpath): ssh_client = self.get_ssh_client() sftp_client = ssh_client.open_sftp() sftp_client.get(remotepath, localpath) def put_file(self, localpath, remotepath): ssh_client = self.get_ssh_client() sftp_client = ssh_client.open_sftp() sftp_client.put(localpath, remotepath) def listdir(self, remotepath): ssh_client = self.get_ssh_client() sftp_client = ssh_client.open_sftp() return sftp_client.listdir(remotepath) def shell(self, key_file=None): ssh_client = self.get_ssh_client(key_file) channel = ssh_client.invoke_shell() interactive_shell(channel) def bundle_image(self, prefix, key_file, cert_file, size): print('bundling image...') print('\tcopying cert and pk over to /mnt directory on server') ssh_client = self.get_ssh_client() sftp_client = ssh_client.open_sftp() path, name = os.path.split(key_file) remote_key_file = '/mnt/%s' % name self.put_file(key_file, remote_key_file) path, name = os.path.split(cert_file) remote_cert_file = '/mnt/%s' % name self.put_file(cert_file, remote_cert_file) print('\tdeleting %s' % BotoConfigPath) # delete the metadata.ini file if it exists try: sftp_client.remove(BotoConfigPath) except: pass command = 'sudo ec2-bundle-vol ' command += '-c %s -k %s ' % (remote_cert_file, remote_key_file) command += '-u %s ' % self._reservation.owner_id command += '-p %s ' % prefix command += '-s %d ' % size command += '-d /mnt ' if self.instance.instance_type == 'm1.small' or self.instance_type == 'c1.medium': command += '-r i386' else: command += '-r x86_64' print('\t%s' % command) t = ssh_client.exec_command(command) response = t[1].read() print('\t%s' % response) print('\t%s' % t[2].read()) print('...complete!') def upload_bundle(self, bucket, prefix): 
print('uploading bundle...') command = 'ec2-upload-bundle ' command += '-m /mnt/%s.manifest.xml ' % prefix command += '-b %s ' % bucket command += '-a %s ' % self.ec2.aws_access_key_id command += '-s %s ' % self.ec2.aws_secret_access_key print('\t%s' % command) ssh_client = self.get_ssh_client() t = ssh_client.exec_command(command) response = t[1].read() print('\t%s' % response) print('\t%s' % t[2].read()) print('...complete!') def create_image(self, bucket=None, prefix=None, key_file=None, cert_file=None, size=None): iobject = IObject() if not bucket: bucket = iobject.get_string('Name of S3 bucket') if not prefix: prefix = iobject.get_string('Prefix for AMI file') if not key_file: key_file = iobject.get_filename('Path to RSA private key file') if not cert_file: cert_file = iobject.get_filename('Path to RSA public cert file') if not size: size = iobject.get_int('Size (in MB) of bundled image') self.bundle_image(prefix, key_file, cert_file, size) self.upload_bundle(bucket, prefix) print('registering image...') self.image_id = self.ec2.register_image('%s/%s.manifest.xml' % (bucket, prefix)) return self.image_id def attach_volume(self, volume, device="/dev/sdp"): """ Attach an EBS volume to this server :param volume: EBS Volume to attach :type volume: boto.ec2.volume.Volume :param device: Device to attach to (default to /dev/sdp) :type device: string """ if hasattr(volume, "id"): volume_id = volume.id else: volume_id = volume return self.ec2.attach_volume(volume_id=volume_id, instance_id=self.instance_id, device=device) def detach_volume(self, volume): """ Detach an EBS volume from this server :param volume: EBS Volume to detach :type volume: boto.ec2.volume.Volume """ if hasattr(volume, "id"): volume_id = volume.id else: volume_id = volume return self.ec2.detach_volume(volume_id=volume_id, instance_id=self.instance_id) def install_package(self, package_name): print('installing %s...' 
% package_name) command = 'yum -y install %s' % package_name print('\t%s' % command) ssh_client = self.get_ssh_client() t = ssh_client.exec_command(command) response = t[1].read() print('\t%s' % response) print('\t%s' % t[2].read()) print('...complete!')
caioserra/apiAdwords
refs/heads/master
examples/adspygoogle/dfp/v201208/get_orders_by_statement.py
4
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This code example gets all orders for a given advertiser.

The statement retrieves up to the maximum page size limit of 500. To create
orders, run create_orders.py. To determine which companies are advertisers,
run get_companies_by_statement.py.

NOTE: Python 2 script (uses print statements); performs live network calls
against the DFP API when run.
"""

__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'

# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))

# Import appropriate classes from the client library.
from adspygoogle import DfpClient

# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))

# Initialize appropriate service.
order_service = client.GetService('OrderService', version='v201208')

# Set id of the advertiser (company) to get orders for.
advertiser_id = 'INSERT_ADVERTISER_COMPANY_ID_HERE'

# Create statement object to get all orders for a given advertiser.
# The ':advertiserId' bind variable in the PQL query below is filled in
# from this values list.
values = [{
    'key': 'advertiserId',
    'value': {
        'xsi_type': 'NumberValue',
        'value': advertiser_id
    }
}]
filter_statement = {'query': 'WHERE advertiserId = :advertiserId LIMIT 500',
                    'values': values}

# Get orders by statement.
# NOTE(review): the call's result is indexed with [0] — the client library
# appears to wrap the response in a list; confirm against adspygoogle docs.
response = order_service.GetOrdersByStatement(filter_statement)[0]
orders = []
if 'results' in response:
  orders = response['results']

# Display results.
for order in orders:
  print ('Order with id \'%s\' name \'%s\' was found.'
         % (order['id'], order['name']))
print
print 'Number of results found: %s' % len(orders)
JoaoVasques/aws-devtool
refs/heads/master
eb/macosx/python3/lib/aws/requests/packages/oauthlib/common.py
74
# -*- coding: utf-8 -*- from __future__ import absolute_import """ oauthlib.common ~~~~~~~~~~~~~~ This module provides data structures and utilities common to all implementations of OAuth. """ import random import re import string import time import urllib import urlparse UNICODE_ASCII_CHARACTER_SET = (string.ascii_letters.decode('ascii') + string.digits.decode('ascii')) always_safe = (u'ABCDEFGHIJKLMNOPQRSTUVWXYZ' u'abcdefghijklmnopqrstuvwxyz' u'0123456789' u'_.-') def quote(s, safe=u'/'): encoded = s.encode("utf-8") quoted = urllib.quote(encoded, safe) return quoted.decode("utf-8") def unquote(s): encoded = s.encode("utf-8") unquoted = urllib.unquote(encoded) return unquoted.decode("utf-8") def urlencode(params): utf8_params = encode_params_utf8(params) urlencoded = urllib.urlencode(utf8_params) return urlencoded.decode("utf-8") def encode_params_utf8(params): """Ensures that all parameters in a list of 2-element tuples are encoded to bytestrings using UTF-8 """ encoded = [] for k, v in params: encoded.append(( k.encode('utf-8') if isinstance(k, unicode) else k, v.encode('utf-8') if isinstance(v, unicode) else v)) return encoded def decode_params_utf8(params): """Ensures that all parameters in a list of 2-element tuples are decoded to unicode using UTF-8. """ decoded = [] for k, v in params: decoded.append(( k.decode('utf-8') if isinstance(k, str) else k, v.decode('utf-8') if isinstance(v, str) else v)) return decoded urlencoded = set(always_safe) | set(u'=&;%+~') def urldecode(query): """Decode a query string in x-www-form-urlencoded format into a sequence of two-element tuples. Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce correct formatting of the query string by validation. If validation fails a ValueError will be raised. urllib.parse_qsl will only raise errors if any of name-value pairs omits the equals sign. 
""" # Check if query contains invalid characters if query and not set(query) <= urlencoded: raise ValueError('Invalid characters in query string.') # Check for correctly hex encoded values using a regular expression # All encoded values begin with % followed by two hex characters # correct = %00, %A0, %0A, %FF # invalid = %G0, %5H, %PO invalid_hex = u'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]' if len(re.findall(invalid_hex, query)): raise ValueError('Invalid hex encoding in query string.') query = query.decode('utf-8') if isinstance(query, str) else query # We want to allow queries such as "c2" whereas urlparse.parse_qsl # with the strict_parsing flag will not. params = urlparse.parse_qsl(query, keep_blank_values=True) # unicode all the things return decode_params_utf8(params) def extract_params(raw): """Extract parameters and return them as a list of 2-tuples. Will successfully extract parameters from urlencoded query strings, dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an empty list of parameters. Any other input will result in a return value of None. """ if isinstance(raw, basestring): try: params = urldecode(raw) except ValueError: params = None elif hasattr(raw, '__iter__'): try: dict(raw) except ValueError: params = None except TypeError: params = None else: params = list(raw.items() if isinstance(raw, dict) else raw) params = decode_params_utf8(params) else: params = None return params def generate_nonce(): """Generate pseudorandom nonce that is unlikely to repeat. Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. Per `section 3.2.1`_ of the MAC Access Authentication spec. A random 64-bit number is appended to the epoch timestamp for both randomness and to decrease the likelihood of collisions. .. _`section 3.2.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 .. 
_`section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3 """ return unicode(unicode(random.getrandbits(64)) + generate_timestamp()) def generate_timestamp(): """Get seconds since epoch (UTC). Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. Per `section 3.2.1`_ of the MAC Access Authentication spec. .. _`section 3.2.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 .. _`section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3 """ return unicode(int(time.time())) def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): """Generates a non-guessable OAuth token OAuth (1 and 2) does not specify the format of tokens except that they should be strings of random characters. Tokens should not be guessable and entropy when generating the random characters is important. Which is why SystemRandom is used instead of the default random.choice method. """ rand = random.SystemRandom() return u''.join(rand.choice(chars) for x in range(length)) def add_params_to_qs(query, params): """Extend a query with a list of two-tuples.""" queryparams = urlparse.parse_qsl(query, keep_blank_values=True) queryparams.extend(params) return urlencode(queryparams) def add_params_to_uri(uri, params): """Add a list of two-tuples to the uri query components.""" sch, net, path, par, query, fra = urlparse.urlparse(uri) query = add_params_to_qs(query, params) return urlparse.urlunparse((sch, net, path, par, query, fra)) def safe_string_equals(a, b): """ Near-constant time string comparison. Used in order to avoid timing attacks on sensitive information such as secret keys during request verification (`rootLabs`_). .. _`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/ """ if len(a) != len(b): return False result = 0 for x, y in zip(a, b): result |= ord(x) ^ ord(y) return result == 0 class Request(object): """A malleable representation of a signable HTTP request. 
Body argument may contain any data, but parameters will only be decoded if they are one of: * urlencoded query string * dict * list of 2-tuples Anything else will be treated as raw body data to be passed through unmolested. """ def __init__(self, uri, http_method=u'GET', body=None, headers=None): self.uri = uri self.http_method = http_method self.headers = headers or {} self.body = body self.decoded_body = extract_params(body) self.oauth_params = [] @property def uri_query(self): return urlparse.urlparse(self.uri).query @property def uri_query_params(self): return urlparse.parse_qsl(self.uri_query, keep_blank_values=True, strict_parsing=True)
akretion/l10n-belgium
refs/heads/8.0
account_companyweb/model/__init__.py
6
# -*- coding: utf-8 -*- # ############################################################################## # # Authors: Adrien Peiffer # Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import res_config from . import res_partner
heke123/chromium-crosswalk
refs/heads/master
chrome/common/extensions/docs/server2/permissions_data_source.py
37
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from itertools import ifilter
from operator import itemgetter

from data_source import DataSource
from extensions_paths import PRIVATE_TEMPLATES
from future import Future
from platform_util import GetPlatforms


def _ListifyPermissions(permissions):
  '''Filter out any permissions that do not have a description or with a
  name that ends with Private then sort permissions features by name into a
  list.
  '''
  def filter_permissions(perm):
    # Keep only documented, public permissions; *Private APIs are internal.
    return 'description' in perm and not perm['name'].endswith('Private')
  return sorted(
      ifilter(filter_permissions, permissions.itervalues()),
      key=itemgetter('name'))


def _AddDependencyDescriptions(permissions, api_features):
  '''Use |api_features| to determine the dependencies APIs have on
  permissions. Add descriptions to |permissions| based on those dependencies.
  '''
  for name, permission in permissions.iteritems():
    # Don't overwrite the description created by expanding a partial template.
    if 'partial' in permission:
      continue
    # A permission gets the generic description iff at least one API feature
    # of the same name declares a 'permission:' dependency on it.
    has_deps = False
    if name in api_features:
      for dependency in api_features[name].get('dependencies', ()):
        if dependency.startswith('permission:'):
          has_deps = True
    if has_deps:
      permission['partial'] = 'permissions/generic_description.html'


class PermissionsDataSource(DataSource):
  '''Load and format permissions features to be used by templates.
  '''
  def __init__(self, server_instance, request):
    # NOTE(review): |request| is unused here; presumably required by the
    # DataSource constructor interface — confirm against data_source.py.
    self._platform_bundle = server_instance.platform_bundle
    self._object_store = server_instance.object_store_creator.Create(
        PermissionsDataSource)
    self._template_cache = server_instance.compiled_fs_factory.ForTemplates(
        server_instance.host_file_system_provider.GetMaster())

  def _CreatePermissionsDataForPlatform(self, platform):
    '''Return a Future resolving to the sorted permissions list for
    |platform|, with dependency-based and partial-template descriptions
    expanded.
    '''
    # Kick off both feature fetches before building the callback so the
    # Futures can make progress concurrently.
    features_bundle = self._platform_bundle.GetFeaturesBundle(platform)
    api_features_future = features_bundle.GetAPIFeatures()
    permission_features_future = features_bundle.GetPermissionFeatures()
    def resolve():
      api_features = api_features_future.Get()
      permission_features = permission_features_future.Get()
      _AddDependencyDescriptions(permission_features, api_features)
      # Turn partial templates into descriptions, ensure anchors are set.
      for permission in permission_features.values():
        if not 'anchor' in permission:
          permission['anchor'] = permission['name']
        if 'partial' in permission:
          permission['description'] = self._template_cache.GetFromFile(
              PRIVATE_TEMPLATES + permission['partial']).Get()
          # Once expanded, the partial reference is no longer needed.
          del permission['partial']
      return _ListifyPermissions(permission_features)
    return Future(callback=resolve)

  def _CreatePermissionsData(self):
    '''Return a Future resolving to a dict keyed 'declare_<platform>',
    one entry per supported platform.
    '''
    permissions_data_futures = dict(
        (platform, self._CreatePermissionsDataForPlatform(platform))
        for platform in GetPlatforms())
    def resolve():
      return dict(('declare_' + platform, future.Get())
                  for platform, future in permissions_data_futures.iteritems())
    return Future(callback=resolve)

  def _GetCachedPermissionsData(self):
    # Lazily compute and memoize via the object store; the cache is only
    # refreshed explicitly through Refresh().
    data = self._object_store.Get('permissions_data').Get()
    if data is None:
      data = self._CreatePermissionsData().Get()
      self._object_store.Set('permissions_data', data)
    return data

  def get(self, key):
    # DataSource template-lookup entry point.
    return self._GetCachedPermissionsData().get(key)

  def Refresh(self):
    # Recompute from scratch; note this does not itself write the object
    # store — the fresh Future's value is what callers persist.
    return self._CreatePermissionsData()
liboyin/algo-prac
refs/heads/master
mathematics/roman_nums.py
1
from bisect import bisect_right

# Decimal values and their Roman-numeral tokens, in ascending order.
# The "subtractive" pairs (4 -> IV, 9 -> IX, 40 -> XL, ...) are included as
# first-class tokens so a simple greedy scan produces canonical numerals.
N = 13  # up to 3999
DEC = [1, 4, 5, 9, 10, 40, 50, 90, 100, 400, 500, 900, 1000]
ROM = ['I', 'IV', 'V', 'IX', 'X', 'XL', 'L', 'XC', 'C', 'CD', 'D', 'CM', 'M']


def dec_to_roman(x):
    """Convert an integer in [0, 3999] to a canonical Roman numeral.

    Returns '' for 0 (the Romans had no zero).
    """
    assert 0 <= x <= 3999
    r = []
    while x > 0:
        # Index of the largest token value <= x.  Equivalent to the old
        # bisect_left plus two special cases, but needs no fix-ups.
        i = bisect_right(DEC, x) - 1
        r.append(ROM[i])
        x -= DEC[i]
    return ''.join(r)


def roman_to_dec(xs):
    """Convert a canonical Roman numeral (possibly '') back to an integer.

    Scans left to right, consuming a two-character subtractive token
    (IV, IX, XL, XC, CD, CM) when present, otherwise a single character.
    Input is assumed canonical; no validation is performed.  Uses an index
    instead of repeatedly re-slicing the string (the old version was O(n^2)).
    """
    d = dict(zip(ROM, DEC))
    r = 0
    i = 0
    while i < len(xs):
        if xs[i:i + 2] in d:
            r += d[xs[i:i + 2]]
            i += 2
        else:
            r += d[xs[i]]
            i += 1
    return r


if __name__ == '__main__':
    # Spot-check known values, then verify the whole domain by round-trip
    # (replaces the old ~200-entry hand-maintained literal table).
    for d, r in [(0, ''), (4, 'IV'), (9, 'IX'), (14, 'XIV'), (40, 'XL'),
                 (49, 'XLIX'), (90, 'XC'), (400, 'CD'), (900, 'CM'),
                 (1990, 'MCMXC'), (1994, 'MCMXCIV'), (2020, 'MMXX'),
                 (3549, 'MMMDXLIX'), (3999, 'MMMCMXCIX')]:
        assert dec_to_roman(d) == r
        assert roman_to_dec(r) == d
    for d in range(4000):
        assert roman_to_dec(dec_to_roman(d)) == d
mjoe/spksrc
refs/heads/develop
spk/subliminal/src/app/setup.py
42
#!/usr/local/subliminal/env/bin/python from application import db, direct if __name__ == '__main__': db.setup() subliminal = direct.Subliminal() subliminal.setup()
liosha2007/temporary-groupdocs-python-sdk
refs/heads/master
groupdocs/models/UserInfoResult.py
2
#!/usr/bin/env python """ Copyright 2012 GroupDocs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ class UserInfoResult: """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.""" def __init__(self): self.swaggerTypes = { 'user': 'UserInfo' } self.user = None # UserInfo
Hironsan/Brain_Hacker
refs/heads/master
tests/models/__init__.py
1349
# -*- coding: utf-8 -*-
eon01/vagrant_nodejs
refs/heads/master
salt/formulas/vim/files/pyflakes/ftplugin/python/pyflakes/pyflakes/test/test_undefined_names.py
40
from _ast import PyCF_ONLY_AST from twisted.trial.unittest import TestCase from pyflakes import messages as m, checker from pyflakes.test import harness class Test(harness.Test): def test_undefined(self): self.flakes('bar', m.UndefinedName) def test_definedInListComp(self): self.flakes('[a for a in range(10) if a]') def test_functionsNeedGlobalScope(self): self.flakes(''' class a: def b(): fu fu = 1 ''') def test_builtins(self): self.flakes('range(10)') def test_magicGlobalsFile(self): """ Use of the C{__file__} magic global should not emit an undefined name warning. """ self.flakes('__file__') def test_magicGlobalsBuiltins(self): """ Use of the C{__builtins__} magic global should not emit an undefined name warning. """ self.flakes('__builtins__') def test_magicGlobalsName(self): """ Use of the C{__name__} magic global should not emit an undefined name warning. """ self.flakes('__name__') def test_magicGlobalsPath(self): """ Use of the C{__path__} magic global should not emit an undefined name warning, if you refer to it from a file called __init__.py. """ self.flakes('__path__', m.UndefinedName) self.flakes('__path__', filename='package/__init__.py') def test_globalImportStar(self): '''Can't find undefined names with import *''' self.flakes('from fu import *; bar', m.ImportStarUsed) def test_localImportStar(self): '''A local import * still allows undefined names to be found in upper scopes''' self.flakes(''' def a(): from fu import * bar ''', m.ImportStarUsed, m.UndefinedName) def test_unpackedParameter(self): '''Unpacked function parameters create bindings''' self.flakes(''' def a((bar, baz)): bar; baz ''') def test_definedByGlobal(self): '''"global" can make an otherwise undefined name in another function defined''' self.flakes(''' def a(): global fu; fu = 1 def b(): fu ''') test_definedByGlobal.todo = '' def test_globalInGlobalScope(self): """ A global statement in the global scope is ignored. 
""" self.flakes(''' global x def foo(): print x ''', m.UndefinedName) def test_del(self): '''del deletes bindings''' self.flakes('a = 1; del a; a', m.UndefinedName) def test_delGlobal(self): '''del a global binding from a function''' self.flakes(''' a = 1 def f(): global a del a a ''') def test_delUndefined(self): '''del an undefined name''' self.flakes('del a', m.UndefinedName) def test_globalFromNestedScope(self): '''global names are available from nested scopes''' self.flakes(''' a = 1 def b(): def c(): a ''') def test_laterRedefinedGlobalFromNestedScope(self): """ Test that referencing a local name that shadows a global, before it is defined, generates a warning. """ self.flakes(''' a = 1 def fun(): a a = 2 return a ''', m.UndefinedLocal) def test_laterRedefinedGlobalFromNestedScope2(self): """ Test that referencing a local name in a nested scope that shadows a global declared in an enclosing scope, before it is defined, generates a warning. """ self.flakes(''' a = 1 def fun(): global a def fun2(): a a = 2 return a ''', m.UndefinedLocal) def test_intermediateClassScopeIgnored(self): """ If a name defined in an enclosing scope is shadowed by a local variable and the name is used locally before it is bound, an unbound local warning is emitted, even if there is a class scope between the enclosing scope and the local scope. """ self.flakes(''' def f(): x = 1 class g: def h(self): a = x x = None print x, a print x ''', m.UndefinedLocal) def test_doubleNestingReportsClosestName(self): """ Test that referencing a local name in a nested scope that shadows a variable declared in two different outer scopes before it is defined in the innermost scope generates an UnboundLocal warning which refers to the nearest shadowed name. 
""" exc = self.flakes(''' def a(): x = 1 def b(): x = 2 # line 5 def c(): x x = 3 return x return x return x ''', m.UndefinedLocal).messages[0] self.assertEqual(exc.message_args, ('x', 5)) def test_laterRedefinedGlobalFromNestedScope3(self): """ Test that referencing a local name in a nested scope that shadows a global, before it is defined, generates a warning. """ self.flakes(''' def fun(): a = 1 def fun2(): a a = 1 return a return a ''', m.UndefinedLocal) def test_nestedClass(self): '''nested classes can access enclosing scope''' self.flakes(''' def f(foo): class C: bar = foo def f(self): return foo return C() f(123).f() ''') def test_badNestedClass(self): '''free variables in nested classes must bind at class creation''' self.flakes(''' def f(): class C: bar = foo foo = 456 return foo f() ''', m.UndefinedName) def test_definedAsStarArgs(self): '''star and double-star arg names are defined''' self.flakes(''' def f(a, *b, **c): print a, b, c ''') def test_definedInGenExp(self): """ Using the loop variable of a generator expression results in no warnings. """ self.flakes('(a for a in xrange(10) if a)') class NameTests(TestCase): """ Tests for some extra cases of name handling. """ def test_impossibleContext(self): """ A Name node with an unrecognized context results in a RuntimeError being raised. """ tree = compile("x = 10", "<test>", "exec", PyCF_ONLY_AST) # Make it into something unrecognizable. tree.body[0].targets[0].ctx = object() self.assertRaises(RuntimeError, checker.Checker, tree)
pastebt/you-get
refs/heads/web
tests/test.py
20
#!/usr/bin/env python import unittest from you_get import * from you_get.extractors import * from you_get.common import * class YouGetTests(unittest.TestCase): def test_freesound(self): freesound.download("http://www.freesound.org/people/Corsica_S/sounds/184419/", info_only=True) def test_magisto(self): magisto.download("http://www.magisto.com/album/video/f3x9AAQORAkfDnIFDA", info_only=True) def test_mixcloud(self): mixcloud.download("http://www.mixcloud.com/beatbopz/beat-bopz-disco-mix/", info_only=True) mixcloud.download("http://www.mixcloud.com/DJVadim/north-america-are-you-ready/", info_only=True) def test_vimeo(self): vimeo.download("http://vimeo.com/56810854", info_only=True) def test_youtube(self): youtube.download("http://www.youtube.com/watch?v=pzKerr0JIPA", info_only=True) youtube.download("http://youtu.be/pzKerr0JIPA", info_only=True) youtube.download("http://www.youtube.com/attribution_link?u=/watch?v%3DldAKIzq7bvs%26feature%3Dshare", info_only=True)
passalis/sef
refs/heads/master
setup.py
1
from setuptools import setup

from sef_dr.version import __version__

setup(
    name='PySEF',
    version=__version__,
    author='N. Passalis',
    author_email='passalis@csd.auth.gr',
    packages=['sef_dr', ],
    url='https://github.com/passalis/sef',
    license='LICENSE.txt',
    description='Package that implements the Similarity Embedding Framework on top of the PyTorch library.',
    # FIX: runtime dependencies belong only in install_requires.  Duplicating
    # them in setup_requires made setuptools try to fetch heavy packages via
    # the legacy easy_install machinery merely to *run* setup.py.
    # NOTE(review): the description mentions PyTorch but only torchvision is
    # declared -- presumably torchvision pulls in torch; verify.
    install_requires=[
        "scikit-learn >= 0.19.1",
        "numpy >= 1.13.3",
        "scipy >= 1.0.0",
        "torchvision >= 0.2.0"
    ],
)
ksmit799/Toontown-Source
refs/heads/master
toontown/estate/DistributedAnimatedStatuary.py
6
from pandac.PandaModules import NodePath
from direct.directnotify import DirectNotifyGlobal
from toontown.toonbase import ToontownGlobals
from toontown.estate import DistributedStatuary
from toontown.estate import GardenGlobals
from direct.actor import Actor


class DistributedAnimatedStatuary(DistributedStatuary.DistributedStatuary):
    """Client-side animated garden statuary.

    Like DistributedStatuary, but the model is an Actor looping a single
    animation, and its collision node is wrapped so the statuary can act as
    a pinball scoring target.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedAnimatedStatuary')

    def __init__(self, cr):
        self.notify.debug('constructing DistributedAnimatedStatuary')
        DistributedStatuary.DistributedStatuary.__init__(self, cr)

    def loadModel(self):
        """Load the animated model and wire up its pinball collision node."""
        self.rotateNode = self.plantPath.attachNewNode('rotate')
        self.model = Actor.Actor()
        # anims[0] is the model file suffix, anims[1] the (single) animation
        # name; the animation file lives under the same model path prefix.
        animPath = self.modelPath + self.anims[1]
        self.model.loadModel(self.modelPath + self.anims[0])
        self.model.loadAnims(dict([[self.anims[1], animPath]]))
        colNode = self.model.find('**/+CollisionNode')
        if self.typeIndex == 234:
            # Type 234 uses a half-scale collision solid.
            # NOTE(review): this scales colNode before the isEmpty() check
            # below -- presumably type 234 always has a collision node;
            # confirm.
            colNode.setScale(0.5)
        if not colNode.isEmpty():
            # Encode score and multiplier into the name of a wrapper node
            # above the collision node ('statuary-<score>-<multiplier>'),
            # which the pinball game parses on collision.
            score, multiplier = ToontownGlobals.PinballScoring[ToontownGlobals.PinballStatuary]
            if self.pinballScore:
                score = self.pinballScore[0]
                multiplier = self.pinballScore[1]
            scoreNodePath = NodePath('statuary-%d-%d' % (score, multiplier))
            colNode.setName('statuaryCol')
            scoreNodePath.reparentTo(colNode.getParent())
            colNode.reparentTo(scoreNodePath)
        self.model.setScale(self.worldScale)
        self.model.reparentTo(self.rotateNode)
        self.model.loop(self.anims[1])

    def setTypeIndex(self, typeIndex):
        DistributedStatuary.DistributedStatuary.setTypeIndex(self, typeIndex)
        # Cache this type's (model, animation) pair for loadModel().
        self.anims = GardenGlobals.PlantAttributes[typeIndex]['anims']

    def setupShadow(self):
        # Type 234 gets no drop shadow; everything else uses the default.
        if self.typeIndex != 234:
            # BUG FIX: the base-class method was called unbound with no
            # arguments (missing 'self'), raising TypeError at runtime.
            DistributedStatuary.DistributedStatuary.setupShadow(self)
anilmuthineni/tensorflow
refs/heads/master
tensorflow/python/training/training_ops_test.py
53
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.learning.training_ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import itertools import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework.test_util import TensorFlowTestCase from tensorflow.python.ops import variables from tensorflow.python.platform import googletest from tensorflow.python.training import training_ops class TrainingOpsTest(TensorFlowTestCase): def _toType(self, dtype): if dtype == np.float16: return dtypes.float16 elif dtype == np.float32: return dtypes.float32 elif dtype == np.float64: return dtypes.float64 elif dtype == np.int32: return dtypes.int32 elif dtype == np.int64: return dtypes.int64 else: assert False, (dtype) def _testTypes(self, x, alpha, delta, use_gpu=None): self.setUp() with self.test_session(use_gpu=use_gpu): var = variables.Variable(x) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(x, var.eval()) apply_sgd = training_ops.apply_gradient_descent(var, alpha, delta) out = apply_sgd.eval() self.assertShapeEqual(out, apply_sgd) self.assertAllCloseAccordingToType(x - alpha * delta, out) def testApplyGradientDescent(self): for (dtype, use_gpu) in 
itertools.product( [np.float16, np.float32, np.float64], [False, True]): x = np.arange(100).astype(dtype) alpha = np.array(2.0).astype(dtype) delta = np.arange(100).astype(dtype) self._testTypes(x, alpha, delta, use_gpu) def _testTypesForAdagrad(self, x, y, lr, grad, use_gpu=None): self.setUp() with self.test_session(use_gpu=use_gpu): var = variables.Variable(x) accum = variables.Variable(y) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(x, var.eval()) apply_adagrad = training_ops.apply_adagrad(var, accum, lr, grad) out = apply_adagrad.eval() self.assertShapeEqual(out, apply_adagrad) self.assertAllCloseAccordingToType(x - lr * grad * (y + grad * grad)** (-0.5), out) self.assertAllCloseAccordingToType(y + grad * grad, accum.eval()) def _testTypesForFtrl(self, x, y, z, lr, grad, use_gpu=None, l1=0.0, l2=0.0, lr_power=-0.5): self.setUp() with self.test_session(use_gpu=use_gpu): var = variables.Variable(x) accum = variables.Variable(y) linear = variables.Variable(z) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(x, var.eval()) apply_ftrl = training_ops.apply_ftrl(var, accum, linear, grad, lr, l1, l2, lr_power) out = apply_ftrl.eval() self.assertShapeEqual(out, apply_ftrl) accum_update = y + grad * grad linear_update = z + grad - (accum_update**(-lr_power) - y** (-lr_power)) / lr * x quadratic = 1.0 / (accum_update**(lr_power) * lr) + 2 * l2 expected_out = np.array([( np.sign(linear_update[i]) * l1 - linear_update[i]) / (quadratic[i]) if np.abs(linear_update[i]) > l1 else 0.0 for i in range(linear_update.size)]) self.assertAllCloseAccordingToType(accum_update, accum.eval()) if x.dtype == np.float16: # The calculations here really are not very precise in float16. 
self.assertAllClose(linear_update, linear.eval(), rtol=2e-2, atol=2e-2) self.assertAllClose(expected_out, out, rtol=2e-2, atol=2e-2) else: self.assertAllClose(linear_update, linear.eval()) self.assertAllClose(expected_out, out) def testApplyAdagrad(self): for (dtype, use_gpu) in itertools.product( [np.float16, np.float32, np.float64], [False, True]): x = np.arange(100).astype(dtype) y = np.arange(1, 101).astype(dtype) lr = np.array(2.0).astype(dtype) grad = np.arange(100).astype(dtype) self._testTypesForAdagrad(x, y, lr, grad, use_gpu) def testApplyFtrl(self): for dtype in [np.float16, np.float32, np.float64]: x = np.arange(100).astype(dtype) y = np.arange(1, 101).astype(dtype) z = np.arange(102, 202).astype(dtype) lr = np.array(2.0).astype(dtype) l1 = np.array(3.0).astype(dtype) l2 = np.array(4.0).astype(dtype) grad = np.arange(100).astype(dtype) self._testTypesForFtrl(x, y, z, lr, grad, use_gpu=False, l1=l1, l2=l2) def _testTypesForSparseAdagrad(self, x, y, lr, grad, indices): self.setUp() with self.test_session(use_gpu=False): var = variables.Variable(x) accum = variables.Variable(y) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(x, var.eval()) sparse_apply_adagrad = training_ops.sparse_apply_adagrad( var, accum, lr, grad, constant_op.constant(indices, self._toType(indices.dtype))) out = sparse_apply_adagrad.eval() self.assertShapeEqual(out, sparse_apply_adagrad) for (i, index) in enumerate(indices): self.assertAllCloseAccordingToType( x[index] - lr * grad[i] * (y[index] + grad[i] * grad[i])**(-0.5), var.eval()[index]) self.assertAllCloseAccordingToType(y[index] + grad[i] * grad[i], accum.eval()[index]) def _testTypesForSparseFtrl(self, x, y, z, lr, grad, indices, l1=0.0, l2=0.0, lr_power=-0.5): self.setUp() with self.test_session(use_gpu=False): var = variables.Variable(x) accum = variables.Variable(y) linear = variables.Variable(z) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(x, 
var.eval()) sparse_apply_ftrl = training_ops.sparse_apply_ftrl( var, accum, linear, grad, constant_op.constant(indices, self._toType(indices.dtype)), lr, l1, l2, lr_power=lr_power) out = sparse_apply_ftrl.eval() self.assertShapeEqual(out, sparse_apply_ftrl) for (i, index) in enumerate(indices): self.assertAllCloseAccordingToType(x[index] - lr * grad[i] * (y[index] + grad[i] * grad[i])** (lr_power), var.eval()[index]) self.assertAllCloseAccordingToType(y[index] + grad[i] * grad[i], accum.eval()[index]) def testSparseApplyAdagrad(self): for (dtype, index_type) in itertools.product( [np.float16, np.float32, np.float64], [np.int32, np.int64]): x_val = [np.arange(10), np.arange(10, 20), np.arange(20, 30)] y_val = [np.arange(1, 11), np.arange(11, 21), np.arange(21, 31)] x = np.array(x_val).astype(dtype) y = np.array(y_val).astype(dtype) lr = np.array(2.0).astype(dtype) grad_val = [np.arange(10), np.arange(10)] grad = np.array(grad_val).astype(dtype) indices = np.array([0, 2]).astype(index_type) self._testTypesForSparseAdagrad(x, y, lr, grad, indices) def testSparseApplyAdagradDim1(self): for (dtype, index_type) in itertools.product( [np.float16, np.float32, np.float64], [np.int32, np.int64]): x_val = [[1.0], [2.0], [3.0]] y_val = [[4.0], [5.0], [6.0]] x = np.array(x_val).astype(dtype) y = np.array(y_val).astype(dtype) lr = np.array(2.0).astype(dtype) grad_val = [[1.5], [2.5]] grad = np.array(grad_val).astype(dtype) indices = np.array([0, 2]).astype(index_type) self._testTypesForSparseAdagrad(x, y, lr, grad, indices) def testSparseApplyFtrlDim1(self): for (dtype, index_type) in itertools.product( [np.float16, np.float32, np.float64], [np.int32, np.int64]): x_val = [[0.0], [0.0], [0.0]] y_val = [[4.0], [5.0], [6.0]] z_val = [[0.0], [0.0], [0.0]] x = np.array(x_val).astype(dtype) y = np.array(y_val).astype(dtype) z = np.array(z_val).astype(dtype) lr = np.array(2.0).astype(dtype) grad_val = [[1.5], [2.5]] grad = np.array(grad_val).astype(dtype) indices = np.array([0, 
2]).astype(index_type) self._testTypesForSparseFtrl(x, y, z, lr, grad, indices) def testApplyAdam(self): for dtype, use_gpu in itertools.product( [np.float16, np.float32, np.float64], [False, True]): var = np.arange(100).astype(dtype) m = np.arange(1, 101).astype(dtype) v = np.arange(101, 201).astype(dtype) grad = np.arange(100).astype(dtype) self._testTypesForAdam(var, m, v, grad, use_gpu) def _testTypesForAdam(self, var, m, v, grad, use_gpu): self.setUp() with self.test_session(use_gpu=use_gpu): var_t = variables.Variable(var) m_t = variables.Variable(m) v_t = variables.Variable(v) t = 1 beta1 = np.array(0.9, dtype=var.dtype) beta2 = np.array(0.999, dtype=var.dtype) beta1_power = beta1**t beta2_power = beta2**t lr = np.array(0.001, dtype=var.dtype) epsilon = np.array(1e-8, dtype=var.dtype) beta1_t = constant_op.constant(beta1, self._toType(var.dtype), []) beta2_t = constant_op.constant(beta2, self._toType(var.dtype), []) beta1_power_t = variables.Variable(beta1_power) beta2_power_t = variables.Variable(beta2_power) lr_t = constant_op.constant(lr, self._toType(var.dtype), []) epsilon_t = constant_op.constant(epsilon, self._toType(var.dtype), []) variables.global_variables_initializer().run() self.assertAllCloseAccordingToType(var, var_t.eval()) new_var, _, _ = self._adamUpdateNumpy(var, grad, t, m, v, lr, beta1, beta2, epsilon) apply_adam = training_ops.apply_adam(var_t, m_t, v_t, beta1_power_t, beta2_power_t, lr_t, beta1_t, beta2_t, epsilon_t, grad) out = apply_adam.eval() self.assertShapeEqual(out, apply_adam) self.assertAllCloseAccordingToType(new_var, out) def _adamUpdateNumpy(self, param, g_t, t, m, v, alpha, beta1, beta2, epsilon): alpha_t = alpha * np.sqrt(1 - beta2**t) / (1 - beta1**t) m_t = beta1 * m + (1 - beta1) * g_t v_t = beta2 * v + (1 - beta2) * g_t * g_t param_t = param - alpha_t * m_t / (np.sqrt(v_t) + epsilon) return param_t, m_t, v_t if __name__ == '__main__': googletest.main()
gsehub/edx-platform
refs/heads/gsehub-release
openedx/core/djangoapps/password_policy/settings/common.py
24
""" Default settings for the password_policy app. """ def plugin_settings(settings): """ Adds default settings for the password_policy app. """ # Settings for managing the rollout of password policy compliance enforcement. settings.PASSWORD_POLICY_COMPLIANCE_ROLLOUT_CONFIG = { # Global switch to enable/disable password policy compliance enforcement on login. 'ENFORCE_COMPLIANCE_ON_LOGIN': False, # The date that staff users (users with is_staff permissions) will be required to be compliant with # current password policy requirements. After this date, non-compliant users will be forced to reset their # password before logging in. # # This should be a timezone-aware date string parsable by dateutils.parser.parse # Ex: 2018-04-19 00:00:00+00:00 'STAFF_USER_COMPLIANCE_DEADLINE': None, # The date that users with elevated privileges (users with entries in the course_access_roles table) will be # required to be compliant with current password policy requirements. After this date, non-compliant users will # be forced to reset their password before logging in. # # This should be a timezone-aware date string parsable by dateutils.parser.parse # Ex: 2018-04-19 00:00:00+00:00 'ELEVATED_PRIVILEGE_USER_COMPLIANCE_DEADLINE': None, # The date that all users will be required to be compliant with current password policy requirements. After # this date, non-compliant users will be forced to reset their password before logging in. # # This should be a timezone-aware date string parsable by dateutils.parser.parse # Ex: 2018-04-19 00:00:00+00:00 'GENERAL_USER_COMPLIANCE_DEADLINE': None, }
jc0n/scrapy
refs/heads/master
scrapy/core/scheduler.py
20
import os import json import logging from os.path import join, exists from scrapy.utils.reqser import request_to_dict, request_from_dict from scrapy.utils.misc import load_object from scrapy.utils.job import job_dir logger = logging.getLogger(__name__) class Scheduler(object): def __init__(self, dupefilter, jobdir=None, dqclass=None, mqclass=None, logunser=False, stats=None, pqclass=None): self.df = dupefilter self.dqdir = self._dqdir(jobdir) self.pqclass = pqclass self.dqclass = dqclass self.mqclass = mqclass self.logunser = logunser self.stats = stats @classmethod def from_crawler(cls, crawler): settings = crawler.settings dupefilter_cls = load_object(settings['DUPEFILTER_CLASS']) dupefilter = dupefilter_cls.from_settings(settings) pqclass = load_object(settings['SCHEDULER_PRIORITY_QUEUE']) dqclass = load_object(settings['SCHEDULER_DISK_QUEUE']) mqclass = load_object(settings['SCHEDULER_MEMORY_QUEUE']) logunser = settings.getbool('LOG_UNSERIALIZABLE_REQUESTS', settings.getbool('SCHEDULER_DEBUG')) return cls(dupefilter, jobdir=job_dir(settings), logunser=logunser, stats=crawler.stats, pqclass=pqclass, dqclass=dqclass, mqclass=mqclass) def has_pending_requests(self): return len(self) > 0 def open(self, spider): self.spider = spider self.mqs = self.pqclass(self._newmq) self.dqs = self._dq() if self.dqdir else None return self.df.open() def close(self, reason): if self.dqs: prios = self.dqs.close() with open(join(self.dqdir, 'active.json'), 'w') as f: json.dump(prios, f) return self.df.close(reason) def enqueue_request(self, request): if not request.dont_filter and self.df.request_seen(request): self.df.log(request, self.spider) return False dqok = self._dqpush(request) if dqok: self.stats.inc_value('scheduler/enqueued/disk', spider=self.spider) else: self._mqpush(request) self.stats.inc_value('scheduler/enqueued/memory', spider=self.spider) self.stats.inc_value('scheduler/enqueued', spider=self.spider) return True def next_request(self): request = self.mqs.pop() if 
request: self.stats.inc_value('scheduler/dequeued/memory', spider=self.spider) else: request = self._dqpop() if request: self.stats.inc_value('scheduler/dequeued/disk', spider=self.spider) if request: self.stats.inc_value('scheduler/dequeued', spider=self.spider) return request def __len__(self): return len(self.dqs) + len(self.mqs) if self.dqs else len(self.mqs) def _dqpush(self, request): if self.dqs is None: return try: reqd = request_to_dict(request, self.spider) self.dqs.push(reqd, -request.priority) except ValueError as e: # non serializable request if self.logunser: msg = ("Unable to serialize request: %(request)s - reason:" " %(reason)s - no more unserializable requests will be" " logged (stats being collected)") logger.warning(msg, {'request': request, 'reason': e}, exc_info=True, extra={'spider': self.spider}) self.logunser = False self.stats.inc_value('scheduler/unserializable', spider=self.spider) return else: return True def _mqpush(self, request): self.mqs.push(request, -request.priority) def _dqpop(self): if self.dqs: d = self.dqs.pop() if d: return request_from_dict(d, self.spider) def _newmq(self, priority): return self.mqclass() def _newdq(self, priority): return self.dqclass(join(self.dqdir, 'p%s' % priority)) def _dq(self): activef = join(self.dqdir, 'active.json') if exists(activef): with open(activef) as f: prios = json.load(f) else: prios = () q = self.pqclass(self._newdq, startprios=prios) if q: logger.info("Resuming crawl (%(queuesize)d requests scheduled)", {'queuesize': len(q)}, extra={'spider': self.spider}) return q def _dqdir(self, jobdir): if jobdir: dqdir = join(jobdir, 'requests.queue') if not exists(dqdir): os.makedirs(dqdir) return dqdir
openfun/edx-platform
refs/heads/master
common/djangoapps/course_modes/migrations/__init__.py
12133432
Paul-Jouhaud/url_shortener
refs/heads/master
urlShortenerServer/urlShortenerServer/__init__.py
12133432
rimbalinux/LMD3
refs/heads/master
django/contrib/localflavor/it/__init__.py
12133432
cmunk/protwis
refs/heads/master
build/__init__.py
12133432
ryanahall/django
refs/heads/master
django/contrib/messages/apps.py
591
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _


class MessagesConfig(AppConfig):
    """App configuration for django.contrib.messages."""
    name = 'django.contrib.messages'
    # Translatable human-readable name (shown e.g. in the admin).
    verbose_name = _("Messages")
benbox69/pyload
refs/heads/stable
module/plugins/hoster/LinksnappyCom.py
12
# -*- coding: utf-8 -*- import re import urlparse from module.common.json_layer import json_loads, json_dumps from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo class LinksnappyCom(MultiHoster): __name__ = "LinksnappyCom" __type__ = "hoster" __version__ = "0.11" __status__ = "testing" __pattern__ = r'https?://(?:[^/]+\.)?linksnappy\.com' __config__ = [("use_premium" , "bool", "Use premium account if available" , True), ("revertfailed", "bool", "Revert to standard download if fails", True)] __description__ = """Linksnappy.com multi-hoster plugin""" __license__ = "GPLv3" __authors__ = [("stickell", "l.stickell@yahoo.it")] def handle_premium(self, pyfile): host = self._get_host(pyfile.url) json_params = json_dumps({'link' : pyfile.url, 'type' : host, 'username': self.user, 'password': self.account.get_info(self.user)['login']['password']}) r = self.load("http://linksnappy.com/api/linkgen", post={'genLinks': json_params}) self.log_debug("JSON data: " + r) j = json_loads(r)['links'][0] if j['error']: self.error(_("Error converting the link")) pyfile.name = j['filename'] self.link = j['generated'] @staticmethod def _get_host(url): host = urlparse.urlsplit(url).netloc return re.search(r'[\w-]+\.\w+$', host).group(0) getInfo = create_getInfo(LinksnappyCom)
wuhengzhi/chromium-crosswalk
refs/heads/master
tools/variations/fieldtrial_util.py
10
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import sys def _hex(ch): hv = hex(ord(ch)).replace('0x', '') hv.zfill(2) return hv.upper() # URL escapes the delimiter characters from the output. urllib.quote is not # used because it cannot escape '.'. def _escape(str): result = str # Must perform replace on '%' first before the others. for c in '%:/.,': result = result.replace(c, '%' + _hex(c)) return result def _FindDuplicates(entries): seen = set() duplicates = set() for entry in entries: if entry in seen: duplicates.add(entry) else: seen.add(entry) return duplicates def _CheckForDuplicateFeatures(enable_features, disable_features): enable_features_set = set(enable_features) if len(enable_features_set) != len(enable_features): raise Exception('Duplicate feature(s) in enable_features: ' + ', '.join(_FindDuplicates(enable_features))) disable_features_set = set(disable_features) if len(disable_features_set) != len(disable_features): raise Exception('Duplicate feature(s) in disable_features: ' + ', '.join(_FindDuplicates(disable_features))) features_in_both = enable_features_set.intersection(disable_features_set) if len(features_in_both) > 0: raise Exception('Conflicting features set as both enabled and disabled: ' + ', '.join(features_in_both)) # Generate a list of command-line switches to enable field trials defined in # fieldtrial_testing_config_*.json. def GenerateArgs(config_path): try: with open(config_path, 'r') as base_file: variations = json.load(base_file) except (IOError, ValueError): return [] field_trials = [] params = [] enable_features = [] disable_features = [] for trial, groups in variations.iteritems(): if not len(groups): continue # For now, only take the first group. 
group = groups[0] trial_group = [trial, group['group_name']] field_trials.extend(trial_group) param_list = [] if 'params' in group: for key, value in group['params'].iteritems(): param_list.append(key) param_list.append(value) if len(param_list): # Escape the variables for the command-line. trial_group = [_escape(x) for x in trial_group] param_list = [_escape(x) for x in param_list] param = '%s:%s' % ('.'.join(trial_group), '/'.join(param_list)) params.append(param) if 'enable_features' in group: enable_features.extend(group['enable_features']) if 'disable_features' in group: disable_features.extend(group['disable_features']) if not len(field_trials): return [] _CheckForDuplicateFeatures(enable_features, disable_features) args = ['--force-fieldtrials=%s' % '/'.join(field_trials)] if len(params): args.append('--force-fieldtrial-params=%s' % ','.join(params)) if len(enable_features): args.append('--enable-features=%s' % ','.join(enable_features)) if len(disable_features): args.append('--disable-features=%s' % ','.join(disable_features)) return args def main(): if len(sys.argv) < 3: print 'Usage: fieldtrial_util.py [base_config_path] [platform_config_path]' exit(-1) print GenerateArgs(sys.argv[1], sys.argv[2]) if __name__ == '__main__': main()
Vignesh2208/Awlsim
refs/heads/master
awlsim/core/util.py
2
# -*- coding: utf-8 -*- # # AWL simulator - utility functions # # Copyright 2012-2015 Michael Buesch <m@bues.ch> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # from __future__ import division, absolute_import, print_function, unicode_literals from awlsim.common.compat import * from awlsim.common.util import * import sys import fractions # Returns the index of a list element, or -1 if not found. # If translate if not None, it should be a callable that translates # a list entry. Arguments are index, entry. def listIndex(_list, value, start=0, stop=-1, translate=None): if stop < 0: stop = len(_list) if translate: for i, ent in enumerate(_list[start:stop], start): if translate(i, ent) == value: return i return -1 try: return _list.index(value, start, stop) except ValueError: return -1 # Convert an integer list to a human readable string. # Example: [1, 2, 3] -> "1, 2 or 3" def listToHumanStr(lst, lastSep="or"): if not lst: return "" lst = toList(lst) string = ", ".join(str(i) for i in lst) # Replace last comma with 'lastSep' string = string[::-1].replace(",", lastSep[::-1] + " ", 1)[::-1] return string # Expand the elements of a list. # 'expander' is the expansion callback. 'expander' takes # one list element as argument. It returns a list. 
def listExpand(lst, expander):
	"""Expand each element of 'lst' through the 'expander' callback.

	'expander' takes one list element and returns a list.
	Returns the concatenation of all expander results.
	"""
	ret = []
	for item in lst:
		ret.extend(expander(item))
	return ret

# Fully partition a string by separator 'sep'.
# Returns a list of strings:
# [ "first-element", sep, "second-element", sep, ... ]
# If 'keepEmpty' is True, empty elements are kept.
def strPartitionFull(string, sep, keepEmpty=True):
	first, ret = True, []
	for elem in string.split(sep):
		if not first:
			# Re-insert the separator that str.split() consumed.
			ret.append(sep)
		if elem or keepEmpty:
			ret.append(elem)
		first = False
	return ret

def str2bool(string, default=False):
	"""Parse a human readable string into a boolean.

	Recognizes common true/false keywords and falls back to integer
	parsing ("0" -> False, nonzero -> True).
	Returns 'default' if the string is None or cannot be interpreted.
	"""
	if string is None:
		# Robustness fix: treat a missing value like an unparsable one
		# instead of raising AttributeError on None.lower().
		return default
	s = string.lower()
	if s in ("true", "yes", "on", "enable", "enabled"):
		return True
	if s in ("false", "no", "off", "disable", "disabled"):
		return False
	try:
		return bool(int(s, 10))
	except ValueError:
		return default

# Returns value, if value is a list.
# Returns a list with the elements of value, if value is a tuple.
# Returns a sorted list with the elements of value, if value is a set.
# Otherwise returns a list with value as its only element.
def toList(value):
	if isinstance(value, list):
		return value
	if isinstance(value, tuple):
		return list(value)
	if isinstance(value, set):
		# Sort for a deterministic order.
		return sorted(value)
	return [ value, ]

# Returns value, if value is a set.
# Returns a set with the elements of value, if value is a tuple or a list.
# Otherwise returns a set with value as its only element.
def toSet(value):
	if isinstance(value, set):
		return value
	if isinstance(value, list) or\
	   isinstance(value, tuple):
		return set(value)
	return { value, }

def pivotDict(inDict):
	"""Swap keys and values of 'inDict' into a new dict.

	Raises KeyError if two input keys map to the same value,
	because the pivoted dict would silently lose one of them.
	"""
	outDict = {}
	for key, value in inDict.items():
		if value in outDict:
			raise KeyError("Ambiguous key in pivot dict")
		outDict[value] = key
	return outDict

# Compatibility: fractions.gcd was deprecated in Python 3.5 and removed
# in Python 3.9. Prefer math.gcd and fall back to fractions.gcd on old
# interpreters (Python 2 / early 3.x).
try:
	from math import gcd as _gcd
except ImportError:
	from fractions import gcd as _gcd
# Explicit import instead of relying on a star-imported 'reduce'
# (Python 3 has no builtin reduce). functools.reduce exists since 2.6.
from functools import reduce as _reduce

# Get "Greatest Common Divisor"
# NOTE: assumes non-negative operands; math.gcd and fractions.gcd
# disagree on the sign convention for negative inputs.
def math_gcd(*args):
	return _reduce(_gcd, args)

# Get "Least Common Multiple"
def math_lcm(*args):
	return _reduce(lambda x, y: x * y // math_gcd(x, y), args)
jmartinm/invenio
refs/heads/master
modules/webmessage/lib/webmessage_unit_tests.py
16
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2009, 2010, 2011 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Unit tests for WebMessage.""" __revision__ = \ "$Id$" from invenio.testutils import InvenioTestCase from invenio import webmessage_mailutils from invenio.testutils import make_test_suite, run_test_suite class TestQuotingMessage(InvenioTestCase): """Test for quoting messages.""" def test_simple_quoting_per_block(self): """webmessage - test quoting simple message (HTML, per block)""" text = """Dear romeo I received your mail >>Would you like to come with me to the restaurant? Of course! >>>>When could we get together? Reply to my question please. 
see you...""" expected_text = """Dear romeo<br/> I received your mail<br/> <div class="commentbox"> \tWould you like to come with me to the restaurant?<br/> </div> Of course!<br/> <div class="commentbox"> \t<div class="commentbox"> \t\tWhen could we get together?<br/> \t</div> </div> Reply to my question please.<br/> see you...<br/> """ res = webmessage_mailutils.email_quoted_txt2html(text, tabs_before=0, indent_txt='>>', linebreak_txt="\n", indent_html=('<div class="commentbox">', "</div>"), linebreak_html='<br/>') self.assertEqual(res, expected_text) def test_simple_quoting_per_line(self): """webmessage - test quoting simple message (HTML, per line)""" text = """Dear romeo I received your mail >>Would you like to come with me to the restaurant? >>I discovered a really nice one. Of course! >>>>When could we get together? Reply to my question please. see you...""" expected_text = """Dear romeo&nbsp;<br/> I received your mail&nbsp;<br/> <blockquote><div>Would you like to come with me to the restaurant?&nbsp;</div></blockquote>&nbsp;<br/> <blockquote><div>I discovered a really nice one.&nbsp;</div></blockquote>&nbsp;<br/> Of course!&nbsp;<br/> <blockquote><div><blockquote><div>When could we get together?&nbsp;</div></blockquote>&nbsp;</div></blockquote>&nbsp;<br/> Reply to my question please.&nbsp;<br/> see you...&nbsp;<br/> """ res = webmessage_mailutils.email_quoted_txt2html(text, tabs_before=0, indent_txt='>>', linebreak_txt="\n", indent_html=('<blockquote><div>', '&nbsp;</div></blockquote>'), linebreak_html="&nbsp;<br/>", indent_block=False) self.assertEqual(res, expected_text) def test_quoting_message(self): """webmessage - test quoting message (text)""" text = """C'est un lapin, lapin de bois. >>Quoi? Un cadeau. >>What? A present. >>Oh, un cadeau""" expected_text = """>>C'est un lapin, lapin de bois. >>>>Quoi? >>Un cadeau. >>>>What? >>A present. 
>>>>Oh, un cadeau """ res = webmessage_mailutils.email_quote_txt(text, indent_txt='>>', linebreak_input="\n", linebreak_output="\n") self.assertEqual(res, expected_text) def test_indenting_rule_message(self): """webmessage - return email-like indenting rule""" text = """>>Brave Sir Robin ran away... <img src="malicious_script"/>*No!* >>bravely ran away away... I didn't!*<script>malicious code</script> >>When danger reared its ugly head, he bravely turned his tail and fled. <form onload="malicious"></form>*I never did!* """ expected_text = """>>Brave Sir Robin ran away... &lt;img src="malicious_script" /&gt;*No!* >>bravely ran away away... I didn't!*&lt;script&gt;malicious code&lt;/script&gt; >>When danger reared its ugly head, he bravely turned his tail and fled. &lt;form onload="malicious"&gt;&lt;/form&gt;*I never did!* """ res = webmessage_mailutils.escape_email_quoted_text(text, indent_txt='>>', linebreak_txt='\n') self.assertEqual(res, expected_text) TEST_SUITE = make_test_suite(TestQuotingMessage) if __name__ == "__main__": run_test_suite(TEST_SUITE)
yewang15215/django
refs/heads/master
tests/validation/test_picklable.py
576
import pickle
from unittest import TestCase

from django.core.exceptions import ValidationError


def _roundtrip(err):
    """Serialize *err* with pickle and return the deserialized copy."""
    return pickle.loads(pickle.dumps(err))


class PickableValidationErrorTestCase(TestCase):

    def test_validationerror_is_picklable(self):
        """ValidationError survives a pickle round trip in all its forms."""
        # A single message carrying a code.
        original = ValidationError('a', code='something')
        restored = _roundtrip(original)
        self.assertIs(restored, restored.error_list[0])
        self.assertEqual(original.message, restored.message)
        self.assertEqual(original.code, restored.code)

        # The same, wrapped in another ValidationError before pickling.
        original = ValidationError('a', code='something')
        restored = _roundtrip(ValidationError(original))
        self.assertIs(restored, restored.error_list[0])
        self.assertEqual(original.message, restored.message)
        self.assertEqual(original.code, restored.code)

        # A list of plain messages.
        original = ValidationError(['a', 'b'])
        restored = _roundtrip(original)
        self.assertEqual(original.error_list[0].message,
                         restored.error_list[0].message)
        self.assertEqual(original.error_list[1].message,
                         restored.error_list[1].message)

        # A list of plain messages, wrapped before pickling.
        original = ValidationError(['a', 'b'])
        restored = _roundtrip(ValidationError(original))
        self.assertEqual(original.error_list[0].message,
                         restored.error_list[0].message)
        self.assertEqual(original.error_list[1].message,
                         restored.error_list[1].message)

        # A list of nested ValidationError instances.
        original = ValidationError([ValidationError('a'), ValidationError('b')])
        restored = _roundtrip(original)
        self.assertIs(restored.args[0][0], restored.error_list[0])
        self.assertEqual(original.error_list[0].message,
                         restored.error_list[0].message)
        self.assertEqual(original.error_list[1].message,
                         restored.error_list[1].message)

        # A dict mapping field names to message lists.
        message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
        original = ValidationError(message_dict)
        restored = _roundtrip(original)
        self.assertEqual(restored.message_dict, message_dict)
timthelion/FreeCAD
refs/heads/master
src/Mod/Sketcher/TestSketcherApp.py
27
# (c) Juergen Riegel (FreeCAD@juergen-riegel.net) 2011 LGPL * # * # This file is part of the FreeCAD CAx development system. * # * # This program is free software; you can redistribute it and/or modify * # it under the terms of the GNU Lesser General Public License (LGPL) * # as published by the Free Software Foundation; either version 2 of * # the License, or (at your option) any later version. * # for detail see the LICENCE text file. * # * # FreeCAD is distributed in the hope that it will be useful, * # but WITHOUT ANY WARRANTY; without even the implied warranty of * # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # GNU Library General Public License for more details. * # * # You should have received a copy of the GNU Library General Public * # License along with FreeCAD; if not, write to the Free Software * # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * # USA * #************************************************************************** import FreeCAD, os, sys, unittest, Part, Sketcher App = FreeCAD def CreateBoxSketchSet(SketchFeature): SketchFeature.addGeometry(Part.Line(FreeCAD.Vector(-99.230339,36.960674,0),FreeCAD.Vector(69.432587,36.960674,0))) SketchFeature.addGeometry(Part.Line(FreeCAD.Vector(69.432587,36.960674,0),FreeCAD.Vector(69.432587,-53.196629,0))) SketchFeature.addGeometry(Part.Line(FreeCAD.Vector(69.432587,-53.196629,0),FreeCAD.Vector(-99.230339,-53.196629,0))) SketchFeature.addGeometry(Part.Line(FreeCAD.Vector(-99.230339,-53.196629,0),FreeCAD.Vector(-99.230339,36.960674,0))) # add the constraints SketchFeature.addConstraint(Sketcher.Constraint('Coincident',0,2,1,1)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',1,2,2,1)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',2,2,3,1)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',3,2,0,1)) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',0)) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',2)) 
SketchFeature.addConstraint(Sketcher.Constraint('Vertical',1)) SketchFeature.addConstraint(Sketcher.Constraint('Vertical',3)) # add dimensions SketchFeature.addConstraint(Sketcher.Constraint('Distance',1,81.370787)) SketchFeature.addConstraint(Sketcher.Constraint('Distance',0,187.573036)) def CreateSlotPlateSet(SketchFeature): SketchFeature.addGeometry(Part.Line(App.Vector(60.029362,-30.279360,0),App.Vector(-120.376335,-30.279360,0))) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',0)) SketchFeature.addGeometry(Part.Line(App.Vector(-120.376335,-30.279360,0),App.Vector(-70.193062,38.113884,0))) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',0,2,1,1)) SketchFeature.addGeometry(Part.Line(App.Vector(-70.193062,38.113884,0),App.Vector(60.241116,37.478645,0))) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',1,2,2,1)) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',2)) SketchFeature.addGeometry(Part.ArcOfCircle(Part.Circle(App.Vector(60.039921,3.811391,0),App.Vector(0,0,1),35.127132),-1.403763,1.419522)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',3,2,2,2)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',3,1,0,1)) SketchFeature.addConstraint(Sketcher.Constraint('Tangent',3,2)) SketchFeature.addConstraint(Sketcher.Constraint('Tangent',3,0)) SketchFeature.addConstraint(Sketcher.Constraint('Angle',0,2,1,1,0.947837)) SketchFeature.addConstraint(Sketcher.Constraint('Distance',0,184.127425)) SketchFeature.setDatum(9,200.000000) SketchFeature.addConstraint(Sketcher.Constraint('Radius',3,38.424808)) SketchFeature.setDatum(10,40.000000) SketchFeature.setDatum(8,0.872665) SketchFeature.addConstraint(Sketcher.Constraint('DistanceX',0,2,0.0)) SketchFeature.setDatum(11,0.000000) SketchFeature.movePoint(0,2,App.Vector(-0.007829,-33.376450,0)) SketchFeature.movePoint(0,2,App.Vector(-0.738149,-10.493386,0)) SketchFeature.movePoint(0,2,App.Vector(-0.007829,2.165328,0)) 
SketchFeature.addConstraint(Sketcher.Constraint('DistanceY',0,2,2.165328)) SketchFeature.setDatum(12,0.000000) def CreateSlotPlateInnerSet(SketchFeature): SketchFeature.addGeometry(Part.Circle(App.Vector(195.055893,39.562252,0),App.Vector(0,0,1),29.846098)) SketchFeature.addGeometry(Part.Line(App.Vector(150.319031,13.449363,0),App.Vector(36.700474,13.139774,0))) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',5)) SketchFeature.addGeometry(Part.Line(App.Vector(36.700474,13.139774,0),App.Vector(77.566010,63.292927,0))) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',5,2,6,1)) SketchFeature.addGeometry(Part.Line(App.Vector(77.566010,63.292927,0),App.Vector(148.151917,63.602505,0))) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',6,2,7,1)) SketchFeature.addConstraint(Sketcher.Constraint('Horizontal',7)) SketchFeature.addConstraint(Sketcher.Constraint('Parallel',1,6)) SketchFeature.addGeometry(Part.ArcOfCircle(Part.Circle(App.Vector(192.422913,38.216347,0),App.Vector(0,0,1),45.315174),2.635158,3.602228)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',7,2,8,1)) SketchFeature.addConstraint(Sketcher.Constraint('Coincident',8,2,5,1)) #--------------------------------------------------------------------------- # define the test cases to test the FreeCAD Sketcher module #--------------------------------------------------------------------------- class SketcherSolverTestCases(unittest.TestCase): def setUp(self): self.Doc = FreeCAD.newDocument("SketchSolverTest") def testBoxCase(self): self.Box = self.Doc.addObject('Sketcher::SketchObject','SketchBox') CreateBoxSketchSet(self.Box) self.Doc.recompute() # moving a point of the sketch self.Box.movePoint(0,2,App.Vector(88.342697,28.174158,0)) # fully constrain self.Box.addConstraint(Sketcher.Constraint('DistanceX',1,2,90.0)) self.Box.addConstraint(Sketcher.Constraint('DistanceY',1,2,-50.0)) self.Doc.recompute() def testSlotCase(self): self.Slot = 
self.Doc.addObject('Sketcher::SketchObject','SketchSlot') CreateSlotPlateSet(self.Slot) self.Doc.recompute() # test if all edges created self.failUnless(len(self.Slot.Shape.Edges) == 4) CreateSlotPlateInnerSet(self.Slot) self.Doc.recompute() self.failUnless(len(self.Slot.Shape.Edges) == 9) def tearDown(self): #closing doc FreeCAD.closeDocument("SketchSolverTest") #print ("omit close document for debuging")
t0mk/ansible
refs/heads/devel
lib/ansible/modules/cloud/ovirt/ovirt_host_pm.py
8
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: ovirt_host_pm short_description: Module to manage power management of hosts in oVirt version_added: "2.3" author: "Ondra Machacek (@machacekondra)" description: - "Module to manage power management of hosts in oVirt." options: name: description: - "Name of the the host to manage." required: true aliases: ['host'] state: description: - "Should the host be present/absent." choices: ['present', 'absent'] default: present address: description: - "Address of the power management interface." username: description: - "Username to be used to connect to power management interface." password: description: - "Password of the user specified in C(username) parameter." type: description: - "Type of the power management. oVirt predefined values are I(drac5), I(ipmilan), I(rsa), I(bladecenter), I(alom), I(apc), I(apc_snmp), I(eps), I(wti), I(rsb), I(cisco_ucs), I(drac7), I(hpblade), I(ilo), I(ilo2), I(ilo3), I(ilo4), I(ilo_ssh), but user can have defined custom type." port: description: - "Power management interface port." slot: description: - "Power management slot." options: description: - "Dictionary of additional fence agent options." 
- "Additional information about options can be found at U(https://fedorahosted.org/cluster/wiki/FenceArguments)." encrypt_options: description: - "If (true) options will be encrypted when send to agent." aliases: ['encrypt'] order: description: - "Integer value specifying, by default it's added at the end." extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Add fence agent to host 'myhost' - ovirt_host_pm: name: myhost address: 1.2.3.4 options: myoption1: x myoption2: y username: admin password: admin port: 3333 type: ipmilan # Remove ipmilan fence agent with address 1.2.3.4 on host 'myhost' - ovirt_host_pm: state: absent name: myhost address: 1.2.3.4 type: ipmilan ''' RETURN = ''' id: description: ID of the agent which is managed returned: On success if agent is found. type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c agent: description: "Dictionary of all the agent attributes. Agent attributes can be found on your oVirt instance at following url: https://ovirt.example.com/ovirt-engine/api/model#types/agent." returned: On success if agent is found. 
''' import traceback try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_sdk, create_connection, equal, ovirt_full_argument_spec, search_by_name, ) class HostModule(BaseModule): def build_entity(self): return otypes.Host( power_management=otypes.PowerManagement( enabled=True, ), ) def update_check(self, entity): return equal(True, entity.power_management.enabled) class HostPmModule(BaseModule): def build_entity(self): return otypes.Agent( address=self._module.params['address'], encrypt_options=self._module.params['encrypt_options'], options=[ otypes.Option( name=name, value=value, ) for name, value in self._module.params['options'].items() ] if self._module.params['options'] else None, password=self._module.params['password'], port=self._module.params['port'], type=self._module.params['type'], username=self._module.params['username'], order=self._module.params.get('order', 100), ) def update_check(self, entity): return ( equal(self._module.params.get('address'), entity.address) and equal(self._module.params.get('encrypt_options'), entity.encrypt_options) and equal(self._module.params.get('password'), entity.password) and equal(self._module.params.get('username'), entity.username) and equal(self._module.params.get('port'), entity.port) and equal(self._module.params.get('type'), entity.type) ) def main(): argument_spec = ovirt_full_argument_spec( state=dict( choices=['present', 'absent'], default='present', ), name=dict(default=None, required=True, aliases=['host']), address=dict(default=None), username=dict(default=None), password=dict(default=None, no_log=True), type=dict(default=None), port=dict(default=None, type='int'), slot=dict(default=None), options=dict(default=None, type='dict'), encrypt_options=dict(default=None, type='bool', aliases=['encrypt']), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) 
check_sdk(module) try: auth = module.params.pop('auth') connection = create_connection(auth) hosts_service = connection.system_service().hosts_service() host = search_by_name(hosts_service, module.params['name']) fence_agents_service = hosts_service.host_service(host.id).fence_agents_service() host_pm_module = HostPmModule( connection=connection, module=module, service=fence_agents_service, ) host_module = HostModule( connection=connection, module=module, service=hosts_service, ) state = module.params['state'] if state == 'present': agent = host_pm_module.search_entity( search_params={ 'address': module.params['address'], 'type': module.params['type'], } ) ret = host_pm_module.create(entity=agent) # Enable Power Management, if it's not enabled: host_module.create(entity=host) elif state == 'absent': agent = host_pm_module.search_entity( search_params={ 'address': module.params['address'], 'type': module.params['type'], } ) ret = host_pm_module.remove(entity=agent) module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=auth.get('token') is None) if __name__ == "__main__": main()
siosio/intellij-community
refs/heads/master
python/testData/completion/superInitParams.after.py
83
class B: def __init__(self, auno=True): pass class C(B): pass c = C(auno=)
Osmose/kitsune
refs/heads/master
kitsune/customercare/tests/test_helpers.py
17
from datetime import datetime

from nose.tools import eq_

from kitsune.customercare.helpers import isotime, round_percent, utctimesince
from kitsune.sumo.tests import TestCase


def test_isotime():
    """Test isotime helper."""
    when = datetime(2009, 12, 25, 10, 11, 12)
    # NOTE(review): the expected hour (18) differs from the input hour (10) —
    # isotime presumably converts from the test timezone to UTC; confirm
    # against the helper implementation.
    eq_(isotime(when), '2009-12-25T18:11:12Z')
    # None must pass straight through unchanged.
    assert isotime(None) is None


class RoundPercentTests(TestCase):
    """Tests for round_percent."""

    def test_high_percent_int(self):
        # Large integer percentages keep no decimal places.
        eq_('90', str(round_percent(90)))

    def test_high_percent_float(self):
        # Large float percentages drop their fractional part.
        eq_('90', str(round_percent(90.3456)))

    def test_low_percent_int(self):
        # Small integer percentages gain one decimal place.
        eq_('6.0', str(round_percent(6)))

    def test_low_percent_float(self):
        # Small float percentages are rounded to one decimal place.
        eq_('6.3', str(round_percent(6.299)))


class UTCTimesinceTests(TestCase):
    """Tests for the utctimesince function

    These are largely copied from sumo.tests.test_helpers.TimesinceTests
    """

    def test_none(self):
        """If None is passed in, utctimesince returns ''."""
        eq_('', utctimesince(None))

    def test_trunc(self):
        """Assert it returns only the most significant time division."""
        eq_('1 year ago',
            utctimesince(datetime(2000, 1, 2), now=datetime(2001, 2, 3)))

    def test_future(self):
        """Test future date and omitting "now" kwarg"""
        eq_('', utctimesince(datetime(9999, 1, 2)))
joshbohde/scikit-learn
refs/heads/master
sklearn/linear_model/tests/test_omp.py
1
# Author: Vlad Niculae
# License: BSD style
#
# Tests for Orthogonal Matching Pursuit (orthogonal_mp and friends).

import warnings

import numpy as np
from nose.tools import assert_raises
from numpy.testing import assert_equal, assert_array_almost_equal

from .. import orthogonal_mp, orthogonal_mp_gram, OrthogonalMatchingPursuit
from ...utils.fixes import count_nonzero
from ...datasets import make_sparse_coded_signal

# Shared module-level fixture: a deterministic sparse coded signal
# (random_state=0) plus its precomputed Gram matrix G = X'X and
# correlation Xy = X'y, reused by every test below.
n_samples, n_features, n_nonzero_coefs, n_targets = 20, 30, 5, 3
y, X, gamma = make_sparse_coded_signal(n_targets, n_features, n_samples,
                                       n_nonzero_coefs, random_state=0)
G, Xy = np.dot(X.T, X), np.dot(X.T, y)
# this makes X (n_samples, n_features)
# and y (n_samples, 3)


def test_correct_shapes():
    # The coefficient array must match the dictionary size, both for a
    # single target vector and for a multi-target matrix.
    assert_equal(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5).shape,
                 (n_features,))
    assert_equal(orthogonal_mp(X, y, n_nonzero_coefs=5).shape,
                 (n_features, 3))


def test_correct_shapes_gram():
    # Same shape contract for the precomputed-Gram variant.
    assert_equal(orthogonal_mp_gram(G, Xy[:, 0], n_nonzero_coefs=5).shape,
                 (n_features,))
    assert_equal(orthogonal_mp_gram(G, Xy, n_nonzero_coefs=5).shape,
                 (n_features, 3))


def test_n_nonzero_coefs():
    # The solution may never use more atoms than requested.
    assert count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5)) <= 5
    assert count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5,
                                       precompute_gram=True)) <= 5


def test_eps():
    # With a residual tolerance 'eps', the squared residual of the
    # returned solution must not exceed eps.
    eps = 0.5
    gamma = orthogonal_mp(X, y[:, 0], eps=eps)
    gamma_gram = orthogonal_mp(X, y[:, 0], eps=eps, precompute_gram=True)
    assert np.sum((y[:, 0] - np.dot(X, gamma)) ** 2) <= eps
    assert np.sum((y[:, 0] - np.dot(X, gamma_gram)) ** 2) <= eps


def test_with_without_gram():
    # Precomputing the Gram matrix must not change the result.
    assert_array_almost_equal(
        orthogonal_mp(X, y, n_nonzero_coefs=5),
        orthogonal_mp(X, y, n_nonzero_coefs=5, precompute_gram=True))


def test_with_without_gram_eps():
    # Same equivalence when stopping on the residual tolerance.
    assert_array_almost_equal(
        orthogonal_mp(X, y, eps=1.),
        orthogonal_mp(X, y, eps=1., precompute_gram=True))


def test_unreachable_accuracy():
    # eps=0 cannot be reached; the solver should fall back to using all
    # atoms (equivalent to n_nonzero_coefs=n_features) and warn about it.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        assert_array_almost_equal(
            orthogonal_mp(X, y, eps=0),
            orthogonal_mp(X, y, n_nonzero_coefs=n_features))
        assert_array_almost_equal(
            orthogonal_mp(X, y, eps=0, precompute_gram=True),
            orthogonal_mp(X, y, precompute_gram=True,
                          n_nonzero_coefs=n_features))
        assert len(w) > 0  # warnings should be raised


def test_bad_input():
    # Negative tolerances, negative coefficient counts, and more
    # coefficients than dictionary atoms must all be rejected.
    assert_raises(ValueError, orthogonal_mp, X, y, eps=-1)
    assert_raises(ValueError, orthogonal_mp, X, y, n_nonzero_coefs=-1)
    assert_raises(ValueError, orthogonal_mp, X, y,
                  n_nonzero_coefs=n_features + 1)
    assert_raises(ValueError, orthogonal_mp_gram, G, Xy, eps=-1)
    assert_raises(ValueError, orthogonal_mp_gram, G, Xy, n_nonzero_coefs=-1)
    assert_raises(ValueError, orthogonal_mp_gram, G, Xy,
                  n_nonzero_coefs=n_features + 1)


def test_perfect_signal_recovery():
    # XXX: use signal generator
    # With the true sparsity level, OMP should recover the exact support
    # and (approximately) the exact coefficients of the generating code.
    idx, = gamma[:, 0].nonzero()
    gamma_rec = orthogonal_mp(X, y[:, 0], 5)
    gamma_gram = orthogonal_mp_gram(G, Xy[:, 0], 5)
    assert_equal(idx, np.flatnonzero(gamma_rec))
    assert_equal(idx, np.flatnonzero(gamma_gram))
    assert_array_almost_equal(gamma[:, 0], gamma_rec, decimal=2)
    assert_array_almost_equal(gamma[:, 0], gamma_gram, decimal=2)


def test_estimator_shapes():
    # The estimator wrapper must expose coef_/intercept_ with shapes
    # matching single- and multi-target fits, with or without a
    # user-supplied Gram matrix.
    omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
    omp.fit(X, y[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_.shape, ())
    assert count_nonzero(omp.coef_) <= n_nonzero_coefs
    omp.fit(X, y)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets,))
    assert count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs
    omp.fit(X, y, Gram=G, Xy=Xy[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_.shape, ())
    assert count_nonzero(omp.coef_) <= n_nonzero_coefs
    omp.fit(X, y, Gram=G, Xy=Xy)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets,))
    assert count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs


def test_identical_regressors():
    # Duplicate an atom so that the dictionary contains two identical
    # regressors; OMP is expected to warn exactly once about it.
    newX = X.copy()
    newX[:, 1] = newX[:, 0]
    gamma = np.zeros(n_features)
    gamma[0] = gamma[1] = 1.
    newy = np.dot(newX, gamma)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        orthogonal_mp(newX, newy, 2)
        assert len(w) == 1
liris/websocket-client
refs/heads/master
setup.py
1
import sys

from setuptools import setup
import pkg_resources

VERSION = "0.57.0"
NAME = "websocket_client"

# Base runtime dependencies; platform/version specific extras are
# folded in below.
install_requires = ["six"]
tests_require = []
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
    # unittest2 backports the modern unittest API to Python 2.6.
    tests_require.append('unittest2==0.8.0')

# Python versions whose ssl module lacks proper hostname matching:
# 2.6 and the early 2.7.x point releases (2.7.0 - 2.7.9).
insecure_pythons = '2.6, ' + ', '.join("2.7.{pv}".format(pv=pv)
                                       for pv in range(10))

# PEP 508-style environment markers (as extras_require keys) mapping
# "insecure" interpreters to their backport dependencies.
extras_require = {
    ':python_version in "{ips}"'.format(ips=insecure_pythons):
        ['backports.ssl_match_hostname'],
    ':python_version in "2.6"': ['argparse'],
}

try:
    if 'bdist_wheel' not in sys.argv:
        # For non-wheel builds, evaluate the markers now and move the
        # matching extras into install_requires directly.
        for key, value in extras_require.items():
            if key.startswith(':') and pkg_resources.evaluate_marker(key[1:]):
                install_requires.extend(value)
except Exception:
    # Marker evaluation failed (e.g. very old setuptools): fall back to
    # installing every conditional dependency unconditionally.
    import logging
    logging.getLogger(__name__).exception(
        'Something went wrong calculating platform specific dependencies, so '
        "you're getting them all!"
    )
    for key, value in extras_require.items():
        if key.startswith(':'):
            install_requires.extend(value)

setup(
    name=NAME,
    version=VERSION,
    description="WebSocket client for Python. hybi13 is supported.",
    long_description=open("README.rst").read(),
    author="liris",
    author_email="liris.pp@gmail.com",
    license="BSD",
    url="https://github.com/websocket-client/websocket-client.git",
    python_requires='>=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
    classifiers=[
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX",
        "Operating System :: Microsoft :: Windows",
        "Topic :: Internet",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Intended Audience :: Developers",
    ],
    keywords='websockets',
    scripts=["bin/wsdump.py"],
    install_requires=install_requires,
    packages=["websocket", "websocket.tests"],
    package_data={
        'websocket.tests': ['data/*.txt']
    },
    tests_require=tests_require,
    test_suite="websocket.tests"
)
petemounce/ansible
refs/heads/devel
lib/ansible/template/template.py
70
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import jinja2 __all__ = ['AnsibleJ2Template'] class AnsibleJ2Template(jinja2.environment.Template): ''' A helper class, which prevents Jinja2 from running _jinja2_vars through dict(). Without this, {% include %} and similar will create new contexts unlike the special one created in template_from_file. This ensures they are all alike, except for potential locals. ''' def new_context(self, vars=None, shared=False, locals=None): return self.environment.context_class(self.environment, vars.add_locals(locals), self.name, self.blocks)
RohitDas/cubeproject
refs/heads/master
manage.py
27
#!/usr/bin/env python
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys


def main():
    """Run Django's command-line utility with this project's settings."""
    # Point Django at the project settings unless the caller already
    # configured DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
    # Imported lazily, after the environment is set up, so Django picks
    # up the correct settings module.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
Gabrielcarvfer/NS3
refs/heads/master
src/core/examples/sample-rng-plot.py
12
# -*- Mode:Python; -*-
# /*
#  * This program is free software; you can redistribute it and/or modify
#  * it under the terms of the GNU General Public License version 2 as
#  * published by the Free Software Foundation
#  *
#  * This program is distributed in the hope that it will be useful,
#  * but WITHOUT ANY WARRANTY; without even the implied warranty of
#  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  * GNU General Public License for more details.
#  *
#  * You should have received a copy of the GNU General Public License
#  * along with this program; if not, write to the Free Software
#  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#  */

## @file
#  @ingroup core-examples
#  @ingroup randomvariable
#  Demonstrate use of ns-3 as a random number generator integrated with
#  plotting tools.
#
#  This is adapted from Gustavo Carneiro's ns-3 tutorial

import numpy as np
import matplotlib.pyplot as plt
import ns.core

# mu, var = 100, 225
# Configure an ns-3 normal random variable with mean 100 and variance 225
# (i.e. sigma = 15).
rng = ns.core.NormalRandomVariable()
rng.SetAttribute("Mean", ns.core.DoubleValue(100.0))
rng.SetAttribute("Variance", ns.core.DoubleValue(225.0))

# Draw 10000 samples from the ns-3 RNG.
x = [rng.GetValue() for _ in range(10000)]

# the histogram of the data
# Fix: 'density=True' replaces 'normed=1' — the 'normed' keyword was
# deprecated in matplotlib 2.1 and removed in 3.1; the semantics
# (normalize the histogram to a probability density) are identical.
n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)

plt.title('ns-3 histogram')
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show()
bensternthal/bedrock
refs/heads/master
bedrock/redirects/urls.py
5
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Redirect table for bedrock (www.mozilla.org): maps legacy URL paths to
# their current pages or external destinations.  Each group of entries cites
# the Bugzilla bug that motivated it.

from django.conf import settings
from django.conf.urls import patterns
from bedrock.base.helpers import static
from pipeline.collector import default_collector
from pipeline.packager import Packager

from util import redirect


def tabzilla_css_redirect(r):
    """Return the static URL of the compiled tabzilla CSS bundle.

    With DEBUG off, the pipeline's pre-built output filename is used; with
    DEBUG on, assets are collected and compiled on the fly so the redirect
    always points at a file that exists in development.  The request
    argument `r` is unused.
    """
    packer = Packager()
    tabzilla_package = packer.package_for('css', 'tabzilla')
    if not settings.DEBUG:
        file_path = tabzilla_package.output_filename
    else:
        # Dev mode: gather source assets and compile the package now;
        # the first compiled path is the stylesheet to serve.
        default_collector.collect()
        paths = packer.compile(tabzilla_package.paths)
        file_path = paths[0]
    return static(file_path)


urlpatterns = patterns(
    '',
    redirect(r'^b2g', 'firefox.partners.index'),
    redirect(r'^b2g/faq', 'firefox.partners.index'),
    redirect(r'^b2g/about', 'firefox.partners.index'),

    # Bug 781914
    redirect(r'^contribute/areas.html$', 'mozorg.contribute'),
    redirect(r'^contribute/universityambassadors',
             'mozorg.contribute.studentambassadors.landing'),

    # Bug 1144949
    redirect(r'^contribute/page/?$',
             'https://wiki.mozilla.org/Webdev/GetInvolved/mozilla.org'),

    # Bug 763665, 1148127
    redirect(r'^projects/$', 'firefox.family.index'),

    # Bug 792185 Brand Toolkit -> Style Guide
    redirect(r'^firefox/brand/$', 'styleguide.home'),
    redirect(r'^firefox/brand/platform/$',
             'styleguide.identity.firefox-family-platform'),
    redirect(r'^firefox/brand/identity/$',
             'styleguide.identity.firefox-branding'),
    redirect(r'^firefox/brand/identity/channel-logos/$',
             'styleguide.identity.firefox-channels'),
    redirect(r'^firefox/brand/identity/wordmarks/$',
             'styleguide.identity.firefox-wordmarks'),
    redirect(r'^firefox/brand/identity/typefaces/$',
             'styleguide.communications.typefaces'),
    redirect(r'^firefox/brand/artwork/$', 'styleguide.home'),
    redirect(r'^firefox/brand/artwork/gear/$', 'styleguide.home'),
    redirect(r'^firefox/brand/website/$',
             'styleguide.websites.sandstone-intro'),
    redirect(r'^firefox/brand/website/domain-strategy/$',
             'styleguide.websites.domains-overview'),
    redirect(r'^firefox/brand/copy/$',
             'styleguide.communications.copy-tone'),
    redirect(r'^firefox/brand/copy/l10n/$',
             'styleguide.communications.translation'),
    redirect(r'^firefox/brand/copy/rules/$',
             'styleguide.communications.copy-rules'),
    redirect(r'^firefox/brand/downloads/$', 'styleguide.home'),

    # Bug 1071318
    redirect(r'^firefox/mobile/$', 'firefox.android.index'),

    # Bug 804810 Identity Guidelines -> Style Guide
    redirect(r'^foundation/identity-guidelines/index.html',
             'styleguide.home'),
    redirect(r'^foundation/identity-guidelines/mozilla-foundation.html',
             'styleguide.identity.mozilla-branding'),
    redirect(r'^foundation/identity-guidelines/thunderbird.html',
             'styleguide.identity.thunderbird-logo'),

    # Bug 945474 - delete Marketplace marketing product page
    # and redirect
    redirect(r'^apps/$', 'https://marketplace.firefox.com/'),

    # Bug 800467 /apps/partners ->
    # marketplace.firefox.com/developers
    redirect(r'apps/partners/$',
             'https://marketplace.firefox.com/developers/'),

    # Bug 815527 /m/privacy.html -> /privacy/firefox/
    redirect(r'^m/privacy.html$', 'privacy.notices.firefox'),

    # Bug 1109318 /privacy/you -> privacy/tips/
    redirect(r'^privacy/you/$', 'privacy.privacy-day'),

    # Bug 821047 /about/mission.html -> /mission/
    redirect(r'^about/mission.html$', '/mission/'),

    # Bug 1171763 - delete researchers and projects and redirect
    redirect(r'^research/.+', '/research/'),

    # Bug 800298 /webmaker/ -> wm.o and /webmaker/videos/ ->
    # wm.o/videos/
    redirect(r'webmaker/$', 'https://webmaker.org'),
    redirect(r'webmaker/videos/$', 'https://webmaker.org/videos/'),

    # Bug 819317 /gameon/ -> gameon.m.o
    redirect(r'gameon/$', 'https://gameon.mozilla.org'),

    # Tabzilla
    redirect(r'tabzilla/media/js/tabzilla\.js$', 'tabzilla'),
    # CSS target is computed per-request (see tabzilla_css_redirect above).
    redirect(r'tabzilla/media/css/tabzilla\.css$', tabzilla_css_redirect),

    # Bug 822817 /telemetry/ ->
    # https://wiki.mozilla.org/Telemetry/FAQ
    redirect(r'telemetry/$',
             'https://wiki.mozilla.org/Telemetry/FAQ'),

    # Bug 854561 - move /projects/mozilla-based/ to
    # /about/mozilla-based/
    redirect(r'^projects/mozilla-based/$', '/about/mozilla-based/'),

    # Bug 867773 - Redirect the Persona "Developer FAQ" link
    # to MDN
    redirect(r'^persona/developer-faq/$',
             'https://developer.mozilla.org/persona'),

    # Bug 981176 - For now we'll hard-code a redirect to 1.3
    # In the future this should automatically go to the
    # latest version's notes
    redirect(r'^firefox/os/notes/$', '/firefox/os/notes/1.3/'),

    # Bug 896585 - Send /contact/ to the spaces landing
    redirect(r'^contact/$', '/contact/spaces/'),

    # Bug 1102336 /foundation/annualreport/ ->
    # /foundation/annualreport/2013/
    redirect(r'^foundation/annualreport/$',
             'foundation.annualreport.2013.index',
             name='foundation.annualreport'),

    # Bug 997577 - /legal/ -> /about/legal/
    redirect(r'^legal/fraud-report/$', '/about/legal/fraud-report/'),
    redirect(r'^legal/eula/$', '/about/legal/eula/'),
    redirect(r'^legal/eula/firefox-2/$', '/about/legal/eula/firefox-2/'),
    redirect(r'^legal/eula/firefox-3/$', '/about/legal/eula/firefox-3/'),

    # Bug 1073269 /dnt/ -> /firefox/dnt/
    redirect(r'^dnt/$', 'firefox.dnt'),
)
adfinis-forks/aptly
refs/heads/master
system/t09_repo/drop.py
14
from lib import BaseTest


class DropRepo1Test(BaseTest):
    """
    drop repo: regular drop
    """
    fixtureCmds = [
        "aptly repo create repo1",
    ]
    runCmd = "aptly repo drop repo1"

    def check(self):
        # The drop itself must succeed, and afterwards the repo must be
        # gone: `aptly repo show` is expected to exit non-zero.
        self.check_output()
        self.check_cmd_output("aptly repo show repo1", "repo-show", expected_code=1)


class DropRepo2Test(BaseTest):
    """
    drop repo: in use by snapshots
    """
    fixtureCmds = [
        "aptly repo create repo2",
        "aptly repo add repo2 ${files}",
        "aptly snapshot create local from repo repo2",
    ]
    # Dropping a repo that a snapshot was created from must fail
    # without --force.
    runCmd = "aptly repo drop repo2"
    expectedCode = 1


class DropRepo3Test(BaseTest):
    """
    drop repo: force
    """
    fixtureCmds = [
        "aptly repo create repo3",
        "aptly repo add repo3 ${files}",
        "aptly snapshot create local from repo repo3",
    ]
    # --force overrides the snapshot-reference check exercised above.
    runCmd = "aptly repo drop --force repo3"


class DropRepo4Test(BaseTest):
    """
    drop repo: no such repo
    """
    # Dropping a repo that was never created is an error.
    runCmd = "aptly repo drop repo4"
    expectedCode = 1


class DropRepo5Test(BaseTest):
    """
    drop repo: published
    """
    fixtureCmds = [
        "aptly repo create repo5",
        "aptly repo add repo5 ${files}",
        "aptly publish repo -skip-signing -distribution=squeeze repo5",
    ]
    # A currently-published repo cannot be dropped.
    runCmd = "aptly repo drop repo5"
    expectedCode = 1
hbrunn/OCB
refs/heads/8.0
addons/base_import/tests/__init__.py
317
from . import test_cases