repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
emidln/django_roa | env/lib/python2.7/site-packages/django/contrib/contenttypes/tests.py | 152 | 2951 | from django import db
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.contenttypes.views import shortcut
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpRequest
from django.test import TestCase
class ContentTypesTests(TestCase):
    """Tests for the ContentType lookup cache and the contenttypes shortcut view."""

    def setUp(self):
        # First, let's make sure we're dealing with a blank slate (and that
        # DEBUG is on so that queries get logged to db.connection.queries).
        self.old_DEBUG = settings.DEBUG
        self.old_Site_meta_installed = Site._meta.installed
        settings.DEBUG = True
        ContentType.objects.clear_cache()
        db.reset_queries()

    def tearDown(self):
        # Restore everything mutated in setUp()/test_shortcut_view().
        settings.DEBUG = self.old_DEBUG
        Site._meta.installed = self.old_Site_meta_installed
        ContentType.objects.clear_cache()

    def test_lookup_cache(self):
        """
        Make sure that the content type cache (see ContentTypeManager)
        works correctly. Lookups for a particular content type -- by model or
        by ID -- should hit the database only on the first lookup.
        """
        # At this point, a lookup for a ContentType should hit the DB
        ContentType.objects.get_for_model(ContentType)
        self.assertEqual(1, len(db.connection.queries))

        # A second hit, though, won't hit the DB, nor will a lookup by ID
        ct = ContentType.objects.get_for_model(ContentType)
        self.assertEqual(1, len(db.connection.queries))
        ContentType.objects.get_for_id(ct.id)
        self.assertEqual(1, len(db.connection.queries))

        # Once we clear the cache, another lookup will again hit the DB.
        # (A stray no-op `len(db.connection.queries)` statement that sat here
        # has been removed -- it had no effect.)
        ContentType.objects.clear_cache()
        ContentType.objects.get_for_model(ContentType)
        self.assertEqual(2, len(db.connection.queries))

    def test_shortcut_view(self):
        """
        Check that the shortcut view (used for the admin "view on site"
        functionality) returns a complete URL regardless of whether the sites
        framework is installed
        """
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "Example.com",
            "SERVER_PORT": "80",
        }
        from django.contrib.auth.models import User
        user_ct = ContentType.objects.get_for_model(User)
        obj = User.objects.create(username="john")

        if Site._meta.installed:
            current_site = Site.objects.get_current()
            response = shortcut(request, user_ct.id, obj.id)
            self.assertEqual("http://%s/users/john/" % current_site.domain,
                             response._headers.get("location")[1])

        # With the sites framework "uninstalled", the request host is used.
        Site._meta.installed = False
        response = shortcut(request, user_ct.id, obj.id)
        self.assertEqual("http://Example.com/users/john/",
                         response._headers.get("location")[1])
| bsd-3-clause |
jnerin/ansible | lib/ansible/modules/network/panos/panos_security_rule.py | 15 | 20157 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_security_rule
short_description: Create security rule policy on PAN-OS devices or Panorama management console.
description:
- Security policies allow you to enforce rules and take action, and can be as general or specific as needed.
The policy rules are compared against the incoming traffic in sequence, and because the first rule that matches the traffic is applied,
the more specific rules must precede the more general ones.
author: "Ivan Bojer (@ivanbojer), Robert Hagen (@rnh556)"
version_added: "2.4"
requirements:
- pan-python can be obtained from PyPi U(https://pypi.python.org/pypi/pan-python)
- pandevice can be obtained from PyPi U(https://pypi.python.org/pypi/pandevice)
- xmltodict can be obtained from PyPi U(https://pypi.python.org/pypi/xmltodict)
notes:
- Checkmode is not supported.
- Panorama is supported.
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device being configured.
required: true
username:
description:
- Username credentials to use for auth unless I(api_key) is set.
default: "admin"
password:
description:
- Password credentials to use for auth unless I(api_key) is set.
required: true
api_key:
description:
- API key that can be used instead of I(username)/I(password) credentials.
operation:
description:
- The action to be taken. Supported values are I(add)/I(update)/I(find)/I(delete).
default: 'add'
rule_name:
description:
- Name of the security rule.
required: true
rule_type:
description:
- Type of security rule (version 6.1 of PanOS and above).
default: "universal"
description:
description:
- Description for the security rule.
default: "None"
tag_name:
description:
- Administrative tags that can be added to the rule. Note, tags must be already defined.
default: "None"
source_zone:
description:
- List of source zones.
default: "any"
destination_zone:
description:
- List of destination zones.
default: "any"
source_ip:
description:
- List of source addresses.
default: "any"
source_user:
description:
- Use users to enforce policy for individual users or a group of users.
default: "any"
hip_profiles:
description: >
- If you are using GlobalProtect with host information profile (HIP) enabled, you can also base the policy
on information collected by GlobalProtect. For example, the user access level can be determined by the HIP that
notifies the firewall about the user's local configuration.
default: "any"
destination_ip:
description:
- List of destination addresses.
default: "any"
application:
description:
- List of applications.
default: "any"
service:
description:
- List of services.
default: "application-default"
log_start:
description:
- Whether to log at session start.
default: false
log_end:
description:
- Whether to log at session end.
default: true
action:
description:
- Action to apply once a rule matches.
default: "allow"
group_profile:
description: >
- Security profile group that is already defined in the system. This property supersedes antivirus,
vulnerability, spyware, url_filtering, file_blocking, data_filtering, and wildfire_analysis properties.
default: None
antivirus:
description:
- Name of the already defined antivirus profile.
default: None
vulnerability:
description:
- Name of the already defined vulnerability profile.
default: None
spyware:
description:
- Name of the already defined spyware profile.
default: None
url_filtering:
description:
- Name of the already defined url_filtering profile.
default: None
file_blocking:
description:
- Name of the already defined file_blocking profile.
default: None
data_filtering:
description:
- Name of the already defined data_filtering profile.
default: None
wildfire_analysis:
description:
- Name of the already defined wildfire_analysis profile.
default: None
devicegroup:
description: >
- Device groups are used for the Panorama interaction with Firewall(s). The group must exist on Panorama.
If the device group is not defined, we assume that we are contacting a Firewall.
default: None
commit:
description:
- Commit configuration if changed.
default: true
'''
EXAMPLES = '''
- name: add an SSH inbound rule to devicegroup
panos_security_rule:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
operation: 'add'
rule_name: 'SSH permit'
description: 'SSH rule test'
tag_name: ['ProjectX']
source_zone: ['public']
destination_zone: ['private']
source: ['any']
source_user: ['any']
destination: ['1.1.1.1']
category: ['any']
application: ['ssh']
service: ['application-default']
hip_profiles: ['any']
action: 'allow'
devicegroup: 'Cloud Edge'
- name: add a rule to allow HTTP multimedia only from CDNs
panos_security_rule:
ip_address: '10.5.172.91'
username: 'admin'
password: 'paloalto'
operation: 'add'
rule_name: 'HTTP Multimedia'
description: 'Allow HTTP multimedia only to host at 1.1.1.1'
source_zone: ['public']
destination_zone: ['private']
source: ['any']
source_user: ['any']
destination: ['1.1.1.1']
category: ['content-delivery-networks']
application: ['http-video', 'http-audio']
service: ['service-http', 'service-https']
hip_profiles: ['any']
action: 'allow'
- name: add a more complex rule that uses security profiles
panos_security_rule:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
operation: 'add'
rule_name: 'Allow HTTP w profile'
log_start: false
log_end: true
action: 'allow'
antivirus: 'default'
vulnerability: 'default'
spyware: 'default'
url_filtering: 'default'
wildfire_analysis: 'default'
- name: delete a devicegroup security rule
panos_security_rule:
ip_address: '{{ ip_address }}'
api_key: '{{ api_key }}'
operation: 'delete'
rule_name: 'Allow telnet'
devicegroup: 'DC Firewalls'
- name: find a specific security rule
panos_security_rule:
ip_address: '{{ ip_address }}'
password: '{{ password }}'
operation: 'find'
rule_name: 'Allow RDP to DCs'
register: result
- debug: msg='{{result.stdout_lines}}'
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_exception
# Optional third-party dependencies. Import failures are not fatal here;
# main() checks HAS_LIB and fails the module with a clear message instead.
try:
    import pan.xapi
    from pan.xapi import PanXapiError
    import pandevice
    from pandevice import base
    from pandevice import firewall
    from pandevice import panorama
    from pandevice import objects
    from pandevice import policies
    import xmltodict
    import json
    HAS_LIB = True  # all libraries available
except ImportError:
    HAS_LIB = False  # reported via module.fail_json() in main()
def get_devicegroup(device, devicegroup):
    """Return the Panorama DeviceGroup named *devicegroup*, or False if absent."""
    for candidate in device.refresh_devices():
        # Panorama can return other object types; only device groups count.
        if isinstance(candidate, pandevice.panorama.DeviceGroup) and candidate.name == devicegroup:
            return candidate
    return False
def get_rulebase(device, devicegroup):
    """Attach and refresh the appropriate rulebase for a firewall or Panorama.

    Returns the refreshed rulebase object, or False for an unknown device type.
    """
    if isinstance(device, pandevice.firewall.Firewall):
        # Direct firewall: operate on its local rulebase.
        rulebase = pandevice.policies.Rulebase()
        device.add(rulebase)
    elif isinstance(device, pandevice.panorama.Panorama):
        # Panorama: rules live in the device group's pre-rulebase.
        dev_group = panorama.DeviceGroup(devicegroup)
        device.add(dev_group)
        rulebase = policies.PreRulebase()
        dev_group.add(rulebase)
    else:
        return False
    policies.SecurityRule.refreshall(rulebase)
    return rulebase
def find_rule(rulebase, rule_name):
    """Look up *rule_name* in the rulebase; return the rule object or False."""
    match = rulebase.find(rule_name)
    return match if match else False
def rule_is_match(propose_rule, current_rule):
    """Return True when the proposed rule is equivalent to the current one.

    Scalar attributes must compare equal; list attributes are compared as
    sets, so ordering differences do not count as a mismatch.
    """
    scalar_attrs = ('name', 'description', 'group_profile', 'antivirus', 'vulnerability',
                    'spyware', 'url_filtering', 'file_blocking', 'data_filtering',
                    'wildfire_analysis', 'type', 'action', 'tag', 'log_start', 'log_end')
    list_attrs = ('tozone', 'fromzone', 'source', 'source_user', 'destination', 'category',
                  'application', 'service', 'hip_profiles')
    if any(getattr(propose_rule, attr, None) != getattr(current_rule, attr, None)
           for attr in scalar_attrs):
        return False
    return all(set(getattr(propose_rule, attr, [])) == set(getattr(current_rule, attr, []))
               for attr in list_attrs)
def create_security_rule(**kwargs):
    """Build a pandevice SecurityRule object from the module parameters.

    Only parameters carrying a truthy value are applied. A configured
    group profile supersedes the individual security profiles.
    """
    security_rule = policies.SecurityRule(
        name=kwargs['rule_name'],
        description=kwargs['description'],
        fromzone=kwargs['source_zone'],
        source=kwargs['source_ip'],
        source_user=kwargs['source_user'],
        hip_profiles=kwargs['hip_profiles'],
        tozone=kwargs['destination_zone'],
        destination=kwargs['destination_ip'],
        application=kwargs['application'],
        service=kwargs['service'],
        category=kwargs['category'],
        log_start=kwargs['log_start'],
        log_end=kwargs['log_end'],
        action=kwargs['action'],
        type=kwargs['rule_type']
    )
    # BUGFIX: main() always passes every keyword (often as None), so the old
    # membership tests (`'group_profile' in kwargs`) were always true: `group`
    # could be set to None and the individual-profile branch was unreachable.
    # Test the values instead of key presence.
    if kwargs.get('tag_name'):
        security_rule.tag = kwargs['tag_name']
    # Profile settings: a group profile supersedes the individual profiles.
    if kwargs.get('group_profile'):
        security_rule.group = kwargs['group_profile']
    else:
        if kwargs.get('antivirus'):
            security_rule.virus = kwargs['antivirus']
        if kwargs.get('vulnerability'):
            security_rule.vulnerability = kwargs['vulnerability']
        if kwargs.get('spyware'):
            security_rule.spyware = kwargs['spyware']
        if kwargs.get('url_filtering'):
            security_rule.url_filtering = kwargs['url_filtering']
        if kwargs.get('file_blocking'):
            security_rule.file_blocking = kwargs['file_blocking']
        if kwargs.get('data_filtering'):
            security_rule.data_filtering = kwargs['data_filtering']
        if kwargs.get('wildfire_analysis'):
            security_rule.wildfire_analysis = kwargs['wildfire_analysis']
    return security_rule
def add_rule(rulebase, sec_rule):
    """Attach *sec_rule* to *rulebase* and create it on the device.

    Returns True on success, False when no rulebase is available.
    """
    if not rulebase:
        return False
    rulebase.add(sec_rule)
    sec_rule.create()
    return True
def update_rule(rulebase, nat_rule):
    """Attach *nat_rule* to *rulebase* and apply it in place of the old rule.

    Returns True on success, False when no rulebase is available.
    """
    if not rulebase:
        return False
    rulebase.add(nat_rule)
    nat_rule.apply()
    return True
def main():
    """Module entry point: dispatch add/update/find/delete of a security rule."""
    argument_spec = dict(
        ip_address=dict(required=True),
        password=dict(no_log=True),
        username=dict(default='admin'),
        api_key=dict(no_log=True),
        operation=dict(default='add', choices=['add', 'update', 'delete', 'find']),
        rule_name=dict(required=True),
        description=dict(default=''),
        tag_name=dict(type='list'),
        destination_zone=dict(type='list', default=['any']),
        source_zone=dict(type='list', default=['any']),
        source_ip=dict(type='list', default=["any"]),
        source_user=dict(type='list', default=['any']),
        destination_ip=dict(type='list', default=["any"]),
        category=dict(type='list', default=['any']),
        application=dict(type='list', default=['any']),
        service=dict(type='list', default=['application-default']),
        hip_profiles=dict(type='list', default=['any']),
        group_profile=dict(),
        antivirus=dict(),
        vulnerability=dict(),
        spyware=dict(),
        url_filtering=dict(),
        file_blocking=dict(),
        data_filtering=dict(),
        wildfire_analysis=dict(),
        log_start=dict(type='bool', default=False),
        log_end=dict(type='bool', default=True),
        rule_type=dict(default='universal'),
        action=dict(default='allow'),
        devicegroup=dict(),
        commit=dict(type='bool', default=True)
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
                           required_one_of=[['api_key', 'password']])
    # Fail early if the optional pan-python/pandevice/xmltodict stack is missing.
    if not HAS_LIB:
        module.fail_json(msg='Missing required libraries.')

    # Unpack every module parameter into a local for readability below.
    ip_address = module.params["ip_address"]
    password = module.params["password"]
    username = module.params['username']
    api_key = module.params['api_key']
    operation = module.params['operation']
    rule_name = module.params['rule_name']
    description = module.params['description']
    tag_name = module.params['tag_name']
    source_zone = module.params['source_zone']
    source_ip = module.params['source_ip']
    source_user = module.params['source_user']
    hip_profiles = module.params['hip_profiles']
    destination_zone = module.params['destination_zone']
    destination_ip = module.params['destination_ip']
    application = module.params['application']
    service = module.params['service']
    category = module.params['category']
    log_start = module.params['log_start']
    log_end = module.params['log_end']
    action = module.params['action']
    group_profile = module.params['group_profile']
    antivirus = module.params['antivirus']
    vulnerability = module.params['vulnerability']
    spyware = module.params['spyware']
    url_filtering = module.params['url_filtering']
    file_blocking = module.params['file_blocking']
    data_filtering = module.params['data_filtering']
    wildfire_analysis = module.params['wildfire_analysis']
    rule_type = module.params['rule_type']
    devicegroup = module.params['devicegroup']
    commit = module.params['commit']

    # Create the device with the appropriate pandevice type
    device = base.PanDevice.create_from_device(ip_address, username, password, api_key=api_key)

    # If Panorama, validate the devicegroup
    dev_group = None
    if devicegroup and isinstance(device, panorama.Panorama):
        dev_group = get_devicegroup(device, devicegroup)
        if dev_group:
            device.add(dev_group)
        else:
            module.fail_json(msg='\'%s\' device group not found in Panorama. Is the name correct?' % devicegroup)

    # Get the rulebase
    # NOTE(review): dev_group is a DeviceGroup object here, while get_rulebase()
    # passes it to panorama.DeviceGroup() as if it were a name -- confirm.
    rulebase = get_rulebase(device, dev_group)

    # Which action shall we take on the object?
    if operation == "find":
        # Search for the object
        match = find_rule(rulebase, rule_name)
        # If found, format and return the result
        if match:
            match_dict = xmltodict.parse(match.element_str())
            module.exit_json(
                stdout_lines=json.dumps(match_dict, indent=2),
                msg='Rule matched'
            )
        else:
            module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
    elif operation == "delete":
        # Search for the object
        match = find_rule(rulebase, rule_name)
        # If found, delete it
        if match:
            try:
                # NOTE(review): the rule is removed only when commit=True,
                # yet changed=True is reported either way -- confirm intent.
                if commit:
                    match.delete()
            except PanXapiError:
                exc = get_exception()
                module.fail_json(msg=exc.message)
            module.exit_json(changed=True, msg='Rule \'%s\' successfully deleted' % rule_name)
        else:
            module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
    elif operation == "add":
        # Build the candidate rule from the module parameters.
        new_rule = create_security_rule(
            rule_name=rule_name,
            description=description,
            tag_name=tag_name,
            source_zone=source_zone,
            destination_zone=destination_zone,
            source_ip=source_ip,
            source_user=source_user,
            destination_ip=destination_ip,
            category=category,
            application=application,
            service=service,
            hip_profiles=hip_profiles,
            group_profile=group_profile,
            antivirus=antivirus,
            vulnerability=vulnerability,
            spyware=spyware,
            url_filtering=url_filtering,
            file_blocking=file_blocking,
            data_filtering=data_filtering,
            wildfire_analysis=wildfire_analysis,
            log_start=log_start,
            log_end=log_end,
            rule_type=rule_type,
            action=action
        )
        # Search for the rule. Fail if found.
        match = find_rule(rulebase, rule_name)
        if match:
            # Identical rule already present: idempotent no-op.
            if rule_is_match(match, new_rule):
                module.exit_json(changed=False, msg='Rule \'%s\' is already in place' % rule_name)
            else:
                module.fail_json(msg='Rule \'%s\' already exists. Use operation: \'update\' to change it.' % rule_name)
        else:
            try:
                changed = add_rule(rulebase, new_rule)
                if changed and commit:
                    device.commit(sync=True)
            except PanXapiError:
                exc = get_exception()
                module.fail_json(msg=exc.message)
            module.exit_json(changed=changed, msg='Rule \'%s\' successfully added' % rule_name)
    elif operation == 'update':
        # Search for the rule. Update if found.
        match = find_rule(rulebase, rule_name)
        if match:
            try:
                new_rule = create_security_rule(
                    rule_name=rule_name,
                    description=description,
                    tag_name=tag_name,
                    source_zone=source_zone,
                    destination_zone=destination_zone,
                    source_ip=source_ip,
                    source_user=source_user,
                    destination_ip=destination_ip,
                    category=category,
                    application=application,
                    service=service,
                    hip_profiles=hip_profiles,
                    group_profile=group_profile,
                    antivirus=antivirus,
                    vulnerability=vulnerability,
                    spyware=spyware,
                    url_filtering=url_filtering,
                    file_blocking=file_blocking,
                    data_filtering=data_filtering,
                    wildfire_analysis=wildfire_analysis,
                    log_start=log_start,
                    log_end=log_end,
                    rule_type=rule_type,
                    action=action
                )
                changed = update_rule(rulebase, new_rule)
                if changed and commit:
                    device.commit(sync=True)
            except PanXapiError:
                exc = get_exception()
                module.fail_json(msg=exc.message)
            module.exit_json(changed=changed, msg='Rule \'%s\' successfully updated' % rule_name)
        else:
            module.fail_json(msg='Rule \'%s\' does not exist. Use operation: \'add\' to add it.' % rule_name)
if __name__ == '__main__':
main()
| gpl-3.0 |
skyddv/neutron | neutron/scheduler/base_scheduler.py | 46 | 2778 | # Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from operator import attrgetter
import random
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class BaseScheduler(object):
    """The base scheduler (agnostic to resource type).

    Child classes of BaseScheduler must define self.resource_filter
    to filter agents of a particular type.
    """
    resource_filter = None

    @abc.abstractmethod
    def select(self, plugin, context, resource_hostable_agents,
               num_agents_needed):
        """Return a subset of agents based on the specific scheduling logic."""

    def schedule(self, plugin, context, resource):
        """Select and bind agents to a given resource."""
        if not self.resource_filter:
            return
        # Ask the filter which agents may host this resource.
        agent_info = self.resource_filter.filter_agents(
            plugin, context, resource)
        wanted = agent_info['n_agents']
        candidates = agent_info['hostable_agents']
        chosen_agents = self.select(plugin, context, candidates, wanted)
        # Record the binding between the resource and the chosen agents.
        self.resource_filter.bind(context, chosen_agents, resource['id'])
        return chosen_agents
class BaseChanceScheduler(BaseScheduler):
    """Choose agents randomly."""

    def __init__(self, resource_filter):
        self.resource_filter = resource_filter

    def select(self, plugin, context, resource_hostable_agents,
               num_agents_needed):
        # Sample without replacement so no agent is picked twice.
        return random.sample(resource_hostable_agents, num_agents_needed)
class BaseWeightScheduler(BaseScheduler):
    """Choose agents based on load."""

    def __init__(self, resource_filter):
        self.resource_filter = resource_filter

    def select(self, plugin, context, resource_hostable_agents,
               num_agents_needed):
        # Least-loaded agents first; Python's sort is stable, so ties keep
        # their original relative order.
        by_load = sorted(resource_hostable_agents,
                         key=lambda agent: agent.load)
        return by_load[:num_agents_needed]
| apache-2.0 |
xxsergzzxx/python-for-android | python3-alpha/extra_modules/pyxmpp2/ext/delay.py | 46 | 6722 | #
# (C) Copyright 2003-2010 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Delayed delivery mark (jabber:x:delay) handling.
Normative reference:
- `JEP 91 <http://www.jabber.org/jeps/jep-0091.html>`__
"""
__docformat__="restructuredtext en"
raise ImportError("{0} is not yet rewritten for PyXMPP2".format(__name__))
import libxml2
import time
import datetime
from ..jid import JID
from ..utils import to_utf8,from_utf8
from ..xmlextra import get_node_ns_uri
from ..utils import datetime_utc_to_local,datetime_local_to_utc
from ..objects import StanzaPayloadObject
from ..exceptions import BadRequestProtocolError, JIDMalformedProtocolError, JIDError
DELAY_NS="jabber:x:delay"
class Delay(StanzaPayloadObject):
    """
    Delayed delivery tag.

    Represents 'jabber:x:delay' (JEP-0091) element of a Jabber stanza.

    :Ivariables:
        - `delay_from`: the "from" value of the delay element
        - `reason`: the "reason" (content) of the delay element
        - `timestamp`: the UTC timestamp as naive datetime object
    """
    xml_element_name = "x"
    xml_element_namespace = DELAY_NS

    def __init__(self, node_or_datetime, delay_from=None, reason=None, utc=True):
        """
        Initialize the Delay object.

        :Parameters:
            - `node_or_datetime`: an XML node to parse or the timestamp.
            - `delay_from`: JID of the entity which adds the delay mark
              (when `node_or_datetime` is a timestamp).
            - `reason`: reason of the delay (when `node_or_datetime` is a
              timestamp).
            - `utc`: if `True` then the timestamp is assumed to be UTC,
              otherwise it is assumed to be local time.
        :Types:
            - `node_or_datetime`: `libxml2.xmlNode` or `datetime.datetime`
            - `delay_from`: `pyxmpp.JID`
            - `reason`: `str`
            - `utc`: `bool`"""
        if isinstance(node_or_datetime, libxml2.xmlNode):
            self.from_xml(node_or_datetime)
        else:
            if utc:
                self.timestamp = node_or_datetime
            else:
                # Normalize to UTC so all instances compare consistently.
                self.timestamp = datetime_local_to_utc(node_or_datetime)
            self.delay_from = JID(delay_from)
            self.reason = str(reason)

    def from_xml(self, xmlnode):
        """Initialize Delay object from an XML node.

        :Parameters:
            - `xmlnode`: the jabber:x:delay XML element.
        :Types:
            - `xmlnode`: `libxml2.xmlNode`"""
        if xmlnode.type != "element":
            raise ValueError("XML node is not a jabber:x:delay element (not an element)")
        ns = get_node_ns_uri(xmlnode)
        if ns and ns != DELAY_NS or xmlnode.name != "x":
            raise ValueError("XML node is not a jabber:x:delay element")
        stamp = xmlnode.prop("stamp")
        # Strip a trailing "Z" (UTC designator) and any timezone suffix.
        if stamp.endswith("Z"):
            stamp = stamp[:-1]
        if "-" in stamp:
            stamp = stamp.split("-", 1)[0]
        try:
            tm = time.strptime(stamp, "%Y%m%dT%H:%M:%S")
        except ValueError:
            raise BadRequestProtocolError("Bad timestamp")
        tm = tm[0:8] + (0,)
        self.timestamp = datetime.datetime.fromtimestamp(time.mktime(tm))
        delay_from = from_utf8(xmlnode.prop("from"))
        if delay_from:
            try:
                self.delay_from = JID(delay_from)
            except JIDError:
                raise JIDMalformedProtocolError("Bad JID in the jabber:x:delay 'from' attribute")
        else:
            self.delay_from = None
        self.reason = from_utf8(xmlnode.getContent())

    def complete_xml_element(self, xmlnode, _unused):
        """Complete the XML node with `self` content.

        Should be overriden in classes derived from `StanzaPayloadObject`.

        :Parameters:
            - `xmlnode`: XML node with the element being built. It has already
              right name and namespace, but no attributes or content.
            - `_unused`: document to which the element belongs.
        :Types:
            - `xmlnode`: `libxml2.xmlNode`
            - `_unused`: `libxml2.xmlDoc`"""
        tm = self.timestamp.strftime("%Y%m%dT%H:%M:%S")
        xmlnode.setProp("stamp", tm)
        if self.delay_from:
            xmlnode.setProp("from", self.delay_from.as_utf8())
        if self.reason:
            xmlnode.setContent(to_utf8(self.reason))

    def get_datetime_local(self):
        """Get the timestamp as a local time.

        :return: the timestamp of the delay element represented in the local
          timezone.
        :returntype: `datetime.datetime`"""
        return datetime_utc_to_local(self.timestamp)

    def get_datetime_utc(self):
        """Get the timestamp as a UTC.

        :return: the timestamp of the delay element represented in UTC.
        :returntype: `datetime.datetime`"""
        return self.timestamp

    def __str__(self):
        n = self.as_xml()
        r = n.serialize()
        n.freeNode()
        return r

    def __lt__(self, other):
        """Order Delay objects chronologically.

        Required so lists of delays can be sorted on Python 3, where
        `__cmp__` is not used."""
        return self.timestamp < other.timestamp

    def __cmp__(self, other):
        # Python 2 ordering hook only. BUGFIX: the original compared the bare
        # name `timestamp` (a NameError); it must reference self.timestamp.
        return cmp(self.timestamp, other.timestamp)  # noqa: F821 - py2 builtin
def get_delays(stanza):
    """Get jabber:x:delay elements from the stanza.

    :Parameters:
        - `stanza`: a, probably delayed, stanza.
    :Types:
        - `stanza`: `pyxmpp.stanza.Stanza`

    :return: list of delay tags sorted by the timestamp.
    :returntype: `list` of `Delay`"""
    delays = []
    node = stanza.xmlnode.children
    while node:
        if node.type == "element" and get_node_ns_uri(node) == DELAY_NS and node.name == "x":
            delays.append(Delay(node))
        # BUGFIX: libxml2 sibling traversal uses the `.next` property; the
        # previous `n.__next__` was an incorrect 2to3 rename that broke the
        # walk over the stanza's children.
        node = node.next
    delays.sort()
    return delays
def get_delay(stanza):
    """Get the oldest jabber:x:delay element from the stanza.

    :Parameters:
        - `stanza`: a, probably delayed, stanza.
    :Types:
        - `stanza`: `pyxmpp.stanza.Stanza`

    The return value, if not `None`, contains a quite reliable
    timestamp of a delayed (e.g. from offline storage) message.

    :return: the oldest delay tag of the stanza or `None`.
    :returntype: `Delay`"""
    delays = get_delays(stanza)
    if not delays:
        return None
    # Reuse the already-computed (sorted) list instead of re-parsing the
    # stanza with a second get_delays() call.
    return delays[0]
# vi: sts=4 et sw=4
| apache-2.0 |
sgraham/nope | third_party/webdriver/pylib/selenium/webdriver/chrome/service.py | 17 | 3451 | #!/usr/bin/python
#
# Copyright 2011 Webdriver_name committers
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
from subprocess import PIPE
import time
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
class Service(object):
    """
    Object that manages the starting and stopping of the ChromeDriver
    """

    def __init__(self, executable_path, port=0, service_args=None, log_path=None):
        """
        Creates a new instance of the Service

        :Args:
         - executable_path : Path to the ChromeDriver
         - port : Port the service is running on
         - service_args : List of args to pass to the chromedriver service
         - log_path : Path for the chromedriver service to log to"""
        self.port = port
        self.path = executable_path
        # Copy the caller's list so appending --log-path below does not
        # mutate the argument they passed in.
        self.service_args = list(service_args) if service_args else []
        # Track the child process explicitly so stop() is safe to call even
        # when start() was never invoked or failed before spawning.
        self.process = None
        if log_path:
            self.service_args.append('--log-path=%s' % log_path)
        if self.port == 0:
            self.port = utils.free_port()

    def start(self):
        """
        Starts the ChromeDriver Service.

        :Exceptions:
         - WebDriverException : Raised either when it can't start the service
           or when it can't connect to the service
        """
        try:
            self.process = subprocess.Popen([
                self.path,
                "--port=%d" % self.port] +
                self.service_args, stdout=PIPE, stderr=PIPE)
        except OSError:
            # Narrowed from a bare `except:`: a missing or non-executable
            # binary surfaces as OSError; other errors should propagate.
            raise WebDriverException(
                "ChromeDriver executable needs to be available in the path. \
                Please download from http://code.google.com/p/selenium/downloads/list\
                and read up at http://code.google.com/p/selenium/wiki/ChromeDriver")
        # Poll until the service accepts connections, up to ~30 seconds.
        count = 0
        while not utils.is_connectable(self.port):
            count += 1
            time.sleep(1)
            if count == 30:
                raise WebDriverException("Can not connect to the ChromeDriver")

    @property
    def service_url(self):
        """
        Gets the url of the ChromeDriver Service
        """
        return "http://localhost:%d" % self.port

    def stop(self):
        """
        Tells the ChromeDriver to stop and cleans up the process
        """
        # If its dead (or was never started) dont worry
        if self.process is None:
            return
        # Tell the Server to die!
        import urllib2
        urllib2.urlopen("http://127.0.0.1:%d/shutdown" % self.port)
        count = 0
        while utils.is_connectable(self.port):
            if count == 30:
                break
            count += 1
            time.sleep(1)
        # Tell the Server to properly die in case
        try:
            if self.process:
                self.process.kill()
                self.process.wait()
        except OSError:
            # BUGFIX: was `except WindowsError`, which is a NameError on
            # non-Windows platforms; OSError covers it everywhere (kill may
            # not be available / process may already be gone).
            pass
| bsd-3-clause |
EvanK/ansible | test/units/modules/network/ironware/test_ironware_config.py | 30 | 6809 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.ironware import ironware_config
from .ironware_module import TestIronwareModule, load_fixture
from units.modules.utils import set_module_args
class TestIronwareConfigModule(TestIronwareModule):
    """Unit tests for the ironware_config Ansible module.

    The device transport helpers (get_config / load_config /
    run_commands) are patched with mocks, so every case exercises only
    the module's config diff/merge logic against a canned fixture.
    """

    module = ironware_config

    def setUp(self):
        super(TestIronwareConfigModule, self).setUp()
        # Patch the transport layer so no device connection is attempted.
        self.mock_get_config = patch('ansible.modules.network.ironware.ironware_config.get_config')
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch('ansible.modules.network.ironware.ironware_config.load_config')
        self.load_config = self.mock_load_config.start()
        self.mock_run_commands = patch('ansible.modules.network.ironware.ironware_config.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        super(TestIronwareConfigModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):
        # Every test diffs against the same canned running config;
        # config pushes are swallowed by the load_config mock.
        config_file = 'ironware_config_config.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None

    def execute_module(self, failed=False, changed=False, updates=None, sort=True, defaults=False):
        """Run the module once and assert the failed/changed/updates outcome.

        updates: expected list of config lines pushed to the device;
        sort: when True, compare updates order-insensitively.
        """
        self.load_fixtures(updates)
        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)
        if updates is not None:
            if sort:
                self.assertEqual(sorted(updates), sorted(result['updates']), result['updates'])
            else:
                self.assertEqual(updates, result['updates'], result['updates'])
        return result

    def test_ironware_config_unchanged(self):
        src = load_fixture('ironware_config_config.cfg')
        set_module_args(dict(src=src))
        self.execute_module()

    def test_ironware_config_src(self):
        src = load_fixture('ironware_config_src.cfg')
        set_module_args(dict(src=src))
        updates = ['hostname foo', 'interface ethernet 1/1',
                   'no ip address']
        self.execute_module(changed=True, updates=updates)

    def test_ironware_config_backup(self):
        set_module_args(dict(backup=True))
        result = self.execute_module()
        self.assertIn('__backup__', result)

    def test_ironware_config_save_always(self):
        self.run_commands.return_value = "hostname foobar"
        set_module_args(dict(save_when='always'))
        self.execute_module(changed=True)
        self.assertEqual(self.run_commands.call_count, 1)
        self.assertEqual(self.get_config.call_count, 1)
        self.assertEqual(self.load_config.call_count, 0)

    def test_ironware_config_lines_wo_parents(self):
        set_module_args(dict(lines=['hostname foobar']))
        updates = ['hostname foobar']
        self.execute_module(changed=True, updates=updates)

    def test_ironware_config_lines_w_parents(self):
        set_module_args(dict(lines=['disable'], parents=['interface ethernet 1/1']))
        updates = ['interface ethernet 1/1', 'disable']
        self.execute_module(changed=True, updates=updates)

    def test_ironware_config_before(self):
        set_module_args(dict(lines=['hostname foo'], before=['test1', 'test2']))
        updates = ['test1', 'test2', 'hostname foo']
        self.execute_module(changed=True, updates=updates, sort=False)

    def test_ironware_config_after(self):
        set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
        updates = ['hostname foo', 'test1', 'test2']
        self.execute_module(changed=True, updates=updates, sort=False)

    def test_ironware_config_before_after_no_change(self):
        set_module_args(dict(lines=['hostname router'],
                             before=['test1', 'test2'],
                             after=['test3', 'test4']))
        self.execute_module()

    def test_ironware_config_config(self):
        config = 'hostname localhost'
        set_module_args(dict(lines=['hostname router'], config=config))
        updates = ['hostname router']
        self.execute_module(changed=True, updates=updates)

    def test_ironware_config_replace_block(self):
        lines = ['port-name test string', 'test string']
        parents = ['interface ethernet 1/1']
        set_module_args(dict(lines=lines, replace='block', parents=parents))
        updates = parents + lines
        self.execute_module(changed=True, updates=updates)

    def test_ironware_config_match_none(self):
        lines = ['hostname router']
        set_module_args(dict(lines=lines, match='none'))
        self.execute_module(changed=True, updates=lines)

    def test_ironware_config_match_none_parents(self):
        # BUG FIX: this test previously reused the name
        # test_ironware_config_match_none, shadowing the definition above
        # so that test was never collected or run by the test runner.
        lines = ['ip address 1.2.3.4 255.255.255.0', 'port-name test string']
        parents = ['interface ethernet 1/1']
        set_module_args(dict(lines=lines, parents=parents, match='none'))
        updates = parents + lines
        self.execute_module(changed=True, updates=updates, sort=False)

    def test_ironware_config_match_strict(self):
        lines = ['ip address 1.2.3.4 255.255.255.0', 'port-name test string',
                 'disable']
        parents = ['interface ethernet 1/1']
        set_module_args(dict(lines=lines, parents=parents, match='strict'))
        updates = parents + ['disable']
        self.execute_module(changed=True, updates=updates, sort=False)

    def test_ironware_config_match_exact(self):
        lines = ['ip address 1.2.3.4 255.255.255.0', 'port-name test string',
                 'disable']
        parents = ['interface ethernet 1/1']
        set_module_args(dict(lines=lines, parents=parents, match='exact'))
        updates = parents + lines
        self.execute_module(changed=True, updates=updates, sort=False)
| gpl-3.0 |
wangjun/odoo | addons/payment_paypal/controllers/main.py | 260 | 3738 | # -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import urllib2
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class PaypalController(http.Controller):
    """HTTP endpoints for the Paypal payment acquirer (IPN / DPN / cancel)."""

    _notify_url = '/payment/paypal/ipn/'
    _return_url = '/payment/paypal/dpn/'
    _cancel_url = '/payment/paypal/cancel/'

    def _get_return_url(self, **post):
        """ Extract the return URL from the data coming from paypal. """
        return_url = post.pop('return_url', '')
        if not return_url:
            # Paypal echoes back the 'custom' JSON payload we posted;
            # fall back to the return_url embedded there, or '/'.
            custom = json.loads(post.pop('custom', False) or '{}')
            return_url = custom.get('return_url', '/')
        return return_url

    def paypal_validate_data(self, **post):
        """ Paypal IPN: three steps validation to ensure data correctness
         - step 1: return an empty HTTP 200 response -> will be done at the end
           by returning ''
         - step 2: POST the complete, unaltered message back to Paypal (preceded
           by cmd=_notify-validate), with same encoding
         - step 3: paypal send either VERIFIED or INVALID (single word)
        Once data is validated, process it. """
        res = False
        new_post = dict(post, cmd='_notify-validate')
        cr, uid, context = request.cr, request.uid, request.context

        # Look up the transaction so validation can target the acquirer's
        # own environment (test vs prod) instead of always hitting prod.
        reference = post.get('item_number')
        tx = None
        if reference:
            tx_ids = request.registry['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
            if tx_ids:
                tx = request.registry['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
        paypal_urls = request.registry['payment.acquirer']._get_paypal_urls(cr, uid, tx and tx.acquirer_id and tx.acquirer_id.environment or 'prod', context=context)
        validate_url = paypal_urls['paypal_form_url']
        urequest = urllib2.Request(validate_url, werkzeug.url_encode(new_post))
        uopen = urllib2.urlopen(urequest)
        try:
            resp = uopen.read()
        finally:
            # BUG FIX: the response object was previously never closed,
            # leaking the underlying socket on every IPN/DPN callback.
            uopen.close()
        if resp == 'VERIFIED':
            _logger.info('Paypal: validated data')
            res = request.registry['payment.transaction'].form_feedback(cr, SUPERUSER_ID, post, 'paypal', context=context)
        elif resp == 'INVALID':
            _logger.warning('Paypal: answered INVALID on data verification')
        else:
            # BUG FIX: `resp` is the raw body string returned by read();
            # the old code interpolated `resp.text`, which raised
            # AttributeError and masked the unexpected answer.
            _logger.warning('Paypal: unrecognized paypal answer, received %s instead of VERIFIED or INVALID' % resp)
        return res

    @http.route('/payment/paypal/ipn/', type='http', auth='none', methods=['POST'])
    def paypal_ipn(self, **post):
        """ Paypal IPN. """
        _logger.info('Beginning Paypal IPN form_feedback with post data %s', pprint.pformat(post))  # debug
        self.paypal_validate_data(**post)
        return ''

    @http.route('/payment/paypal/dpn', type='http', auth="none", methods=['POST'])
    def paypal_dpn(self, **post):
        """ Paypal DPN """
        _logger.info('Beginning Paypal DPN form_feedback with post data %s', pprint.pformat(post))  # debug
        return_url = self._get_return_url(**post)
        self.paypal_validate_data(**post)
        return werkzeug.utils.redirect(return_url)

    @http.route('/payment/paypal/cancel', type='http', auth="none")
    def paypal_cancel(self, **post):
        """ When the user cancels its Paypal payment: GET on this route """
        cr, uid, context = request.cr, SUPERUSER_ID, request.context
        _logger.info('Beginning Paypal cancel with post data %s', pprint.pformat(post))  # debug
        return_url = self._get_return_url(**post)
        return werkzeug.utils.redirect(return_url)
| agpl-3.0 |
emyarod/OSS | 1_intro/6.00.1x/Week 3/L5 Problems/L5 Problem 9.py | 1 | 1795 | # A semordnilap is a word or a phrase that spells a different word when
# backwards ("semordnilap" is a semordnilap of "palindromes"). Here are some
# examples:
#
# nametag / gateman
# dog / god
# live / evil
# desserts / stressed
#
# Write a recursive program, `semordnilap`, that takes in two words and says if
# they are semordnilap.
#
# This recursive function is not entirely straightforward. There are a few
# things that you need to check the first time you look at the inputs that you
# should not check on subsequent recursive calls: you need to make sure that the
# strings are not single characters, and also you need to be sure that the
# strings are not equal. If you do this check every time you call your function,
# though, this will end up interfering with the recursive base case (which we
# don't want!).
#
# The idea of a wrapper function is really important. You'll see more wrapper
# functions later. To introduce you to the idea, we are providing you with the
# wrapper function; your job is to write the recursive function semordnilap that
# the wrapper function calls. Here is the wrapper function:
#
# def semordnilapWrapper(str1, str2):
# # A single-length string cannot be semordnilap
# if len(str1) == 1 or len(str2) == 1:
# return False
#
# # Equal strings cannot be semordnilap
# if str1 == str2:
# return False
#
# return semordnilap(str1, str2)
def semordnilap(str1, str2):
    '''
    str1: a string
    str2: a string

    returns: True if str1 and str2 are semordnilap;
             False otherwise.
    '''
    # BUG FIX: the original returned True as soon as the FIRST character
    # of str1 matched the LAST character of str2, so the recursive call
    # was unreachable and e.g. semordnilap("dog", "gad") returned True.
    # Recurse until every character has been compared.
    if len(str1) != len(str2):
        return False
    if not str1:
        # Both strings exhausted: every pair matched.
        return True
    if str1[0] != str2[-1]:
        return False
    # First/last pair matches; compare the remaining interiors.
    return semordnilap(str1[1:], str2[:-1])
luhanhan/horizon | openstack_dashboard/test/api_tests/glance_rest_tests.py | 35 | 3890 | # Copyright 2015, Rackspace, US, Inc.
# Copyright 2015, Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from openstack_dashboard.api.rest import glance
from openstack_dashboard.test import helpers as test
class ImagesRestTestCase(test.TestCase):
    # REST-layer tests for the glance views: the glance client module is
    # replaced with a mock, so only request parsing and JSON response
    # shaping are exercised.  The expected response.content strings
    # encode the exact key order the serializer emits -- do not reformat.
    @mock.patch.object(glance.api, 'glance')
    def test_image_get_single(self, gc):
        """GET of one image proxies to image_get and returns its dict."""
        request = self.mock_rest_request()
        gc.image_get.return_value.to_dict.return_value = {'name': '1'}
        response = glance.Image().get(request, "1")
        self.assertStatusCode(response, 200)
        gc.image_get.assert_called_once_with(request, "1")
    @mock.patch.object(glance.api, 'glance')
    def test_image_get_list_detailed(self, gc):
        """GET list forwards sort/pagination kwargs and filters separately."""
        kwargs = {
            'sort_dir': 'desc',
            'sort_key': 'namespace',
            'marker': 1,
            'paginate': False,
        }
        filters = {'name': 'fedora'}
        # The view must split the combined GET params back into
        # filters vs pagination kwargs when calling the API.
        request = self.mock_rest_request(
            **{'GET': dict(kwargs, **filters)})
        gc.image_list_detailed.return_value = ([
            mock.Mock(**{'to_dict.return_value': {'name': 'fedora'}}),
            mock.Mock(**{'to_dict.return_value': {'name': 'cirros'}})
        ], False, False)
        response = glance.Images().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.content,
                         '{"items": [{"name": "fedora"}, {"name": "cirros"}]'
                         ', "has_more_data": false, "has_prev_data": false}')
        gc.image_list_detailed.assert_called_once_with(request,
                                                       filters=filters,
                                                       **kwargs)
    @mock.patch.object(glance.api, 'glance')
    def test_namespace_get_list(self, gc):
        """Namespace list with no GET params calls the API with empty filters."""
        request = self.mock_rest_request(**{'GET': {}})
        gc.metadefs_namespace_full_list.return_value = (
            [{'namespace': '1'}, {'namespace': '2'}], False, False
        )
        response = glance.MetadefsNamespaces().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.content,
                         '{"items": [{"namespace": "1"}, {"namespace": "2"}]'
                         ', "has_more_data": false, "has_prev_data": false}')
        gc.metadefs_namespace_full_list.assert_called_once_with(
            request, filters={}
        )
    @mock.patch.object(glance.api, 'glance')
    def test_namespace_get_list_kwargs_and_filters(self, gc):
        """Namespace list splits GET params into filters and kwargs."""
        kwargs = {
            'sort_dir': 'desc',
            'sort_key': 'namespace',
            'marker': 1,
            'paginate': False,
        }
        filters = {'resource_types': 'type'}
        request = self.mock_rest_request(
            **{'GET': dict(kwargs, **filters)})
        gc.metadefs_namespace_full_list.return_value = (
            [{'namespace': '1'}, {'namespace': '2'}], False, False
        )
        response = glance.MetadefsNamespaces().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.content,
                         '{"items": [{"namespace": "1"}, {"namespace": "2"}]'
                         ', "has_more_data": false, "has_prev_data": false}')
        gc.metadefs_namespace_full_list.assert_called_once_with(
            request, filters=filters, **kwargs
        )
| apache-2.0 |
axilleas/ansible-modules-core | cloud/amazon/ec2_key.py | 29 | 7267 | #!/usr/bin/python
# -*- coding: utf-8 -*-
DOCUMENTATION = '''
---
module: ec2_key
version_added: "1.5"
short_description: maintain an ec2 key pair.
description:
- maintains ec2 key pairs. This module has a dependency on python-boto >= 2.5
options:
name:
description:
- Name of the key pair.
required: true
key_material:
description:
- Public key material.
required: false
region:
description:
- the EC2 region to use
required: false
default: null
aliases: []
state:
description:
- create or delete keypair
required: false
default: 'present'
aliases: []
wait:
description:
- Wait for the specified action to complete before returning.
required: false
default: false
aliases: []
version_added: "1.6"
wait_timeout:
description:
- How long before wait gives up, in seconds
required: false
default: 300
aliases: []
version_added: "1.6"
extends_documentation_fragment: aws
author: Vincent Viallet
'''
EXAMPLES = '''
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Creates a new ec2 key pair named `example` if not present, returns generated
# private key
- name: example ec2 key
local_action:
module: ec2_key
name: example
# Creates a new ec2 key pair named `example` if not present using provided key
# material
- name: example2 ec2 key
local_action:
module: ec2_key
name: example2
key_material: 'ssh-rsa AAAAxyz...== me@example.com'
state: present
# Creates a new ec2 key pair named `example` if not present using provided key
# material
- name: example3 ec2 key
local_action:
module: ec2_key
name: example3
key_material: "{{ item }}"
with_file: /path/to/public_key.id_rsa.pub
# Removes ec2 key pair by name
- name: remove example key
local_action:
module: ec2_key
name: example
state: absent
'''
try:
import boto.ec2
except ImportError:
print "failed=True msg='boto required for this module'"
sys.exit(1)
import random
import string
def main():
    """Create or delete an EC2 key pair according to the module params.

    Exits via module.exit_json with key={name, fingerprint[, private_key]}
    for the resulting pair, or key=None when the pair is absent.
    private_key is only present when AWS generated the pair for us.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
            name=dict(required=True),
            key_material=dict(required=False),
            state = dict(default='present', choices=['present', 'absent']),
            wait = dict(type='bool', default=False),
            wait_timeout = dict(default=300),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    name = module.params['name']
    state = module.params.get('state')
    key_material = module.params.get('key_material')
    wait = module.params.get('wait')
    wait_timeout = int(module.params.get('wait_timeout'))
    changed = False
    ec2 = ec2_connect(module)
    # find the key if present
    key = ec2.get_key_pair(name)
    # Ensure requested key is absent
    if state == 'absent':
        if key:
            '''found a match, delete it'''
            # NOTE(review): this branch deletes the key even when running
            # in check mode, unlike the 'present' branch below which
            # guards on module.check_mode -- confirm whether intended.
            try:
                key.delete()
                if wait:
                    # Poll until the key is gone or wait_timeout elapses.
                    start = time.time()
                    action_complete = False
                    while (time.time() - start) < wait_timeout:
                        if not ec2.get_key_pair(name):
                            action_complete = True
                            break
                        time.sleep(1)
                    if not action_complete:
                        module.fail_json(msg="timed out while waiting for the key to be removed")
            except Exception, e:
                module.fail_json(msg="Unable to delete key pair '%s' - %s" % (key, e))
            else:
                key = None
                changed = True
        else:
            '''no match found, no changes required'''
    # Ensure requested key is present
    elif state == 'present':
        if key:
            # existing key found
            if key_material:
                # EC2's fingerprints are non-trivial to generate, so push this key
                # to a temporary name and make ec2 calculate the fingerprint for us.
                #
                # http://blog.jbrowne.com/?p=23
                # https://forums.aws.amazon.com/thread.jspa?messageID=352828

                # find an unused name
                test = 'empty'
                while test:
                    randomchars = [random.choice(string.ascii_letters + string.digits) for x in range(0,10)]
                    tmpkeyname = "ansible-" + ''.join(randomchars)
                    test = ec2.get_key_pair(tmpkeyname)
                # create tmp key
                tmpkey = ec2.import_key_pair(tmpkeyname, key_material)
                # get tmp key fingerprint
                tmpfingerprint = tmpkey.fingerprint
                # delete tmp key
                tmpkey.delete()
                # Fingerprints differ: the provided material is not the
                # stored key, so replace it (destructive), honoring
                # check mode and optionally waiting for re-creation.
                if key.fingerprint != tmpfingerprint:
                    if not module.check_mode:
                        key.delete()
                        key = ec2.import_key_pair(name, key_material)
                        if wait:
                            start = time.time()
                            action_complete = False
                            while (time.time() - start) < wait_timeout:
                                if ec2.get_key_pair(name):
                                    action_complete = True
                                    break
                                time.sleep(1)
                            if not action_complete:
                                module.fail_json(msg="timed out while waiting for the key to be re-created")
                    changed = True
            pass
        # if the key doesn't exist, create it now
        else:
            '''no match found, create it'''
            if not module.check_mode:
                if key_material:
                    '''We are providing the key, need to import'''
                    key = ec2.import_key_pair(name, key_material)
                else:
                    '''
                    No material provided, let AWS handle the key creation and
                    retrieve the private key
                    '''
                    key = ec2.create_key_pair(name)
                if wait:
                    # Poll until the new key is visible or we time out.
                    start = time.time()
                    action_complete = False
                    while (time.time() - start) < wait_timeout:
                        if ec2.get_key_pair(name):
                            action_complete = True
                            break
                        time.sleep(1)
                    if not action_complete:
                        module.fail_json(msg="timed out while waiting for the key to be created")
            changed = True
    if key:
        data = {
            'name': key.name,
            'fingerprint': key.fingerprint
        }
        if key.material:
            # Only set when AWS generated the pair; expose the private
            # key so the caller can persist it.
            data.update({'private_key': key.material})
        module.exit_json(changed=changed, key=data)
    else:
        module.exit_json(changed=changed, key=None)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
mancoast/CPythonPyc_test | cpython/278_test_tempfile.py | 21 | 33801 | # tempfile.py unit tests.
import tempfile
import errno
import io
import os
import signal
import shutil
import sys
import re
import warnings
import contextlib
import unittest
from test import test_support as support
# These tests use mktemp() deliberately; silence its RuntimeWarning.
warnings.filterwarnings("ignore",
                        category=RuntimeWarning,
                        message="mktemp", module=__name__)
# Feature probes -- platform-dependent tests are skipped based on these.
if hasattr(os, 'stat'):
    import stat
    has_stat = 1
else:
    has_stat = 0
has_textmode = (tempfile._text_openflags != tempfile._bin_openflags)
has_spawnl = hasattr(os, 'spawnl')
# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
if sys.platform in ('openbsd3', 'openbsd4'):
    TEST_FILES = 48
else:
    TEST_FILES = 100
# This is organized as one test for each chunk of code in tempfile.py,
# in order of their appearance in the file. Testing which requires
# threads is not done here.
# Common functionality.
class TC(unittest.TestCase):
    """Common helpers shared by the tempfile test cases."""

    # The random component tempfile generates: exactly six characters
    # drawn from [a-zA-Z0-9_-].
    str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")

    def failOnException(self, what, ei=None):
        """Fail the current test, reporting the active (or given) exception."""
        if ei is None:
            ei = sys.exc_info()
        self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))

    def nameCheck(self, name, dir, pre, suf):
        """Assert *name* is dir + pre + <6 random chars> + suf."""
        dirname, base = os.path.split(name)
        got_pre = base[:len(pre)]
        got_suf = base[len(base) - len(suf):]
        # check for equality of the absolute paths!
        self.assertEqual(os.path.abspath(dirname), os.path.abspath(dir),
                         "file '%s' not in directory '%s'" % (name, dir))
        self.assertEqual(got_pre, pre,
                         "file '%s' does not begin with '%s'" % (base, pre))
        self.assertEqual(got_suf, suf,
                         "file '%s' does not end with '%s'" % (base, suf))
        randpart = base[len(pre):len(base) - len(suf)]
        self.assertTrue(self.str_check.match(randpart),
                        "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
                        % randpart)
test_classes = []
class test_exports(TC):
    def test_exports(self):
        # There are no surprising symbols in the tempfile module:
        # everything public must appear in this documented set.
        public = frozenset([
            "NamedTemporaryFile",
            "TemporaryFile",
            "mkstemp",
            "mkdtemp",
            "mktemp",
            "TMP_MAX",
            "gettempprefix",
            "gettempdir",
            "tempdir",
            "template",
            "SpooledTemporaryFile",
        ])
        unexp = [name for name in tempfile.__dict__
                 if name[0] != '_' and name not in public]
        self.assertTrue(len(unexp) == 0,
                        "unexpected keys: %s" % unexp)
test_classes.append(test_exports)
class test__RandomNameSequence(TC):
    """Test the internal iterator object _RandomNameSequence."""
    def setUp(self):
        # Fresh name generator for every test.
        self.r = tempfile._RandomNameSequence()
    def test_get_six_char_str(self):
        # _RandomNameSequence returns a six-character string
        s = self.r.next()
        self.nameCheck(s, '', '', '')
    def test_many(self):
        # _RandomNameSequence returns no duplicate strings (stochastic)
        dict = {}
        r = self.r
        for i in xrange(TEST_FILES):
            s = r.next()
            self.nameCheck(s, '', '', '')
            self.assertNotIn(s, dict)
            dict[s] = 1
    def test_supports_iter(self):
        # _RandomNameSequence supports the iterator protocol
        # (the sequence is infinite, so stop after a fixed count)
        i = 0
        r = self.r
        try:
            for s in r:
                i += 1
                if i == 20:
                    break
        except:
            self.failOnException("iteration")
    @unittest.skipUnless(hasattr(os, 'fork'),
                         "os.fork is required for this test")
    def test_process_awareness(self):
        # ensure that the random source differs between
        # child and parent.
        # The child sends its first generated name through a pipe; the
        # parent compares it against its own next name -- they must
        # differ, proving the RNG is reseeded after fork.
        read_fd, write_fd = os.pipe()
        pid = None
        try:
            pid = os.fork()
            if not pid:
                os.close(read_fd)
                os.write(write_fd, next(self.r).encode("ascii"))
                os.close(write_fd)
                # bypass the normal exit handlers- leave those to
                # the parent.
                os._exit(0)
            parent_value = next(self.r)
            child_value = os.read(read_fd, len(parent_value)).decode("ascii")
        finally:
            if pid:
                # best effort to ensure the process can't bleed out
                # via any bugs above
                try:
                    os.kill(pid, signal.SIGKILL)
                except EnvironmentError:
                    pass
            os.close(read_fd)
            os.close(write_fd)
        self.assertNotEqual(child_value, parent_value)
test_classes.append(test__RandomNameSequence)
class test__candidate_tempdir_list(TC):
    """Test the internal function _candidate_tempdir_list."""

    def test_nonempty_list(self):
        # _candidate_tempdir_list returns a nonempty list of strings
        candidates = tempfile._candidate_tempdir_list()
        self.assertFalse(len(candidates) == 0)
        for entry in candidates:
            self.assertIsInstance(entry, basestring)

    def test_wanted_dirs(self):
        # _candidate_tempdir_list contains the expected directories
        # Make sure the interesting environment variables are all set.
        with support.EnvironmentVarGuard() as env:
            for envname in 'TMPDIR', 'TEMP', 'TMP':
                if not os.getenv(envname):
                    env[envname] = os.path.abspath(envname)
            candidates = tempfile._candidate_tempdir_list()
            for envname in 'TMPDIR', 'TEMP', 'TMP':
                dirname = os.getenv(envname)
                if not dirname: raise ValueError
                self.assertIn(dirname, candidates)
            try:
                dirname = os.getcwd()
            except (AttributeError, os.error):
                dirname = os.curdir
            self.assertIn(dirname, candidates)
            # Not practical to try to verify the presence of OS-specific
            # paths in this list.
test_classes.append(test__candidate_tempdir_list)
# We test _get_default_tempdir some more by testing gettempdir.
class TestGetDefaultTempdir(TC):
    """Test _get_default_tempdir()."""
    def test_no_files_left_behind(self):
        # _get_default_tempdir probes candidate directories by creating a
        # file; it must clean up after itself even when io.open or
        # write() fails part-way through.
        # use a private empty directory
        our_temp_directory = tempfile.mkdtemp()
        try:
            # force _get_default_tempdir() to consider our empty directory
            def our_candidate_list():
                return [our_temp_directory]
            with support.swap_attr(tempfile, "_candidate_tempdir_list",
                                   our_candidate_list):
                # verify our directory is empty after _get_default_tempdir()
                tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])
                def raise_OSError(*args, **kwargs):
                    raise OSError(-1)
                with support.swap_attr(io, "open", raise_OSError):
                    # test again with failing io.open()
                    with self.assertRaises(IOError) as cm:
                        tempfile._get_default_tempdir()
                    self.assertEqual(cm.exception.errno, errno.ENOENT)
                    self.assertEqual(os.listdir(our_temp_directory), [])
                # Keep a reference to the real io.open so bad_writer can
                # still open the file before sabotaging its write().
                open = io.open
                def bad_writer(*args, **kwargs):
                    fp = open(*args, **kwargs)
                    fp.write = raise_OSError
                    return fp
                with support.swap_attr(io, "open", bad_writer):
                    # test again with failing write()
                    with self.assertRaises(IOError) as cm:
                        tempfile._get_default_tempdir()
                    self.assertEqual(cm.exception.errno, errno.ENOENT)
                    self.assertEqual(os.listdir(our_temp_directory), [])
        finally:
            shutil.rmtree(our_temp_directory)
test_classes.append(TestGetDefaultTempdir)
class test__get_candidate_names(TC):
    """Test the internal function _get_candidate_names."""

    def test_retval(self):
        # _get_candidate_names returns a _RandomNameSequence object
        names = tempfile._get_candidate_names()
        self.assertIsInstance(names, tempfile._RandomNameSequence)

    def test_same_thing(self):
        # _get_candidate_names always returns the same object
        first = tempfile._get_candidate_names()
        second = tempfile._get_candidate_names()
        self.assertTrue(first is second)
test_classes.append(test__get_candidate_names)
@contextlib.contextmanager
def _inside_empty_temp_dir():
    """Run the enclosed block with tempfile.tempdir pointing at a fresh,
    empty directory; the directory is removed on exit."""
    scratch = tempfile.mkdtemp()
    try:
        with support.swap_attr(tempfile, 'tempdir', scratch):
            yield
    finally:
        support.rmtree(scratch)
def _mock_candidate_names(*names):
    """Patch tempfile._get_candidate_names so each call yields exactly
    *names*, making collision behavior deterministic."""
    def _canned():
        return iter(names)
    return support.swap_attr(tempfile,
                             '_get_candidate_names',
                             _canned)
class test__mkstemp_inner(TC):
    """Test the internal function _mkstemp_inner."""
    class mkstemped:
        # Wraps one _mkstemp_inner file and removes it in __del__.
        # The os functions are cached as class attributes, presumably so
        # __del__ still works during interpreter shutdown -- TODO confirm.
        _bflags = tempfile._bin_openflags
        _tflags = tempfile._text_openflags
        _close = os.close
        _unlink = os.unlink
        def __init__(self, dir, pre, suf, bin):
            if bin: flags = self._bflags
            else: flags = self._tflags
            (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)
        def write(self, str):
            os.write(self.fd, str)
        def __del__(self):
            self._close(self.fd)
            self._unlink(self.name)
    def do_create(self, dir=None, pre="", suf="", bin=1):
        # Create one temp file and validate its generated name.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = self.mkstemped(dir, pre, suf, bin)
        except:
            self.failOnException("_mkstemp_inner")
        self.nameCheck(file.name, dir, pre, suf)
        return file
    def test_basic(self):
        # _mkstemp_inner can create files
        self.do_create().write("blat")
        self.do_create(pre="a").write("blat")
        self.do_create(suf="b").write("blat")
        self.do_create(pre="a", suf="b").write("blat")
        self.do_create(pre="aa", suf=".txt").write("blat")
    def test_basic_many(self):
        # _mkstemp_inner can create many files (stochastic)
        # (the mkstemped objects keep the files alive until GC)
        extant = range(TEST_FILES)
        for i in extant:
            extant[i] = self.do_create(pre="aa")
    def test_choose_directory(self):
        # _mkstemp_inner can create files in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            self.do_create(dir=dir).write("blat")
        finally:
            os.rmdir(dir)
    @unittest.skipUnless(has_stat, 'os.stat not available')
    def test_file_mode(self):
        # _mkstemp_inner creates files with the proper mode
        file = self.do_create()
        mode = stat.S_IMODE(os.stat(file.name).st_mode)
        expected = 0600
        if sys.platform in ('win32', 'os2emx'):
            # There's no distinction among 'user', 'group' and 'world';
            # replicate the 'user' bits.
            user = expected >> 6
            expected = user * (1 + 8 + 64)
        self.assertEqual(mode, expected)
    @unittest.skipUnless(has_spawnl, 'os.spawnl not available')
    def test_noinherit(self):
        # _mkstemp_inner file handles are not inherited by child processes
        if support.verbose:
            v="v"
        else:
            v="q"
        file = self.do_create()
        fd = "%d" % file.fd
        try:
            me = __file__
        except NameError:
            me = sys.argv[0]
        # We have to exec something, so that FD_CLOEXEC will take
        # effect. The core of this test is therefore in
        # tf_inherit_check.py, which see.
        tester = os.path.join(os.path.dirname(os.path.abspath(me)),
                              "tf_inherit_check.py")
        # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
        # but an arg with embedded spaces should be decorated with double
        # quotes on each end
        if sys.platform in ('win32',):
            decorated = '"%s"' % sys.executable
            tester = '"%s"' % tester
        else:
            decorated = sys.executable
        retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
        self.assertFalse(retval < 0,
                         "child process caught fatal signal %d" % -retval)
        self.assertFalse(retval > 0, "child process reports failure %d"%retval)
    @unittest.skipUnless(has_textmode, "text mode not available")
    def test_textmode(self):
        # _mkstemp_inner can create files in text mode
        self.do_create(bin=0).write("blat\n")
        # XXX should test that the file really is a text file
    def default_mkstemp_inner(self):
        # Call _mkstemp_inner exactly as tempfile.mkstemp() would by default.
        return tempfile._mkstemp_inner(tempfile.gettempdir(),
                                       tempfile.template,
                                       '',
                                       tempfile._bin_openflags)
    def test_collision_with_existing_file(self):
        # _mkstemp_inner tries another name when a file with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            (fd1, name1) = self.default_mkstemp_inner()
            os.close(fd1)
            self.assertTrue(name1.endswith('aaa'))
            (fd2, name2) = self.default_mkstemp_inner()
            os.close(fd2)
            self.assertTrue(name2.endswith('bbb'))
    def test_collision_with_existing_directory(self):
        # _mkstemp_inner tries another name when a directory with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            dir = tempfile.mkdtemp()
            self.assertTrue(dir.endswith('aaa'))
            (fd, name) = self.default_mkstemp_inner()
            os.close(fd)
            self.assertTrue(name.endswith('bbb'))
class test_gettempprefix(TC):
    """Test gettempprefix()."""

    def test_sane_template(self):
        # gettempprefix returns a nonempty prefix string
        prefix = tempfile.gettempprefix()
        self.assertIsInstance(prefix, basestring)
        self.assertTrue(len(prefix) > 0)

    def test_usable_template(self):
        # gettempprefix returns a usable prefix string
        # Create a temp directory, avoiding use of the prefix.
        # Then attempt to create a file whose name is
        # prefix + 'xxxxxx.xxx' in that directory.
        candidate = tempfile.gettempprefix() + "xxxxxx.xxx"
        tmpdir = tempfile.mkdtemp(prefix="")
        try:
            path = os.path.join(tmpdir, candidate)
            try:
                fd = os.open(path, os.O_RDWR | os.O_CREAT)
            except:
                self.failOnException("os.open")
            os.close(fd)
            os.unlink(path)
        finally:
            os.rmdir(tmpdir)
test_classes.append(test_gettempprefix)
class test_gettempdir(TC):
    """Test gettempdir()."""

    def test_directory_exists(self):
        # gettempdir returns a directory which exists
        tmpdir = tempfile.gettempdir()
        self.assertTrue(os.path.isabs(tmpdir) or tmpdir == os.curdir,
                        "%s is not an absolute path" % tmpdir)
        self.assertTrue(os.path.isdir(tmpdir),
                        "%s is not a directory" % tmpdir)

    def test_directory_writable(self):
        # gettempdir returns a directory writable by the user
        # sneaky: just instantiate a NamedTemporaryFile, which
        # defaults to writing into the directory returned by
        # gettempdir.
        try:
            handle = tempfile.NamedTemporaryFile()
            handle.write("blat")
            handle.close()
        except:
            self.failOnException("create file in %s" % tempfile.gettempdir())

    def test_same_thing(self):
        # gettempdir always returns the same object
        first = tempfile.gettempdir()
        second = tempfile.gettempdir()
        self.assertTrue(first is second)
test_classes.append(test_gettempdir)
class test_mkstemp(TC):
    """Test mkstemp()."""

    def do_create(self, dir=None, pre="", suf=""):
        # Create one temp file, verify its location and generated name,
        # then clean it up.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            fd, path = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
            parent = os.path.split(path)[0]
            wanted = os.path.abspath(dir)
            self.assertEqual(wanted, parent,
                "Directory '%s' incorrectly returned as '%s'" % (wanted, parent))
        except:
            self.failOnException("mkstemp")
        try:
            self.nameCheck(path, dir, pre, suf)
        finally:
            os.close(fd)
            os.unlink(path)

    def test_basic(self):
        # mkstemp can create files
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")
        self.do_create(dir=".")

    def test_choose_directory(self):
        # mkstemp can create directories in a user-selected directory
        tmpdir = tempfile.mkdtemp()
        try:
            self.do_create(dir=tmpdir)
        finally:
            os.rmdir(tmpdir)
test_classes.append(test_mkstemp)
class test_mkdtemp(TC):
    """Test mkdtemp()."""
    def do_create(self, dir=None, pre="", suf=""):
        # Create one directory and validate its generated name; if the
        # name check fails, the directory is removed before re-raising
        # so nothing is left behind.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
        except:
            self.failOnException("mkdtemp")
        try:
            self.nameCheck(name, dir, pre, suf)
            return name
        except:
            os.rmdir(name)
            raise
    def test_basic(self):
        # mkdtemp can create directories
        os.rmdir(self.do_create())
        os.rmdir(self.do_create(pre="a"))
        os.rmdir(self.do_create(suf="b"))
        os.rmdir(self.do_create(pre="a", suf="b"))
        os.rmdir(self.do_create(pre="aa", suf=".txt"))
    def test_basic_many(self):
        # mkdtemp can create many directories (stochastic)
        extant = range(TEST_FILES)
        try:
            for i in extant:
                extant[i] = self.do_create(pre="aa")
        finally:
            # only entries that were replaced by a path string were
            # actually created and need removing
            for i in extant:
                if(isinstance(i, basestring)):
                    os.rmdir(i)
    def test_choose_directory(self):
        # mkdtemp can create directories in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            os.rmdir(self.do_create(dir=dir))
        finally:
            os.rmdir(dir)
    @unittest.skipUnless(has_stat, 'os.stat not available')
    def test_mode(self):
        # mkdtemp creates directories with the proper mode
        dir = self.do_create()
        try:
            mode = stat.S_IMODE(os.stat(dir).st_mode)
            mode &= 0777 # Mask off sticky bits inherited from /tmp
            expected = 0700
            if sys.platform in ('win32', 'os2emx'):
                # There's no distinction among 'user', 'group' and 'world';
                # replicate the 'user' bits.
                user = expected >> 6
                expected = user * (1 + 8 + 64)
            self.assertEqual(mode, expected)
        finally:
            os.rmdir(dir)
    def test_collision_with_existing_file(self):
        # mkdtemp tries another name when a file with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            file = tempfile.NamedTemporaryFile(delete=False)
            file.close()
            self.assertTrue(file.name.endswith('aaa'))
            dir = tempfile.mkdtemp()
            self.assertTrue(dir.endswith('bbb'))
    def test_collision_with_existing_directory(self):
        # mkdtemp tries another name when a directory with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            dir1 = tempfile.mkdtemp()
            self.assertTrue(dir1.endswith('aaa'))
            dir2 = tempfile.mkdtemp()
            self.assertTrue(dir2.endswith('bbb'))
test_classes.append(test_mkdtemp)
class test_mktemp(TC):
    """Test mktemp()."""

    # For safety, all use of mktemp must occur in a private directory.
    # We must also suppress the RuntimeWarning it generates.
    def setUp(self):
        self.dir = tempfile.mkdtemp()

    def tearDown(self):
        if self.dir:
            os.rmdir(self.dir)
            self.dir = None

    class mktemped:
        # Small RAII-style wrapper: picks a name with mktemp() and creates
        # the file immediately; the file is unlinked on garbage collection.
        _unlink = os.unlink
        _bflags = tempfile._bin_openflags

        def __init__(self, dir, pre, suf):
            self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
            # Create the file.  This will raise an exception if it's
            # mysteriously appeared in the meanwhile.
            os.close(os.open(self.name, self._bflags, 0600))

        def __del__(self):
            self._unlink(self.name)

    def do_create(self, pre="", suf=""):
        # Helper: reserve-and-create one name and validate it.
        try:
            file = self.mktemped(self.dir, pre, suf)
        except:
            self.failOnException("mktemp")
        self.nameCheck(file.name, self.dir, pre, suf)
        return file

    def test_basic(self):
        # mktemp can choose usable file names
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")

    def test_many(self):
        # mktemp can choose many usable file names (stochastic)
        extant = range(TEST_FILES)
        for i in extant:
            extant[i] = self.do_create(pre="aa")

##     def test_warning(self):
##         # mktemp issues a warning when used
##         warnings.filterwarnings("error",
##                                 category=RuntimeWarning,
##                                 message="mktemp")
##         self.assertRaises(RuntimeWarning,
##                           tempfile.mktemp, dir=self.dir)

test_classes.append(test_mktemp)
# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
class test_NamedTemporaryFile(TC):
    """Test NamedTemporaryFile()."""

    def do_create(self, dir=None, pre="", suf="", delete=True):
        # Helper: create a NamedTemporaryFile in `dir` and validate its name.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
                                               delete=delete)
        except:
            self.failOnException("NamedTemporaryFile")
        self.nameCheck(file.name, dir, pre, suf)
        return file

    def test_basic(self):
        # NamedTemporaryFile can create files
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")

    def test_creates_named(self):
        # NamedTemporaryFile creates files with names
        f = tempfile.NamedTemporaryFile()
        self.assertTrue(os.path.exists(f.name),
                        "NamedTemporaryFile %s does not exist" % f.name)

    def test_del_on_close(self):
        # A NamedTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.NamedTemporaryFile(dir=dir)
            f.write('blat')
            f.close()
            self.assertFalse(os.path.exists(f.name),
                             "NamedTemporaryFile %s exists after close" % f.name)
        finally:
            os.rmdir(dir)

    def test_dis_del_on_close(self):
        # Tests that delete-on-close can be disabled
        dir = tempfile.mkdtemp()
        tmp = None
        try:
            f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
            tmp = f.name
            f.write('blat')
            f.close()
            self.assertTrue(os.path.exists(f.name),
                            "NamedTemporaryFile %s missing after close" % f.name)
        finally:
            # File survives close() with delete=False, so unlink manually.
            if tmp is not None:
                os.unlink(tmp)
            os.rmdir(dir)

    def test_multiple_close(self):
        # A NamedTemporaryFile can be closed many times without error
        f = tempfile.NamedTemporaryFile()
        f.write('abc\n')
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")

    def test_context_manager(self):
        # A NamedTemporaryFile can be used as a context manager
        with tempfile.NamedTemporaryFile() as f:
            self.assertTrue(os.path.exists(f.name))
        # Leaving the with-block closes (and therefore deletes) the file.
        self.assertFalse(os.path.exists(f.name))
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_no_leak_fd(self):
        # Issue #21058: don't leak file descriptor when fdopen() fails
        old_close = os.close
        old_fdopen = os.fdopen
        closed = []
        def close(fd):
            closed.append(fd)
        def fdopen(*args):
            raise ValueError()
        # Monkey-patch os: record fds passed to close(), make fdopen fail.
        os.close = close
        os.fdopen = fdopen
        try:
            self.assertRaises(ValueError, tempfile.NamedTemporaryFile)
            # Exactly one fd must have been handed to os.close on failure.
            self.assertEqual(len(closed), 1)
        finally:
            os.close = old_close
            os.fdopen = old_fdopen

    # How to test the mode and bufsize parameters?

test_classes.append(test_NamedTemporaryFile)
class test_SpooledTemporaryFile(TC):
    """Test SpooledTemporaryFile()."""

    def do_create(self, max_size=0, dir=None, pre="", suf=""):
        # Helper: create a SpooledTemporaryFile in `dir`.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
        except:
            self.failOnException("SpooledTemporaryFile")
        return file

    def test_basic(self):
        # SpooledTemporaryFile can create files
        f = self.do_create()
        self.assertFalse(f._rolled)
        f = self.do_create(max_size=100, pre="a", suf=".txt")
        self.assertFalse(f._rolled)

    def test_del_on_close(self):
        # A SpooledTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
            self.assertFalse(f._rolled)
            # Writing past max_size forces rollover to a real on-disk file.
            f.write('blat ' * 5)
            self.assertTrue(f._rolled)
            filename = f.name
            f.close()
            self.assertFalse(os.path.exists(filename),
                             "SpooledTemporaryFile %s exists after close" % filename)
        finally:
            os.rmdir(dir)

    def test_rewrite_small(self):
        # A SpooledTemporaryFile can be written to multiple times within
        # the max_size
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        for i in range(5):
            f.seek(0, 0)
            f.write('x' * 20)
        self.assertFalse(f._rolled)

    def test_write_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        f.write('x' * 20)
        self.assertFalse(f._rolled)
        f.write('x' * 10)
        self.assertFalse(f._rolled)
        f.write('x')
        self.assertTrue(f._rolled)

    def test_writelines(self):
        # Verify writelines with a SpooledTemporaryFile
        f = self.do_create()
        f.writelines((b'x', b'y', b'z'))
        f.seek(0)
        buf = f.read()
        self.assertEqual(buf, b'xyz')

    def test_writelines_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=35)
        f.writelines((b'x' * 20, b'x' * 10, b'x' * 5))
        self.assertFalse(f._rolled)
        f.write(b'x')
        self.assertTrue(f._rolled)

    def test_xreadlines(self):
        # xreadlines (Python 2 file API) works before and after rollover.
        f = self.do_create(max_size=20)
        f.write(b'abc\n' * 5)
        f.seek(0)
        self.assertFalse(f._rolled)
        self.assertEqual(list(f.xreadlines()), [b'abc\n'] * 5)
        f.write(b'x\ny')
        self.assertTrue(f._rolled)
        f.seek(0)
        self.assertEqual(list(f.xreadlines()), [b'abc\n'] * 5 + [b'x\n', b'y'])

    def test_sparse(self):
        # A SpooledTemporaryFile that is written late in the file will extend
        # when that occurs
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        f.seek(100, 0)
        self.assertFalse(f._rolled)
        f.write('x')
        self.assertTrue(f._rolled)

    def test_fileno(self):
        # A SpooledTemporaryFile should roll over to a real file on fileno()
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        self.assertTrue(f.fileno() > 0)
        self.assertTrue(f._rolled)

    def test_multiple_close_before_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile()
        f.write('abc\n')
        self.assertFalse(f._rolled)
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")

    def test_multiple_close_after_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write('abc\n')
        self.assertTrue(f._rolled)
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")

    def test_bound_methods(self):
        # It should be OK to steal a bound method from a SpooledTemporaryFile
        # and use it independently; when the file rolls over, those bound
        # methods should continue to function
        f = self.do_create(max_size=30)
        read = f.read
        write = f.write
        seek = f.seek
        write("a" * 35)
        write("b" * 35)
        seek(0, 0)
        self.assertTrue(read(70) == 'a'*35 + 'b'*35)

    def test_properties(self):
        # mode/name/newlines/encoding behaviour before and after rollover.
        f = tempfile.SpooledTemporaryFile(max_size=10)
        f.write(b'x' * 10)
        self.assertFalse(f._rolled)
        self.assertEqual(f.mode, 'w+b')
        # Before rollover there is no real file, hence no name.
        self.assertIsNone(f.name)
        with self.assertRaises(AttributeError):
            f.newlines
        with self.assertRaises(AttributeError):
            f.encoding
        f.write(b'x')
        self.assertTrue(f._rolled)
        self.assertEqual(f.mode, 'w+b')
        self.assertIsNotNone(f.name)
        with self.assertRaises(AttributeError):
            f.newlines
        with self.assertRaises(AttributeError):
            f.encoding

    def test_context_manager_before_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_context_manager_during_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            f.write('abc\n')
            f.flush()
            self.assertTrue(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_context_manager_after_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write('abc\n')
        f.flush()
        self.assertTrue(f._rolled)
        with f:
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

test_classes.append(test_SpooledTemporaryFile)
class test_TemporaryFile(TC):
    """Test TemporaryFile()."""

    def test_basic(self):
        # TemporaryFile can create files
        # No point in testing the name params - the file has no name.
        try:
            tempfile.TemporaryFile()
        except:
            self.failOnException("TemporaryFile")

    def test_has_no_name(self):
        # TemporaryFile creates files with no names (on this system)
        dir = tempfile.mkdtemp()
        f = tempfile.TemporaryFile(dir=dir)
        f.write('blat')
        # Sneaky: because this file has no name, it should not prevent
        # us from removing the directory it was created in.
        try:
            os.rmdir(dir)
        except:
            ei = sys.exc_info()
            # cleanup
            f.close()
            os.rmdir(dir)
            self.failOnException("rmdir", ei)

    def test_multiple_close(self):
        # A TemporaryFile can be closed many times without error
        f = tempfile.TemporaryFile()
        f.write('abc\n')
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")

    # How to test the mode and bufsize parameters?

# Where TemporaryFile is just an alias for NamedTemporaryFile these tests
# would be redundant, so only register them when the two differ.
if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
    test_classes.append(test_TemporaryFile)
def test_main():
    # Run every TestCase that registered itself in test_classes.
    support.run_unittest(*test_classes)

if __name__ == "__main__":
    test_main()
| gpl-3.0 |
klmitch/nova | nova/tests/functional/api_sample_tests/test_instance_actions.py | 3 | 6192 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
from nova.tests.functional import api_samples_test_base
class ServerActionsSampleJsonTest(test_servers.ServersSampleBase):
    # Base variant runs without an explicit microversion; subclasses
    # override `microversion`/`scenarios`.
    microversion = None
    ADMIN_API = True
    sample_dir = 'os-instance-actions'

    def setUp(self):
        """Create a server, stop it, and stash the resulting stop action
        so the tests below can look it up by request_id."""
        super(ServerActionsSampleJsonTest, self).setUp()
        # Create and stop a server
        self.uuid = self._post_server()
        self._get_response('servers/%s/action' % self.uuid, 'POST',
                           '{"os-stop": null}')
        response = self._do_get('servers/%s/os-instance-actions' % self.uuid)
        response_data = api_samples_test_base.pretty_data(response.content)
        actions = api_samples_test_base.objectify(response_data)
        # First entry is the action we just issued.
        self.action_stop = actions['instanceActions'][0]
        self._wait_for_state_change({'id': self.uuid}, 'SHUTOFF')

    def _get_subs(self):
        # Template substitutions used by _verify_response when matching
        # the stored API samples.
        return {
            'uuid': self.uuid,
            'project_id': self.action_stop['project_id']
        }

    def test_instance_action_get(self):
        req_id = self.action_stop['request_id']
        response = self._do_get('servers/%s/os-instance-actions/%s' %
                                (self.uuid, req_id))
        # Non-admins can see event details except for the "traceback" field
        # starting in the 2.51 microversion.
        if self.ADMIN_API:
            name = 'instance-action-get-resp'
        else:
            name = 'instance-action-get-non-admin-resp'
        self._verify_response(name, self._get_subs(), response, 200)

    def test_instance_actions_list(self):
        response = self._do_get('servers/%s/os-instance-actions' % self.uuid)
        self._verify_response('instance-actions-list-resp', self._get_subs(),
                              response, 200)
class ServerActionsV221SampleJsonTest(ServerActionsSampleJsonTest):
    # Same tests as the base class, run at microversion 2.21.
    microversion = '2.21'
    scenarios = [('v2_21', {'api_major_version': 'v2.1'})]
class ServerActionsV251AdminSampleJsonTest(ServerActionsSampleJsonTest):
    """Tests the 2.51 microversion for the os-instance-actions API.

    The 2.51 microversion allows non-admins to see instance action event
    details *except* for the traceback field.

    The tests in this class are run as an admin user so all fields will be
    displayed.
    """
    microversion = '2.51'
    scenarios = [('v2_51', {'api_major_version': 'v2.1'})]
class ServerActionsV251NonAdminSampleJsonTest(ServerActionsSampleJsonTest):
    """Tests the 2.51 microversion for the os-instance-actions API.

    The 2.51 microversion allows non-admins to see instance action event
    details *except* for the traceback field.

    The tests in this class are run as a non-admin user so all fields except
    for the ``traceback`` field will be displayed.
    """
    ADMIN_API = False
    microversion = '2.51'
    scenarios = [('v2_51', {'api_major_version': 'v2.1'})]
class ServerActionsV258SampleJsonTest(ServerActionsV251AdminSampleJsonTest):
    # Adds coverage for the limit/marker paging parameters and the
    # changes-since filter on the actions listing.
    microversion = '2.58'
    scenarios = [('v2_58', {'api_major_version': 'v2.1'})]

    def test_instance_actions_list_with_limit(self):
        response = self._do_get('servers/%s/os-instance-actions'
                                '?limit=1' % self.uuid)
        self._verify_response('instance-actions-list-with-limit-resp',
                              self._get_subs(), response, 200)

    def test_instance_actions_list_with_marker(self):
        # Page starting after the stop action we recorded in setUp.
        marker = self.action_stop['request_id']
        response = self._do_get('servers/%s/os-instance-actions'
                                '?marker=%s' % (self.uuid, marker))
        self._verify_response('instance-actions-list-with-marker-resp',
                              self._get_subs(), response, 200)

    def test_instance_actions_with_changes_since(self):
        stop_action_time = self.action_stop['start_time']
        response = self._do_get(
            'servers/%s/os-instance-actions'
            '?changes-since=%s' % (self.uuid, stop_action_time))
        self._verify_response(
            'instance-actions-list-with-changes-since',
            self._get_subs(), response, 200)
class ServerActionsV258NonAdminSampleJsonTest(ServerActionsV258SampleJsonTest):
    # Same 2.58 tests, run as a non-admin user.
    ADMIN_API = False
class ServerActionsV262SampleJsonTest(ServerActionsV258SampleJsonTest):
    microversion = '2.62'
    scenarios = [('v2_62', {'api_major_version': 'v2.1'})]

    def _get_subs(self):
        # Adds regex substitutions for the event host name/hostId fields,
        # which vary from run to run.
        return {
            'uuid': self.uuid,
            'project_id': self.action_stop['project_id'],
            'event_host': r'\w+',
            'event_hostId': '[a-f0-9]+'
        }
class ServerActionsV262NonAdminSampleJsonTest(ServerActionsV262SampleJsonTest):
    # Same 2.62 tests, run as a non-admin user.
    ADMIN_API = False
class ServerActionsV266SampleJsonTest(ServerActionsV262SampleJsonTest):
    # Adds coverage for the changes-before filter on the actions listing.
    microversion = '2.66'
    scenarios = [('v2_66', {'api_major_version': 'v2.1'})]

    def test_instance_actions_with_changes_before(self):
        stop_action_time = self.action_stop['updated_at']
        response = self._do_get(
            'servers/%s/os-instance-actions'
            '?changes-before=%s' % (self.uuid, stop_action_time))
        self._verify_response(
            'instance-actions-list-with-changes-before',
            self._get_subs(), response, 200)
class ServerActionsV284SampleJsonTest(ServerActionsV266SampleJsonTest):
    # Same tests, run at microversion 2.84.
    microversion = '2.84'
    scenarios = [('2.84', {'api_major_version': 'v2.1'})]
class ServerActionsV284NonAdminSampleJsonTest(ServerActionsV284SampleJsonTest):
    # Same 2.84 tests, run as a non-admin user.
    ADMIN_API = False
| apache-2.0 |
Stavitsky/neutron | neutron/db/migration/alembic_migrations/versions/3c346828361e_metering_label_shared.py | 15 | 1107 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""metering_label_shared
Revision ID: 3c346828361e
Revises: 16a27a58e093
Create Date: 2014-08-27 15:03:46.537290
"""
# revision identifiers, used by Alembic.
revision = '3c346828361e'
down_revision = '16a27a58e093'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable ``shared`` boolean (server default: false) to the
    ``meteringlabels`` table."""
    shared_column = sa.Column('shared', sa.Boolean(),
                              server_default=sa.sql.false(),
                              nullable=True)
    op.add_column('meteringlabels', shared_column)
| apache-2.0 |
h-naoto/gobgp | test/lib/bagpipe.py | 4 | 2768 | # Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base import *
class BagpipeContainer(BGPContainer):
    """BGPContainer specialization that drives a bagpipe-bgp docker image."""

    # Path inside the container where the host config_dir is mounted.
    SHARED_VOLUME = '/root/shared_volume'

    def __init__(self, name, asn, router_id,
                 ctn_image_name='yoshima/bagpipe-bgp'):
        super(BagpipeContainer, self).__init__(name, asn, router_id,
                                               ctn_image_name)
        self.shared_volumes.append((self.config_dir, self.SHARED_VOLUME))

    def run(self):
        # Start the container, copy the rendered bgp.conf from the shared
        # volume into /etc/bagpipe-bgp/ and start the service.
        super(BagpipeContainer, self).run()
        cmd = CmdBuffer(' ')
        cmd << 'docker exec'
        cmd << '{0} cp {1}/bgp.conf'.format(self.name, self.SHARED_VOLUME)
        cmd << '/etc/bagpipe-bgp/'
        local(str(cmd), capture=True)
        cmd = 'docker exec {0} service bagpipe-bgp start'.format(self.name)
        local(cmd, capture=True)

    def create_config(self):
        # Render the bagpipe-bgp INI-style config into the host-side
        # config_dir (which is volume-mounted into the container).
        c = CmdBuffer()
        c << '[BGP]'
        if len(self.ip_addrs) > 0:
            c << 'local_address={0}'.format(self.ip_addrs[0][1].split('/')[0])
        for peer, info in self.peers.iteritems():
            c << 'peers={0}'.format(info['neigh_addr'].split('/')[0])
        c << 'my_as={0}'.format(self.asn)
        c << 'enable_rtc=True'
        c << '[API]'
        c << 'api_host=localhost'
        c << 'api_port=8082'
        c << '[DATAPLANE_DRIVER_IPVPN]'
        c << 'dataplane_driver = DummyDataplaneDriver'
        c << '[DATAPLANE_DRIVER_EVPN]'
        c << 'dataplane_driver = DummyDataplaneDriver'
        with open('{0}/bgp.conf'.format(self.config_dir), 'w') as f:
            # Echo the config for test-log visibility (Python 2 print).
            print colors.yellow(str(c))
            f.writelines(str(c))

    def reload_config(self):
        # Push the re-rendered config into the container and restart the
        # service so it takes effect.
        cmd = CmdBuffer(' ')
        cmd << 'docker exec'
        cmd << '{0} cp {1}/bgp.conf'.format(self.name, self.SHARED_VOLUME)
        cmd << '/etc/bagpipe-bgp/'
        local(str(cmd), capture=True)
        cmd = 'docker exec {0} service bagpipe-bgp restart'.format(self.name)
        local(cmd, capture=True)

    def pipework(self, bridge, ip_addr, intf_name=""):
        # After attaching an interface, regenerate the config (the
        # local_address above depends on ip_addrs) and reload if running.
        super(BagpipeContainer, self).pipework(bridge, ip_addr, intf_name)
        self.create_config()
        if self.is_running:
            self.reload_config()
| apache-2.0 |
jalabort/ijcv-2014-aam | aam/transform.py | 1 | 23059 | import scipy
import numpy as np
from menpo.base import DP, Targetable, Vectorizable
from menpo.shape import PointCloud
from menpo.transform import Transform, AlignmentSimilarity
from menpo.model import Similarity2dInstanceModel
from menpo.model.modelinstance import ModelInstance
# Point Distribution Models ---------------------------------------------------
class PDM(ModelInstance, DP):
    r"""Specialization of :map:`ModelInstance` for use with spatial data.
    """

    def __init__(self, model, sigma2=1):
        super(PDM, self).__init__(model)
        self._set_prior(sigma2)

    def _set_prior(self, sigma2):
        # Prior terms derived from the model eigenvalues: per-parameter
        # Jacobian prior and its diagonal Hessian counterpart.
        self.j_prior = sigma2 / self.model.eigenvalues
        self.h_prior = np.diag(self.j_prior)

    @property
    def n_dims(self):
        r"""
        The number of dimensions of the spatial instance of the model

        :type: int
        """
        return self.model.template_instance.n_dims

    def d_dp(self, points):
        """
        Returns the Jacobian of the PCA model reshaped to the standard
        Jacobian shape (n_points, n_params, n_dims), which maps to
        (n_features, n_components, n_dims) on the linear model.

        Returns
        -------
        jacobian : (n_features, n_components, n_dims) ndarray
            The Jacobian of the model in the standard Jacobian shape.
        """
        n_active = self.model.n_active_components
        jacobian = self.model.d_dp.T.reshape(n_active, -1, self.n_dims)
        return jacobian.swapaxes(0, 1)
class GlobalPDM(PDM):
    r"""A :map:`PDM` composed with a global alignment transform: the model
    instance is generated from the weights and then mapped through
    `global_transform`.
    """

    def __init__(self, model, global_transform_cls, sigma2=1):
        # Start the global_transform as an identity (first call to
        # from_vector_inplace() or set_target() will update this)
        self.global_transform = global_transform_cls(model.mean, model.mean)
        super(GlobalPDM, self).__init__(model, sigma2)

    def _set_prior(self, sigma2):
        # Flat prior over the 4 similarity parameters, eigenvalue-based
        # prior over the model weights.
        sim_prior = np.ones((4,))
        pdm_prior = sigma2 / self.model.eigenvalues
        self.j_prior = np.hstack((sim_prior, pdm_prior))
        self.h_prior = np.diag(self.j_prior)

    @property
    def n_global_parameters(self):
        r"""
        The number of parameters in the `global_transform`

        :type: int
        """
        return self.global_transform.n_parameters

    @property
    def global_parameters(self):
        r"""
        The parameters for the global transform.

        :type: (`n_global_parameters`,) ndarray
        """
        return self.global_transform.as_vector()

    def _new_target_from_state(self):
        r"""
        Return the appropriate target for the model weights provided,
        accounting for the effect of the global transform

        Returns
        -------
        new_target: :class:`menpo.shape.PointCloud`
            A new target for the weights provided
        """
        return self.global_transform.apply(self.model.instance(self.weights))

    def _weights_for_target(self, target):
        r"""
        Return the appropriate model weights for target provided, accounting
        for the effect of the global transform. Note that this method
        updates the global transform to be in the correct state.

        Parameters
        ----------
        target: :class:`menpo.shape.PointCloud`
            The target that the statistical model will try to reproduce

        Returns
        -------
        weights: (P,) ndarray
            Weights of the statistical model that generate the closest
            PointCloud to the requested target
        """
        self._update_global_transform(target)
        projected_target = self.global_transform.pseudoinverse.apply(target)
        # now we have the target in model space, project it to recover the
        # weights
        new_weights = self.model.project(projected_target)
        # TODO investigate the impact of this, could be problematic
        # the model can't perfectly reproduce the target we asked for -
        # reset the global_transform.target to what it CAN produce
        #refined_target = self._target_for_weights(new_weights)
        #self.global_transform.target = refined_target
        return new_weights

    def _update_global_transform(self, target):
        self.global_transform.set_target(target)

    def _as_vector(self):
        r"""
        Return the current parameters of this transform - this is the
        just the linear model's weights

        Returns
        -------
        params : (`n_parameters`,) ndarray
            The vector of parameters
        """
        # Global (similarity) parameters come first, then model weights.
        return np.hstack([self.global_parameters, self.weights])

    def from_vector_inplace(self, vector):
        # First, update the global transform
        global_parameters = vector[:self.n_global_parameters]
        self._update_global_weights(global_parameters)
        # Now extract the weights, and let super handle the update
        weights = vector[self.n_global_parameters:]
        PDM.from_vector_inplace(self, weights)

    def _update_global_weights(self, global_weights):
        r"""
        Hook that allows for overriding behavior when the global weights are
        set. Default implementation simply asks global_transform to
        update itself from vector.
        """
        self.global_transform.from_vector_inplace(global_weights)

    def d_dp(self, points):
        # d_dp is always evaluated at the mean shape
        points = self.model.mean.points

        # compute dX/dp

        # dX/dq is the Jacobian of the global transform evaluated at the
        # current target
        # (n_points, n_global_params, n_dims)
        dX_dq = self._global_transform_d_dp(points)

        # by application of the chain rule dX/db is the Jacobian of the
        # model transformed by the linear component of the global transform
        # (n_points, n_weights, n_dims)
        dS_db = PDM.d_dp(self, [])
        # (n_points, n_dims, n_dims)
        dX_dS = self.global_transform.d_dx(points)
        # (n_points, n_weights, n_dims)
        # NOTE(review): this subscript sums dX_dS over its middle axis and
        # combines elementwise over the trailing axis, rather than the
        # per-point matrix product ('idk, ikj -> idj') a textbook chain
        # rule would give -- confirm this is intentional.
        dX_db = np.einsum('ilj, idj -> idj', dX_dS, dS_db)

        # dX/dp is simply the concatenation of the previous two terms
        # (n_points, n_params, n_dims)
        return np.hstack((dX_dq, dX_db))

    def _global_transform_d_dp(self, points):
        return self.global_transform.d_dp(points)
class OrthoPDM(GlobalPDM):
    r"""A :map:`GlobalPDM` whose shape model is orthonormalized against a 2D
    similarity model built from the model mean, decoupling the similarity
    parameters from the shape weights.
    """

    def __init__(self, model, sigma2=1):
        # 1. Construct similarity model from the mean of the model
        self.similarity_model = Similarity2dInstanceModel(model.mean)
        # 2. Orthonormalize model and similarity model
        model_cpy = model.copy()
        model_cpy.orthonormalize_against_inplace(self.similarity_model)
        self.similarity_weights = self.similarity_model.project(model_cpy.mean)
        super(OrthoPDM, self).__init__(model_cpy, AlignmentSimilarity, sigma2)

    @property
    def global_parameters(self):
        r"""
        The parameters for the global transform.

        :type: (`n_global_parameters`,) ndarray
        """
        return self.similarity_weights

    def _update_global_transform(self, target):
        # Project the target onto the similarity model to recover the
        # similarity weights, then update the global transform from them.
        self.similarity_weights = self.similarity_model.project(target)
        self._update_global_weights(self.similarity_weights)

    def _update_global_weights(self, global_weights):
        self.similarity_weights = global_weights
        new_target = self.similarity_model.instance(global_weights)
        self.global_transform.set_target(new_target)

    def _global_transform_d_dp(self, points):
        # Jacobian of the similarity model reshaped to
        # (n_points, n_global_parameters, n_dims).
        return self.similarity_model.d_dp.T.reshape(
            self.n_global_parameters, -1, self.n_dims).swapaxes(0, 1)
# Linear Warps ----------------------------------------------------------------
class LinearWarp(OrthoPDM, Transform):
    r"""An :map:`OrthoPDM` usable as a :map:`Transform`.  The model spans a
    dense point set whose first `n_landmarks` points are sparse landmarks;
    a sparse target is densified via the pseudo-inverse of the landmark
    sub-matrix of the stacked components.
    """

    def __init__(self, model, n_landmarks, sigma2=1):
        super(LinearWarp, self).__init__(model, sigma2)
        self.n_landmarks = n_landmarks
        # Stack similarity components on top of the shape components.
        self.W = np.vstack((self.similarity_model.components,
                            self.model.components))
        # V: columns of W covering the sparse landmark coordinates.
        V = self.W[:, :self.n_dims*self.n_landmarks]
        # NOTE(review): relies on `scipy.linalg` being reachable via the
        # bare `import scipy` at module level -- verify against the scipy
        # version in use.
        self.pinv_V = scipy.linalg.pinv(V)

    @property
    def dense_target(self):
        # Everything after the sparse landmark points.
        return PointCloud(self.target.points[self.n_landmarks:])

    @property
    def sparse_target(self):
        # The first n_landmarks points.
        return PointCloud(self.target.points[:self.n_landmarks])

    def set_target(self, target):
        if target.n_points == self.n_landmarks:
            # densify target: sparse points -> component weights (pinv_V)
            # -> full dense point set (W).
            target = np.dot(np.dot(target.as_vector(), self.pinv_V), self.W)
            target = PointCloud(np.reshape(target, (-1, self.n_dims)))
        OrthoPDM.set_target(self, target)

    def _apply(self, _, **kwargs):
        # The warp returns the dense part of the current target; the first
        # positional argument is ignored.
        return self.target.points[self.n_landmarks:]

    def d_dp(self, _):
        # Jacobian restricted to the dense points only.
        return OrthoPDM.d_dp(self, _)[self.n_landmarks:, ...]
# Non-Linear Warps ------------------------------------------------------------
class ModelDrivenTransform(Transform, Targetable, Vectorizable, DP):
r"""
A transform that couples a traditional landmark-based transform to a
statistical model such that source points of the alignment transform
are the points of the model. The weights of the transform are just
the weights of statistical model.
If no source is provided, the mean of the model is defined as the
source landmarks of the transform.
Parameters
----------
model : :class:`menpo.model.base.StatisticalModel`
A linear statistical shape model.
transform_cls : :class:`menpo.transform.AlignableTransform`
A class of :class:`menpo.transform.base.AlignableTransform`
The align constructor will be called on this with the source
and target landmarks. The target is
set to the points generated from the model using the
provide weights - the source is either given or set to the
model's mean.
source : :class:`menpo.shape.base.PointCloud`
The source landmarks of the transform. If None, the mean of the model
is used.
Default: None
"""
def __init__(self, model, transform_cls, source=None, sigma2=1):
self.pdm = PDM(model, sigma2=sigma2)
self._cached_points, self.dW_dl = None, None
self.transform = transform_cls(source, self.target)
@property
def n_dims(self):
r"""
The number of dimensions that the transform supports.
:type: int
"""
return self.pdm.n_dims
def _apply(self, x, **kwargs):
r"""
Apply this transform to the given object. Uses the internal transform.
Parameters
----------
x : (N, D) ndarray or a transformable object
The object to be transformed.
kwargs : dict
Passed through to transforms `apply_inplace` method.
Returns
--------
transformed : (N, D) ndarray or object
The transformed object
"""
return self.transform._apply(x, **kwargs)
@property
def target(self):
return self.pdm.target
def _target_setter(self, new_target):
r"""
On a new target being set, we need to:
Parameters
----------
new_target: :class:`PointCloud`
The new_target that we want to set.
"""
self.pdm.set_target(new_target)
def _new_target_from_state(self):
# We delegate to PDM to handle all our Targetable duties. As a
# result, *we* never need to call _sync_target_for_state, so we have
# no need for an implementation of this method. Of course the
# interface demands it, so the stub is here. Contrast with
# _target_setter, which is required, because we will have to handle
# external calls to set_target().
pass
def _sync_state_from_target(self):
# Let the pdm update its state
self.pdm._sync_state_from_target()
# and update our transform to the new state
self.transform.set_target(self.target)
@property
def n_parameters(self):
r"""
The total number of parameters.
Simply ``n_weights``.
:type: int
"""
return self.pdm.n_parameters
def _as_vector(self):
r"""
Return the current weights of this transform - this is the
just the linear model's weights
Returns
-------
params : (`n_parameters`,) ndarray
The vector of weights
"""
return self.pdm.as_vector()
def from_vector_inplace(self, vector):
r"""
Updates the ModelDrivenTransform's state from it's
vectorized form.
"""
self.pdm.from_vector_inplace(vector)
# By here the pdm has updated our target state, we just need to
# update the transform
self.transform.set_target(self.target)
def d_dp(self, points):
r"""
The derivative of this MDT wrt parametrization changes evaluated at
points.
This is done by chaining the derivative of points wrt the
source landmarks on the transform (dW/dL) together with the Jacobian
of the linear model wrt its weights (dX/dp).
Parameters
----------
points: ndarray shape (n_points, n_dims)
The spatial points at which the derivative should be evaluated.
Returns
-------
ndarray shape (n_points, n_params, n_dims)
The jacobian wrt parameterization
"""
# check if re-computation of dW/dl can be avoided
if not np.array_equal(self._cached_points, points):
# recompute dW/dl, the derivative each point wrt
# the source landmarks
self.dW_dl = self.transform.d_dl(points)
# cache points
self._cached_points = points
# dX/dp is simply the Jacobian of the PDM
dX_dp = self.pdm.d_dp(points)
# PREVIOUS
# dW_dX: n_points x n_centres x n_dims
# dX_dp: n_centres x n_params x n_dims
# dW_dl: n_points x (n_dims) x n_centres x n_dims
# dX_dp: (n_points x n_dims) x n_params
dW_dp = np.einsum('ild, lpd -> ipd', self.dW_dl, dX_dp)
# dW_dp: n_points x n_params x n_dims
return dW_dp
def jp(self):
r"""
References
----------
.. [1] G. Papandreou and P. Maragos, "Adaptive and Constrained
Algorithms for Inverse Compositional Active Appearance Model
Fitting", CVPR08
"""
# the incremental warp is always evaluated at p=0, ie the mean shape
points = self.pdm.model.mean.points
# compute:
# - dW/dp when p=0
# - dW/dp when p!=0
# - dW/dx when p!=0 evaluated at the source landmarks
# dW/dp when p=0 and when p!=0 are the same and simply given by
# the Jacobian of the model
# (n_points, n_params, n_dims)
dW_dp_0 = self.pdm.d_dp(points)
# (n_points, n_params, n_dims)
dW_dp = dW_dp_0
# (n_points, n_dims, n_dims)
dW_dx = self.transform.d_dx(points)
# (n_points, n_params, n_dims)
dW_dx_dW_dp_0 = np.einsum('ijk, ilk -> eilk', dW_dx, dW_dp_0)
# (n_params, n_params)
J = np.einsum('ijk, ilk -> jl', dW_dp, dW_dx_dW_dp_0)
# (n_params, n_params)
H = np.einsum('ijk, ilk -> jl', dW_dp, dW_dp)
# (n_params, n_params)
Jp = np.linalg.solve(H, J)
return Jp
    @property
    def j_prior(self):
        # Jacobian prior, delegated to the underlying point distribution
        # model.
        return self.pdm.j_prior
    @property
    def h_prior(self):
        # Hessian prior, delegated to the underlying point distribution
        # model.
        return self.pdm.h_prior
class GlobalMDTransform(ModelDrivenTransform):
    r"""
    A transform that couples an alignment transform to a
    statistical model together with a global similarity transform,
    such that the weights of the transform are fully specified by
    both the weights of statistical model and the weights of the
    similarity transform. The model is assumed to
    generate an instance which is then transformed by the similarity
    transform; the result defines the target landmarks of the transform.
    If no source is provided, the mean of the model is defined as the
    source landmarks of the transform.
    Parameters
    ----------
    model : :class:`menpo.model.base.StatisticalModel`
        A linear statistical shape model.
    transform_cls : :class:`menpo.transform.AlignableTransform`
        A class of :class:`menpo.transform.base.AlignableTransform`
        The align constructor will be called on this with the source
        and target landmarks. The target is
        set to the points generated from the model - the source is either
        given or set to the model's mean.
    global_transform : :class:`menpo.transform.AlignableTransform`
        A class of :class:`menpo.transform.base.AlignableTransform`
        The global transform that should be applied to the model output.
        Doesn't have to have been constructed from the .align() constructor.
        Note that the GlobalMDTransform isn't guaranteed to hold on to the
        exact object passed in here - so don't expect external changes to
        the global_transform to be reflected in the behavior of this object.
    source : :class:`menpo.shape.base.PointCloud`, optional
        The source landmarks of the transform. If no `source` is provided the
        mean of the model is used.
    sigma2 : float, optional
        Noise variance, forwarded to the underlying ``GlobalPDM``.
        Default: 1
    """
    def __init__(self, model, transform_cls, global_transform, source=None,
                 sigma2=1):
        self.pdm = GlobalPDM(model, global_transform, sigma2=sigma2)
        self._cached_points = None
        self.transform = transform_cls(source, self.target)
    def jp(self):
        r"""
        Compute the parameter-update projection matrix ``Jp`` used for
        first order inverse compositional parameter updates, following
        the approximation proposed by Papandreou and Maragos in [1].
        Returns
        -------
        Jp : (`n_parameters`, `n_parameters`) ndarray
            The parameter update projection matrix.
        References
        ----------
        .. [1] G. Papandreou and P. Maragos, "Adaptive and Constrained
               Algorithms for Inverse Compositional Active Appearance Model
               Fitting", CVPR08
        """
        # the incremental warp is always evaluated at p=0, ie the mean shape
        points = self.pdm.model.mean.points
        # compute:
        # - dW/dp when p=0
        # - dW/dp when p!=0
        # - dW/dx when p!=0 evaluated at the source landmarks
        # dW/dq when p=0 and when p!=0 are the same and given by the
        # Jacobian of the global transform evaluated at the mean of the
        # model
        # (n_points, n_global_params, n_dims)
        dW_dq = self.pdm._global_transform_d_dp(points)
        # dW/db when p=0, is the Jacobian of the model
        # (n_points, n_weights, n_dims)
        dW_db_0 = PDM.d_dp(self.pdm, points)
        # dW/dp when p=0, is simply the concatenation of the previous
        # two terms
        # (n_points, n_params, n_dims)
        dW_dp_0 = np.hstack((dW_dq, dW_db_0))
        # by application of the chain rule dW_db when p!=0,
        # is the Jacobian of the global transform wrt the points times
        # the Jacobian of the model: dX(S)/db = dX/dS * dS/db
        # (n_points, n_dims, n_dims)
        dW_dS = self.pdm.global_transform.d_dx(points)
        # (n_points, n_weights, n_dims)
        dW_db = np.einsum('ilj, idj -> idj', dW_dS, dW_db_0)
        # dW/dp is simply the concatenation of dW_dq with dW_db
        # (n_points, n_params, n_dims)
        dW_dp = np.hstack((dW_dq, dW_db))
        # dW/dx is the jacobian of the transform evaluated at the source
        # landmarks
        # (n_points, n_dims, n_dims)
        dW_dx = self.transform.d_dx(points)
        # (n_points, n_params, n_dims)
        dW_dx_dW_dp_0 = np.einsum('ijk, ilk -> ilk', dW_dx, dW_dp_0)
        # (n_params, n_params)
        J = np.einsum('ijk, ilk -> jl', dW_dp, dW_dx_dW_dp_0)
        # (n_params, n_params)
        H = np.einsum('ijk, ilk -> jl', dW_dp, dW_dp)
        # (n_params, n_params) - solve rather than explicitly invert H
        Jp = np.linalg.solve(H, J)
        return Jp
class OrthoMDTransform(GlobalMDTransform):
    r"""
    A transform that couples an alignment transform to a
    statistical model together with a global similarity transform,
    such that the weights of the transform are fully specified by
    both the weights of statistical model and the weights of the
    similarity transform. The model is assumed to
    generate an instance which is then transformed by the similarity
    transform; the result defines the target landmarks of the transform.
    If no source is provided, the mean of the model is defined as the
    source landmarks of the transform.
    This transform (in contrast to the :class:`GlobalMDTransform`)
    additionally orthonormalizes both the global and the model basis against
    each other, ensuring that orthogonality and normalization is enforced
    across the unified bases.
    Parameters
    ----------
    model : :class:`menpo.model.base.StatisticalModel`
        A linear statistical shape model.
    transform_cls : :class:`menpo.transform.AlignableTransform`
        A class of :class:`menpo.transform.base.AlignableTransform`
        The align constructor will be called on this with the source
        and target landmarks. The target is
        set to the points generated from the model - the source is either
        given or set to the model's mean.
    source : :class:`menpo.shape.base.PointCloud`, optional
        The source landmarks of the transform. If no `source` is provided the
        mean of the model is used.
    sigma2 : float, optional
        Noise variance, forwarded to the underlying ``OrthoPDM`` (which
        builds its own similarity transform internally).
        Default: 1
    """
    def __init__(self, model, transform_cls, source=None, sigma2=1):
        self.pdm = OrthoPDM(model, sigma2=sigma2)
        self._cached_points = None
        self.transform = transform_cls(source, self.target)
| bsd-2-clause |
carolinux/QGIS | python/plugins/GdalTools/tools/doTranslate.py | 12 | 13134 | # -*- coding: utf-8 -*-
"""
***************************************************************************
doTranslate.py
---------------------
Date : June 2010
Copyright : (C) 2010 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giuseppe Sucameli'
__date__ = 'June 2010'
__copyright__ = '(C) 2010, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QObject, Qt, SIGNAL, QCoreApplication, QDir
from PyQt4.QtGui import QWidget, QMessageBox
from ui_widgetTranslate import Ui_GdalToolsWidget as Ui_Widget
from widgetBatchBase import GdalToolsBaseBatchWidget as BaseBatchWidget
from dialogSRS import GdalToolsSRSDialog as SRSDialog
import GdalTools_utils as Utils
import re
class GdalToolsDialog(QWidget, Ui_Widget, BaseBatchWidget):
    """GUI front-end for the ``gdal_translate`` command line utility.
    Collects conversion options from the Qt widgets, assembles the
    command-line argument list and supports both single-file and batch
    (whole directory) processing modes.
    """
    def __init__(self, iface):
        QWidget.__init__(self)
        self.iface = iface
        self.canvas = self.iface.mapCanvas()
        # values passed to gdal_translate's -expand flag, by combo index
        self.expand_method = ('gray', 'rgb', 'rgba')
        self.setupUi(self)
        BaseBatchWidget.__init__(self, self.iface, "gdal_translate")
        self.outSelector.setType(self.outSelector.FILE)
        # set the default QSpinBoxes and QProgressBar value
        self.outsizeSpin.setValue(25)
        self.progressBar.setValue(0)
        self.progressBar.hide()
        self.formatLabel.hide()
        self.formatCombo.hide()
        # "-expand gray" requires GDAL >= 1.7.0; hide it on older versions
        if Utils.GdalConfig.versionNum() < 1700:
            index = self.expandCombo.findText('gray', Qt.MatchFixedString)
            if index >= 0:
                self.expandCombo.removeItem(index)
        self.outputFormat = Utils.fillRasterOutputFormat()
        # widgets (and optional checkbox guards) whose changes should
        # refresh the generated command line
        self.setParamsStatus([
            (self.inSelector, SIGNAL("filenameChanged()")),
            (self.outSelector, SIGNAL("filenameChanged()")),
            (self.targetSRSEdit, SIGNAL("textChanged(const QString &)"), self.targetSRSCheck),
            (self.selectTargetSRSButton, None, self.targetSRSCheck),
            (self.creationOptionsWidget, SIGNAL("optionsChanged()")),
            (self.outsizeSpin, SIGNAL("valueChanged(const QString &)"), self.outsizeCheck),
            (self.nodataSpin, SIGNAL("valueChanged(int)"), self.nodataCheck),
            (self.expandCombo, SIGNAL("currentIndexChanged(int)"), self.expandCheck, 1600),
            (self.sdsCheck, SIGNAL("stateChanged(int)")),
            (self.srcwinEdit, SIGNAL("textChanged(const QString &)"), self.srcwinCheck),
            (self.prjwinEdit, SIGNAL("textChanged(const QString &)"), self.prjwinCheck)
        ])
        #self.connect(self.canvas, SIGNAL("layersChanged()"), self.fillInputLayerCombo)
        self.connect(self.inSelector, SIGNAL("layerChanged()"), self.fillTargetSRSEditDefault)
        self.connect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputFile)
        self.connect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputFileEdit)
        self.connect(self.selectTargetSRSButton, SIGNAL("clicked()"), self.fillTargetSRSEdit)
        self.connect(self.batchCheck, SIGNAL("stateChanged( int )"), self.switchToolMode)
        # add raster filters to combo
        self.formatCombo.addItems(Utils.FileFilter.allRastersFilter().split(";;"))
    def switchToolMode(self):
        """Toggle between single-file and batch mode, relabelling the
        selectors and rewiring their "select" buttons accordingly."""
        self.setCommandViewerEnabled(not self.batchCheck.isChecked())
        self.progressBar.setVisible(self.batchCheck.isChecked())
        self.formatLabel.setVisible(self.batchCheck.isChecked())
        self.formatCombo.setVisible(self.batchCheck.isChecked())
        self.inSelector.setType(self.inSelector.FILE if self.batchCheck.isChecked() else self.inSelector.FILE_LAYER)
        self.outSelector.clear()
        if self.batchCheck.isChecked():
            # remember the single-file labels so they can be restored later
            self.inFileLabel = self.label_3.text()
            self.outFileLabel = self.label_2.text()
            self.label_3.setText(QCoreApplication.translate("GdalTools", "&Input directory"))
            self.label_2.setText(QCoreApplication.translate("GdalTools", "&Output directory"))
            QObject.disconnect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputFile)
            QObject.disconnect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputFileEdit)
            QObject.connect(self.inSelector, SIGNAL("selectClicked()"), self. fillInputDir)
            QObject.connect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputDir)
        else:
            self.label_3.setText(self.inFileLabel)
            self.label_2.setText(self.outFileLabel)
            QObject.disconnect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputDir)
            QObject.disconnect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputDir)
            QObject.connect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputFile)
            QObject.connect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputFileEdit)
    def onLayersChanged(self):
        """Refresh the input selector with the currently loaded raster layers."""
        self.inSelector.setLayers(Utils.LayerRegistry.instance().getRasterLayers())
    def fillInputFile(self):
        """Prompt for a single input raster and refresh the target SRS."""
        lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
        inputFile = Utils.FileDialog.getOpenFileName(self, self.tr("Select the input file for Translate"), Utils.FileFilter.allRastersFilter(), lastUsedFilter)
        if not inputFile:
            return
        Utils.FileFilter.setLastUsedRasterFilter(lastUsedFilter)
        self.inSelector.setFilename(inputFile)
        # get SRS for target file if necessary and possible
        self.refreshTargetSRS()
    def fillInputDir(self):
        """Prompt for an input directory (batch mode) and pick a default
        target SRS from the first raster found in it."""
        inputDir = Utils.FileDialog.getExistingDirectory(self, self.tr("Select the input directory with files to Translate"))
        if not inputDir:
            return
        self.inSelector.setFilename(inputDir)
        filter = Utils.getRasterExtensions()
        workDir = QDir(inputDir)
        workDir.setFilter(QDir.Files | QDir.NoSymLinks | QDir.NoDotAndDotDot)
        workDir.setNameFilters(filter)
        # search for a valid SRS, then use it as default target SRS
        srs = ''
        for fname in workDir.entryList():
            fl = inputDir + "/" + fname
            srs = Utils.getRasterSRS(self, fl)
            if srs:
                break
        self.targetSRSEdit.setText(srs)
    def fillOutputFileEdit(self):
        """Prompt for the output file and remember the chosen format."""
        lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
        outputFile = Utils.FileDialog.getSaveFileName(self, self.tr("Select the raster file to save the results to"), Utils.FileFilter.saveRastersFilter(), lastUsedFilter)
        if not outputFile:
            return
        Utils.FileFilter.setLastUsedRasterFilter(lastUsedFilter)
        self.outputFormat = Utils.fillRasterOutputFormat(lastUsedFilter, outputFile)
        self.outSelector.setFilename(outputFile)
    def fillOutputDir(self):
        """Prompt for the output directory (batch mode)."""
        outputDir = Utils.FileDialog.getExistingDirectory(self, self.tr("Select the output directory to save the results to"))
        if not outputDir:
            return
        self.outSelector.setFilename(outputDir)
    def fillTargetSRSEditDefault(self):
        """Refresh the target SRS when an input layer is selected."""
        if self.inSelector.layer() is None:
            return
        self.refreshTargetSRS()
    def refreshTargetSRS(self):
        """Read the SRS of the current input and show it in the edit box."""
        self.targetSRSEdit.setText(Utils.getRasterSRS(self, self.getInputFileName()))
    def fillTargetSRSEdit(self):
        """Let the user pick a target SRS from the SRS selection dialog."""
        dialog = SRSDialog("Select the target SRS", self)
        if dialog.exec_():
            self.targetSRSEdit.setText(dialog.getProjection())
    def getArguments(self):
        """Build the gdal_translate argument list from the widget state.
        In batch mode only the shared options (plus an optional -of) are
        returned; per-file input/output names are appended by the batch
        runner. In single-file mode the input/output names are included.
        """
        arguments = []
        if self.targetSRSCheck.isChecked() and self.targetSRSEdit.text():
            arguments.append("-a_srs")
            arguments.append(self.targetSRSEdit.text())
        if self.creationOptionsGroupBox.isChecked():
            for opt in self.creationOptionsWidget.options():
                arguments.extend(["-co", opt])
        if self.outsizeCheck.isChecked() and self.outsizeSpin.value() != 100:
            # -outsize takes the percentage twice (width and height)
            arguments.append("-outsize")
            arguments.append(self.outsizeSpin.text())
            arguments.append(self.outsizeSpin.text())
        if self.expandCheck.isChecked():
            arguments.append("-expand")
            arguments.append(self.expand_method[self.expandCombo.currentIndex()])
        if self.nodataCheck.isChecked():
            arguments.append("-a_nodata")
            arguments.append(unicode(self.nodataSpin.value()))
        if self.sdsCheck.isChecked():
            arguments.append("-sds")
        if self.srcwinCheck.isChecked() and self.srcwinEdit.text():
            coordList = self.srcwinEdit.text().split() # split the string on whitespace(s)
            if len(coordList) == 4 and coordList[3]:
                try:
                    for x in coordList:
                        int(x)
                except ValueError:
                    #print "Coordinates must be integer numbers."
                    QMessageBox.critical(self, self.tr("Translate - srcwin"), self.tr("Image coordinates (pixels) must be integer numbers."))
                else:
                    arguments.append("-srcwin")
                    for x in coordList:
                        arguments.append(x)
        if self.prjwinCheck.isChecked() and self.prjwinEdit.text():
            coordList = self.prjwinEdit.text().split() # split the string on whitespace(s)
            if len(coordList) == 4 and coordList[3]:
                try:
                    for x in coordList:
                        float(x)
                except ValueError:
                    #print "Coordinates must be integer numbers."
                    QMessageBox.critical(self, self.tr("Translate - prjwin"), self.tr("Image coordinates (geographic) must be numbers."))
                else:
                    arguments.append("-projwin")
                    for x in coordList:
                        arguments.append(x)
        if self.isBatchEnabled():
            # NOTE(review): both branches return `arguments`; the condition
            # only decides whether "-of <format>" gets appended first.
            if self.formatCombo.currentIndex() != 0:
                arguments.append("-of")
                arguments.append(Utils.fillRasterOutputFormat(self.formatCombo.currentText()))
                return arguments
            else:
                return arguments
        outputFn = self.getOutputFileName()
        if outputFn:
            arguments.append("-of")
            arguments.append(self.outputFormat)
        arguments.append(self.getInputFileName())
        arguments.append(outputFn)
        # set creation options filename/layer for validation
        if self.inSelector.layer():
            self.creationOptionsWidget.setRasterLayer(self.inSelector.layer())
        else:
            self.creationOptionsWidget.setRasterFileName(self.getInputFileName())
        return arguments
    def getInputFileName(self):
        """Return the currently selected input file (or directory)."""
        return self.inSelector.filename()
    def getOutputFileName(self):
        """Return the currently selected output file (or directory)."""
        return self.outSelector.filename()
    def addLayerIntoCanvas(self, fileInfo):
        """Load the produced raster into the QGIS canvas."""
        self.iface.addRasterLayer(fileInfo.filePath())
    def isBatchEnabled(self):
        """Return True when batch (directory) mode is active."""
        return self.batchCheck.isChecked()
    def setProgressRange(self, maximum):
        """Set the batch progress bar range to [0, maximum]."""
        self.progressBar.setRange(0, maximum)
    def updateProgress(self, index, total):
        """Advance the progress bar; reset it once the batch is finished."""
        if index < total:
            self.progressBar.setValue(index + 1)
        else:
            self.progressBar.setValue(0)
    def batchRun(self):
        """Collect the input rasters of the chosen directory and start the
        batch translation, deriving the output extension from the selected
        format filter (falling back to .tif)."""
        # NOTE(review): these patterns are not raw strings; '\)' and '\.'
        # only work because Python leaves unknown escape sequences intact.
        exts = re.sub('\).*$', '', re.sub('^.*\(', '', self.formatCombo.currentText())).split(" ")
        if len(exts) > 0 and exts[0] != "*" and exts[0] != "*.*":
            outExt = exts[0].replace("*", "")
        else:
            outExt = ".tif"
        self.base.enableRun(False)
        self.base.setCursor(Qt.WaitCursor)
        inDir = self.getInputFileName()
        outDir = self.getOutputFileName()
        filter = Utils.getRasterExtensions()
        workDir = QDir(inDir)
        workDir.setFilter(QDir.Files | QDir.NoSymLinks | QDir.NoDotAndDotDot)
        workDir.setNameFilters(filter)
        files = workDir.entryList()
        self.inFiles = []
        self.outFiles = []
        for f in files:
            self.inFiles.append(inDir + "/" + f)
            if outDir is not None:
                outFile = re.sub("\.[a-zA-Z0-9]{2,4}", outExt, f)
                self.outFiles.append(outDir + "/" + outFile)
        self.errors = []
        self.batchIndex = 0
        self.batchTotal = len(self.inFiles)
        self.setProgressRange(self.batchTotal)
        self.runItem(self.batchIndex, self.batchTotal)
| gpl-2.0 |
schinckel/django-countries | django_countries/data.py | 1 | 17503 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a self-generating script that contains all of the iso3166-1 data.
To regenerate, a CSV file must be created that contains the latest data. Here's
how to do that:
1. Visit https://www.iso.org/obp
2. Click the "Country Codes" radio option and click the search button
3. Filter by "Officially assigned codes"
4. Change the results per page to 300
5. Copy the html table and paste into Libreoffice Calc / Excel
6. Delete the French name column
7. Save as a CSV file in django_countries/iso3166-1.csv
8. Run this script from the command line
"""
from __future__ import unicode_literals
import glob
import os
from django_countries.base import CountriesBase
# Use Django's lazy translation when available; fall back to an identity
# function so this module can be imported (or executed for regeneration)
# without Django installed.
try:
    from django.utils.translation import ugettext_lazy as _
except ImportError: # pragma: no cover
    # Allows this module to be executed without Django installed.
    def _(x):
        return x
# Nicely titled (and translatable) country names.
# Keys are ISO 3166-1 alpha-2 codes; values are lazily translated names.
COUNTRIES = {
    "AF": _("Afghanistan"),
    "AX": _("Åland Islands"),
    "AL": _("Albania"),
    "DZ": _("Algeria"),
    "AS": _("American Samoa"),
    "AD": _("Andorra"),
    "AO": _("Angola"),
    "AI": _("Anguilla"),
    "AQ": _("Antarctica"),
    "AG": _("Antigua and Barbuda"),
    "AR": _("Argentina"),
    "AM": _("Armenia"),
    "AW": _("Aruba"),
    "AU": _("Australia"),
    "AT": _("Austria"),
    "AZ": _("Azerbaijan"),
    "BS": _("Bahamas"),
    "BH": _("Bahrain"),
    "BD": _("Bangladesh"),
    "BB": _("Barbados"),
    "BY": _("Belarus"),
    "BE": _("Belgium"),
    "BZ": _("Belize"),
    "BJ": _("Benin"),
    "BM": _("Bermuda"),
    "BT": _("Bhutan"),
    "BO": _("Bolivia (Plurinational State of)"),
    "BQ": _("Bonaire, Sint Eustatius and Saba"),
    "BA": _("Bosnia and Herzegovina"),
    "BW": _("Botswana"),
    "BV": _("Bouvet Island"),
    "BR": _("Brazil"),
    "IO": _("British Indian Ocean Territory"),
    "BN": _("Brunei Darussalam"),
    "BG": _("Bulgaria"),
    "BF": _("Burkina Faso"),
    "BI": _("Burundi"),
    "CV": _("Cabo Verde"),
    "KH": _("Cambodia"),
    "CM": _("Cameroon"),
    "CA": _("Canada"),
    "KY": _("Cayman Islands"),
    "CF": _("Central African Republic"),
    "TD": _("Chad"),
    "CL": _("Chile"),
    "CN": _("China"),
    "CX": _("Christmas Island"),
    "CC": _("Cocos (Keeling) Islands"),
    "CO": _("Colombia"),
    "KM": _("Comoros"),
    "CD": _("Congo (the Democratic Republic of the)"),
    "CG": _("Congo"),
    "CK": _("Cook Islands"),
    "CR": _("Costa Rica"),
    "CI": _("Côte d'Ivoire"),
    "HR": _("Croatia"),
    "CU": _("Cuba"),
    "CW": _("Curaçao"),
    "CY": _("Cyprus"),
    "CZ": _("Czechia"),
    "DK": _("Denmark"),
    "DJ": _("Djibouti"),
    "DM": _("Dominica"),
    "DO": _("Dominican Republic"),
    "EC": _("Ecuador"),
    "EG": _("Egypt"),
    "SV": _("El Salvador"),
    "GQ": _("Equatorial Guinea"),
    "ER": _("Eritrea"),
    "EE": _("Estonia"),
    "ET": _("Ethiopia"),
    "FK": _("Falkland Islands [Malvinas]"),
    "FO": _("Faroe Islands"),
    "FJ": _("Fiji"),
    "FI": _("Finland"),
    "FR": _("France"),
    "GF": _("French Guiana"),
    "PF": _("French Polynesia"),
    "TF": _("French Southern Territories"),
    "GA": _("Gabon"),
    "GM": _("Gambia"),
    "GE": _("Georgia"),
    "DE": _("Germany"),
    "GH": _("Ghana"),
    "GI": _("Gibraltar"),
    "GR": _("Greece"),
    "GL": _("Greenland"),
    "GD": _("Grenada"),
    "GP": _("Guadeloupe"),
    "GU": _("Guam"),
    "GT": _("Guatemala"),
    "GG": _("Guernsey"),
    "GN": _("Guinea"),
    "GW": _("Guinea-Bissau"),
    "GY": _("Guyana"),
    "HT": _("Haiti"),
    "HM": _("Heard Island and McDonald Islands"),
    "VA": _("Holy See"),
    "HN": _("Honduras"),
    "HK": _("Hong Kong"),
    "HU": _("Hungary"),
    "IS": _("Iceland"),
    "IN": _("India"),
    "ID": _("Indonesia"),
    "IR": _("Iran (Islamic Republic of)"),
    "IQ": _("Iraq"),
    "IE": _("Ireland"),
    "IM": _("Isle of Man"),
    "IL": _("Israel"),
    "IT": _("Italy"),
    "JM": _("Jamaica"),
    "JP": _("Japan"),
    "JE": _("Jersey"),
    "JO": _("Jordan"),
    "KZ": _("Kazakhstan"),
    "KE": _("Kenya"),
    "KI": _("Kiribati"),
    "KP": _("Korea (the Democratic People's Republic of)"),
    "KR": _("Korea (the Republic of)"),
    "KW": _("Kuwait"),
    "KG": _("Kyrgyzstan"),
    "LA": _("Lao People's Democratic Republic"),
    "LV": _("Latvia"),
    "LB": _("Lebanon"),
    "LS": _("Lesotho"),
    "LR": _("Liberia"),
    "LY": _("Libya"),
    "LI": _("Liechtenstein"),
    "LT": _("Lithuania"),
    "LU": _("Luxembourg"),
    "MO": _("Macao"),
    "MK": _("Macedonia (the former Yugoslav Republic of)"),
    "MG": _("Madagascar"),
    "MW": _("Malawi"),
    "MY": _("Malaysia"),
    "MV": _("Maldives"),
    "ML": _("Mali"),
    "MT": _("Malta"),
    "MH": _("Marshall Islands"),
    "MQ": _("Martinique"),
    "MR": _("Mauritania"),
    "MU": _("Mauritius"),
    "YT": _("Mayotte"),
    "MX": _("Mexico"),
    "FM": _("Micronesia (Federated States of)"),
    "MD": _("Moldova (the Republic of)"),
    "MC": _("Monaco"),
    "MN": _("Mongolia"),
    "ME": _("Montenegro"),
    "MS": _("Montserrat"),
    "MA": _("Morocco"),
    "MZ": _("Mozambique"),
    "MM": _("Myanmar"),
    "NA": _("Namibia"),
    "NR": _("Nauru"),
    "NP": _("Nepal"),
    "NL": _("Netherlands"),
    "NC": _("New Caledonia"),
    "NZ": _("New Zealand"),
    "NI": _("Nicaragua"),
    "NE": _("Niger"),
    "NG": _("Nigeria"),
    "NU": _("Niue"),
    "NF": _("Norfolk Island"),
    "MP": _("Northern Mariana Islands"),
    "NO": _("Norway"),
    "OM": _("Oman"),
    "PK": _("Pakistan"),
    "PW": _("Palau"),
    "PS": _("Palestine, State of"),
    "PA": _("Panama"),
    "PG": _("Papua New Guinea"),
    "PY": _("Paraguay"),
    "PE": _("Peru"),
    "PH": _("Philippines"),
    "PN": _("Pitcairn"),
    "PL": _("Poland"),
    "PT": _("Portugal"),
    "PR": _("Puerto Rico"),
    "QA": _("Qatar"),
    "RE": _("Réunion"),
    "RO": _("Romania"),
    "RU": _("Russian Federation"),
    "RW": _("Rwanda"),
    "BL": _("Saint Barthélemy"),
    "SH": _("Saint Helena, Ascension and Tristan da Cunha"),
    "KN": _("Saint Kitts and Nevis"),
    "LC": _("Saint Lucia"),
    "MF": _("Saint Martin (French part)"),
    "PM": _("Saint Pierre and Miquelon"),
    "VC": _("Saint Vincent and the Grenadines"),
    "WS": _("Samoa"),
    "SM": _("San Marino"),
    "ST": _("Sao Tome and Principe"),
    "SA": _("Saudi Arabia"),
    "SN": _("Senegal"),
    "RS": _("Serbia"),
    "SC": _("Seychelles"),
    "SL": _("Sierra Leone"),
    "SG": _("Singapore"),
    "SX": _("Sint Maarten (Dutch part)"),
    "SK": _("Slovakia"),
    "SI": _("Slovenia"),
    "SB": _("Solomon Islands"),
    "SO": _("Somalia"),
    "ZA": _("South Africa"),
    "GS": _("South Georgia and the South Sandwich Islands"),
    "SS": _("South Sudan"),
    "ES": _("Spain"),
    "LK": _("Sri Lanka"),
    "SD": _("Sudan"),
    "SR": _("Suriname"),
    "SJ": _("Svalbard and Jan Mayen"),
    "SZ": _("Swaziland"),
    "SE": _("Sweden"),
    "CH": _("Switzerland"),
    "SY": _("Syrian Arab Republic"),
    "TW": _("Taiwan (Province of China)"),
    "TJ": _("Tajikistan"),
    "TZ": _("Tanzania, United Republic of"),
    "TH": _("Thailand"),
    "TL": _("Timor-Leste"),
    "TG": _("Togo"),
    "TK": _("Tokelau"),
    "TO": _("Tonga"),
    "TT": _("Trinidad and Tobago"),
    "TN": _("Tunisia"),
    "TR": _("Turkey"),
    "TM": _("Turkmenistan"),
    "TC": _("Turks and Caicos Islands"),
    "TV": _("Tuvalu"),
    "UG": _("Uganda"),
    "UA": _("Ukraine"),
    "AE": _("United Arab Emirates"),
    "GB": _("United Kingdom of Great Britain and Northern Ireland"),
    "UM": _("United States Minor Outlying Islands"),
    "US": _("United States of America"),
    "UY": _("Uruguay"),
    "UZ": _("Uzbekistan"),
    "VU": _("Vanuatu"),
    "VE": _("Venezuela (Bolivarian Republic of)"),
    "VN": _("Viet Nam"),
    "VG": _("Virgin Islands (British)"),
    "VI": _("Virgin Islands (U.S.)"),
    "WF": _("Wallis and Futuna"),
    "EH": _("Western Sahara"),
    "YE": _("Yemen"),
    "ZM": _("Zambia"),
    "ZW": _("Zimbabwe"),
}
# Alternative codes per alpha-2 code: (ISO 3166-1 alpha-3, numeric code).
ALT_CODES = {
    "AF": ("AFG", 4),
    "AX": ("ALA", 248),
    "AL": ("ALB", 8),
    "DZ": ("DZA", 12),
    "AS": ("ASM", 16),
    "AD": ("AND", 20),
    "AO": ("AGO", 24),
    "AI": ("AIA", 660),
    "AQ": ("ATA", 10),
    "AG": ("ATG", 28),
    "AR": ("ARG", 32),
    "AM": ("ARM", 51),
    "AW": ("ABW", 533),
    "AU": ("AUS", 36),
    "AT": ("AUT", 40),
    "AZ": ("AZE", 31),
    "BS": ("BHS", 44),
    "BH": ("BHR", 48),
    "BD": ("BGD", 50),
    "BB": ("BRB", 52),
    "BY": ("BLR", 112),
    "BE": ("BEL", 56),
    "BZ": ("BLZ", 84),
    "BJ": ("BEN", 204),
    "BM": ("BMU", 60),
    "BT": ("BTN", 64),
    "BO": ("BOL", 68),
    "BQ": ("BES", 535),
    "BA": ("BIH", 70),
    "BW": ("BWA", 72),
    "BV": ("BVT", 74),
    "BR": ("BRA", 76),
    "IO": ("IOT", 86),
    "BN": ("BRN", 96),
    "BG": ("BGR", 100),
    "BF": ("BFA", 854),
    "BI": ("BDI", 108),
    "CV": ("CPV", 132),
    "KH": ("KHM", 116),
    "CM": ("CMR", 120),
    "CA": ("CAN", 124),
    "KY": ("CYM", 136),
    "CF": ("CAF", 140),
    "TD": ("TCD", 148),
    "CL": ("CHL", 152),
    "CN": ("CHN", 156),
    "CX": ("CXR", 162),
    "CC": ("CCK", 166),
    "CO": ("COL", 170),
    "KM": ("COM", 174),
    "CD": ("COD", 180),
    "CG": ("COG", 178),
    "CK": ("COK", 184),
    "CR": ("CRI", 188),
    "CI": ("CIV", 384),
    "HR": ("HRV", 191),
    "CU": ("CUB", 192),
    "CW": ("CUW", 531),
    "CY": ("CYP", 196),
    "CZ": ("CZE", 203),
    "DK": ("DNK", 208),
    "DJ": ("DJI", 262),
    "DM": ("DMA", 212),
    "DO": ("DOM", 214),
    "EC": ("ECU", 218),
    "EG": ("EGY", 818),
    "SV": ("SLV", 222),
    "GQ": ("GNQ", 226),
    "ER": ("ERI", 232),
    "EE": ("EST", 233),
    "ET": ("ETH", 231),
    "FK": ("FLK", 238),
    "FO": ("FRO", 234),
    "FJ": ("FJI", 242),
    "FI": ("FIN", 246),
    "FR": ("FRA", 250),
    "GF": ("GUF", 254),
    "PF": ("PYF", 258),
    "TF": ("ATF", 260),
    "GA": ("GAB", 266),
    "GM": ("GMB", 270),
    "GE": ("GEO", 268),
    "DE": ("DEU", 276),
    "GH": ("GHA", 288),
    "GI": ("GIB", 292),
    "GR": ("GRC", 300),
    "GL": ("GRL", 304),
    "GD": ("GRD", 308),
    "GP": ("GLP", 312),
    "GU": ("GUM", 316),
    "GT": ("GTM", 320),
    "GG": ("GGY", 831),
    "GN": ("GIN", 324),
    "GW": ("GNB", 624),
    "GY": ("GUY", 328),
    "HT": ("HTI", 332),
    "HM": ("HMD", 334),
    "VA": ("VAT", 336),
    "HN": ("HND", 340),
    "HK": ("HKG", 344),
    "HU": ("HUN", 348),
    "IS": ("ISL", 352),
    "IN": ("IND", 356),
    "ID": ("IDN", 360),
    "IR": ("IRN", 364),
    "IQ": ("IRQ", 368),
    "IE": ("IRL", 372),
    "IM": ("IMN", 833),
    "IL": ("ISR", 376),
    "IT": ("ITA", 380),
    "JM": ("JAM", 388),
    "JP": ("JPN", 392),
    "JE": ("JEY", 832),
    "JO": ("JOR", 400),
    "KZ": ("KAZ", 398),
    "KE": ("KEN", 404),
    "KI": ("KIR", 296),
    "KP": ("PRK", 408),
    "KR": ("KOR", 410),
    "KW": ("KWT", 414),
    "KG": ("KGZ", 417),
    "LA": ("LAO", 418),
    "LV": ("LVA", 428),
    "LB": ("LBN", 422),
    "LS": ("LSO", 426),
    "LR": ("LBR", 430),
    "LY": ("LBY", 434),
    "LI": ("LIE", 438),
    "LT": ("LTU", 440),
    "LU": ("LUX", 442),
    "MO": ("MAC", 446),
    "MK": ("MKD", 807),
    "MG": ("MDG", 450),
    "MW": ("MWI", 454),
    "MY": ("MYS", 458),
    "MV": ("MDV", 462),
    "ML": ("MLI", 466),
    "MT": ("MLT", 470),
    "MH": ("MHL", 584),
    "MQ": ("MTQ", 474),
    "MR": ("MRT", 478),
    "MU": ("MUS", 480),
    "YT": ("MYT", 175),
    "MX": ("MEX", 484),
    "FM": ("FSM", 583),
    "MD": ("MDA", 498),
    "MC": ("MCO", 492),
    "MN": ("MNG", 496),
    "ME": ("MNE", 499),
    "MS": ("MSR", 500),
    "MA": ("MAR", 504),
    "MZ": ("MOZ", 508),
    "MM": ("MMR", 104),
    "NA": ("NAM", 516),
    "NR": ("NRU", 520),
    "NP": ("NPL", 524),
    "NL": ("NLD", 528),
    "NC": ("NCL", 540),
    "NZ": ("NZL", 554),
    "NI": ("NIC", 558),
    "NE": ("NER", 562),
    "NG": ("NGA", 566),
    "NU": ("NIU", 570),
    "NF": ("NFK", 574),
    "MP": ("MNP", 580),
    "NO": ("NOR", 578),
    "OM": ("OMN", 512),
    "PK": ("PAK", 586),
    "PW": ("PLW", 585),
    "PS": ("PSE", 275),
    "PA": ("PAN", 591),
    "PG": ("PNG", 598),
    "PY": ("PRY", 600),
    "PE": ("PER", 604),
    "PH": ("PHL", 608),
    "PN": ("PCN", 612),
    "PL": ("POL", 616),
    "PT": ("PRT", 620),
    "PR": ("PRI", 630),
    "QA": ("QAT", 634),
    "RE": ("REU", 638),
    "RO": ("ROU", 642),
    "RU": ("RUS", 643),
    "RW": ("RWA", 646),
    "BL": ("BLM", 652),
    "SH": ("SHN", 654),
    "KN": ("KNA", 659),
    "LC": ("LCA", 662),
    "MF": ("MAF", 663),
    "PM": ("SPM", 666),
    "VC": ("VCT", 670),
    "WS": ("WSM", 882),
    "SM": ("SMR", 674),
    "ST": ("STP", 678),
    "SA": ("SAU", 682),
    "SN": ("SEN", 686),
    "RS": ("SRB", 688),
    "SC": ("SYC", 690),
    "SL": ("SLE", 694),
    "SG": ("SGP", 702),
    "SX": ("SXM", 534),
    "SK": ("SVK", 703),
    "SI": ("SVN", 705),
    "SB": ("SLB", 90),
    "SO": ("SOM", 706),
    "ZA": ("ZAF", 710),
    "GS": ("SGS", 239),
    "SS": ("SSD", 728),
    "ES": ("ESP", 724),
    "LK": ("LKA", 144),
    "SD": ("SDN", 729),
    "SR": ("SUR", 740),
    "SJ": ("SJM", 744),
    "SZ": ("SWZ", 748),
    "SE": ("SWE", 752),
    "CH": ("CHE", 756),
    "SY": ("SYR", 760),
    "TW": ("TWN", 158),
    "TJ": ("TJK", 762),
    "TZ": ("TZA", 834),
    "TH": ("THA", 764),
    "TL": ("TLS", 626),
    "TG": ("TGO", 768),
    "TK": ("TKL", 772),
    "TO": ("TON", 776),
    "TT": ("TTO", 780),
    "TN": ("TUN", 788),
    "TR": ("TUR", 792),
    "TM": ("TKM", 795),
    "TC": ("TCA", 796),
    "TV": ("TUV", 798),
    "UG": ("UGA", 800),
    "UA": ("UKR", 804),
    "AE": ("ARE", 784),
    "GB": ("GBR", 826),
    "UM": ("UMI", 581),
    "US": ("USA", 840),
    "UY": ("URY", 858),
    "UZ": ("UZB", 860),
    "VU": ("VUT", 548),
    "VE": ("VEN", 862),
    "VN": ("VNM", 704),
    "VG": ("VGB", 92),
    "VI": ("VIR", 850),
    "WF": ("WLF", 876),
    "EH": ("ESH", 732),
    "YE": ("YEM", 887),
    "ZM": ("ZMB", 894),
    "ZW": ("ZWE", 716),
}
def self_generate(
        output_filename, filename='iso3166-1.csv'):  # pragma: no cover
    """
    Regenerate the COUNTRIES and ALT_CODES literals of this module from a
    UTF-8 CSV file of ISO 3166-1 data (short name, alpha-2, alpha-3,
    numeric code as the first four columns) and write the result to
    `output_filename`.
    Returns the list of (name, alpha-2 code) tuples that were written.
    """
    import csv
    import re
    countries = []
    alt_codes = []
    with open(filename, 'rb') as csv_file:
        for row in csv.reader(csv_file):
            name = row[0].decode('utf-8').rstrip('*')
            name = re.sub(r'\(the\)', '', name)
            if name:
                countries.append((name, row[1].decode('utf-8')))
                alt_codes.append((
                    row[1].decode('utf-8'),
                    row[2].decode('utf-8'),
                    int(row[3]),
                ))
    with open(__file__, 'r') as source_file:
        contents = source_file.read()
    # Write countries. Patterns are raw strings so the regex escapes are
    # passed through verbatim (avoids invalid-escape warnings).
    bits = re.match(
        r'(.*\nCOUNTRIES = \{\n)(.*?)(\n\}.*)', contents, re.DOTALL).groups()
    country_list = []
    for name, code in countries:
        # Escape double quotes so the generated literal stays valid Python.
        name = name.replace('"', r'\"').strip()
        country_list.append(
            ' "{code}": _("{name}"),'.format(name=name, code=code))
    content = bits[0]
    content += '\n'.join(country_list).encode('utf-8')
    # Write alt codes.
    alt_bits = re.match(
        r'(.*\nALT_CODES = \{\n)(.*)(\n\}.*)', bits[2], re.DOTALL).groups()
    alt_list = []
    for code, code3, codenum in alt_codes:
        # (the stray "name = name.replace(...)" copy-paste line that used to
        # live here was dead code - `name` is unused in this loop)
        alt_list.append(
            ' "{code}": ("{code3}", {codenum}),'.format(
                code=code, code3=code3, codenum=codenum))
    content += alt_bits[0]
    content += '\n'.join(alt_list).encode('utf-8')
    content += alt_bits[2]
    # Generate file.
    with open(output_filename, 'wb') as output_file:
        output_file.write(content)
    return countries
def check_flags(verbosity=1):
    """Report country codes missing a flag image and orphaned flag files."""
    flag_files = {}
    flags_dir = os.path.join(os.path.dirname(__file__), 'static', 'flags')
    for gif_path in glob.glob(os.path.join(flags_dir, '*.gif')):
        code = os.path.basename(os.path.splitext(gif_path)[0]).upper()
        flag_files[code] = gif_path
    # Codes with no matching .gif file.
    flags_missing = set(COUNTRIES) - set(flag_files)
    if flags_missing:  # pragma: no cover
        print("The following country codes are missing a flag:")
        for code in sorted(flags_missing):
            print(" {0} ({1})".format(code, COUNTRIES[code]))
    elif verbosity:  # pragma: no cover
        print("All country codes have flags. :)")
    # Flag files with no matching country code.
    code_missing = set(flag_files) - set(COUNTRIES)
    # Special-case EU and __
    for special_code in ('EU', '__'):
        code_missing.discard(special_code)
    if code_missing:  # pragma: no cover
        print("")
        print("The following flags don't have a matching country code:")
        for path in sorted(code_missing):
            print(" {0}".format(path))
def check_common_names():
    """Report COMMON_NAMES entries that no longer match an official code."""
    unmatched = set(CountriesBase.COMMON_NAMES) - set(COUNTRIES)
    if not unmatched:  # pragma: no cover
        return
    print("")
    print(
        "The following common names do not match an official country "
        "code:")
    for code in sorted(unmatched):
        print(" {0}".format(code))
if __name__ == '__main__':  # pragma: no cover
    # Regenerate this module in place from the ISO CSV, then sanity-check
    # the resulting data against the flag images and common-name overrides.
    countries = self_generate(__file__)
    print('Wrote {0} countries.'.format(len(countries)))
    print("")
    check_flags()
    check_common_names()
| mit |
kanagasabapathi/python-for-android | python3-alpha/python3-src/Lib/ctypes/test/test_numbers.py | 45 | 8794 | from ctypes import *
import unittest
import struct
def valid_ranges(*types):
# given a sequence of numeric types, collect their _type_
# attribute, which is a single format character compatible with
# the struct module, use the struct module to calculate the
# minimum and maximum value allowed for this format.
# Returns a list of (min, max) values.
result = []
for t in types:
fmt = t._type_
size = struct.calcsize(fmt)
a = struct.unpack(fmt, (b"\x00"*32)[:size])[0]
b = struct.unpack(fmt, (b"\xFF"*32)[:size])[0]
c = struct.unpack(fmt, (b"\x7F"+b"\x00"*32)[:size])[0]
d = struct.unpack(fmt, (b"\x80"+b"\xFF"*32)[:size])[0]
result.append((min(a, b, c, d), max(a, b, c, d)))
return result
# The PyCArgObject type returned by byref(); from_param() results are
# compared against it in the tests below.
ArgType = type(byref(c_int(0)))
unsigned_types = [c_ubyte, c_ushort, c_uint, c_ulong]
signed_types = [c_byte, c_short, c_int, c_long]
bool_types = []
float_types = [c_double, c_float]
# The (u)longlong and bool types may be absent on minimal builds, so they
# are appended only when the names exist.  (c_longlong was previously
# listed twice -- literally above and appended again here -- which made
# several tests exercise it twice.)
try:
    c_ulonglong
    c_longlong
except NameError:
    pass
else:
    unsigned_types.append(c_ulonglong)
    signed_types.append(c_longlong)
try:
    c_bool
except NameError:
    pass
else:
    bool_types.append(c_bool)
unsigned_ranges = valid_ranges(*unsigned_types)
signed_ranges = valid_ranges(*signed_types)
# Mixed truthy/falsy sample values for the c_bool constructor tests.
bool_values = [True, False, 0, 1, -1, 5000, 'test', [], [1]]
################################################################
class NumberTestCase(unittest.TestCase):
    """Behavioral tests for the simple numeric ctypes types."""

    def test_default_init(self):
        # default values are set to zero
        for t in signed_types + unsigned_types + float_types:
            self.assertEqual(t().value, 0)

    def test_unsigned_values(self):
        # the value given to the constructor is available
        # as the 'value' attribute
        for t, (l, h) in zip(unsigned_types, unsigned_ranges):
            self.assertEqual(t(l).value, l)
            self.assertEqual(t(h).value, h)

    def test_signed_values(self):
        # see above
        for t, (l, h) in zip(signed_types, signed_ranges):
            self.assertEqual(t(l).value, l)
            self.assertEqual(t(h).value, h)

    def test_bool_values(self):
        from operator import truth
        for t, v in zip(bool_types, bool_values):
            self.assertEqual(t(v).value, truth(v))

    def test_typeerror(self):
        # Only numbers are allowed in the constructor,
        # otherwise TypeError is raised
        for t in signed_types + unsigned_types + float_types:
            self.assertRaises(TypeError, t, "")
            self.assertRaises(TypeError, t, None)

##    def test_valid_ranges(self):
##        # invalid values of the correct type
##        # raise ValueError (not OverflowError)
##        for t, (l, h) in zip(unsigned_types, unsigned_ranges):
##            self.assertRaises(ValueError, t, l-1)
##            self.assertRaises(ValueError, t, h+1)

    def test_from_param(self):
        # the from_param class method attribute always
        # returns PyCArgObject instances
        for t in signed_types + unsigned_types + float_types:
            self.assertEqual(ArgType, type(t.from_param(0)))

    def test_byref(self):
        # calling byref returns also a PyCArgObject instance
        for t in signed_types + unsigned_types + float_types + bool_types:
            parm = byref(t())
            self.assertEqual(ArgType, type(parm))

    def test_floats(self):
        # c_float and c_double can be created from
        # Python int, long and float
        class FloatLike(object):
            def __float__(self):
                return 2.0
        f = FloatLike()
        for t in float_types:
            self.assertEqual(t(2.0).value, 2.0)
            self.assertEqual(t(2).value, 2.0)
            self.assertEqual(t(2).value, 2.0)
            self.assertEqual(t(f).value, 2.0)

    def test_integers(self):
        class FloatLike(object):
            def __float__(self):
                return 2.0
        f = FloatLike()
        class IntLike(object):
            def __int__(self):
                return 2
        i = IntLike()
        # integers cannot be constructed from floats,
        # but from integer-like objects
        for t in signed_types + unsigned_types:
            self.assertRaises(TypeError, t, 3.14)
            self.assertRaises(TypeError, t, f)
            self.assertEqual(t(i).value, 2)

    def test_sizes(self):
        for t in signed_types + unsigned_types + float_types + bool_types:
            try:
                size = struct.calcsize(t._type_)
            except struct.error:
                # struct doesn't know this format code; skip the type
                continue
            # sizeof of the type...
            self.assertEqual(sizeof(t), size)
            # and sizeof of an instance
            self.assertEqual(sizeof(t()), size)

    def test_alignments(self):
        for t in signed_types + unsigned_types + float_types:
            code = t._type_ # the typecode
            align = struct.calcsize("c%c" % code) - struct.calcsize(code)
            # alignment of the type...
            self.assertEqual((code, alignment(t)),
                             (code, align))
            # and alignment of an instance
            self.assertEqual((code, alignment(t())),
                             (code, align))

    def test_int_from_address(self):
        from array import array
        for t in signed_types + unsigned_types:
            # the array module doesn't support all format codes
            # (no 'q' or 'Q')
            try:
                array(t._type_)
            except ValueError:
                continue
            a = array(t._type_, [100])
            # v now is an integer at an 'external' memory location
            v = t.from_address(a.buffer_info()[0])
            self.assertEqual(v.value, a[0])
            self.assertEqual(type(v), t)
            # changing the value at the memory location changes v's value also
            a[0] = 42
            self.assertEqual(v.value, a[0])

    def test_float_from_address(self):
        from array import array
        for t in float_types:
            a = array(t._type_, [3.14])
            v = t.from_address(a.buffer_info()[0])
            self.assertEqual(v.value, a[0])
            self.assertTrue(type(v) is t)
            a[0] = 2.3456e17
            self.assertEqual(v.value, a[0])
            self.assertTrue(type(v) is t)

    def test_char_from_address(self):
        from ctypes import c_char
        from array import array
        a = array('b', [0])
        a[0] = ord('x')
        v = c_char.from_address(a.buffer_info()[0])
        self.assertEqual(v.value, b'x')
        self.assertTrue(type(v) is c_char)
        a[0] = ord('?')
        self.assertEqual(v.value, b'?')

    # array does not support c_bool / 't'
    # def test_bool_from_address(self):
    #     from ctypes import c_bool
    #     from array import array
    #     a = array(c_bool._type_, [True])
    #     v = t.from_address(a.buffer_info()[0])
    #     self.assertEqual(v.value, a[0])
    #     self.assertEqual(type(v) is t)
    #     a[0] = False
    #     self.assertEqual(v.value, a[0])
    #     self.assertEqual(type(v) is t)

    def test_init(self):
        # c_int() can be initialized from Python's int, and c_int.
        # Not from c_long or so, which seems strange, and should
        # probably be changed:
        self.assertRaises(TypeError, c_int, c_long(42))

##    def test_perf(self):
##        check_perf()
from ctypes import _SimpleCData
class c_int_S(_SimpleCData):
    # Bare int-sized type built directly on _SimpleCData; used by the
    # performance checks below as a baseline against the public c_int.
    _type_ = "i"
    __slots__ = []
def run_test(rep, msg, func, arg=None):
    """Call *func* 5*rep times and print the average time per call in us.

    ``func`` is invoked with ``arg`` when one is given, otherwise with no
    arguments; ``msg`` labels the printed result.  Returns None.
    """
##    items = [None] * rep
    items = range(rep)
    # time.clock() was deprecated in 3.3 and removed in 3.8;
    # perf_counter() is the documented replacement for interval timing.
    from time import perf_counter
    if arg is not None:
        start = perf_counter()
        for i in items:
            func(arg); func(arg); func(arg); func(arg); func(arg)
        stop = perf_counter()
    else:
        start = perf_counter()
        for i in items:
            func(); func(); func(); func(); func()
        stop = perf_counter()
    print("%15s: %.2f us" % (msg, ((stop-start)*1e6/5/rep)))
def check_perf():
    """Time construction of int, c_int and c_int_S, with and without args."""
    from ctypes import c_int
    REP = 200000
    for label, constructor in (
            ("int()", int),
            ("int(999)", int),
            ("c_int()", c_int),
            ("c_int(999)", c_int),
            ("c_int_S()", c_int_S),
            ("c_int_S(999)", c_int_S),
    ):
        run_test(REP, label, constructor)
# Python 2.3 -OO, win2k, P4 700 MHz:
#
# int(): 0.87 us
# int(999): 0.87 us
# c_int(): 3.35 us
# c_int(999): 3.34 us
# c_int_S(): 3.23 us
# c_int_S(999): 3.24 us
# Python 2.2 -OO, win2k, P4 700 MHz:
#
# int(): 0.89 us
# int(999): 0.89 us
# c_int(): 9.99 us
# c_int(999): 10.02 us
# c_int_S(): 9.87 us
# c_int_S(999): 9.85 us
if __name__ == '__main__':
##    check_perf()
    # Run the NumberTestCase suite.
    unittest.main()
| apache-2.0 |
hunter007/django | tests/gis_tests/geoapp/test_sitemaps.py | 190 | 3235 | from __future__ import unicode_literals
import zipfile
from io import BytesIO
from xml.dom import minidom
from django.conf import settings
from django.contrib.sites.models import Site
from django.test import (
TestCase, ignore_warnings, modify_settings, override_settings,
skipUnlessDBFeature,
)
from django.utils.deprecation import RemovedInDjango110Warning
from .models import City, Country
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.sites', 'django.contrib.sitemaps']})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
@skipUnlessDBFeature("gis_enabled")
class GeoSitemapTest(TestCase):
    """Tests for the KML/KMZ geographic sitemap views."""

    def setUp(self):
        super(GeoSitemapTest, self).setUp()
        # The sitemaps framework needs a Site matching SITE_ID.
        Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()

    def assertChildNodes(self, elem, expected):
        "Taken from syndication/tests.py."
        actual = set(n.nodeName for n in elem.childNodes)
        expected = set(expected)
        self.assertEqual(actual, expected)

    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_geositemap_kml(self):
        "Tests KML/KMZ geographic sitemaps."
        for kml_type in ('kml', 'kmz'):
            # The URL for the sitemaps in urls.py have been updated
            # with a name but since reversing by Python path is tried first
            # before reversing by name and works since we're giving
            # name='django.contrib.gis.sitemaps.views.(kml|kmz)', we need
            # to silence the erroneous warning until reversing by dotted
            # path is removed. The test will work without modification when
            # it's removed.
            doc = minidom.parseString(self.client.get('/sitemaps/%s.xml' % kml_type).content)
            # Ensuring the right sitemaps namespace is present.
            urlset = doc.firstChild
            self.assertEqual(urlset.getAttribute('xmlns'), 'http://www.sitemaps.org/schemas/sitemap/0.9')
            urls = urlset.getElementsByTagName('url')
            self.assertEqual(2, len(urls)) # Should only be 2 sitemaps.
            for url in urls:
                self.assertChildNodes(url, ['loc'])
                # Getting the relative URL since we don't have a real site.
                kml_url = url.getElementsByTagName('loc')[0].childNodes[0].data.split('http://example.com')[1]
                if kml_type == 'kml':
                    kml_doc = minidom.parseString(self.client.get(kml_url).content)
                elif kml_type == 'kmz':
                    # Have to decompress KMZ before parsing.
                    buf = BytesIO(self.client.get(kml_url).content)
                    with zipfile.ZipFile(buf) as zf:
                        self.assertEqual(1, len(zf.filelist))
                        self.assertEqual('doc.kml', zf.filelist[0].filename)
                        kml_doc = minidom.parseString(zf.read('doc.kml'))
                # Ensuring the correct number of placemarks are in the KML doc.
                if 'city' in kml_url:
                    model = City
                elif 'country' in kml_url:
                    model = Country
                self.assertEqual(model.objects.count(), len(kml_doc.getElementsByTagName('Placemark')))
| bsd-3-clause |
seanli9jan/tensorflow | tensorflow/python/kernel_tests/matrix_exponential_op_test.py | 3 | 9042 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.linalg.linalg_impl.matrix_exponential."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.linalg import linalg_impl
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
def np_expm(x):  # pylint: disable=invalid-name
  """Slow but accurate Taylor series matrix exponential."""
  total = np.zeros(x.shape, dtype=x.dtype)
  term = np.eye(x.shape[0], dtype=x.dtype)
  # Accumulate I + x + x^2/2! + ... through the x^39/39! term.
  for k in range(1, 41):
    total += term
    term = np.dot(term, x) / float(k)
  return total
class ExponentialOpTest(test.TestCase):
  """Compares linalg_impl.matrix_exponential to the Taylor reference above."""

  def _verifyExponential(self, x, np_type):
    inp = x.astype(np_type)
    with self.cached_session(use_gpu=True):
      tf_ans = linalg_impl.matrix_exponential(inp)
      if x.size == 0:
        np_ans = np.empty(x.shape, dtype=np_type)
      else:
        if x.ndim > 2:
          # Batched input: apply the reference per trailing 2-D matrix.
          # (The comprehension variable shadows the argument `x` on purpose
          # in the original code; kept as-is.)
          np_ans = np.zeros(inp.shape, dtype=np_type)
          for i in itertools.product(*[range(x) for x in inp.shape[:-2]]):
            np_ans[i] = np_expm(inp[i])
        else:
          np_ans = np_expm(inp)
      out = tf_ans.eval()
      self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-3)

  def _verifyExponentialReal(self, x):
    for np_type in [np.float32, np.float64]:
      self._verifyExponential(x, np_type)

  def _verifyExponentialComplex(self, x):
    for np_type in [np.complex64, np.complex128]:
      self._verifyExponential(x, np_type)

  def _makeBatch(self, matrix1, matrix2):
    # Stack the two matrices and tile to a (2, 3, 2, n, n) batch.
    matrix_batch = np.concatenate(
        [np.expand_dims(matrix1, 0),
         np.expand_dims(matrix2, 0)])
    matrix_batch = np.tile(matrix_batch, [2, 3, 1, 1])
    return matrix_batch

  def testNonsymmetricReal(self):
    # 2x2 matrices
    matrix1 = np.array([[1., 2.], [3., 4.]])
    matrix2 = np.array([[1., 3.], [3., 5.]])
    self._verifyExponentialReal(matrix1)
    self._verifyExponentialReal(matrix2)
    # A multidimensional batch of 2x2 matrices
    self._verifyExponentialReal(self._makeBatch(matrix1, matrix2))

  def testNonsymmetricComplex(self):
    matrix1 = np.array([[1., 2.], [3., 4.]])
    matrix2 = np.array([[1., 3.], [3., 5.]])
    matrix1 = matrix1.astype(np.complex64)
    matrix1 += 1j * matrix1
    matrix2 = matrix2.astype(np.complex64)
    matrix2 += 1j * matrix2
    self._verifyExponentialComplex(matrix1)
    self._verifyExponentialComplex(matrix2)
    # Complex batch
    self._verifyExponentialComplex(self._makeBatch(matrix1, matrix2))

  def testSymmetricPositiveDefiniteReal(self):
    # 2x2 matrices
    matrix1 = np.array([[2., 1.], [1., 2.]])
    matrix2 = np.array([[3., -1.], [-1., 3.]])
    self._verifyExponentialReal(matrix1)
    self._verifyExponentialReal(matrix2)
    # A multidimensional batch of 2x2 matrices
    self._verifyExponentialReal(self._makeBatch(matrix1, matrix2))

  def testSymmetricPositiveDefiniteComplex(self):
    matrix1 = np.array([[2., 1.], [1., 2.]])
    matrix2 = np.array([[3., -1.], [-1., 3.]])
    matrix1 = matrix1.astype(np.complex64)
    matrix1 += 1j * matrix1
    matrix2 = matrix2.astype(np.complex64)
    matrix2 += 1j * matrix2
    self._verifyExponentialComplex(matrix1)
    self._verifyExponentialComplex(matrix2)
    # Complex batch
    self._verifyExponentialComplex(self._makeBatch(matrix1, matrix2))

  def testNonSquareMatrix(self):
    # When the exponential of a non-square matrix is attempted we should return
    # an error
    with self.assertRaises(ValueError):
      linalg_impl.matrix_exponential(np.array([[1., 2., 3.], [3., 4., 5.]]))

  def testWrongDimensions(self):
    # The input to the exponential should be at least a 2-dimensional tensor.
    tensor3 = constant_op.constant([1., 2.])
    with self.assertRaises(ValueError):
      linalg_impl.matrix_exponential(tensor3)

  def testEmpty(self):
    self._verifyExponentialReal(np.empty([0, 2, 2]))
    self._verifyExponentialReal(np.empty([2, 0, 0]))

  def testDynamic(self):
    # Shape only known at run time via a placeholder.
    with self.session(use_gpu=True) as sess:
      inp = array_ops.placeholder(ops.dtypes.float32)
      expm = linalg_impl.matrix_exponential(inp)
      matrix = np.array([[1., 2.], [3., 4.]])
      sess.run(expm, feed_dict={inp: matrix})

  def testConcurrentExecutesWithoutError(self):
    with self.session(use_gpu=True) as sess:
      matrix1 = random_ops.random_normal([5, 5], seed=42)
      matrix2 = random_ops.random_normal([5, 5], seed=42)
      expm1 = linalg_impl.matrix_exponential(matrix1)
      expm2 = linalg_impl.matrix_exponential(matrix2)
      expm = sess.run([expm1, expm2])
      self.assertAllEqual(expm[0], expm[1])
class MatrixExponentialBenchmark(test.Benchmark):
  """Benchmarks matrix_exponential on CPU and (if available) GPU."""

  # Trailing two dims are the (square) matrix; leading dims are batch.
  shapes = [
      (4, 4),
      (10, 10),
      (16, 16),
      (101, 101),
      (256, 256),
      (1000, 1000),
      (1024, 1024),
      (2048, 2048),
      (513, 4, 4),
      (513, 16, 16),
      (513, 256, 256),
  ]

  def _GenerateMatrix(self, shape):
    # Diagonally-dominant matrix: ones/(2n) plus the identity, tiled over
    # the batch dimensions.
    batch_shape = shape[:-2]
    shape = shape[-2:]
    assert shape[0] == shape[1]
    n = shape[0]
    matrix = np.ones(shape).astype(np.float32) / (
        2.0 * n) + np.diag(np.ones(n).astype(np.float32))
    return variables.Variable(np.tile(matrix, batch_shape + (1, 1)))

  def benchmarkMatrixExponentialOp(self):
    for shape in self.shapes:
      with ops.Graph().as_default(), \
          session.Session(config=benchmark.benchmark_config()) as sess, \
          ops.device("/cpu:0"):
        matrix = self._GenerateMatrix(shape)
        expm = linalg_impl.matrix_exponential(matrix)
        variables.global_variables_initializer().run()
        self.run_op_benchmark(
            sess,
            control_flow_ops.group(expm),
            min_iters=25,
            name="matrix_exponential_cpu_{shape}".format(
                shape=shape))

      if test.is_gpu_available(True):
        with ops.Graph().as_default(), \
            session.Session(config=benchmark.benchmark_config()) as sess, \
            ops.device("/gpu:0"):
          matrix = self._GenerateMatrix(shape)
          expm = linalg_impl.matrix_exponential(matrix)
          variables.global_variables_initializer().run()
          self.run_op_benchmark(
              sess,
              control_flow_ops.group(expm),
              min_iters=25,
              name="matrix_exponential_gpu_{shape}".format(
                  shape=shape))
def _TestRandomSmall(dtype, batch_dims, size):
def Test(self):
np.random.seed(42)
shape = batch_dims + (size, size)
matrix = np.random.uniform(
low=-1.0, high=1.0,
size=shape).astype(dtype)
self._verifyExponentialReal(matrix)
return Test
def _TestL1Norms(dtype, shape, scale):
def Test(self):
np.random.seed(42)
matrix = np.random.uniform(
low=-1.0, high=1.0,
size=np.prod(shape)).reshape(shape).astype(dtype)
print(dtype, shape, scale, matrix)
l1_norm = np.max(np.sum(np.abs(matrix), axis=matrix.ndim-2))
matrix /= l1_norm
self._verifyExponentialReal(scale * matrix)
return Test
if __name__ == "__main__":
for dtype_ in [np.float32, np.float64, np.complex64, np.complex128]:
for batch_ in [(), (2,), (2, 2)]:
for size_ in [4, 7]:
name = "%s_%d_%d" % (dtype_.__name__, len(batch_), size_)
setattr(ExponentialOpTest, "testL1Norms_" + name,
_TestRandomSmall(dtype_, batch_, size_))
for shape_ in [(3, 3), (2, 3, 3)]:
for dtype_ in [np.float32, np.complex64]:
for scale_ in [0.1, 1.5, 5.0, 20.0]:
name = "%s_%d_%d" % (dtype_.__name__, len(shape_), int(scale_*10))
setattr(ExponentialOpTest, "testL1Norms_" + name,
_TestL1Norms(dtype_, shape_, scale_))
for dtype_ in [np.float64, np.complex128]:
for scale_ in [0.01, 0.2, 0.5, 1.5, 6.0, 25.0]:
name = "%s_%d_%d" % (dtype_.__name__, len(shape_), int(scale_*100))
setattr(ExponentialOpTest, "testL1Norms_" + name,
_TestL1Norms(dtype_, shape_, scale_))
test.main()
| apache-2.0 |
stanlyxiang/incubator-hawq | tools/bin/pythonSrc/pychecker-0.8.18/pychecker/Stack.py | 8 | 3656 | #!/usr/bin/env python
# Copyright (c) 2001-2002, MetaSlash Inc. All rights reserved.
"""
Module to hold manipulation of elements on the stack.
"""
import types
from pychecker import utils
DATA_UNKNOWN = "-unknown-"
LOCALS = 'locals'
# These should really be defined by subclasses
TYPE_UNKNOWN = "-unknown-"
TYPE_FUNC_RETURN = "-return-value-"
TYPE_ATTRIBUTE = "-attribute-"
TYPE_COMPARISON = "-comparison-"
TYPE_GLOBAL = "-global-"
TYPE_EXCEPT = "-except-"
class Item :
    "Representation of data on the stack"

    def __init__(self, data, dataType, const = 0, length = 0) :
        # data: the value or name carried; type: a builtin type object or
        # one of the TYPE_* markers; const: true for literal constants;
        # length: element count for container literals.
        self.data = data
        self.type = dataType
        self.const = const
        self.length = length
        self.is_really_string = 0

    def __str__(self) :
        # Tuples are rendered as "(a, b)"; everything else via safestr().
        if type(self.data) == types.TupleType :
            value = '('
            for item in self.data :
                value = value + utils.safestr(item) + ', '
            # strip off the ', ' for multiple items
            if len(self.data) > 1 :
                value = value[:-2]
            return value + ')'
        return utils.safestr(self.data)

    def __repr__(self):
        return 'Stack Item: (%r, %r, %d)' % (self.data, self.type, self.const)

    def isNone(self) :
        # True when the item is known to be the None value, either as a
        # typed value or as the bare (non-constant) name 'None'.
        return (self.type != TYPE_UNKNOWN and self.data is None or
                (self.data == 'None' and not self.const))

    def isImplicitNone(self) :
        # A constant None, e.g. the implicit return value of a function.
        return self.data is None and self.const

    def isMethodCall(self, c, methodArgName):
        # True when the item is an attribute access of the method's
        # self/cls argument (methodArgName) within class c.
        return self.type == TYPE_ATTRIBUTE and c != None and \
               len(self.data) == 2 and self.data[0] == methodArgName

    def isLocals(self) :
        return self.type == types.DictType and self.data == LOCALS

    def setStringType(self, value = types.StringType):
        self.is_really_string = value == types.StringType

    def getType(self, typeMap) :
        # Resolve the item's type, consulting the local typeMap for names
        # with exactly one recorded type.
        if self.type != types.StringType or self.is_really_string:
            return self.type
        if self.const :
            return type(self.data)
        if type(self.data) == types.StringType :
            localTypes = typeMap.get(self.data, [])
            if len(localTypes) == 1 :
                return localTypes[0]
        return TYPE_UNKNOWN

    def getName(self) :
        if self.type == TYPE_ATTRIBUTE and type(self.data) != types.StringType:
            strValue = ""
            # convert the tuple into a string ('self', 'data') -> self.data
            for item in self.data :
                strValue = '%s.%s' % (strValue, utils.safestr(item))
            return strValue[1:]
        return utils.safestr(self.data)

    def addAttribute(self, attr) :
        # Extend an attribute chain, e.g. ('a', 'b') + c -> ('a', 'b', 'c').
        if type(self.data) == types.TupleType :
            self.data = self.data + (attr,)
        else :
            self.data = (self.data, attr)
        self.type = TYPE_ATTRIBUTE
def makeDict(values = (), const = 1) :
    "Return an Item for a dict literal with the given keys"
    values = tuple(values)
    if not values:
        # placeholder key when the keys are only known at runtime
        values = ('<on-stack>',)
    return Item(values, types.DictType, const, len(values))

def makeTuple(values = (), const = 1) :
    "Return an Item for a tuple literal"
    return Item(tuple(values), types.TupleType, const, len(values))

def makeList(values = [], const = 1) :
    "Return an Item for a list literal"
    return Item(values, types.ListType, const, len(values))

def makeFuncReturnValue(stackValue, argCount) :
    "Return an Item for the result of calling stackValue with argCount args"
    data = DATA_UNKNOWN
    # vars() without params == locals()
    if stackValue.type == TYPE_GLOBAL and \
       (stackValue.data == LOCALS or
        (argCount == 0 and stackValue.data == 'vars')) :
        data = LOCALS
    return Item(data, TYPE_FUNC_RETURN)

def makeComparison(stackItems, comparison) :
    "Return an Item for the comparison stackItems[0] <cmp> stackItems[1]"
    return Item((stackItems[0], comparison, stackItems[1]), TYPE_COMPARISON)
| apache-2.0 |
zsiciarz/django | tests/sitemaps_tests/test_generic.py | 22 | 1679 | from datetime import datetime
from django.contrib.sitemaps import GenericSitemap
from django.test import override_settings
from .base import SitemapTestsBase
from .models import TestModel
@override_settings(ABSOLUTE_URL_OVERRIDES={})
class GenericViewsSitemapTests(SitemapTestsBase):
    """Tests for django.contrib.sitemaps.GenericSitemap."""

    def test_generic_sitemap_attributes(self):
        # Constructor kwargs and info_dict entries must be exposed as
        # attributes on the sitemap instance.
        datetime_value = datetime.now()
        queryset = TestModel.objects.all()
        generic_sitemap = GenericSitemap(
            info_dict={
                'queryset': queryset,
                'date_field': datetime_value,
            },
            priority=0.6,
            changefreq='monthly',
            protocol='https',
        )
        attr_values = (
            ('date_field', datetime_value),
            ('priority', 0.6),
            ('changefreq', 'monthly'),
            ('protocol', 'https'),
        )
        for attr_name, expected_value in attr_values:
            with self.subTest(attr_name=attr_name):
                self.assertEqual(getattr(generic_sitemap, attr_name), expected_value)
        self.assertCountEqual(generic_sitemap.queryset, queryset)

    def test_generic_sitemap(self):
        "A minimal generic sitemap can be rendered"
        response = self.client.get('/generic/sitemap.xml')
        expected = ''
        for pk in TestModel.objects.values_list("id", flat=True):
            expected += "<url><loc>%s/testmodel/%s/</loc></url>" % (self.base_url, pk)
        expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
""" % expected
        self.assertXMLEqual(response.content.decode(), expected_content)
ChristosChristofidis/bokeh | bokeh/models/mappers.py | 34 | 1948 | """ Models for mapping values from one range or space to another.
"""
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Float, Color, Enum, Seq
from ..enums import Palette
from .. import palettes
class ColorMapper(PlotObject):
    """ Base class for color mapper types. ``ColorMapper`` is not
    generally useful to instantiate on its own.
    """
class LinearColorMapper(ColorMapper):
    """ Map numbers in a range [*low*, *high*] linearly into a
    sequence of colors (a palette).

    For example, if the range is [0, 99] and the palette is
    ``['red', 'green', 'blue']``, the values would be mapped as
    follows::

             x < 0  : 'red'     # values < low are clamped
        0 <= x < 33 : 'red'
       33 <= x < 66 : 'green'
       66 <= x < 99 : 'blue'
       99 <= x      : 'blue'    # values > high are clamped

    """

    palette = Seq(Color, help="""
    A sequence of colors to use as the target palette for mapping.

    This property can also be set as a ``String``, to the name of
    any of the palettes shown in :ref:`bokeh_dot_palettes`.
    """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))

    low = Float(help="""
    The minimum value of the range to map into the palette. Values below
    this are clamped to ``low``.
    """)

    high = Float(help="""
    The maximum value of the range to map into the palette. Values above
    this are clamped to ``high``.
    """)

    # TODO: (jc) what is the color code for transparent?
    # TODO: (bev) better docstring
    reserve_color = Color("#ffffff", help="""
    Used by Abstract Rendering.
    """)

    # TODO: (bev) better docstring
    reserve_val = Float(default=None, help="""
    Used by Abstract Rendering.
    """)

    def __init__(self, palette=None, **kwargs):
        # Allow the palette to be passed positionally for convenience.
        if palette is not None: kwargs['palette'] = palette
        super(LinearColorMapper, self).__init__(**kwargs)
| bsd-3-clause |
ujjvala-addsol/addsol_hr | openerp/service/db.py | 64 | 13823 | # -*- coding: utf-8 -*-
from contextlib import closing
from functools import wraps
import logging
import os
import shutil
import threading
import traceback
import tempfile
import zipfile
import psycopg2
import openerp
from openerp import SUPERUSER_ID
from openerp.exceptions import Warning
import openerp.release
import openerp.sql_db
import openerp.tools
import security
_logger = logging.getLogger(__name__)
class DatabaseExists(Warning):
    # Raised when creating a database whose name is already taken.
    pass
# This should be moved to openerp.modules.db, along side initialize().
def _initialize_db(id, db_name, demo, lang, user_password):
    """Bootstrap a freshly created database: install the base modules,
    apply the default language and set the admin password.

    ``id`` is an unused legacy job identifier kept for call compatibility.
    """
    try:
        db = openerp.sql_db.db_connect(db_name)
        with closing(db.cursor()) as cr:
            # TODO this should be removed as it is done by RegistryManager.new().
            openerp.modules.db.initialize(cr)
            openerp.tools.config['lang'] = lang
            cr.commit()

        registry = openerp.modules.registry.RegistryManager.new(
            db_name, demo, None, update_module=True)

        with closing(db.cursor()) as cr:
            if lang:
                # Load translations for every installed module.
                modobj = registry['ir.module.module']
                mids = modobj.search(cr, SUPERUSER_ID, [('state', '=', 'installed')])
                modobj.update_translations(cr, SUPERUSER_ID, mids, lang)

            # update admin's password and lang
            values = {'password': user_password, 'lang': lang}
            registry['res.users'].write(cr, SUPERUSER_ID, [SUPERUSER_ID], values)

            cr.execute('SELECT login, password FROM res_users ORDER BY login')
            cr.commit()
    except Exception, e:
        # NOTE(review): failures are only logged, never re-raised -- callers
        # cannot tell that initialization failed.
        _logger.exception('CREATE DATABASE failed:')
def dispatch(method, params):
    """Dispatch an RPC call on the ``db`` service.

    Administrative methods require the master password as the first
    parameter (checked and stripped here); the read-only listing methods
    are unauthenticated.  Raises KeyError for unknown methods.
    """
    if method in ['create', 'get_progress', 'drop', 'dump', 'restore', 'rename',
                  'change_admin_password', 'migrate_databases',
                  'create_database', 'duplicate_database']:
        passwd = params[0]
        params = params[1:]
        security.check_super(passwd)
    elif method in ['db_exist', 'list', 'list_lang', 'server_version']:
        # params = params
        # No security check for these methods
        pass
    else:
        raise KeyError("Method not found: %s" % method)
    # Exposed implementations follow the `exp_<method>` naming convention.
    fn = globals()['exp_' + method]
    return fn(*params)
def _create_empty_database(name):
    """Create database ``name`` from the configured template, raising
    DatabaseExists when a database with that name is already present.
    """
    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        chosen_template = openerp.tools.config['db_template']
        cr.execute("SELECT datname FROM pg_database WHERE datname = %s",
                   (name,))
        if cr.fetchall():
            raise DatabaseExists("database %r already exists!" % (name,))
        else:
            cr.autocommit(True)     # avoid transaction block
            # NOTE(review): identifiers cannot be bound as query parameters,
            # so the names are string-interpolated; callers must only pass
            # trusted database/template names.
            cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (name, chosen_template))
def exp_create_database(db_name, demo, lang, user_password='admin'):
    """ Similar to exp_create but blocking."""
    _logger.info('Create database `%s`.', db_name)
    _create_empty_database(db_name)
    # The first argument is ignored by _initialize_db (legacy job id);
    # the builtin `id` is passed as a placeholder.
    _initialize_db(id, db_name, demo, lang, user_password)
    return True
def exp_duplicate_database(db_original_name, db_name):
    """Clone database ``db_original_name`` (schema, data and filestore)
    into a new database called ``db_name``.
    """
    _logger.info('Duplicate database `%s` to `%s`.', db_original_name, db_name)
    # Close pooled connections so the source can serve as a template.
    openerp.sql_db.close_db(db_original_name)
    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        cr.autocommit(True)     # avoid transaction block
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (db_name, db_original_name))
    # Copy the on-disk attachment filestore alongside the SQL data.
    from_fs = openerp.tools.config.filestore(db_original_name)
    to_fs = openerp.tools.config.filestore(db_name)
    if os.path.exists(from_fs) and not os.path.exists(to_fs):
        shutil.copytree(from_fs, to_fs)
    return True
def _drop_conn(cr, db_name):
    """Best-effort termination of other backends connected to ``db_name``
    so the database can be dropped; errors are deliberately ignored.
    """
    # Try to terminate all other connections that might prevent
    # dropping the database
    try:
        # PostgreSQL 9.2 renamed pg_stat_activity.procpid to pid:
        # http://www.postgresql.org/docs/9.2/static/release-9-2.html#AEN110389
        pid_col = 'pid' if cr._cnx.server_version >= 90200 else 'procpid'

        cr.execute("""SELECT pg_terminate_backend(%(pid_col)s)
                      FROM pg_stat_activity
                      WHERE datname = %%s AND
                            %(pid_col)s != pg_backend_pid()""" % {'pid_col': pid_col},
                   (db_name,))
    except Exception:
        pass
def exp_drop(db_name):
    """Drop database ``db_name`` and its filestore.

    Returns False when the database does not exist, True on success;
    raises on a failed DROP DATABASE.
    """
    if db_name not in exp_list(True):
        return False
    openerp.modules.registry.RegistryManager.delete(db_name)
    openerp.sql_db.close_db(db_name)

    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        cr.autocommit(True) # avoid transaction block
        _drop_conn(cr, db_name)

        try:
            cr.execute('DROP DATABASE "%s"' % db_name)
        except Exception, e:
            _logger.error('DROP DB: %s failed:\n%s', db_name, e)
            raise Exception("Couldn't drop database %s: %s" % (db_name, e))
        else:
            _logger.info('DROP DB: %s', db_name)

    # Remove the on-disk attachment filestore as well.
    fs = openerp.tools.config.filestore(db_name)
    if os.path.exists(fs):
        shutil.rmtree(fs)
    return True
def _set_pg_password_in_environment(func):
    """ On systems where pg_restore/pg_dump require an explicit
    password (i.e. when not connecting via unix sockets, and most
    importantly on Windows), it is necessary to pass the PG user
    password in the environment or in a special .pgpass file.

    This decorator handles setting
    :envvar:`PGPASSWORD` if it is not already
    set, and removing it afterwards.

    See also http://www.postgresql.org/docs/8.4/static/libpq-envars.html

    .. note:: This is not thread-safe, and should never be enabled for
         SaaS (giving SaaS users the super-admin password is not a good idea
         anyway)
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Only inject the password when one is configured and not already
        # present in the environment; always clean up afterwards.
        if os.environ.get('PGPASSWORD') or not openerp.tools.config['db_password']:
            return func(*args, **kwargs)
        else:
            os.environ['PGPASSWORD'] = openerp.tools.config['db_password']
            try:
                return func(*args, **kwargs)
            finally:
                del os.environ['PGPASSWORD']
    return wrapper
def exp_dump(db_name):
    """RPC entry point: return a base64-encoded zip dump of ``db_name``."""
    with tempfile.TemporaryFile() as t:
        dump_db(db_name, t)
        t.seek(0)
        return t.read().encode('base64')
@_set_pg_password_in_environment
def dump_db(db, stream):
    """Dump database `db` into file-like object `stream`

    The result is a zip archive containing a plain-format ``dump.sql``
    (pg_dump) and, when present, the attachment ``filestore/`` directory.
    Raises when pg_dump exits with an error.
    """
    with openerp.tools.osutil.tempdir() as dump_dir:
        registry = openerp.modules.registry.RegistryManager.get(db)
        with registry.cursor() as cr:
            filestore = registry['ir.attachment']._filestore(cr, SUPERUSER_ID)
            if os.path.exists(filestore):
                shutil.copytree(filestore, os.path.join(dump_dir, 'filestore'))

        dump_file = os.path.join(dump_dir, 'dump.sql')
        cmd = ['pg_dump', '--format=p', '--no-owner', '--file=' + dump_file]
        if openerp.tools.config['db_user']:
            cmd.append('--username=' + openerp.tools.config['db_user'])
        if openerp.tools.config['db_host']:
            cmd.append('--host=' + openerp.tools.config['db_host'])
        if openerp.tools.config['db_port']:
            cmd.append('--port=' + str(openerp.tools.config['db_port']))
        cmd.append(db)

        if openerp.tools.exec_pg_command(*cmd):
            _logger.error('DUMP DB: %s failed! Please verify the configuration of the database '
                          'password on the server. You may need to create a .pgpass file for '
                          'authentication, or specify `db_password` in the server configuration '
                          'file.', db)
            raise Exception("Couldn't dump database")

        openerp.tools.osutil.zip_dir(dump_dir, stream, include_dir=False)

    _logger.info('DUMP DB successful: %s', db)
def exp_restore(db_name, data, copy=False):
    """Restore a base64-encoded dump payload into database `db_name`.

    The payload is spilled to a named temporary file so restore_db() can
    hand a real path to the PostgreSQL tools; the file is always removed,
    even when the restore fails.
    """
    tmp = tempfile.NamedTemporaryFile(delete=False)
    try:
        raw = data.decode('base64')
        tmp.write(raw)
        tmp.close()
        restore_db(db_name, tmp.name, copy=copy)
    finally:
        os.unlink(tmp.name)
    return True
@_set_pg_password_in_environment
def restore_db(db, dump_file, copy=False):
    """Restore `dump_file` into a freshly created database `db`.

    Handles both the v8 zip format (dump.sql + filestore/) and the older
    raw pg_dump output. With `copy=True` the restored database is treated
    as a copy and receives a fresh dbuuid.
    """
    assert isinstance(db, basestring)
    if exp_db_exist(db):
        _logger.warning('RESTORE DB: %s already exists', db)
        raise Exception("Database already exists")

    _create_empty_database(db)

    filestore_path = None
    with openerp.tools.osutil.tempdir() as dump_dir:
        if zipfile.is_zipfile(dump_file):
            # v8 format
            with zipfile.ZipFile(dump_file, 'r') as z:
                # only extract known members!
                filestore = [m for m in z.namelist() if m.startswith('filestore/')]
                z.extractall(dump_dir, ['dump.sql'] + filestore)

                if filestore:
                    filestore_path = os.path.join(dump_dir, 'filestore')

            pg_cmd = 'psql'
            pg_args = ['-q', '-f', os.path.join(dump_dir, 'dump.sql')]
        else:
            # <= 7.0 format (raw pg_dump output)
            pg_cmd = 'pg_restore'
            pg_args = ['--no-owner', dump_file]

        # Shared connection arguments go in front of the tool-specific ones.
        args = []
        if openerp.tools.config['db_user']:
            args.append('--username=' + openerp.tools.config['db_user'])
        if openerp.tools.config['db_host']:
            args.append('--host=' + openerp.tools.config['db_host'])
        if openerp.tools.config['db_port']:
            args.append('--port=' + str(openerp.tools.config['db_port']))
        args.append('--dbname=' + db)
        pg_args = args + pg_args

        if openerp.tools.exec_pg_command(pg_cmd, *pg_args):
            raise Exception("Couldn't restore database")

        # Build a registry on the restored database to finish the setup.
        registry = openerp.modules.registry.RegistryManager.new(db)
        with registry.cursor() as cr:
            if copy:
                # if it's a copy of a database, force generation of a new dbuuid
                registry['ir.config_parameter'].init(cr, force=True)
            if filestore_path:
                filestore_dest = registry['ir.attachment']._filestore(cr, SUPERUSER_ID)
                shutil.move(filestore_path, filestore_dest)

            if openerp.tools.config['unaccent']:
                try:
                    # Best effort: the extension must be available server-side.
                    with cr.savepoint():
                        cr.execute("CREATE EXTENSION unaccent")
                except psycopg2.Error:
                    pass

    _logger.info('RESTORE DB: %s', db)
def exp_rename(old_name, new_name):
    # Drop in-memory state and pooled connections tied to the old name
    # before asking PostgreSQL to rename the database.
    openerp.modules.registry.RegistryManager.delete(old_name)
    openerp.sql_db.close_db(old_name)

    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        cr.autocommit(True)     # avoid transaction block
        _drop_conn(cr, old_name)

        try:
            # NOTE(review): identifiers are interpolated directly into the
            # statement; database names are assumed to come from trusted
            # admin input here -- confirm before exposing more widely.
            cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name))
            _logger.info('RENAME DB: %s -> %s', old_name, new_name)
        except Exception, e:
            _logger.error('RENAME DB: %s -> %s failed:\n%s', old_name, new_name, e)
            raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e))

    # Keep the attachment filestore directory in sync with the new name.
    old_fs = openerp.tools.config.filestore(old_name)
    new_fs = openerp.tools.config.filestore(new_name)
    if os.path.exists(old_fs) and not os.path.exists(new_fs):
        shutil.move(old_fs, new_fs)
    return True
@openerp.tools.mute_logger('openerp.sql_db')
def exp_db_exist(db_name):
    """Return True if a connection to `db_name` can be set up."""
    ## Not True: in fact, check if connection to database is possible. The database may exists
    return bool(openerp.sql_db.db_connect(db_name))
def exp_list(document=False):
    # Listing databases can be disabled with the `list_db` option;
    # `document=True` bypasses that check.
    if not openerp.tools.config['list_db'] and not document:
        raise openerp.exceptions.AccessDenied()
    chosen_template = openerp.tools.config['db_template']
    # Never report PostgreSQL's own templates or the configured template.
    templates_list = tuple(set(['template0', 'template1', 'postgres', chosen_template]))
    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        try:
            db_user = openerp.tools.config["db_user"]
            if not db_user and os.name == 'posix':
                # Fall back to the OS user running the server.
                import pwd
                db_user = pwd.getpwuid(os.getuid())[0]
            if not db_user:
                # Last resort: the owner of the configured database.
                cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (openerp.tools.config["db_name"],))
                res = cr.fetchone()
                db_user = res and str(res[0])
            if db_user:
                cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in %s order by datname", (db_user, templates_list))
            else:
                cr.execute("select datname from pg_database where datname not in %s order by datname", (templates_list,))
            res = [openerp.tools.ustr(name) for (name,) in cr.fetchall()]
        except Exception:
            # Any database error yields an empty listing rather than a crash.
            res = []
    res.sort()
    return res
def exp_change_admin_password(new_password):
    # Persist the new master password into the server configuration file.
    openerp.tools.config['admin_passwd'] = new_password
    openerp.tools.config.save()
    return True
def exp_list_lang():
    # Delegate to the tools helper that scans the available translations.
    return openerp.tools.scan_languages()
def exp_server_version():
    """ Return the version of the server
        Used by the client to verify the compatibility with its own version
    """
    # Raw release version string from openerp.release.
    return openerp.release.version
def exp_migrate_databases(databases):
    for db in databases:
        _logger.info('migrate database %s', db)
        # Force an update of the 'base' module so that building the registry
        # below runs the module upgrade machinery for each database.
        openerp.tools.config['update']['base'] = True
        openerp.modules.registry.RegistryManager.new(db, force_demo=False, update_module=True)
    return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
MeGotsThis/Hanabi-AI | multitagbot_v2_2/hint.py | 3 | 1420 | class Hint:
    def __init__(self, *, fitness=0, to=None, color=None, value=None):
        # Keyword-only fields: `fitness` ranks the hint, `to` is the target
        # player's position, and exactly one of color/value is expected to
        # be set before the hint is given (see give()).
        self.fitness = fitness
        self.to = to
        self.color = color
        self.value = value
def give(self, bot):
assert self.to is not None
assert self.to != bot.position
if self.color is not None:
bot.give_color_clue(self.to, self.color)
elif self.value is not None:
bot.give_value_clue(self.to, self.value)
else:
assert False
    def __str__(self):
        # Human-readable one-line summary; the backslash keeps the template
        # literal from starting with a newline.
        return '''\
Fitness: {fitness}, To: {to}, Color: {color}, Value: {value}'''.format(
            fitness=self.fitness, to=self.to,
            color=self.color, value=self.value)
def __repr__(self):
args = []
if self.fitness != 0:
args.append('fitness=' + repr(self.fitness))
if self.to is not None:
args.append('to=' + repr(self.to))
if self.color is not None:
args.append('color=' + repr(self.color))
if self.value is not None:
args.append('value=' + repr(self.value))
return 'Hint(' + ', '.join(args) + ')'
def __eq__(self, other):
if not isinstance(other, Hint):
return False
return (self.fitness == other.fitness
and self.to == other.to
and self.color == other.to
and self.value == other.value)
| gpl-3.0 |
my-first/octocoin | contrib/devtools/symbol-check.py | 149 | 4348 | #!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
A script to check that the (Linux) executables produced by gitian only contain
allowed gcc, glibc and libstdc++ version symbols. This makes sure they are
still compatible with the minimum supported Linux distribution versions.
Example usage:
find ../gitian-builder/build -type f -executable | xargs python contrib/devtools/symbol-check.py
'''
from __future__ import division, print_function
import subprocess
import re
import sys
# Debian 6.0.9 (Squeeze) has:
#
# - g++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=g%2B%2B)
# - libc version 2.11.3 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libc6)
# - libstdc++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libstdc%2B%2B6)
#
# Ubuntu 10.04.4 (Lucid Lynx) has:
#
# - g++ version 4.4.3 (http://packages.ubuntu.com/search?keywords=g%2B%2B&searchon=names&suite=lucid§ion=all)
# - libc version 2.11.1 (http://packages.ubuntu.com/search?keywords=libc6&searchon=names&suite=lucid§ion=all)
# - libstdc++ version 4.4.3 (http://packages.ubuntu.com/search?suite=lucid§ion=all&arch=any&keywords=libstdc%2B%2B&searchon=names)
#
# Taking the minimum of these as our target.
#
# According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to:
# GCC 4.4.0: GCC_4.4.0
# GCC 4.4.2: GLIBCXX_3.4.13, CXXABI_1.3.3
# (glibc) GLIBC_2_11
#
MAX_VERSIONS = {
'GCC': (4,4,0),
'CXXABI': (1,3,3),
'GLIBCXX': (3,4,13),
'GLIBC': (2,11)
}
# Ignore symbols that are exported as part of every executable
IGNORE_EXPORTS = {
'_edata', '_end', '_init', '__bss_start', '_fini'
}
READELF_CMD = '/usr/bin/readelf'
CPPFILT_CMD = '/usr/bin/c++filt'
class CPPFilt(object):
    '''
    Demangle C++ symbol names.

    Use a pipe to the 'c++filt' command.
    '''
    def __init__(self):
        # One long-lived child process; one symbol is demangled per line.
        self.proc = subprocess.Popen(CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    def __call__(self, mangled):
        # Write one mangled name, read back the demangled line.
        self.proc.stdin.write(mangled + '\n')
        return self.proc.stdout.readline().rstrip()

    def close(self):
        # Close both pipe ends and reap the child process.
        self.proc.stdin.close()
        self.proc.stdout.close()
        self.proc.wait()
def read_symbols(executable, imports=True):
    '''
    Parse an ELF executable and return a list of (symbol,version) tuples
    for dynamic, imported symbols.

    With imports=False, return the exported symbols instead.
    '''
    p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    (stdout, stderr) = p.communicate()
    if p.returncode:
        raise IOError('Could not read symbols for %s: %s' % (executable, stderr.strip()))
    syms = []
    for line in stdout.split('\n'):
        line = line.split()
        # Symbol-table rows start with an index column like "12:".
        if len(line)>7 and re.match('[0-9]+:$', line[0]):
            # Versioned names look like "name@VERSION" or "name@@VERSION".
            (sym, _, version) = line[7].partition('@')
            is_import = line[6] == 'UND'
            if version.startswith('@'):
                # "@@" marks the default version; drop the leftover '@'.
                version = version[1:]
            if is_import == imports:
                syms.append((sym, version))
    return syms
def check_version(max_versions, version):
    '''
    Return True if `version` (e.g. "GLIBC_2.11") is at most the maximum
    allowed version for its library in `max_versions`.

    `max_versions` maps a library tag to a tuple of version components.
    A bare library name without a "_<version>" suffix is treated as
    version 0, which is allowed for any known library. Unknown libraries
    are always rejected.
    '''
    if '_' in version:
        (lib, _, ver) = version.rpartition('_')
    else:
        lib = version
        ver = '0'
    # Component-wise numeric comparison: tuples compare lexicographically.
    ver = tuple([int(x) for x in ver.split('.')])
    if lib not in max_versions:
        return False
    return ver <= max_versions[lib]
if __name__ == '__main__':
    cppfilt = CPPFilt()
    retval = 0
    for filename in sys.argv[1:]:
        # Check imported symbols: every versioned import must fall within
        # the MAX_VERSIONS limits.
        for sym,version in read_symbols(filename, True):
            if version and not check_version(MAX_VERSIONS, version):
                print('%s: symbol %s from unsupported version %s' % (filename, cppfilt(sym), version))
                retval = 1
        # Check exported symbols: nothing may be exported beyond the
        # standard ELF special symbols in IGNORE_EXPORTS.
        for sym,version in read_symbols(filename, False):
            if sym in IGNORE_EXPORTS:
                continue
            print('%s: export of symbol %s not allowed' % (filename, cppfilt(sym)))
            retval = 1
    exit(retval)
| mit |
alexpearce/thesis | scripts/background_categories.py | 1 | 3206 | from __future__ import absolute_import, division, print_function
import os
import matplotlib.pyplot as plt
import ROOT
import root_pandas
from histograms import histogram
from root_converters import roocurve, tgraphasymerrors
from plotting_utilities import (
COLOURS as colours,
set_axis_labels
)
PREFIX = 'root://eoslhcb.cern.ch//eos/lhcb/user/a/apearce/CharmProduction/2015_MagDown_MC/{0}' # noqa
FNAME = 'DVntuple.root'
DATA_PATHS = [
os.path.join(PREFIX, str(idx), FNAME)
for idx in range(1, 3)
]
EVT_TYPES = {
'D0ToKpi': 27163003,
'DpToKpipi': 21263010
}
def background_categories(mode):
    """Plot BKGCAT values."""
    tree = 'Tuple{0}/DecayTree'.format(mode)
    # e.g. 'D0ToKpi' -> parent particle name 'D0'.
    parent = mode.split('To')[0]
    columns = [
        '{0}_M'.format(parent),
        '{0}_BKGCAT'.format(parent)
    ]
    paths = [p.format(EVT_TYPES[mode]) for p in DATA_PATHS]
    df = root_pandas.read_root(paths, key=tree, columns=columns)
    df.columns = ['M', 'BKGCAT']
    # Mass plotting window per decay mode (units presumably MeV -- TODO confirm).
    if mode == 'D0ToKpi':
        mrange = (1800, 1930)
    elif mode == 'DpToKpipi':
        mrange = (1805, 1935)
    # One bin per unit of the mass window.
    nbins = mrange[1] - mrange[0]
    # Split by BKGCAT: 0/10 labelled signal, 60 ghost, everything else other.
    signal = df.M[(df.BKGCAT == 0) | (df.BKGCAT == 10)]
    ghost = df.M[(df.BKGCAT == 60)]
    other = df.M[~((df.BKGCAT == 0) | (df.BKGCAT == 10) | (df.BKGCAT == 60))]
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(1, 1, 1)
    histogram([signal, ghost, other], range=mrange, bins=nbins,
              label=['Signal', 'Ghost background', 'Other background'], ax=ax)
    # Don't have the y-axis go to zero, and add some padding at the top
    ax.set_ylim(bottom=0.1, top=2*ax.get_ylim()[1])
    ax.set_yscale('log')
    set_axis_labels(ax, mode)
    ax.legend(loc='best')
    fig.savefig('output/{0}_BKGCAT.pdf'.format(mode))
def fits(mode):
    """Overlay the saved truth-matching fit (total + background) on MC data."""
    f = ROOT.TFile('~/Physics/CharmProduction/analysis/{0}_2015_MagDown_truth_matching_fit.root'.format(mode))  # noqa
    w = f.Get('workspace_{0}'.format(mode))
    parent = mode.split('To')[0]
    x = w.var('{0}_M'.format(parent))
    pdf_tot = w.pdf('pdf_m_tot')
    # NOTE(review): this fetches the *same* 'pdf_m_tot' pdf as pdf_tot; the
    # background curve below comes from plotting only '*bkg*' components.
    pdf_bkg = w.pdf('pdf_m_tot')
    data = w.data('data_binned')
    frame = x.frame()
    data.plotOn(frame)
    pdf_bkg.plotOn(frame)
    pdf_tot.plotOn(frame, ROOT.RooFit.Components('*bkg*'))
    # Frame objects come back in drawing order: data, total curve, bkg curve.
    plotobjs = [frame.getObject(i) for i in range(int(frame.numItems()))]
    tgraph, tcurve_tot, tcurve_bkg = plotobjs
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    roocurve(ax, tcurve_bkg, color=colours.red, linestyle=':',
             label='Background')
    roocurve(ax, tcurve_tot, color=colours.blue,
             label='Total fit')
    tgraphasymerrors(ax, tgraph, color=colours.black, label='MC data')
    ax.set_xlim((frame.GetXaxis().GetXmin(), frame.GetXaxis().GetXmax()))
    ax.set_ylim(top=1.2*ax.get_ylim()[1])
    # Swap the legend entry order so the data is first
    handles, labels = ax.get_legend_handles_labels()
    ax.legend(handles[::-1], labels[::-1], loc='best')
    set_axis_labels(ax, mode)
    fig.savefig('output/{0}_BKGCAT_fit.pdf'.format(mode))
if __name__ == '__main__':
    # The raw BKGCAT histograms are currently disabled; only fit overlays run.
    # background_categories('D0ToKpi')
    # background_categories('DpToKpipi')
    fits('D0ToKpi')
    fits('DpToKpipi')
mtmarsh2/vislab | vislab/tests/vw3.py | 4 | 6227 | import logging
import unittest
import pandas as pd
import numpy as np
import gzip
import os
import test_context
import vislab.predict
import vislab.vw3
class TestVW(unittest.TestCase):
    """Tests for the Vowpal Wabbit data-writing and fitting helpers."""

    @classmethod
    def setUpClass(cls):
        # One scratch directory, cleared once, shared by all tests below.
        cls.temp_dirname = vislab.util.cleardirs(
            test_context.temp_dirname + '/test_vw')

    def test_write_data_in_vw_format_float(self):
        """Float features serialize as 'idx:value'; zero entries are dropped."""
        feat_df = pd.DataFrame(
            data=[
                np.array([3.24, 5.666, 1., 0.0000001, 0.]),
                np.array([1.00000003, 5, 2, 0.001, -0.000001]),
            ],
            index=['loller', 'taco']
        )
        feat_name = 'best'
        assert(len(feat_df.columns) > 1)
        expected = """\
idloller |best 0:3.24 1:5.666 2:1.0
idtaco |best 0:1.0 1:5.0 2:2.0 3:0.001
"""
        output_filename = self.temp_dirname + \
            '/test_write_data_in_vw_format.txt'
        try:
            os.remove(output_filename)
        except:
            pass
        vislab.vw3.write_data_in_vw_format(feat_df, feat_name, output_filename)
        with open(output_filename) as f:
            actual = f.read()
        assert(expected == actual)

        # Try writing to gzipped file
        output_filename = self.temp_dirname + \
            '/test_write_data_in_vw_format.txt.gz'
        try:
            os.remove(output_filename)
        except:
            pass
        vislab.vw3.write_data_in_vw_format(feat_df, feat_name, output_filename)
        with gzip.open(output_filename) as f:
            actual = f.read()
        assert(expected == actual)

    def test_write_data_in_vw_format_single_column(self):
        """A single ndarray column is expanded; bool arrays become index lists."""
        feat_df = pd.DataFrame(
            data=[
                (np.array([2.0003, 2]),),
                (np.array([True, False, True, False, False, True]),)
            ],
            index=['id', 'badman']
        )
        feat_name = 'best'
        assert(len(feat_df.columns) == 1)
        expected = """\
idid |best 0:2.0003 1:2.0
idbadman |best 0 2 5
"""
        output_filename = self.temp_dirname + \
            '/test_write_data_in_vw_format_single_column.txt'
        try:
            os.remove(output_filename)
        except:
            pass
        vislab.vw3.write_data_in_vw_format(feat_df, feat_name, output_filename)
        with open(output_filename) as f:
            actual = f.read()
        assert(expected == actual)

    def test__cache_data(self):
        """_cache_cmd builds a VW cache from label and feature files."""
        # These test file were created from the 'classifier tests' notebook.
        feat_filenames = [
            test_context.support_dirname + '/simple/first.txt',
            test_context.support_dirname + '/simple/second.txt.gz'
        ]
        label_df_filename = test_context.support_dirname + \
            '/simple/label_df.h5'
        output_dirname = vislab.util.makedirs(
            self.temp_dirname + '/cache_data')

        cache_cmd, preview_cmd = vislab.vw3._cache_cmd(
            label_df_filename, feat_filenames, output_dirname,
            2, bit_precision=18, verbose=False, force=False)

        vislab.util.run_through_bash_script(
            [cache_cmd, preview_cmd], None, verbose=False)

        assert(os.path.exists(output_dirname + '/cache.vw'))
        expected = """\
-1 1.000000 0|first 0:0.907699 1:0.910662 |second 0:1.057998
-1 1.000000 1|first 0:-0.375222 1:2.900907 |second 0:0.831044
-1 1.000000 2|first 0:-0.276823 1:1.717314 |second 0:-0.345345
-1 1.000000 3|first 0:0.596906 1:1.522828 |second 0:-0.766781
-1 1.000000 4|first 0:0.540094 1:0.094393 |second 0:-0.919987
1 1.000000 5|first 0:-0.972403 1:2.213648 |second 0:-0.0831
-1 1.000000 6|first 0:0.098378 1:0.200471 |second 0:-0.9833
1 1.000000 7|first 0:-0.755463 1:2.802532 |second 0:-0.642245
1 1.000000 8|first 0:-0.326318 1:0.74197 |second 0:1.21393
1 1.000000 9|first 0:-2.115056 1:0.353851 |second 0:1.62912
"""
        with open(output_dirname + '/cache_preview.txt') as f:
            actual = f.read()
        assert(expected == actual)

    def test__get_feat_filenames(self):
        """Smoke test: feature filenames resolve without raising."""
        feat_names = ['first', 'second']
        feat_dirname = test_context.support_dirname + '/simple'
        vislab.vw3._get_feat_filenames(feat_names, feat_dirname)

    def test_vw_fit_simple(self):
        """Train/predict on the 'simple' dataset with several feature sets."""
        label_df_filename = test_context.support_dirname + \
            '/simple/label_df.h5'
        label_df = pd.read_hdf(label_df_filename, 'df')
        dataset = vislab.predict.get_binary_or_regression_dataset(
            label_df, 'simple', 'label')
        feat_dirname = test_context.support_dirname + '/simple'
        vw = vislab.vw3.VW(self.temp_dirname + '/vw_simple')

        feat_names = ['first']
        pred_df, test_score, val_score, train_score = vw.fit_and_predict(
            dataset, feat_names, feat_dirname)
        print(feat_names, test_score, val_score, train_score)
        #assert(test_score > 0.7 and test_score < 0.8)

        feat_names = ['second']
        pred_df, test_score, val_score, train_score = vw.fit_and_predict(
            dataset, feat_names, feat_dirname)
        print(feat_names, test_score, val_score, train_score)
        #assert(test_score > 0.9)

        feat_names = ['first', 'second']
        pred_df, test_score, val_score, train_score = vw.fit_and_predict(
            dataset, feat_names, feat_dirname)
        print(feat_names, test_score, val_score, train_score)
        #assert(test_score > 0.9)

    def test_vw_fit_iris(self):
        """Multiclass fit on iris should reach at least 0.8 accuracy."""
        label_df_filename = test_context.support_dirname + \
            '/iris/label_df.h5'
        label_df = pd.read_hdf(label_df_filename, 'df')
        dataset = vislab.predict.get_multiclass_dataset(
            label_df, 'iris', 'labels', ['label_0', 'label_1', 'label_2'])
        feat_dirname = test_context.support_dirname + '/iris'
        vw = vislab.vw3.VW(self.temp_dirname + '/vw_iris', num_passes=[10, 50, 100])

        feat_names = ['all']
        pred_df, test_score, val_score, train_score = vw.fit_and_predict(
            dataset, feat_names, feat_dirname)
        print(feat_names, test_score, val_score, train_score)
        assert(test_score > 0.8)
        # TODO: really want > .9 accuracy!
if __name__ == '__main__':
    # INFO-level logging surfaces output from the external `vw` calls.
    logging.getLogger().setLevel(logging.INFO)
    unittest.main()
| bsd-2-clause |
mfherbst/spack | var/spack/repos/builtin/packages/py-memory-profiler/package.py | 5 | 1681 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyMemoryProfiler(PythonPackage):
    """A module for monitoring memory usage of a python program"""

    homepage = "https://github.com/fabianp/memory_profiler"
    url = "https://pypi.io/packages/source/m/memory_profiler/memory_profiler-0.47.tar.gz"

    # Checksum of the 0.47 source tarball.
    version('0.47', 'ed340aaaa0c7118f2a4c5b4edec6da1e')

    # setuptools is only needed at build time; psutil also at run time.
    depends_on('py-setuptools', type='build')
    depends_on('py-psutil', type=('build', 'run'))
| lgpl-2.1 |
yuchangfu/pythonfun | flaskenv/Lib/encodings/iso8859_3.py | 593 | 13345 | """ Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless charmap codec backed by the ISO 8859-3 tables below.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` needs no special handling.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless, so `final` needs no special handling.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream flavour of the codec; behaviour comes entirely from the bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream flavour of the codec; behaviour comes entirely from the bases.
    pass
### encodings module API
def getregentry():
    # Registration hook used by the `encodings` package to expose this codec.
    return codecs.CodecInfo(
        name='iso8859-3',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0126' # 0xA1 -> LATIN CAPITAL LETTER H WITH STROKE
u'\u02d8' # 0xA2 -> BREVE
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\ufffe'
u'\u0124' # 0xA6 -> LATIN CAPITAL LETTER H WITH CIRCUMFLEX
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\u0130' # 0xA9 -> LATIN CAPITAL LETTER I WITH DOT ABOVE
u'\u015e' # 0xAA -> LATIN CAPITAL LETTER S WITH CEDILLA
u'\u011e' # 0xAB -> LATIN CAPITAL LETTER G WITH BREVE
u'\u0134' # 0xAC -> LATIN CAPITAL LETTER J WITH CIRCUMFLEX
u'\xad' # 0xAD -> SOFT HYPHEN
u'\ufffe'
u'\u017b' # 0xAF -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\u0127' # 0xB1 -> LATIN SMALL LETTER H WITH STROKE
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u0125' # 0xB6 -> LATIN SMALL LETTER H WITH CIRCUMFLEX
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\u0131' # 0xB9 -> LATIN SMALL LETTER DOTLESS I
u'\u015f' # 0xBA -> LATIN SMALL LETTER S WITH CEDILLA
u'\u011f' # 0xBB -> LATIN SMALL LETTER G WITH BREVE
u'\u0135' # 0xBC -> LATIN SMALL LETTER J WITH CIRCUMFLEX
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\ufffe'
u'\u017c' # 0xBF -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\ufffe'
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\u010a' # 0xC5 -> LATIN CAPITAL LETTER C WITH DOT ABOVE
u'\u0108' # 0xC6 -> LATIN CAPITAL LETTER C WITH CIRCUMFLEX
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\ufffe'
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0120' # 0xD5 -> LATIN CAPITAL LETTER G WITH DOT ABOVE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\u011c' # 0xD8 -> LATIN CAPITAL LETTER G WITH CIRCUMFLEX
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u016c' # 0xDD -> LATIN CAPITAL LETTER U WITH BREVE
u'\u015c' # 0xDE -> LATIN CAPITAL LETTER S WITH CIRCUMFLEX
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\ufffe'
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u010b' # 0xE5 -> LATIN SMALL LETTER C WITH DOT ABOVE
u'\u0109' # 0xE6 -> LATIN SMALL LETTER C WITH CIRCUMFLEX
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\ufffe'
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\u0121' # 0xF5 -> LATIN SMALL LETTER G WITH DOT ABOVE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\u011d' # 0xF8 -> LATIN SMALL LETTER G WITH CIRCUMFLEX
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u016d' # 0xFD -> LATIN SMALL LETTER U WITH BREVE
u'\u015d' # 0xFE -> LATIN SMALL LETTER S WITH CIRCUMFLEX
u'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
jas14/khmer | scripts/filter-stoptags.py | 2 | 2982 | #! /usr/bin/env python
#
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: khmer-project@idyll.org
#
# pylint: disable=invalid-name,missing-docstring
"""
Sequence trimming using stoptags.
Trim sequences at k-mers in the given stoptags file. Output sequences
will be placed in 'infile.stopfilt'.
% python scripts/filter-stoptags.py <stoptags> <data1> [ <data2> <...> ]
Use '-h' for parameter help.
"""
from __future__ import print_function
import os
import khmer
import argparse
import textwrap
import sys
from khmer.thread_utils import ThreadedSequenceProcessor, verbose_loader
from khmer.kfile import check_input_files, check_space
from khmer.khmer_args import info
# @CTB K should be loaded from file...
DEFAULT_K = 32
def get_parser():
    """Build the command-line argument parser for filter-stoptags."""
    epilog = """
    Load stoptags in from the given .stoptags file and use them to trim
    or remove the sequences in <file1-N>. Trimmed sequences will be placed in
    <fileN>.stopfilt.
    """
    parser = argparse.ArgumentParser(
        description="Trim sequences at stoptags.",
        epilog=textwrap.dedent(epilog),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--ksize', '-k', default=DEFAULT_K, type=int,
                        help='k-mer size')
    parser.add_argument('stoptags_file', metavar='input_stoptags_filename')
    parser.add_argument('input_filenames', metavar='input_sequence_filename',
                        nargs='+')
    parser.add_argument('--version', action='version', version='%(prog)s ' +
                        khmer.__version__)
    parser.add_argument('-f', '--force', default=False, action='store_true',
                        help='Overwrite output file if it exists')
    return parser
def main():
    """Trim each input sequence file at stoptag k-mers.

    Output for <infile> is written to <basename(infile)>.stopfilt in the
    current working directory.
    """
    info('filter-stoptags.py', ['graph'])
    args = get_parser().parse_args()
    stoptags = args.stoptags_file
    infiles = args.input_filenames

    for _ in infiles:
        check_input_files(_, args.force)

    check_space(infiles, args.force)

    print('loading stop tags, with K', args.ksize, file=sys.stderr)
    htable = khmer.Hashbits(args.ksize, 1, 1)
    htable.load_stop_tags(stoptags)

    def process_fn(record):
        """Return (name, trimmed_seq) to keep a read, (None, None) to drop it."""
        name = record['name']
        seq = record['sequence']
        # Drop reads containing ambiguous bases outright.
        if 'N' in seq:
            return None, None

        trim_seq, trim_at = htable.trim_on_stoptags(seq)

        # Keep the read only if at least one full k-mer survives trimming.
        if trim_at >= args.ksize:
            return name, trim_seq

        return None, None

    # the filtering loop
    for infile in infiles:
        print('filtering', infile, file=sys.stderr)
        outfile = os.path.basename(infile) + '.stopfilt'
        # Context manager guarantees the handle is closed even on error;
        # the original leaked it.  (ThreadedSequenceProcessor.start blocks
        # until all output is written -- per khmer.thread_utils.)
        with open(outfile, 'w') as outfp:
            tsp = ThreadedSequenceProcessor(process_fn)
            tsp.start(verbose_loader(infile), outfp)
        print('output in', outfile, file=sys.stderr)
# Script entry point.
if __name__ == '__main__':
    main()
| bsd-3-clause |
gregdek/ansible | test/integration/targets/inventory_plugin_config/test_inventory.py | 92 | 1318 | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: test_inventory
plugin_type: inventory
authors:
- Pierre-Louis Bonicoli (@pilou-)
short_description: test inventory
description:
- test inventory (fetch parameters using config API)
options:
departments:
description: test parameter
type: list
default:
- seine-et-marne
- haute-garonne
required: False
'''
EXAMPLES = '''
# Example command line: ansible-inventory --list -i test_inventory.yml
plugin: test_inventory
departments:
- paris
'''
from ansible.plugins.inventory import BaseInventoryPlugin
class InventoryModule(BaseInventoryPlugin):
    """Minimal inventory plugin used to exercise the config API."""

    NAME = 'test_inventory'

    def verify_file(self, path):
        # Accept any path: this test plugin has no file format of its own.
        return True

    def parse(self, inventory, loader, path, cache=True):
        super(InventoryModule, self).parse(inventory, loader, path)
        self._read_config_data(path=path)

        # Fetch the option first so a config error leaves inventory untouched.
        departments = self.get_option('departments')

        group_name = 'test_group'
        host_name = 'test_host'
        self.inventory.add_group(group_name)
        self.inventory.add_host(group=group_name, host=host_name)
        self.inventory.set_variable(host_name, 'departments', departments)
| gpl-3.0 |
LyonsLab/coge | bin/last_wrapper/Bio/SeqIO/_convert.py | 3 | 15717 | # Copyright 2009 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Optimised sequence conversion code (PRIVATE).
You are not expected to access this module, or any of its code, directly. This
is all handled internally by the Bio.SeqIO.convert(...) function which is the
public interface for this.
The idea here is that while doing this will work:
from Bio import SeqIO
records = SeqIO.parse(in_handle, in_format)
count = SeqIO.write(records, out_handle, out_format)
it is shorter to write:
from Bio import SeqIO
count = SeqIO.convert(in_handle, in_format, out_handle, out_format)
Also, the convert function can take a number of special case optimisations. This
means that using Bio.SeqIO.convert() may be faster, as well as more convenient.
All these file format specific optimisations are handled by this (private) module.
"""
from Bio import SeqIO
#NOTE - Lots of lazy imports further on...
def _genbank_convert_fasta(in_handle, out_handle, alphabet=None):
    """Fast GenBank to FASTA (PRIVATE)."""
    from Bio.GenBank.Scanner import GenBankScanner
    # Feature parsing is skipped and the alphabet ignored - neither is
    # needed to produce FASTA output.
    scanner = GenBankScanner()
    records = scanner.parse_records(in_handle, do_features=False)
    return SeqIO.write(records, out_handle, "fasta")
def _embl_convert_fasta(in_handle, out_handle, alphabet=None):
    """Fast EMBL to FASTA (PRIVATE)."""
    from Bio.GenBank.Scanner import EmblScanner
    # Feature parsing is skipped and the alphabet ignored - neither is
    # needed to produce FASTA output.
    scanner = EmblScanner()
    records = scanner.parse_records(in_handle, do_features=False)
    return SeqIO.write(records, out_handle, "fasta")
def _fastq_generic(in_handle, out_handle, mapping):
    """FASTQ helper function where can't have data loss by truncation (PRIVATE).

    The 256-character *mapping* string translates each input quality
    character; characters mapped to NUL are treated as invalid.
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    # Work on raw strings rather than SeqRecord/Seq objects for speed.
    bad_char = chr(0)
    count = 0
    for title, seq, old_qual in FastqGeneralIterator(in_handle):
        count += 1
        qual = old_qual.translate(mapping)
        if bad_char in qual:
            raise ValueError("Invalid character in quality string")
        out_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
    return count
def _fastq_generic2(in_handle, out_handle, mapping, truncate_char, truncate_msg):
    """FASTQ helper function where there could be data loss by truncation (PRIVATE).

    As _fastq_generic, but any quality translated to *truncate_char* is
    capped at the format maximum (ASCII 126) with a warning.
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    # Work on raw strings rather than SeqRecord/Seq objects for speed.
    bad_char = chr(0)
    count = 0
    for title, seq, old_qual in FastqGeneralIterator(in_handle):
        count += 1
        qual = old_qual.translate(mapping)
        if bad_char in qual:
            raise ValueError("Invalid character in quality string")
        if truncate_char in qual:
            # Cap over-range scores at the maximum representable value.
            qual = qual.replace(truncate_char, chr(126))
            import warnings
            warnings.warn(truncate_msg)
        out_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
    return count
def _fastq_sanger_convert_fastq_sanger(in_handle, out_handle, alphabet=None):
    """Fast Sanger FASTQ to Sanger FASTQ conversion (PRIVATE).

    Useful for removing line wrapping and the redundant second identifier
    on the plus lines.  Also validates the quality string: any character
    outside the Sanger range is mapped to NUL, which _fastq_generic
    rejects.  Avoids creating SeqRecord and Seq objects for speed.
    """
    # Identity mapping over valid Sanger quality characters (ASCII 33-126).
    table = [chr(0)] * 256
    for code in range(33, 127):
        table[code] = chr(code)
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_solexa_convert_fastq_solexa(in_handle, out_handle, alphabet=None):
    """Fast Solexa FASTQ to Solexa FASTQ conversion (PRIVATE).

    Useful for removing line wrapping and the redundant second identifier
    on the plus lines.  Also validates the quality string: any character
    outside the Solexa range is mapped to NUL, which _fastq_generic
    rejects.  Avoids creating SeqRecord and Seq objects for speed.
    """
    # Identity mapping over valid Solexa quality characters (ASCII 59-126).
    table = [chr(0)] * 256
    for code in range(59, 127):
        table[code] = chr(code)
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_illumina_convert_fastq_illumina(in_handle, out_handle, alphabet=None):
    """Fast Illumina 1.3+ FASTQ to Illumina 1.3+ FASTQ conversion (PRIVATE).

    Useful for removing line wrapping and the redundant second identifier
    on the plus lines.  Also validates the quality string: any character
    outside the Illumina 1.3+ range is mapped to NUL, which _fastq_generic
    rejects.  Avoids creating SeqRecord and Seq objects for speed.
    """
    # Identity mapping over valid Illumina 1.3+ quality chars (ASCII 64-126).
    table = [chr(0)] * 256
    for code in range(64, 127):
        table[code] = chr(code)
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_illumina_convert_fastq_sanger(in_handle, out_handle, alphabet=None):
    """Fast Illumina 1.3+ FASTQ to Sanger FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion.
    """
    # Shift the ASCII offset from 64 (Illumina 1.3+) down to 33 (Sanger)
    # for PHRED scores 0..62; anything else maps to NUL (invalid).
    table = [chr(0)] * 256
    for q in range(0, 62 + 1):
        table[64 + q] = chr(33 + q)
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_sanger_convert_fastq_illumina(in_handle, out_handle, alphabet=None):
    """Fast Sanger FASTQ to Illumina 1.3+ FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion. Will issue a warning if the scores had to be truncated at 62
    (maximum possible in the Illumina 1.3+ FASTQ format)
    """
    trunc_char = chr(1)
    # PHRED 0..62 shift from offset 33 (Sanger) to 64 (Illumina); Sanger
    # scores 63..93 exceed the Illumina range and are marked for truncation.
    table = [chr(0)] * 256
    for q in range(0, 62 + 1):
        table[33 + q] = chr(64 + q)
    for code in range(96, 127):
        table[code] = trunc_char
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic2(in_handle, out_handle, mapping, trunc_char,
                           "Data loss - max PHRED quality 62 in Illumina 1.3+ FASTQ")
def _fastq_solexa_convert_fastq_sanger(in_handle, out_handle, alphabet=None):
    """Fast Solexa FASTQ to Sanger FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion.
    """
    from Bio.SeqIO.QualityIO import phred_quality_from_solexa
    # Solexa scores -5..62 (offset 64) become rounded PHRED scores
    # (offset 33); anything else maps to NUL (invalid).
    table = [chr(0)] * 256
    for q in range(-5, 62 + 1):
        table[64 + q] = chr(33 + int(round(phred_quality_from_solexa(q))))
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_sanger_convert_fastq_solexa(in_handle, out_handle, alphabet=None):
    """Fast Sanger FASTQ to Solexa FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion. Will issue a warning if the scores had to be truncated at 62
    (maximum possible in the Solexa FASTQ format)
    """
    from Bio.SeqIO.QualityIO import solexa_quality_from_phred
    trunc_char = chr(1)
    # PHRED 0..62 (offset 33) become rounded Solexa scores (offset 64);
    # PHRED 63..93 exceed the Solexa range and are marked for truncation.
    table = [chr(0)] * 256
    for q in range(0, 62 + 1):
        table[33 + q] = chr(64 + int(round(solexa_quality_from_phred(q))))
    for code in range(96, 127):
        table[code] = trunc_char
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic2(in_handle, out_handle, mapping, trunc_char,
                           "Data loss - max Solexa quality 62 in Solexa FASTQ")
def _fastq_solexa_convert_fastq_illumina(in_handle, out_handle, alphabet=None):
    """Fast Solexa FASTQ to Illumina 1.3+ FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion.
    """
    from Bio.SeqIO.QualityIO import phred_quality_from_solexa
    # Solexa scores -5..62 become rounded PHRED scores; both encodings use
    # ASCII offset 64.  Anything else maps to NUL (invalid).
    table = [chr(0)] * 256
    for q in range(-5, 62 + 1):
        table[64 + q] = chr(64 + int(round(phred_quality_from_solexa(q))))
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_illumina_convert_fastq_solexa(in_handle, out_handle, alphabet=None):
    """Fast Illumina 1.3+ FASTQ to Solexa FASTQ conversion (PRIVATE).

    Avoids creating SeqRecord and Seq objects in order to speed up this
    conversion.  No truncation path is needed here: PHRED 0..62 always maps
    into the representable Solexa range, so (unlike the Sanger to Solexa
    converter) there is no data-loss warning.  The original defined an
    unused ``trunc_char`` local, removed here.
    """
    from Bio.SeqIO.QualityIO import solexa_quality_from_phred
    # PHRED 0..62 become rounded Solexa scores; both encodings use ASCII
    # offset 64.  Unexpected characters map to NUL so _fastq_generic
    # rejects them.
    table = [chr(0)] * 256
    for q in range(0, 62 + 1):
        table[64 + q] = chr(64 + int(round(solexa_quality_from_phred(q))))
    mapping = "".join(table)
    assert len(mapping) == 256
    return _fastq_generic(in_handle, out_handle, mapping)
def _fastq_convert_fasta(in_handle, out_handle, alphabet=None):
    """Fast FASTQ to FASTA conversion (PRIVATE).

    Avoids dealing with the FASTQ quality encoding, and creating SeqRecord
    and Seq objects in order to speed up this conversion.

    NOTE - This does NOT check the characters used in the FASTQ quality
    string are valid!
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    count = 0
    for title, seq, qual in FastqGeneralIterator(in_handle):
        count += 1
        out_handle.write(">%s\n" % title)
        # Wrap the sequence at 60 characters per line.
        for start in range(0, len(seq), 60):
            out_handle.write(seq[start:start + 60] + "\n")
    return count
def _fastq_convert_tab(in_handle, out_handle, alphabet=None):
    """Fast FASTQ to simple tabbed conversion (PRIVATE).

    Avoids dealing with the FASTQ quality encoding, and creating SeqRecord
    and Seq objects in order to speed up this conversion.

    NOTE - This does NOT check the characters used in the FASTQ quality
    string are valid!
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    count = 0
    for title, seq, qual in FastqGeneralIterator(in_handle):
        count += 1
        # The identifier is the first whitespace-separated word of the title.
        identifier = title.split(None, 1)[0]
        out_handle.write("%s\t%s\n" % (identifier, seq))
    return count
def _fastq_convert_qual(in_handle, out_handle, mapping):
    """FASTQ helper function for QUAL output (PRIVATE).

    Mapping should be a dictionary mapping expected ASCII characters from
    the FASTQ quality string to PHRED quality scores (as strings).
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    count = 0
    for title, seq, qual in FastqGeneralIterator(in_handle):
        count += 1
        out_handle.write(">%s\n" % title)
        try:
            scores = [mapping[letter] for letter in qual]
        except KeyError:
            raise ValueError("Invalid character in quality string")
        data = " ".join(scores)
        # Wrap at 60 characters, breaking on the spaces between scores.
        while len(data) > 60:
            # By construction there must be spaces in the first 60 chars
            # (unless we have 60 digit or higher quality scores!)
            cut = data.rfind(" ", 0, 60)
            out_handle.write(data[:cut] + "\n")
            data = data[cut + 1:]
        out_handle.write(data + "\n")
    return count
def _fastq_sanger_convert_qual(in_handle, out_handle, alphabet=None):
    """Fast Sanger FASTQ to QUAL conversion (PRIVATE)."""
    # Sanger encoding: ASCII offset 33, PHRED scores 0..93.
    mapping = dict((chr(33 + q), str(q)) for q in range(0, 93 + 1))
    return _fastq_convert_qual(in_handle, out_handle, mapping)
def _fastq_solexa_convert_qual(in_handle, out_handle, alphabet=None):
    """Fast Solexa FASTQ to QUAL conversion (PRIVATE)."""
    from Bio.SeqIO.QualityIO import phred_quality_from_solexa
    # Solexa encoding: ASCII offset 64, Solexa scores -5..62, converted to
    # rounded PHRED scores for the QUAL output.
    mapping = dict((chr(64 + q), str(int(round(phred_quality_from_solexa(q)))))
                   for q in range(-5, 62 + 1))
    return _fastq_convert_qual(in_handle, out_handle, mapping)
def _fastq_illumina_convert_qual(in_handle, out_handle, alphabet=None):
    """Fast Illumina 1.3+ FASTQ to QUAL conversion (PRIVATE)."""
    # Illumina 1.3+ encoding: ASCII offset 64, PHRED scores 0..62.
    mapping = dict((chr(64 + q), str(q)) for q in range(0, 62 + 1))
    return _fastq_convert_qual(in_handle, out_handle, mapping)
#TODO? - Handling aliases explicitly would let us shorten this list:
# Lookup table mapping (input format, output format) name pairs to the
# optimised converter functions defined above.  Pairs missing from this
# table fall back on the generic parse/write path in _handle_convert().
_converter = {
    # Annotated formats down-converted to plain FASTA:
    ("genbank", "fasta") : _genbank_convert_fasta,
    ("gb", "fasta") : _genbank_convert_fasta,
    ("embl", "fasta") : _embl_convert_fasta,
    ("fastq", "fasta") : _fastq_convert_fasta,
    ("fastq-sanger", "fasta") : _fastq_convert_fasta,
    ("fastq-solexa", "fasta") : _fastq_convert_fasta,
    ("fastq-illumina", "fasta") : _fastq_convert_fasta,
    # FASTQ to simple tab-separated identifier/sequence:
    ("fastq", "tab") : _fastq_convert_tab,
    ("fastq-sanger", "tab") : _fastq_convert_tab,
    ("fastq-solexa", "tab") : _fastq_convert_tab,
    ("fastq-illumina", "tab") : _fastq_convert_tab,
    # FASTQ quality re-encoding ("fastq" is an alias for "fastq-sanger"):
    ("fastq", "fastq") : _fastq_sanger_convert_fastq_sanger,
    ("fastq-sanger", "fastq") : _fastq_sanger_convert_fastq_sanger,
    ("fastq-solexa", "fastq") : _fastq_solexa_convert_fastq_sanger,
    ("fastq-illumina", "fastq") : _fastq_illumina_convert_fastq_sanger,
    ("fastq", "fastq-sanger") : _fastq_sanger_convert_fastq_sanger,
    ("fastq-sanger", "fastq-sanger") : _fastq_sanger_convert_fastq_sanger,
    ("fastq-solexa", "fastq-sanger") : _fastq_solexa_convert_fastq_sanger,
    ("fastq-illumina", "fastq-sanger") : _fastq_illumina_convert_fastq_sanger,
    ("fastq", "fastq-solexa") : _fastq_sanger_convert_fastq_solexa,
    ("fastq-sanger", "fastq-solexa") : _fastq_sanger_convert_fastq_solexa,
    ("fastq-solexa", "fastq-solexa") : _fastq_solexa_convert_fastq_solexa,
    ("fastq-illumina", "fastq-solexa") : _fastq_illumina_convert_fastq_solexa,
    ("fastq", "fastq-illumina") : _fastq_sanger_convert_fastq_illumina,
    ("fastq-sanger", "fastq-illumina") : _fastq_sanger_convert_fastq_illumina,
    ("fastq-solexa", "fastq-illumina") : _fastq_solexa_convert_fastq_illumina,
    ("fastq-illumina", "fastq-illumina") : _fastq_illumina_convert_fastq_illumina,
    # FASTQ qualities written out as a PHRED QUAL file:
    ("fastq", "qual") : _fastq_sanger_convert_qual,
    ("fastq-sanger", "qual") : _fastq_sanger_convert_qual,
    ("fastq-solexa", "qual") : _fastq_solexa_convert_qual,
    ("fastq-illumina", "qual") : _fastq_illumina_convert_qual,
    }
def _handle_convert(in_handle, in_format, out_handle, out_format, alphabet=None):
    """SeqIO conversion function (PRIVATE).

    Uses an optimised converter from _converter when one is registered for
    this format pair, otherwise falls back on a parse/write round trip.
    """
    converter = _converter.get((in_format, out_format))
    if converter is not None:
        return converter(in_handle, out_handle, alphabet)
    records = SeqIO.parse(in_handle, in_format, alphabet)
    return SeqIO.write(records, out_handle, out_format)
| bsd-2-clause |
matthiasdiener/spack | var/spack/repos/builtin/packages/gapfiller/package.py | 5 | 2227 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class Gapfiller(Package):
    """GapFiller is a stand-alone program for closing gaps within
    pre-assembled scaffolds.
    Note: A manual download is required for GapFiller.
    Spack will search your current directory for the download file.
    Alternatively, add this file to a mirror so that Spack can find it.
    For instructions on how to set up a mirror, see
    http://spack.readthedocs.io/en/latest/mirrors.html"""

    homepage = "https://www.baseclear.com/genomics/bioinformatics/basetools/gapfiller"

    version('1.10', '54d5e2ada131a1305a66e41c0d380382')

    depends_on('perl', type=('build', 'run'))

    def url_for_version(self, version):
        # Manual download: the tarball is expected in the working directory.
        filename = "39GapFiller_v{0}_linux-x86_64.tar.gz".format(version.dashed)
        return "file://{0}/{1}".format(os.getcwd(), filename)

    def install(self, spec, prefix):
        # Copy the bundled aligners and the main driver script into place.
        install_tree('bowtie', prefix.bowtie)
        install_tree('bwa', prefix.bwa)
        install('GapFiller.pl', prefix)
| lgpl-2.1 |
cloudbrain/cloudbrain | src/cloudbrain/modules/sources/beat.py | 3 | 1899 | import time
import logging
import threading
from cloudbrain.modules.interface import ModuleInterface
# NOTE(review): the name suggests nanoseconds (1e9) but the value is 1e6
# (microseconds per second).  Timestamps computed as
# int(time.time() * _NANOSECONDS) are therefore in microseconds -- confirm
# the unit expected by consumers of this metric.
_NANOSECONDS = 1000000
# Module-level logger named after this module.
_LOGGER = logging.getLogger(__name__)
# NOTE: the string below is a bare expression, not the module docstring
# (it is not the first statement in the file), so it acts as a comment.
"""
Publish constant values at regular intervals on each channel.
Note that this module could be easily turned into a transformer that turns
bpm in beats by subscribing to the BPMTransformer output.
"""
def _publish(publisher, metric_name, data_to_send, bpm):
    """Publish *data_to_send* forever, once per beat (60/bpm seconds)."""
    # bpm never changes, so the sleep interval can be computed once.
    interval_s = 60.0 / bpm
    while True:
        publisher.publish(metric_name, data_to_send)
        time.sleep(interval_s)
class BeatSource(ModuleInterface):
    """Publish a constant-amplitude 'beat' on every channel of every
    publisher metric, at the configured beats per minute."""

    def __init__(self, subscribers, publishers, beat_amplitude, bpm):
        super(BeatSource, self).__init__(subscribers, publishers)
        _LOGGER.debug("Subscribers: %s" % self.subscribers)
        _LOGGER.debug("Publishers: %s" % self.publishers)
        self.beat_amplitude = beat_amplitude
        self.bpm = bpm
        self.threads = []

    def start(self):
        """Spawn one publishing thread per metric buffer."""
        for publisher in self.publishers:
            for metric_buffer in publisher.metric_buffers.values():
                # NOTE: the timestamp is captured once here and the same
                # payload is re-published on every beat.
                payload = {'timestamp': int(time.time() * _NANOSECONDS)}
                for channel in range(metric_buffer.num_channels):
                    payload['channel_%s' % channel] = self.beat_amplitude

                worker = threading.Thread(
                    target=_publish,
                    args=(publisher, metric_buffer.name, payload, self.bpm,))
                self.threads.append(worker)
                worker.start()

    def stop(self):
        # NOTE(review): _publish loops forever, so join() blocks
        # indefinitely -- a stop flag would be needed for a clean shutdown.
        for worker in self.threads:
            worker.join()
| agpl-3.0 |
eudoxos/woodem | scripts/test-OLD/WireMatPM/net-2part-strain.py | 3 | 3628 | # -*- coding: utf-8 -*-
# encoding: utf-8
# Two-sphere tension test for the WireMat contact law: the pair is loaded
# by UniaxialStrainer (added below) and normal force vs. displacement is
# recorded via woo.plot.  (Python 2 script; relies on woo's global O.)
from woo import utils, ymport, qt
#### logging
from woo import log
log.setLevel('Law2_ScGeom_WirePhys_WirePM',log.TRACE) # must compile with debug option to get logs
#log.setLevel('Law2_ScGeom_WirePhys_WirePM',log.DEBUG)
#log.setLevel('',log.WARN)
## definition of some colors for colored text output in terminal
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BLACK = '\033[0m'
#### short description of script
print BLUE+'Simple test for two particles to test contact law with '+RED+'UniaxialStrainer'+BLUE+'.'+BLACK
#### define parameters for the net
# mesh opening size
mos = 80./1000.
# NOTE(review): sqrt (and pi below) are only star-imported from math near
# the end of this script -- presumably woo re-exports them; verify.
a = mos/sqrt(3)
# wire diameter
d = 2.7/1000.
# particle radius
radius = d*5.
# define piecewise lineare stress-strain curve
strainStressValues=[(0.0019230769,2.5e8),(0.0192,3.2195e8),(0.05,3.8292e8),(0.15,5.1219e8),(0.25,5.5854e8),(0.3,5.6585e8),(0.35,5.6585e8)]
# elastic material properties
particleVolume = 4./3.*pow(radius,3)*pi
particleMass = 3.9/1000.
density = particleMass/particleVolume
# Young's modulus taken from the initial slope of the stress-strain curve.
young = strainStressValues[0][1] / strainStressValues[0][0]
poisson = 0.3
#### material definition
netMat = O.materials.append(WireMat(young=young,poisson=poisson,density=density,isDoubleTwist=False,diameter=d,strainStressValues=strainStressValues,lambdaEps=0.4,lambdak=0.21))
#### create bodies, default: dynamic=True
O.bodies.append( utils.sphere([0,0,0], radius, wire=False, color=[1,0,0], highlight=False, material=netMat) )
O.bodies.append( utils.sphere([0,a,0], radius, wire=False, color=[0,1,0], highlight=False, material=netMat) )
FixedSphere=O.bodies[0]
MovingSphere=O.bodies[1]
FixedSphere.dynamic=True
MovingSphere.dynamic=True
#### initialize values for UniaxialStrainer
bb = utils.uniaxialTestFeatures(axis=1)
negIds,posIds,axis,crossSectionArea=bb['negIds'],bb['posIds'],bb['axis'],bb['area']
strainRateTension = 1./a
setSpeeds = True
#### define simulation to create link
# Enlarged detection radius so the initially separated spheres interact.
interactionRadius=2.
O.engines = [
	ForceResetter(),
	InsertionSortCollider( [Bo1_Sphere_Aabb(aabbEnlargeFactor=interactionRadius,label='aabb')] ),
	InteractionLoop(
	[Ig2_Sphere_Sphere_ScGeom(interactionDetectionFactor=interactionRadius,label='Ig2ssGeom')],
	[Ip2_WireMat_WireMat_WirePhys(linkThresholdIteration=1,label='interactionPhys')],
	[Law2_ScGeom_WirePhys_WirePM(linkThresholdIteration=1,label='interactionLaw')]
	),
	NewtonIntegrator(damping=0.)
]
#### create link (no time step needed since loading is involved in this step)
O.step() # create cohesive link (cohesiveTresholdIteration=1)
#### initializes now the interaction detection factor
# Reset enlargement so no further (non-cohesive) contacts are created.
aabb.aabbEnlargeFactor=-1.
Ig2ssGeom.interactionDetectionFactor=-1.
## time step definition
O.dt = 1e-5
## critical time step proposed by Bertrand
#O.dt = 0.2*sqrt(particleMass/(2.*O.interactions[0,1].phys.kn))
#### plot some results
from math import *
from woo import plot
plot.plots={'un':('Fn',)}
plot.plot(noShow=False, subPlots=False)
def addPlotData():
try:
i=O.interactions[FixedSphere.id,MovingSphere.id]
plot.addData( Fn=i.phys.normalForce.norm(), un=(O.bodies[1].state.pos[1]-O.bodies[0].state.pos[1])-a )
#plot.saveGnuplot('net-2part-strain')
except:
print "No interaction!"
O.pause()
#### define simulation
# Strain the pair along axis 1 and record Fn vs. un every iteration.
O.engines += [UniaxialStrainer(strainRate=strainRateTension,axis=axis,asymmetry=1,posIds=posIds,negIds=negIds,crossSectionArea=crossSectionArea,blockDisplacements=True,blockRotations=True,setSpeeds=setSpeeds,label='strainer')] + [PyRunner(initRun=True,iterPeriod=1,command='addPlotData()')]
#### to see it
v=qt.Controller()
v=qt.View()
rr=qt.Renderer()
rr.intrAllWire=True
| gpl-2.0 |
jhawkesworth/ansible | test/units/module_utils/facts/hardware/test_linux.py | 93 | 7748 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from units.compat import unittest
from units.compat.mock import Mock, patch
from ansible.module_utils.facts import timeout
from ansible.module_utils.facts.hardware import linux
from . linux_data import LSBLK_OUTPUT, LSBLK_OUTPUT_2, LSBLK_UUIDS, MTAB, MTAB_ENTRIES, BIND_MOUNTS, STATVFS_INFO, UDEVADM_UUID, UDEVADM_OUTPUT
# Canned `findmnt` output used by the bind-mount tests below.
with open(os.path.join(os.path.dirname(__file__), '../fixtures/findmount_output.txt')) as f:
    FINDMNT_OUTPUT = f.read()
# NOTE(review): appears unused in this module -- candidate for removal.
GET_MOUNT_SIZE = {}
def mock_get_mount_size(mountpoint):
    # side_effect stand-in for get_mount_size(): returns canned statvfs data
    # for known mountpoints, an empty dict otherwise.
    return STATVFS_INFO.get(mountpoint, {})
class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
    """Unit tests for LinuxHardware's mount-fact collection helpers,
    with all external commands and file reads mocked out."""

    # FIXME: mock.patch instead
    def setUp(self):
        # get_mount_facts() gathers under a timeout; give it a real value.
        timeout.GATHER_TIMEOUT = 10

    def tearDown(self):
        timeout.GATHER_TIMEOUT = None

    # Decorators apply bottom-up, so mock arguments arrive in reverse order.
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._mtab_entries', return_value=MTAB_ENTRIES)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._find_bind_mounts', return_value=BIND_MOUNTS)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._lsblk_uuid', return_value=LSBLK_UUIDS)
    @patch('ansible.module_utils.facts.hardware.linux.get_mount_size', side_effect=mock_get_mount_size)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._udevadm_uuid', return_value=UDEVADM_UUID)
    def test_get_mount_facts(self,
                             mock_get_mount_size,
                             mock_lsblk_uuid,
                             mock_find_bind_mounts,
                             mock_mtab_entries,
                             mock_udevadm_uuid):
        module = Mock()
        # Returns a LinuxHardware-ish
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        # Nothing returned, just self.facts modified as a side effect
        mount_facts = lh.get_mount_facts()
        self.assertIsInstance(mount_facts, dict)
        self.assertIn('mounts', mount_facts)
        self.assertIsInstance(mount_facts['mounts'], list)
        self.assertIsInstance(mount_facts['mounts'][0], dict)
        # Expected entry for /home, assembled from the canned fixtures.
        home_expected = {'block_available': 1001578731,
                         'block_size': 4096,
                         'block_total': 105871006,
                         'block_used': 5713133,
                         'device': '/dev/mapper/fedora_dhcp129--186-home',
                         'fstype': 'ext4',
                         'inode_available': 26860880,
                         'inode_total': 26902528,
                         'inode_used': 41648,
                         'mount': '/home',
                         'options': 'rw,seclabel,relatime,data=ordered',
                         'size_available': 410246647808,
                         'size_total': 433647640576,
                         'uuid': 'N/A'}
        home_info = [x for x in mount_facts['mounts'] if x['mount'] == '/home'][0]
        self.maxDiff = 4096
        self.assertDictEqual(home_info, home_expected)

    @patch('ansible.module_utils.facts.hardware.linux.get_file_content', return_value=MTAB)
    def test_get_mtab_entries(self, mock_get_file_content):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        mtab_entries = lh._mtab_entries()
        self.assertIsInstance(mtab_entries, list)
        self.assertIsInstance(mtab_entries[0], list)
        self.assertEqual(len(mtab_entries), 38)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(0, FINDMNT_OUTPUT, ''))
    def test_find_bind_mounts(self, mock_run_findmnt):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        # If bind_mounts becomes another seq type, feel free to change
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 1)
        self.assertIn('/not/a/real/bind_mount', bind_mounts)

    # A failing findmnt (non-zero rc) should yield an empty result set.
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(37, '', ''))
    def test_find_bind_mounts_non_zero(self, mock_run_findmnt):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)

    def test_find_bind_mounts_no_findmnts(self):
        # No findmnt binary available at all.
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop9', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)

    def test_lsblk_uuid_no_lsblk(self):
        # No lsblk binary available at all.
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
    def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop0', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/mapper/an-example-mapper with a space in the name'], b'84639acb-013f-4d2f-9392-526a572b4373')
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')

    def test_udevadm_uuid(self):
        module = Mock()
        module.run_command = Mock(return_value=(0, UDEVADM_OUTPUT, '')) # (rc, out, err)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        udevadm_uuid = lh._udevadm_uuid('mock_device')
        self.assertEqual(udevadm_uuid, '57b1a3e7-9019-4747-9809-7ec52bba9179')
| gpl-3.0 |
scrollback/kuma | kuma/wiki/migrations/0015_add_topical_parent.py | 5 | 13477 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the self-referential
    ``Document.parent_topic`` foreign key (related_name='children')."""
    def forwards(self, orm):
        """Apply the migration: add the nullable ``parent_topic`` column."""
        # Adding field 'Document.parent_topic'
        db.add_column('wiki_document', 'parent_topic', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['wiki.Document']), keep_default=False)
    def backwards(self, orm):
        """Revert the migration: drop the ``parent_topic`` column."""
        # Deleting field 'Document.parent_topic'
        db.delete_column('wiki_document', 'parent_topic_id')
    # Frozen ORM snapshot that South uses while running this migration.
    # Auto-generated -- do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'tidings.watch': {
            'Meta': {'object_name': 'Watch'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'event_type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'secret': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'wiki.document': {
            'Meta': {'unique_together': "(('parent', 'locale'), ('slug', 'locale'))", 'object_name': 'Document'},
            'category': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'current_revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'current_for+'", 'null': 'True', 'to': "orm['wiki.Revision']"}),
            'html': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_localizable': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'is_template': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'locale': ('kuma.core.fields.LocaleField', [], {'default': "'en-US'", 'max_length': '7', 'db_index': 'True'}),
            'mindtouch_page_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'translations'", 'null': 'True', 'to': "orm['wiki.Document']"}),
            'parent_topic': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['wiki.Document']"}),
            'related_documents': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['wiki.Document']", 'through': "orm['wiki.RelatedDocument']", 'symmetrical': 'False'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'wiki.documenttag': {
            'Meta': {'object_name': 'DocumentTag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        'wiki.editortoolbar': {
            'Meta': {'object_name': 'EditorToolbar'},
            'code': ('django.db.models.fields.TextField', [], {'max_length': '2000'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_toolbars'", 'to': "orm['auth.User']"}),
            'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'wiki.firefoxversion': {
            'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'FirefoxVersion'},
            'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'firefox_version_set'", 'to': "orm['wiki.Document']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item_id': ('django.db.models.fields.IntegerField', [], {})
        },
        'wiki.helpfulvote': {
            'Meta': {'object_name': 'HelpfulVote'},
            'anonymous_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'null': 'True', 'to': "orm['auth.User']"}),
            'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'to': "orm['wiki.Document']"}),
            'helpful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user_agent': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
        },
        'wiki.operatingsystem': {
            'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'OperatingSystem'},
            'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'operating_system_set'", 'to': "orm['wiki.Document']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item_id': ('django.db.models.fields.IntegerField', [], {})
        },
        'wiki.relateddocument': {
            'Meta': {'ordering': "['-in_common']", 'object_name': 'RelatedDocument'},
            'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_from'", 'to': "orm['wiki.Document']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_common': ('django.db.models.fields.IntegerField', [], {}),
            'related': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_to'", 'to': "orm['wiki.Document']"})
        },
        'wiki.reviewtag': {
            'Meta': {'object_name': 'ReviewTag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        'wiki.reviewtaggedrevision': {
            'Meta': {'object_name': 'ReviewTaggedRevision'},
            'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.ReviewTag']"})
        },
        'wiki.revision': {
            'Meta': {'object_name': 'Revision'},
            'based_on': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']", 'null': 'True', 'blank': 'True'}),
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_revisions'", 'to': "orm['auth.User']"}),
            'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['wiki.Document']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'is_mindtouch_migration': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'mindtouch_old_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'db_index': 'True'}),
            'reviewed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'reviewer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reviewed_revisions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'show_toc': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'significance': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {}),
            'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'})
        },
        'wiki.taggeddocument': {
            'Meta': {'object_name': 'TaggedDocument'},
            'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Document']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.DocumentTag']"})
        }
    }
    complete_apps = ['wiki']
| mpl-2.0 |
feer56/Kitsune2 | kitsune/wiki/tests/test_facets.py | 16 | 4768 | from nose.tools import eq_
from kitsune.products.tests import product, topic
from kitsune.search.tests.test_es import ElasticTestCase
from kitsune.sumo.tests import TestCase
from kitsune.wiki.facets import (
topics_for, documents_for, _documents_for, _db_documents_for)
from kitsune.wiki.tests import revision
class TestFacetHelpersMixin(object):
    """Shared fixture for the facet-helper tests: two products, six topics
    and five documents with different visibility characteristics."""
    def facets_setUp(self):
        """Create the products, topics and documents the tests query."""
        # Create products
        self.desktop = product(slug='firefox', save=True)
        self.mobile = product(slug='mobile', save=True)
        # Create topics
        self.general_d = topic(
            product=self.desktop, slug='general', save=True)
        self.bookmarks_d = topic(
            product=self.desktop, slug='bookmarks', save=True)
        self.sync_d = topic(product=self.desktop, slug='sync', save=True)
        self.general_m = topic(
            product=self.mobile, slug='general', save=True)
        self.bookmarks_m = topic(
            product=self.mobile, slug='bookmarks', save=True)
        self.sync_m = topic(product=self.mobile, slug='sync', save=True)
        # Set up documents.
        # doc1: desktop-only, general + bookmarks.
        doc1 = revision(is_approved=True, save=True).document
        doc1.topics.add(self.general_d)
        doc1.topics.add(self.bookmarks_d)
        doc1.products.add(self.desktop)
        # doc2: both products, bookmarks + sync.
        doc2 = revision(is_approved=True, save=True).document
        doc2.topics.add(self.bookmarks_d)
        doc2.topics.add(self.bookmarks_m)
        doc2.topics.add(self.sync_d)
        doc2.topics.add(self.sync_m)
        doc2.products.add(self.desktop)
        doc2.products.add(self.mobile)
        # An archived article shouldn't show up
        doc3 = revision(is_approved=True, save=True).document
        doc3.is_archived = True
        doc3.save()
        doc3.topics.add(self.general_d)
        doc3.topics.add(self.bookmarks_d)
        doc3.products.add(self.desktop)
        # A template article shouldn't show up either
        doc4 = revision(is_approved=True, save=True).document
        doc4.category = 60
        doc4.title = 'Template: Test'
        doc4.save()
        doc4.topics.add(self.general_d)
        doc4.topics.add(self.bookmarks_d)
        doc4.products.add(self.desktop)
        # An article without current revision should be "invisible"
        # to everything.
        doc5 = revision(is_approved=False, save=True).document
        doc5.topics.add(self.general_d)
        doc5.topics.add(self.general_m)
        doc5.topics.add(self.bookmarks_d)
        doc5.topics.add(self.bookmarks_m)
        doc5.topics.add(self.sync_d)
        doc5.topics.add(self.sync_m)
        doc5.products.add(self.desktop)
        doc5.products.add(self.mobile)
class TestFacetHelpers(TestCase, TestFacetHelpersMixin):
    """Database-backed tests for the facet helper functions."""

    def setUp(self):
        super(TestFacetHelpers, self).setUp()
        self.facets_setUp()

    def test_topics_for_products(self):
        """topics_for() should return only the given product's topics."""
        for prod, expected_count in ((self.desktop, 3), (self.mobile, 2)):
            eq_(len(topics_for(product=prod)), expected_count)
class TestFacetHelpersES(ElasticTestCase, TestFacetHelpersMixin):
    """Elasticsearch-backed tests for the documents_for helpers."""

    def setUp(self):
        super(TestFacetHelpersES, self).setUp()
        self.facets_setUp()
        self.refresh()

    def _test_documents_for(self, d_f):
        """Run the shared (locale, topics, count) expectations against d_f."""
        expectations = [
            ('en-US', [self.general_d], 1),
            ('en-US', [self.bookmarks_d], 2),
            ('en-US', [self.sync_d], 1),
            ('en-US', [self.general_d, self.bookmarks_d], 1),
            ('es', [self.general_d, self.bookmarks_d], 0),
            ('en-US', [self.general_d, self.sync_d], 0),
        ]
        for locale, topic_list, expected_count in expectations:
            docs = d_f(locale=locale, topics=topic_list)
            eq_(len(docs), expected_count)

    def test_documents_for(self):
        """Verify documents_for() returns documents for passed topics."""
        # Exercise the default ES implementation, then the DB fallback.
        for implementation in (_documents_for, _db_documents_for):
            self._test_documents_for(implementation)

    def test_documents_for_fallback(self):
        """Verify the fallback in documents_for."""
        docs, fallback = documents_for(
            locale='es', topics=[self.general_d, self.bookmarks_d])
        eq_(len(docs), 0)
        eq_(len(fallback), 1)
| bsd-3-clause |
Anonymous-X6/django | tests/template_tests/syntax_tests/test_named_endblock.py | 521 | 2312 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class NamedEndblockTests(SimpleTestCase):
    """Tests for the optional name on {% endblock %}: a given name must
    match the block being closed; named and unnamed forms may be mixed."""
    @setup({'namedendblocks01': '1{% block first %}_{% block second %}'
                '2{% endblock second %}_{% endblock first %}3'})
    def test_namedendblocks01(self):
        # Correctly matched endblock names render normally.
        output = self.engine.render_to_string('namedendblocks01')
        self.assertEqual(output, '1_2_3')
    # Unbalanced blocks
    @setup({'namedendblocks02': '1{% block first %}_{% block second %}'
                '2{% endblock first %}_{% endblock second %}3'})
    def test_namedendblocks02(self):
        # Crossed names (first closed before second) are a syntax error.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks02')
    @setup({'namedendblocks03': '1{% block first %}_{% block second %}'
                '2{% endblock %}_{% endblock second %}3'})
    def test_namedendblocks03(self):
        # The unnamed endblock closes 'second'; naming the outer close
        # 'second' again therefore mismatches.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks03')
    @setup({'namedendblocks04': '1{% block first %}_{% block second %}'
                '2{% endblock second %}_{% endblock third %}3'})
    def test_namedendblocks04(self):
        # 'third' names a block that was never opened.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks04')
    @setup({'namedendblocks05': '1{% block first %}_{% block second %}2{% endblock first %}'})
    def test_namedendblocks05(self):
        # 'second' is still open when 'first' is closed.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('namedendblocks05')
    # Mixed named and unnamed endblocks
    @setup({'namedendblocks06': '1{% block first %}_{% block second %}'
                '2{% endblock %}_{% endblock first %}3'})
    def test_namedendblocks06(self):
        """
        Mixed named and unnamed endblocks
        """
        output = self.engine.render_to_string('namedendblocks06')
        self.assertEqual(output, '1_2_3')
    @setup({'namedendblocks07': '1{% block first %}_{% block second %}'
                '2{% endblock second %}_{% endblock %}3'})
    def test_namedendblocks07(self):
        # Unnamed outer endblock after a named inner one is also valid.
        output = self.engine.render_to_string('namedendblocks07')
        self.assertEqual(output, '1_2_3')
| bsd-3-clause |
rottenbytes/Sick-Beard | sickbeard/clients/requests/api.py | 637 | 4333 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.
    Returns :class:`Response <Response>` object.
    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) Float describing the timeout of the request.
    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    Usage::
      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """
    # Use a throwaway Session for this one-shot request and close it when
    # done: the previous version never closed it, leaking the session's
    # pooled connections/sockets on every module-level call.  Callers that
    # want connection reuse should create a Session themselves.
    session = sessions.Session()
    try:
        return session.request(method=method, url=url, **kwargs)
    finally:
        session.close()
def get(url, **kwargs):
    """Send a GET request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param kwargs: optional arguments that ``request`` takes.
    """
    # GETs follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, **kwargs)
def options(url, **kwargs):
    """Send an OPTIONS request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param kwargs: optional arguments that ``request`` takes.
    """
    # OPTIONS requests follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)
def head(url, **kwargs):
    """Send a HEAD request and return the :class:`Response` object.

    Unlike the other verbs, redirects are NOT followed by default.

    :param url: URL for the new :class:`Request` object.
    :param kwargs: optional arguments that ``request`` takes.
    """
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
def post(url, data=None, **kwargs):
    """Send a POST request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dictionary, bytes, or file-like object to send
        in the body of the request.
    :param kwargs: optional arguments that ``request`` takes.
    """
    return request('post', url, data=data, **kwargs)
def put(url, data=None, **kwargs):
    """Send a PUT request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dictionary, bytes, or file-like object to send
        in the body of the request.
    :param kwargs: optional arguments that ``request`` takes.
    """
    return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
    """Send a PATCH request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) dictionary, bytes, or file-like object to send
        in the body of the request.
    :param kwargs: optional arguments that ``request`` takes.
    """
    return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
    """Send a DELETE request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param kwargs: optional arguments that ``request`` takes.
    """
    return request('delete', url, **kwargs)
| gpl-3.0 |
40223231/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/unittest/mock.py | 739 | 71473 | # mock.py
# Test tools for mocking and patching.
# Maintained by Michael Foord
# Backport for other versions of Python available from
# http://pypi.python.org/pypi/mock
# Public names exported via ``from mock import *``.
__all__ = (
    'Mock',
    'MagicMock',
    'patch',
    'sentinel',
    'DEFAULT',
    'ANY',
    'call',
    'create_autospec',
    'FILTER_DIR',
    'NonCallableMock',
    'NonCallableMagicMock',
    'mock_open',
    'PropertyMock',
)
__version__ = '1.0'
import inspect
import pprint
import sys
from functools import wraps
# Root exception types used to recognise exception objects/classes
# (see _is_exception); extended with java.lang.Throwable on Jython.
BaseExceptions = (BaseException,)
if 'java' in sys.platform:
    # jython
    import java
    BaseExceptions = (BaseException, java.lang.Throwable)
# Module-level switch; presumably controls filtering of dir() output on
# mocks -- the consumer is defined later in the file. TODO confirm.
FILTER_DIR = True
# Workaround for issue #12370
# Without this, the __class__ properties wouldn't be set correctly
_safe_super = super
def _is_instance_mock(obj):
    """Return True if `obj` is an instance of (a subclass of) NonCallableMock."""
    # can't use isinstance on Mock objects because they override __class__
    # The base class for all mocks is NonCallableMock
    return issubclass(type(obj), NonCallableMock)
def _is_exception(obj):
    """Return True for exception instances and for exception classes."""
    if isinstance(obj, BaseExceptions):
        return True
    # A class counts as an exception if it derives from one of the roots.
    return isinstance(obj, type) and issubclass(obj, BaseExceptions)
class _slotted(object):
    """Throwaway class whose only purpose is to yield a slot descriptor."""
    __slots__ = ['a']
# Types treated as data descriptors: slot descriptors and properties.
DescriptorTypes = (
    type(_slotted.a),
    property,
)
def _getsignature(func, skipfirst, instance=False):
    """Return ``(argspec_string, function)`` for `func`, or None when the
    signature cannot be introspected (e.g. a C function).

    `skipfirst` drops the first positional argument (the implicit
    self/cls); `instance` forces `func` to be treated as an instance even
    when it is a class.
    """
    if isinstance(func, type) and not instance:
        # Classes are inspected through __init__, which takes self first.
        try:
            func = func.__init__
        except AttributeError:
            return
        skipfirst = True
    elif not isinstance(func, FunctionTypes):
        # for classes where instance is True we end up here too
        try:
            func = func.__call__
        except AttributeError:
            return
    try:
        argspec = inspect.getfullargspec(func)
    except TypeError:
        # C function / method, possibly inherited object().__init__
        return
    regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec
    # instance methods and classmethods need to lose the self argument
    if getattr(func, '__self__', None) is not None:
        regargs = regargs[1:]
    if skipfirst:
        # this condition and the above one are never both True - why?
        regargs = regargs[1:]
    # formatvalue="" drops default values so only names appear.
    signature = inspect.formatargspec(
        regargs, varargs, varkw, defaults,
        kwonly, kwonlydef, ann, formatvalue=lambda value: "")
    # Strip the parentheses that formatargspec wraps around the spec.
    return signature[1:-1], func
def _check_signature(func, mock, skipfirst, instance=False):
    """Attach a signature-checking lambda to `mock`'s class.

    `checksig` is a no-op lambda with the same signature as `func` (plus a
    leading mock-self slot), so calling the mock with the wrong arguments
    raises TypeError just like the real function would.
    """
    if not _callable(func):
        return
    result = _getsignature(func, skipfirst, instance)
    if result is None:
        return
    signature, func = result
    # can't use self because "self" is common as an argument name
    # unfortunately even not in the first place
    src = "lambda _mock_self, %s: None" % signature
    checksig = eval(src, {})
    _copy_func_details(func, checksig)
    type(mock)._mock_check_sig = checksig
def _copy_func_details(func, funcopy):
funcopy.__name__ = func.__name__
funcopy.__doc__ = func.__doc__
# we explicitly don't copy func.__dict__ into this copy as it would
# expose original attributes that should be mocked
funcopy.__module__ = func.__module__
funcopy.__defaults__ = func.__defaults__
funcopy.__kwdefaults__ = func.__kwdefaults__
def _callable(obj):
if isinstance(obj, type):
return True
if getattr(obj, '__call__', None) is not None:
return True
return False
def _is_list(obj):
# checks for list or tuples
# XXXX badly named!
return type(obj) in (list, tuple)
def _instance_callable(obj):
"""Given an object, return True if the object is callable.
For classes, return True if instances would be callable."""
if not isinstance(obj, type):
# already an instance
return getattr(obj, '__call__', None) is not None
# *could* be broken by a class overriding __mro__ or __dict__ via
# a metaclass
for base in (obj,) + obj.__mro__:
if base.__dict__.get('__call__') is not None:
return True
return False
def _set_signature(mock, original, instance=False):
    """Return a delegate function whose signature errors match `original`
    and whose calls are forwarded to `mock`; returns None when `original`
    has no introspectable signature."""
    # creates a function with signature (*args, **kwargs) that delegates to a
    # mock. It still does signature checking by calling a lambda with the same
    # signature as the original.
    if not _callable(original):
        return
    skipfirst = isinstance(original, type)
    result = _getsignature(original, skipfirst, instance)
    if result is None:
        # was a C function (e.g. object().__init__ ) that can't be mocked
        return
    signature, func = result
    # checksig is a no-op lambda that only validates the argument list.
    src = "lambda %s: None" % signature
    checksig = eval(src, {})
    _copy_func_details(func, checksig)
    name = original.__name__
    if not name.isidentifier():
        # e.g. '<lambda>' -- fall back to a name exec() can define.
        name = 'funcopy'
    context = {'_checksig_': checksig, 'mock': mock}
    src = """def %s(*args, **kwargs):
    _checksig_(*args, **kwargs)
    return mock(*args, **kwargs)""" % name
    exec (src, context)
    funcopy = context[name]
    _setup_func(funcopy, mock)
    return funcopy
def _setup_func(funcopy, mock):
    """Wire the delegate function `funcopy` to `mock`: expose the familiar
    mock assertion/reset API on the function and make `mock` record its
    calls through the delegate."""
    funcopy.mock = mock
    # can't use isinstance with mocks
    if not _is_instance_mock(mock):
        return
    # The assertion helpers simply forward to the underlying mock.
    def assert_called_with(*args, **kwargs):
        return mock.assert_called_with(*args, **kwargs)
    def assert_called_once_with(*args, **kwargs):
        return mock.assert_called_once_with(*args, **kwargs)
    def assert_has_calls(*args, **kwargs):
        return mock.assert_has_calls(*args, **kwargs)
    def assert_any_call(*args, **kwargs):
        return mock.assert_any_call(*args, **kwargs)
    def reset_mock():
        # Reset both the function's own call records and the mock's,
        # plus any mock stored as the return value (unless it is `mock`
        # itself, which was just reset).
        funcopy.method_calls = _CallList()
        funcopy.mock_calls = _CallList()
        mock.reset_mock()
        ret = funcopy.return_value
        if _is_instance_mock(ret) and not ret is mock:
            ret.reset_mock()
    # Seed the function with fresh call-tracking state mirroring the mock.
    funcopy.called = False
    funcopy.call_count = 0
    funcopy.call_args = None
    funcopy.call_args_list = _CallList()
    funcopy.method_calls = _CallList()
    funcopy.mock_calls = _CallList()
    funcopy.return_value = mock.return_value
    funcopy.side_effect = mock.side_effect
    funcopy._mock_children = mock._mock_children
    funcopy.assert_called_with = assert_called_with
    funcopy.assert_called_once_with = assert_called_once_with
    funcopy.assert_has_calls = assert_has_calls
    funcopy.assert_any_call = assert_any_call
    funcopy.reset_mock = reset_mock
    # From now on the mock reads/writes its call state through funcopy.
    mock._mock_delegate = funcopy
def _is_magic(name):
return '__%s__' % name[2:-2] == name
class _SentinelObject(object):
"A unique, named, sentinel object."
def __init__(self, name):
self.name = name
def __repr__(self):
return 'sentinel.%s' % self.name
class _Sentinel(object):
    """Access attributes to return a named object, usable as a sentinel."""
    def __init__(self):
        # Cache of name -> _SentinelObject so each name yields ONE object.
        self._sentinels = {}
    def __getattr__(self, name):
        if name == '__bases__':
            # Without this help(unittest.mock) raises an exception
            raise AttributeError
        return self._sentinels.setdefault(name, _SentinelObject(name))
# Singleton attribute-factory: ``sentinel.NAME`` always returns the same
# unique named object for a given NAME.
sentinel = _Sentinel()
DEFAULT = sentinel.DEFAULT  # marker meaning "no explicit value configured"
_missing = sentinel.MISSING
_deleted = sentinel.DELETED
def _copy(value):
if type(value) in (dict, list, tuple, set):
return type(value)(value)
return value
# Attribute names treated specially by the mock machinery;
# _delegating_property() below adds each delegating property's name to
# this set as it is created.  (The consumer of the set is defined later
# in the file, outside this chunk -- presumably attribute interception.)
_allowed_names = set(
    [
        'return_value', '_mock_return_value', 'side_effect',
        '_mock_side_effect', '_mock_parent', '_mock_new_parent',
        '_mock_name', '_mock_new_name'
    ]
)
def _delegating_property(name):
    """Build a property for attribute `name` that reads and writes through
    ``_mock_delegate`` when one is set, falling back to the mock's own
    ``_mock_<name>`` slot otherwise."""
    _allowed_names.add(name)
    _the_name = '_mock_' + name
    # name/_the_name are bound as argument defaults so each closure keeps
    # its own copy of the values from this call.
    def _get(self, name=name, _the_name=_the_name):
        sig = self._mock_delegate
        if sig is None:
            return getattr(self, _the_name)
        return getattr(sig, name)
    def _set(self, value, name=name, _the_name=_the_name):
        sig = self._mock_delegate
        if sig is None:
            # Write straight into __dict__ to avoid __setattr__ machinery.
            self.__dict__[_the_name] = value
        else:
            setattr(sig, name, value)
    return property(_get, _set)
class _CallList(list):
def __contains__(self, value):
if not isinstance(value, list):
return list.__contains__(self, value)
len_value = len(value)
len_self = len(self)
if len_value > len_self:
return False
for i in range(0, len_self - len_value + 1):
sub_list = self[i:i+len_value]
if sub_list == value:
return True
return False
def __repr__(self):
return pprint.pformat(list(self))
def _check_and_set_parent(parent, value, name, new_name):
    """If `value` is a freshly created, unattached mock, adopt it as a
    child of `parent` under `name`/`new_name` and return True; otherwise
    return False without touching it."""
    if not _is_instance_mock(value):
        return False
    # A mock that already has a name or a parent is left alone.
    if ((value._mock_name or value._mock_new_name) or
        (value._mock_parent is not None) or
        (value._mock_new_parent is not None)):
        return False
    _parent = parent
    while _parent is not None:
        # setting a mock (value) as a child or return value of itself
        # should not modify the mock
        if _parent is value:
            return False
        _parent = _parent._mock_new_parent
    if new_name:
        value._mock_new_parent = parent
        value._mock_new_name = new_name
    if name:
        value._mock_parent = parent
        value._mock_name = name
    return True
class Base(object):
    # Class-level defaults shared by all mock classes; instances override
    # them through the delegating-property machinery.
    _mock_return_value = DEFAULT
    _mock_side_effect = None
    def __init__(self, *args, **kwargs):
        # Accept and ignore any constructor arguments -- subclasses forward
        # theirs here (see NonCallableMock.__init__'s _safe_super call).
        pass
class NonCallableMock(Base):
"""A non-callable version of `Mock`"""
def __new__(cls, *args, **kw):
# every instance has its own class
# so we can create magic methods on the
# class without stomping on other mocks
new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
instance = object.__new__(new)
return instance
def __init__(
self, spec=None, wraps=None, name=None, spec_set=None,
parent=None, _spec_state=None, _new_name='', _new_parent=None,
**kwargs
):
if _new_parent is None:
_new_parent = parent
__dict__ = self.__dict__
__dict__['_mock_parent'] = parent
__dict__['_mock_name'] = name
__dict__['_mock_new_name'] = _new_name
__dict__['_mock_new_parent'] = _new_parent
if spec_set is not None:
spec = spec_set
spec_set = True
self._mock_add_spec(spec, spec_set)
__dict__['_mock_children'] = {}
__dict__['_mock_wraps'] = wraps
__dict__['_mock_delegate'] = None
__dict__['_mock_called'] = False
__dict__['_mock_call_args'] = None
__dict__['_mock_call_count'] = 0
__dict__['_mock_call_args_list'] = _CallList()
__dict__['_mock_mock_calls'] = _CallList()
__dict__['method_calls'] = _CallList()
if kwargs:
self.configure_mock(**kwargs)
_safe_super(NonCallableMock, self).__init__(
spec, wraps, name, spec_set, parent,
_spec_state
)
def attach_mock(self, mock, attribute):
"""
Attach a mock as an attribute of this one, replacing its name and
parent. Calls to the attached mock will be recorded in the
`method_calls` and `mock_calls` attributes of this one."""
mock._mock_parent = None
mock._mock_new_parent = None
mock._mock_name = ''
mock._mock_new_name = None
setattr(self, attribute, mock)
def mock_add_spec(self, spec, spec_set=False):
"""Add a spec to a mock. `spec` can either be an object or a
list of strings. Only attributes on the `spec` can be fetched as
attributes from the mock.
If `spec_set` is True then only attributes on the spec can be set."""
self._mock_add_spec(spec, spec_set)
def _mock_add_spec(self, spec, spec_set):
_spec_class = None
if spec is not None and not _is_list(spec):
if isinstance(spec, type):
_spec_class = spec
else:
_spec_class = _get_class(spec)
spec = dir(spec)
__dict__ = self.__dict__
__dict__['_spec_class'] = _spec_class
__dict__['_spec_set'] = spec_set
__dict__['_mock_methods'] = spec
    def __get_return_value(self):
        # When a delegate (e.g. an autospec function) is installed, its
        # configured return value takes precedence over our own.
        ret = self._mock_return_value
        if self._mock_delegate is not None:
            ret = self._mock_delegate.return_value

        if ret is DEFAULT:
            # Lazily create a child mock named '()' on first access and
            # cache it via the setter so subsequent reads are stable.
            ret = self._get_child_mock(
                _new_parent=self, _new_name='()'
            )
            self.return_value = ret
        return ret

    def __set_return_value(self, value):
        if self._mock_delegate is not None:
            self._mock_delegate.return_value = value
        else:
            self._mock_return_value = value
            # re-parent the value so calls on it are recorded against us
            _check_and_set_parent(self, value, None, '()')

    __return_value_doc = "The value to be returned when the mock is called."
    return_value = property(__get_return_value, __set_return_value,
                            __return_value_doc)
@property
def __class__(self):
if self._spec_class is None:
return type(self)
return self._spec_class
called = _delegating_property('called')
call_count = _delegating_property('call_count')
call_args = _delegating_property('call_args')
call_args_list = _delegating_property('call_args_list')
mock_calls = _delegating_property('mock_calls')
def __get_side_effect(self):
delegated = self._mock_delegate
if delegated is None:
return self._mock_side_effect
return delegated.side_effect
def __set_side_effect(self, value):
value = _try_iter(value)
delegated = self._mock_delegate
if delegated is None:
self._mock_side_effect = value
else:
delegated.side_effect = value
side_effect = property(__get_side_effect, __set_side_effect)
def reset_mock(self):
"Restore the mock object to its initial state."
self.called = False
self.call_args = None
self.call_count = 0
self.mock_calls = _CallList()
self.call_args_list = _CallList()
self.method_calls = _CallList()
for child in self._mock_children.values():
if isinstance(child, _SpecState):
continue
child.reset_mock()
ret = self._mock_return_value
if _is_instance_mock(ret) and ret is not self:
ret.reset_mock()
def configure_mock(self, **kwargs):
"""Set attributes on the mock through keyword arguments.
Attributes plus return values and side effects can be set on child
mocks using standard dot notation and unpacking a dictionary in the
method call:
>>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
>>> mock.configure_mock(**attrs)"""
for arg, val in sorted(kwargs.items(),
# we sort on the number of dots so that
# attributes are set before we set attributes on
# attributes
key=lambda entry: entry[0].count('.')):
args = arg.split('.')
final = args.pop()
obj = self
for entry in args:
obj = getattr(obj, entry)
setattr(obj, final, val)
    def __getattr__(self, name):
        # Guard against infinite recursion during unpickling/copying,
        # before _mock_methods has been set.
        if name == '_mock_methods':
            raise AttributeError(name)
        elif self._mock_methods is not None:
            # a spec is installed: only spec'd, non-magic names exist
            if name not in self._mock_methods or name in _all_magics:
                raise AttributeError("Mock object has no attribute %r" % name)
        elif _is_magic(name):
            # magic methods are never auto-created via __getattr__
            raise AttributeError(name)

        result = self._mock_children.get(name)
        if result is _deleted:
            # attribute was explicitly del'ed; keep it deleted
            raise AttributeError(name)
        elif result is None:
            # first access: create and cache a child mock
            wraps = None
            if self._mock_wraps is not None:
                # XXXX should we get the attribute without triggering code
                # execution?
                wraps = getattr(self._mock_wraps, name)

            result = self._get_child_mock(
                parent=self, name=name, wraps=wraps, _new_name=name,
                _new_parent=self
            )
            self._mock_children[name] = result

        elif isinstance(result, _SpecState):
            # lazy autospec placeholder: realise it now and cache the
            # real autospecced mock in its place
            result = create_autospec(
                result.spec, result.spec_set, result.instance,
                result.parent, result.name
            )
            self._mock_children[name] = result

        return result
    def __repr__(self):
        # Build the dotted name by walking up the _mock_new_parent chain,
        # collecting each ancestor's name. A '()' segment means "the
        # return value of", which suppresses the joining dot.
        _name_list = [self._mock_new_name]
        _parent = self._mock_new_parent
        last = self

        dot = '.'
        if _name_list == ['()']:
            dot = ''
        seen = set()
        while _parent is not None:
            last = _parent

            _name_list.append(_parent._mock_new_name + dot)
            dot = '.'
            if _parent._mock_new_name == '()':
                dot = ''

            _parent = _parent._mock_new_parent

            # use ids here so as not to call __hash__ on the mocks
            if id(_parent) in seen:
                break
            seen.add(id(_parent))

        _name_list = list(reversed(_name_list))
        # the root mock's own name (or 'mock') prefixes the chain
        _first = last._mock_name or 'mock'
        if len(_name_list) > 1:
            if _name_list[1] not in ('()', '().'):
                _first += '.'
        _name_list[0] = _first
        name = ''.join(_name_list)

        # omit the name= part for an anonymous root mock
        name_string = ''
        if name not in ('mock', 'mock.'):
            name_string = ' name=%r' % name

        spec_string = ''
        if self._spec_class is not None:
            spec_string = ' spec=%r'
            if self._spec_set:
                spec_string = ' spec_set=%r'
            spec_string = spec_string % self._spec_class.__name__
        return "<%s%s%s id='%s'>" % (
            type(self).__name__,
            name_string,
            spec_string,
            id(self)
        )
def __dir__(self):
"""Filter the output of `dir(mock)` to only useful members."""
if not FILTER_DIR:
return object.__dir__(self)
extras = self._mock_methods or []
from_type = dir(type(self))
from_dict = list(self.__dict__)
from_type = [e for e in from_type if not e.startswith('_')]
from_dict = [e for e in from_dict if not e.startswith('_') or
_is_magic(e)]
return sorted(set(extras + from_type + from_dict +
list(self._mock_children)))
    def __setattr__(self, name, value):
        if name in _allowed_names:
            # property setters go through here
            return object.__setattr__(self, name, value)
        elif (self._spec_set and self._mock_methods is not None and
              name not in self._mock_methods and
              name not in self.__dict__):
            # spec_set forbids creating attributes not on the spec
            raise AttributeError("Mock object has no attribute '%s'" % name)
        elif name in _unsupported_magics:
            msg = 'Attempting to set unsupported magic method %r.' % name
            raise AttributeError(msg)
        elif name in _all_magics:
            if self._mock_methods is not None and name not in self._mock_methods:
                raise AttributeError("Mock object has no attribute '%s'" % name)

            if not _is_instance_mock(value):
                # plain callables must be wrapped as real functions on the
                # type, because magic methods are looked up on the class
                setattr(type(self), name, _get_method(name, value))
                original = value
                value = lambda *args, **kw: original(self, *args, **kw)
            else:
                # only set _new_name and not name so that mock_calls is tracked
                # but not method calls
                _check_and_set_parent(self, value, None, name)
                setattr(type(self), name, value)
                self._mock_children[name] = value
        elif name == '__class__':
            self._spec_class = value
            return
        else:
            if _check_and_set_parent(self, value, name, name):
                self._mock_children[name] = value
        # fall through: also store the attribute on the instance itself
        return object.__setattr__(self, name, value)
    def __delattr__(self, name):
        # magic methods live on the type (see __setattr__), so remove the
        # class-level entry too
        if name in _all_magics and name in type(self).__dict__:
            delattr(type(self), name)
            if name not in self.__dict__:
                # for magic methods that are still MagicProxy objects and
                # not set on the instance itself
                return

        if name in self.__dict__:
            object.__delattr__(self, name)

        obj = self._mock_children.get(name, _missing)
        if obj is _deleted:
            # already deleted once; a second delete is an error
            raise AttributeError(name)
        if obj is not _missing:
            del self._mock_children[name]
        # tombstone so __getattr__ won't silently re-create the child
        self._mock_children[name] = _deleted
def _format_mock_call_signature(self, args, kwargs):
name = self._mock_name or 'mock'
return _format_call_signature(name, args, kwargs)
def _format_mock_failure_message(self, args, kwargs):
message = 'Expected call: %s\nActual call: %s'
expected_string = self._format_mock_call_signature(args, kwargs)
call_args = self.call_args
if len(call_args) == 3:
call_args = call_args[1:]
actual_string = self._format_mock_call_signature(*call_args)
return message % (expected_string, actual_string)
def assert_called_with(_mock_self, *args, **kwargs):
"""assert that the mock was called with the specified arguments.
Raises an AssertionError if the args and keyword args passed in are
different to the last call to the mock."""
self = _mock_self
if self.call_args is None:
expected = self._format_mock_call_signature(args, kwargs)
raise AssertionError('Expected call: %s\nNot called' % (expected,))
if self.call_args != (args, kwargs):
msg = self._format_mock_failure_message(args, kwargs)
raise AssertionError(msg)
def assert_called_once_with(_mock_self, *args, **kwargs):
"""assert that the mock was called exactly once and with the specified
arguments."""
self = _mock_self
if not self.call_count == 1:
msg = ("Expected '%s' to be called once. Called %s times." %
(self._mock_name or 'mock', self.call_count))
raise AssertionError(msg)
return self.assert_called_with(*args, **kwargs)
def assert_has_calls(self, calls, any_order=False):
"""assert the mock has been called with the specified calls.
The `mock_calls` list is checked for the calls.
If `any_order` is False (the default) then the calls must be
sequential. There can be extra calls before or after the
specified calls.
If `any_order` is True then the calls can be in any order, but
they must all appear in `mock_calls`."""
if not any_order:
if calls not in self.mock_calls:
raise AssertionError(
'Calls not found.\nExpected: %r\n'
'Actual: %r' % (calls, self.mock_calls)
)
return
all_calls = list(self.mock_calls)
not_found = []
for kall in calls:
try:
all_calls.remove(kall)
except ValueError:
not_found.append(kall)
if not_found:
raise AssertionError(
'%r not all found in call list' % (tuple(not_found),)
)
def assert_any_call(self, *args, **kwargs):
"""assert the mock has been called with the specified arguments.
The assert passes if the mock has *ever* been called, unlike
`assert_called_with` and `assert_called_once_with` that only pass if
the call is the most recent one."""
kall = call(*args, **kwargs)
if kall not in self.call_args_list:
expected_string = self._format_mock_call_signature(args, kwargs)
raise AssertionError(
'%s call not found' % expected_string
)
def _get_child_mock(self, **kw):
"""Create the child mocks for attributes and return value.
By default child mocks will be the same type as the parent.
Subclasses of Mock may want to override this to customize the way
child mocks are made.
For non-callable mocks the callable variant will be used (rather than
any custom subclass)."""
_type = type(self)
if not issubclass(_type, CallableMixin):
if issubclass(_type, NonCallableMagicMock):
klass = MagicMock
elif issubclass(_type, NonCallableMock) :
klass = Mock
else:
klass = _type.__mro__[1]
return klass(**kw)
def _try_iter(obj):
    # Values that must not be converted pass straight through: None,
    # exceptions (raised later) and callables (invoked later).
    if obj is None or _is_exception(obj) or _callable(obj):
        return obj
    try:
        return iter(obj)
    except TypeError:
        # XXXX backwards compatibility
        # but this will blow up on first call - so maybe we should fail early?
        return obj
class CallableMixin(Base):
    """Mixin adding __call__ support: call recording, side effects,
    return values and wrapping. Combined with NonCallableMock to build
    the callable Mock classes."""

    def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
                 wraps=None, name=None, spec_set=None, parent=None,
                 _spec_state=None, _new_name='', _new_parent=None, **kwargs):
        # write via __dict__ to bypass NonCallableMock.__setattr__
        self.__dict__['_mock_return_value'] = return_value

        _safe_super(CallableMixin, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state, _new_name, _new_parent, **kwargs
        )

        # assigned through the property so iterables are normalised
        self.side_effect = side_effect

    def _mock_check_sig(self, *args, **kwargs):
        # stub method that can be replaced with one with a specific signature
        pass

    def __call__(_mock_self, *args, **kwargs):
        # can't use self in-case a function / method we are mocking uses self
        # in the signature
        _mock_self._mock_check_sig(*args, **kwargs)
        return _mock_self._mock_call(*args, **kwargs)

    def _mock_call(_mock_self, *args, **kwargs):
        self = _mock_self
        # record the call locally
        self.called = True
        self.call_count += 1
        self.call_args = _Call((args, kwargs), two=True)
        self.call_args_list.append(_Call((args, kwargs), two=True))

        _new_name = self._mock_new_name
        _new_parent = self._mock_new_parent
        self.mock_calls.append(_Call(('', args, kwargs)))

        # propagate the call up the parent chain, building the dotted
        # name as we go; a '()' segment suppresses the joining dot
        seen = set()
        skip_next_dot = _new_name == '()'
        do_method_calls = self._mock_parent is not None
        name = self._mock_name
        while _new_parent is not None:
            this_mock_call = _Call((_new_name, args, kwargs))
            if _new_parent._mock_new_name:
                dot = '.'
                if skip_next_dot:
                    dot = ''

                skip_next_dot = False
                if _new_parent._mock_new_name == '()':
                    skip_next_dot = True

                _new_name = _new_parent._mock_new_name + dot + _new_name

            if do_method_calls:
                # method_calls only records attribute (not '()') chains
                if _new_name == name:
                    this_method_call = this_mock_call
                else:
                    this_method_call = _Call((name, args, kwargs))
                _new_parent.method_calls.append(this_method_call)

                do_method_calls = _new_parent._mock_parent is not None
                if do_method_calls:
                    name = _new_parent._mock_name + '.' + name

            _new_parent.mock_calls.append(this_mock_call)
            _new_parent = _new_parent._mock_new_parent

            # use ids here so as not to call __hash__ on the mocks
            _new_parent_id = id(_new_parent)
            if _new_parent_id in seen:
                break
            seen.add(_new_parent_id)

        ret_val = DEFAULT
        effect = self.side_effect
        if effect is not None:
            if _is_exception(effect):
                raise effect

            if not _callable(effect):
                # iterable side effect: each call consumes the next item
                result = next(effect)
                if _is_exception(result):
                    raise result
                if result is DEFAULT:
                    result = self.return_value
                return result

            ret_val = effect(*args, **kwargs)
            if ret_val is DEFAULT:
                ret_val = self.return_value

        # wrapped object is only consulted when no explicit return value
        # has been configured
        if (self._mock_wraps is not None and
            self._mock_return_value is DEFAULT):
            return self._mock_wraps(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        return ret_val
class Mock(CallableMixin, NonCallableMock):
    """
    Create a new `Mock` object. `Mock` takes several optional arguments
    that specify the behaviour of the Mock object:

    * `spec`: This can be either a list of strings or an existing object (a
      class or instance) that acts as the specification for the mock object. If
      you pass in an object then a list of strings is formed by calling dir on
      the object (excluding unsupported magic attributes and methods). Accessing
      any attribute not in this list will raise an `AttributeError`.

      If `spec` is an object (rather than a list of strings) then
      `mock.__class__` returns the class of the spec object. This allows mocks
      to pass `isinstance` tests.

    * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
      or get an attribute on the mock that isn't on the object passed as
      `spec_set` will raise an `AttributeError`.

    * `side_effect`: A function to be called whenever the Mock is called. See
      the `side_effect` attribute. Useful for raising exceptions or
      dynamically changing return values. The function is called with the same
      arguments as the mock, and unless it returns `DEFAULT`, the return
      value of this function is used as the return value.

      If `side_effect` is an iterable then each call to the mock will return
      the next value from the iterable. If any of the members of the iterable
      are exceptions they will be raised instead of returned.

    * `return_value`: The value returned when the mock is called. By default
      this is a new Mock (created on first access). See the
      `return_value` attribute.

    * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
      calling the Mock will pass the call through to the wrapped object
      (returning the real result). Attribute access on the mock will return a
      Mock object that wraps the corresponding attribute of the wrapped object
      (so attempting to access an attribute that doesn't exist will raise an
      `AttributeError`).

      If the mock has an explicit `return_value` set then calls are not passed
      to the wrapped object and the `return_value` is returned instead.

    * `name`: If the mock has a name then it will be used in the repr of the
      mock. This can be useful for debugging. The name is propagated to child
      mocks.

    Mocks can also be called with arbitrary keyword arguments. These will be
    used to set attributes on the mock after it is created.
    """
def _dot_lookup(thing, comp, import_path):
try:
return getattr(thing, comp)
except AttributeError:
__import__(import_path)
return getattr(thing, comp)
def _importer(target):
    """Import and return the object named by the dotted path `target`."""
    parts = target.split('.')
    current_path = parts[0]
    thing = __import__(current_path)
    # walk the remaining components, importing submodules as needed
    for part in parts[1:]:
        current_path = "%s.%s" % (current_path, part)
        thing = _dot_lookup(thing, part, current_path)
    return thing
def _is_started(patcher):
    # XXXX horrible
    # __enter__ sets `is_local` on the patcher, so the attribute's mere
    # presence is used as the "has been started" flag.
    try:
        patcher.is_local
    except AttributeError:
        return False
    return True
class _patch(object):
    """Patcher object returned by `patch` / `patch.object` /
    `patch.multiple`. Usable as a decorator, class decorator or context
    manager; `start`/`stop` drive it manually."""

    # set by patch.multiple: the keyword name the created mock is passed as
    attribute_name = None
    # every started patcher registers here so patch.stopall can find it
    _active_patches = set()

    def __init__(
            self, getter, attribute, new, spec, create,
            spec_set, autospec, new_callable, kwargs
    ):
        if new_callable is not None:
            # new_callable is mutually exclusive with new and autospec
            if new is not DEFAULT:
                raise ValueError(
                    "Cannot use 'new' and 'new_callable' together"
                )
            if autospec is not None:
                raise ValueError(
                    "Cannot use 'autospec' and 'new_callable' together"
                )

        self.getter = getter
        self.attribute = attribute
        self.new = new
        self.new_callable = new_callable
        self.spec = spec
        self.create = create
        self.has_local = False
        self.spec_set = spec_set
        self.autospec = autospec
        self.kwargs = kwargs
        self.additional_patchers = []

    def copy(self):
        # Fresh patcher with the same configuration -- needed because a
        # single patcher instance can't be shared across decorated methods.
        patcher = _patch(
            self.getter, self.attribute, self.new, self.spec,
            self.create, self.spec_set,
            self.autospec, self.new_callable, self.kwargs
        )
        patcher.attribute_name = self.attribute_name
        patcher.additional_patchers = [
            p.copy() for p in self.additional_patchers
        ]
        return patcher

    def __call__(self, func):
        if isinstance(func, type):
            return self.decorate_class(func)
        return self.decorate_callable(func)

    def decorate_class(self, klass):
        # wrap every callable test method (patch.TEST_PREFIX) with a copy
        for attr in dir(klass):
            if not attr.startswith(patch.TEST_PREFIX):
                continue

            attr_value = getattr(klass, attr)
            if not hasattr(attr_value, "__call__"):
                continue

            patcher = self.copy()
            setattr(klass, attr, patcher(attr_value))
        return klass

    def decorate_callable(self, func):
        # stacking several @patch decorators accumulates onto one wrapper
        if hasattr(func, 'patchings'):
            func.patchings.append(self)
            return func

        @wraps(func)
        def patched(*args, **keywargs):
            extra_args = []
            entered_patchers = []

            # empty exc_info means __exit__ is called with no exception
            exc_info = tuple()
            try:
                for patching in patched.patchings:
                    arg = patching.__enter__()
                    entered_patchers.append(patching)
                    if patching.attribute_name is not None:
                        # patch.multiple passes its mocks by keyword
                        keywargs.update(arg)
                    elif patching.new is DEFAULT:
                        # auto-created mocks are passed positionally
                        extra_args.append(arg)

                args += tuple(extra_args)
                return func(*args, **keywargs)
            except:
                if (patching not in entered_patchers and
                    _is_started(patching)):
                    # the patcher may have been started, but an exception
                    # raised whilst entering one of its additional_patchers
                    entered_patchers.append(patching)
                # Pass the exception to __exit__
                exc_info = sys.exc_info()
                # re-raise the exception
                raise
            finally:
                for patching in reversed(entered_patchers):
                    patching.__exit__(*exc_info)

        patched.patchings = [self]
        return patched

    def get_original(self):
        """Return (original value, was-it-defined-locally-on-target)."""
        target = self.getter()
        name = self.attribute

        original = DEFAULT
        local = False

        try:
            original = target.__dict__[name]
        except (AttributeError, KeyError):
            # not in the instance/class dict: may be inherited
            original = getattr(target, name, DEFAULT)
        else:
            local = True

        if not self.create and original is DEFAULT:
            raise AttributeError(
                "%s does not have the attribute %r" % (target, name)
            )
        return original, local

    def __enter__(self):
        """Perform the patch."""
        new, spec, spec_set = self.new, self.spec, self.spec_set
        autospec, kwargs = self.autospec, self.kwargs
        new_callable = self.new_callable
        self.target = self.getter()

        # normalise False to None
        if spec is False:
            spec = None
        if spec_set is False:
            spec_set = None
        if autospec is False:
            autospec = None

        if spec is not None and autospec is not None:
            raise TypeError("Can't specify spec and autospec")
        if ((spec is not None or autospec is not None) and
            spec_set not in (True, None)):
            raise TypeError("Can't provide explicit spec_set *and* spec or autospec")

        original, local = self.get_original()

        if new is DEFAULT and autospec is None:
            # we must create the replacement mock ourselves
            inherit = False
            if spec is True:
                # set spec to the object we are replacing
                spec = original
                if spec_set is True:
                    spec_set = original
                    spec = None
            elif spec is not None:
                if spec_set is True:
                    spec_set = spec
                    spec = None
            elif spec_set is True:
                spec_set = original

            if spec is not None or spec_set is not None:
                if original is DEFAULT:
                    raise TypeError("Can't use 'spec' with create=True")
                if isinstance(original, type):
                    # If we're patching out a class and there is a spec
                    inherit = True

            Klass = MagicMock
            _kwargs = {}
            if new_callable is not None:
                Klass = new_callable
            elif spec is not None or spec_set is not None:
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if _is_list(this_spec):
                    not_callable = '__call__' not in this_spec
                else:
                    not_callable = not callable(this_spec)
                if not_callable:
                    Klass = NonCallableMagicMock

            if spec is not None:
                _kwargs['spec'] = spec
            if spec_set is not None:
                _kwargs['spec_set'] = spec_set

            # add a name to mocks
            if (isinstance(Klass, type) and
                issubclass(Klass, NonCallableMock) and self.attribute):
                _kwargs['name'] = self.attribute

            _kwargs.update(kwargs)
            new = Klass(**_kwargs)

            if inherit and _is_instance_mock(new):
                # we can only tell if the instance should be callable if the
                # spec is not a list
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if (not _is_list(this_spec) and not
                    _instance_callable(this_spec)):
                    Klass = NonCallableMagicMock

                _kwargs.pop('name')
                new.return_value = Klass(_new_parent=new, _new_name='()',
                                         **_kwargs)
        elif autospec is not None:
            # spec is ignored, new *must* be default, spec_set is treated
            # as a boolean. Should we check spec is not None and that spec_set
            # is a bool?
            if new is not DEFAULT:
                raise TypeError(
                    "autospec creates the mock for you. Can't specify "
                    "autospec and new."
                )
            if original is DEFAULT:
                raise TypeError("Can't use 'autospec' with create=True")
            spec_set = bool(spec_set)
            if autospec is True:
                autospec = original

            new = create_autospec(autospec, spec_set=spec_set,
                                  _name=self.attribute, **kwargs)
        elif kwargs:
            # can't set keyword args when we aren't creating the mock
            # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
            raise TypeError("Can't pass kwargs to a mock we aren't creating")

        new_attr = new

        # stash state needed by __exit__, then install the replacement
        self.temp_original = original
        self.is_local = local
        setattr(self.target, self.attribute, new_attr)
        if self.attribute_name is not None:
            # patch.multiple: enter the chained patchers and collect the
            # keyword arguments to pass to the decorated function
            extra_args = {}
            if self.new is DEFAULT:
                extra_args[self.attribute_name] = new
            for patching in self.additional_patchers:
                arg = patching.__enter__()
                if patching.new is DEFAULT:
                    extra_args.update(arg)
            return extra_args

        return new

    def __exit__(self, *exc_info):
        """Undo the patch."""
        if not _is_started(self):
            raise RuntimeError('stop called on unstarted patcher')

        if self.is_local and self.temp_original is not DEFAULT:
            setattr(self.target, self.attribute, self.temp_original)
        else:
            # attribute was inherited (or created): remove our override
            delattr(self.target, self.attribute)
            if not self.create and not hasattr(self.target, self.attribute):
                # needed for proxy objects like django settings
                setattr(self.target, self.attribute, self.temp_original)

        del self.temp_original
        del self.is_local
        del self.target
        for patcher in reversed(self.additional_patchers):
            if _is_started(patcher):
                patcher.__exit__(*exc_info)

    def start(self):
        """Activate a patch, returning any created mock."""
        result = self.__enter__()
        self._active_patches.add(self)
        return result

    def stop(self):
        """Stop an active patch."""
        self._active_patches.discard(self)
        return self.__exit__()
def _get_target(target):
    """Split a 'package.module.attribute' string into an importer callable
    for the module part and the attribute name."""
    try:
        module_path, attribute = target.rsplit('.', 1)
    except (TypeError, ValueError):
        raise TypeError("Need a valid target to patch. You supplied: %r" %
                        (target,))
    # import lazily: the target is resolved when the patch is entered
    return lambda: _importer(module_path), attribute
def _patch_object(
        target, attribute, new=DEFAULT, spec=None,
        create=False, spec_set=None, autospec=None,
        new_callable=None, **kwargs
    ):
    """
    patch the named member (`attribute`) on an object (`target`) with a mock
    object.

    `patch.object` can be used as a decorator, class decorator or a context
    manager. Arguments `new`, `spec`, `create`, `spec_set`,
    `autospec` and `new_callable` have the same meaning as for `patch`. Like
    `patch`, `patch.object` takes arbitrary keyword arguments for configuring
    the mock object it creates.

    When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    # the target object is already in hand, so the getter just returns it
    return _patch(
        lambda: target, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
def _patch_multiple(target, spec=None, create=False, spec_set=None,
                    autospec=None, new_callable=None, **kwargs):
    """Perform multiple patches in a single call. It takes the object to be
    patched (either as an object or a string to fetch the object by importing)
    and keyword arguments for the patches::

        with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
            ...

    Use `DEFAULT` as the value if you want `patch.multiple` to create
    mocks for you. In this case the created mocks are passed into a decorated
    function by keyword, and a dictionary is returned when `patch.multiple` is
    used as a context manager.

    `patch.multiple` can be used as a decorator, class decorator or a context
    manager. The arguments `spec`, `spec_set`, `create`,
    `autospec` and `new_callable` have the same meaning as for `patch`. These
    arguments will be applied to *all* patches done by `patch.multiple`.

    When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    if type(target) is str:
        getter = lambda: _importer(target)
    else:
        getter = lambda: target

    if not kwargs:
        raise ValueError(
            'Must supply at least one keyword argument with patch.multiple'
        )

    # need to wrap in a list for python 3, where items is a view
    items = list(kwargs.items())

    # build one patcher per attribute; the first acts as the master and
    # drives the rest through additional_patchers
    patchers = []
    for attribute, new in items:
        one_patcher = _patch(
            getter, attribute, new, spec, create, spec_set,
            autospec, new_callable, {}
        )
        one_patcher.attribute_name = attribute
        patchers.append(one_patcher)

    main_patcher = patchers[0]
    main_patcher.additional_patchers.extend(patchers[1:])
    return main_patcher
def patch(
        target, new=DEFAULT, spec=None, create=False,
        spec_set=None, autospec=None, new_callable=None, **kwargs
    ):
    """
    `patch` acts as a function decorator, class decorator or a context
    manager. Inside the body of the function or with statement, the `target`
    is patched with a `new` object. When the function/with statement exits
    the patch is undone.

    If `new` is omitted, then the target is replaced with a
    `MagicMock`. If `patch` is used as a decorator and `new` is
    omitted, the created mock is passed in as an extra argument to the
    decorated function. If `patch` is used as a context manager the created
    mock is returned by the context manager.

    `target` should be a string in the form `'package.module.ClassName'`. The
    `target` is imported and the specified object replaced with the `new`
    object, so the `target` must be importable from the environment you are
    calling `patch` from. The target is imported when the decorated function
    is executed, not at decoration time.

    The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
    if patch is creating one for you.

    In addition you can pass `spec=True` or `spec_set=True`, which causes
    patch to pass in the object being mocked as the spec/spec_set object.

    `new_callable` allows you to specify a different class, or callable object,
    that will be called to create the `new` object. By default `MagicMock` is
    used.

    A more powerful form of `spec` is `autospec`. If you set `autospec=True`
    then the mock with be created with a spec from the object being replaced.
    All attributes of the mock will also have the spec of the corresponding
    attribute of the object being replaced. Methods and functions being
    mocked will have their arguments checked and will raise a `TypeError` if
    they are called with the wrong signature. For mocks replacing a class,
    their return value (the 'instance') will have the same spec as the class.

    Instead of `autospec=True` you can pass `autospec=some_object` to use an
    arbitrary object as the spec instead of the one being replaced.

    By default `patch` will fail to replace attributes that don't exist. If
    you pass in `create=True`, and the attribute doesn't exist, patch will
    create the attribute for you when the patched function is called, and
    delete it again afterwards. This is useful for writing tests against
    attributes that your production code creates at runtime. It is off by
    default because it can be dangerous. With it switched on you can write
    passing tests against APIs that don't actually exist!

    Patch can be used as a `TestCase` class decorator. It works by
    decorating each test method in the class. This reduces the boilerplate
    code when your test methods share a common patchings set. `patch` finds
    tests by looking for method names that start with `patch.TEST_PREFIX`.
    By default this is `test`, which matches the way `unittest` finds tests.
    You can specify an alternative prefix by setting `patch.TEST_PREFIX`.

    Patch can be used as a context manager, with the with statement. Here the
    patching applies to the indented block after the with statement. If you
    use "as" then the patched object will be bound to the name after the
    "as"; very useful if `patch` is creating a mock object for you.

    `patch` takes arbitrary keyword arguments. These will be passed to
    the `Mock` (or `new_callable`) on construction.

    `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
    available for alternate use-cases.
    """
    # resolve the dotted path into a lazy importer plus attribute name,
    # then delegate all the real work to the _patch object
    getter, attribute = _get_target(target)
    return _patch(
        getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
class _patch_dict(object):
    """
    Patch a dictionary, or dictionary like object, and restore the dictionary
    to its original state after the test.

    `in_dict` can be a dictionary or a mapping like container. If it is a
    mapping then it must at least support getting, setting and deleting items
    plus iterating over keys.

    `in_dict` can also be a string specifying the name of the dictionary, which
    will then be fetched by importing it.

    `values` can be a dictionary of values to set in the dictionary. `values`
    can also be an iterable of `(key, value)` pairs.

    If `clear` is True then the dictionary will be cleared before the new
    values are set.

    `patch.dict` can also be called with arbitrary keyword arguments to set
    values in the dictionary::

        with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
            ...

    `patch.dict` can be used as a context manager, decorator or class
    decorator. When used as a class decorator `patch.dict` honours
    `patch.TEST_PREFIX` for choosing which methods to wrap.
    """

    def __init__(self, in_dict, values=(), clear=False, **kwargs):
        # a string names the dict; fetch it by importing
        if isinstance(in_dict, str):
            in_dict = _importer(in_dict)
        self.in_dict = in_dict
        # support any argument supported by dict(...) constructor
        self.values = dict(values)
        self.values.update(kwargs)
        self.clear = clear
        self._original = None

    def __call__(self, f):
        if isinstance(f, type):
            return self.decorate_class(f)
        @wraps(f)
        def _inner(*args, **kw):
            self._patch_dict()
            try:
                return f(*args, **kw)
            finally:
                # always restore, even if the wrapped function raises
                self._unpatch_dict()

        return _inner

    def decorate_class(self, klass):
        # wrap every callable test method with a fresh patcher
        for attr in dir(klass):
            attr_value = getattr(klass, attr)
            if (attr.startswith(patch.TEST_PREFIX) and
                 hasattr(attr_value, "__call__")):
                decorator = _patch_dict(self.in_dict, self.values, self.clear)
                decorated = decorator(attr_value)
                setattr(klass, attr, decorated)
        return klass

    def __enter__(self):
        """Patch the dict."""
        self._patch_dict()

    def _patch_dict(self):
        values = self.values
        in_dict = self.in_dict
        clear = self.clear

        # snapshot the current contents so __exit__ can restore them
        try:
            original = in_dict.copy()
        except AttributeError:
            # dict like object with no copy method
            # must support iteration over keys
            original = {}
            for key in in_dict:
                original[key] = in_dict[key]
        self._original = original

        if clear:
            _clear_dict(in_dict)

        try:
            in_dict.update(values)
        except AttributeError:
            # dict like object with no update method
            for key in values:
                in_dict[key] = values[key]

    def _unpatch_dict(self):
        in_dict = self.in_dict
        original = self._original

        # wipe everything, then re-populate from the snapshot
        _clear_dict(in_dict)

        try:
            in_dict.update(original)
        except AttributeError:
            for key in original:
                in_dict[key] = original[key]

    def __exit__(self, *args):
        """Unpatch the dict."""
        self._unpatch_dict()
        return False

    # manual start/stop API mirrors the context-manager protocol
    start = __enter__
    stop = __exit__
def _clear_dict(in_dict):
    """Remove every key from `in_dict`, tolerating mapping-like objects
    that have no clear() method (they must support iteration and item
    deletion instead)."""
    try:
        in_dict.clear()
    except AttributeError:
        # snapshot the keys first: deleting while iterating is unsafe
        for key in list(in_dict):
            del in_dict[key]
def _patch_stopall():
    """Stop all active patches."""
    # iterate over a snapshot: stop() discards entries from the set
    for active_patcher in list(_patch._active_patches):
        active_patcher.stop()
# Expose the alternate patcher forms as attributes of patch itself.
patch.object = _patch_object
patch.dict = _patch_dict
patch.multiple = _patch_multiple
patch.stopall = _patch_stopall
# Prefix used by the class-decorator forms to find methods to wrap.
patch.TEST_PREFIX = 'test'
# Space-separated names (without the __dunder__ underscores) of the magic
# methods that MagicMock configures by default.
magic_methods = (
    "lt le gt ge eq ne "
    "getitem setitem delitem "
    "len contains iter "
    "hash str sizeof "
    "enter exit "
    "divmod neg pos abs invert "
    "complex int float index "
    "trunc floor ceil "
    "bool next "
)

# numeric operators, plus their in-place (i*) and reflected (r*) variants
numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
inplace = ' '.join('i%s' % n for n in numerics.split())
right = ' '.join('r%s' % n for n in numerics.split())

# not including __prepare__, __instancecheck__, __subclasscheck__
# (as they are metaclass methods)
# __del__ is not supported at all as it causes problems if it exists

# Magics that exist but are not configured by default; users may still
# set them explicitly on a mock.
_non_defaults = set('__%s__' % method for method in [
    'get', 'set', 'delete', 'reversed', 'missing', 'reduce', 'reduce_ex',
    'getinitargs', 'getnewargs', 'getstate', 'setstate', 'getformat',
    'setformat', 'repr', 'dir', 'subclasses', 'format',
])
def _get_method(name, func):
"Turns a callable object (like a mock) into a real function"
def method(self, *args, **kw):
return func(self, *args, **kw)
method.__name__ = name
return method
# Every magic method that gets a default MagicMock implementation.
_magics = set(
    '__%s__' % method for method in
    ' '.join([magic_methods, numerics, inplace, right]).split()
)
# Everything that may be configured as a magic method on a mock.
_all_magics = _magics | _non_defaults
_unsupported_magics = set([
'__getattr__', '__setattr__',
'__init__', '__new__', '__prepare__'
'__instancecheck__', '__subclasscheck__',
'__del__'
])
# Magic methods whose default return value is computed from the mock itself.
_calculate_return_value = {
    '__hash__': lambda self: object.__hash__(self),
    '__str__': lambda self: object.__str__(self),
    '__sizeof__': lambda self: object.__sizeof__(self),
}

# Fixed default return values for magic methods that have a sensible constant.
_return_values = {
    '__lt__': NotImplemented,
    '__gt__': NotImplemented,
    '__le__': NotImplemented,
    '__ge__': NotImplemented,
    '__int__': 1,
    '__contains__': False,
    '__len__': 0,
    '__exit__': False,
    '__complex__': 1j,
    '__float__': 1.0,
    '__bool__': True,
    '__index__': 1,
}
def _get_eq(self):
    """Build the default ``__eq__`` side effect for a mock.

    Honors an explicitly configured return value; otherwise falls back to
    identity comparison.
    """
    def __eq__(other):
        configured = self.__eq__._mock_return_value
        if configured is not DEFAULT:
            return configured
        return self is other
    return __eq__
def _get_ne(self):
    # Default __ne__ side effect for a mock.
    # NOTE(review): when a return value has been configured this returns the
    # DEFAULT sentinel rather than the value itself; this appears to signal
    # mock's call machinery (which installs these via `method.side_effect =
    # side_effector(mock)`) to fall back to the configured return_value --
    # confirm against the side_effect handling in NonCallableMock before
    # "fixing" it.
    def __ne__(other):
        if self.__ne__._mock_return_value is not DEFAULT:
            return DEFAULT
        return self is not other
    return __ne__
def _get_iter(self):
    """Build the default ``__iter__`` side effect: iterate the configured
    return value, or an empty sequence when none is configured."""
    def __iter__():
        configured = self.__iter__._mock_return_value
        if configured is DEFAULT:
            return iter([])
        # if configured was already an iterator, then calling iter on it
        # should return the iterator unchanged
        return iter(configured)
    return __iter__
# Magic methods whose default behaviour is implemented as a side effect,
# built lazily per mock by the factory functions above.
_side_effect_methods = {
    '__eq__': _get_eq,
    '__ne__': _get_ne,
    '__iter__': _get_iter,
}
def _set_return_value(mock, method, name):
    """Give the magic-method mock *method* its default return value or
    side effect, based on the method's *name*."""
    # 1) Methods with a fixed constant default.
    fixed = _return_values.get(name, DEFAULT)
    if fixed is not DEFAULT:
        method.return_value = fixed
        return
    # 2) Methods whose default is computed from the mock itself.
    return_calculator = _calculate_return_value.get(name)
    if return_calculator is not None:
        try:
            return_value = return_calculator(mock)
        except AttributeError:
            # XXXX why do we return AttributeError here?
            # set it as a side_effect instead?
            return_value = AttributeError(name)
        method.return_value = return_value
        return
    # 3) Methods implemented as a side effect (__eq__/__ne__/__iter__).
    side_effector = _side_effect_methods.get(name)
    if side_effector is not None:
        method.side_effect = side_effector(mock)
class MagicMixin(object):
    """Mixin that installs default magic-method support onto a mock class."""

    def __init__(self, *args, **kw):
        _safe_super(MagicMixin, self).__init__(*args, **kw)
        self._mock_set_magics()

    def _mock_set_magics(self):
        """Attach ``MagicProxy`` placeholders for the supported magic methods.

        When the mock is spec'd, only magic methods present on the spec are
        installed; any previously installed ones outside the spec are removed
        (this can run more than once, e.g. after ``mock_add_spec``).
        """
        these_magics = _magics
        if self._mock_methods is not None:
            these_magics = _magics.intersection(self._mock_methods)
        # FIX: dropped a dead store (`remove_magics = set()`) that was
        # immediately overwritten by the next assignment.
        remove_magics = _magics - these_magics
        for entry in remove_magics:
            if entry in type(self).__dict__:
                # remove unneeded magic methods
                delattr(self, entry)
        # don't overwrite existing attributes if called a second time
        these_magics = these_magics - set(type(self).__dict__)
        _type = type(self)
        for entry in these_magics:
            setattr(_type, entry, MagicProxy(entry, self))
class NonCallableMagicMock(MagicMixin, NonCallableMock):
    """A version of `MagicMock` that isn't callable."""
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.

        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
        # Re-sync the installed magic methods with the new spec.
        self._mock_set_magics()
class MagicMock(MagicMixin, Mock):
    """
    MagicMock is a subclass of Mock with default implementations
    of most of the magic methods. You can use MagicMock without having to
    configure the magic methods yourself.

    If you use the `spec` or `spec_set` arguments then *only* magic
    methods that exist in the spec will be created.

    Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
    """
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.

        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
        # Re-sync the installed magic methods with the new spec.
        self._mock_set_magics()
class MagicProxy(object):
    """Class-level placeholder for a magic method that materializes a real
    child mock (and caches it on the parent) the first time it is used."""

    def __init__(self, name, parent):
        self.name = name
        self.parent = parent

    def create_mock(self):
        """Create the child mock for this magic method, attach it to the
        parent (replacing this proxy for future lookups) and give it its
        default return value / side effect."""
        entry = self.name
        parent = self.parent
        child = parent._get_child_mock(name=entry, _new_name=entry,
                                       _new_parent=parent)
        setattr(parent, entry, child)
        _set_return_value(parent, child, entry)
        return child

    def __call__(self, *args, **kwargs):
        return self.create_mock()(*args, **kwargs)

    def __get__(self, obj, _type=None):
        return self.create_mock()
class _ANY(object):
    "A helper object that compares equal to everything."
    def __eq__(self, other):
        # Equal to any operand, of any type.
        return True
    def __ne__(self, other):
        # Defined explicitly so `!=` also matches everything.
        return False
    def __repr__(self):
        return '<ANY>'

# Singleton used in call assertions to match any single argument.
ANY = _ANY()
def _format_call_signature(name, args, kwargs):
message = '%s(%%s)' % name
formatted_args = ''
args_string = ', '.join([repr(arg) for arg in args])
kwargs_string = ', '.join([
'%s=%r' % (key, value) for key, value in kwargs.items()
])
if args_string:
formatted_args = args_string
if kwargs_string:
if formatted_args:
formatted_args += ', '
formatted_args += kwargs_string
return message % formatted_args
class _Call(tuple):
    """
    A tuple for holding the results of a call to a mock, either in the form
    `(args, kwargs)` or `(name, args, kwargs)`.

    If args or kwargs are empty then a call tuple will compare equal to
    a tuple without those values. This makes comparisons less verbose::

        _Call(('name', (), {})) == ('name',)
        _Call(('name', (1,), {})) == ('name', (1,))
        _Call(((), {'a': 'b'})) == ({'a': 'b'},)

    The `_Call` object provides a useful shortcut for comparing with call::

        _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
        _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)

    If the _Call has no name then it will match any name.
    """
    def __new__(cls, value=(), name=None, parent=None, two=False,
                from_kall=True):
        # Decode `value` into (name, args, kwargs). The tuple contents are
        # derived purely from `value`; the `name` parameter is deliberately
        # ignored here (it is stored by __init__ instead).
        name = ''
        args = ()
        kwargs = {}
        _len = len(value)
        if _len == 3:
            name, args, kwargs = value
        elif _len == 2:
            # Two elements: (name, args), (name, kwargs) or (args, kwargs),
            # disambiguated by type.
            first, second = value
            if isinstance(first, str):
                name = first
                if isinstance(second, tuple):
                    args = second
                else:
                    kwargs = second
            else:
                args, kwargs = first, second
        elif _len == 1:
            # One element: a name, an args tuple, or a kwargs mapping.
            value, = value
            if isinstance(value, str):
                name = value
            elif isinstance(value, tuple):
                args = value
            else:
                kwargs = value
        if two:
            # Two-tuple form, used for mock_calls entries without a name.
            return tuple.__new__(cls, (args, kwargs))
        return tuple.__new__(cls, (name, args, kwargs))

    def __init__(self, value=(), name=None, parent=None, two=False,
                 from_kall=True):
        # `from_kall` records whether this object was created via the public
        # `call` helper; it affects __repr__ and call_list().
        self.name = name
        self.parent = parent
        self.from_kall = from_kall

    def __eq__(self, other):
        """Flexible comparison: matches shorter tuples with empty parts
        omitted, and an empty name matches any name."""
        if other is ANY:
            return True
        try:
            len_other = len(other)
        except TypeError:
            return False
        self_name = ''
        if len(self) == 2:
            self_args, self_kwargs = self
        else:
            self_name, self_args, self_kwargs = self
        # Normalize `other` into (name, args, kwargs) by length and type.
        other_name = ''
        if len_other == 0:
            other_args, other_kwargs = (), {}
        elif len_other == 3:
            other_name, other_args, other_kwargs = other
        elif len_other == 1:
            value, = other
            if isinstance(value, tuple):
                other_args = value
                other_kwargs = {}
            elif isinstance(value, str):
                other_name = value
                other_args, other_kwargs = (), {}
            else:
                other_args = ()
                other_kwargs = value
        else:
            # len 2
            # could be (name, args) or (name, kwargs) or (args, kwargs)
            first, second = other
            if isinstance(first, str):
                other_name = first
                if isinstance(second, tuple):
                    other_args, other_kwargs = second, {}
                else:
                    other_args, other_kwargs = (), second
            else:
                other_args, other_kwargs = first, second
        if self_name and other_name != self_name:
            return False
        # this order is important for ANY to work!
        return (other_args, other_kwargs) == (self_args, self_kwargs)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __call__(self, *args, **kwargs):
        # `call(...)` / `call.foo(...)`: build a new _Call representing an
        # invocation of this one.
        if self.name is None:
            return _Call(('', args, kwargs), name='()')
        name = self.name + '()'
        return _Call((self.name, args, kwargs), name=name, parent=self)

    def __getattr__(self, attr):
        # `call.foo`: build a new _Call representing attribute access.
        if self.name is None:
            return _Call(name=attr, from_kall=False)
        name = '%s.%s' % (self.name, attr)
        return _Call(name=name, parent=self, from_kall=False)

    def __repr__(self):
        if not self.from_kall:
            name = self.name or 'call'
            if name.startswith('()'):
                name = 'call%s' % name
            return name
        if len(self) == 2:
            name = 'call'
            args, kwargs = self
        else:
            name, args, kwargs = self
        if not name:
            name = 'call'
        elif not name.startswith('()'):
            name = 'call.%s' % name
        else:
            name = 'call%s' % name
        return _format_call_signature(name, args, kwargs)

    def call_list(self):
        """For a call object that represents multiple calls, `call_list`
        returns a list of all the intermediate calls as well as the
        final call."""
        vals = []
        thing = self
        # Walk the parent chain back to the root, then reverse so the
        # result is in chronological order.
        while thing is not None:
            if thing.from_kall:
                vals.append(thing)
            thing = thing.parent
        return _CallList(reversed(vals))
call = _Call(from_kall=False)
def create_autospec(spec, spec_set=False, instance=False, _parent=None,
                    _name=None, **kwargs):
    """Create a mock object using another object as a spec. Attributes on the
    mock will use the corresponding attribute on the `spec` object as their
    spec.

    Functions or methods being mocked will have their arguments checked
    to check that they are called with the correct signature.

    If `spec_set` is True then attempting to set attributes that don't exist
    on the spec object will raise an `AttributeError`.

    If a class is used as a spec then the return value of the mock (the
    instance of the class) will have the same spec. You can use a class as the
    spec for an instance object by passing `instance=True`. The returned mock
    will only be callable if instances of the mock are callable.

    `create_autospec` also takes arbitrary keyword arguments that are passed to
    the constructor of the created mock."""
    if _is_list(spec):
        # can't pass a list instance to the mock constructor as it will be
        # interpreted as a list of strings
        spec = type(spec)
    is_type = isinstance(spec, type)
    _kwargs = {'spec': spec}
    if spec_set:
        _kwargs = {'spec_set': spec}
    elif spec is None:
        # None we mock with a normal mock without a spec
        _kwargs = {}
    _kwargs.update(kwargs)
    # Choose the mock class: callable specs get MagicMock, otherwise the
    # non-callable variant.
    Klass = MagicMock
    if type(spec) in DescriptorTypes:
        # descriptors don't have a spec
        # because we don't know what type they return
        _kwargs = {}
    elif not _callable(spec):
        Klass = NonCallableMagicMock
    elif is_type and instance and not _instance_callable(spec):
        Klass = NonCallableMagicMock
    _new_name = _name
    if _parent is None:
        # for a top level object no _new_name should be set
        _new_name = ''
    mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
                 name=_name, **_kwargs)
    if isinstance(spec, FunctionTypes):
        # should only happen at the top level because we don't
        # recurse for functions
        mock = _set_signature(mock, spec)
    else:
        _check_signature(spec, mock, is_type, instance)
    if _parent is not None and not instance:
        _parent._mock_children[_name] = mock
    if is_type and not instance and 'return_value' not in kwargs:
        # A class spec: the mock's return value (the "instance") is
        # autospecced from the same class with instance=True.
        mock.return_value = create_autospec(spec, spec_set, instance=True,
                                            _name='()', _parent=mock)
    for entry in dir(spec):
        if _is_magic(entry):
            # MagicMock already does the useful magic methods for us
            continue
        # XXXX do we need a better way of getting attributes without
        # triggering code execution (?) Probably not - we need the actual
        # object to mock it so we would rather trigger a property than mock
        # the property descriptor. Likewise we want to mock out dynamically
        # provided attributes.
        # XXXX what about attributes that raise exceptions other than
        # AttributeError on being fetched?
        # we could be resilient against it, or catch and propagate the
        # exception when the attribute is fetched from the mock
        try:
            original = getattr(spec, entry)
        except AttributeError:
            continue
        # NOTE(review): this rebinds the function's own `kwargs` parameter on
        # every iteration; the caller-supplied keyword arguments were already
        # consumed above, but confirm nothing later expects them intact.
        kwargs = {'spec': original}
        if spec_set:
            kwargs = {'spec_set': original}
        if not isinstance(original, FunctionTypes):
            # Non-function attribute: defer the recursive autospec until it
            # is first accessed (see _SpecState).
            # NOTE(review): the fifth positional argument here lands in
            # _SpecState's `ids` parameter, leaving `instance` at its
            # default -- confirm against _SpecState.__init__'s signature.
            new = _SpecState(original, spec_set, mock, entry, instance)
            mock._mock_children[entry] = new
        else:
            parent = mock
            if isinstance(spec, FunctionTypes):
                parent = mock.mock
            new = MagicMock(parent=parent, name=entry, _new_name=entry,
                            _new_parent=parent, **kwargs)
            mock._mock_children[entry] = new
            skipfirst = _must_skip(spec, entry, is_type)
            _check_signature(original, new, skipfirst=skipfirst)
        # so functions created with _set_signature become instance attributes,
        # *plus* their underlying mock exists in _mock_children of the parent
        # mock. Adding to _mock_children may be unnecessary where we are also
        # setting as an instance attribute?
        if isinstance(new, FunctionTypes):
            setattr(mock, entry, new)
    return mock
def _must_skip(spec, entry, is_type):
    # Decide whether the implicit first argument (self/cls) must be skipped
    # when checking the signature of attribute `entry` looked up on `spec`.
    if not isinstance(spec, type):
        if entry in getattr(spec, '__dict__', {}):
            # instance attribute - shouldn't skip
            return False
        spec = spec.__class__
    # Walk the MRO looking for the raw (undescriptored) attribute.
    for klass in spec.__mro__:
        result = klass.__dict__.get(entry, DEFAULT)
        if result is DEFAULT:
            continue
        if isinstance(result, (staticmethod, classmethod)):
            # These never receive the instance implicitly.
            return False
        return is_type
    # shouldn't get here unless function is a dynamically provided attribute
    # XXXX untested behaviour
    return is_type
def _get_class(obj):
try:
return obj.__class__
except AttributeError:
# it is possible for objects to have no __class__
return type(obj)
class _SpecState(object):
def __init__(self, spec, spec_set=False, parent=None,
name=None, ids=None, instance=False):
self.spec = spec
self.ids = ids
self.spec_set = spec_set
self.parent = parent
self.instance = instance
self.name = name
# Types that create_autospec treats as plain functions: their signatures are
# checked, but they are not recursed into.
FunctionTypes = (
    # python function
    type(create_autospec),
    # instance method
    type(ANY.__eq__),
)
# Lazily-built spec (list of attribute names) for file handles; populated on
# the first mock_open() call.
file_spec = None

def mock_open(mock=None, read_data=''):
    """
    A helper function to create a mock to replace the use of `open`. It works
    for `open` called directly or used as a context manager.

    The `mock` argument is the mock object to configure. If `None` (the
    default) then a `MagicMock` will be created for you, with the API limited
    to methods or attributes available on standard file handles.

    `read_data` is a string for the `read` method of the file handle to return.
    This is an empty string by default.
    """
    global file_spec
    if file_spec is None:
        # Build the union of text- and byte-mode handle attributes once.
        import _io
        file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
    if mock is None:
        mock = MagicMock(name='open', spec=open)
    # Configure the handle returned by open(): it is its own context manager.
    handle = MagicMock(spec=file_spec)
    handle.__enter__.return_value = handle
    handle.read.return_value = read_data
    handle.write.return_value = None
    mock.return_value = handle
    return mock
class PropertyMock(Mock):
    """
    A mock intended to be used as a property, or other descriptor, on a class.
    `PropertyMock` provides `__get__` and `__set__` methods so you can specify
    a return value when it is fetched.

    Fetching a `PropertyMock` instance from an object calls the mock, with
    no args. Setting it calls the mock with the value being set.
    """
    def _get_child_mock(self, **kwargs):
        # Children are plain MagicMocks, not further PropertyMocks.
        return MagicMock(**kwargs)

    def __get__(self, obj, obj_type):
        # Attribute read -> call the mock with no arguments.
        return self()

    def __set__(self, obj, val):
        # Attribute write -> call the mock with the assigned value.
        self(val)
| agpl-3.0 |
esotericnonsense/bitcoind-ncurses | net.py | 2 | 3040 | #!/usr/bin/env python
import curses, math
import global_mod as g
def draw_window(state, old_window):
    """Redraw the network-traffic window.

    Renders a split up/down bandwidth chart built from successive bitcoind
    `getnettotals` samples in state['history']['getnettotals'] (each sample
    provides 'timemillis', 'totalbytesrecv', 'totalbytessent'), plus a
    summary line with the latest up/down/total rates in KB/s.

    Fixes:
    - The KB axis labels used floor division (`*1.0//1024`) before the
      "%0.1f" formatting, so they always displayed x.0; they now use true
      division, consistent with the summary line.
    - Samples with a non-positive time delta (duplicate or out-of-order
      timestamps) are skipped instead of raising ZeroDivisionError.
    """
    old_window.clear()
    old_window.refresh()
    window_height = state['y'] - 1
    window_width = state['x']
    window = curses.newwin(window_height, window_width, 0, 0)
    history = state['history']['getnettotals']
    # Per-sample byte rates (bytes/second, integer) derived from totals.
    sent_deltas = []
    recv_deltas = []
    index = 1
    while index < len(history):
        timedelta = history[index]['timemillis'] - history[index-1]['timemillis']
        recvdelta = history[index]['totalbytesrecv'] - history[index-1]['totalbytesrecv']
        sentdelta = history[index]['totalbytessent'] - history[index-1]['totalbytessent']
        index += 1
        if timedelta <= 0:
            # Duplicate/out-of-order samples would divide by zero; skip them.
            continue
        recv_deltas.append(recvdelta*1000 // timedelta)
        sent_deltas.append(sentdelta*1000 // timedelta)
    if sent_deltas:
        chart_height = window_height - 2
        plot_height = chart_height // 2
        chart_width = window_width - 11
        if len(sent_deltas) > chart_width:
            # Only the most recent chart_width samples fit on screen.
            sent_deltas = sent_deltas[-chart_width:]
            recv_deltas = recv_deltas[-chart_width:]
        color_sent = curses.color_pair(2)
        color_recv = curses.color_pair(1)
        max_sent = max(sent_deltas)
        max_recv = max(recv_deltas)
        max_total = max(max_sent, max_recv)
        if max_total > 0:
            # Axis labels: peak upload above, peak download below the midline.
            if max_sent > 0:
                height = int(math.ceil((1.0 * plot_height * max_sent) / max_total))
                window.addstr(plot_height-height, 1,
                              ("%0.1f" % (max_sent*1.0/1024)).rjust(6) + "K",
                              curses.A_BOLD)
            if max_recv > 0:
                height = int(math.ceil((1.0 * plot_height * max_recv) / max_total))
                window.addstr(plot_height+height, 1,
                              ("%0.1f" % (max_recv*1.0/1024)).rjust(6) + "K",
                              curses.A_BOLD)
            # Bars: upload grows upward, download grows downward from midline.
            index = 0
            while index < len(sent_deltas):
                if index < chart_width:
                    height = int(math.ceil((1.0 * plot_height * sent_deltas[index]) / max_total))
                    for y in range(0, height):
                        window.addch(plot_height-1-y, index+10, " ", color_sent + curses.A_REVERSE)
                    height = int(math.ceil((1.0 * plot_height * recv_deltas[index]) / max_total))
                    for y in range(0, height):
                        window.addch(plot_height+1+y, index+10, " ", color_recv + curses.A_REVERSE)
                index += 1
        recv_string = "Down: " + ("%0.1f" % (recv_deltas[-1]*1.0/1024)).rjust(7) + "KB/s"
        sent_string = "Up: " + ("%0.1f" % (sent_deltas[-1]*1.0/1024)).rjust(7) + "KB/s"
        total_string = "Total: " + ("%0.1f" % ((sent_deltas[-1] + recv_deltas[-1])*1.0/1024)).rjust(7) + "KB/s"
        window.addstr(chart_height+1, window_width-1-18, total_string, curses.A_BOLD)
        window.addstr(chart_height+1, window_width-1-38, sent_string, curses.A_BOLD + color_sent)
        window.addstr(chart_height+1, window_width-1-58, recv_string, curses.A_BOLD + color_recv)
    window.refresh()
| mit |
kaiserroll14/301finalproject | main/numpy/distutils/command/config.py | 75 | 15895 | # Added Fortran compiler support to config. Currently useful only for
# try_compile call. try_run works but is untested for most of Fortran
# compilers (they must define linker_exe first).
# Pearu Peterson
from __future__ import division, absolute_import, print_function
import os, signal
import warnings
import sys
from distutils.command.config import config as old_config
from distutils.command.config import LANG_EXT
from distutils import log
from distutils.file_util import copy_file
from distutils.ccompiler import CompileError, LinkError
import distutils
from numpy.distutils.exec_command import exec_command
from numpy.distutils.mingw32ccompiler import generate_manifest
from numpy.distutils.command.autodist import (check_gcc_function_attribute,
check_gcc_variable_attribute,
check_inline,
check_restrict,
check_compiler_gcc4)
from numpy.distutils.compat import get_exception
# Register Fortran source extensions so distutils' config machinery can
# compile Fortran test snippets.
LANG_EXT['f77'] = '.f'
LANG_EXT['f90'] = '.f90'
class config(old_config):
    """numpy.distutils version of the distutils ``config`` command.

    Adds Fortran compiler support (currently mainly useful for try_compile;
    try_run works but is untested for most Fortran compilers) and extra
    feature-detection helpers (``check_type_size``, ``check_funcs_once``,
    GCC attribute checks, ...).
    """
    old_config.user_options += [
        ('fcompiler=', None, "specify the Fortran compiler type"),
        ]

    def initialize_options(self):
        self.fcompiler = None
        old_config.initialize_options(self)

    def _check_compiler(self):
        # Ensure the C compiler is initialized, then lazily set up the
        # Fortran compiler the same way.
        old_config._check_compiler(self)
        from numpy.distutils.fcompiler import FCompiler, new_fcompiler
        if sys.platform == 'win32' and (self.compiler.compiler_type in
                                        ('msvc', 'intelw', 'intelemw')):
            # XXX: hack to circumvent a python 2.6 bug with msvc9compiler:
            # initialize call query_vcvarsall, which throws an IOError, and
            # causes an error along the way without much information. We try to
            # catch it here, hoping it is early enough, and print an helpful
            # message instead of Error: None.
            if not self.compiler.initialized:
                try:
                    self.compiler.initialize()
                except IOError:
                    e = get_exception()
                    msg = """\
Could not initialize compiler instance: do you have Visual Studio
installed? If you are trying to build with MinGW, please use "python setup.py
build -c mingw32" instead. If you have Visual Studio installed, check it is
correctly installed, and the right version (VS 2008 for python 2.6, 2.7 and 3.2,
VS 2010 for >= 3.3).

Original exception was: %s, and the Compiler class was %s
============================================================================""" \
                        % (e, self.compiler.__class__.__name__)
                    print ("""\
============================================================================""")
                    raise distutils.errors.DistutilsPlatformError(msg)
            # After MSVC is initialized, add an explicit /MANIFEST to linker
            # flags. See issues gh-4245 and gh-4101 for details. Also
            # relevant are issues 4431 and 16296 on the Python bug tracker.
            from distutils import msvc9compiler
            if msvc9compiler.get_build_version() >= 10:
                for ldflags in [self.compiler.ldflags_shared,
                                self.compiler.ldflags_shared_debug]:
                    if '/MANIFEST' not in ldflags:
                        ldflags.append('/MANIFEST')
        if not isinstance(self.fcompiler, FCompiler):
            self.fcompiler = new_fcompiler(compiler=self.fcompiler,
                                           dry_run=self.dry_run, force=1,
                                           c_compiler=self.compiler)
            if self.fcompiler is not None:
                self.fcompiler.customize(self.distribution)
                if self.fcompiler.get_version():
                    self.fcompiler.customize_cmd(self)
                    self.fcompiler.show_customization()

    def _wrap_method(self, mth, lang, args):
        # Call distutils method `mth` with the active compiler temporarily
        # swapped to the Fortran compiler when `lang` is a Fortran dialect.
        from distutils.ccompiler import CompileError
        from distutils.errors import DistutilsExecError
        save_compiler = self.compiler
        if lang in ['f77', 'f90']:
            self.compiler = self.fcompiler
        try:
            ret = mth(*((self,)+args))
        except (DistutilsExecError, CompileError):
            # NOTE(review): msg is computed but never used or logged.
            msg = str(get_exception())
            self.compiler = save_compiler
            raise CompileError
        self.compiler = save_compiler
        return ret

    def _compile(self, body, headers, include_dirs, lang):
        return self._wrap_method(old_config._compile, lang,
                                 (body, headers, include_dirs, lang))

    def _link(self, body,
              headers, include_dirs,
              libraries, library_dirs, lang):
        if self.compiler.compiler_type == 'msvc':
            # Work on copies: we may append Fortran runtime libs/dirs below.
            libraries = (libraries or [])[:]
            library_dirs = (library_dirs or [])[:]
            if lang in ['f77', 'f90']:
                lang = 'c' # always use system linker when using MSVC compiler
                if self.fcompiler:
                    for d in self.fcompiler.library_dirs or []:
                        # correct path when compiling in Cygwin but with
                        # normal Win Python
                        if d.startswith('/usr/lib'):
                            s, o = exec_command(['cygpath', '-w', d],
                                                use_tee=False)
                            if not s: d = o
                        library_dirs.append(d)
                    for libname in self.fcompiler.libraries or []:
                        if libname not in libraries:
                            libraries.append(libname)
            for libname in libraries:
                if libname.startswith('msvc'): continue
                fileexists = False
                for libdir in library_dirs or []:
                    libfile = os.path.join(libdir, '%s.lib' % (libname))
                    if os.path.isfile(libfile):
                        fileexists = True
                        break
                if fileexists: continue
                # make g77-compiled static libs available to MSVC
                fileexists = False
                for libdir in library_dirs:
                    libfile = os.path.join(libdir, 'lib%s.a' % (libname))
                    if os.path.isfile(libfile):
                        # copy libname.a file to name.lib so that MSVC linker
                        # can find it
                        libfile2 = os.path.join(libdir, '%s.lib' % (libname))
                        copy_file(libfile, libfile2)
                        self.temp_files.append(libfile2)
                        fileexists = True
                        break
                if fileexists: continue
                log.warn('could not find library %r in directories %s' \
                         % (libname, library_dirs))
        elif self.compiler.compiler_type == 'mingw32':
            generate_manifest(self)
        return self._wrap_method(old_config._link, lang,
                                 (body, headers, include_dirs,
                                  libraries, library_dirs, lang))

    def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
        """Return True if `header` can be included."""
        self._check_compiler()
        return self.try_compile(
            "/* we need a dummy line to make distutils happy */",
            [header], include_dirs)

    def check_decl(self, symbol,
                   headers=None, include_dirs=None):
        """Return True if `symbol` is declared by the given headers."""
        self._check_compiler()
        body = """
int main(void)
{
#ifndef %s
(void) %s;
#endif
;
return 0;
}""" % (symbol, symbol)
        return self.try_compile(body, headers, include_dirs)

    def check_macro_true(self, symbol,
                        headers=None, include_dirs=None):
        """Return True if the preprocessor macro `symbol` is defined and true."""
        self._check_compiler()
        body = """
int main(void)
{
#if %s
#else
#error false or undefined macro
#endif
;
return 0;
}""" % (symbol,)
        return self.try_compile(body, headers, include_dirs)

    def check_type(self, type_name, headers=None, include_dirs=None,
                   library_dirs=None):
        """Check type availability. Return True if the type can be compiled,
        False otherwise"""
        self._check_compiler()
        # First check the type can be compiled
        body = r"""
int main(void) {
if ((%(name)s *) 0)
return 0;
if (sizeof (%(name)s))
return 0;
}
""" % {'name': type_name}
        st = False
        try:
            try:
                # NOTE(review): `body` is already fully formatted above; this
                # second %-format with a 'type' key is a no-op on a string
                # without conversion specifiers.
                self._compile(body % {'type': type_name},
                              headers, include_dirs, 'c')
                st = True
            except distutils.errors.CompileError:
                st = False
        finally:
            self._clean()
        return st

    def check_type_size(self, type_name, headers=None, include_dirs=None, library_dirs=None, expected=None):
        """Check size of a given type."""
        self._check_compiler()
        # First check the type can be compiled
        body = r"""
typedef %(type)s npy_check_sizeof_type;
int main (void)
{
static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)];
test_array [0] = 0
;
return 0;
}
"""
        self._compile(body % {'type': type_name},
                      headers, include_dirs, 'c')
        self._clean()
        if expected:
            # Fast path: try each expected size first; the snippet only
            # compiles when the equality holds.
            body = r"""
typedef %(type)s npy_check_sizeof_type;
int main (void)
{
static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)];
test_array [0] = 0
;
return 0;
}
"""
            for size in expected:
                try:
                    self._compile(body % {'type': type_name, 'size': size},
                                  headers, include_dirs, 'c')
                    self._clean()
                    return size
                except CompileError:
                    pass
        # this fails to *compile* if size > sizeof(type)
        body = r"""
typedef %(type)s npy_check_sizeof_type;
int main (void)
{
static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)];
test_array [0] = 0
;
return 0;
}
"""
        # The principle is simple: we first find low and high bounds of size
        # for the type, where low/high are looked up on a log scale. Then, we
        # do a binary search to find the exact size between low and high
        low = 0
        mid = 0
        while True:
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                break
            except CompileError:
                #log.info("failure to test for bound %d" % mid)
                low = mid + 1
                mid = 2 * mid + 1
        high = mid
        # Binary search:
        while low != high:
            mid = (high - low) // 2 + low
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                high = mid
            except CompileError:
                low = mid + 1
        return low

    def check_func(self, func,
                   headers=None, include_dirs=None,
                   libraries=None, library_dirs=None,
                   decl=False, call=False, call_args=None):
        """Return True if `func` can be declared/linked (and optionally
        called with `call_args`)."""
        # clean up distutils's config a bit: add void to main(), and
        # return a value.
        self._check_compiler()
        body = []
        if decl:
            # `decl` may be a literal declaration string or just truthy.
            if type(decl) == str:
                body.append(decl)
            else:
                body.append("int %s (void);" % func)
        # Handle MSVC intrinsics: force MS compiler to make a function call.
        # Useful to test for some functions when built with optimization on, to
        # avoid build error because the intrinsic and our 'fake' test
        # declaration do not match.
        body.append("#ifdef _MSC_VER")
        body.append("#pragma function(%s)" % func)
        body.append("#endif")
        body.append("int main (void) {")
        if call:
            if call_args is None:
                call_args = ''
            body.append(" %s(%s);" % (func, call_args))
        else:
            body.append(" %s;" % func)
        body.append(" return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"
        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)

    def check_funcs_once(self, funcs,
                         headers=None, include_dirs=None,
                         libraries=None, library_dirs=None,
                         decl=False, call=False, call_args=None):
        """Check a list of functions at once.

        This is useful to speed up things, since all the functions in the funcs
        list will be put in one compilation unit.

        Arguments
        ---------
        funcs : seq
            list of functions to test
        include_dirs : seq
            list of header paths
        libraries : seq
            list of libraries to link the code snippet to
        library_dirs : seq
            list of library paths
        decl : dict
            for every (key, value), the declaration in the value will be
            used for function in key. If a function is not in the
            dictionary, no declaration will be used.
        call : dict
            for every item (f, value), if the value is True, a call will be
            done to the function f.
        """
        self._check_compiler()
        body = []
        if decl:
            for f, v in decl.items():
                if v:
                    body.append("int %s (void);" % f)
        # Handle MS intrinsics. See check_func for more info.
        body.append("#ifdef _MSC_VER")
        for func in funcs:
            body.append("#pragma function(%s)" % func)
        body.append("#endif")
        body.append("int main (void) {")
        if call:
            for f in funcs:
                if f in call and call[f]:
                    if not (call_args and f in call_args and call_args[f]):
                        args = ''
                    else:
                        args = call_args[f]
                    body.append(" %s(%s);" % (f, args))
                else:
                    body.append(" %s;" % f)
        else:
            for f in funcs:
                body.append(" %s;" % f)
        body.append(" return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"
        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)

    def check_inline(self):
        """Return the inline keyword recognized by the compiler, empty string
        otherwise."""
        return check_inline(self)

    def check_restrict(self):
        """Return the restrict keyword recognized by the compiler, empty string
        otherwise."""
        return check_restrict(self)

    def check_compiler_gcc4(self):
        """Return True if the C compiler is gcc >= 4."""
        return check_compiler_gcc4(self)

    def check_gcc_function_attribute(self, attribute, name):
        """Return True if the compiler accepts the given GCC function attribute."""
        return check_gcc_function_attribute(self, attribute, name)

    def check_gcc_variable_attribute(self, attribute):
        """Return True if the compiler accepts the given GCC variable attribute."""
        return check_gcc_variable_attribute(self, attribute)
class GrabStdout(object):
    # Tee for sys.stdout: installs itself as sys.stdout on construction,
    # recording everything written (in self.data) while still forwarding it
    # to the real stream. Call restore() to undo the redirection.
    def __init__(self):
        self.sys_stdout = sys.stdout
        self.data = ''
        sys.stdout = self

    def write(self, data):
        # Forward to the original stdout and accumulate a transcript.
        self.sys_stdout.write(data)
        self.data += data

    def flush(self):
        self.sys_stdout.flush()

    def restore(self):
        # Reinstall the original sys.stdout.
        sys.stdout = self.sys_stdout
| gpl-3.0 |
Dubrzr/django-push-notifications | push_notifications/api/tastypie.py | 21 | 1413 | from tastypie.authorization import Authorization
from tastypie.authentication import BasicAuthentication
from tastypie.resources import ModelResource
from push_notifications.models import APNSDevice, GCMDevice
class APNSDeviceResource(ModelResource):
    # Unauthenticated CRUD endpoint for APNS (iOS push) device registrations.
    class Meta:
        authorization = Authorization()
        queryset = APNSDevice.objects.all()
        resource_name = "device/apns"
class GCMDeviceResource(ModelResource):
    # Unauthenticated CRUD endpoint for GCM (Android push) device registrations.
    class Meta:
        authorization = Authorization()
        queryset = GCMDevice.objects.all()
        resource_name = "device/gcm"
class APNSDeviceAuthenticatedResource(APNSDeviceResource):
    # Variant of APNSDeviceResource that requires HTTP Basic auth and binds
    # newly created devices to the authenticated user.
    # user = ForeignKey(UserResource, "user")

    class Meta(APNSDeviceResource.Meta):
        authentication = BasicAuthentication()
        # authorization = SameUserAuthorization()

    def obj_create(self, bundle, **kwargs):
        # Force the created device to belong to the requesting user.
        # See https://github.com/toastdriven/django-tastypie/issues/854
        return super(APNSDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
class GCMDeviceAuthenticatedResource(GCMDeviceResource):
    # Variant of GCMDeviceResource that requires HTTP Basic auth and binds
    # newly created devices to the authenticated user.
    # user = ForeignKey(UserResource, "user")

    class Meta(GCMDeviceResource.Meta):
        authentication = BasicAuthentication()
        # authorization = SameUserAuthorization()

    def obj_create(self, bundle, **kwargs):
        # Force the created device to belong to the requesting user.
        # See https://github.com/toastdriven/django-tastypie/issues/854
        return super(GCMDeviceAuthenticatedResource, self).obj_create(bundle, user=bundle.request.user, **kwargs)
| mit |
KenjiTakahashi/qoss | middleendv2.py | 1 | 8570 | # -*- coding: utf-8 -*-
# This is a part of qoss @ http://github.com/KenjiTakahashi/qoss/
# Karol "Kenji Takahashi" Wozniak (C) 2011
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PyQt4 import QtGui
from PyQt4.QtCore import Qt, pyqtSignal, QPointF
class QOSSBar(QtGui.QProgressBar):
    # Thin vertical progress bar used as a peak/VU meter.
    def __init__(self, mini, maxi, curr, parent=None):
        QtGui.QProgressBar.__init__(self, parent)
        self.setTextVisible(False)
        self.setFixedWidth(10)
        self.setOrientation(Qt.Vertical)
        # NOTE(review): the range is [mini, mini + maxi], i.e. `maxi` is
        # treated as a span rather than an absolute maximum -- confirm
        # against callers (QOSSWidget.createPeaks passes values consistent
        # with this convention).
        self.setMinimum(mini)
        self.setMaximum(mini + maxi)
        self.setValue(curr)
class QOSSPushButton(QtGui.QPushButton):
    # Base class for small 21x21 checkable icon buttons drawn from painter
    # paths. Subclasses populate:
    #   self.path  - filled shape, drawn always
    #   self.ypath - outline drawn while the button is checked
    #   self.npath - outline drawn while it is not checked
    def __init__(self, parent=None):
        QtGui.QPushButton.__init__(self, parent)
        self.path = QtGui.QPainterPath()
        self.ypath = QtGui.QPainterPath()
        self.npath = QtGui.QPainterPath()
        self.setFixedSize(21, 21)
        self.setCheckable(True)

    def paintEvent(self, event):
        # Let QPushButton paint the normal bevel first, then overlay the icon
        # in the palette's "mid" color.
        QtGui.QPushButton.paintEvent(self, event)
        painter = QtGui.QPainter(self)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
        mid = QtGui.QPalette().mid()
        # Filled base shape.
        painter.setPen(Qt.NoPen)
        painter.setBrush(mid)
        painter.drawPath(self.path)
        # State-dependent outline (2px stroke, no fill).
        painter.setPen(QtGui.QPen(mid, 2))
        painter.setBrush(Qt.NoBrush)
        if self.isChecked():
            painter.drawPath(self.ypath)
        else:
            painter.drawPath(self.npath)
class QOSSMute(QOSSPushButton):
    """Mute toggle drawn as a loudspeaker; the sound wave arc is shown
    only while un-muted (unchecked)."""
    def __init__(self, value, parent = None):
        QOSSPushButton.__init__(self, parent)
        self.setChecked(value)
        # Speaker body: a small rectangle plus the cone polygon.
        self.path.addRect(5, 8, 3, 5)
        cone = [QPointF(x, y) for x, y in ((8, 8), (8, 13), (12, 16), (12, 5))]
        self.path.addPolygon(QtGui.QPolygonF(cone))
        # Sound wave arc, drawn while the button is unchecked.
        self.npath.moveTo(14, 14)
        self.npath.arcTo(10, 7, 6, 7, -50, 120)
class QOSSSlider(QtGui.QSlider):
    """Volume slider whose range runs from ``mini`` to ``mini + maxi``."""
    def __init__(self, mini, maxi, curr, parent = None):
        QtGui.QSlider.__init__(self, parent)
        # Same range convention as QOSSBar.
        self.setRange(mini, mini + maxi)
        self.setValue(curr)
class QOSSSliderButton(QOSSPushButton):
    """Channel-lock toggle placed between the two sliders: the arc is a
    full half-circle while locked (checked) and an open arc otherwise."""
    def __init__(self, parent = None):
        QOSSPushButton.__init__(self, parent)
        # Base bar shared by both states.
        self.path.addRect(5, 10, 11, 6)
        # Same arc geometry for both states; only the sweep differs.
        for target, sweep in ((self.ypath, 180), (self.npath, 130)):
            target.moveTo(15, 10)
            target.arcTo(6, 5, 9, 11, 0, sweep)
class QOSSModes(QtGui.QComboBox):
    """Combo box listing the selectable modes of a mixer extension."""
    def __init__(self, modes, current, parent = None):
        QtGui.QComboBox.__init__(self, parent)
        self._populate(modes, current)
    def update(self, modes, current):
        # NOTE: intentionally shadows QWidget.update(); callers use this
        # to replace the whole mode list at once.
        self.clear()
        self._populate(modes, current)
    def _populate(self, modes, current):
        # Fill the list and select the active entry.
        self.addItems(modes)
        self.setCurrentIndex(current)
class QOSSWidget(QtGui.QGroupBox, object):
    """Group box holding the controls for a single OSS mixer extension.

    Child controls (peak meters, mute button, volume slider(s), mode
    combo box, on/off check box) are created on demand by the create*
    methods and laid out once by do().  Every user interaction is
    re-emitted as a ``(fd, extension-info dict, new value)`` signal so
    the middle end can forward the change to the OSS backend.
    """
    sliderChanged = pyqtSignal(int, dict, tuple)
    modesChanged = pyqtSignal(int, dict, int)
    muteChanged = pyqtSignal(int, dict, bool)
    onOffChanged = pyqtSignal(int, dict, bool)
    def __init__(self, name, parent = None):
        # FIX: initialise the actual base class.  QGroupBox accepts a
        # title string, whereas the previously used QtGui.QWidget.__init__
        # has no (title, parent) overload and would reject these args.
        QtGui.QGroupBox.__init__(self, name, parent)
        self.layout = QtGui.QGridLayout()
        self.setLayout(self.layout)
        # Sub-widgets; created lazily by the create* methods below.
        self.mute = None
        self.lPeak = None
        self.rPeak = None
        self.lSlider = None
        self.rSlider = None
        self.modes = None
        self.onoff = None
        # Extension-info storage.  `inteis` is read by the change*
        # handlers with keys 'mute'/'slider'/'modes'/'onoff';
        # presumably both are populated by the owning view -- TODO confirm.
        self.eis = list()
        self.inteis = dict()
    @property
    def fd(self):
        # Mixer device file descriptor forwarded with every signal.
        return self.__fd
    @fd.setter
    def fd(self, fd):
        self.__fd = fd
    @property
    def device(self):
        return self.__device
    @device.setter
    def device(self, device):
        self.__device = device
    def createPeaks(self, curr, mini, maxi):
        """Create one or two peak meters; mono input (a 1-tuple) simply
        skips the right meter."""
        # Peaks are inverted (maxi - value) so the bar falls as the
        # signal gets louder -- matches updatePeaks() below.
        self.lPeak = QOSSBar(mini, maxi, maxi - curr[0])
        try:
            self.rPeak = QOSSBar(mini, maxi, maxi - curr[1])
        except IndexError:
            pass
    def updatePeaks(self, values):
        self.lPeak.setValue(self.lPeak.maximum() - values[0])
        try:
            self.rPeak.setValue(self.rPeak.maximum() - values[1])
        except AttributeError:
            # Mono: self.rPeak is None.
            pass
    def createMute(self, value):
        self.mute = QOSSMute(value)
        self.mute.toggled.connect(self.changeMute)
    def updateMute(self, value):
        self.mute.setChecked(value)
    def changeMute(self, value):
        self.muteChanged.emit(self.fd, self.inteis['mute'], value)
    def createSlider(self, curr, mini, maxi):
        """Create one or two sliders; mono input creates the left one only."""
        self.lSlider = QOSSSlider(mini, maxi, curr[0])
        self.lSlider.valueChanged.connect(self.changelSlider)
        try:
            self.rSlider = QOSSSlider(mini, maxi, curr[1])
            self.rSlider.valueChanged.connect(self.changerSlider)
        except IndexError:
            pass
    def updateSlider(self, values):
        self.lSlider.setValue(values[0])
        try:
            self.rSlider.setValue(values[1])
        except AttributeError:
            # Mono: self.rSlider is None.
            pass
    def changelSlider(self, value):
        # Emit both channel values; fall back to a 1-tuple for mono
        # (self.rSlider is None -> AttributeError).
        try:
            self.sliderChanged.emit(self.fd, self.inteis['slider'],
                (value, self.rSlider.value()))
        except AttributeError:
            self.sliderChanged.emit(self.fd, self.inteis['slider'], (value,))
    def changerSlider(self, value):
        try:
            self.sliderChanged.emit(self.fd, self.inteis['slider'],
                (self.lSlider.value(), value))
        except AttributeError:
            self.sliderChanged.emit(self.fd, self.inteis['slider'], (value,))
    def slidersLockage(self, value):
        """Lock/unlock the two channel sliders together (QOSSSliderButton)."""
        if value:
            # Align both sliders to the louder channel before linking.
            lValue = self.lSlider.value()
            rValue = self.rSlider.value()
            if lValue < rValue:
                self.lSlider.setValue(rValue)
            elif lValue > rValue:
                self.rSlider.setValue(lValue)
            self.lSlider.valueChanged.connect(self.rSlider.setValue)
            self.rSlider.valueChanged.connect(self.lSlider.setValue)
        else:
            self.lSlider.valueChanged.disconnect(self.rSlider.setValue)
            self.rSlider.valueChanged.disconnect(self.lSlider.setValue)
    def createModes(self, values, current):
        self.modes = QOSSModes(values, current)
        self.modes.currentIndexChanged[int].connect(self.changeModes)
    def updateModes(self, values, current):
        self.modes.update(values, current)
    def changeModes(self, current):
        self.modesChanged.emit(self.fd, self.inteis['modes'], current)
    def createOnOff(self, name, value):
        self.onoff = QtGui.QCheckBox(name)
        if value:
            # 2 == Qt.Checked
            self.onoff.setCheckState(2)
        self.onoff.stateChanged.connect(self.changeOnOff)
    def updateOnOff(self, value):
        if value:
            self.onoff.setCheckState(2)
        else:
            self.onoff.setCheckState(0)
    def changeOnOff(self, value):
        self.onOffChanged.emit(self.fd, self.inteis['onoff'], value)
    def do(self):
        """Place the previously created sub-widgets in the grid layout.

        Column layout: [left peak][mute+slider][right slider+lock][right peak],
        with modes/onoff spanning the full width below.
        """
        # Widgets with a slider/mute need two rows; plain ones need one.
        if self.mute or self.lSlider or self.rSlider:
            span = 2
        else:
            span = 1
        hCount = 0
        if self.lPeak:
            self.layout.addWidget(self.lPeak, 0, 0, span, 1)
            hCount += 1
        if self.mute:
            self.layout.addWidget(self.mute, 0, hCount)
        if self.lSlider:
            self.layout.addWidget(self.lSlider, 1, hCount)
        if self.rSlider:
            hCount += 1
            self.layout.addWidget(self.rSlider, 1, hCount)
            # Lock button sits above the right slider.
            sliderButton = QOSSSliderButton()
            sliderButton.toggled.connect(self.slidersLockage)
            self.layout.addWidget(sliderButton, 0, hCount)
        hCount += 1
        if self.rPeak:
            self.layout.addWidget(self.rPeak, 0, hCount, span, 1)
        if self.modes:
            span += 1
            self.layout.addWidget(self.modes, span, 0, 1, hCount)
        if self.onoff:
            # NOTE(review): when both modes and onoff exist they are added
            # at the same grid row and will overlap -- verify intent.
            self.layout.addWidget(self.onoff, span, 0, 1, hCount)
class QOSSConfigWidget(QOSSWidget):
    """QOSSWidget variant used on the configuration page."""
    def __init__(self, name, parent = None):
        super(QOSSConfigWidget, self).__init__(name, parent)
| gpl-3.0 |
fugitifduck/exabgp | lib/exabgp/bgp/message/update/attribute/pmsi.py | 1 | 4143 | # encoding: utf-8
"""
pmsi_tunnel.py
Created by Thomas Morin on 2014-06-10.
Copyright (c) 2014-2015 Orange. All rights reserved.
Copyright (c) 2014-2015 Exa Networks. All rights reserved.
"""
from struct import pack
from struct import unpack
from exabgp.protocol.ip import IPv4
from exabgp.bgp.message.update.attribute.attribute import Attribute
# http://tools.ietf.org/html/rfc6514#section-5
#
# +---------------------------------+
# | Flags (1 octet) |
# +---------------------------------+
# | Tunnel Type (1 octets) |
# +---------------------------------+
# | MPLS Label (3 octets) |
# +---------------------------------+
# | Tunnel Identifier (variable) |
# +---------------------------------+
# ========================================================================= PMSI
# RFC 6514
class PMSI (Attribute):
    """PMSI Tunnel attribute (RFC 6514, section 5).

    Wire format:
        flags (1 octet) | tunnel type (1 octet) |
        MPLS label (3 octets, label value in the upper 20 bits) |
        tunnel identifier (variable, type specific).

    NOTE(review): this code targets Python 2 -- it relies on ``str``
    being a byte string (see unpack()/prettytunnel()) and on ``__cmp__``
    for comparisons.
    """
    ID = Attribute.CODE.PMSI_TUNNEL
    FLAG = Attribute.Flag.OPTIONAL | Attribute.Flag.TRANSITIVE
    CACHING = True

    TUNNEL_TYPE = -1
    # TUNNEL_TYPE MUST NOT BE DEFINED HERE ( it allows to set it up as a self. value)

    # Registry of concrete subclasses keyed by tunnel-type octet;
    # populated via register_pmsi() at module import time.
    _pmsi_known = dict()

    # Human-readable names for the IANA-assigned tunnel types.
    _name = {
        0: 'No tunnel',
        1: 'RSVP-TE P2MP LSP',
        2: 'mLDP P2MP LSP',
        3: 'PIM-SSM Tree',
        4: 'PIM-SM Tree',
        5: 'BIDIR-PIM Tree',
        6: 'Ingress Replication',
        7: 'mLDP MP2MP LSP',
    }

    __slots__ = ['label','flags','tunnel']

    def __init__ (self, tunnel, label, flags):
        self.label = label  # integer
        self.flags = flags  # integer
        self.tunnel = tunnel  # tunnel id, packed data

    @staticmethod
    def name (tunnel_type):
        # Display name for a tunnel type; 'unknown' for unassigned values.
        return PMSI._name.get(tunnel_type,'unknown')

    def pack (self):
        # The label lives in the high 20 bits of the 3-octet field,
        # hence the << 4 shift and the [1:4] slice of the 4-byte pack.
        return self._attribute(
            pack(
                '!BB3s',
                self.flags,
                self.TUNNEL_TYPE,
                pack('!L',self.label << 4)[1:4]
            ) + self.tunnel
        )

    # XXX: FIXME: Orange code had 4 (and another reference to it in the code elsewhere)
    def __len__ (self):
        return len(self.tunnel) + 5  # flags:1, tunnel type:1, MPLS label:3

    def __cmp__ (self, other):
        # Python 2 comparison hook: 0 means equal, -1 otherwise.
        if not isinstance(other,self.__class__):
            return -1
        # if self.TUNNEL_TYPE != other.TUNNEL_TYPE:
        #     return -1
        if self.label != other.label:
            return -1
        if self.flags != other.flags:
            return -1
        if self.tunnel != other.tunnel:
            return -1
        return 0

    def __repr__ (self):
        return str(self)

    def prettytunnel (self):
        # Hex dump of the raw tunnel id.  Python 2: iterating a str
        # yields 1-character strings, hence ord().
        return "0x" + ''.join('%02X' % ord(_) for _ in self.tunnel) if self.tunnel else ''

    def __str__ (self):
        # TODO: add hex dump of packedValue
        return "pmsi:%s:%s:%s:%s" % (
            self.name(self.TUNNEL_TYPE).replace(' ','').lower(),
            str(self.flags) if self.flags else '-',  # why not use zero (0) ?
            str(self.label) if self.label else '-',  # why not use zero (0) ?
            self.prettytunnel()
        )

    @staticmethod
    def register_pmsi (klass):
        # Called once per concrete subclass so unpack() can dispatch.
        PMSI._pmsi_known[klass.TUNNEL_TYPE] = klass

    @staticmethod
    def pmsi_unknown (subtype, tunnel, label, flags):
        # Fallback for tunnel types without a registered subclass: keep
        # the raw payload and remember the type on the instance.
        pmsi = PMSI(tunnel,label,flags)
        pmsi.TUNNEL_TYPE = subtype
        return pmsi

    @classmethod
    def unpack (cls, data, negotiated):
        flags,subtype = unpack('!BB',data[:2])
        # Pad the 3 label octets to 4 bytes, then drop the low 4 bits
        # (EXP + bottom-of-stack) to recover the 20-bit label.
        label = unpack('!L','\0'+data[2:5])[0] >> 4
        # should we check for bottom of stack before the shift ?
        if subtype in cls._pmsi_known:
            return cls._pmsi_known[subtype].unpack(data[5:],label,flags)
        return cls.pmsi_unknown(subtype,data[5:],label,flags)
# ================================================================= PMSINoTunnel
# RFC 6514
class PMSINoTunnel (PMSI):
    """PMSI with tunnel type 0: no tunnel information is present."""
    TUNNEL_TYPE = 0

    def __init__ (self, label=0, flags=0):
        # This type carries no tunnel identifier at all.
        PMSI.__init__(self, '', label, flags)

    @classmethod
    def unpack (cls, tunnel, label, flags):
        # Any trailing bytes are ignored: the type defines no identifier.
        return cls(label, flags)

    def prettytunnel (self):
        # Nothing to render for "no tunnel".
        return ''
# ======================================================= PMSIIngressReplication
# RFC 6514
class PMSIIngressReplication (PMSI):
    """PMSI with tunnel type 6 (Ingress Replication): the tunnel
    identifier is the IPv4 address of the replication endpoint."""
    TUNNEL_TYPE = 6

    def __init__ (self, ip, label=0, flags=0, tunnel=None):
        self.ip = ip
        # Reuse the wire bytes when we have them (loss-free repack);
        # otherwise pack the printable address.
        packed = tunnel if tunnel else IPv4.pton(ip)
        PMSI.__init__(self, packed, label, flags)

    @classmethod
    def unpack (cls, tunnel, label, flags):
        return cls(IPv4.ntop(tunnel), label, flags, tunnel)

    def prettytunnel (self):
        # The printable IPv4 address is the natural rendering.
        return self.ip
# Register the concrete tunnel types so PMSI.unpack() can dispatch on
# the tunnel-type octet.
PMSI.register_pmsi(PMSINoTunnel)
PMSI.register_pmsi(PMSIIngressReplication)
| bsd-3-clause |
Qalthos/ansible | lib/ansible/modules/network/nxos/nxos_command.py | 39 | 7159 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: nxos_command
extends_documentation_fragment: nxos
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run arbitrary command on Cisco NXOS devices
description:
- Sends an arbitrary command to an NXOS node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
options:
commands:
description:
- The commands to send to the remote NXOS device. The resulting
output from the command is returned. If the I(wait_for)
argument is provided, the module is not returned until the
condition is satisfied or the number of retires as expired.
- The I(commands) argument also accepts an alternative form
that allows for complex values that specify the command
to run and the output format to return. This can be done
on a command by command basis. The complex argument supports
the keywords C(command) and C(output) where C(command) is the
command to run and C(output) is one of 'text' or 'json'.
required: true
wait_for:
description:
- Specifies what to evaluate from the output of the command
and what conditionals to apply. This argument will cause
the task to wait for a particular conditional to be true
before moving forward. If the conditional is not true
by the configured retries, the task fails. See examples.
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the I(wait_for) must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
default: all
version_added: "2.2"
retries:
description:
- Specifies the number of retries a command should by tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the I(wait_for)
conditionals.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditional, the interval indicates how to long to wait before
trying the command again.
default: 1
"""
EXAMPLES = """
---
- name: run show version on remote devices
nxos_command:
commands: show version
- name: run show version and check to see if output contains Cisco
nxos_command:
commands: show version
wait_for: result[0] contains Cisco
- name: run multiple commands on remote nodes
nxos_command:
commands:
- show version
- show interfaces
- name: run multiple commands and evaluate the output
nxos_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Cisco
- result[1] contains loopback0
- name: run commands and specify the output format
nxos_command:
commands:
- command: show version
output: json
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always apart from low level errors (such as action plugin)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always apart from low level errors (such as action plugin)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.parsing import Conditional, FailedConditionalError
from ansible.module_utils.network.common.utils import transform_commands, to_lines
from ansible.module_utils.network.nxos.nxos import check_args, nxos_argument_spec, run_commands
def parse_commands(module, warnings):
    """Normalise the ``commands`` option; in check mode, warn about and
    drop every command that is not a read-only ``show`` command."""
    commands = transform_commands(module)
    if module.check_mode:
        # Only 'show ...' commands are safe to execute without changes.
        rejected = [c for c in commands if not c['command'].startswith('show')]
        for entry in rejected:
            warnings.append(
                'Only show commands are supported when using check mode, not '
                'executing %s' % entry['command']
            )
            commands.remove(entry)
    return commands
def to_cli(obj):
    """Return the CLI string for a command dict, appending ``| json``
    when JSON output was requested via the ``output`` key."""
    wants_json = obj.get('output') == 'json'
    return obj['command'] + (' | json' if wants_json else '')
def main():
    """entry point for module execution
    """
    argument_spec = dict(
        # { command: <str>, output: <str>, prompt: <str>, response: <str> }
        commands=dict(type='list', required=True),
        wait_for=dict(type='list', aliases=['waitfor']),
        match=dict(default='all', choices=['any', 'all']),
        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )

    # Add the shared nxos connection/provider options.
    argument_spec.update(nxos_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    warnings = list()
    result = {'changed': False, 'warnings': warnings}
    check_args(module, warnings)
    commands = parse_commands(module, warnings)
    wait_for = module.params['wait_for'] or list()

    try:
        # Compile each wait_for expression up front so syntax errors
        # fail the task before any command is sent.
        conditionals = [Conditional(c) for c in wait_for]
    except AttributeError as exc:
        module.fail_json(msg=to_text(exc))

    retries = module.params['retries']
    interval = module.params['interval']
    match = module.params['match']

    # Re-run the commands until every conditional is satisfied (or, for
    # match=any, until one is), sleeping `interval` between attempts.
    # NOTE(review): if retries <= 0 the loop never runs and `responses`
    # is unbound below -- confirm whether retries is validated upstream.
    while retries > 0:
        responses = run_commands(module, commands)

        for item in list(conditionals):
            try:
                if item(responses):
                    if match == 'any':
                        # One satisfied conditional is enough.
                        conditionals = list()
                        break
                    conditionals.remove(item)
            except FailedConditionalError as exc:
                module.fail_json(msg=to_text(exc))

        if not conditionals:
            break

        time.sleep(interval)
        retries -= 1

    if conditionals:
        # Some conditionals never matched within the retry budget.
        failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)

    result.update({
        'stdout': responses,
        'stdout_lines': list(to_lines(responses)),
    })

    module.exit_json(**result)
# Ansible executes the module file directly as a script.
if __name__ == '__main__':
    main()
| gpl-3.0 |
sssllliang/silverberry | lib/setuptools/config.py | 41 | 16088 | from __future__ import absolute_import, unicode_literals
import io
import os
import sys
from collections import defaultdict
from functools import partial
from distutils.errors import DistutilsOptionError, DistutilsFileError
from setuptools.py26compat import import_module
from six import string_types
def read_configuration(
        filepath, find_others=False, ignore_option_errors=False):
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file
        to get options from.

    :param bool find_others: Whether to search for other configuration files
        which could be on in various places.

    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.

    :rtype: dict
    """
    # Imported lazily to avoid a circular import (dist imports config).
    from setuptools.dist import Distribution, _Distribution

    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise DistutilsFileError(
            'Configuration file %s does not exist.' % filepath)

    current_directory = os.getcwd()
    # Parse relative to the config file's directory so that relative
    # `file:` directives and packages.find resolve against the project.
    os.chdir(os.path.dirname(filepath))

    try:
        dist = Distribution()

        filenames = dist.find_config_files() if find_others else []
        if filepath not in filenames:
            filenames.append(filepath)

        _Distribution.parse_config_files(dist, filenames=filenames)

        handlers = parse_configuration(
            dist, dist.command_options,
            ignore_option_errors=ignore_option_errors)

    finally:
        # Always restore the caller's working directory.
        os.chdir(current_directory)

    return configuration_to_dict(handlers)
def configuration_to_dict(handlers):
    """Return configuration data gathered by given handlers as a dict.

    :param list[ConfigHandler] handlers: Handlers list,
        usually from parse_configuration()
    :rtype: dict
    """
    result = defaultdict(dict)

    for handler in handlers:
        target = handler.target_obj
        for option in handler.set_options:
            # Prefer a dedicated accessor (e.g. get_version()) when the
            # target object provides one; otherwise read the attribute.
            accessor = getattr(target, 'get_%s' % option, None)
            value = getattr(target, option) if accessor is None else accessor()
            # Section keys are created lazily, so handlers that set no
            # options contribute no section at all.
            result[handler.section_prefix][option] = value

    return result
def parse_configuration(
        distribution, command_options, ignore_option_errors=False):
    """Perform additional parsing of configuration options for a distribution.

    Returns the list of option handlers that were used.

    :param Distribution distribution:
    :param dict command_options:
    :param bool ignore_option_errors: Whether to silently ignore
        options whose values could not be resolved (e.g. due to
        exceptions in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: list
    """
    handlers = [
        ConfigMetadataHandler(
            distribution.metadata, command_options, ignore_option_errors),
        ConfigOptionsHandler(
            distribution, command_options, ignore_option_errors),
    ]
    # Metadata is parsed first, then the options section.
    for handler in handlers:
        handler.parse()
    return handlers
class ConfigHandler(object):
    """Handles metadata supplied in configuration files."""

    section_prefix = None
    """Prefix for config sections handled by this handler.
    Must be provided by class heirs.

    """

    aliases = {}
    """Options aliases.
    For compatibility with various packages. E.g.: d2to1 and pbr.
    Note: `-` in keys is replaced with `_` by config parser.

    """

    def __init__(self, target_obj, options, ignore_option_errors=False):
        sections = {}

        # Keep only the sections this handler is responsible for, with
        # the prefix stripped ('' for the bare [prefix] section itself).
        section_prefix = self.section_prefix
        for section_name, section_options in options.items():
            if not section_name.startswith(section_prefix):
                continue

            section_name = section_name.replace(section_prefix, '').strip('.')
            sections[section_name] = section_options

        self.ignore_option_errors = ignore_option_errors
        self.target_obj = target_obj
        self.sections = sections
        # Names of options actually written to target_obj (see
        # configuration_to_dict).
        self.set_options = []

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        raise NotImplementedError(
            '%s must provide .parsers property' % self.__class__.__name__)

    def __setitem__(self, option_name, value):
        # Sentinel distinguishing "attribute missing" from falsy values.
        unknown = tuple()
        target_obj = self.target_obj

        # Translate alias into real name.
        option_name = self.aliases.get(option_name, option_name)

        current_value = getattr(target_obj, option_name, unknown)

        if current_value is unknown:
            raise KeyError(option_name)

        if current_value:
            # Already inhabited. Skipping.
            return

        skip_option = False
        parser = self.parsers.get(option_name)
        if parser:
            try:
                value = parser(value)

            except Exception:
                # In lenient mode a failed parser just drops the option.
                skip_option = True
                if not self.ignore_option_errors:
                    raise

        if skip_option:
            return

        # Honour a set_<option>() method on the target when present.
        setter = getattr(target_obj, 'set_%s' % option_name, None)
        if setter is None:
            setattr(target_obj, option_name, value)
        else:
            setter(value)

        self.set_options.append(option_name)

    @classmethod
    def _parse_list(cls, value, separator=','):
        """Represents value as a list.

        Value is split either by separator (defaults to comma) or by lines.

        :param value:
        :param separator: List items separator character.
        :rtype: list
        """
        if isinstance(value, list):  # _get_parser_compound case
            return value

        # Multi-line values split on newlines; single-line on separator.
        if '\n' in value:
            value = value.splitlines()
        else:
            value = value.split(separator)

        return [chunk.strip() for chunk in value if chunk.strip()]

    @classmethod
    def _parse_dict(cls, value):
        """Represents value as a dict.

        :param value:
        :rtype: dict
        """
        separator = '='
        result = {}
        for line in cls._parse_list(value):
            key, sep, val = line.partition(separator)
            if sep != separator:
                raise DistutilsOptionError(
                    'Unable to parse option value to dict: %s' % value)
            result[key.strip()] = val.strip()

        return result

    @classmethod
    def _parse_bool(cls, value):
        """Represents value as boolean.

        :param value:
        :rtype: bool
        """
        value = value.lower()
        # Anything else ('0', 'false', 'no', ...) is False.
        return value in ('1', 'true', 'yes')

    @classmethod
    def _parse_file(cls, value):
        """Represents value as a string, allowing including text
        from nearest files using `file:` directive.

        Directive is sandboxed and won't reach anything outside
        directory with setup.py.

        Examples:
            include: LICENSE
            include: src/file.txt

        :param str value:
        :rtype: str
        """
        if not isinstance(value, string_types):
            return value

        include_directive = 'file:'
        if not value.startswith(include_directive):
            return value

        current_directory = os.getcwd()

        filepath = value.replace(include_directive, '').strip()
        filepath = os.path.abspath(filepath)

        # NOTE(review): a plain prefix check; a sibling directory whose
        # name merely starts with the cwd path would also pass -- confirm
        # whether a separator-aware check is wanted.
        if not filepath.startswith(current_directory):
            raise DistutilsOptionError(
                '`file:` directive can not access %s' % filepath)

        if os.path.isfile(filepath):
            with io.open(filepath, encoding='utf-8') as f:
                value = f.read()

        return value

    @classmethod
    def _parse_attr(cls, value):
        """Represents value as a module attribute.

        Examples:
            attr: package.attr
            attr: package.module.attr

        :param str value:
        :rtype: str
        """
        attr_directive = 'attr:'
        if not value.startswith(attr_directive):
            return value

        attrs_path = value.replace(attr_directive, '').strip().split('.')
        attr_name = attrs_path.pop()

        module_name = '.'.join(attrs_path)
        module_name = module_name or '__init__'

        # Make the project directory importable while resolving the attr.
        sys.path.insert(0, os.getcwd())
        try:
            module = import_module(module_name)
            value = getattr(module, attr_name)

        finally:
            # NOTE(review): drops the first sys.path entry, which may not
            # be the one inserted above if the import mutated sys.path.
            sys.path = sys.path[1:]

        return value

    @classmethod
    def _get_parser_compound(cls, *parse_methods):
        """Returns parser function to represents value as a list.

        Parses a value applying given methods one after another.

        :param parse_methods:
        :rtype: callable
        """
        def parse(value):
            parsed = value

            for method in parse_methods:
                parsed = method(parsed)

            return parsed

        return parse

    @classmethod
    def _parse_section_to_dict(cls, section_options, values_parser=None):
        """Parses section options into a dictionary.

        Optionally applies a given parser to values.

        :param dict section_options:
        :param callable values_parser:
        :rtype: dict
        """
        value = {}
        values_parser = values_parser or (lambda val: val)
        for key, (_, val) in section_options.items():
            value[key] = values_parser(val)
        return value

    def parse_section(self, section_options):
        """Parses configuration file section.

        :param dict section_options:
        """
        for (name, (_, value)) in section_options.items():
            try:
                self[name] = value

            except KeyError:
                pass  # Keep silent for a new option may appear anytime.

    def parse(self):
        """Parses configuration file items from one
        or more related sections.

        """
        for section_name, section_options in self.sections.items():

            method_postfix = ''
            if section_name:  # [section.option] variant
                method_postfix = '_%s' % section_name

            section_parser_method = getattr(
                self,
                # Dots in section names are translated into dunderscores.
                ('parse_section%s' % method_postfix).replace('.', '__'),
                None)

            if section_parser_method is None:
                raise DistutilsOptionError(
                    'Unsupported distribution option section: [%s.%s]' % (
                        self.section_prefix, section_name))

            section_parser_method(section_options)
class ConfigMetadataHandler(ConfigHandler):
    """Handles the [metadata] section, writing onto distribution.metadata."""

    section_prefix = 'metadata'

    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also uses `metadata` section.

    """

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_file = self._parse_file

        return {
            'platforms': parse_list,
            'keywords': parse_list,
            'provides': parse_list,
            'requires': parse_list,
            'obsoletes': parse_list,
            # classifiers may come inline or via a file: directive.
            'classifiers': self._get_parser_compound(parse_file, parse_list),
            'license': parse_file,
            'description': parse_file,
            'long_description': parse_file,
            'version': self._parse_version,
        }

    def _parse_version(self, value):
        """Parses `version` option value.

        Supports plain strings, attr: directives resolving to a string,
        a callable, or an iterable of version components.

        :param value:
        :rtype: str
        """
        version = self._parse_attr(value)

        if callable(version):
            version = version()

        if not isinstance(version, string_types):
            if hasattr(version, '__iter__'):
                # e.g. a (1, 2, 3) version tuple -> '1.2.3'.
                version = '.'.join(map(str, version))
            else:
                version = '%s' % version

        return version
class ConfigOptionsHandler(ConfigHandler):
    """Handles the [options] section (and its sub-sections), writing onto
    the Distribution object itself."""

    section_prefix = 'options'

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        # Requirement lists use ';' since requirement specifiers may
        # themselves contain commas.
        parse_list_semicolon = partial(self._parse_list, separator=';')
        parse_bool = self._parse_bool
        parse_dict = self._parse_dict

        return {
            'zip_safe': parse_bool,
            'use_2to3': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': parse_dict,
            'use_2to3_fixers': parse_list,
            'use_2to3_exclude_fixers': parse_list,
            'convert_2to3_doctests': parse_list,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': parse_list,
            'install_requires': parse_list_semicolon,
            'setup_requires': parse_list_semicolon,
            'tests_require': parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file,
            'py_modules': parse_list,
        }

    def _parse_packages(self, value):
        """Parses `packages` option value.

        Supports either an explicit list or the `find:` directive, which
        delegates to setuptools.find_packages().

        :param value:
        :rtype: list
        """
        find_directive = 'find:'

        if not value.startswith(find_directive):
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {}))

        from setuptools import find_packages

        return find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(
            section_options, self._parse_list)

        # Only the keyword arguments find_packages() accepts are kept.
        valid_keys = ['where', 'include', 'exclude']

        find_kwargs = dict(
            [(k, v) for k, v in section_data.items() if k in valid_keys and v])

        where = find_kwargs.get('where')
        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options):
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        # Shared by package_data and exclude_package_data: the '*' key
        # maps to the '' (all packages) key expected by setuptools.
        parsed = self._parse_section_to_dict(section_options, self._parse_list)

        root = parsed.get('*')
        if root:
            parsed[''] = root
            del parsed['*']

        return parsed

    def parse_section_package_data(self, section_options):
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options):
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = self._parse_package_data(
            section_options)

    def parse_section_extras_require(self, section_options):
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        parse_list = partial(self._parse_list, separator=';')
        self['extras_require'] = self._parse_section_to_dict(
            section_options, parse_list)
| apache-2.0 |
alviano/wasp | tests/sat/Models/c677.150.UNSAT.dimacs.test.py | 5 | 9128 | input = """
c num blocks = 1
c num vars = 150
c minblockids[0] = 1
c maxblockids[0] = 150
p cnf 150 677
138 149 113 0
122 -73 -7 0
108 33 145 0
-21 -138 134 0
80 63 -59 0
-24 98 129 0
57 -135 -148 0
-35 -20 106 0
-127 106 148 0
-28 -29 66 0
-147 57 89 0
-19 3 -47 0
112 43 70 0
138 2 -134 0
-72 122 -119 0
91 76 -79 0
116 -56 -114 0
109 69 116 0
34 -46 65 0
25 -38 76 0
-83 35 -59 0
44 -142 113 0
-141 -103 -18 0
-27 118 68 0
-142 -141 -81 0
-30 149 -35 0
-25 -75 -68 0
-108 -36 14 0
-48 -78 -18 0
-146 -58 149 0
82 144 31 0
20 34 123 0
146 -39 -56 0
5 16 19 0
144 106 138 0
45 31 -149 0
-102 62 117 0
-101 64 -87 0
11 -130 -2 0
74 -23 72 0
-21 -40 -73 0
-21 5 60 0
-73 -64 -102 0
-102 -69 -140 0
-73 -116 86 0
-52 11 12 0
-88 -93 -139 0
-49 59 -30 0
25 145 -6 0
-99 -23 8 0
55 128 -123 0
-79 130 86 0
147 -123 -7 0
-44 112 -46 0
-113 25 -85 0
-52 85 -78 0
-83 -94 78 0
126 101 -108 0
-8 -120 83 0
66 -136 -109 0
-67 18 -116 0
28 -103 42 0
11 102 -74 0
119 75 41 0
-139 -32 63 0
53 -115 -114 0
-4 92 -105 0
76 -15 -132 0
-63 -40 50 0
-129 71 -43 0
115 131 40 0
-130 5 -148 0
8 42 105 0
71 44 -78 0
-65 -142 -145 0
56 -16 -35 0
148 8 92 0
-49 -117 77 0
-80 24 -25 0
150 -99 -32 0
-143 66 -16 0
110 146 98 0
19 -14 1 0
-33 122 -139 0
24 -36 94 0
-70 -57 65 0
-59 125 -48 0
-103 118 108 0
132 -24 90 0
-122 104 -23 0
-24 110 -121 0
-115 22 146 0
-46 -113 -4 0
-100 -39 131 0
56 31 39 0
87 -6 -148 0
122 -38 148 0
-117 -110 -140 0
-42 -6 -116 0
-17 -114 -73 0
6 -79 -135 0
-134 21 135 0
29 -147 84 0
-120 121 73 0
37 -6 94 0
-143 139 -61 0
-147 -62 35 0
45 -114 86 0
25 146 75 0
8 -119 135 0
40 -53 37 0
94 76 -49 0
46 -74 -142 0
-61 -88 -74 0
113 78 74 0
70 1 140 0
-117 74 85 0
40 -9 -50 0
-76 12 3 0
40 -69 -11 0
-21 -91 -111 0
115 104 -84 0
-149 -69 -57 0
-59 89 -13 0
53 18 -135 0
98 59 134 0
122 92 -96 0
145 -70 -107 0
143 -4 -137 0
11 103 51 0
85 55 34 0
-97 45 137 0
-120 40 -25 0
52 14 140 0
-64 32 -87 0
-98 20 143 0
-12 62 -70 0
-49 116 17 0
116 144 -123 0
-79 -93 144 0
23 -36 -56 0
29 53 18 0
147 126 -83 0
-106 -57 -79 0
140 112 92 0
44 -18 120 0
-55 71 -93 0
48 -90 149 0
-87 64 -127 0
4 -73 -1 0
100 -2 60 0
134 -63 -55 0
-113 45 -141 0
9 8 119 0
-12 126 95 0
87 20 -40 0
70 16 57 0
59 22 -29 0
-52 32 144 0
65 -64 -89 0
-6 65 87 0
-116 -4 -29 0
9 -94 117 0
-32 150 117 0
-108 125 143 0
-58 132 -137 0
92 149 -61 0
-40 24 37 0
-51 23 -112 0
31 26 -146 0
-33 -32 -40 0
-146 20 -138 0
35 -87 4 0
-42 -112 -126 0
-71 139 -72 0
-70 -62 77 0
149 45 88 0
114 63 -131 0
87 -17 115 0
76 5 -100 0
-145 -88 141 0
-107 35 -16 0
140 -46 -50 0
-31 42 89 0
-78 114 8 0
9 -123 139 0
-111 73 -31 0
67 34 86 0
-39 -84 -136 0
27 -77 -119 0
-138 -99 9 0
53 57 -8 0
-112 -145 -46 0
59 -75 115 0
-45 -107 116 0
59 -88 -124 0
-57 -8 130 0
-123 108 -94 0
147 59 26 0
7 148 124 0
73 104 71 0
42 -1 -77 0
-33 -32 74 0
108 65 -116 0
-130 -89 88 0
-22 24 -76 0
-93 -83 13 0
-66 -30 -44 0
-30 -76 101 0
-112 29 -63 0
91 47 76 0
10 -87 9 0
53 -133 74 0
-56 -105 128 0
130 54 -89 0
-116 124 -20 0
-92 38 -10 0
-85 -24 -93 0
14 -147 -80 0
-67 77 -96 0
-48 103 36 0
-89 9 -122 0
63 137 -148 0
-43 81 -110 0
-121 100 -125 0
-36 21 -57 0
-55 -29 99 0
65 35 -95 0
104 52 144 0
-42 30 -49 0
26 3 -126 0
67 -109 -134 0
-87 5 31 0
76 138 24 0
62 103 -33 0
15 79 -9 0
-35 -27 96 0
-40 -95 45 0
-33 86 130 0
-21 144 91 0
124 15 72 0
125 98 29 0
40 -2 -68 0
-34 73 68 0
59 45 25 0
-99 20 126 0
115 119 -47 0
-14 -21 101 0
60 -85 137 0
144 135 87 0
-7 105 94 0
23 -60 -38 0
86 -39 -139 0
18 -7 -48 0
-92 118 -93 0
-121 -40 -15 0
22 -91 125 0
27 19 47 0
104 78 44 0
-54 -7 -124 0
67 -91 113 0
64 125 139 0
112 79 94 0
-99 98 -25 0
-109 90 122 0
-126 -103 -1 0
76 93 124 0
-66 11 -108 0
-127 147 92 0
-90 138 -101 0
-142 -22 95 0
33 7 42 0
149 -94 -104 0
-94 51 42 0
-105 -83 -22 0
97 48 -35 0
-26 -128 55 0
9 94 50 0
133 122 76 0
-76 89 88 0
-53 83 -16 0
150 -4 -134 0
13 146 -78 0
-7 34 -45 0
147 78 -4 0
-140 130 105 0
-98 -24 67 0
-68 -95 2 0
41 -34 -75 0
150 -105 -144 0
127 -54 93 0
-94 121 -44 0
31 -128 60 0
59 27 -70 0
66 118 -106 0
-107 106 -77 0
-144 -93 140 0
88 -62 -124 0
1 -19 58 0
-144 -48 -60 0
-54 -103 -6 0
-28 -74 -58 0
130 65 58 0
35 60 -10 0
-140 -95 133 0
-122 -80 31 0
-101 89 -27 0
147 -3 68 0
-111 -99 23 0
128 -103 -17 0
-48 -140 82 0
107 -13 -83 0
-72 -1 -73 0
103 67 106 0
86 27 12 0
-44 12 -42 0
77 33 50 0
-125 -142 -114 0
92 10 -76 0
-4 -150 -45 0
-53 82 -122 0
109 79 -129 0
117 49 95 0
90 -85 -9 0
-4 -89 53 0
129 40 -149 0
-7 -136 -94 0
73 -116 20 0
-102 71 10 0
-72 74 -120 0
12 -148 -39 0
-90 -40 115 0
5 -36 -142 0
133 17 115 0
53 -49 108 0
-93 -11 -55 0
7 81 -93 0
88 27 -33 0
97 62 -123 0
-134 -60 83 0
64 132 14 0
-14 55 104 0
46 58 60 0
-89 106 6 0
18 -78 66 0
3 -86 84 0
-135 -128 -20 0
39 -117 108 0
-102 -129 90 0
47 97 -91 0
-38 -61 -82 0
117 138 135 0
-82 -79 -116 0
92 -49 94 0
-21 67 66 0
-67 125 -21 0
147 -23 94 0
110 28 -99 0
-92 23 -13 0
87 -40 70 0
-43 -130 -81 0
-5 88 -9 0
-74 77 5 0
-93 -42 -144 0
-135 19 -105 0
135 128 -87 0
124 135 -105 0
-104 -7 -120 0
-106 65 -119 0
34 -61 13 0
51 107 135 0
-92 -6 -79 0
-86 61 129 0
-135 85 -87 0
-94 141 70 0
95 144 -100 0
69 -147 28 0
92 -69 126 0
134 18 -137 0
148 130 -104 0
-72 -105 103 0
-113 138 17 0
15 118 -140 0
5 34 99 0
37 33 147 0
34 6 -13 0
-33 -87 135 0
-3 83 120 0
72 -125 131 0
-14 55 -21 0
-97 -25 -7 0
25 -145 -138 0
-5 -27 102 0
-36 -2 118 0
122 133 49 0
-122 -139 120 0
-71 -7 -111 0
34 50 20 0
59 47 -82 0
-76 87 -104 0
19 -72 -110 0
-143 91 -87 0
-56 65 -15 0
-14 -11 -27 0
120 101 -19 0
51 -60 7 0
-57 48 122 0
84 106 -43 0
-18 84 23 0
83 -116 -55 0
10 2 -81 0
55 80 27 0
-92 100 121 0
99 -78 -90 0
-87 58 64 0
9 19 -128 0
89 -12 -84 0
93 114 -38 0
106 131 -57 0
-130 -4 96 0
-85 137 112 0
-147 69 -88 0
31 -20 27 0
-145 78 141 0
113 -75 5 0
62 -119 -94 0
96 133 -52 0
-48 -41 -100 0
65 -27 -53 0
-73 -39 97 0
-102 -113 138 0
125 -75 141 0
-24 71 -55 0
78 -7 20 0
-26 103 117 0
33 -23 60 0
-17 125 121 0
16 -132 63 0
-134 112 57 0
36 -27 -33 0
32 64 -92 0
119 89 50 0
-78 112 50 0
-80 46 -99 0
-58 128 140 0
-89 -146 -96 0
-67 -24 41 0
-38 -59 -58 0
-6 125 -138 0
80 -76 -135 0
36 78 105 0
-81 -132 -71 0
92 -91 10 0
129 64 -127 0
-53 28 -75 0
98 114 147 0
124 -103 31 0
-12 115 -92 0
-145 141 112 0
82 79 -128 0
149 -18 1 0
110 91 -2 0
-125 118 39 0
46 135 -82 0
-114 24 122 0
-31 -123 -95 0
-53 -122 -37 0
-120 8 129 0
-108 -56 -113 0
16 -144 -132 0
-109 -132 -14 0
20 15 -30 0
-76 -53 113 0
46 -40 118 0
115 140 -65 0
147 -11 -23 0
18 -8 144 0
-39 21 -40 0
-50 -31 -20 0
150 -27 28 0
145 -137 12 0
-30 -88 109 0
-43 100 132 0
-28 24 57 0
22 -89 43 0
69 -51 138 0
91 -64 148 0
66 -16 -19 0
15 -114 97 0
41 143 -86 0
28 -108 -112 0
134 -120 -148 0
62 -128 -22 0
-25 26 -142 0
44 -88 2 0
-145 146 1 0
-92 34 60 0
-91 31 5 0
122 -145 16 0
15 -70 -102 0
-134 -101 124 0
138 67 -29 0
23 -28 -104 0
-84 -59 -145 0
129 113 -111 0
86 40 64 0
-106 -67 116 0
79 104 120 0
-150 -127 -28 0
-131 -112 95 0
25 146 79 0
-14 89 123 0
21 -17 82 0
16 -32 56 0
-102 59 -8 0
78 53 126 0
33 -147 54 0
62 -90 49 0
-29 -141 -66 0
55 -70 -54 0
77 -28 -46 0
-113 138 -89 0
100 2 41 0
-65 49 90 0
71 -123 30 0
41 -126 66 0
80 6 7 0
-142 -110 13 0
-2 -115 41 0
-88 -35 124 0
-144 87 -75 0
-41 63 -135 0
82 126 -66 0
-71 -82 44 0
120 -108 33 0
32 -1 -16 0
-92 -52 111 0
-82 -143 -84 0
115 148 141 0
70 -131 102 0
62 -65 69 0
-9 -57 59 0
87 -136 -31 0
-107 -125 60 0
-82 58 137 0
52 -136 -7 0
-66 -136 124 0
-59 44 -98 0
54 110 -94 0
17 -45 43 0
-101 130 87 0
-133 -7 52 0
130 108 -25 0
-75 71 32 0
12 40 90 0
48 -16 113 0
-22 107 -115 0
53 -30 -141 0
-24 -34 -122 0
105 42 62 0
1 129 -32 0
111 -42 -43 0
-77 96 141 0
98 -95 111 0
50 8 145 0
137 -11 41 0
-62 146 -100 0
-94 79 -1 0
-110 90 -126 0
2 5 -77 0
93 -64 47 0
-65 29 -73 0
-130 60 42 0
-147 -5 -53 0
139 -88 -75 0
62 -24 105 0
35 -79 -50 0
123 -139 -134 0
147 47 -78 0
55 -69 -136 0
77 73 -30 0
-120 -17 -16 0
111 -41 138 0
-42 148 111 0
-101 96 -119 0
39 -136 -73 0
-126 -13 -116 0
32 83 -15 0
55 24 1 0
-113 115 -35 0
-48 126 -134 0
-138 -51 142 0
58 -56 145 0
-101 106 29 0
1 126 -104 0
-107 121 -120 0
-44 14 135 0
-16 -90 71 0
-80 -118 29 0
58 97 -90 0
-38 -37 10 0
-144 -79 4 0
-148 -129 -83 0
76 -136 21 0
29 -146 -50 0
22 52 -47 0
-35 -131 -77 0
-82 -143 -121 0
124 81 131 0
-82 6 -20 0
40 -21 105 0
139 -10 83 0
-141 -119 -135 0
-63 74 -111 0
139 -29 -93 0
-113 -26 -87 0
-82 -19 48 0
70 36 -8 0
9 -74 -93 0
-75 -134 -9 0
-74 47 -21 0
-107 76 56 0
-100 -43 147 0
-147 -89 66 0
58 101 -56 0
147 -132 -94 0
63 50 90 0
-108 8 -29 0
-89 -4 -43 0
83 -45 -14 0
-37 112 -14 0
13 -76 -31 0
-69 -106 29 0
64 -54 -131 0
-19 -2 96 0
-37 93 67 0
35 -74 97 0
22 -3 25 0
-149 -30 42 0
-40 -93 -135 0
-138 -9 102 0
-29 69 -123 0
39 18 106 0
-33 -57 121 0
118 -15 -124 0
5 -3 -102 0
-96 29 -26 0
6 30 -147 0
-94 -20 146 0
-136 -15 25 0
-66 34 -20 0
22 8 96 0
67 -128 1 0
-20 -59 -150 0
28 137 -95 0
95 -85 29 0
23 6 -148 0
18 -130 24 0
-41 -7 -54 0
-150 -35 -109 0
-15 55 1 0
-149 76 53 0
-108 102 -44 0
-40 77 -148 0
115 -5 139 0
122 102 144 0
126 55 -83 0
1 -58 124 0
-82 -122 85 0
-113 -67 -47 0
-63 141 -30 0
139 17 -49 0
64 13 -145 0
25 106 50 0
-127 119 10 0
72 139 55 0
-102 147 -9 0
"""
output = "UNSAT"
| apache-2.0 |
wujianguo/proxy | shadowsocks/asyncdns.py | 7 | 18340 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop
# Seconds between sweeps of expired entries in the resolver's LRU cache.
CACHE_SWEEP_INTERVAL = 30
# One DNS label: 1-63 chars of [A-Za-z0-9-], not starting or ending with '-'.
# Anchored with $ only; match() anchors the start implicitly.
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
# NOTE(review): project helper — presumably patches missing socket functions
# (e.g. inet_pton on some platforms); confirm in shadowsocks.common.
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# DNS record type and class codes (RFC 1035; AAAA from RFC 3596).
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
    """Encode hostname *address* (bytes) into DNS wire format.

    Each dot-separated label is emitted as a length byte followed by the
    label, terminated by a zero byte. Returns None when any label exceeds
    the 63-byte limit of RFC 1035.
    """
    parts = []
    for label in address.strip(b'.').split(b'.'):
        if len(label) > 63:
            return None
        parts.append(common.chr(len(label)))
        parts.append(label)
    parts.append(b'\0')
    return b''.join(parts)
def build_request(address, qtype):
    """Build a complete DNS query packet for *address* with record *qtype*.

    Layout: random 2-byte ID, fixed header (RD=1, QDCOUNT=1), encoded
    question name, then QTYPE/QCLASS.
    """
    # Flags byte 0x01 sets Recursion Desired; exactly one question follows.
    header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
    question = build_address(address) + struct.pack('!HH', qtype, QCLASS_IN)
    return os.urandom(2) + header + question
def parse_ip(addrtype, data, length, offset):
    """Decode the RDATA of a resource record according to *addrtype*.

    A/AAAA records become dotted/colon text addresses, CNAME/NS records are
    decoded as domain names, and anything else is returned as raw bytes.
    """
    rdata = data[offset:offset + length]
    if addrtype == QTYPE_A:
        return socket.inet_ntop(socket.AF_INET, rdata)
    if addrtype == QTYPE_AAAA:
        return socket.inet_ntop(socket.AF_INET6, rdata)
    if addrtype in (QTYPE_CNAME, QTYPE_NS):
        return parse_name(data, offset)[1]
    return rdata
def parse_name(data, offset):
    """Decode a (possibly compressed) domain name starting at *offset*.

    Returns (bytes_consumed, name) where name is the labels joined by b'.'.
    Compression pointers (RFC 1035 4.1.4) are followed recursively; a
    pointer always terminates the name, so only 2 bytes are consumed for it.
    """
    p = offset
    labels = []
    l = common.ord(data[p])
    while l > 0:
        # Two high bits set in the length byte mark a 14-bit pointer.
        if (l & (128 + 64)) == (128 + 64):
            # pointer
            pointer = struct.unpack('!H', data[p:p + 2])[0]
            pointer &= 0x3FFF
            r = parse_name(data, pointer)
            labels.append(r[1])
            p += 2
            # pointer is the end
            return p - offset, b'.'.join(labels)
        else:
            # Ordinary label: length byte followed by l bytes of text.
            labels.append(data[p + 1:p + 1 + l])
            p += 1 + l
            l = common.ord(data[p])
    # +1 accounts for the terminating zero-length byte.
    return p - offset + 1, b'.'.join(labels)
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
    """Parse one resource record (or question entry) at *offset*.

    Returns (bytes_consumed, (name, ip, type, class, ttl)); for question
    entries ip and ttl are None.
    """
    nlen, name = parse_name(data, offset)
    if question:
        # Question entries carry only QTYPE and QCLASS after the name.
        record_type, record_class = struct.unpack(
            '!HH', data[offset + nlen:offset + nlen + 4]
        )
        # Consistency fix: return a 5-tuple like the answer branch. The
        # original returned a 6-tuple with a trailing always-None element;
        # callers only index elements [0..3], so this is safe.
        return nlen + 4, (name, None, record_type, record_class, None)
    record_type, record_class, record_ttl, record_rdlength = struct.unpack(
        '!HHiH', data[offset + nlen:offset + nlen + 10]
    )
    ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
    return nlen + 10 + record_rdlength, \
        (name, ip, record_type, record_class, record_ttl)
def parse_header(data):
    """Parse the fixed 12-byte DNS header at the start of *data*.

    Returns (id, qr, tc, ra, rcode, qdcount, ancount, nscount, arcount),
    where the flag fields keep their raw masked values, or None when the
    packet is shorter than a header.
    """
    if len(data) < 12:
        return None
    (res_id, flags1, flags2, res_qdcount,
     res_ancount, res_nscount, res_arcount) = struct.unpack('!HBBHHHH',
                                                            data[:12])
    res_qr = flags1 & 128    # response flag
    res_tc = flags1 & 2      # truncated flag
    res_ra = flags2 & 128    # recursion-available flag
    res_rcode = flags2 & 15  # response code
    # assert res_tc == 0
    # assert res_rcode in [0, 3]
    return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
            res_ancount, res_nscount, res_arcount)
def parse_response(data):
    """Parse a full DNS response packet into a DNSResponse, or None on error.

    Walks the question, answer, authority and additional sections in order,
    advancing *offset* by each record's consumed length. Authority and
    additional records are parsed (to keep offsets consistent) but discarded.
    """
    try:
        if len(data) >= 12:
            header = parse_header(data)
            if not header:
                return None
            res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
                res_ancount, res_nscount, res_arcount = header
            qds = []  # question entries
            ans = []  # answer records
            offset = 12  # records start right after the fixed header
            for i in range(0, res_qdcount):
                l, r = parse_record(data, offset, True)
                offset += l
                if r:
                    qds.append(r)
            for i in range(0, res_ancount):
                l, r = parse_record(data, offset)
                offset += l
                if r:
                    ans.append(r)
            # Authority and additional sections: consumed but not kept.
            for i in range(0, res_nscount):
                l, r = parse_record(data, offset)
                offset += l
            for i in range(0, res_arcount):
                l, r = parse_record(data, offset)
                offset += l
            response = DNSResponse()
            if qds:
                # The queried hostname is the name of the first question.
                response.hostname = qds[0][0]
            for an in qds:
                response.questions.append((an[1], an[2], an[3]))
            for an in ans:
                response.answers.append((an[1], an[2], an[3]))
            return response
    except Exception as e:
        # Malformed packets must not crash the event loop; log and drop.
        import traceback
        traceback.print_exc()
        logging.error(e)
        return None
def is_ip(address):
    """Return the address family of *address* if it is a literal IP.

    Accepts str or UTF-8 bytes; returns socket.AF_INET or socket.AF_INET6,
    or False when the text is not a valid IP address.
    """
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            # Decode inside the try: UnicodeDecodeError is a ValueError, so
            # undecodable bytes fall through to False like any bad address.
            text = address if type(address) == str else address.decode('utf8')
            socket.inet_pton(family, text)
        except (TypeError, ValueError, OSError, IOError):
            continue
        return family
    return False
def is_valid_hostname(hostname):
    """Validate *hostname* (bytes) against RFC 1035 syntax limits.

    Returns True when the whole name fits in 255 bytes and every
    dot-separated label matches VALID_HOSTNAME; a single trailing dot
    (fully-qualified form) is allowed.
    """
    if len(hostname) > 255:
        return False
    # Bugfix: the original used `hostname[-1] == b'.'`, which on Python 3
    # compares an int (byte value) with bytes and is always False, so the
    # trailing dot was never stripped and FQDNs like b'example.com.' were
    # rejected. endswith() behaves correctly on both Python 2 and 3.
    if hostname.endswith(b'.'):
        hostname = hostname[:-1]
    return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
    """Parsed result of a DNS response packet.

    Holds the queried hostname plus the question and answer sections,
    each entry being an (addr, type, class) tuple.
    """

    def __init__(self):
        self.hostname = None
        self.questions = []  # each: (addr, type, class)
        self.answers = []  # each: (addr, type, class)

    def __str__(self):
        return '{0}: {1}'.format(self.hostname, self.answers)
# Per-hostname lookup state: an A query is tried first (STATUS_IPV4), then
# the resolver falls back to an AAAA query (STATUS_IPV6).
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
    """Non-blocking DNS resolver driven by the shadowsocks event loop.

    Resolution order: literal IPs and /etc/hosts entries are answered
    synchronously; otherwise an A query (falling back to AAAA) is sent over
    UDP to every configured nameserver and the callback fires when a
    response arrives. Results are cached in an LRU cache for 300 seconds.
    """

    def __init__(self):
        self._loop = None
        self._hosts = {}  # hostname (bytes) -> ip, loaded from hosts file
        self._hostname_status = {}  # hostname -> STATUS_IPV4 / STATUS_IPV6
        self._hostname_to_cb = {}  # hostname -> list of pending callbacks
        self._cb_to_hostname = {}  # reverse map, for remove_callback()
        self._cache = lru_cache.LRUCache(timeout=300)
        self._last_time = time.time()  # last cache sweep timestamp
        self._sock = None  # UDP socket shared by all queries
        self._servers = None  # nameserver IPs (str)
        self._parse_resolv()
        self._parse_hosts()
        # TODO monitor hosts change and reload hosts
        # TODO parse /etc/gai.conf and follow its rules

    def _parse_resolv(self):
        """Load IPv4 nameservers from /etc/resolv.conf, defaulting to
        Google public DNS when none are found or the file is unreadable."""
        self._servers = []
        try:
            with open('/etc/resolv.conf', 'rb') as f:
                content = f.readlines()
                for line in content:
                    line = line.strip()
                    if line:
                        if line.startswith(b'nameserver'):
                            parts = line.split()
                            if len(parts) >= 2:
                                server = parts[1]
                                # Only IPv4 servers: the query socket is
                                # AF_INET (see add_to_loop's TODO).
                                if is_ip(server) == socket.AF_INET:
                                    if type(server) != str:
                                        server = server.decode('utf8')
                                    self._servers.append(server)
        except IOError:
            pass
        if not self._servers:
            self._servers = ['8.8.4.4', '8.8.8.8']

    def _parse_hosts(self):
        """Load static hostname->IP mappings from the platform hosts file;
        fall back to just localhost when it cannot be read."""
        etc_path = '/etc/hosts'
        if 'WINDIR' in os.environ:
            etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
        try:
            with open(etc_path, 'rb') as f:
                for line in f.readlines():
                    line = line.strip()
                    parts = line.split()
                    if len(parts) >= 2:
                        ip = parts[0]
                        if is_ip(ip):
                            # One line can map several hostnames to one IP.
                            for i in range(1, len(parts)):
                                hostname = parts[i]
                                if hostname:
                                    self._hosts[hostname] = ip
        except IOError:
            self._hosts['localhost'] = '127.0.0.1'

    def add_to_loop(self, loop, ref=False):
        """Create the UDP socket and register it (and our event handler)
        with *loop*. May only be called once per resolver."""
        if self._loop:
            raise Exception('already add to loop')
        self._loop = loop
        # TODO when dns server is IPv6
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                   socket.SOL_UDP)
        self._sock.setblocking(False)
        loop.add(self._sock, eventloop.POLL_IN)
        loop.add_handler(self.handle_events, ref=ref)

    def _call_callback(self, hostname, ip, error=None):
        """Fire every callback waiting on *hostname*, then drop all
        bookkeeping for it. ip=None with no error reports an unknown host."""
        callbacks = self._hostname_to_cb.get(hostname, [])
        for callback in callbacks:
            if callback in self._cb_to_hostname:
                del self._cb_to_hostname[callback]
            if ip or error:
                callback((hostname, ip), error)
            else:
                callback((hostname, None),
                         Exception('unknown hostname %s' % hostname))
        if hostname in self._hostname_to_cb:
            del self._hostname_to_cb[hostname]
        if hostname in self._hostname_status:
            del self._hostname_status[hostname]

    def _handle_data(self, data):
        """Process one raw response packet: cache and report a found IP, or
        fall back from an A query to an AAAA query."""
        response = parse_response(data)
        if response and response.hostname:
            hostname = response.hostname
            ip = None
            # Take the first A/AAAA answer in class IN.
            for answer in response.answers:
                if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
                        answer[2] == QCLASS_IN:
                    ip = answer[0]
                    break
            if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
                    == STATUS_IPV4:
                # A query yielded nothing: retry with AAAA once.
                self._hostname_status[hostname] = STATUS_IPV6
                self._send_req(hostname, QTYPE_AAAA)
            else:
                if ip:
                    self._cache[hostname] = ip
                    self._call_callback(hostname, ip)
                elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
                    # AAAA also failed: report the hostname as unresolvable.
                    for question in response.questions:
                        if question[1] == QTYPE_AAAA:
                            self._call_callback(hostname, None)
                            break

    def handle_events(self, events):
        """Event-loop callback: read one response from our socket (or
        recreate it on error) and periodically sweep the cache."""
        for sock, fd, event in events:
            if sock != self._sock:
                continue
            if event & eventloop.POLL_ERR:
                # Socket broke: replace it and re-register with the loop.
                logging.error('dns socket err')
                self._loop.remove(self._sock)
                self._sock.close()
                # TODO when dns server is IPv6
                self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                           socket.SOL_UDP)
                self._sock.setblocking(False)
                self._loop.add(self._sock, eventloop.POLL_IN)
            else:
                data, addr = sock.recvfrom(1024)
                # Ignore datagrams that did not come from our nameservers.
                if addr[0] not in self._servers:
                    logging.warn('received a packet other than our dns')
                    break
                self._handle_data(data)
            # Only one event for our socket is processed per batch.
            break
        now = time.time()
        if now - self._last_time > CACHE_SWEEP_INTERVAL:
            self._cache.sweep()
            self._last_time = now

    def remove_callback(self, callback):
        """Detach *callback* from its pending lookup (e.g. when the caller
        goes away before the answer arrives)."""
        hostname = self._cb_to_hostname.get(callback)
        if hostname:
            del self._cb_to_hostname[callback]
            arr = self._hostname_to_cb.get(hostname, None)
            if arr:
                arr.remove(callback)
                if not arr:
                    # Last waiter gone: forget the whole lookup.
                    del self._hostname_to_cb[hostname]
                    if hostname in self._hostname_status:
                        del self._hostname_status[hostname]

    def _send_req(self, hostname, qtype):
        """Send one query for (*hostname*, *qtype*) to every nameserver."""
        req = build_request(hostname, qtype)
        for server in self._servers:
            logging.debug('resolving %s with type %d using server %s',
                          hostname, qtype, server)
            self._sock.sendto(req, (server, 53))

    def resolve(self, hostname, callback):
        """Resolve *hostname* and invoke callback((hostname, ip), error).

        Literal IPs, hosts-file entries and cached results are answered
        synchronously; otherwise the callback is queued and a DNS query is
        sent. Errors are reported via the callback's second argument.
        """
        if type(hostname) != bytes:
            hostname = hostname.encode('utf8')
        if not hostname:
            callback(None, Exception('empty hostname'))
        elif is_ip(hostname):
            callback((hostname, hostname), None)
        elif hostname in self._hosts:
            logging.debug('hit hosts: %s', hostname)
            ip = self._hosts[hostname]
            callback((hostname, ip), None)
        elif hostname in self._cache:
            logging.debug('hit cache: %s', hostname)
            ip = self._cache[hostname]
            callback((hostname, ip), None)
        else:
            if not is_valid_hostname(hostname):
                callback(None, Exception('invalid hostname: %s' % hostname))
                return
            arr = self._hostname_to_cb.get(hostname, None)
            if not arr:
                # First waiter for this hostname: start with an A query.
                self._hostname_status[hostname] = STATUS_IPV4
                self._send_req(hostname, QTYPE_A)
                self._hostname_to_cb[hostname] = [callback]
                self._cb_to_hostname[callback] = hostname
            else:
                # A lookup is already in flight; just join the waiters.
                arr.append(callback)
                # TODO send again only if waited too long
                self._send_req(hostname, QTYPE_A)

    def close(self):
        """Close the UDP socket; the resolver can no longer send queries."""
        if self._sock:
            self._sock.close()
            self._sock = None
def test():
    """Manual smoke test: resolve a batch of hostnames (valid, invalid and
    over-length) over the real network and print each result.

    A shared counter closes the resolver after the 9th callback fires,
    which lets loop.run() terminate.
    """
    dns_resolver = DNSResolver()
    loop = eventloop.EventLoop()
    dns_resolver.add_to_loop(loop, ref=True)

    global counter
    counter = 0

    def make_callback():
        # Each call returns a distinct callback object (asserted below) so
        # every resolve() registers its own waiter.
        global counter

        def callback(result, error):
            global counter
            # TODO: what can we assert?
            print(result, error)
            counter += 1
            if counter == 9:
                # All 9 lookups reported: tear down so loop.run() returns.
                loop.remove_handler(dns_resolver.handle_events)
                dns_resolver.close()
        a_callback = callback
        return a_callback

    assert(make_callback() != make_callback())

    dns_resolver.resolve(b'google.com', make_callback())
    dns_resolver.resolve('google.com', make_callback())
    dns_resolver.resolve('example.com', make_callback())
    dns_resolver.resolve('ipv6.google.com', make_callback())
    dns_resolver.resolve('www.facebook.com', make_callback())
    dns_resolver.resolve('ns2.google.com', make_callback())
    dns_resolver.resolve('invalid.@!#$%^&$@.hostname', make_callback())
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())

    loop.run()
if __name__ == '__main__':
test()
| apache-2.0 |
yeahwhat-mc/CloudBotLegacy | lib/bs4/tests/test_builder_registry.py | 485 | 5374 | """Tests of the builder registry."""
import unittest
from bs4 import BeautifulSoup
from bs4.builder import (
builder_registry as registry,
HTMLParserTreeBuilder,
TreeBuilderRegistry,
)
try:
from bs4.builder import HTML5TreeBuilder
HTML5LIB_PRESENT = True
except ImportError:
HTML5LIB_PRESENT = False
try:
from bs4.builder import (
LXMLTreeBuilderForXML,
LXMLTreeBuilder,
)
LXML_PRESENT = True
except ImportError:
LXML_PRESENT = False
class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered.

    All expectations branch on LXML_PRESENT / HTML5LIB_PRESENT so the suite
    passes regardless of which optional parsers are installed.
    """

    def test_combination(self):
        # Feature combinations resolve to the matching installed builder.
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        # Looking up by markup type alone prefers lxml, then html5lib,
        # then the stdlib parser.
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            # Without lxml there is no XML builder at all.
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        # Builders can be looked up by their library's name.
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")
class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general, using a fresh
    registry populated with throwaway builder classes."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        """Register and return a synthetic builder class advertising
        exactly *feature_list*."""
        name = 'Builder_' + '_'.join(feature_list)
        builder = type(name, (object,), {'features': feature_list})
        self.registry.register(builder)
        return builder

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # A featureless builder is invisible to feature lookups...
        self.assertIsNone(self.registry.lookup('foo'))

        # ...but a bare lookup finds it when it happens to be the only
        # registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        for feature in ('foo', 'bar'):
            self.assertEqual(self.registry.lookup(feature), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        self.builder_for_features('foo', 'bar')
        self.assertIsNone(self.registry.lookup('baz'))

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        self.builder_for_features('foo')
        newest = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), newest)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertIsNone(self.registry.lookup())

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        self.builder_for_features('foo')
        self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        self.builder_for_features('bar')
        self.builder_for_features('foo')

        # Two builders feature 'foo' and 'bar'; the one registered later
        # (with 'quux') wins.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # Exactly one builder features 'foo', 'bar' and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        self.builder_for_features('foo', 'bar')
        self.builder_for_features('foo', 'baz')
        self.assertIsNone(self.registry.lookup('bar', 'baz'))
| gpl-3.0 |
MicroPyramid/django-payu | payu/tests.py | 1 | 3386 | from django.test import TestCase
from django.conf import settings
from payu.models import *
from payu.gateway import (
get_hash,
check_hash,
get_webservice_hash,
payu_url,
post,
verify_payment,
check_payment,
capture_transaction,
refund_transaction,
cancel_transaction,
check_action_status,
cancel_refund_transaction,
)
setattr(settings, 'PAYU_MERCHANT_KEY', 'JBZaLc')
setattr(settings, 'PAYU_MERCHANT_SALT', 'GQs7yium')
class Sample(TestCase):
    """Trivial sanity-check test case."""

    def test_addition(self):
        total = 5 + 4
        self.assertEqual(total, 9)
class GetHash(TestCase):
    """Tests for payu.gateway.get_hash."""

    def test_get_hash(self):
        data = {'email': 'hello@micropyramid.com',
                'txnid': 123456,
                'amount': 300}
        response = get_hash(data)
        self.assertTrue(response)
        # Hash generation should still produce a value when an optional
        # field is None.
        data['email'] = None
        response = get_hash(data)
        self.assertTrue(response)
class CheckHash(TestCase):
    """Tests for payu.gateway.check_hash."""

    def setUp(self):
        # A transaction matching the txnid used in the test data below.
        self.transaction = Transaction.objects.create(transaction_id=123456,
                                                      amount=300)

    def test_check_hash(self):
        data = {'additionalCharges': 220,
                'txnid': 123456,
                'amount': 500,
                'discount': 50}
        # The payload carries no 'hash' entry, so verification is expected
        # to fail.
        response = check_hash(data)
        self.assertFalse(response)
class GetWebServiceHash(TestCase):
    """Tests for payu.gateway.get_webservice_hash."""

    def test_get_webservice_hash(self):
        data = {'key': 12345}
        response = get_webservice_hash(data)
        self.assertTrue(response)
class PayuUrl(TestCase):
    """Tests for payu.gateway.payu_url under each PAYU_MODE setting."""

    def test_payu_url(self):
        # Both recognised modes must yield a usable gateway URL.
        # Bugfix: the original stored the TEST-mode result in a typo'd local
        # ('reslut') and overwrote it before asserting, so TEST mode was
        # never actually checked.
        setattr(settings, 'PAYU_MODE', 'TEST')
        result = payu_url()
        self.assertTrue(result)
        setattr(settings, 'PAYU_MODE', 'LIVE')
        result = payu_url()
        self.assertTrue(result)
        # An unrecognised mode yields a falsy value.
        setattr(settings, 'PAYU_MODE', 'None')
        result = payu_url()
        self.assertFalse(result)
class Post(TestCase):
    """Tests for payu.gateway.post."""

    def get_trans(self):
        # Helper (deliberately not setUp): only some paths need a stored
        # transaction, so tests create it on demand.
        self.transaction = Transaction.objects.create(transaction_id=123456,
                                                      amount=300,
                                                      mihpayid=123)

    def test_post(self):
        params = {'command': 'check_action_status',
                  'key': 'dummy11',
                  'hash': '12355',
                  'var1': 55}
        # No matching local transaction yet: post() returns a truthy value.
        response = post(params)
        self.assertTrue(response)
        # With a stored transaction whose mihpayid matches var1, post()
        # returns a falsy value.
        self.get_trans()
        params['var1'] = 123
        response = post(params)
        self.assertFalse(response)

    def tearDown(self):
        # get_trans() may not have run in every test, so guard the cleanup.
        if hasattr(self, 'transaction'):
            self.transaction.delete()
class VerifyPayment(TestCase):
    """Tests for the remaining gateway calls; the bare calls below are
    smoke tests (they only verify that the call does not raise)."""

    def setUp(self):
        Transaction.objects.create(transaction_id=123456,
                                   amount=300,
                                   mihpayid=123)
        # setattr(settings, 'PAYU_MODE', 'TEST')

    def test_verify_payment(self):
        # txnid 123 does not match the stored transaction_id (123456).
        r = verify_payment(txnid=123)
        self.assertFalse(r)

    def test_check_payment(self):
        r = check_payment(123)
        self.assertFalse(r)

    def test_capture_transaction(self):
        # Smoke test: should not raise.
        capture_transaction(123)

    def test_refund_transaction(self):
        refund_transaction(123, 123)

    def test_check_action_status(self):
        check_action_status(123)

    def test_cancel_refund_transaction(self):
        cancel_refund_transaction(123, 10)
| mit |
helenst/django | tests/model_inheritance/tests.py | 17 | 15747 | from __future__ import unicode_literals
from operator import attrgetter
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import connection
from django.test import TestCase
from django.test.utils import CaptureQueriesContext
from django.utils import six
from .models import (
Chef, CommonInfo, ItalianRestaurant, ParkingLot, Place, Post,
Restaurant, Student, Supplier, Worker, MixinModel,
Title, Copy, Base, SubBase)
class ModelInheritanceTests(TestCase):
def test_abstract(self):
# The Student and Worker models both have 'name' and 'age' fields on
# them and inherit the __unicode__() method, just as with normal Python
# subclassing. This is useful if you want to factor out common
# information for programming purposes, but still completely
# independent separate models at the database level.
w1 = Worker.objects.create(name="Fred", age=35, job="Quarry worker")
Worker.objects.create(name="Barney", age=34, job="Quarry worker")
s = Student.objects.create(name="Pebbles", age=5, school_class="1B")
self.assertEqual(six.text_type(w1), "Worker Fred")
self.assertEqual(six.text_type(s), "Student Pebbles")
# The children inherit the Meta class of their parents (if they don't
# specify their own).
self.assertQuerysetEqual(
Worker.objects.values("name"), [
{"name": "Barney"},
{"name": "Fred"},
],
lambda o: o
)
# Since Student does not subclass CommonInfo's Meta, it has the effect
# of completely overriding it. So ordering by name doesn't take place
# for Students.
self.assertEqual(Student._meta.ordering, [])
# However, the CommonInfo class cannot be used as a normal model (it
# doesn't exist as a model).
self.assertRaises(AttributeError, lambda: CommonInfo.objects.all())
def test_multiple_table(self):
post = Post.objects.create(title="Lorem Ipsum")
# The Post model has distinct accessors for the Comment and Link models.
post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True)
post.attached_link_set.create(
content="The Web framework for perfections with deadlines.",
url="http://www.djangoproject.com/"
)
# The Post model doesn't have an attribute called
# 'attached_%(class)s_set'.
self.assertRaises(
AttributeError, getattr, post, "attached_%(class)s_set"
)
# The Place/Restaurant/ItalianRestaurant models all exist as
# independent models. However, the subclasses also have transparent
# access to the fields of their ancestors.
# Create a couple of Places.
Place.objects.create(name="Master Shakes", address="666 W. Jersey")
Place.objects.create(name="Ace Hardware", address="1013 N. Ashland")
# Test constructor for Restaurant.
r = Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2
)
# Test the constructor for ItalianRestaurant.
c = Chef.objects.create(name="Albert")
ir = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=c
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
ir.address = "1234 W. Elm"
ir.save()
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Make sure Restaurant and ItalianRestaurant have the right fields in
# the right order.
self.assertEqual(
[f.name for f in Restaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef"]
)
self.assertEqual(
[f.name for f in ItalianRestaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef", "restaurant_ptr", "serves_gnocchi"],
)
self.assertEqual(Restaurant._meta.ordering, ["-rating"])
# Even though p.supplier for a Place 'p' (a parent of a Supplier), a
# Restaurant object cannot access that reverse relation, since it's not
# part of the Place-Supplier Hierarchy.
self.assertQuerysetEqual(Place.objects.filter(supplier__name="foo"), [])
self.assertRaises(
FieldError, Restaurant.objects.filter, supplier__name="foo"
)
# Parent fields can be used directly in filters on the child model.
self.assertQuerysetEqual(
Restaurant.objects.filter(name="Demon Dogs"), [
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Filters against the parent model return objects of the parent's type.
p = Place.objects.get(name="Demon Dogs")
self.assertIs(type(p), Place)
# Since the parent and child are linked by an automatically created
# OneToOneField, you can get from the parent to the child by using the
# child's name.
self.assertEqual(
p.restaurant, Restaurant.objects.get(name="Demon Dogs")
)
self.assertEqual(
Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
self.assertEqual(
Restaurant.objects.get(name="Ristorante Miron").italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
# This won't work because the Demon Dogs restaurant is not an Italian
# restaurant.
self.assertRaises(
ItalianRestaurant.DoesNotExist,
lambda: p.restaurant.italianrestaurant
)
# An ItalianRestaurant which does not exist is also a Place which does
# not exist.
self.assertRaises(
Place.DoesNotExist,
ItalianRestaurant.objects.get, name="The Noodle Void"
)
# MultipleObjectsReturned is also inherited.
self.assertRaises(
Place.MultipleObjectsReturned,
Restaurant.objects.get, id__lt=12321
)
# Related objects work just as they normally do.
s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St")
s1.customers = [r, ir]
s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St")
s2.customers = [ir]
# This won't work because the Place we select is not a Restaurant (it's
# a Supplier).
p = Place.objects.get(name="Joe's Chickens")
self.assertRaises(
Restaurant.DoesNotExist, lambda: p.restaurant
)
self.assertEqual(p.supplier, s1)
self.assertQuerysetEqual(
ir.provider.order_by("-name"), [
"Luigi's Pasta",
"Joe's Chickens"
],
attrgetter("name")
)
self.assertQuerysetEqual(
Restaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
],
attrgetter("name"),
)
ParkingLot.objects.create(
name="Main St", address="111 Main St", main_site=s1
)
ParkingLot.objects.create(
name="Well Lit", address="124 Sesame St", main_site=ir
)
self.assertEqual(
Restaurant.objects.get(lot__name="Well Lit").name,
"Ristorante Miron"
)
# The update() command can update fields in parent and child classes at
# once (although it executed multiple SQL queries to do so).
rows = Restaurant.objects.filter(
serves_hot_dogs=True, name__contains="D"
).update(
name="Demon Puppies", serves_hot_dogs=False
)
self.assertEqual(rows, 1)
r1 = Restaurant.objects.get(pk=r.pk)
self.assertFalse(r1.serves_hot_dogs)
self.assertEqual(r1.name, "Demon Puppies")
# The values() command also works on fields from parent models.
self.assertQuerysetEqual(
ItalianRestaurant.objects.values("name", "rating"), [
{"rating": 4, "name": "Ristorante Miron"}
],
lambda o: o
)
# select_related works with fields from the parent object as if they
# were a normal part of the model.
self.assertNumQueries(
2, lambda: ItalianRestaurant.objects.all()[0].chef
)
self.assertNumQueries(
1, lambda: ItalianRestaurant.objects.select_related("chef")[0].chef
)
def test_select_related_defer(self):
    """
    #23370 - Should be able to defer child fields when using
    select_related() from parent to child.
    """
    # One plain Restaurant and one ItalianRestaurant (a child model),
    # ordered by rating so their positions in the queryset are known.
    Restaurant.objects.create(
        name="Demon Dogs",
        address="944 W. Fullerton",
        serves_hot_dogs=True,
        serves_pizza=False,
        rating=2,
    )
    ItalianRestaurant.objects.create(
        name="Ristorante Miron",
        address="1234 W. Ash",
        serves_hot_dogs=False,
        serves_pizza=False,
        serves_gnocchi=True,
        rating=4,
    )
    qs = (Restaurant.objects
          .select_related("italianrestaurant")
          .defer("italianrestaurant__serves_gnocchi")
          .order_by("rating"))
    # Test that the field was actually deferred: one query for the list,
    # plus one extra query when the deferred field is first accessed.
    with self.assertNumQueries(2):
        objs = list(qs.all())
        self.assertTrue(objs[1].italianrestaurant.serves_gnocchi)
    # Test that model fields were assigned correct values
    self.assertEqual(qs[0].name, 'Demon Dogs')
    self.assertEqual(qs[0].rating, 2)
    self.assertEqual(qs[1].italianrestaurant.name, 'Ristorante Miron')
    self.assertEqual(qs[1].italianrestaurant.rating, 4)
def test_mixin_init(self):
    """A mixin's __init__ runs, so attributes it sets are present."""
    instance = MixinModel()
    self.assertEqual(instance.other_attr, 1)
def test_update_query_counts(self):
    """
    Test that update queries do not generate non-necessary queries.
    Refs #18304.
    """
    c = Chef.objects.create(name="Albert")
    ir = ItalianRestaurant.objects.create(
        name="Ristorante Miron",
        address="1234 W. Ash",
        serves_hot_dogs=False,
        serves_pizza=False,
        serves_gnocchi=True,
        rating=4,
        chef=c
    )
    # Saving a child in a three-level MTI chain should issue exactly one
    # UPDATE per table in the inheritance chain, and nothing more.
    with self.assertNumQueries(3):
        ir.save()
def test_update_parent_filtering(self):
    """
    Test that updating a field of a model subclass doesn't issue an UPDATE
    query constrained by an inner query.
    Refs #10399
    """
    supplier = Supplier.objects.create(
        name='Central market',
        address='610 some street'
    )
    # Capture the expected query in a database agnostic way: a direct
    # pk-filtered update on the parent model is the simple form we want.
    with CaptureQueriesContext(connection) as captured_queries:
        Place.objects.filter(pk=supplier.pk).update(name=supplier.name)
    expected_sql = captured_queries[0]['sql']
    # Capture the queries executed when a subclassed model instance is saved.
    with CaptureQueriesContext(connection) as captured_queries:
        supplier.save(update_fields=('name',))
    # Every UPDATE issued by save() must match the simple pk-filtered
    # form, i.e. contain no nested subquery constraint.
    for query in captured_queries:
        sql = query['sql']
        if 'UPDATE' in sql:
            self.assertEqual(expected_sql, sql)
def test_eq(self):
    """Equality doesn't transfer in multitable inheritance: instances of
    different models are unequal even with the same primary key."""
    for lhs, rhs in ((Place(id=1), Restaurant(id=1)),
                     (Restaurant(id=1), Place(id=1))):
        self.assertNotEqual(lhs, rhs)
def test_ticket_12567(self):
    """Filtering/excluding the parent model on a child relation's
    null-ness partitions the rows correctly (refs #12567)."""
    restaurant = Restaurant.objects.create(name='n1', address='a1')
    supplier = Supplier.objects.create(name='s1', address='a2')
    # (queryset, pk of the single Place expected in the result)
    cases = (
        (Place.objects.filter(supplier__isnull=False), supplier.pk),
        (Place.objects.filter(supplier__isnull=True), restaurant.pk),
        (Place.objects.exclude(supplier__isnull=False), restaurant.pk),
        (Place.objects.exclude(supplier__isnull=True), supplier.pk),
    )
    for queryset, expected_pk in cases:
        self.assertQuerysetEqual(
            queryset,
            [Place.objects.get(pk=expected_pk)],
            lambda x: x
        )
def test_custompk_m2m(self):
    """M2M descriptors on a child model with a custom pk must use the
    child's own pk value, not the parent link's."""
    b = Base.objects.create()
    b.titles.add(Title.objects.create(title="foof"))
    # The child row deliberately has a pk (sub_id) different from the
    # parent row it links to.
    s = SubBase.objects.create(sub_id=b.id)
    b = Base.objects.get(pk=s.id)
    self.assertNotEqual(b.pk, s.pk)
    # Low-level test for related_val
    self.assertEqual(s.titles.related_val, (s.id,))
    # Higher level test for correct query values (title foof not
    # accidentally found).
    self.assertQuerysetEqual(
        s.titles.all(), [])
class InheritanceSameModelNameTests(TestCase):
    """Related-name collisions between same-named models in different
    apps are avoided by the '%(app_label)s'/'%(class)s' placeholders."""

    def setUp(self):
        # The Title model has distinct accessors for both
        # model_inheritance.Copy and model_inheritance_same_model_name.Copy
        # models.
        self.title = Title.objects.create(title='Lorem Ipsum')

    def test_inheritance_related_name(self):
        # Accessor for this app's Copy model.
        self.assertEqual(
            self.title.attached_model_inheritance_copy_set.create(
                content='Save $ on V1agr@',
                url='http://v1agra.com/',
                title='V1agra is spam',
            ), Copy.objects.get(
                content='Save $ on V1agr@',
            ))

    def test_inheritance_with_same_model_name(self):
        # Install the sibling app (which defines its own Copy model) and
        # check its accessor is distinct from this app's.
        with self.modify_settings(
                INSTALLED_APPS={'append': ['model_inheritance.same_model_name']}):
            call_command('migrate', verbosity=0)
            from .same_model_name.models import Copy
            self.assertEqual(
                self.title.attached_same_model_name_copy_set.create(
                    content='The Web framework for perfectionists with deadlines.',
                    url='http://www.djangoproject.com/',
                    title='Django Rocks'
                ), Copy.objects.get(
                    content='The Web framework for perfectionists with deadlines.',
                ))

    def test_related_name_attribute_exists(self):
        # The Post model doesn't have an attribute called 'attached_%(app_label)s_%(class)s_set'.
        self.assertFalse(hasattr(self.title, 'attached_%(app_label)s_%(class)s_set'))
| bsd-3-clause |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/system-config-printer/ppdsloader.py | 1 | 11100 | #!/usr/bin/python
## system-config-printer
## Copyright (C) 2010, 2011 Red Hat, Inc.
## Author: Tim Waugh <twaugh@redhat.com>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import dbus
import gobject
import gtk
import cupshelpers
import asyncconn
from debug import debugprint
from gettext import gettext as _
class PPDsLoader(gobject.GObject):
    """
    1. If PackageKit support is available, and this is a local server,
    try to use PackageKit to install relevant drivers. We do this
    because we can only make the right choice about the "best" driver
    when the full complement of drivers is there to choose from.

    2. Fetch the list of available drivers from CUPS.

    3. If Jockey is available, and there is no appropriate driver
    available, try to use Jockey to install one.

    4. If Jockey was able to install one, fetch the list of available
    drivers again.
    """

    # Emitted once when the search ends (success, failure, or cancel);
    # callers then use get_ppds()/get_error() to see what happened.
    __gsignals__ = {
        'finished': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [])
        }

    def __init__ (self, device_id=None, parent=None, device_uri=None,
                  host=None, encryption=None, language=None,
                  device_make_and_model=None):
        gobject.GObject.__init__ (self)
        debugprint ("+%s" % self)

        self._device_id = device_id
        self._device_uri = device_uri
        self._device_make_and_model = device_make_and_model
        self._parent = parent
        self._host = host
        self._encryption = encryption
        self._language = language

        self._installed_files = []
        self._conn = None
        self._ppds = None
        self._exc = None
        self._ppdsmatch_result = None
        self._jockey_queried = False

        # None, "localhost", or a path (UNIX domain socket) all denote
        # the local CUPS server.
        self._local_cups = (self._host == None or
                            self._host == "localhost" or
                            self._host[0] == '/')

        try:
            self._bus = dbus.SessionBus ()
        except:
            debugprint ("Failed to get session bus")
            self._bus = None

        fmt = _("Searching")
        self._dialog = gtk.MessageDialog (parent=parent,
                                          flags=gtk.DIALOG_MODAL |
                                          gtk.DIALOG_DESTROY_WITH_PARENT,
                                          type=gtk.MESSAGE_INFO,
                                          buttons=gtk.BUTTONS_CANCEL,
                                          message_format=fmt)

        self._dialog.format_secondary_text (_("Searching for drivers"))
        # Cancel button / window close aborts the search.
        self._dialog.connect ("response", self._dialog_response)

    def run (self):
        # Show the progress dialog and kick off the first stage:
        # PackageKit for a local server with a device ID, otherwise go
        # straight to CUPS.
        self._dialog.show_all ()

        if self._device_id:
            self._devid_dict = cupshelpers.parseDeviceID (self._device_id)

        if self._local_cups and self._device_id and self._bus:
            self._gpk_device_id = "MFG:%s;MDL:%s;" % (self._devid_dict["MFG"],
                                                      self._devid_dict["MDL"])
            self._query_packagekit ()
        else:
            self._query_cups ()

    def __del__ (self):
        debugprint ("-%s" % self)

    def destroy (self):
        # Tear down the dialog and the CUPS connection; safe to call
        # more than once.
        debugprint ("DESTROY: %s" % self)
        if self._dialog:
            self._dialog.destroy ()
            self._dialog = None

        self._parent = None

        if self._conn:
            self._conn.destroy ()
            self._conn = None

    def get_installed_files (self):
        # Files installed by Jockey during this search (may be empty).
        return self._installed_files

    def get_ppds (self):
        # cupshelpers.ppds.PPDs instance, or None on failure.
        return self._ppds

    def get_ppdsmatch_result (self):
        # (fit, ppdnamelist) from the device-ID match, or None.
        return self._ppdsmatch_result

    def get_error (self):
        return self._exc

    def _dialog_response (self, dialog, response):
        # User cancelled: finish immediately without results.
        dialog.destroy ()
        self._dialog = None
        self.emit ('finished')

    def _query_cups (self):
        debugprint ("Asking CUPS for PPDs")
        if (not self._conn):
            c = asyncconn.Connection (host=self._host,
                                      encryption=self._encryption,
                                      reply_handler=self._cups_connect_reply,
                                      error_handler=self._cups_error)
            self._conn = c
        else:
            # Re-querying after Jockey/PackageKit: reuse the connection.
            self._cups_connect_reply(self._conn, None)

    def _cups_connect_reply (self, conn, UNUSED):
        conn._begin_operation (_("fetching PPDs"))
        try:
            conn.getPPDs2 (reply_handler=self._cups_reply,
                           error_handler=self._cups_error)
        except AttributeError:
            # getPPDs2 requires pycups >= 1.9.52
            conn.getPPDs (reply_handler=self._cups_reply,
                          error_handler=self._cups_error)

    def _cups_reply (self, conn, result):
        ppds = cupshelpers.ppds.PPDs (result, language=self._language)
        self._ppds = ppds
        self._need_requery_cups = False
        if self._device_id:
            fit = ppds.\
                getPPDNamesFromDeviceID (self._devid_dict["MFG"],
                                         self._devid_dict["MDL"],
                                         self._devid_dict["DES"],
                                         self._devid_dict["CMD"],
                                         self._device_uri,
                                         self._device_make_and_model)
            ppdnamelist = ppds.\
                orderPPDNamesByPreference (fit.keys (),
                                           self._installed_files,
                                           devid=self._devid_dict,
                                           fit=fit)
            self._ppdsmatch_result = (fit, ppdnamelist)

            ppdname = ppdnamelist[0]
            if (self._bus and
                not fit[ppdname].startswith ("exact") and
                not self._jockey_queried and
                self._local_cups):
                # Try to install packages using jockey if
                # - there's no appropriate driver (PPD) locally available
                # - we are configuring local CUPS server
                self._jockey_queried = True
                self._query_jockey ()
                return

        conn.destroy ()
        self._conn = None
        if self._dialog != None:
            self._dialog.destroy ()
            self._dialog = None

        self.emit ('finished')

    def _cups_error (self, conn, exc):
        conn.destroy ()
        self._conn = None
        self._ppds = None
        if self._dialog != None:
            self._dialog.destroy ()
            self._dialog = None

        self.emit ('finished')

    def _query_packagekit (self):
        debugprint ("Asking PackageKit to install drivers")
        try:
            xid = self._parent.window.xid
        except:
            xid = 0

        try:
            obj = self._bus.get_object ("org.freedesktop.PackageKit",
                                        "/org/freedesktop/PackageKit")
            proxy = dbus.Interface (obj, "org.freedesktop.PackageKit.Modify")
            resources = [self._gpk_device_id]
            interaction = "hide-finished"
            debugprint ("Calling InstallPrinterDrivers (%s, %s, %s)" %
                        (repr (xid), repr (resources), repr (interaction)))
            proxy.InstallPrinterDrivers (dbus.UInt32 (xid),
                                         resources, interaction,
                                         reply_handler=self._packagekit_reply,
                                         error_handler=self._packagekit_error,
                                         timeout=3600)
        except Exception, e:
            # PackageKit unavailable: fall through to the CUPS query.
            debugprint ("Failed to talk to PackageKit: %s" % e)
            if self._dialog:
                self._dialog.show_all ()

            self._query_cups ()

    def _packagekit_reply (self):
        debugprint ("Got PackageKit reply")
        # New drivers may have been installed; the CUPS list must be
        # (re)fetched.
        self._need_requery_cups = True
        if self._dialog:
            self._dialog.show_all ()

        self._query_cups ()

    def _packagekit_error (self, exc):
        debugprint ("Got PackageKit error: %s" % exc)
        if self._dialog:
            self._dialog.show_all ()

        self._query_cups ()

    def _query_jockey (self):
        debugprint ("Asking Jockey to install drivers")
        try:
            obj = self._bus.get_object ("com.ubuntu.DeviceDriver", "/GUI")
            jockey = dbus.Interface (obj, "com.ubuntu.DeviceDriver")
            r = jockey.search_driver ("printer_deviceid:%s" % self._device_id,
                                      reply_handler=self._jockey_reply,
                                      error_handler=self._jockey_error,
                                      timeout=3600)
        except Exception, e:
            self._jockey_error (e)

    # NOTE(review): a dbus reply_handler is called with the method's
    # return values; confirm search_driver returns two values matching
    # the (conn, result) parameters here.
    def _jockey_reply (self, conn, result):
        debugprint ("Got Jockey result: %s" % repr (result))
        try:
            self._installed_files = result[1]
        except:
            self._installed_files = []
        self._query_cups ()

    def _jockey_error (self, exc):
        debugprint ("Got Jockey error: %s" % exc)
        if self._need_requery_cups:
            self._query_cups ()
        else:
            # We already have a (non-exact) PPD list; finish with it.
            if self._conn != None:
                self._conn.destroy ()
                self._conn = None

            if self._dialog != None:
                self._dialog.destroy ()
                self._dialog = None

            self.emit ('finished')
gobject.type_register(PPDsLoader)

if __name__ == "__main__":
    # Simple manual test harness: a window with a "Go" button that runs
    # a PPDsLoader search for a dummy device ID and prints the results.
    class Foo:
        def __init__ (self):
            w = gtk.Window ()
            b = gtk.Button ("Go")
            w.add (b)
            b.connect ('clicked', self.go)
            w.connect ('delete-event', gtk.main_quit)
            w.show_all ()
            self._window = w

        def go (self, button):
            loader = PPDsLoader (device_id="MFG:MFG;MDL:MDL;",
                                 parent=self._window)
            loader.connect ('finished', self.ppds_loaded)
            loader.run ()

        def ppds_loaded (self, ppdsloader):
            self._window.destroy ()
            gtk.main_quit ()
            exc = ppdsloader.get_error ()
            print exc
            ppds = ppdsloader.get_ppds ()
            if ppds != None:
                print len (ppds)

            ppdsloader.destroy ()

    import gobject
    from debug import set_debugging
    set_debugging (True)
    gobject.threads_init ()
    Foo ()
    gtk.main ()
| gpl-3.0 |
AntonioMtn/NZBMegaSearch | jinja2/testsuite/security.py | 90 | 6157 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.security
~~~~~~~~~~~~~~~~~~~~~~~~~
Checks the sandbox and other security features.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment
from jinja2.sandbox import SandboxedEnvironment, \
ImmutableSandboxedEnvironment, unsafe
from jinja2 import Markup, escape
from jinja2.exceptions import SecurityError, TemplateSyntaxError, \
TemplateRuntimeError
class PrivateStuff(object):
    """Sandbox test fixture: ``bar`` is an ordinary (safe) method, while
    ``foo`` is explicitly marked unsafe via the ``@unsafe`` decorator."""

    def bar(self):
        return 23

    @unsafe
    def foo(self):
        return 42

    def __repr__(self):
        return 'PrivateStuff'
class PublicStuff(object):
    """Sandbox test fixture: ``bar`` is public, while ``_foo`` is
    internal by naming convention (leading underscore)."""

    def bar(self):
        return 23

    def _foo(self):
        return 42

    def __repr__(self):
        return 'PublicStuff'
class SandboxTestCase(JinjaTestCase):
def test_unsafe(self):
env = SandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string("{{ foo.foo() }}").render,
foo=PrivateStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()), '23')
self.assert_raises(SecurityError, env.from_string("{{ foo._foo() }}").render,
foo=PublicStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()), '23')
self.assert_equal(env.from_string("{{ foo.__class__ }}").render(foo=42), '')
self.assert_equal(env.from_string("{{ foo.func_code }}").render(foo=lambda:None), '')
# security error comes from __class__ already.
self.assert_raises(SecurityError, env.from_string(
"{{ foo.__class__.__subclasses__() }}").render, foo=42)
def test_immutable_environment(self):
env = ImmutableSandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string(
'{{ [].append(23) }}').render)
self.assert_raises(SecurityError, env.from_string(
'{{ {1:2}.clear() }}').render)
def test_restricted(self):
env = SandboxedEnvironment()
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for item.attribute in seq %}...{% endfor %}")
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for foo, bar.baz in seq %}...{% endfor %}")
def test_markup_operations(self):
# adding two strings should escape the unsafe one
unsafe = '<script type="application/x-some-script">alert("foo");</script>'
safe = Markup('<em>username</em>')
assert unsafe + safe == unicode(escape(unsafe)) + unicode(safe)
# string interpolations are safe to use too
assert Markup('<em>%s</em>') % '<bad user>' == \
'<em><bad user></em>'
assert Markup('<em>%(username)s</em>') % {
'username': '<bad user>'
} == '<em><bad user></em>'
# an escaped object is markup too
assert type(Markup('foo') + 'bar') is Markup
# and it implements __html__ by returning itself
x = Markup("foo")
assert x.__html__() is x
# it also knows how to treat __html__ objects
class Foo(object):
def __html__(self):
return '<em>awesome</em>'
def __unicode__(self):
return 'awesome'
assert Markup(Foo()) == '<em>awesome</em>'
assert Markup('<strong>%s</strong>') % Foo() == \
'<strong><em>awesome</em></strong>'
# escaping and unescaping
assert escape('"<>&\'') == '"<>&''
assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
assert Markup("<test>").unescape() == "<test>"
def test_template_data(self):
env = Environment(autoescape=True)
t = env.from_string('{% macro say_hello(name) %}'
'<p>Hello {{ name }}!</p>{% endmacro %}'
'{{ say_hello("<blink>foo</blink>") }}')
escaped_out = '<p>Hello <blink>foo</blink>!</p>'
assert t.render() == escaped_out
assert unicode(t.module) == escaped_out
assert escape(t.module) == escaped_out
assert t.module.say_hello('<blink>foo</blink>') == escaped_out
assert escape(t.module.say_hello('<blink>foo</blink>')) == escaped_out
def test_attr_filter(self):
env = SandboxedEnvironment()
tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
self.assert_raises(SecurityError, tmpl.render, cls=int)
def test_binary_operator_intercepting(self):
def disable_op(left, right):
raise TemplateRuntimeError('that operator so does not work')
for expr, ctx, rv in ('1 + 2', {}, '3'), ('a + 2', {'a': 2}, '4'):
env = SandboxedEnvironment()
env.binop_table['+'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_binops = frozenset(['+'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError, e:
pass
else:
self.fail('expected runtime error')
def test_unary_operator_intercepting(self):
def disable_op(arg):
raise TemplateRuntimeError('that operator so does not work')
for expr, ctx, rv in ('-1', {}, '-1'), ('-a', {'a': 2}, '-2'):
env = SandboxedEnvironment()
env.unop_table['-'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_unops = frozenset(['-'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError, e:
pass
else:
self.fail('expected runtime error')
def suite():
    """Build and return the sandbox test suite."""
    # Use a distinct local name: the original shadowed the function name.
    result = unittest.TestSuite()
    result.addTest(unittest.makeSuite(SandboxTestCase))
    return result
| gpl-2.0 |
wilbert-abreu/flask | tests/test_user_error_handler.py | 150 | 3483 | # -*- coding: utf-8 -*-
from werkzeug.exceptions import Forbidden, InternalServerError
import flask
def test_error_handler_no_match():
    """A handler registered for a custom exception type catches that
    type; an unregistered exception falls through to the 500 handler."""
    app = flask.Flask(__name__)

    class CustomException(Exception):
        pass

    @app.errorhandler(CustomException)
    def handle_custom(e):
        assert isinstance(e, CustomException)
        return 'custom'

    @app.errorhandler(500)
    def handle_500(e):
        return type(e).__name__

    @app.route('/custom')
    def trigger_custom():
        raise CustomException()

    @app.route('/keyerror')
    def trigger_keyerror():
        raise KeyError()

    client = app.test_client()
    assert client.get('/custom').data == b'custom'
    assert client.get('/keyerror').data == b'KeyError'
def test_error_handler_subclass():
    """A handler for a parent exception class also catches subclasses,
    unless a subclass has its own registered handler."""
    app = flask.Flask(__name__)

    class ParentException(Exception):
        pass

    class ChildExceptionUnregistered(ParentException):
        pass

    class ChildExceptionRegistered(ParentException):
        pass

    @app.errorhandler(ParentException)
    def handle_parent(e):
        assert isinstance(e, ParentException)
        return 'parent'

    @app.errorhandler(ChildExceptionRegistered)
    def handle_registered_child(e):
        assert isinstance(e, ChildExceptionRegistered)
        return 'child-registered'

    @app.route('/parent')
    def trigger_parent():
        raise ParentException()

    @app.route('/child-unregistered')
    def trigger_unregistered():
        raise ChildExceptionUnregistered()

    @app.route('/child-registered')
    def trigger_registered():
        raise ChildExceptionRegistered()

    client = app.test_client()
    assert client.get('/parent').data == b'parent'
    assert client.get('/child-unregistered').data == b'parent'
    assert client.get('/child-registered').data == b'child-registered'
def test_error_handler_http_subclass():
    """Subclasses of an HTTPException use the handler registered for the
    parent's status code unless they have their own handler."""
    app = flask.Flask(__name__)

    class ForbiddenSubclassRegistered(Forbidden):
        pass

    class ForbiddenSubclassUnregistered(Forbidden):
        pass

    @app.errorhandler(403)
    def handle_403(e):
        assert isinstance(e, Forbidden)
        return 'forbidden'

    @app.errorhandler(ForbiddenSubclassRegistered)
    def handle_registered_subclass(e):
        assert isinstance(e, ForbiddenSubclassRegistered)
        return 'forbidden-registered'

    @app.route('/forbidden')
    def trigger_forbidden():
        raise Forbidden()

    @app.route('/forbidden-registered')
    def trigger_registered():
        raise ForbiddenSubclassRegistered()

    @app.route('/forbidden-unregistered')
    def trigger_unregistered():
        raise ForbiddenSubclassUnregistered()

    client = app.test_client()
    assert client.get('/forbidden').data == b'forbidden'
    assert client.get('/forbidden-unregistered').data == b'forbidden'
    assert client.get('/forbidden-registered').data == b'forbidden-registered'
def test_error_handler_blueprint():
    """A blueprint-level 500 handler takes precedence over the app-level
    one for requests routed into that blueprint."""
    bp = flask.Blueprint('bp', __name__)

    @bp.errorhandler(500)
    def handle_500_bp(e):
        return 'bp-error'

    @bp.route('/error')
    def raise_bp_error():
        raise InternalServerError()

    app = flask.Flask(__name__)

    @app.errorhandler(500)
    def handle_500_app(e):
        return 'app-error'

    @app.route('/error')
    def raise_app_error():
        raise InternalServerError()

    app.register_blueprint(bp, url_prefix='/bp')

    client = app.test_client()
    assert client.get('/error').data == b'app-error'
    assert client.get('/bp/error').data == b'bp-error'
| bsd-3-clause |
NEricN/RobotCSimulator | Python/App/Lib/SocketServer.py | 33 | 24036 | """Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
- others, e.g. AF_DECNET are conceivable (see <socket.h>
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
save some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two requests that come in nearly simultaneous to apply
conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
import socket
import select
import sys
import os
import errno
try:
import threading
except ImportError:
import dummy_threading as threading
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
def _eintr_retry(func, *args):
"""restart a system call interrupted by EINTR"""
while True:
try:
return func(*args)
except (OSError, select.error) as e:
if e.args[0] != errno.EINTR:
raise
class BaseServer:

    """Base class for server classes.

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you do not use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - allow_reuse_address

    Instance variables:

    - RequestHandlerClass
    - socket

    """

    timeout = None

    def __init__(self, server_address, RequestHandlerClass):
        """Constructor. May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass
        # Set by serve_forever() when it has fully exited; shutdown()
        # blocks on this event.
        self.__is_shut_down = threading.Event()
        self.__shutdown_request = False

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        pass

    def serve_forever(self, poll_interval=0.5):
        """Handle one request at a time until shutdown.

        Polls for shutdown every poll_interval seconds. Ignores
        self.timeout. If you need to do periodic tasks, do them in
        another thread.
        """
        self.__is_shut_down.clear()
        try:
            while not self.__shutdown_request:
                # XXX: Consider using another file descriptor or
                # connecting to the socket to wake this up instead of
                # polling. Polling reduces our responsiveness to a
                # shutdown request and wastes cpu at all other times.
                r, w, e = _eintr_retry(select.select, [self], [], [],
                                       poll_interval)
                if self in r:
                    self._handle_request_noblock()
        finally:
            self.__shutdown_request = False
            self.__is_shut_down.set()

    def shutdown(self):
        """Stops the serve_forever loop.

        Blocks until the loop has finished. This must be called while
        serve_forever() is running in another thread, or it will
        deadlock.
        """
        self.__shutdown_request = True
        self.__is_shut_down.wait()

    # The distinction between handling, getting, processing and
    # finishing a request is fairly arbitrary.  Remember:
    #
    # - handle_request() is the top-level call.  It calls
    #   select, get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process
    #   or create a new thread to finish the request
    # - finish_request() instantiates the request handler class;
    #   this constructor will handle the request all by itself

    def handle_request(self):
        """Handle one request, possibly blocking.

        Respects self.timeout.
        """
        # Support people who used socket.settimeout() to escape
        # handle_request before self.timeout was available.
        # The effective timeout is the smaller of the two.
        timeout = self.socket.gettimeout()
        if timeout is None:
            timeout = self.timeout
        elif self.timeout is not None:
            timeout = min(timeout, self.timeout)
        fd_sets = _eintr_retry(select.select, [self], [], [], timeout)
        if not fd_sets[0]:
            self.handle_timeout()
            return
        self._handle_request_noblock()

    def _handle_request_noblock(self):
        """Handle one request, without blocking.

        I assume that select.select has returned that the socket is
        readable before this function was called, so there should be
        no risk of blocking in get_request().
        """
        try:
            request, client_address = self.get_request()
        except socket.error:
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)

    def handle_timeout(self):
        """Called if no new request arrives within self.timeout.

        Overridden by ForkingMixIn.
        """
        pass

    def verify_request(self, request, client_address):
        """Verify the request.  May be overridden.

        Return True if we should proceed with this request.

        """
        return True

    def process_request(self, request, client_address):
        """Call finish_request.

        Overridden by ForkingMixIn and ThreadingMixIn.

        """
        self.finish_request(request, client_address)
        self.shutdown_request(request)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        pass

    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        pass

    def handle_error(self, request, client_address):
        """Handle an error gracefully.  May be overridden.

        The default is to print a traceback and continue.

        """
        print '-'*40
        print 'Exception happened during processing of request from',
        print client_address
        import traceback
        traceback.print_exc() # XXX But this goes to stderr!
        print '-'*40
class TCPServer(BaseServer):

    """Base class for various socket-based server classes.

    Defaults to synchronous IP stream (i.e., TCP).

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass, bind_and_activate=True)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you don't use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - request_queue_size (only for stream sockets)
    - allow_reuse_address

    Instance variables:

    - server_address
    - RequestHandlerClass
    - socket

    """

    address_family = socket.AF_INET

    socket_type = socket.SOCK_STREAM

    request_queue_size = 5

    allow_reuse_address = False

    def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
        """Constructor. May be extended, do not override."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family,
                                    self.socket_type)
        if bind_and_activate:
            try:
                self.server_bind()
                self.server_activate()
            except:
                # Don't leak the socket if bind()/listen() failed.
                self.server_close()
                raise

    def server_bind(self):
        """Called by constructor to bind the socket.

        May be overridden.

        """
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Re-read the address: the kernel may have filled in an
        # ephemeral port (e.g. when binding to port 0).
        self.server_address = self.socket.getsockname()

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        self.socket.listen(self.request_queue_size)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        self.socket.close()

    def fileno(self):
        """Return socket file number.

        Interface required by select().

        """
        return self.socket.fileno()

    def get_request(self):
        """Get the request and client address from the socket.

        May be overridden.

        """
        return self.socket.accept()

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        try:
            #explicitly shutdown.  socket.close() merely releases
            #the socket and waits for GC to perform the actual close.
            request.shutdown(socket.SHUT_WR)
        except socket.error:
            pass #some platforms may raise ENOTCONN here
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        request.close()
class UDPServer(TCPServer):
    """UDP server class."""
    allow_reuse_address = False
    socket_type = socket.SOCK_DGRAM
    max_packet_size = 8192
    def get_request(self):
        # A datagram "request" is the received packet plus the shared
        # server socket that a handler can use to reply.
        packet, sender = self.socket.recvfrom(self.max_packet_size)
        return (packet, self.socket), sender
    def server_activate(self):
        # Datagram sockets are connectionless, so listen() does not apply.
        pass
    def shutdown_request(self, request):
        # There is no per-request connection to shut down.
        self.close_request(request)
    def close_request(self, request):
        # The socket is the server's own and stays open; nothing to close.
        pass
class ForkingMixIn:
    """Mix-in class to handle each request in a new process."""
    # Seconds of inactivity after which handle_timeout() reaps zombies.
    timeout = 300
    # Set of live child PIDs; created lazily on the first fork.
    active_children = None
    # Soft cap on concurrent children; collect_children() blocks above it.
    max_children = 40
    def collect_children(self):
        """Internal routine to wait for children that have exited."""
        if self.active_children is None:
            return
        # If we're above the max number of children, wait and reap them until
        # we go back below threshold. Note that we use waitpid(-1) below to be
        # able to collect children in size(<defunct children>) syscalls instead
        # of size(<children>): the downside is that this might reap children
        # which we didn't spawn, which is why we only resort to this when we're
        # above max_children.
        while len(self.active_children) >= self.max_children:
            try:
                pid, _ = os.waitpid(-1, 0)
                self.active_children.discard(pid)
            except OSError as e:
                if e.errno == errno.ECHILD:
                    # we don't have any children, we're done
                    self.active_children.clear()
                elif e.errno != errno.EINTR:
                    break
        # Now reap all defunct children.
        for pid in self.active_children.copy():
            try:
                pid, _ = os.waitpid(pid, os.WNOHANG)
                # if the child hasn't exited yet, pid will be 0 and ignored by
                # discard() below
                self.active_children.discard(pid)
            except OSError as e:
                if e.errno == errno.ECHILD:
                    # someone else reaped it
                    self.active_children.discard(pid)
    def handle_timeout(self):
        """Wait for zombies after self.timeout seconds of inactivity.
        May be extended, do not override.
        """
        self.collect_children()
    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork()
        if pid:
            # Parent process
            if self.active_children is None:
                self.active_children = set()
            self.active_children.add(pid)
            self.close_request(request) #close handle in parent process
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                    self.shutdown_request(request)
                finally:
                    # Exit with nonzero status even if error handling fails.
                    os._exit(1)
class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""
    # Whether worker threads are daemonic, i.e. whether they keep the
    # process alive after the main thread terminates.
    daemon_threads = False
    def process_request_thread(self, request, client_address):
        """Thread body: same work as BaseServer.process_request,
        plus per-request exception handling.
        """
        try:
            self.finish_request(request, client_address)
            self.shutdown_request(request)
        except:
            self.handle_error(request, client_address)
            self.shutdown_request(request)
    def process_request(self, request, client_address):
        """Hand the request off to a freshly started worker thread."""
        worker = threading.Thread(
            target=self.process_request_thread,
            args=(request, client_address))
        worker.daemon = self.daemon_threads
        worker.start()
# Convenience concrete servers combining a concurrency mix-in with a
# transport; the mix-in comes first so its process_request() takes
# precedence in the MRO.
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
# Unix-domain variants are defined only on platforms whose socket module
# exposes AF_UNIX (i.e. not on Windows).
if hasattr(socket, 'AF_UNIX'):
    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX
    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX
    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
    """Base class for request handler classes.

    One instance is created per request to be handled. The constructor
    stores the request, client_address and server on the instance and
    then drives the setup()/handle()/finish() life cycle. To implement
    a specific service, derive a class that overrides handle(); it can
    reach the request as self.request, the peer address as
    self.client_address, and the owning server (for per-server state)
    as self.server. Since every request gets its own instance,
    handle() may freely define additional instance variables.
    """
    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            # finish() always runs, even when handle() raises.
            self.finish()
    def setup(self):
        """Hook invoked before handle(); override to prepare state."""
        pass
    def handle(self):
        """Service the request; subclasses override this."""
        pass
    def finish(self):
        """Hook invoked after handle(), even on error; override to clean up."""
        pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
    """Define self.rfile and self.wfile for stream sockets."""
    # Default buffer sizes for rfile, wfile.
    # We default rfile to buffered because otherwise it could be
    # really slow for large data (a getc() call per byte); we make
    # wfile unbuffered because (a) often after a write() we want to
    # read and we need to flush the line; (b) big writes to unbuffered
    # files are typically optimized by stdio even when big reads
    # aren't.
    rbufsize = -1
    wbufsize = 0
    # A timeout to apply to the request socket, if not None.
    timeout = None
    # Disable nagle algorithm for this socket, if True.
    # Use only when wbufsize != 0, to avoid small packets.
    disable_nagle_algorithm = False
    def setup(self):
        """Wrap the connection in buffered file objects rfile/wfile."""
        self.connection = self.request
        if self.timeout is not None:
            self.connection.settimeout(self.timeout)
        if self.disable_nagle_algorithm:
            self.connection.setsockopt(socket.IPPROTO_TCP,
                                       socket.TCP_NODELAY, True)
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)
    def finish(self):
        """Flush and close the file wrappers (runs even if handle() raised)."""
        if not self.wfile.closed:
            try:
                self.wfile.flush()
            except socket.error:
                # A final socket error may have occurred here, such as
                # the local error ECONNABORTED.
                pass
        self.wfile.close()
        self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.
    """Define self.rfile and self.wfile for datagram sockets."""
    def setup(self):
        """Expose the received packet via rfile and buffer the reply in wfile."""
        # cStringIO is the faster C implementation (Python 2 only);
        # fall back to the pure-Python StringIO when unavailable.
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        self.packet, self.socket = self.request
        self.rfile = StringIO(self.packet)
        self.wfile = StringIO()
    def finish(self):
        """Send the accumulated reply back to the client as one datagram."""
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
| apache-2.0 |
rmfitzpatrick/ansible | lib/ansible/plugins/shell/csh.py | 69 | 1478 | # (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.shell import ShellBase
class ShellModule(ShellBase):
    """Shell plugin for csh/tcsh."""
    # Common shell filenames (without path) that this plugin handles.
    COMPATIBLE_SHELLS = frozenset(('csh', 'tcsh'))
    # Family of shells this has. Must match the filename without extension.
    SHELL_FAMILY = 'csh'
    # How to end lines in a python script one-liner.
    _SHELL_EMBEDDED_PY_EOL = '\\\n'
    _SHELL_REDIRECT_ALLNULL = '>& /dev/null'
    _SHELL_AND = '&&'
    _SHELL_OR = '||'
    _SHELL_SUB_LEFT = '"`'
    _SHELL_SUB_RIGHT = '`"'
    _SHELL_GROUP_LEFT = '('
    _SHELL_GROUP_RIGHT = ')'
    def env_prefix(self, **kwargs):
        # csh has no inline VAR=value command prefix, so wrap the base
        # class's environment assignments with env(1).
        base_prefix = super(ShellModule, self).env_prefix(**kwargs)
        return 'env %s' % base_prefix
| gpl-3.0 |
jorik041/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/checkout/scm/svn.py | 119 | 17277 | # Copyright (c) 2009, 2010, 2011 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import random
import re
import shutil
import string
import sys
import tempfile
from webkitpy.common.memoized import memoized
from webkitpy.common.system.executive import Executive, ScriptError
from .scm import AuthenticationError, SCM, commit_error_handler
_log = logging.getLogger(__name__)
# A mixin class that represents common functionality for SVN and Git-SVN.
class SVNRepository(object):
    """Mixin with functionality shared by the SVN and Git-SVN SCM classes:
    discovering cached Subversion credentials for the WebKit server."""
    # FIXME: These belong in common.config.urls
    svn_server_host = "svn.webkit.org"
    svn_server_realm = "<http://svn.webkit.org:80> Mac OS Forge"
    def has_authorization_for_realm(self, realm, home_directory=os.getenv("HOME")):
        """Return True if ~/.subversion holds cached credentials for realm.
        NOTE: the home_directory default is evaluated once, at class
        definition time (module import), not per call."""
        # If we are working on a file:// repository realm will be None
        if realm is None:
            return True
        # ignore false positives for methods implemented in the mixee class. pylint: disable=E1101
        # Assumes find and grep are installed.
        if not os.path.isdir(os.path.join(home_directory, ".subversion")):
            return False
        find_args = ["find", ".subversion", "-type", "f", "-exec", "grep", "-q", realm, "{}", ";", "-print"]
        find_output = self.run(find_args, cwd=home_directory, error_handler=Executive.ignore_error).rstrip()
        if not find_output or not os.path.isfile(os.path.join(home_directory, find_output)):
            return False
        # Subversion either stores the password in the credential file, indicated by the presence of the key "password",
        # or uses the system password store (e.g. Keychain on Mac OS X) as indicated by the presence of the key "passtype".
        # We assume that these keys will not coincide with the actual credential data (e.g. that a person's username
        # isn't "password") so that we can use grep.
        if self.run(["grep", "password", find_output], cwd=home_directory, return_exit_code=True) == 0:
            return True
        return self.run(["grep", "passtype", find_output], cwd=home_directory, return_exit_code=True) == 0
class SVN(SCM, SVNRepository):
    """SCM implementation backed by a Subversion working copy."""
    executable_name = "svn"
    _svn_metadata_files = frozenset(['.svn', '_svn'])
    def __init__(self, cwd, patch_directories, **kwargs):
        SCM.__init__(self, cwd, **kwargs)
        self._bogus_dir = None
        if patch_directories == []:
            # BUG FIX: BaseException takes no keyword arguments, so the old
            # `raise Exception(message=...)` raised TypeError instead of the
            # intended Exception.
            raise Exception('Empty list of patch directories passed to SCM.__init__')
        elif patch_directories is None:
            self._patch_directories = [self._filesystem.relpath(cwd, self.checkout_root)]
        else:
            self._patch_directories = patch_directories
    @classmethod
    def in_working_directory(cls, path, executive=None):
        """Return True if path is inside an SVN working copy."""
        if os.path.isdir(os.path.join(path, '.svn')):
            # This is a fast shortcut for svn info that is usually correct for SVN < 1.7,
            # but doesn't work for SVN >= 1.7.
            return True
        executive = executive or Executive()
        svn_info_args = [cls.executable_name, 'info']
        exit_code = executive.run_command(svn_info_args, cwd=path, return_exit_code=True)
        return (exit_code == 0)
    def find_uuid(self, path):
        """Return the repository UUID for path, or None if not a working copy."""
        if not self.in_working_directory(path):
            return None
        return self.value_from_svn_info(path, 'Repository UUID')
    @classmethod
    def value_from_svn_info(cls, path, field_name):
        """Return the value of a single field from `svn info` output."""
        svn_info_args = [cls.executable_name, 'info']
        # FIXME: This method should use a passed in executive or be made an instance method and use self._executive.
        info_output = Executive().run_command(svn_info_args, cwd=path).rstrip()
        match = re.search("^%s: (?P<value>.+)$" % field_name, info_output, re.MULTILINE)
        if not match:
            raise ScriptError(script_args=svn_info_args, message='svn info did not contain a %s.' % field_name)
        return match.group('value').rstrip('\r')
    def find_checkout_root(self, path):
        """Return the top-most working-copy directory containing path."""
        uuid = self.find_uuid(path)
        # If |path| is not in a working directory, we're supposed to return |path|.
        if not uuid:
            return path
        # Search up the directory hierarchy until we find a different UUID.
        last_path = None
        while True:
            if uuid != self.find_uuid(path):
                return last_path
            last_path = path
            (path, last_component) = self._filesystem.split(path)
            if last_path == path:
                return None
    @staticmethod
    def commit_success_regexp():
        # Raw string so the \d and \. regex escapes are literal backslashes.
        return r"^Committed revision (?P<svn_revision>\d+)\.$"
    def _run_svn(self, args, **kwargs):
        """Run the svn client with args, forwarding kwargs to self.run."""
        return self.run([self.executable_name] + args, **kwargs)
    @memoized
    def svn_version(self):
        """Return the client version string (cached for the process lifetime)."""
        return self._run_svn(['--version', '--quiet'])
    def has_working_directory_changes(self):
        # FIXME: What about files which are not committed yet?
        return self._run_svn(["diff"], cwd=self.checkout_root, decode_output=False) != ""
    def discard_working_directory_changes(self):
        """Revert the checkout to a pristine state (like `git reset --hard`)."""
        # Make sure there are no locks lying around from a previously aborted svn invocation.
        # This is slightly dangerous, as it's possible the user is running another svn process
        # on this checkout at the same time. However, it's much more likely that we're running
        # under windows and svn just sucks (or the user interrupted svn and it failed to clean up).
        self._run_svn(["cleanup"], cwd=self.checkout_root)
        # svn revert -R is not as awesome as git reset --hard.
        # It will leave added files around, causing later svn update
        # calls to fail on the bots. We make this mirror git reset --hard
        # by deleting any added files as well.
        added_files = reversed(sorted(self.added_files()))
        # added_files() returns directories for SVN, we walk the files in reverse path
        # length order so that we remove files before we try to remove the directories.
        self._run_svn(["revert", "-R", "."], cwd=self.checkout_root)
        for path in added_files:
            # This is robust against cwd != self.checkout_root
            absolute_path = self.absolute_path(path)
            # Completely lame that there is no easy way to remove both types with one call.
            if os.path.isdir(path):
                os.rmdir(absolute_path)
            else:
                os.remove(absolute_path)
    def status_command(self):
        return [self.executable_name, 'status']
    def _status_regexp(self, expected_types):
        """Build a regexp matching `svn status` lines whose status letter is
        one of expected_types; the status-column width grew in SVN 1.7."""
        field_count = 6 if self.svn_version() > "1.6" else 5
        return "^(?P<status>[%s]).{%s} (?P<filename>.+)$" % (expected_types, field_count)
    def _add_parent_directories(self, path):
        """Does 'svn add' to the path and its parents."""
        if self.in_working_directory(path):
            return
        self.add(path)
    def add_list(self, paths):
        """Schedule paths (and any unversioned parent directories) for addition."""
        for path in paths:
            self._add_parent_directories(os.path.dirname(os.path.abspath(path)))
        if self.svn_version() >= "1.7":
            # For subversion client 1.7 and later, need to add '--parents' option to ensure intermediate directories
            # are added; in addition, 1.7 returns an exit code of 1 from svn add if one or more of the requested
            # adds are already under version control, including intermediate directories subject to addition
            # due to --parents
            svn_add_args = ['svn', 'add', '--parents'] + paths
            exit_code = self.run(svn_add_args, return_exit_code=True)
            if exit_code and exit_code != 1:
                raise ScriptError(script_args=svn_add_args, exit_code=exit_code)
        else:
            self._run_svn(["add"] + paths)
    def _delete_parent_directories(self, path):
        """Delete path and empty ancestors that hold only SVN metadata."""
        if not self.in_working_directory(path):
            return
        if set(os.listdir(path)) - self._svn_metadata_files:
            return # Directory has non-trivial files in it.
        self.delete(path)
    def delete_list(self, paths):
        """Schedule paths for deletion, pruning emptied parent directories."""
        for path in paths:
            abs_path = os.path.abspath(path)
            parent, base = os.path.split(abs_path)
            result = self._run_svn(["delete", "--force", base], cwd=parent)
            self._delete_parent_directories(os.path.dirname(abs_path))
        return result
    def exists(self, path):
        # `svn info` exits 0 only for versioned paths; invert the exit code.
        return not self._run_svn(["info", path], return_exit_code=True, decode_output=False)
    def changed_files(self, git_commit=None):
        """Return files in the patch directories with local modifications."""
        status_command = [self.executable_name, "status"]
        status_command.extend(self._patch_directories)
        # ACDMR: Addded, Conflicted, Deleted, Modified or Replaced
        return self.run_status_and_extract_filenames(status_command, self._status_regexp("ACDMR"))
    def changed_files_for_revision(self, revision):
        """Return the files touched by a committed revision."""
        # As far as I can tell svn diff --summarize output looks just like svn status output.
        # No file contents printed, thus utf-8 auto-decoding in self.run is fine.
        status_command = [self.executable_name, "diff", "--summarize", "-c", revision]
        return self.run_status_and_extract_filenames(status_command, self._status_regexp("ACDMR"))
    def revisions_changing_file(self, path, limit=5):
        """Return up to `limit` revision numbers that modified path."""
        revisions = []
        # svn log will exit(1) (and thus self.run will raise) if the path does not exist.
        log_command = ['log', '--quiet', '--limit=%s' % limit, path]
        for line in self._run_svn(log_command, cwd=self.checkout_root).splitlines():
            match = re.search(r'^r(?P<revision>\d+) ', line)
            if not match:
                continue
            revisions.append(int(match.group('revision')))
        return revisions
    def conflicted_files(self):
        return self.run_status_and_extract_filenames(self.status_command(), self._status_regexp("C"))
    def added_files(self):
        return self.run_status_and_extract_filenames(self.status_command(), self._status_regexp("A"))
    def deleted_files(self):
        return self.run_status_and_extract_filenames(self.status_command(), self._status_regexp("D"))
    @staticmethod
    def supports_local_commits():
        return False
    def display_name(self):
        return "svn"
    def svn_revision(self, path):
        return self.value_from_svn_info(path, 'Revision')
    def timestamp_of_revision(self, path, revision):
        """Return the commit timestamp of revision as an ISO 8601 string."""
        # We use --xml to get timestamps like 2013-02-08T08:18:04.964409Z
        repository_root = self.value_from_svn_info(self.checkout_root, 'Repository Root')
        info_output = Executive().run_command([self.executable_name, 'log', '-r', revision, '--xml', repository_root], cwd=path).rstrip()
        match = re.search(r"^<date>(?P<value>.+)</date>\r?$", info_output, re.MULTILINE)
        return match.group('value')
    # FIXME: This method should be on Checkout.
    def create_patch(self, git_commit=None, changed_files=None):
        """Returns a byte array (str()) representing the patch file.
        Patch files are effectively binary since they may contain
        files of multiple different encodings."""
        if changed_files == []:
            return ""
        elif changed_files is None:
            changed_files = []
        return self.run([self.script_path("svn-create-patch")] + changed_files,
            cwd=self.checkout_root, return_stderr=False,
            decode_output=False)
    def committer_email_for_revision(self, revision):
        return self._run_svn(["propget", "svn:author", "--revprop", "-r", revision]).rstrip()
    def contents_at_revision(self, path, revision):
        """Returns a byte array (str()) containing the contents
        of path @ revision in the repository."""
        remote_path = "%s/%s" % (self._repository_url(), path)
        return self._run_svn(["cat", "-r", revision, remote_path], decode_output=False)
    def diff_for_revision(self, revision):
        # FIXME: This should probably use cwd=self.checkout_root
        return self._run_svn(['diff', '-c', revision])
    def _bogus_dir_name(self):
        """Pick a scratch directory name for a throwaway svn --config-dir."""
        rnd = ''.join(random.sample(string.ascii_letters, 5))
        if sys.platform.startswith("win"):
            parent_dir = tempfile.gettempdir()
        else:
            parent_dir = sys.path[0] # tempdir is not secure.
        return os.path.join(parent_dir, "temp_svn_config_" + rnd)
    def _setup_bogus_dir(self, log):
        """Create the scratch config dir, remembering whether we own it."""
        self._bogus_dir = self._bogus_dir_name()
        if not os.path.exists(self._bogus_dir):
            os.mkdir(self._bogus_dir)
            self._delete_bogus_dir = True
        else:
            self._delete_bogus_dir = False
        if log:
            log.debug(' Html: temp config dir: "%s".', self._bogus_dir)
    def _teardown_bogus_dir(self, log):
        """Remove the scratch config dir if we created it."""
        if self._delete_bogus_dir:
            shutil.rmtree(self._bogus_dir, True)
            if log:
                log.debug(' Html: removed temp config dir: "%s".', self._bogus_dir)
        self._bogus_dir = None
    def diff_for_file(self, path, log=None):
        """Diff a single file using a pristine --config-dir so user config
        (e.g. custom diff commands) cannot affect the output."""
        self._setup_bogus_dir(log)
        try:
            args = ['diff']
            if self._bogus_dir:
                args += ['--config-dir', self._bogus_dir]
            args.append(path)
            return self._run_svn(args, cwd=self.checkout_root)
        finally:
            self._teardown_bogus_dir(log)
    def show_head(self, path):
        return self._run_svn(['cat', '-r', 'BASE', path], decode_output=False)
    def _repository_url(self):
        return self.value_from_svn_info(self.checkout_root, 'URL')
    def apply_reverse_diff(self, revision):
        # '-c -revision' applies the inverse diff of 'revision'
        svn_merge_args = ['merge', '--non-interactive', '-c', '-%s' % revision, self._repository_url()]
        _log.warning("svn merge has been known to take more than 10 minutes to complete.  It is recommended you use git for rollouts.")
        _log.debug("Running 'svn %s'" % " ".join(svn_merge_args))
        # FIXME: Should this use cwd=self.checkout_root?
        self._run_svn(svn_merge_args)
    def revert_files(self, file_paths):
        # FIXME: This should probably use cwd=self.checkout_root.
        self._run_svn(['revert'] + file_paths)
    def commit_with_message(self, message, username=None, password=None, git_commit=None, force_squash=False, changed_files=None):
        """Commit local changes; raises AuthenticationError when no cached
        credentials exist and no username was supplied."""
        # git-commit and force are not used by SVN.
        svn_commit_args = ["commit"]
        if not username and not self.has_authorization_for_realm(self.svn_server_realm):
            raise AuthenticationError(self.svn_server_host)
        if username:
            svn_commit_args.extend(["--username", username])
        svn_commit_args.extend(["-m", message])
        if changed_files:
            svn_commit_args.extend(changed_files)
        return self._run_svn(svn_commit_args, cwd=self.checkout_root, error_handler=commit_error_handler)
    def svn_commit_log(self, svn_revision):
        svn_revision = self.strip_r_from_svn_revision(svn_revision)
        return self._run_svn(['log', '--non-interactive', '--revision', svn_revision])
    def last_svn_commit_log(self):
        # BASE is the checkout revision, HEAD is the remote repository revision
        # http://svnbook.red-bean.com/en/1.0/ch03s03.html
        return self.svn_commit_log('BASE')
    def svn_blame(self, path):
        return self._run_svn(['blame', path])
    def propset(self, pname, pvalue, path):
        dir, base = os.path.split(path)
        return self._run_svn(['pset', pname, pvalue, base], cwd=dir)
    def propget(self, pname, path):
        dir, base = os.path.split(path)
        # NOTE(review): .encode('utf-8').rstrip("\n") assumes Python 2 str
        # semantics; under Python 3 rstrip on bytes would need b"\n".
        return self._run_svn(['pget', pname, base], cwd=dir).encode('utf-8').rstrip("\n")
| bsd-3-clause |
dmnyu/bitcurator | bctools/dfxml.py | 3 | 54144 | #!/usr/bin/env python
#
# dfxml.py
# Digital Forensics XML classes
"""Digital Forensics XML classes.
This module contains a number of classes for dealing with dfxml files, both using
the XML DOM model and using the EXPAT model.
The following module functions are defined:
isone(x) - returns true if something is equal to 1 (useful for <ALLOC>1</ALLOC>)
safeInt(x) - converts something to an int but never raises an exception
The following classes are defined in this module:
byte_run - the class for representing a run on the disk
dftime - represents time. Can be in either Unix timestamp or ISO8601.
Interconverts as necessary.
fileobject - represents a DFXML fileobject.
byte_runs() is function that returns an array of byterun objects.
Each object has the attributes:
file_offset - offset from the beginning of the file
img_offset - offset from the beginning of the image
len - the number of bytes
fs_offset - offset from the beginning of the file system
where encoding, if present, is 0 for raw, 1 for NTFS compressed.
"""
from sys import stderr
from subprocess import Popen,PIPE
import base64
import hashlib
import datetime
__version__ = "1.0.1"
tsk_virtual_filenames = set(['$FAT1','$FAT2'])
def isone(x):
    """Return True if x converts to the integer 1 (number or string).

    Returns False for anything that cannot be converted with int().
    """
    try:
        return int(x) == 1
    except (TypeError, ValueError):
        # BUG FIX: int("foo") raises ValueError (not TypeError), so the old
        # code raised instead of returning False for non-numeric strings.
        return False
def safeInt(x):
    """Return an integer or False.  False is returned, rather than None, because you can
    divide False by 3 but you can't divide None by 3.

    Falsy inputs (None, 0, "") yield False, matching the original contract;
    unconvertible values now also yield False instead of raising.
    """
    if not x:
        return False
    try:
        return int(x)
    except (TypeError, ValueError):
        # BUG FIX: the docstring promises "never raises an exception", but
        # the old code let int("abc") propagate a ValueError.
        return False
def timestamp2iso8601(ts):
    """Convert a Unix timestamp (seconds since the epoch) to an ISO 8601
    UTC string, e.g. 0 -> "1970-01-01T00:00:00Z".
    """
    import time
    # Use explicit directives instead of "%F"/"%T": those are glibc
    # extensions and are not supported by every strftime implementation
    # (notably Windows).
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(ts))
from datetime import tzinfo,timedelta
class GMTMIN(tzinfo):
    """Fixed-offset tzinfo expressed as minutes east of UTC (no DST)."""
    def __init__(self,minoffset):
        # minoffset: minutes east of UTC, e.g. 90 -> "GMT+0130".
        self.minoffset = minoffset
    def utcoffset(self, dt):
        return timedelta(minutes=self.minoffset)
    def dst(self, dt):
        # Fixed offset: there is never a daylight-saving shift.
        return timedelta(0)
    def tzname(self,dt):
        # NOTE(review): negative offsets render oddly here (sign and floor
        # division interact, e.g. -90 minutes does not give "GMT-0130");
        # confirm whether negative offsets are ever constructed.
        return "GMT+%02d%02d" % (self.minoffset/60,self.minoffset%60)
def parse_iso8601(ts):
    """Parse a basic "YYYY-MM-DDTHH:MM:SSZ" timestamp into a naive datetime.

    Raises RuntimeError for any string without a 'Z' designator.
    """
    z_pos = ts.find('Z')
    if z_pos <= 0:
        raise RuntimeError("parse_iso8601: ISO8601 format {} not recognized".format(ts))
    return datetime.datetime.strptime(ts[:z_pos], "%Y-%m-%dT%H:%M:%S")
import re
# Matches "YYYY-MM-DD" + ("T" or space) + "HH:MM:SS", an optional fractional
# seconds group (7), and an optional timezone designator group (8):
# "Z" or a signed +/-HHMM offset.
tz_offset = re.compile(r"(\d\d\d\d)-(\d\d)-(\d\d)[T ](\d\d):(\d\d):(\d\d)(\.\d+)?(Z|[-+]\d+)?$")
def iso8601Tdatetime(s):
    """SLG's conversion of ISO8601 to datetime.

    Returns a naive datetime for 'Z' or missing timezone designators,
    otherwise a timezone-aware datetime carrying a GMTMIN tzinfo.
    Raises ValueError if s cannot be parsed.
    """
    m = tz_offset.search(s)
    if not m:
        raise ValueError("Cannot parse: "+s)
    # Get the microseconds
    try:
        microseconds = int(float(m.group(7)) * 1000000)
    except TypeError:
        # group(7) is None when no fractional seconds were given.
        microseconds = 0
    # Figure the timezone offset in minutes east of UTC.
    minoffset = None
    tz = m.group(8)
    if tz == "Z":
        minoffset = 0
    elif tz and tz[0] in "-+":
        # BUG FIX: the old test `tz[0:1] in ["-+"]` compared the sign
        # character against the two-character string "-+" and could never
        # match; it also crashed on timestamps with no designator at all
        # (None[0:1] raises TypeError), and ignored the sign on the minutes
        # component (e.g. "-0530" computed -270 instead of -330).
        sign = -1 if tz[0] == "-" else 1
        minoffset = sign * (int(tz[1:-2] or 0) * 60 + int(tz[-2:]))
    # Build the response
    if minoffset:
        return datetime.datetime(int(m.group(1)),int(m.group(2)),int(m.group(3)),
                                 int(m.group(4)),int(m.group(5)),int(m.group(6)),
                                 microseconds,GMTMIN(minoffset))
    else:
        return datetime.datetime(int(m.group(1)),int(m.group(2)),int(m.group(3)),
                                 int(m.group(4)),int(m.group(5)),int(m.group(6)),
                                 microseconds)
################################################################
###
### byte_run class
###
class byte_run:
    """The internal representation for a byte run.

    byte_runs have the following attributes:
    .img_offset  = offset of the byte run from the image start, in bytes
    .file_offset = offset of the byte run from the file start, in bytes
    .len         = the length of the run, in bytes (previously called 'bytes')
    .sector_size = sector size of the underlying media

    Originally this was an array, which is faster than an attributed object.
    But this approach is more expandable, and it's only 70% the speed of an
    array under Python3.0.
    """
    def __init__(self,img_offset=None,len=None,file_offset=None):
        self.img_offset = img_offset
        self.file_offset = file_offset
        self.len = len
        self.sector_size = 512          # default
        self.hashdigest = dict()        # maps hash algorithm name -> digest string
    def __cmp__(self,other):
        # NOTE: Python 2 only (__cmp__ and the cmp builtin were removed in
        # Python 3); ordered by image offset, falling back to file offset.
        if self.img_offset != None and other.img_offset != None:
            return cmp(self.img_offset,other.img_offset)
        elif self.file_offset != None and other.file_offset != None:
            return cmp(self.file_offset,other.file_offset)
    def __str__(self):
        # Try progressively sparser renderings depending on which optional
        # attributes this run actually carries.
        try:
            return "byte_run[img_offset={0}; file_offset={1} len={2}] ".format(
                self.img_offset,self.file_offset,self.len)
        except (AttributeError, TypeError):
            #Catch attributes that are missing or mis-typed (e.g. NoneType)
            pass
        try:
            return "byte_run[file_offset={0}; fill={1}; len={2}]".format(
                self.file_offset,self.fill,self.len)
        except AttributeError:
            pass
        try:
            return "byte_run[file_offset={0}; uncompressed_len={1}]".format(
                self.file_offset,self.uncompressed_len)
        except AttributeError:
            return "byte_run"+str(dir(self))
    def start_sector(self):
        """First sector (inclusive) covered by this run."""
        return self.img_offset // self.sector_size
    def sector_count(self):
        """Number of whole sectors covered by this run."""
        return self.len // self.sector_size
    def has_sector(self,s):
        """Return True if sector number s falls inside this run."""
        if self.sector_size==0:
            raise ValueError("%s: sector_size cannot be 0" % (self))
        try:
            return self.img_offset <= s * self.sector_size < self.img_offset+self.len
        except AttributeError:
            # Doesn't have necessary attributes to answer true.
            # Usually this happens with runs of a constant value
            return False
    def extra_len(self):
        """Bytes beyond the last whole sector boundary."""
        return self.len % self.sector_size
    def decode_xml_attributes(self,attr):
        """Set attributes from an XML DOM attribute map, coercing to int
        where possible."""
        for (key,value) in attr.items():
            try:
                setattr(self,key,int(value))
            except ValueError:
                setattr(self,key,value)
    def decode_sax_attributes(self,attr):
        """Set attributes from a SAX attribute map, coercing to int where
        possible."""
        for (key,value) in attr.items():
            # BUG FIX: the old code wrote `key=='len'` (a no-op comparison)
            # instead of assigning, so legacy 'bytes' attributes were never
            # renamed to 'len'.
            if key=='bytes': key='len' # tag changed name; provide backwards compatibility
            try:
                setattr(self,key,int(value))
            except ValueError:
                setattr(self,key,value)
class ComparableMixin(object):
    """
    Comparator "abstract" base: subclasses define _cmpkey() and inherit
    all six rich comparisons, each delegating to a comparison of keys.
    Credit to Lennart Regebro for the total implementation of this class,
    found equivalently from:
    http://regebro.wordpress.com/2010/12/13/python-implementing-rich-comparison-the-correct-way/
    http://stackoverflow.com/questions/6907323/comparable-classes-in-python-3/6913420#6913420
    """
    def _compare(self, other, method):
        try:
            return method(self._cmpkey(), other._cmpkey())
        except (AttributeError, TypeError):
            # other lacks _cmpkey(), or the keys aren't mutually
            # comparable; let Python try the reflected operation.
            return NotImplemented
    def __lt__(self, other):
        return self._compare(other, lambda a, b: a < b)
    def __le__(self, other):
        return self._compare(other, lambda a, b: a <= b)
    def __eq__(self, other):
        return self._compare(other, lambda a, b: a == b)
    def __ge__(self, other):
        return self._compare(other, lambda a, b: a >= b)
    def __gt__(self, other):
        return self._compare(other, lambda a, b: a > b)
    def __ne__(self, other):
        return self._compare(other, lambda a, b: a != b)
import sys
class dftime(ComparableMixin):
    """Represents a DFXML time.  Automatically converts between representations and caches the
    results as necessary..

    Internal cached representations (created lazily): iso8601_, timestamp_, datetime_.
    """
    UTC = GMTMIN(0)
    def ts2datetime(self,ts):
        """Convert a Unix timestamp to a timezone-aware (UTC) datetime."""
        import datetime
        return datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=dftime.UTC)
    def __init__(self,val):
        """Accept an ISO-8601 string, a string-wrapped integer timestamp, an
        int/float Unix timestamp, None, or another dftime instance."""
        #'unicode' is not a type in Python 3; 'basestring' is not a type in Python 2.
        if sys.version_info >= (3,0):
            _basestring = str
        else:
            _basestring = basestring
        if isinstance(val, str) or isinstance(val,_basestring):
            #Test for ISO format - "YYYY-MM-DD" should have hyphen at val[4]
            if len(val)>5 and val[4]=="-":
                self.iso8601_ = val
            else:
                #Maybe the data is a string-wrapped int?
                #If this fails, data is completely unexpected, so just raise error.
                self.timestamp_ = int(val)
        elif type(val)==int or type(val)==float:
            self.timestamp_ = val
        elif val is None:
            self.timestamp_ = None
            self.iso8601_ = None
        elif isinstance(val, dftime):
            #If we instead use .timestamp_, we risk having a timezone conversion error
            self.iso8601_ = val.iso8601()
        else:
            raise ValueError("Unknown type '%s' for DFXML time value" % (str(type(val))))
    def __str__(self):
        return self.iso8601() or ""
    def __repr__(self):
        return self.iso8601() or "None"
    def __le__(self,b):
        # NOTE(review): returns None (not False) for b is None, and shadows
        # ComparableMixin.__le__ with an ISO-string comparison -- preserved as-is.
        if b is None: return None
        return self.iso8601().__le__(b.iso8601())
    def __gt__(self,b):
        if b is None: return None
        return self.iso8601().__gt__(b.iso8601())
    def _cmpkey(self):
        """Provide a key to use for comparisons; for use with ComparableMixin parent class."""
        return self.timestamp()
    def __eq__(self,b):
        if b is None:
            #This will always be False - if self were None, we wouldn't be in this __eq__ method.
            return False
        return self.timestamp()==b.timestamp()
    def iso8601(self):
        """Return the ISO-8601 string representation, computing and caching it
        from the datetime or timestamp representation as needed."""
        import time
        # Do we have a cached representation?
        try:
            return self.iso8601_
        except AttributeError:
            pass
        # Do we have a cached datetime representation?
        try:
            # BUG FIX: previously read self.datetime (the bound method, which
            # has no .isoformat) instead of the cached self.datetime_ attribute,
            # so this branch could never succeed.
            self.iso8601_ = self.datetime_.isoformat()
            return self.iso8601_
        except AttributeError:
            # We better have a Unix timestamp representation?
            self.iso8601_ = time.strftime("%Y-%m-%dT%H:%M:%SZ",time.gmtime(self.timestamp_))
            return self.iso8601_
    def timestamp(self):
        """Return the Unix-timestamp representation, computing and caching it
        from the datetime or ISO-8601 representation as needed."""
        import time
        # Do we have a cached representation?
        try:
            return self.timestamp_
        except AttributeError:
            pass
        # Do we have a datetime_ object?
        try:
            # NOTE(review): time.mktime interprets the timetuple as *local*
            # time, while the cached datetimes are UTC -- confirm whether an
            # offset is acceptable to callers before changing.
            self.timestamp_ = time.mktime(self.datetime_.timetuple())
            return self.timestamp_
        except AttributeError:
            self.datetime_ = iso8601Tdatetime(self.iso8601_)
            self.timestamp_ = time.mktime(self.datetime_.timetuple())
            return self.timestamp_
    def datetime(self):
        """Return the datetime representation, parsed from either the
        timestamp or the ISO-8601 string (and cache it)."""
        import datetime
        try:
            self.datetime_ = self.ts2datetime(self.timestamp_)
            # This needs to be in UTC offset. How annoying.
            return self.datetime_
        except AttributeError:
            self.datetime_ = iso8601Tdatetime(self.iso8601_)
            return self.datetime_
class registry_object:
    """Represents a parsed registry hive: an index of finished cell objects
    keyed by full path, plus the hive-level mtime."""
    def __init__(self):
        self.object_index = {}
        self._mtime = None
        # Keep handy a handle on the registry object (self, for uniformity
        # with registry_cell_object.registry_handle).
        self.registry_handle = self
    def mtime(self):
        """Hive modification time, or None if none was recorded."""
        return self._mtime
class registry_cell_object:
    """Common base for registry keys and values parsed from RegXML."""
    def __init__(self):
        self._byte_runs = []
        # Pointer to a registry_key_object.  The root node has no parent key.
        self.parent_key = None
        self._name = None
        self._full_path = None
        # Keys have two types: "root" (0x2c,0xac) and not-root.  Values have several more types.
        self.type = None
        # Keep handy a handle on the registry object.
        self.registry_handle = None
        # Name of the cell type, for str() and repr().
        self._cell_type = "(undefined cell object type)"
        # Only applicable to values.
        self._sha1 = None
    def name(self):
        """This is the name of the present key or value."""
        return self._name
    def full_path(self):
        """
        This is the full path from the root of the hive, with keys acting like directories and the value name acting like the basename.
        Unlike DFXML, registry paths are delimited with a backslash due to the forward slash being a legal and commonly observed character in cell names.
        """
        return self._full_path
    def _myname(self):
        """Shared body for __repr__ and __str__ (kept separate to avoid any
        chance of repr/str recursion)."""
        if self._byte_runs:
            offset_str = str(self._byte_runs[0].file_offset)
        else:
            offset_str = "(unknown)"
        return "<%s for hive file offset %s>" % (self._cell_type, offset_str)
    def __repr__(self):
        return self._myname()
    def __str__(self):
        return self._myname()
    def mtime(self):
        raise NotImplementedError("registry_cell_object.mtime() not over-ridden!")
    def byte_runs(self):
        """Return a shallow copy of the byte_run list, so callers can't
        mutate our internal state."""
        return list(self._byte_runs)
    def sha1(self):
        """
        Return None.  Meant to be overwritten.
        """
        return None
class registry_key_object(registry_cell_object):
    """A registry key (node): carries an mtime and a dict of child values."""
    def __init__(self):
        registry_cell_object.__init__(self)
        self._mtime = None
        self.values = {}
        self.used = True #TODO Add toggling logic for when hivexml (eventually) processes recovered keys
        self._cell_type = "registry_key_object"
    def mtime(self):
        """Modification time recorded for this key, or None."""
        return self._mtime
    def root(self):
        """True if this is the hive root, False if not; None when the type
        was never determined."""
        if self.type is None:
            return None
        return self.type == "root"
class registry_value_object(registry_cell_object):
    """A registry value: carries decoded value data and, for string-list
    values, the list of strings."""
    def __init__(self):
        registry_cell_object.__init__(self)
        self.value_data = None
        self._cell_type = "registry_value_object"
        # List for the string-list type of value; None for all other types.
        self.strings = None
    def mtime(self):
        """Return nothing.  Alternatively, we might return mtime of parent key in the future."""
        return None
    def sha1(self):
        """
        Return cached hash (hex), populating cache if necessary.
        If self.value_data is None, this should return None.
        """
        if self._sha1 is None:
            if self.value_data is not None:
                h = hashlib.sha1()
                # isinstance instead of type(...)==type("") also covers str
                # subclasses (behavior otherwise unchanged).
                if isinstance(self.value_data, str):
                    #String data take a little extra care:
                    #"The bytes in your ... file are being automatically decoded to Unicode by Python 3 as you read from the file"
                    #http://stackoverflow.com/a/7778340/1207160
                    h.update(self.value_data.encode("utf-8"))
                else:
                    h.update(self.value_data)
                self._sha1 = h.hexdigest()
        return self._sha1
class fileobject:
    """The base class for file objects created either through XML DOM or EXPAT.
    Subclasses supply tag()/has_tag() and byte_runs()."""
    TIMETAGLIST=['atime','mtime','ctime','dtime','crtime']
    def __init__(self,imagefile=None):
        self.imagefile = imagefile
        self.hashdigest = dict()
    def __str__(self):
        try:
            fn = self.filename()
        except KeyError:
            fn = "???"
        return "fileobject %s byte_runs: %s" % (fn, " ".join([str(x) for x in self.byte_runs()]))
    def partition(self):
        """Partition number of the file"""
        return self.tag("partition")
    def filename(self):
        """Complement name of the file (sometimes called pathname)"""
        return self.tag("filename")
    def ext(self):
        """Extension, without the leading '.'
        NOTE(review): despite the original docstring's claim, the extension is
        NOT lower-cased here; preserved as-is for compatibility."""
        import os
        (base,ext) = os.path.splitext(self.filename())
        if ext == '':
            return None
        else:
            return ext[1:]
    def filesize(self):
        """Size of the file, in bytes"""
        return safeInt(self.tag("filesize"))
    def uid(self):
        """UID of the file"""
        return safeInt(self.tag("uid"))
    def gid(self):
        """GID of the file"""
        return safeInt(self.tag("gid"))
    def meta_type(self):
        """Meta-type of the file"""
        return safeInt(self.tag("meta_type"))
    def mode(self):
        """Mode of the file"""
        return safeInt(self.tag("mode"))
    def ctime(self):
        """Metadata Change Time (sometimes Creation Time), as number of seconds
        since January 1, 1970 (Unix time)"""
        t = self.tag("ctime")
        if t: return dftime(t)
        return None
    def atime(self):
        """Access time, as number of seconds since January 1, 1970 (Unix time)"""
        t = self.tag("atime")
        if t: return dftime(t)
        return None
    def crtime(self):
        """CR time, as number of seconds since January 1, 1970 (Unix time)"""
        t = self.tag("crtime")
        if t: return dftime(t)
        return None
    def mtime(self):
        """Modify time, as number of seconds since January 1, 1970 (Unix time)"""
        t = self.tag("mtime")
        if t: return dftime(t)
        return None
    def dtime(self):
        """ext2 dtime"""
        t = self.tag("dtime")
        if t: return dftime(t)
        return None
    def times(self):
        """Return a dictionary of all times that the system has"""
        ret = {}
        for tag in self.TIMETAGLIST:
            if self.has_tag(tag):
                try:
                    ret[tag] = dftime(self.tag(tag))
                except TypeError:
                    pass
        return ret
    def sha1(self):
        """Returns the SHA1 in hex"""
        return self.tag("sha1")
    def md5(self):
        """Returns the MD5 in hex"""
        return self.tag("md5")
    def fragments(self):
        """Returns number of file fragments"""
        return len(self.byte_runs())
    def name_type(self):
        """Return the contents of the name_type tag"""
        return self.tag("name_type")
    def is_virtual(self):
        """Returns true if the fi entry is a TSK virtual entry"""
        return self.filename() in tsk_virtual_filenames
    def is_dir(self):
        """Returns true if file is a directory"""
        return self.name_type()=='d'
    def is_file(self):
        """Returns true if file is a file"""
        return self.name_type()=='r' or self.name_type() is None
    def inode(self):
        """Inode; may be a number or SleuthKit x-y-z format"""
        return self.tag("inode")
    def allocated(self):
        """Returns True if the file is allocated, False if it was not
        (that is, if it was deleted or is an orphan).
        Note that we need to be tolerant of mixed case, as it was changed.
        """
        if self.filename()=="$OrphanFiles": return False
        return isone(self.tag("alloc")) or isone(self.tag("ALLOC"))
    def compressed(self):
        """Returns True if the file is marked compressed (either tag case)."""
        # BUG FIX: the second has_tag() previously re-checked "compressed"
        # instead of the uppercase legacy tag (mirroring the isone() line).
        if not self.has_tag("compressed") and not self.has_tag("COMPRESSED") : return False
        return isone(self.tag("compressed")) or isone(self.tag("COMPRESSED"))
    def encrypted(self):
        """Returns True if the file is marked encrypted (either tag case)."""
        # BUG FIX: same duplicated-tag check as compressed().
        if not self.has_tag("encrypted") and not self.has_tag("ENCRYPTED") : return False
        return isone(self.tag("encrypted")) or isone(self.tag("ENCRYPTED"))
    def file_present(self,imagefile=None):
        """Returns true if the file is present in the disk image, determined
        by recomputing a stored md5 or sha1 over the file's contents."""
        if self.filesize()==0:
            return False # empty files are never present
        if imagefile==None:
            imagefile=self.imagefile # use this one
        for hashname in ['md5','sha1']:
            oldhash = self.tag(hashname)
            if oldhash:
                newhash = hashlib.new(hashname,self.contents(imagefile=imagefile)).hexdigest()
                return oldhash==newhash
        raise ValueError("Cannot process file "+self.filename()+": no hash in "+str(self))
    def has_contents(self):
        """True if the file has one or more bytes"""
        return len(self.byte_runs())>0
    def has_sector(self,s):
        """True if sector s is contained in one of the byte_runs."""
        for run in self.byte_runs():
            if run.has_sector(s): return True
        return False
    def libmagic(self):
        """Returns libmagic string if the string is specified
        in the xml, or None otherwise"""
        return self.tag("libmagic")
    def content_for_run(self,run=None,imagefile=None):
        """ Returns the content for a specific run. This is a convenience feature
        which does not touch the file object if an imagefile is provided."""
        if imagefile==None: imagefile=self.imagefile
        if run.len== -1:
            # NOTE(review): chr(0) * -1 yields "" -- this always returns an
            # empty string; confirm whether a NUL run was intended.
            return chr(0) * run.len
        elif hasattr(run,'fill'):
            return chr(run.fill) * run.len
        else:
            imagefile.seek(run.img_offset)
            return imagefile.read(run.len)
    def contents(self,imagefile=None,icat_fallback=True):
        """ Returns the contents of all the runs concatenated together. For allocated files
        this should be the original file contents. """
        if imagefile is None : imagefile=self.imagefile
        if imagefile is None : raise ValueError("imagefile is unknown")
        if self.encrypted() : raise ValueError("Cannot generate content for encrypted files")
        if self.compressed() or imagefile.name.endswith(".aff") or imagefile.name.endswith(".E01"):
            if icat_fallback:
                #
                # For now, compressed files rely on icat rather than python interface
                #
                offset = safeInt(self.volume.offset)
                block_size = safeInt(self.volume.block_size)
                if block_size==0: block_size = 512
                inode = self.inode()
                if inode :
                    block_size = 512
                    fstype_flag = ""
                    fstype = self.volume.ftype_str()
                    if fstype != None:
                        fstype_flag = '-f' + fstype
                        # BUG FIX (Py3): floor division so icat's -o argument is
                        # an integer sector offset, not a float string.
                        cmd = ['icat',fstype_flag,'-b',str(block_size),'-o',str(offset//block_size),imagefile.name,str(inode)]
                    else:
                        cmd = ['icat','-b',str(block_size),'-o',str(offset//block_size),imagefile.name,str(inode)]
                    (data,err) = Popen(cmd, stdout=PIPE,stderr=PIPE).communicate()
                    # Check for an error
                    if len(err) > 0 :
                        raise ValueError("icat error (" + err.strip() + "): "+" ".join(cmd))
                    return data
                else :
                    raise ValueError("Inode missing from file in compressed format.")
            raise ValueError("Cannot read raw bytes in compressed disk image")
        res = []
        for run in self.byte_runs():
            res.append(self.content_for_run(run=run,imagefile=imagefile))
        return "".join(res)
    def tempfile(self,calcMD5=False,calcSHA1=False):
        """Return the contents of imagefile in a named temporary file. If
        calcMD5 or calcSHA1 are set TRUE, then the object returned has a
        haslib object as self.md5 or self.sha1 with the requested hash."""
        import tempfile
        tf = tempfile.NamedTemporaryFile()
        if calcMD5: tf.md5 = hashlib.md5()
        if calcSHA1: tf.sha1 = hashlib.sha1()
        for run in self.byte_runs():
            self.imagefile.seek(run.img_offset)
            count = run.len
            while count>0:
                xfer_len = min(count,1024*1024) # transfer up to a megabyte at a time
                buf = self.imagefile.read(xfer_len)
                if len(buf)==0: break
                tf.write(buf)
                if calcMD5: tf.md5.update(buf)
                if calcSHA1: tf.sha1.update(buf)
                count -= xfer_len
        tf.flush()
        return tf
    def savefile(self,filename=None):
        """Saves the file."""
        with open(filename,"wb") as f:
            for run in self.byte_runs():
                self.imagefile.seek(run.img_offset)
                count = run.len
                while count>0:
                    xfer_len = min(count,1024*1024) # transfer up to a megabyte at a time
                    buf = self.imagefile.read(xfer_len)
                    if len(buf)==0: break
                    f.write(buf)
                    count -= xfer_len
    def frag_start_sector(self,fragment):
        """Image sector number where FRAGMENT begins (assumes 512-byte sectors)."""
        # BUG FIX (Py3): floor division keeps the sector number an int.
        return self.byte_runs()[fragment].img_offset // 512
    # (A duplicate name_type() redefinition, identical to the one above, was
    # removed here.)
class fileobject_dom(fileobject):
    """file objects created through the DOM. Each object has the XML document
    stored in the .doc attribute."""
    def __init__(self,xmldoc,imagefile=None):
        fileobject.__init__(self,imagefile=imagefile)
        self.doc = xmldoc
    def tag(self,name):
        """Returns the wholeText for any given NAME. Raises KeyError
        if the NAME does not exist."""
        try:
            return self.doc.getElementsByTagName(name)[0].firstChild.wholeText
        except IndexError:
            # Check for a hash tag with legacy API
            if name in ['md5','sha1','sha256']:
                for elem in self.doc.getElementsByTagName('hashdigest'):
                    if elem.getAttribute('type').lower()==name:
                        return elem.firstChild.wholeText
            raise KeyError(name+" not in XML")
    def has_tag(self,name) :
        """True exactly when tag(NAME) would succeed (delegates to it)."""
        try:
            self.tag(name)
            return True
        except KeyError:
            return False
    def byte_runs(self):
        """Returns a sorted array of byte_run objects.
        """
        runs = []
        try:
            for node in self.doc.getElementsByTagName("byte_runs")[0].childNodes:
                if node.nodeType==node.ELEMENT_NODE:
                    run = byte_run()
                    run.decode_xml_attributes(node.attributes)
                    runs.append(run)
        except IndexError:
            pass
        runs.sort(key=lambda r:r.file_offset)
        return runs
class saxobject:
    """Mix-in that makes it easy to turn XML tags into functions.

    If a sax tag is registered (see register_sax_tag), a method with the
    tag's name is created; calling it returns the tag's stored value.
    Values live in the _tags dictionary, which the subclass's _end_element()
    fills in.  For fileobjects all tags are remembered.
    """
    def __init__(self):
        self._tags = {}
    def tag(self,name):
        """Returns the XML text for a given NAME (None if never seen)."""
        return self._tags.get(name,None)
    def has_tag(self,name):
        """True if NAME was recorded."""
        return name in self._tags
def register_sax_tag(tagclass,name):
    """Install on TAGCLASS a zero-argument method NAME that returns
    self.tag(name) -- the saxobject accessor for that tag."""
    def _accessor(self):
        return self.tag(name)
    setattr(tagclass, name, _accessor)
class fileobject_sax(fileobject,saxobject):
    """file objects created through expat.  This class is created with a tags
    array and a set of byte runs (filled in by fileobject_reader)."""
    def __init__(self,imagefile=None,xml=None):
        # Initialize both bases; the xml parameter is accepted for interface
        # compatibility and is not used here.
        fileobject.__init__(self,imagefile=imagefile)
        saxobject.__init__(self)
        self._byte_runs = []
    def byte_runs(self):
        """Returns the accumulated array of byte_run objects."""
        return self._byte_runs
class volumeobject_sax(saxobject):
    """A class that represents the volume."""
    def __init__(self):
        # Defensive: only call the base initializer if one exists.
        if hasattr(saxobject, "__init__"):
            saxobject.__init__(self)
        self.offset = 0
        self.block_size = 0
    def __str__(self):
        return "volume "+(str(self._tags))
    def partition_offset(self):
        """Return the partition_offset tag, falling back to the legacy
        capitalized tag name.

        BUG FIX: the old try/except KeyError never fired because
        saxobject.tag() returns None rather than raising, so the
        'Partition_Offset' fallback was dead code."""
        v = self.tag('partition_offset')
        if v is None:
            v = self.tag('Partition_Offset')
        return v
    def sector_size(self):
        """Return the sector_size tag.  (The old try/except performed the
        identical lookup in both branches; simplified.)"""
        return self.tag('sector_size')
# Expose common volume-level tags as accessor methods on volumeobject_sax.
register_sax_tag(volumeobject_sax,'ftype')
register_sax_tag(volumeobject_sax,'ftype_str')
register_sax_tag(volumeobject_sax,'block_count')
register_sax_tag(volumeobject_sax,'first_block')
register_sax_tag(volumeobject_sax,'last_block')
class imageobject_sax(saxobject):
    """A class that represents the disk image"""
# Expose image-level tags as accessor methods (must follow the class statement).
register_sax_tag(imageobject_sax,'imagesize')
register_sax_tag(imageobject_sax,'image_filename')
################################################################
################################################################
def safe_b64decode(b64data):
    """
    This function takes care of the logistics of base64 decoding XML data in Python 2 and 3.
    Recall that Python3 requires b64decode operate on bytes, not a string.
    Ref: <http://bugs.python.org/issue4769#msg115690>
    A forum post that noted several encoding differences between Python 2 and 3:
    <http://stackoverflow.com/questions/9327993/python3-unicode-escape-doesnt-work-with-non-ascii-bytes>
    """
    if sys.version_info.major == 2:
        return base64.b64decode(b64data).decode("unicode_escape")
    elif sys.version_info.major == 3:
        # BUG FIX: the old str(type(...)) string comparison failed for str/bytes
        # subclasses, leaving to_decode as None and crashing in b64decode.
        if isinstance(b64data, str):
            to_decode = b64data.encode("ascii")
        else:
            # bytes (or bytes-like) input can be decoded directly.
            to_decode = b64data
        return base64.b64decode(to_decode).decode("unicode_escape")
    else:
        raise Exception("Not sure how to parse base64 data outside Python versions 2 or 3.")
class xml_reader:
    """Minimal expat-driven XML reader base class.

    Subclasses define _start_element and _end_element; character data is
    accumulated into self.cdata only while it is a string (None disables
    collection)."""
    def __init__(self):
        self.cdata = None
        self.tagstack = ['xml']
    def _char_data(self, data):
        """Handles XML character data; appends only when collection is enabled."""
        if self.cdata is not None:
            self.cdata += data
    def process_xml_stream(self,xml_stream,callback):
        "Run the reader on a given XML input stream"
        import xml.parsers.expat
        self.callback = callback
        parser = xml.parsers.expat.ParserCreate()
        parser.StartElementHandler = self._start_element
        parser.EndElementHandler = self._end_element
        parser.CharacterDataHandler = self._char_data
        parser.ParseFile(xml_stream)
class regxml_reader(xml_reader):
    """SAX (expat) reader for RegXML registry hive streams.

    Maintains self.objectstack of in-progress objects; _start_element pushes
    hive/key/value objects, and _end_element pops them, indexes keys by full
    path, and hands each finished object to the user callback.
    """
    def __init__(self,flags=None):
        self.flags = flags
        xml_reader.__init__(self) #TODO wait, shouldn't flags go in here?
        self.objectstack = []
        self.registry_object = None
        # Counter used to synthesize names for keys with no recorded name.
        self.nonce = 0
    def _start_element(self, name, attrs):
        """
        The objectstack conditionally grows, depending on type of element processed
        * msregistry (hive): Create a new msregistry object, append to objectstack
        * key (node): Create a new key object, append to objectstack
        * mtime: The text is going to become a property of the parent element; do not append to objectstack.
        * value: Create a new value object, append to objectstack.
        """
        new_object = None
        if name in ["msregistry","hive"]:
            new_object = registry_object()
            self.objectstack.append(new_object)
            self.registry_object = new_object
        elif name in ["key","node"]:
            new_object = registry_key_object()
            #Note these two tests for root and parent _are_ supposed to be independent tests.
            if attrs.get("root",None) == "1":
                new_object.type = "root"
            else:
                new_object.type = ""
            if len(self.objectstack) > 1:
                new_object.parent_key = self.objectstack[-1]
            #Sanity check: root key implies no parent
            if new_object.type == "root":
                assert new_object.parent_key == None
            #Sanity check: no parent implies root key --OR-- recovered key
            if new_object.parent_key == None:
                assert new_object.used == False or new_object.type == "root"
            #Define new_object.name
            #Force a name for keys. If the key has no recorded name, apply artificial name prefix to nonce.
            name_data = attrs.get("name")
            if name_data == None:
                new_object._name = "__DFXML_NONCE_" + str(self.nonce)
                self.nonce += 1
            else:
                enc = attrs.get("name_encoding")
                if enc == "base64":
                    new_object._name = safe_b64decode(name_data)
                else:
                    new_object._name = name_data
            if new_object.parent_key == None:
                new_object._full_path = "\\" + new_object.name()
                # TODO need a name scheme for orphan references, when we start processing orphans
            else:
                new_object._full_path = new_object.parent_key.full_path() + "\\" + new_object.name()
            self.objectstack.append(new_object)
        elif name in ["value"]:
            new_object = registry_value_object()
            new_object.parent_key = self.objectstack[-1]
            new_object.type = attrs.get("type",None)
            if new_object.type == "string-list":
                new_object.strings = []
            #Store decoded name
            if attrs.get("default",None) == "1":
                new_object._name = "Default"
                if attrs.get("name",attrs.get("key",None)) is not None:
                    #TODO Notify: concurrently set name attribute and default-name flag
                    pass
            else:
                enc = attrs.get("name_encoding",attrs.get("key_encoding"))
                name_data = attrs.get("name",attrs.get("key",None))
                if enc == "base64":
                    #CONSISTENCY FIX: use the shared Python-2/3-safe helper, as
                    #the key-name branch above already does, instead of a
                    #duplicated inline decode wrapped in a bare except.
                    new_object._name = safe_b64decode(name_data)
                else:
                    new_object._name = name_data
            new_object._full_path = new_object.parent_key.full_path() + "\\" + new_object.name()
            #Store decoded value
            new_object.value_data = self.decoded_value(attrs)
            self.objectstack.append(new_object)
        elif name in ["mtime"]:
            self.cdata = ""
        elif name in ["string"]:
            self.cdata = ""
        elif name in ["byte_runs"]:
            pass
        elif name in ["byte_run"]:
            parent = self.objectstack[-1]
            parent._byte_runs.append(byte_run(file_offset=attrs.get("file_offset"), len=attrs.get("len")))
        else:
            raise ValueError("regxml_reader._start_element: Don't know how to start element %s.\n" % name)
        #Give all cell objects a handle on the registry
        if new_object != None:
            new_object.registry_handle = self.registry_object
    def decoded_value(self, attrs):
        """Return the decoded data for a value element's attributes, or None
        when no value attribute is present."""
        value_data = attrs.get("value",None)
        if value_data:
            # TODO adjust hivexml to not use a plain "encoding" attribute
            value_encoding = attrs.get("encoding", attrs.get("value_encoding"))
            if value_encoding == "base64":
                import sys
                if sys.version_info.major>2:
                    value_data = bytes(value_data,encoding='ascii')
                return base64.b64decode(value_data)
            else:
                return value_data
        else:
            return None
    def _end_element(self, name):
        """
        The callback is invoked for each stack-popping operation, except the root.
        """
        #TODO sanity-check the objectstack
        if name in ["msregistry","hive"]:
            pass
        elif name in ["key","node"]:
            finished_object = self.objectstack.pop()
            #Add finished object to object index
            if finished_object.full_path() in self.registry_object.object_index:
                raise ValueError("regxml_reader._end_element: Same key path found more than once: " +
                        finished_object.full_path())
            self.registry_object.object_index[finished_object.full_path()] = finished_object
            self.callback(finished_object)
        elif name in ["mtime"]:
            self.objectstack[-1]._mtime = dftime(self.cdata)
            self.cdata = None
        elif name in ["value"]:
            finished_object = self.objectstack.pop()
            #TODO Simplify once hivexml is patched to have value/@value instead of value/[cdata]
            if finished_object.value_data == None:
                finished_object.value_data = self.cdata
            self.callback(finished_object)
        elif name in ["string"]:
            value_object = self.objectstack[-1]
            if value_object.strings == None:
                raise ValueError("regxml_reader._end_element: parsing error, string found but parent's type can't support a string list.")
            value_object.strings.append(self.cdata)
            self.cdata = None
        elif name in ["byte_runs","byte_run"]:
            pass
        else:
            raise ValueError("regxml_reader._end_element: Don't know how to end element %s.\n" % name)
class fileobject_reader(xml_reader):
    """Class which uses the SAX expat-based XML reader.
    Reads an FIWALK XML input file and automatically creates
    volumeobject_sax and fileobject_sax objects, but just returns the filoeobject
    objects.."""
    def __init__(self,imagefile=None,flags=None):
        # imagefile: open disk-image handle attached to each created fileobject.
        # flags: stored for callers; not interpreted by this class.
        self.creator = None
        self.volumeobject = None
        self.fileobject = None
        self.imageobject = imageobject_sax()
        self.imagefile = imagefile
        self.flags = flags
        xml_reader.__init__(self)
    def _start_element(self, name, attrs):
        """ Handles the start of an element for the XPAT scanner"""
        self.tagstack.append(name)
        self.cdata = "" # new element, so reset the data
        if name=="volume":
            self.volumeobject = volumeobject_sax()
            self.volumeobject.block_size = 512 # reasonable default
            self.volumeobject.image = self.imageobject
            if "offset" in attrs:
                self.volumeobject.offset = int(attrs["offset"])
            return
        if name=="block_size":
            # handled at end-element time, once the cdata is complete
            pass
        if name=="fileobject":
            self.fileobject = fileobject_sax(imagefile=self.imagefile)
            self.fileobject.volume = self.volumeobject
            return
        if name=='hashdigest':
            # remember the algorithm; the digest text itself arrives as cdata
            self.hashdigest_type = attrs['type']
        if self.fileobject and (name=="run" or name=="byte_run"):
            b = byte_run()
            b.decode_sax_attributes(attrs)
            self.fileobject._byte_runs.append(b)
            return
    def _end_element(self, name):
        """Handles the end of an eleement for the XPAT scanner"""
        assert(self.tagstack.pop()==name) # make sure that the stack matches
        if name=="volume":
            self.volumeobject = None
            return
        if name=="block_size" and len(self.tagstack) > 1 :
            if self.tagstack[-1] == "volume" :
                self.volumeobject.block_size = int(self.cdata)
            self.cdata=None
            return
        if name=="fileobject":
            self.callback(self.fileobject)
            self.fileobject = None
            return
        if name=='hashdigest' and len(self.tagstack)>0:
            top = self.tagstack[-1] # what the hash was for
            alg = self.hashdigest_type.lower() # name of the hash algorithm used
            if top=='byte_run':
                # NOTE(review): assumes byte_run instances carry a hashdigest
                # dict -- confirm byte_run defines one.
                self.fileobject._byte_runs[-1].hashdigest[alg] = self.cdata
            if top=="fileobject":
                self.fileobject._tags[alg] = self.cdata # legacy
                self.fileobject.hashdigest[alg] = self.cdata
            self.cdata = None
            return
        if self.fileobject: # in a file object, all tags are remembered
            self.fileobject._tags[name] = self.cdata
            self.cdata = None
            return
        # Special case: <source><image_filename>fn</image_filename></source>
        # gets put in <image_filename>fn</image_filename>
        if name in ['image_filename','imagefile'] and self.tagstack[-1]=='source':
            self.imageobject._tags['image_filename'] = self.cdata
class volumeobject_reader(xml_reader):
    """Expat-driven reader that creates volumeobject_sax objects and passes
    each completed volume to the user-supplied callback."""
    def __init__(self):
        # NOTE(review): initialized to False here though other readers use
        # None; both are falsy, so the truthiness checks below behave the same.
        self.volumeobject = False
        xml_reader.__init__(self)
        self.imageobject = imageobject_sax()
    def _start_element(self, name, attrs):
        """ Handles the start of an element for the XPAT scanner"""
        self.tagstack.append(name)
        if name=="volume":
            self.volumeobject = volumeobject_sax()
            self.volumeobject.image = self.imageobject
            return
        if name=="fileobject":
            self.cdata = None # don't record this
            return
        self.cdata = "" # new element; otherwise data is ignored
    def _end_element(self, name):
        """Handles the end of an eleement for the XPAT scanner"""
        assert(self.tagstack.pop()==name)
        if name=="volume":
            self.callback(self.volumeobject)
            self.volumeobject = None
            return
        if self.tagstack[-1]=='volume' and self.volumeobject: # in the volume
            self.volumeobject._tags[name] = self.cdata
            self.cdata = None
            return
        if self.tagstack[-1] in ['fiwalk','dfxml']:
            # image-level metadata (direct child of the document root)
            self.imageobject._tags[name] = self.cdata
            return
        # Special case: <source><image_filename>fn</image_filename></source> gets put in <image_filename>fn</image_filename>
        if name in ['image_filename','imagefile'] and self.tagstack[-1]=='source':
            self.imageobject._tags['image_filename'] = self.cdata
            return
def combine_runs(runs):
    """Given an array of byte_run elements, combine adjacent runs and return
    a new array (the input is not modified)."""
    if runs==[]: return []
    merged = [runs[0]]
    for current in runs[1:]:
        # If the last accepted run ends exactly where this one begins, extend
        # it; otherwise keep the run as-is.
        previous = merged[-1]
        if previous.img_offset + previous.len == current.img_offset:
            merged[-1] = byte_run(img_offset = previous.img_offset,
                                  len = previous.len + current.len)
        else:
            merged.append(current)
    return merged
class extentdb:
    """A class to a database of extents and report if they collide.
    Currently this is not an efficient implementation, but it could become
    more efficient in the future. When it does, every program that uses
    this implementation will get faster too!  Each extent is represented
    as a byte_run object"""
    def __init__(self,sectorsize=512):
        self.db = []    # the database of runs
        # BUG FIX: previously hard-coded to 512, silently ignoring the
        # sectorsize parameter.
        self.sectorsize = sectorsize
    def report(self,f):
        """Print information about the database"""
        f.write("sectorsize: %d\n" % self.sectorsize)
        for run in sorted(self.db):
            f.write("  [@%8d ; %8d]\n" % (run.img_offset,run.len))
        # BUG FIX: previously wrote len(r), an undefined name.
        f.write("total entries in database: %d\n\n" % len(self.db))
    def sectors_for_bytes(self,count):
        """Returns the number of sectors necessary to hold COUNT bytes"""
        return (count+self.sectorsize-1)//self.sectorsize
    def sectors_for_run(self,run):
        """Returns an array of the sectors for a given run"""
        # BUG FIX (Py3): floor division so range() receives ints.
        start_sector = run.img_offset//self.sectorsize
        sector_count = self.sectors_for_bytes(run.len)
        return range(start_sector,start_sector+sector_count)
    def run_for_sector(self,sector_number,count=1):
        """Returns the run for a specified sector, and optionally a count of sectors"""
        return byte_run(len=count*self.sectorsize,img_offset=sector_number * self.sectorsize)
    def intersects(self,extent):
        """Returns the intersecting extent, or None if there is none"""
        # NOTE(review): a zero-length extent returns True rather than an
        # extent object; callers only test truthiness, so preserved as-is.
        if extent.len==0: return True # 0 length intersects with everything
        if extent.len<0: raise ValueError("Length cannot be negative:"+str(extent))
        start = extent.img_offset
        stop = extent.img_offset+extent.len
        for d in self.db:
            if d.img_offset <= start < d.img_offset+d.len: return d
            if d.img_offset < stop < d.img_offset+d.len: return d
            if start<d.img_offset and d.img_offset+d.len <= stop: return d
        return None
    def intersects_runs(self,runs):
        """Returns the intersecting extent for a set of runs, or None
        if there is none."""
        for r in runs:
            v = self.intersects(r)
            if v: return v
        return None
    def intersects_sector(self,sector):
        """Returns the intersecting extent for a specified sector, None otherwise.
        Sector numbers start at 0."""
        return self.intersects(self.run_for_sector(sector))
    def add(self,extent):
        """Adds an EXTENT (start,length) to the database.
        Raises ValueError if there is an intersection."""
        v = self.intersects(extent)
        if v:
            raise ValueError("Cannot add "+str(extent)+": it intersects "+str(v))
        self.db.append(extent)
    def add_runs(self,runs):
        """Adds all of the runs to the extent database"""
        for r in runs:
            self.add(r)
    def runs_for_sectors(self,sectors):
        """Given a list of SECTORS, return a list of RUNS.
        Automatically combines adjacent runs."""
        runs = [byte_run(len=self.sectorsize,img_offset=x*self.sectorsize) for x in sectors]
        return combine_runs(runs)
    def add_sectors(self,sectors):
        """Adds the sectors in the list to the database."""
        self.add_runs(self.runs_for_sectors(sectors))
    def sectors_not_in_db(self,run):
        """For a given run, return a list of sectors not in the extent db"""
        # NOTE(review): under Python 3 this is a lazy filter object, not a
        # list; callers that only iterate are unaffected.
        return filter(lambda x:not self.intersects_sector(x),self.sectors_for_run(run))
def read_dfxml(xmlfile=None,imagefile=None,flags=0,callback=None):
    """Processes an image using expat, calling a callback for every file object encountered.
    If xmlfile is provided, use that as the xmlfile, otherwise runs fiwalk."""
    if not callback:
        raise ValueError("callback must be specified")
    reader = fileobject_reader(imagefile=imagefile, flags=flags)
    reader.process_xml_stream(xmlfile, callback)
    return reader
def read_regxml(xmlfile=None,flags=0,callback=None):
    """Processes a RegXML stream using expat, calling callback for each node
    encountered.  Raises ValueError for a missing callback or xmlfile;
    re-raises expat parse errors after dumping the in-progress object stack.
    """
    import xml.parsers.expat
    if not callback:
        raise ValueError("callback must be specified")
    if not xmlfile:
        raise ValueError("regxml file must be specified")
    r = regxml_reader(flags=flags)
    try:
        r.process_xml_stream(xmlfile,callback)
    except xml.parsers.expat.ExpatError:
        # BUG FIX: 'stderr' was referenced as a bare name; sys is imported at
        # module level, so qualify it (works regardless of whether the module
        # also does "from sys import stderr").
        sys.stderr.write("XML parsing error for file \"" + xmlfile.name + "\".  Object stack:\n")
        for x in r.objectstack:
            sys.stderr.write(str(x) + "\n")
        sys.stderr.write("(Done.)\n")
        raise
    return r
def fileobjects_sax(xmlfile=None,imagefile=None,flags=0):
    """Returns a LIST of fileobjects extracted from the given
    imagefile. If XMLFILE is provided, the objects are read
    directly from the XML, otherwise this method runs fiwalk with the
    specified FLAGS."""
    collected = []
    read_dfxml(xmlfile=xmlfile, imagefile=imagefile, flags=flags,
               callback=collected.append)
    return collected
def fileobjects_iter(xmlfile=None,imagefile=None,flags=0):
    """Returns an iterator that returns fileobjects extracted from the given
    imagefile. If XMLFILE is provided, read the objects are read
    directly from the XML, otherwise this method runs fiwalk with the
    specified FLAGS."""
    # NOTE(review): this function is known-broken, as the print below admits.
    # local_iter is a generator *function*: read_dfxml calls it once per file
    # object and discards the returned generator without ever advancing it,
    # so nothing is yielded to the caller and the function returns None.
    print("For reasons we do not understand, fileobjects_iter currently does not work.")
    def local_iter(fi):
        yield fi
    read_dfxml(xmlfile=xmlfile,imagefile=imagefile,flags=flags,callback=local_iter)
def fileobjects_dom(xmlfile=None, imagefile=None, flags=0):
    """Return (XML, LIST): the minidom document of the imagefile's fiwalk
    output and the list of file objects extracted from that document."""
    import xml.dom.minidom
    document = xml.dom.minidom.parseString(xmlfile.read())
    fileobjects = [fileobject_dom(node, imagefile=imagefile)
                   for node in document.getElementsByTagName("fileobject")]
    return (document, fileobjects)
def volumeobjects_sax(xmlfile=None, imagefile=None, flags=0):
    """Return the list of volume objects found in *xmlfile*."""
    volumes = []
    reader = volumeobject_reader()
    reader.process_xml_stream(xmlfile, callback=volumes.append)
    return volumes
################################################################
# Self-test entry point: "-r/--regress" runs the regression checks below.
if __name__=="__main__":
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-r","--regress",action="store_true")
    (options,args) = parser.parse_args()
def check_equal(a, b, want=None):
    # Compare two timestamps for equality; flag unexpected results with "(!)".
    left = dftime(a)
    right = dftime(b)
    result = (left == right)
    warn = " (!)" if result != want else ""
    print("a=%s b=%s want=%s equal=%s%s" % (left, right, want, result, warn))
def check_greater(a, b, want=None):
    # Report whether dftime(a) > dftime(b); flag unexpected results with "(!)".
    left = dftime(a)
    right = dftime(b)
    result = (left > right)
    warn = " (!)" if result != want else ""
    print("a=%s b=%s want=%s greater=%s%s" % (left, right, want, result, warn))
if options.regress:
    print("Testing unicode value parsing.")
    # Test base64 encoding of the "Registered" symbol, encountered in a key name in the M57-Patents corpus.
    test_unicode_string = "\xae"
    if sys.version_info.major == 2:
        # The test string doesn't quite get defined right that way in Python 2
        test_unicode_string = unicode(test_unicode_string, encoding="latin-1")
        test_unicode_string_escaped = test_unicode_string.encode("unicode_escape")
        test_base64_bytes = base64.b64encode(test_unicode_string_escaped)
    elif sys.version_info.major == 3:
        test_unicode_string_escaped = test_unicode_string.encode("unicode_escape")
        test_base64_bytes = base64.b64encode(test_unicode_string_escaped)
    else:
        # Just hard-code value, no examples yet for this language version.
        test_base64_bytes = b'XHhhZQ=='
    test_base64_string = test_base64_bytes.decode("ascii")
    # test_base64_string is the kind of string data you'd expect to encounter in base64-encoded values processing RegXML.
    # safe_b64decode must accept both bytes and str input.
    assert test_unicode_string == safe_b64decode(test_base64_bytes)
    assert test_unicode_string == safe_b64decode(test_base64_string)
    print("Unicode value parsing good!")
    print("Testing dftime values")
    # check_equal("1900-01-02T02:03:04Z",-2208895016,True) #AJN time.mktime doesn't seem to support old times any more
    check_equal("2000-01-02T02:03:04Z","2000-01-02T03:03:04-0100",False)
    check_equal("2000-01-02T02:03:04-0100","2000-01-02T02:03:04-0100",True)
    check_equal("2000-01-02T02:03:04-0100","2000-01-02T02:03:04-0200",False)
    check_equal("2000-01-02T02:03:04-0100","2000-01-02T01:03:04-0200",True)
    check_greater("2000-01-02T04:04:05-0100","2000-01-02T03:04:05-0100",True)
    check_greater("2000-01-02T03:04:05-0200","2000-01-02T03:04:05-0100",True)
    check_greater("2009-11-17T00:33:30.9375Z","2009-11-17T00:33:30Z",True)
    check_equal("2009-11-17T00:33:30.9375Z","2009-11-17T00:33:30Z",False)
    check_equal("2009-11-17T00:33:30.0000Z","2009-11-17T00:33:30Z",True)
    print("dftime values passed.")
    print("Testing byte_run overlap engine:")
    # Store the two adjacent runs [0,5) and [5,10), then probe around them.
    db = extentdb()
    a = byte_run(img_offset=0,len=5)
    db.add(a)
    b = byte_run(5,5)
    db.add(b)
    assert db.intersects(byte_run(0,5))==byte_run(0,5)
    assert db.intersects(byte_run(0,1))
    assert db.intersects(byte_run(2,3))
    assert db.intersects(byte_run(4,1))
    assert db.intersects(byte_run(5,1))
    assert db.intersects(byte_run(6,1))
    assert db.intersects(byte_run(9,1))
    assert db.intersects(byte_run(-1,5))
    assert db.intersects(byte_run(-1,10))
    assert db.intersects(byte_run(-1,11))
    # Runs entirely outside the stored extents must not intersect.
    assert db.intersects(byte_run(-1,1))==None
    assert db.intersects(byte_run(10,1))==None
    print("Overlap engine good!")
| gpl-3.0 |
simone/django-gb | tests/many_to_one/tests.py | 34 | 21685 | from copy import deepcopy
import datetime
from django.core.exceptions import MultipleObjectsReturned, FieldError
from django.db import transaction
from django.test import TestCase
from django.utils import six
from django.utils.translation import ugettext_lazy
from .models import Article, Reporter
class ManyToOneTests(TestCase):
    """Tests for the ForeignKey (many-to-one) relation between Article and Reporter."""

    def setUp(self):
        # Create a few Reporters.
        self.r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
        self.r.save()
        self.r2 = Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
        self.r2.save()
        # Create an Article.
        self.a = Article(id=None, headline="This is a test",
                         pub_date=datetime.date(2005, 7, 27), reporter=self.r)
        self.a.save()
def test_get(self):
    # An Article can reach its related Reporter through the descriptor.
    related = self.a.reporter
    self.assertEqual(related.id, self.r.id)
    # Plain strings (not unicode) because that's how the reporter was created
    # and the data has not been refreshed from the database, which always
    # returns unicode strings.
    self.assertEqual((related.first_name, self.r.last_name), ('John', 'Smith'))
def test_create(self):
    # You can also instantiate an Article by passing the Reporter's ID
    # instead of a Reporter object.
    a3 = Article(id=None, headline="Third article",
                 pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
    a3.save()
    self.assertEqual(a3.reporter.id, self.r.id)
    # Similarly, the reporter ID can be a string.
    a4 = Article(id=None, headline="Fourth article",
                 pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
    a4.save()
    self.assertEqual(repr(a4.reporter), "<Reporter: John Smith>")
def test_add(self):
# Create an Article via the Reporter object.
new_article = self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.assertEqual(repr(new_article), "<Article: John's second story>")
self.assertEqual(new_article.reporter.id, self.r.id)
# Create a new article, and add it to the article set.
new_article2 = Article(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
self.r.article_set.add(new_article2)
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
# Add the same article to a different article set - check that it moves.
self.r2.article_set.add(new_article2)
self.assertEqual(new_article2.reporter.id, self.r2.id)
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Adding an object of the wrong type raises TypeError.
with transaction.atomic():
with six.assertRaisesRegex(self, TypeError,
"'Article' instance expected, got <Reporter.*"):
self.r.article_set.add(self.r2)
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
def test_assign(self):
new_article = self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
new_article2 = self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
# Assign the article to the reporter directly using the descriptor.
new_article2.reporter = self.r
new_article2.save()
self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), [])
# Set the article back again using set descriptor.
self.r2.article_set = [new_article, new_article2]
self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
self.assertQuerysetEqual(self.r2.article_set.all(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
])
# Funny case - assignment notation can only go so far; because the
# ForeignKey cannot be null, existing members of the set must remain.
self.r.article_set = [new_article]
self.assertQuerysetEqual(self.r.article_set.all(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Reporter cannot be null - there should not be a clear or remove method
self.assertFalse(hasattr(self.r2.article_set, 'remove'))
self.assertFalse(hasattr(self.r2.article_set, 'clear'))
def test_selects(self):
self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
# Reporter objects have access to their related Article objects.
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r.article_set.filter(headline__startswith='This'),
["<Article: This is a test>"])
self.assertEqual(self.r.article_set.count(), 2)
self.assertEqual(self.r2.article_set.count(), 1)
# Get articles by id
self.assertQuerysetEqual(Article.objects.filter(id__exact=self.a.id),
["<Article: This is a test>"])
self.assertQuerysetEqual(Article.objects.filter(pk=self.a.id),
["<Article: This is a test>"])
# Query on an article property
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='This'),
["<Article: This is a test>"])
# The API automatically follows relationships as far as you need.
# Use double underscores to separate relationships.
# This works as many levels deep as you want. There's no limit.
# Find all Articles for any Reporter whose first name is "John".
self.assertQuerysetEqual(Article.objects.filter(reporter__first_name__exact='John'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Check that implied __exact also works
self.assertQuerysetEqual(Article.objects.filter(reporter__first_name='John'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Query twice over the related field.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John',
reporter__last_name__exact='Smith'),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# The underlying query only makes one join when a related table is referenced twice.
queryset = Article.objects.filter(reporter__first_name__exact='John',
reporter__last_name__exact='Smith')
self.assertNumQueries(1, list, queryset)
self.assertEqual(queryset.query.get_compiler(queryset.db).as_sql()[0].count('INNER JOIN'), 1)
# The automatically joined table has a predictable name.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John').extra(
where=["many_to_one_reporter.last_name='Smith'"]),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# ... and should work fine with the unicode that comes out of forms.Form.cleaned_data
self.assertQuerysetEqual(
(Article.objects
.filter(reporter__first_name__exact='John')
.extra(where=["many_to_one_reporter.last_name='%s'" % 'Smith'])),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
# Find all Articles for a Reporter.
# Use direct ID check, pk check, and object comparison
self.assertQuerysetEqual(
Article.objects.filter(reporter__id__exact=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__pk=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r.id, self.r2.id]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r, self.r2]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
# You can also use a queryset instead of a literal list of instances.
# The queryset must be reduced to a list of values using values(),
# then converted into a query
self.assertQuerysetEqual(
Article.objects.filter(
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
).distinct(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
def test_reverse_selects(self):
a3 = Article.objects.create(id=None, headline="Third article",
pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
Article.objects.create(id=None, headline="Fourth article",
pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
# Reporters can be queried
self.assertQuerysetEqual(Reporter.objects.filter(id__exact=self.r.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(pk=self.r.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(first_name__startswith='John'),
["<Reporter: John Smith>"])
# Reporters can query in opposite direction of ForeignKey definition
self.assertQuerysetEqual(Reporter.objects.filter(article__id__exact=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article__pk=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a.id),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a.id, a3.id]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a.id, a3]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__in=[self.a, a3]).distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__headline__startswith='T'),
["<Reporter: John Smith>", "<Reporter: John Smith>"],
ordered=False
)
self.assertQuerysetEqual(
Reporter.objects.filter(article__headline__startswith='T').distinct(),
["<Reporter: John Smith>"])
# Counting in the opposite direction works in conjunction with distinct()
self.assertEqual(
Reporter.objects.filter(article__headline__startswith='T').count(), 2)
self.assertEqual(
Reporter.objects.filter(article__headline__startswith='T').distinct().count(), 1)
# Queries can go round in circles.
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John'),
[
"<Reporter: John Smith>",
"<Reporter: John Smith>",
"<Reporter: John Smith>",
],
ordered=False
)
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct(),
["<Reporter: John Smith>"])
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__exact=self.r).distinct(),
["<Reporter: John Smith>"])
# Check that implied __exact also works.
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter=self.r).distinct(),
["<Reporter: John Smith>"])
# It's possible to use values() calls across many-to-one relations.
# (Note, too, that we clear the ordering here so as not to drag the
# 'headline' field into the columns being used to determine uniqueness)
d = {'reporter__first_name': 'John', 'reporter__last_name': 'Smith'}
self.assertEqual([d],
list(Article.objects.filter(reporter=self.r).distinct().order_by()
.values('reporter__first_name', 'reporter__last_name')))
def test_select_related(self):
# Check that Article.objects.select_related().dates() works properly when
# there are multiple Articles with the same date but different foreign-key
# objects (Reporters).
r1 = Reporter.objects.create(first_name='Mike', last_name='Royko', email='royko@suntimes.com')
r2 = Reporter.objects.create(first_name='John', last_name='Kass', email='jkass@tribune.com')
Article.objects.create(headline='First', pub_date=datetime.date(1980, 4, 23), reporter=r1)
Article.objects.create(headline='Second', pub_date=datetime.date(1980, 4, 23), reporter=r2)
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'day')),
[
datetime.date(1980, 4, 23),
datetime.date(2005, 7, 27),
])
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'month')),
[
datetime.date(1980, 4, 1),
datetime.date(2005, 7, 1),
])
self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'year')),
[
datetime.date(1980, 1, 1),
datetime.date(2005, 1, 1),
])
def test_delete(self):
self.r.article_set.create(headline="John's second story",
pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story",
pub_date=datetime.date(2006, 1, 17))
Article.objects.create(id=None, headline="Third article",
pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
Article.objects.create(id=None, headline="Fourth article",
pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
# If you delete a reporter, his articles will be deleted.
self.assertQuerysetEqual(Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: Third article>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
[
"<Reporter: John Smith>",
"<Reporter: Paul Jones>",
])
self.r2.delete()
self.assertQuerysetEqual(Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Third article>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
["<Reporter: John Smith>"])
# You can delete using a JOIN in the query.
Reporter.objects.filter(article__headline__startswith='This').delete()
self.assertQuerysetEqual(Reporter.objects.all(), [])
self.assertQuerysetEqual(Article.objects.all(), [])
def test_regression_12876(self):
    # Regression for #12876 -- model instances holding querysets that
    # reference themselves must not hit the recursion limit under deepcopy.
    self.r.cached_query = Article.objects.filter(reporter=self.r)
    self.assertEqual(repr(deepcopy(self.r)), "<Reporter: John Smith>")
def test_explicit_fk(self):
# Create a new Article with get_or_create using an explicit value
# for a ForeignKey.
a2, created = Article.objects.get_or_create(id=None,
headline="John's second test",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r.id)
self.assertTrue(created)
self.assertEqual(a2.reporter.id, self.r.id)
# You can specify filters containing the explicit FK value.
self.assertQuerysetEqual(
Article.objects.filter(reporter_id__exact=self.r.id),
[
"<Article: John's second test>",
"<Article: This is a test>",
])
# Create an Article by Paul for the same date.
a3 = Article.objects.create(id=None, headline="Paul's commentary",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r2.id)
self.assertEqual(a3.reporter.id, self.r2.id)
# Get should respect explicit foreign keys as well.
self.assertRaises(MultipleObjectsReturned,
Article.objects.get, reporter_id=self.r.id)
self.assertEqual(repr(a3),
repr(Article.objects.get(reporter_id=self.r2.id,
pub_date=datetime.date(2011, 5, 7))))
def test_manager_class_caching(self):
    """The dynamically-built related-manager class is created once and reused."""
    r1 = Reporter.objects.create(first_name='Mike')
    r2 = Reporter.objects.create(first_name='John')
    # assertIs gives a useful failure message, unlike assertTrue(a is b).
    # Same class when accessed twice on the same instance...
    self.assertIs(r1.article_set.__class__, r1.article_set.__class__)
    # ...and shared across instances of the same model.
    self.assertIs(r1.article_set.__class__, r2.article_set.__class__)
def test_create_relation_with_ugettext_lazy(self):
    # A lazily-translated string may be passed when creating a related
    # object; it is resolved to real text before being stored.
    reporter = Reporter.objects.create(first_name='John',
                                       last_name='Smith',
                                       email='john.smith@example.com')
    lazy = ugettext_lazy('test')
    reporter.article_set.create(headline=lazy,
                                pub_date=datetime.date(2011, 6, 10))
    notlazy = six.text_type(lazy)
    article = reporter.article_set.get()
    self.assertEqual(article.headline, notlazy)
def test_values_list_exception(self):
    # A nonexistent field in values_list() -- directly or across a
    # relation -- raises FieldError listing the valid field choices.
    expected_message = "Cannot resolve keyword 'notafield' into field. Choices are: %s"
    self.assertRaisesMessage(FieldError,
        expected_message % ', '.join(Reporter._meta.get_all_field_names()),
        Article.objects.values_list,
        'reporter__notafield')
    # extra() select aliases are included (and listed first) in the choices.
    self.assertRaisesMessage(FieldError,
        expected_message % ', '.join(['EXTRA'] + Article._meta.get_all_field_names()),
        Article.objects.extra(select={'EXTRA': 'EXTRA_SELECT'}).values_list,
        'notafield')
| bsd-3-clause |
dvarrazzo/arduino | thermo/client/serial_read.py | 1 | 1174 | #!/usr/bin/env python
"""Read from the serial and emit one ts/value per minute.
Log messages on stderr.
"""
import sys
import serial
from datetime import datetime, timedelta
import logging
logging.basicConfig()
logger = logging.getLogger()
def main():
opt = parse_options()
buf = []
ser = serial.Serial('/dev/ttyUSB0', 19200)
minute = None
while 1:
buf.append(ser.read())
if "\n" in buf:
ts = datetime.now()
s = "".join(buf)
value, rest = s.split("\n", 1)
buf[:] = [rest]
try:
value = float(value)
except Exception, e:
logger.error("error reading: %s", e)
else:
logger.debug("read value: %s", value)
if ts.minute != minute:
minute = ts.minute
send_value(opt, ts, value)
def parse_options():
    # Placeholder: no command-line options are defined yet; main() passes the
    # returned value straight through to send_value().
    return None
def send_value(opt, ts, value):
logger.debug("sending value: %s", value)
print ts.strftime("%Y-%m-%d %H:%M:%S"), value
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Emit a final newline so the shell prompt starts cleanly after ^C.
        print
        pass
| gpl-3.0 |
mgood7123/UPM | Tests/PACKAGES/hexchat-2.12.4-6-x86_64/usr/share/glib-2.0/codegen/codegen_main.py | 1 | 7967 | # -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, see <http://www.gnu.org/licenses/>.
#
# Author: David Zeuthen <davidz@redhat.com>
import sys
import optparse
from os import path
from . import config
from . import utils
from . import dbustypes
from . import parser
from . import codegen
from . import codegen_docbook
def find_arg(arg_list, arg_name):
    """Return the argument in *arg_list* named *arg_name*, or None."""
    matches = (candidate for candidate in arg_list if candidate.name == arg_name)
    return next(matches, None)
def find_method(iface, method):
    """Return the method object named *method* on *iface*, or None."""
    return next((m for m in iface.methods if m.name == method), None)
def find_signal(iface, signal):
    """Return the signal object named *signal* on *iface*, or None."""
    return next((s for s in iface.signals if s.name == signal), None)
def find_prop(iface, prop):
    """Return the property object named *prop* on *iface*, or None."""
    return next((p for p in iface.properties if p.name == prop), None)
def apply_annotation(iface_list, iface, method, signal, prop, arg, key, value):
    """Attach the annotation (key, value) to one element of the parsed tree.

    *iface* names the interface; at most one of *method*, *signal* or *prop*
    selects a member (all None annotates the interface itself), and *arg*
    optionally narrows a method/signal down to one of its arguments.

    Fix: None-checks use ``is None`` instead of ``== None`` (PEP 8); the
    control flow is otherwise unchanged.

    Raises:
        RuntimeError: if the interface or the selected member is not found.
    """
    # Locate the interface by name.
    iface_obj = None
    for i in iface_list:
        if i.name == iface:
            iface_obj = i
            break
    if iface_obj is None:
        raise RuntimeError('No interface %s'%iface)
    target_obj = None
    if method:
        method_obj = find_method(iface_obj, method)
        if method_obj is None:
            raise RuntimeError('No method %s on interface %s'%(method, iface))
        if arg:
            # The argument may appear among either the in-args or the out-args.
            arg_obj = find_arg(method_obj.in_args, arg)
            if arg_obj is None:
                arg_obj = find_arg(method_obj.out_args, arg)
                if arg_obj is None:
                    raise RuntimeError('No arg %s on method %s on interface %s'%(arg, method, iface))
            target_obj = arg_obj
        else:
            target_obj = method_obj
    elif signal:
        signal_obj = find_signal(iface_obj, signal)
        if signal_obj is None:
            raise RuntimeError('No signal %s on interface %s'%(signal, iface))
        if arg:
            arg_obj = find_arg(signal_obj.args, arg)
            if arg_obj is None:
                raise RuntimeError('No arg %s on signal %s on interface %s'%(arg, signal, iface))
            target_obj = arg_obj
        else:
            target_obj = signal_obj
    elif prop:
        prop_obj = find_prop(iface_obj, prop)
        if prop_obj is None:
            raise RuntimeError('No property %s on interface %s'%(prop, iface))
        target_obj = prop_obj
    else:
        target_obj = iface_obj
    # Prepend (insert at index 0) as the original did -- presumably so
    # command-line annotations are seen before those from the XML.
    target_obj.annotations.insert(0, dbustypes.Annotation(key, value))
def apply_annotations(iface_list, annotation_list):
    """Apply command-line annotations to the parsed interfaces.

    Each entry of *annotation_list* is (what, key, value), where *what*
    selects the target by pattern:
      'iface'               -> the interface itself
      'iface:prop'          -> a property
      'iface::signal[arg]'  -> a signal (optionally one of its args)
      'iface.method()[arg]' -> a method (optionally one of its args)
    """
    # apply annotations given on the command line
    for (what, key, value) in annotation_list:
        pos = what.find('::')
        if pos != -1:
            # signal
            iface = what[0:pos];
            signal = what[pos + 2:]
            pos = signal.find('[')
            if pos != -1:
                # '[arg]' suffix: annotate a single argument of the signal.
                arg = signal[pos + 1:]
                signal = signal[0:pos]
                pos = arg.find(']')
                arg = arg[0:pos]
                apply_annotation(iface_list, iface, None, signal, None, arg, key, value)
            else:
                apply_annotation(iface_list, iface, None, signal, None, None, key, value)
        else:
            pos = what.find(':')
            if pos != -1:
                # property
                iface = what[0:pos];
                prop = what[pos + 1:]
                apply_annotation(iface_list, iface, None, None, prop, None, key, value)
            else:
                pos = what.find('()')
                if pos != -1:
                    # method
                    combined = what[0:pos]
                    pos = combined.rfind('.')
                    iface = combined[0:pos]
                    method = combined[pos + 1:]
                    pos = what.find('[')
                    if pos != -1:
                        # '[arg]' suffix: annotate a single argument of the method.
                        arg = what[pos + 1:]
                        pos = arg.find(']')
                        arg = arg[0:pos]
                        apply_annotation(iface_list, iface, method, None, None, arg, key, value)
                    else:
                        apply_annotation(iface_list, iface, method, None, None, None, key, value)
                else:
                    # must be an interface
                    iface = what
                    apply_annotation(iface_list, iface, None, None, None, None, key, value)
def codegen_main():
    """Command-line entry point: parse options, read the D-Bus introspection
    XML, apply any command-line annotations, then emit C code and/or Docbook
    documentation."""
    arg_parser = optparse.OptionParser('%prog [options]')
    arg_parser.add_option('', '--xml-files', metavar='FILE', action='append',
                          help='D-Bus introspection XML file')
    arg_parser.add_option('', '--interface-prefix', metavar='PREFIX', default='',
                          help='String to strip from D-Bus interface names for code and docs')
    arg_parser.add_option('', '--c-namespace', metavar='NAMESPACE', default='',
                          help='The namespace to use for generated C code')
    arg_parser.add_option('', '--c-generate-object-manager', action='store_true',
                          help='Generate a GDBusObjectManagerClient subclass when generating C code')
    arg_parser.add_option('', '--generate-c-code', metavar='OUTFILES',
                          help='Generate C code in OUTFILES.[ch]')
    arg_parser.add_option('', '--c-generate-autocleanup', type='choice', choices=['none', 'objects', 'all'], default='objects',
                          help='Generate autocleanup support')
    arg_parser.add_option('', '--generate-docbook', metavar='OUTFILES',
                          help='Generate Docbook in OUTFILES-org.Project.IFace.xml')
    arg_parser.add_option('', '--annotate', nargs=3, action='append', metavar='WHAT KEY VALUE',
                          help='Add annotation (may be used several times)')
    arg_parser.add_option('', '--output-directory', metavar='OUTDIR', default='',
                          help='Location to output generated files')
    (opts, args) = arg_parser.parse_args();
    # NOTE(review): the value of --xml-files is never consulted below; the
    # XML file names are taken from the positional arguments instead.
    all_ifaces = []
    for fname in args:
        f = open(fname, 'rb')
        xml_data = f.read()
        f.close()
        parsed_ifaces = parser.parse_dbus_xml(xml_data)
        all_ifaces.extend(parsed_ifaces)
    if opts.annotate != None:
        apply_annotations(all_ifaces, opts.annotate)
    # Strip prefixes / compute C names before any generation happens.
    for i in all_ifaces:
        i.post_process(opts.interface_prefix, opts.c_namespace)
    outdir = opts.output_directory
    docbook = opts.generate_docbook
    # The docbook generator is constructed unconditionally: the C code
    # generator below needs it for embedded documentation.
    docbook_gen = codegen_docbook.DocbookCodeGenerator(all_ifaces, docbook, outdir);
    if docbook:
        ret = docbook_gen.generate()
    c_code = opts.generate_c_code
    if c_code:
        header_name = c_code + '.h'
        h = open(path.join(outdir, header_name), 'w')
        c = open(path.join(outdir, c_code + '.c'), 'w')
        gen = codegen.CodeGenerator(all_ifaces,
                                    opts.c_namespace,
                                    opts.interface_prefix,
                                    opts.c_generate_object_manager,
                                    opts.c_generate_autocleanup,
                                    docbook_gen,
                                    h, c,
                                    header_name)
        ret = gen.generate()
        h.close()
        c.close()
    sys.exit(0)
if __name__ == "__main__":
    # Allow running this module directly (normally invoked via gdbus-codegen).
    codegen_main()
| gpl-3.0 |
40223119/2015cda | static/Brython3.1.3-20150514-095342/Lib/linecache.py | 785 | 3864 | """Cache lines from files.
This is intended to read lines from modules imported -- hence if a filename
is not found, it will look down the module search path for a file by
that name.
"""
import sys
import os
import tokenize
__all__ = ["getline", "clearcache", "checkcache"]
def getline(filename, lineno, module_globals=None):
    """Return line *lineno* (1-based) from *filename*, or '' if out of range."""
    lines = getlines(filename, module_globals)
    return lines[lineno - 1] if 1 <= lineno <= len(lines) else ''
# The cache: maps filename -> (size, mtime, list_of_lines, fullname).
cache = {} # The cache

def clearcache():
    """Clear the cache entirely."""
    # Rebinds (rather than mutates) the module global, as callers expect.
    global cache
    cache = {}
def getlines(filename, module_globals=None):
    """Get the lines for a file from the cache.

    Update the cache if it doesn't contain an entry for this file already.
    """
    try:
        # Cache entries are (size, mtime, lines, fullname) tuples.
        return cache[filename][2]
    except KeyError:
        return updatecache(filename, module_globals)
def checkcache(filename=None):
    """Discard cache entries that are out of date.

    (This is not checked upon each call!)
    """
    if filename is None:
        filenames = list(cache.keys())
    elif filename in cache:
        filenames = [filename]
    else:
        return
    for filename in filenames:
        size, mtime, lines, fullname = cache[filename]
        if mtime is None:
            # Entry came from a module __loader__; there is nothing to stat.
            continue
        try:
            stat = os.stat(fullname)
        except os.error:
            # File vanished: drop the stale entry.
            del cache[filename]
            continue
        if size != stat.st_size or mtime != stat.st_mtime:
            del cache[filename]
def updatecache(filename, module_globals=None):
    """Update a cache entry and return its list of lines.
    If something's wrong, print a message, discard the cache entry,
    and return an empty list."""
    if filename in cache:
        del cache[filename]
    if not filename or (filename.startswith('<') and filename.endswith('>')):
        # Pseudo-filenames such as '<stdin>' or '<string>' have no source file.
        return []
    fullname = filename
    try:
        stat = os.stat(fullname)
    except OSError:
        basename = filename
        # Try for a __loader__, if available
        if module_globals and '__loader__' in module_globals:
            name = module_globals.get('__name__')
            loader = module_globals['__loader__']
            get_source = getattr(loader, 'get_source', None)
            if name and get_source:
                try:
                    data = get_source(name)
                except (ImportError, IOError):
                    pass
                else:
                    if data is None:
                        # No luck, the PEP302 loader cannot find the source
                        # for this module.
                        return []
                    # mtime of None marks loader-provided entries so that
                    # checkcache() skips the os.stat() freshness test.
                    cache[filename] = (
                        len(data), None,
                        [line+'\n' for line in data.splitlines()], fullname
                    )
                    return cache[filename][2]
        # Try looking through the module search path, which is only useful
        # when handling a relative filename.
        if os.path.isabs(filename):
            return []
        for dirname in sys.path:
            try:
                fullname = os.path.join(dirname, basename)
            except (TypeError, AttributeError):
                # Not sufficiently string-like to do anything useful with.
                continue
            try:
                stat = os.stat(fullname)
                break
            except os.error:
                pass
        else:
            # Not found anywhere on sys.path.
            return []
    try:
        # tokenize.open() honours any PEP 263 coding cookie in the file.
        with tokenize.open(fullname) as fp:
            lines = fp.readlines()
    except IOError:
        return []
    if lines and not lines[-1].endswith('\n'):
        lines[-1] += '\n'
    size, mtime = stat.st_size, stat.st_mtime
    cache[filename] = size, mtime, lines, fullname
    return lines
| gpl-3.0 |
zde/librepo | tests/python/tests/test_yum_repo_downloading.py | 1 | 58125 | from tests.base import TestCaseWithFlask, MOCKURL, TEST_DATA
from tests.servermock.server import app
import tests.servermock.yum_mock.config as config
import os.path
import unittest
import tempfile
import shutil
import gpgme
import librepo
PUB_KEY = TEST_DATA+"/key.pub"
class TestCaseYumRepoDownloading(TestCaseWithFlask):
    """Download tests for yum repositories served by the mock Flask server."""
    # Flask app that TestCaseWithFlask serves for these tests.
    application = app

    # @classmethod
    # def setUpClass(cls):
    #     super(TestCaseYumRepoDownloading, cls).setUpClass()
def setUp(self):
    """Create a scratch directory and a temporary GPG keyring holding the
    test public key, pointing GNUPGHOME at it for the duration of the test."""
    self.tmpdir = tempfile.mkdtemp(prefix="librepotest-")
    # Import public key into the temporary gpg keyring so signature checks
    # never touch the user's real keyring.
    self._gnupghome = os.environ.get('GNUPGHOME')
    gpghome = os.path.join(self.tmpdir, "keyring")
    os.mkdir(gpghome, 0o700)
    os.environ['GNUPGHOME'] = gpghome
    self.ctx = gpgme.Context()
    # Fix: close the key file deterministically instead of leaking the
    # descriptor (the original passed an anonymous open() handle).
    with open(PUB_KEY, 'rb') as key_fo:
        self.ctx.import_(key_fo)
def tearDown(self):
    # Remove the imported test key (keyid 22F2C4E9) from the keyring.
    self.ctx.delete(self.ctx.get_key('22F2C4E9'))
    # Restore the caller's GNUPGHOME (or unset it if it was not set before).
    if self._gnupghome is None:
        os.environ.pop('GNUPGHOME')
    else:
        os.environ['GNUPGHOME'] = self._gnupghome
    shutil.rmtree(self.tmpdir)
    def test_download_repo_01(self):
        """Download static repo 01 and verify every reported path and repomd record.

        The expected values are fixtures baked into the repository served by
        the mock server; commented-out keys mark metadata types this repo
        does not contain.
        """
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
        timestamp = r.getinfo(librepo.LRR_YUM_TIMESTAMP)
        # Local paths of all downloaded metadata files.
        self.assertEqual(yum_repo,
            { #'deltainfo': None,
              'destdir': self.tmpdir,
              'filelists': self.tmpdir+'/repodata/aeca08fccd3c1ab831e1df1a62711a44ba1922c9-filelists.xml.gz',
              'filelists_db': self.tmpdir+'/repodata/4034dcea76c94d3f7a9616779539a4ea8cac288f-filelists.sqlite.bz2',
              #'group': None,
              #'group_gz': None,
              #'origin': None,
              'other': self.tmpdir+'/repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
              'other_db': self.tmpdir+'/repodata/fd96942c919628895187778633001cff61e872b8-other.sqlite.bz2',
              #'prestodelta': None,
              'primary': self.tmpdir+'/repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
              'primary_db': self.tmpdir+'/repodata/735cd6294df08bdf28e2ba113915ca05a151118e-primary.sqlite.bz2',
              'repomd': self.tmpdir+'/repodata/repomd.xml',
              #'updateinfo': None,
              'url': url,
              'signature': None,
              'mirrorlist': None,
              'metalink': None}
            )
        # Parsed repomd.xml records: checksums, sizes and timestamps per type.
        self.assertEqual(yum_repomd,
            { 'content_tags': [],
              #'deltainfo': None,
              'distro_tags': [],
              'filelists': {
                  'checksum': 'aeca08fccd3c1ab831e1df1a62711a44ba1922c9',
                  'checksum_open': '52d30ae3162ca863c63c345ffdb7f0e10c1414a5',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 0,
                  'location_href': 'repodata/aeca08fccd3c1ab831e1df1a62711a44ba1922c9-filelists.xml.gz',
                  'size': 43310,
                  'size_open': 735088,
                  'timestamp': 1347459930},
              'filelists_db': {
                  'checksum': '4034dcea76c94d3f7a9616779539a4ea8cac288f',
                  'checksum_open': '949c6b7b605b2bc66852630c841a5003603ca5b2',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 10,
                  'location_href': 'repodata/4034dcea76c94d3f7a9616779539a4ea8cac288f-filelists.sqlite.bz2',
                  'size': 22575,
                  'size_open': 201728,
                  'timestamp': 1347459931},
              #'group': None,
              #'group_gz': None,
              #'origin': None,
              'other': {
                  'checksum': 'a8977cdaa0b14321d9acfab81ce8a85e869eee32',
                  'checksum_open': '4b5b8874fb233a626b03b3260a1aa08dce90e81a',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 0,
                  'location_href': 'repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
                  'size': 807,
                  'size_open': 1910,
                  'timestamp': 1347459930},
              'other_db': {
                  'checksum': 'fd96942c919628895187778633001cff61e872b8',
                  'checksum_open': 'c5262f62b6b3360722b9b2fb5d0a9335d0a51112',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 10,
                  'location_href': 'repodata/fd96942c919628895187778633001cff61e872b8-other.sqlite.bz2',
                  'size': 1407,
                  'size_open': 8192,
                  'timestamp': 1347459931},
              #'prestodelta': None,
              'primary': {
                  'checksum': '4543ad62e4d86337cd1949346f9aec976b847b58',
                  'checksum_open': '68457ceb8e20bda004d46e0a4dfa4a69ce71db48',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 0,
                  'location_href': 'repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
                  'size': 936,
                  'size_open': 3385,
                  'timestamp': 1347459930},
              'primary_db': {
                  'checksum': '735cd6294df08bdf28e2ba113915ca05a151118e',
                  'checksum_open': 'ba636386312e1b597fc4feb182d04c059b2a77d5',
                  'checksum_open_type': 'sha1',
                  'checksum_type': 'sha1',
                  'db_version': 10,
                  'location_href': 'repodata/735cd6294df08bdf28e2ba113915ca05a151118e-primary.sqlite.bz2',
                  'size': 2603,
                  'size_open': 23552,
                  'timestamp': 1347459931},
              'repo_tags': [],
              'revision': '1347459931',
              #'updateinfo': None
            }
            )
        self.assertEqual(timestamp, 1347459931)
        # Test if all mentioned files really exist
        self.assertTrue(os.path.isdir(yum_repo["destdir"]))
        for key in yum_repo:
            if yum_repo[key] and (key not in ("url", "destdir")):
                self.assertTrue(os.path.isfile(yum_repo[key]))
        # No mirrorlist/metalink was configured, so none may be recorded.
        self.assertFalse(h.mirrors)
        self.assertFalse(h.metalink)
    def test_download_repo_02(self):
        """Download static repo 02 (with every optional metadata type present).

        Unlike repo 01, this repository carries deltainfo, comps (group),
        pkgorigins, prestodelta and updateinfo records, all of which must be
        downloaded and reported with the exact fixture values below.
        """
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_02_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
        # Local paths of all downloaded metadata files.
        self.assertEqual(yum_repo,
            {'deltainfo': self.tmpdir+'/repodata/32d3307b672abf7356061912fa3dc9b54071c03a75c671111c1c8daf5ed1eb7e-deltainfo.xml.gz',
             'destdir': self.tmpdir,
             'filelists': self.tmpdir+'/repodata/2431efa18b5de6bfddb87da2a526362108226752d46ef3a298cd4bf39ba16b1d-filelists.xml.gz',
             'filelists_db': self.tmpdir+'/repodata/5b37f89f9f4474801ec5f23dc30d3d6cf9cf663cb75a6656aaa864a041836ffe-filelists.sqlite.bz2',
             'group': self.tmpdir+'/repodata/5b3b362d644e8fa3b359db57be0ff5de8a08365ce9a59cddc3205244a968231e-comps.xml',
             'group_gz': self.tmpdir+'/repodata/c395ae7d8a9117f4e81aa23e37fb9da9865b50917f5f701b50d422875bb0cb14-comps.xml.gz',
             'origin': self.tmpdir+'/repodata/c949d2b2371fab1a03d03b41057004caf1133a56e4c9236f63b3163ad358c941-pkgorigins.gz',
             'other': self.tmpdir+'/repodata/76b2cfb04531a66e382f187e6a7c90905940d2b2f315b7fd738b839887d83c35-other.xml.gz',
             'other_db': self.tmpdir+'/repodata/705a58b0e169bf1d2ade8e4aacc515086644ce16cee971906f920c798c5b17d0-other.sqlite.bz2',
             'prestodelta': self.tmpdir+'/repodata/26e351e1a38eb1524574e86ab130ea4db780aa1a4a8bb741d37595ed203f931c-prestodelta.xml.gz',
             'primary': self.tmpdir+'/repodata/5a8e6bbb940b151103b3970a26e32b8965da9e90a798b1b80ee4325308149d8d-primary.xml.gz',
             'primary_db': self.tmpdir+'/repodata/a09c42730c03b0d5defa3fd9213794c49e9bafbc67acdd8d4e87a2adf30b8752-primary.sqlite.bz2',
             'repomd': self.tmpdir+'/repodata/repomd.xml',
             'updateinfo': self.tmpdir+'/repodata/65c4f66e2808d328890505c3c2f13bb35a96f457d1c21a6346191c4dc07e6080-updateinfo.xml.gz',
             'url': url,
             'signature': None,
             'mirrorlist': None,
             'metalink': None}
            )
        # Parsed repomd.xml records: checksums, sizes and timestamps per type.
        self.assertEqual(yum_repomd,
            {'content_tags': ['binary-i386'],
             'deltainfo': {'checksum': '32d3307b672abf7356061912fa3dc9b54071c03a75c671111c1c8daf5ed1eb7e',
                           'checksum_open': '8a35a38aef926fd88f479f03a9a22e1ab7aa8bd1aeaa9d05cd696f101eee2846',
                           'checksum_open_type': 'sha256',
                           'checksum_type': 'sha256',
                           'db_version': 0,
                           'location_href': 'repodata/32d3307b672abf7356061912fa3dc9b54071c03a75c671111c1c8daf5ed1eb7e-deltainfo.xml.gz',
                           'size': 300,
                           'size_open': 492,
                           'timestamp': 1355335029},
             'distro_tags': [('cpe:/o:fedoraproject:fedora:17', 'r')],
             'filelists': {'checksum': '2431efa18b5de6bfddb87da2a526362108226752d46ef3a298cd4bf39ba16b1d',
                           'checksum_open': 'afa4a01d7a692ab8105a39fed5535b5011f0c68de0efbc98f9d6ffea36de85fe',
                           'checksum_open_type': 'sha256',
                           'checksum_type': 'sha256',
                           'db_version': 0,
                           'location_href': 'repodata/2431efa18b5de6bfddb87da2a526362108226752d46ef3a298cd4bf39ba16b1d-filelists.xml.gz',
                           'size': 43338,
                           'size_open': 735112,
                           'timestamp': 1355393567},
             'filelists_db': {'checksum': '5b37f89f9f4474801ec5f23dc30d3d6cf9cf663cb75a6656aaa864a041836ffe',
                              'checksum_open': '8239ecd9334a3bc4dfa9a242f7c4d545b08451a1ad468458e20f3d3f768652c3',
                              'checksum_open_type': 'sha256',
                              'checksum_type': 'sha256',
                              'db_version': 10,
                              'location_href': 'repodata/5b37f89f9f4474801ec5f23dc30d3d6cf9cf663cb75a6656aaa864a041836ffe-filelists.sqlite.bz2',
                              'size': 23038,
                              'size_open': 200704,
                              'timestamp': 1355393568},
             'group': {'checksum': '5b3b362d644e8fa3b359db57be0ff5de8a08365ce9a59cddc3205244a968231e',
                       'checksum_open': None,
                       'checksum_open_type': None,
                       'checksum_type': 'sha256',
                       'db_version': 0,
                       'location_href': 'repodata/5b3b362d644e8fa3b359db57be0ff5de8a08365ce9a59cddc3205244a968231e-comps.xml',
                       'size': 679,
                       'size_open': 0,
                       'timestamp': 1355393567},
             'group_gz': {'checksum': 'c395ae7d8a9117f4e81aa23e37fb9da9865b50917f5f701b50d422875bb0cb14',
                          'checksum_open': '5b3b362d644e8fa3b359db57be0ff5de8a08365ce9a59cddc3205244a968231e',
                          'checksum_open_type': 'sha256',
                          'checksum_type': 'sha256',
                          'db_version': 0,
                          'location_href': 'repodata/c395ae7d8a9117f4e81aa23e37fb9da9865b50917f5f701b50d422875bb0cb14-comps.xml.gz',
                          'size': 331,
                          'size_open': 679,
                          'timestamp': 1355393567},
             'origin': {'checksum': 'c949d2b2371fab1a03d03b41057004caf1133a56e4c9236f63b3163ad358c941',
                        'checksum_open': '3928c6aadcfdff101f4482db68c0d07f5777b1c7ad8424e41358bc5e87b8465b',
                        'checksum_open_type': 'sha256',
                        'checksum_type': 'sha256',
                        'db_version': 0,
                        'location_href': 'repodata/c949d2b2371fab1a03d03b41057004caf1133a56e4c9236f63b3163ad358c941-pkgorigins.gz',
                        'size': 140,
                        'size_open': 364,
                        'timestamp': 1355315696},
             'other': {'checksum': '76b2cfb04531a66e382f187e6a7c90905940d2b2f315b7fd738b839887d83c35',
                       'checksum_open': '2169e09e2c6c91393d38866c501a8697d0a1d698dd3b1027969dc16d291d8915',
                       'checksum_open_type': 'sha256',
                       'checksum_type': 'sha256',
                       'db_version': 0,
                       'location_href': 'repodata/76b2cfb04531a66e382f187e6a7c90905940d2b2f315b7fd738b839887d83c35-other.xml.gz',
                       'size': 826,
                       'size_open': 1934,
                       'timestamp': 1355393567},
             'other_db': {'checksum': '705a58b0e169bf1d2ade8e4aacc515086644ce16cee971906f920c798c5b17d0',
                          'checksum_open': '916ca5e879387dc1da51b57266bda28a2569d1773ca6c8ea80abe99d9adb373e',
                          'checksum_open_type': 'sha256',
                          'checksum_type': 'sha256',
                          'db_version': 10,
                          'location_href': 'repodata/705a58b0e169bf1d2ade8e4aacc515086644ce16cee971906f920c798c5b17d0-other.sqlite.bz2',
                          'size': 1462,
                          'size_open': 8192,
                          'timestamp': 1355393568},
             'prestodelta': {'checksum': '26e351e1a38eb1524574e86ab130ea4db780aa1a4a8bb741d37595ed203f931c',
                             'checksum_open': '0052b222add25fed094793c24e73aa07fd598f43f73c1643de26c5e81f6d8c07',
                             'checksum_open_type': 'sha256',
                             'checksum_type': 'sha256',
                             'db_version': 0,
                             'location_href': 'repodata/26e351e1a38eb1524574e86ab130ea4db780aa1a4a8bb741d37595ed203f931c-prestodelta.xml.gz',
                             'size': 336,
                             'size_open': 574,
                             'timestamp': 1337937059},
             'primary': {'checksum': '5a8e6bbb940b151103b3970a26e32b8965da9e90a798b1b80ee4325308149d8d',
                         'checksum_open': 'b8d60e74c38b94f255c08c3fe5e10c166dcb52f2c4bfec6cae097a68fdd75e74',
                         'checksum_open_type': 'sha256',
                         'checksum_type': 'sha256',
                         'db_version': 0,
                         'location_href': 'repodata/5a8e6bbb940b151103b3970a26e32b8965da9e90a798b1b80ee4325308149d8d-primary.xml.gz',
                         'size': 956,
                         'size_open': 3411,
                         'timestamp': 1355393567},
             'primary_db': {'checksum': 'a09c42730c03b0d5defa3fd9213794c49e9bafbc67acdd8d4e87a2adf30b8752',
                            'checksum_open': '27b2200efa2c518e5dd5a59deb9ab33c2abca74cb74f5241e612b15931dcec37',
                            'checksum_open_type': 'sha256',
                            'checksum_type': 'sha256',
                            'db_version': 10,
                            'location_href': 'repodata/a09c42730c03b0d5defa3fd9213794c49e9bafbc67acdd8d4e87a2adf30b8752-primary.sqlite.bz2',
                            'size': 2649,
                            'size_open': 23552,
                            'timestamp': 1355393568},
             'repo_tags': ['test'],
             'revision': '1355393568',
             'updateinfo': {'checksum': '65c4f66e2808d328890505c3c2f13bb35a96f457d1c21a6346191c4dc07e6080',
                            'checksum_open': 'ded9c95e1b88197c906603b5d9693c579cb0afeade3bc7f8ec6cae06b962477d',
                            'checksum_open_type': 'sha256',
                            'checksum_type': 'sha256',
                            'db_version': 0,
                            'location_href': 'repodata/65c4f66e2808d328890505c3c2f13bb35a96f457d1c21a6346191c4dc07e6080-updateinfo.xml.gz',
                            'size': 55,
                            'size_open': 42,
                            'timestamp': 1354188048}}
            )
        # Test if all mentioned files really exist
        self.assertTrue(os.path.isdir(yum_repo["destdir"]))
        for key in yum_repo:
            if yum_repo[key] and (key not in ("url", "destdir")):
                self.assertTrue(os.path.isfile(yum_repo[key]))
        # No mirrorlist/metalink was configured, so none may be recorded.
        self.assertFalse(h.mirrors)
        self.assertFalse(h.metalink)
def test_download_repo_from_bad_url(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.BADURL)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
self.assertFalse(h.mirrors)
self.assertFalse(h.metalink)
    def test_partial_download_repo_01(self):
        """An empty LRO_YUMDLIST means: download only repomd.xml, nothing else."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_YUMDLIST, [])
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        # Only the repomd.xml path may be set; commented-out keys stay unset.
        self.assertEqual(yum_repo,
            {#'deltainfo': None,
             'destdir': self.tmpdir,
             #'filelists': None,
             #'filelists_db': None,
             #'group': None,
             #'group_gz': None,
             #'origin': None,
             #'other': None,
             #'other_db': None,
             #'prestodelta': None,
             #'primary': None,
             #'primary_db': None,
             'repomd': self.tmpdir+'/repodata/repomd.xml',
             #'updateinfo': None,
             'url': url,
             'signature': None,
             'mirrorlist': None,
             'metalink': None}
            )
        # Test if all mentioned files really exist
        self.assertTrue(os.path.isdir(yum_repo["destdir"]))
        for key in yum_repo:
            if yum_repo[key] and (key not in ("url", "destdir")):
                self.assertTrue(os.path.isfile(yum_repo[key]))
        self.assertFalse(h.mirrors)
        self.assertFalse(h.metalink)
    def test_partial_download_repo_02(self):
        """LRO_YUMDLIST with explicit names downloads exactly those types."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_YUMDLIST, ["other", "primary"])
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        # Only repomd.xml plus the two requested files may be present.
        self.assertEqual(yum_repo,
            {#'deltainfo': None,
             'destdir': self.tmpdir,
             #'filelists': None,
             #'filelists_db': None,
             #'group': None,
             #'group_gz': None,
             #'origin': None,
             'other': self.tmpdir+'/repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
             #'other_db': None,
             #'prestodelta': None,
             'primary': self.tmpdir+'/repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
             #'primary_db': None,
             'repomd': self.tmpdir+'/repodata/repomd.xml',
             #'updateinfo': None,
             'url': url,
             'signature': None,
             'mirrorlist': None,
             'metalink': None}
            )
        # Test if all mentioned files really exist
        self.assertTrue(os.path.isdir(yum_repo["destdir"]))
        for key in yum_repo:
            if yum_repo[key] and (key not in ("url", "destdir")):
                self.assertTrue(os.path.isfile(yum_repo[key]))
        self.assertFalse(h.mirrors)
        self.assertFalse(h.metalink)
    def test_partial_download_repo_03(self):
        """LRO_YUMBLIST is a blacklist: listed types are skipped, the rest fetched."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_YUMBLIST, ["other", "filelists"])
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        # "other" and "filelists" are blacklisted (commented out below);
        # everything else the repo provides must be downloaded.
        self.assertEqual(yum_repo,
            { #'deltainfo': None,
              'destdir': self.tmpdir,
              #'filelists': self.tmpdir+'/repodata/aeca08fccd3c1ab831e1df1a62711a44ba1922c9-filelists.xml.gz',
              'filelists_db': self.tmpdir+'/repodata/4034dcea76c94d3f7a9616779539a4ea8cac288f-filelists.sqlite.bz2',
              #'group': None,
              #'group_gz': None,
              #'origin': None,
              #'other': self.tmpdir+'/repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
              'other_db': self.tmpdir+'/repodata/fd96942c919628895187778633001cff61e872b8-other.sqlite.bz2',
              #'prestodelta': None,
              'primary': self.tmpdir+'/repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
              'primary_db': self.tmpdir+'/repodata/735cd6294df08bdf28e2ba113915ca05a151118e-primary.sqlite.bz2',
              'repomd': self.tmpdir+'/repodata/repomd.xml',
              #'updateinfo': None,
              'url': url,
              'signature': None,
              'mirrorlist': None,
              'metalink': None}
            )
        # Test if all mentioned files really exist
        self.assertTrue(os.path.isdir(yum_repo["destdir"]))
        for key in yum_repo:
            if yum_repo[key] and (key not in ("url", "destdir")):
                self.assertTrue(os.path.isfile(yum_repo[key]))
        self.assertFalse(h.mirrors)
        self.assertFalse(h.metalink)
def test_download_repo_01_without_result_object(self):
h = librepo.Handle()
url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_CHECKSUM, True)
r = h.perform()
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
def test_download_repo_01_with_checksum_check(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_CHECKSUM, True)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
def test_download_corrupted_repo_01_with_checksum_check(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s%s" % (MOCKURL, config.HARMCHECKSUM % "primary.xml", config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_CHECKSUM, True)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
def test_download_repo_with_gpg_check(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_GPGCHECK, True)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertTrue("signature" in yum_repo and yum_repo["signature"])
self.assertTrue(self.tmpdir+'/repodata/repomd.xml.asc' == yum_repo["signature"] )
self.assertTrue(os.path.isfile(yum_repo["signature"]))
def test_download_repo_with_gpg_check_bad_signature(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s%s" % (MOCKURL, config.BADGPG, config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_GPGCHECK, True)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertTrue("signature" not in yum_repo or yum_repo["signature"])
def test_download_repo_01_with_missing_file(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s%s" % (MOCKURL, config.MISSINGFILE % "primary.xml", config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertTrue(os.path.getsize(yum_repo["primary"]) == 0)
def test_download_repo_01_with_missing_unwanted_file(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s%s" % (MOCKURL, config.MISSINGFILE % "primary.xml", config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_YUMDLIST, ["other"])
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
def test_bad_mirrorlist_url(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.BADURL)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
# Metalink tests
    def test_download_only_metalink(self):
        """LRO_FETCHMIRRORS must fetch and parse the metalink without repo data."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.METALINK_GOOD_01)
        h.setopt(librepo.LRO_MIRRORLIST, url)
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_FETCHMIRRORS, True)
        h.perform(r)
        self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
        # Full parsed content of the metalink fixture.
        self.assertEqual(h.metalink,
            {'timestamp': 1347459931,
             'hashes': [
                 ('md5', 'f76409f67a84bcd516131d5cc98e57e1'),
                 ('sha1', '75125e73304c21945257d9041a908d0d01d2ca16'),
                 ('sha256', 'bef5d33dc68f47adc7b31df448851b1e9e6bae27840f28700fff144881482a6a'),
                 ('sha512', 'e40060c747895562e945a68967a04d1279e4bd8507413681f83c322479aa564027fdf3962c2d875089bfcb9317d3a623465f390dc1f4acef294711168b807af0')],
             'size': 2621,
             'urls': [{
                 'url': 'http://127.0.0.1:5000/yum/static/01/repodata/repomd.xml',
                 'type': 'http',
                 'protocol': 'http',
                 'location': 'CZ',
                 'preference': 100}],
             'filename': 'repomd.xml'}
            )
    def test_download_repo_01_via_metalink_with_alternates(self):
        """A metalink whose main hashes are wrong must succeed via <alternates>.

        Also exercises configuring the Handle via attribute assignment
        (metalinkurl, repotype, ...) instead of setopt().
        """
        h = librepo.Handle()
        h.metalinkurl = "%s%s" % (MOCKURL, config.METALINK_WITH_ALTERNATES)
        h.repotype = librepo.LR_YUMREPO
        h.destdir = self.tmpdir
        h.checksum = True
        r = h.perform()
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
        self.assertTrue(yum_repo)
        self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
        self.assertTrue(yum_repomd)
        self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
        # The top-level hashes are deliberately "bad"; the first alternate
        # carries the real checksums of the served repomd.xml.
        self.assertEqual(h.metalink,
            {'timestamp': 1381706941,
             'hashes': [
                 ('md5', 'bad'),
                 ('sha1', 'bad'),
                 ('sha256', 'bad'),
                 ('sha512', 'bad')],
             'size': 4761,
             'urls': [{
                 'url': 'http://127.0.0.1:5000/yum/static/01/repodata/repomd.xml',
                 'type': 'http',
                 'protocol': 'http',
                 'location': 'CZ',
                 'preference': 100}],
             'filename': 'repomd.xml',
             'alternates': [{
                 'timestamp': 1347459931,
                 'hashes': [
                     ('md5', 'f76409f67a84bcd516131d5cc98e57e1'),
                     ('sha1', '75125e73304c21945257d9041a908d0d01d2ca16'),
                     ('sha256', 'bef5d33dc68f47adc7b31df448851b1e9e6bae27840f28700fff144881482a6a'),
                     ('sha512', 'e40060c747895562e945a68967a04d1279e4bd8507413681f83c322479aa564027fdf3962c2d875089bfcb9317d3a623465f390dc1f4acef294711168b807af0')
                 ],
                 'size': 2621},
                 {
                 'timestamp': 123,
                 'hashes': [
                     ('sha1', 'foobar'),
                 ],
                 'size': 456}]
            }
            )
    def test_download_repo_01_via_metalink_01(self):
        """Repo 01 must download through a good metalink, with checksums on."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.METALINK_GOOD_01)
        h.setopt(librepo.LRO_MIRRORLIST, url)
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_CHECKSUM, True)
        h.perform(r)
        yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
        yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
        self.assertTrue(yum_repo)
        self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
        self.assertTrue(yum_repomd)
        self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
        # Full parsed content of the metalink fixture.
        self.assertEqual(h.metalink,
            {'timestamp': 1347459931,
             'hashes': [
                 ('md5', 'f76409f67a84bcd516131d5cc98e57e1'),
                 ('sha1', '75125e73304c21945257d9041a908d0d01d2ca16'),
                 ('sha256', 'bef5d33dc68f47adc7b31df448851b1e9e6bae27840f28700fff144881482a6a'),
                 ('sha512', 'e40060c747895562e945a68967a04d1279e4bd8507413681f83c322479aa564027fdf3962c2d875089bfcb9317d3a623465f390dc1f4acef294711168b807af0')],
             'size': 2621,
             'urls': [{
                 'url': 'http://127.0.0.1:5000/yum/static/01/repodata/repomd.xml',
                 'type': 'http',
                 'protocol': 'http',
                 'location': 'CZ',
                 'preference': 100}],
             'filename': 'repomd.xml'}
            )
def test_download_repo_01_via_metalink_02(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_GOOD_02)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
def test_download_repo_01_via_metalink_badfilename(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_BADFILENAME)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
self.assertRaisesRegexp(librepo.LibrepoException,
"repomd.xml was not found in metalink",
h.perform,
(r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertEqual(yum_repo, None)
self.assertEqual(yum_repomd, None)
def test_download_repo_01_via_metalink_badchecksum(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_BADCHECKSUM)
h.setopt(librepo.LRO_METALINKURL, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_CHECKSUM, True)
self.assertRaises(librepo.LibrepoException, h.perform, (r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo["metalink"])
# All other values shoud be None, [], {} or equivalent
for key in yum_repo:
if key == "metalink":
continue
self.assertFalse(yum_repo[key])
for key in yum_repomd:
self.assertFalse(yum_repomd[key])
def test_download_repo_01_via_metalink_nourls(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_NOURLS)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
self.assertRaisesRegexp(librepo.LibrepoException,
".* No URLs in metalink", h.perform, (r))
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertEqual(yum_repo, None)
self.assertEqual(yum_repomd, None)
def test_download_repo_01_via_metalink_badfirsturl(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_BADFIRSTURL)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
# Test if all mentioned files really exist
self.assertTrue(os.path.isdir(yum_repo["destdir"]))
for key in yum_repo:
if yum_repo[key] and (key not in ("url", "destdir")):
self.assertTrue(os.path.isfile(yum_repo[key]))
def test_download_repo_01_via_metalink_badfirsturl_maxmirrortries(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_BADFIRSTHOST)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_MAXMIRRORTRIES, 1)
# Because first host is bad and maxmirrortries == 1
# Download should fail
self.assertRaises(librepo.LibrepoException, h.perform, (r))
def test_download_repo_01_via_metalink_badfirsthost_fastestmirror(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_BADFIRSTHOST)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_FASTESTMIRROR, True)
h.setopt(librepo.LRO_MAXMIRRORTRIES, 1)
# First host is bad, but fastestmirror is used and thus
# working mirror should be added to the first position
# and download should be successfull even if maxmirrortries
# is equal to 1.
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
# Test if all mentioned files really exist
self.assertTrue(os.path.isdir(yum_repo["destdir"]))
for key in yum_repo:
if yum_repo[key] and (key not in ("url", "destdir")):
self.assertTrue(os.path.isfile(yum_repo[key]))
def test_download_repo_01_via_metalink_badfirsthost_fastestmirror_with_cache(self):
h = librepo.Handle()
r = librepo.Result()
cache = os.path.join(self.tmpdir, "fastestmirror.cache")
self.assertFalse(os.path.exists(cache))
url = "%s%s" % (MOCKURL, config.METALINK_BADFIRSTHOST)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_FASTESTMIRROR, True)
h.setopt(librepo.LRO_FASTESTMIRRORCACHE, cache)
h.setopt(librepo.LRO_MAXMIRRORTRIES, 1)
# First host is bad, but fastestmirror is used and thus
# working mirror should be added to the first position
# and download should be successfull even if maxmirrortries
# is equal to 1.
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
self.assertTrue(os.path.exists(cache))
shutil.rmtree(os.path.join(self.tmpdir, "repodata"))
# Try again, this time, fastestmirror cache should be used
h.perform()
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
self.assertTrue(os.path.exists(cache))
def test_download_repo_01_via_metalink_firsturlhascorruptedfiles(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.METALINK_FIRSTURLHASCORRUPTEDFILES)
h.setopt(librepo.LRO_MIRRORLIST, url)
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.setopt(librepo.LRO_CHECKSUM, True)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertTrue(yum_repo)
self.assertTrue(yum_repomd)
self.assertEqual(yum_repo["url"],
"http://127.0.0.1:5000/yum/harm_checksum/primary.xml/static/01/")
# Test if all mentioned files really exist
self.assertTrue(os.path.isdir(yum_repo["destdir"]))
for key in yum_repo:
if yum_repo[key] and (key not in ("url", "destdir")):
self.assertTrue(os.path.isfile(yum_repo[key]))
    def test_download_repo_01_with_baseurl_and_metalink_specified_only_fetchmirrors(self):
        """With both a baseurl and a metalink set, LRO_FETCHMIRRORS still
        fetches and parses only the metalink (no repo data is downloaded)."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        url = "%s%s" % (MOCKURL, config.METALINK_GOOD_01)
        h.setopt(librepo.LRO_MIRRORLIST, url)
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.setopt(librepo.LRO_FETCHMIRRORS, True)
        h.perform(r)
        self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
        # Full parsed content of the metalink fixture.
        self.assertEqual(h.metalink,
            {'timestamp': 1347459931,
             'hashes': [
                 ('md5', 'f76409f67a84bcd516131d5cc98e57e1'),
                 ('sha1', '75125e73304c21945257d9041a908d0d01d2ca16'),
                 ('sha256', 'bef5d33dc68f47adc7b31df448851b1e9e6bae27840f28700fff144881482a6a'),
                 ('sha512', 'e40060c747895562e945a68967a04d1279e4bd8507413681f83c322479aa564027fdf3962c2d875089bfcb9317d3a623465f390dc1f4acef294711168b807af0')],
             'size': 2621,
             'urls': [{
                 'url': 'http://127.0.0.1:5000/yum/static/01/repodata/repomd.xml',
                 'type': 'http',
                 'protocol': 'http',
                 'location': 'CZ',
                 'preference': 100}],
             'filename': 'repomd.xml'}
            )
    def test_download_repo_01_with_baseurl_and_metalink_specified(self):
        """With both a baseurl and a metalink configured, the metalink must
        still be fetched and parsed during a full download."""
        h = librepo.Handle()
        r = librepo.Result()
        url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
        h.setopt(librepo.LRO_URLS, [url])
        url = "%s%s" % (MOCKURL, config.METALINK_GOOD_01)
        h.setopt(librepo.LRO_MIRRORLIST, url)
        h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
        h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
        h.fastestmirror = True  # XXX
        h.perform(r)
        self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
        # Full parsed content of the metalink fixture.
        self.assertEqual(h.metalink,
            {'timestamp': 1347459931,
             'hashes': [
                 ('md5', 'f76409f67a84bcd516131d5cc98e57e1'),
                 ('sha1', '75125e73304c21945257d9041a908d0d01d2ca16'),
                 ('sha256', 'bef5d33dc68f47adc7b31df448851b1e9e6bae27840f28700fff144881482a6a'),
                 ('sha512', 'e40060c747895562e945a68967a04d1279e4bd8507413681f83c322479aa564027fdf3962c2d875089bfcb9317d3a623465f390dc1f4acef294711168b807af0')],
             'size': 2621,
             'urls': [{
                 'url': 'http://127.0.0.1:5000/yum/static/01/repodata/repomd.xml',
                 'type': 'http',
                 'protocol': 'http',
                 'location': 'CZ',
                 'preference': 100}],
             'filename': 'repomd.xml'}
            )
# Mirrorlist tests
def test_download_only_mirrorlist(self):
    """Fetch only the mirror list (LRO_FETCHMIRRORS), not the repo itself."""
    handle = librepo.Handle()
    result = librepo.Result()
    mirrorlist_url = "%s%s" % (MOCKURL, config.MIRRORLIST_GOOD_01)
    for option, value in (
            (librepo.LRO_MIRRORLIST, mirrorlist_url),
            (librepo.LRO_REPOTYPE, librepo.LR_YUMREPO),
            (librepo.LRO_DESTDIR, self.tmpdir),
            (librepo.LRO_FETCHMIRRORS, True)):
        handle.setopt(option, value)
    handle.perform(result)
    # A plain mirrorlist yields mirror URLs but no metalink document.
    self.assertEqual(handle.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    self.assertEqual(handle.metalink, None)
def test_download_repo_01_via_mirrorlist_01(self):
    """Full repo download where the base URL comes from a mirrorlist."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_GOOD_01)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    # The repo must have been fetched from the single mirror in the list.
    self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
    self.assertTrue(yum_repomd)
def test_download_repo_01_via_mirrorlist_02(self):
    """Same as _01 but using the second good mirrorlist fixture."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_GOOD_02)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
    self.assertTrue(yum_repomd)
def test_download_repo_01_via_mirrorlist_nourls(self):
    """A mirrorlist with no usable URLs must make perform() raise."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_NOURLS)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    self.assertRaises(librepo.LibrepoException, h.perform, (r))
    # Nothing should have been downloaded or recorded in the result.
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertEqual(yum_repo, None)
    self.assertEqual(yum_repomd, None)
def test_download_repo_01_via_mirrorlist_badfirsturl(self):
    """When the first mirror is broken, librepo falls through to the next."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_BADFIRSTURL)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    # The second (working) mirror must be the one actually used.
    self.assertEqual(yum_repo["url"], "http://127.0.0.1:5000/yum/static/01/")
    # Test if all mentioned files really exist
    self.assertTrue(os.path.isdir(yum_repo["destdir"]))
    for key in yum_repo:
        if yum_repo[key] and (key not in ("url", "destdir")):
            self.assertTrue(os.path.isfile(yum_repo[key]))
def test_download_repo_01_via_mirrorlist_firsturlhascorruptedfiles(self):
    """With checksumming on, a mirror serving corrupted files is skipped.

    NOTE(review): the asserted URL still points at the harm_checksum mirror;
    presumably only primary.xml is corrupted there while repomd.xml is fine,
    so the first mirror is reported -- confirm against the mock server setup.
    """
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_FIRSTURLHASCORRUPTEDFILES)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    self.assertEqual(yum_repo["url"],
        "http://127.0.0.1:5000/yum/harm_checksum/primary.xml/static/01/")
    # Test if all mentioned files really exist
    self.assertTrue(os.path.isdir(yum_repo["destdir"]))
    for key in yum_repo:
        if yum_repo[key] and (key not in ("url", "destdir")):
            self.assertTrue(os.path.isfile(yum_repo[key]))
def test_download_repo_01_via_mirrorlist_firsturlhascorruptedfiles_maxmirrortries_enabled(self):
    """Download should fail on the first mirror (one file has a bad checksum).
    Other mirrors have the file with a good checksum, but option
    LRO_MAXMIRRORTRIES should prevent trying of other mirrors."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_FIRSTURLHASCORRUPTEDFILES)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    # Only a single mirror may be tried, so the checksum failure is fatal.
    h.setopt(librepo.LRO_MAXMIRRORTRIES, 1)
    self.assertRaises(librepo.LibrepoException, h.perform, (r))
def test_download_repo_01_via_mirrorlist_firsturlhascorruptedfiles_maxmirrortries_enabled_2(self):
    """Download should fail on the first mirror (one file has a bad checksum).
    Other mirrors have the file with a good checksum.
    LRO_MAXMIRRORTRIES should allow try one next mirror. Thus repo
    should be downloaded without error."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_FIRSTURLHASCORRUPTEDFILES)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    # Two tries allowed: the second mirror rescues the download.
    h.setopt(librepo.LRO_MAXMIRRORTRIES, 2)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    self.assertEqual(yum_repo["url"],
        "http://127.0.0.1:5000/yum/harm_checksum/primary.xml/static/01/")
    # Test if all mentioned files really exist
    self.assertTrue(os.path.isdir(yum_repo["destdir"]))
    for key in yum_repo:
        if yum_repo[key] and (key not in ("url", "destdir")):
            self.assertTrue(os.path.isfile(yum_repo[key]))
def test_download_repo_01_with_baseurl_and_mirrorlist_specified_only_fetchmirrors(self):
    """Mirror fetch only, with both a base URL and a mirrorlist configured."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
    h.setopt(librepo.LRO_URLS, [url])
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_GOOD_01)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_FETCHMIRRORS, True)
    h.perform(r)
    self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    # Plain mirrorlist, so no metalink document is parsed.
    self.assertEqual(h.metalink, None)
def test_download_repo_01_with_baseurl_and_mirrorlist_specified(self):
    """Full download with both a base URL and a mirrorlist configured."""
    handle = librepo.Handle()
    result = librepo.Result()
    base_url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
    mirrorlist_url = "%s%s" % (MOCKURL, config.MIRRORLIST_GOOD_01)
    handle.setopt(librepo.LRO_URLS, [base_url])
    handle.setopt(librepo.LRO_MIRRORLIST, mirrorlist_url)
    handle.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    handle.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    handle.perform(result)
    # The mirrorlist's single mirror is recorded; no metalink is involved.
    self.assertEqual(handle.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    self.assertEqual(handle.metalink, None)
# Update test
def test_download_and_update_repo_01(self):
    """Download only repomd.xml, then LRO_UPDATE the same destdir with "primary".

    First pass uses LRO_YUMDLIST=[None] so only repomd.xml is fetched;
    the second pass updates the result in place, adding the primary file.
    """
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_FIRSTURLHASCORRUPTEDFILES)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.setopt(librepo.LRO_YUMDLIST, [None])
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    self.assertEqual(yum_repo["url"],
        "http://127.0.0.1:5000/yum/harm_checksum/primary.xml/static/01/")
    # Test that only repomd.xml has a path
    self.assertTrue(os.path.isdir(yum_repo["destdir"]))
    self.assertTrue(os.path.exists(yum_repo["repomd"]))
    for key in yum_repo:
        if yum_repo[key] and (key not in ("repomd", "url", "destdir", "mirrorlist")):
            # assertIsNone gives a clearer failure message than assertTrue(x == None)
            self.assertIsNone(yum_repo[key])
    # Update repo
    h.setopt(librepo.LRO_UPDATE, True)
    h.setopt(librepo.LRO_YUMDLIST, ["primary"])
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    # Test that only repomd.xml and primary has a path in yum_repo
    self.assertTrue(os.path.isdir(yum_repo["destdir"]))
    self.assertTrue(os.path.exists(yum_repo["repomd"]))
    self.assertTrue(os.path.exists(yum_repo["primary"]))
    for key in yum_repo:
        if yum_repo[key] and (key not in ("repomd", "primary", "url", "destdir", "mirrorlist")):
            self.assertIsNone(yum_repo[key])
# Base Auth test
def test_download_repo_01_from_base_auth_secured_web_01(self):
    """Without credentials, an HTTP-basic-auth protected repo must fail."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s%s" % (MOCKURL, config.AUTHBASIC, config.REPO_YUM_01_PATH)
    h.setopt(librepo.LRO_URLS, [url])
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    self.assertRaises(librepo.LibrepoException, h.perform, (r))
def test_download_repo_01_from_base_auth_secured_web_02(self):
    """With LRO_HTTPAUTH + LRO_USERPWD set, the protected repo downloads."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s%s" % (MOCKURL, config.AUTHBASIC, config.REPO_YUM_01_PATH)
    h.setopt(librepo.LRO_URLS, [url])
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.setopt(librepo.LRO_HTTPAUTH, True)
    h.setopt(librepo.LRO_USERPWD, "%s:%s" % (config.AUTH_USER, config.AUTH_PASS))
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
# Progressbar test
def test_download_repo_01_with_progressbar(self):
    """The progress callback is invoked and reports consistent totals."""
    # Mutable dict shared with the callback to record progress values.
    data = {"ttd": 0, "d": 0, "calls": 0}
    def cb(data, total_to_download, downloaded):
        # Signature required by LRO_PROGRESSCB: (userdata, total, downloaded).
        data["ttd"] = total_to_download
        data["d"] = downloaded
        data["calls"] = data["calls"] + 1
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
    h.setopt(librepo.LRO_URLS, [url])
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.setopt(librepo.LRO_PROGRESSDATA, data)
    h.setopt(librepo.LRO_PROGRESSCB, cb)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    self.assertTrue(data["calls"] > 0)
    # Final callback either reached the total or total was never known (0).
    self.assertTrue(data["ttd"] == data["d"] or data["ttd"] == 0)
# Var substitution test
def test_download_repo_01_with_url_substitution(self):
    """LRO_VARSUB expands $variables embedded in the base URL."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH_VAR)
    h.setopt(librepo.LRO_URLS, [url])
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_CHECKSUM, True)
    h.setopt(librepo.LRO_VARSUB, config.REPO_YUM_01_VARSUB_LIST)
    h.perform(r)
    yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
    yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
    self.assertTrue(yum_repo)
    self.assertTrue(yum_repomd)
    # The substituted URL must resolve to the concrete (non-$var) repo path.
    self.assertTrue(yum_repo["url"].endswith(config.REPO_YUM_01_PATH))
def test_download_repo_01_mirrorlist_substitution(self):
    """Variables inside the mirrorlist *content* are substituted."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_VARED)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_FETCHMIRRORS, True)
    h.setopt(librepo.LRO_VARSUB, config.MIRRORLIST_VARED_LIST)
    h.perform(r)
    self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    self.assertEqual(h.metalink, None)
def test_download_repo_01_mirrorlist_with_url_substitution(self):
    """Variables in the mirrorlist *URL* itself are substituted."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.MIRRORLIST_VARSUB)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_FETCHMIRRORS, True)
    h.setopt(librepo.LRO_VARSUB, config.MIRRORLIST_VARSUB_LIST)
    h.perform(r)
    self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    self.assertEqual(h.metalink, None)
def test_download_repo_01_metalink_with_url_substitution(self):
    """Variables in the metalink URL are substituted; the parsed metalink
    keeps its original (unsubstituted) mirror URL strings."""
    h = librepo.Handle()
    r = librepo.Result()
    url = "%s%s" % (MOCKURL, config.METALINK_VARSUB)
    h.setopt(librepo.LRO_MIRRORLIST, url)
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
    h.setopt(librepo.LRO_FETCHMIRRORS, True)
    h.setopt(librepo.LRO_VARSUB, config.METALINK_VARSUB_LIST)
    h.perform(r)
    self.assertEqual(h.mirrors, ['http://127.0.0.1:5000/yum/static/01/'])
    # Note: the raw metalink still contains the $version placeholder.
    self.assertEqual(h.metalink["urls"],
        [{
        'url': 'http://127.0.0.1:5000/yum/static/$version/repodata/repomd.xml',
        'type': 'http',
        'protocol': 'http',
        'location': 'CZ',
        'preference': 100}
        ])
| gpl-2.0 |
SM-G920P/Hacker_Kernel_SM-G92X | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return a dict that transparently creates nested dicts on access."""
    return defaultdict(autodict)
# Per-event registries of flag/symbolic field definitions, filled in by the
# define_* callbacks below and consulted by flag_str()/symbol_str().
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
    """Register the delimiter used when joining flag names for this field."""
    flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
    """Register the symbolic name for one flag bit of a flag-type field."""
    flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
    """Hook called when a symbolic field is declared; no state is needed."""
    # nothing to do, really
    pass
def define_symbolic_value(event_name, field_name, value, field_str):
    """Register the symbolic name for one value of a symbolic-type field."""
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Render a flag-type field value as its symbolic string (e.g. "A | B").

    Bits are consumed in ascending numeric order; a 0 entry (if defined)
    names the all-clear state.
    """
    string = ""
    if flag_fields[event_name][field_name]:
        print_delim = 0
        # sorted() instead of keys.sort(): dict.keys() returns a view
        # object in Python 3 which has no sort() method.
        keys = sorted(flag_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                # Clear the consumed bit so `not value` can terminate early.
                value &= ~idx
    return string
def symbol_str(event_name, field_name, value):
    """Render a symbolic-type field value as its registered name ("" if none)."""
    string = ""
    if symbolic_fields[event_name][field_name]:
        # sorted() instead of keys.sort(): dict.keys() returns a view
        # object in Python 3 which has no sort() method.
        keys = sorted(symbolic_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
    return string
# Bit values of the common trace flags field -> human-readable names.
trace_flags = { 0x00: "NONE", \
                0x01: "IRQS_OFF", \
                0x02: "IRQS_NOSUPPORT", \
                0x04: "NEED_RESCHED", \
                0x08: "HARDIRQ", \
                0x10: "SOFTIRQ" }
def trace_flag_str(value):
    """Decode the common trace-flags bitmask into "IRQS_OFF | HARDIRQ" text."""
    string = ""
    print_delim = 0
    # Iterate bits in ascending order: plain dict key order is arbitrary on
    # older interpreters, and the output string should be deterministic.
    for idx in sorted(trace_flags.keys()):
        if not value and not idx:
            string += "NONE"
            break
        if idx and (value & idx) == idx:
            if print_delim:
                string += " | ";
            string += trace_flags[idx]
            print_delim = 1
            value &= ~idx
    return string
def taskState(state):
    """Map a scheduler task-state number to its short code ("Unknown" if unmapped)."""
    states = {
        0: "R",
        1: "S",
        2: "D",
        64: "DEAD",
    }
    return states.get(state, "Unknown")
class EventHeaders:
    """Common per-event header fields (cpu, timestamp, pid, comm)."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        # Mirror the common_* trace fields onto short attribute names.
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        """Full event timestamp in nanoseconds."""
        return self.secs * 1000000000 + self.nsecs

    def ts_format(self):
        """Timestamp formatted as "seconds.microseconds"."""
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
pkimber/block | block/migrations/0004_auto_20150810_1651.py | 1 | 6379 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration for the `block` app.

    Adds the Document/HeaderFooter/Image/Link/Template/TemplateSection/Url
    models and wires Link to Document and Url. Do not edit the generated
    operations by hand unless you also understand the migration graph.
    """

    dependencies = [
        ('block', '0003_auto_20150419_2130'),
    ]

    operations = [
        migrations.CreateModel(
            name='Document',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                ('document', models.FileField(help_text='Uploaded document e.g. PDF', null=True, blank=True, upload_to='link/document')),
                ('original_file_name', models.CharField(help_text='Original file name of the document', null=True, blank=True, max_length=100)),
                ('deleted', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name': 'Document',
                'verbose_name_plural': 'Documents',
            },
        ),
        migrations.CreateModel(
            name='HeaderFooter',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('header', models.CharField(max_length=150)),
                ('url_twitter', models.URLField(verbose_name='Twitter URL', blank=True)),
                ('url_linkedin', models.URLField(verbose_name='LinkedIn URL', blank=True)),
                ('url_facebook', models.URLField(verbose_name='Facebook URL', blank=True)),
            ],
            options={
                'verbose_name': 'Header and footer',
                'verbose_name_plural': 'Header and footers',
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(max_length=200)),
                ('image', models.ImageField(upload_to='link/image')),
                ('original_file_name', models.CharField(max_length=100)),
                ('deleted', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name': 'Link Image',
                'verbose_name_plural': 'Link Images',
            },
        ),
        migrations.CreateModel(
            name='Link',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(max_length=250)),
                ('link_type', models.CharField(choices=[('d', 'Document'), ('u', 'External URL'), ('r', 'Internal URL')], max_length=1)),
                ('url_external', models.URLField(help_text='URL for a web site e.g. http://www.bbc.co.uk/news', null=True, verbose_name='Link', blank=True)),
                ('document', models.ForeignKey(null=True, blank=True, to='block.Document')),
            ],
            options={
                'verbose_name': 'Link',
                'verbose_name_plural': 'Links',
            },
        ),
        migrations.CreateModel(
            name='Template',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('template_name', models.CharField(help_text="File name e.g. 'compose/page_article.html'", max_length=150)),
            ],
            options={
                'verbose_name': 'Template',
                'ordering': ('template_name',),
                'verbose_name_plural': 'Templates',
            },
        ),
        migrations.CreateModel(
            name='TemplateSection',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('section', models.ForeignKey(to='block.Section')),
                ('template', models.ForeignKey(to='block.Template')),
            ],
            options={
                'verbose_name': 'Template section',
                'ordering': ('template__template_name', 'section__name'),
                'verbose_name_plural': 'Template sections',
            },
        ),
        migrations.CreateModel(
            name='Url',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                ('url_type', models.CharField(choices=[('p', 'Page'), ('r', 'Reverse')], max_length=1)),
                ('name', models.CharField(help_text="e.g. 'project.page' or 'web.training.application'", max_length=100)),
                ('arg1', models.SlugField(help_text="e.g. 'training'", max_length=100)),
                ('arg2', models.SlugField(help_text="e.g. 'application'", max_length=100)),
                ('arg3', models.SlugField(help_text="e.g. 'urgent'", max_length=100)),
                ('deleted', models.BooleanField(default=False)),
                ('page', models.ForeignKey(null=True, blank=True, to='block.Page')),
            ],
            options={
                'verbose_name': 'URL',
                'verbose_name_plural': 'URLs',
            },
        ),
        # Link.url_internal is added after Url exists, then the composite
        # uniqueness constraints are applied.
        migrations.AddField(
            model_name='link',
            name='url_internal',
            field=models.ForeignKey(null=True, blank=True, help_text='A page on this web site', to='block.Url'),
        ),
        migrations.AlterUniqueTogether(
            name='url',
            unique_together=set([('page', 'name', 'arg1', 'arg2', 'arg3')]),
        ),
        migrations.AlterUniqueTogether(
            name='templatesection',
            unique_together=set([('template', 'section')]),
        ),
    ]
| apache-2.0 |
karyon/django | tests/one_to_one/tests.py | 15 | 20501 | from __future__ import unicode_literals
from django.db import IntegrityError, connection, transaction
from django.test import TestCase
from .models import (
Bar, Director, Favorites, HiddenPointer, ManualPrimaryKey, MultiModel,
Place, Pointer, RelatedModel, Restaurant, School, Target, UndergroundBar,
Waiter,
)
class OneToOneTests(TestCase):
def setUp(self):
    # Two places; a Restaurant and a Bar are both attached to p1 only.
    self.p1 = Place.objects.create(name='Demon Dogs', address='944 W. Fullerton')
    self.p2 = Place.objects.create(name='Ace Hardware', address='1013 N. Ashland')
    self.r1 = Restaurant.objects.create(place=self.p1, serves_hot_dogs=True, serves_pizza=False)
    self.b1 = Bar.objects.create(place=self.p1, serves_cocktails=False)
def test_getter(self):
    """Forward and reverse one-to-one access, including the missing case."""
    # A Restaurant can access its place.
    self.assertEqual(repr(self.r1.place), '<Place: Demon Dogs the place>')
    # A Place can access its restaurant, if available.
    self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
    # p2 doesn't have an associated restaurant.
    with self.assertRaisesMessage(Restaurant.DoesNotExist, 'Place has no restaurant'):
        self.p2.restaurant
    # The exception raised on attribute access when a related object
    # doesn't exist should be an instance of a subclass of `AttributeError`
    # refs #21563
    self.assertFalse(hasattr(self.p2, 'restaurant'))
def test_setter(self):
    """Assigning through the relation works in both directions."""
    # Set the place using assignment notation. Because place is the primary
    # key on Restaurant, the save will create a new restaurant
    self.r1.place = self.p2
    self.r1.save()
    self.assertEqual(repr(self.p2.restaurant), '<Restaurant: Ace Hardware the restaurant>')
    self.assertEqual(repr(self.r1.place), '<Place: Ace Hardware the place>')
    self.assertEqual(self.p2.pk, self.r1.pk)
    # Set the place back again, using assignment in the reverse direction.
    self.p1.restaurant = self.r1
    self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
    r = Restaurant.objects.get(pk=self.p1.id)
    self.assertEqual(repr(r.place), '<Place: Demon Dogs the place>')
def test_manager_all(self):
    """Default managers of both sides return only their own model's rows."""
    # Restaurant.objects.all() just returns the Restaurants, not the Places.
    self.assertQuerysetEqual(Restaurant.objects.all(), [
        '<Restaurant: Demon Dogs the restaurant>',
    ])
    # Place.objects.all() returns all Places, regardless of whether they
    # have Restaurants.
    self.assertQuerysetEqual(Place.objects.order_by('name'), [
        '<Place: Ace Hardware the place>',
        '<Place: Demon Dogs the place>',
    ])
def test_manager_get(self):
    """get() accepts every spelling of the o2o lookup in both directions."""
    def assert_get_restaurant(**params):
        self.assertEqual(repr(Restaurant.objects.get(**params)),
                         '<Restaurant: Demon Dogs the restaurant>')
    assert_get_restaurant(place__id__exact=self.p1.pk)
    assert_get_restaurant(place__id=self.p1.pk)
    assert_get_restaurant(place__exact=self.p1.pk)
    assert_get_restaurant(place__exact=self.p1)
    assert_get_restaurant(place=self.p1.pk)
    assert_get_restaurant(place=self.p1)
    assert_get_restaurant(pk=self.p1.pk)
    assert_get_restaurant(place__pk__exact=self.p1.pk)
    assert_get_restaurant(place__pk=self.p1.pk)
    assert_get_restaurant(place__name__startswith="Demon")
    def assert_get_place(**params):
        self.assertEqual(repr(Place.objects.get(**params)),
                         '<Place: Demon Dogs the place>')
    assert_get_place(restaurant__place__exact=self.p1.pk)
    assert_get_place(restaurant__place__exact=self.p1)
    assert_get_place(restaurant__place__pk=self.p1.pk)
    assert_get_place(restaurant__exact=self.p1.pk)
    assert_get_place(restaurant__exact=self.r1)
    assert_get_place(restaurant__pk=self.p1.pk)
    assert_get_place(restaurant=self.p1.pk)
    assert_get_place(restaurant=self.r1)
    assert_get_place(id__exact=self.p1.pk)
    assert_get_place(pk=self.p1.pk)
def test_foreign_key(self):
    """FK lookups spanning the o2o work, and deletes cascade through it."""
    # Add a Waiter to the Restaurant.
    w = self.r1.waiter_set.create(name='Joe')
    self.assertEqual(repr(w), '<Waiter: Joe the waiter at Demon Dogs the restaurant>')
    # Query the waiters
    def assert_filter_waiters(**params):
        self.assertQuerysetEqual(Waiter.objects.filter(**params), [
            '<Waiter: Joe the waiter at Demon Dogs the restaurant>'
        ])
    assert_filter_waiters(restaurant__place__exact=self.p1.pk)
    assert_filter_waiters(restaurant__place__exact=self.p1)
    assert_filter_waiters(restaurant__place__pk=self.p1.pk)
    assert_filter_waiters(restaurant__exact=self.r1.pk)
    assert_filter_waiters(restaurant__exact=self.r1)
    assert_filter_waiters(restaurant__pk=self.r1.pk)
    assert_filter_waiters(restaurant=self.r1.pk)
    assert_filter_waiters(restaurant=self.r1)
    assert_filter_waiters(id__exact=w.pk)
    assert_filter_waiters(pk=w.pk)
    # Delete the restaurant; the waiter should also be removed
    r = Restaurant.objects.get(pk=self.r1.pk)
    r.delete()
    self.assertEqual(Waiter.objects.count(), 0)
def test_multiple_o2o(self):
    """Custom PKs and multiple o2o fields per model; uniqueness enforced."""
    # One-to-one fields still work if you create your own primary key
    o1 = ManualPrimaryKey(primary_key="abc123", name="primary")
    o1.save()
    o2 = RelatedModel(link=o1, name="secondary")
    o2.save()
    # You can have multiple one-to-one fields on a model, too.
    x1 = MultiModel(link1=self.p1, link2=o1, name="x1")
    x1.save()
    self.assertEqual(repr(o1.multimodel), '<MultiModel: Multimodel x1>')
    # This will fail because each one-to-one field must be unique (and
    # link2=o1 was used for x1, above).
    mm = MultiModel(link1=self.p2, link2=o1, name="x1")
    with self.assertRaises(IntegrityError):
        with transaction.atomic():
            mm.save()
def test_unsaved_object(self):
    """
    #10811 -- Assigning an unsaved object to a OneToOneField
    should raise an exception.
    """
    place = Place(name='User', address='London')
    with self.assertRaises(Restaurant.DoesNotExist):
        place.restaurant
    msg = "save() prohibited to prevent data loss due to unsaved related object 'place'."
    with self.assertRaisesMessage(ValueError, msg):
        Restaurant.objects.create(place=place, serves_hot_dogs=True, serves_pizza=False)
    # place should not cache restaurant
    with self.assertRaises(Restaurant.DoesNotExist):
        place.restaurant
def test_reverse_relationship_cache_cascade(self):
    """
    Regression test for #9023: accessing the reverse relationship shouldn't
    result in a cascading delete().
    """
    bar = UndergroundBar.objects.create(place=self.p1, serves_cocktails=False)
    # The bug in #9023: if you access the one-to-one relation *before*
    # setting to None and deleting, the cascade happens anyway.
    self.p1.undergroundbar
    bar.place.name = 'foo'
    bar.place = None
    bar.save()
    self.p1.delete()
    # The detached bar must survive the Place delete.
    self.assertEqual(Place.objects.all().count(), 1)
    self.assertEqual(UndergroundBar.objects.all().count(), 1)
def test_create_models_m2m(self):
    """
    Regression test for #1064 and #1506
    Check that we create models via the m2m relation if the remote model
    has a OneToOneField.
    """
    f = Favorites(name='Fred')
    f.save()
    f.restaurants.set([self.r1])
    self.assertQuerysetEqual(
        f.restaurants.all(),
        ['<Restaurant: Demon Dogs the restaurant>']
    )
def test_reverse_object_cache(self):
    """
    Regression test for #7173
    Check that the name of the cache for the reverse object is correct.
    """
    self.assertEqual(self.p1.restaurant, self.r1)
    self.assertEqual(self.p1.bar, self.b1)
def test_assign_none_reverse_relation(self):
    """Setting the reverse o2o to None clears a nullable relation."""
    p = Place.objects.get(name="Demon Dogs")
    # Assigning None succeeds if field is null=True.
    ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False)
    p.undergroundbar = None
    self.assertIsNone(ug_bar.place)
    ug_bar.save()
    ug_bar.refresh_from_db()
    self.assertIsNone(ug_bar.place)
def test_assign_none_null_reverse_relation(self):
    """Assigning None with no related object present must not raise."""
    p = Place.objects.get(name="Demon Dogs")
    # Assigning None doesn't throw AttributeError if there isn't a related
    # UndergroundBar.
    p.undergroundbar = None
def test_related_object_cache(self):
    """ Regression test for #6886 (the related-object cache) """
    # Look up the objects again so that we get "fresh" objects
    p = Place.objects.get(name="Demon Dogs")
    r = p.restaurant
    # Accessing the related object again returns the exactly same object
    self.assertIs(p.restaurant, r)
    # But if we kill the cache, we get a new object
    del p._restaurant_cache
    self.assertIsNot(p.restaurant, r)
    # Reassigning the Restaurant object results in an immediate cache update
    # We can't use a new Restaurant because that'll violate one-to-one, but
    # with a new *instance* the is test below will fail if #6886 regresses.
    r2 = Restaurant.objects.get(pk=r.pk)
    p.restaurant = r2
    self.assertIs(p.restaurant, r2)
    # Assigning None succeeds if field is null=True.
    ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False)
    ug_bar.place = None
    self.assertIsNone(ug_bar.place)
    # Assigning None will not fail: Place.restaurant is null=False
    setattr(p, 'restaurant', None)
    # You also can't assign an object of the wrong type here
    with self.assertRaises(ValueError):
        setattr(p, 'restaurant', p)
    # Creation using keyword argument should cache the related object.
    p = Place.objects.get(name="Demon Dogs")
    r = Restaurant(place=p)
    self.assertIs(r.place, p)
    # Creation using keyword argument and unsaved related instance (#8070).
    p = Place()
    r = Restaurant(place=p)
    # assertIs for consistency with the other identity checks in this test
    # (was assertTrue(r.place is p)); behavior is identical.
    self.assertIs(r.place, p)
    # Creation using attname keyword argument and an id will cause the related
    # object to be fetched.
    p = Place.objects.get(name="Demon Dogs")
    r = Restaurant(place_id=p.id)
    self.assertIsNot(r.place, p)
    self.assertEqual(r.place, p)
def test_filter_one_to_one_relations(self):
    """
    Regression test for #9968
    filtering reverse one-to-one relations with primary_key=True was
    misbehaving. We test both (primary_key=True & False) cases here to
    prevent any reappearance of the problem.
    """
    Target.objects.create()
    self.assertQuerysetEqual(
        Target.objects.filter(pointer=None),
        ['<Target: Target object>']
    )
    self.assertQuerysetEqual(
        Target.objects.exclude(pointer=None),
        []
    )
    self.assertQuerysetEqual(
        Target.objects.filter(second_pointer=None),
        ['<Target: Target object>']
    )
    self.assertQuerysetEqual(
        Target.objects.exclude(second_pointer=None),
        []
    )
def test_o2o_primary_key_delete(self):
    """delete() through an o2o-primary-key lookup reports correct counts."""
    t = Target.objects.create(name='name')
    Pointer.objects.create(other=t)
    num_deleted, objs = Pointer.objects.filter(other__name='name').delete()
    self.assertEqual(num_deleted, 1)
    self.assertEqual(objs, {'one_to_one.Pointer': 1})
def test_reverse_object_does_not_exist_cache(self):
    """
    Regression for #13839 and #17439.
    DoesNotExist on a reverse one-to-one relation is cached.
    """
    p = Place(name='Zombie Cats', address='Not sure')
    p.save()
    # First access hits the database once...
    with self.assertNumQueries(1):
        with self.assertRaises(Restaurant.DoesNotExist):
            p.restaurant
    # ...and the negative result is cached: no query the second time.
    with self.assertNumQueries(0):
        with self.assertRaises(Restaurant.DoesNotExist):
            p.restaurant
def test_reverse_object_cached_when_related_is_accessed(self):
    """
    Regression for #13839 and #17439.
    The target of a one-to-one relation is cached
    when the origin is accessed through the reverse relation.
    """
    # Use a fresh object without caches
    r = Restaurant.objects.get(pk=self.r1.pk)
    p = r.place
    with self.assertNumQueries(0):
        self.assertEqual(p.restaurant, r)
def test_related_object_cached_when_reverse_is_accessed(self):
    """
    Regression for #13839 and #17439.
    The origin of a one-to-one relation is cached
    when the target is accessed through the reverse relation.
    """
    # Use a fresh object without caches
    p = Place.objects.get(pk=self.p1.pk)
    r = p.restaurant
    with self.assertNumQueries(0):
        self.assertEqual(r.place, p)
def test_reverse_object_cached_when_related_is_set(self):
    """
    Regression for #13839 and #17439.
    The target of a one-to-one relation is always cached.
    """
    p = Place(name='Zombie Cats', address='Not sure')
    p.save()
    self.r1.place = p
    self.r1.save()
    # Assignment populated the reverse cache: no query on access.
    with self.assertNumQueries(0):
        self.assertEqual(p.restaurant, self.r1)
def test_reverse_object_cached_when_related_is_unset(self):
    """
    Regression for #13839 and #17439.
    The target of a one-to-one relation is always cached.
    """
    b = UndergroundBar(place=self.p1, serves_cocktails=True)
    b.save()
    with self.assertNumQueries(0):
        self.assertEqual(self.p1.undergroundbar, b)
    b.place = None
    b.save()
    # Unsetting updates the cache too: DoesNotExist without a query.
    with self.assertNumQueries(0):
        with self.assertRaises(UndergroundBar.DoesNotExist):
            self.p1.undergroundbar
    def test_get_reverse_on_unsaved_object(self):
        """
        Regression for #18153 and #19089.
        Accessing the reverse relation on an unsaved object
        always raises an exception.
        """
        p = Place()
        # When there's no instance of the origin of the one-to-one
        with self.assertNumQueries(0):
            with self.assertRaises(UndergroundBar.DoesNotExist):
                p.undergroundbar
        UndergroundBar.objects.create()
        # When there's one instance of the origin
        # (p.undergroundbar used to return that instance)
        with self.assertNumQueries(0):
            with self.assertRaises(UndergroundBar.DoesNotExist):
                p.undergroundbar
        # Several instances of the origin are only possible if database allows
        # inserting multiple NULL rows for a unique constraint
        if connection.features.supports_nullable_unique_constraints:
            UndergroundBar.objects.create()
            # When there are several instances of the origin
            with self.assertNumQueries(0):
                with self.assertRaises(UndergroundBar.DoesNotExist):
                    p.undergroundbar
    def test_set_reverse_on_unsaved_object(self):
        """
        Writing to the reverse relation on an unsaved object
        is impossible too.
        """
        p = Place()
        b = UndergroundBar.objects.create()
        # Assigning a reverse relation on an unsaved object is allowed.
        p.undergroundbar = b
        # However saving the object is not allowed.
        # The assignment is only rejected lazily, at save() time, and
        # without touching the database (zero queries).
        msg = "save() prohibited to prevent data loss due to unsaved related object 'place'."
        with self.assertNumQueries(0):
            with self.assertRaisesMessage(ValueError, msg):
                b.save()
    def test_nullable_o2o_delete(self):
        """
        Deleting the target of a nullable one-to-one must not cascade to
        a row whose FK was already set to NULL.
        """
        u = UndergroundBar.objects.create(place=self.p1)
        u.place_id = None
        u.save()
        self.p1.delete()
        # The bar survives the delete and its relation is now NULL.
        self.assertTrue(UndergroundBar.objects.filter(pk=u.pk).exists())
        self.assertIsNone(UndergroundBar.objects.get(pk=u.pk).place)
    def test_hidden_accessor(self):
        """
        When a '+' ending related name is specified no reverse accessor should
        be added to the related model.
        """
        # get_accessor_name() returns the name the accessor would have;
        # Target must simply not grow such an attribute.
        self.assertFalse(
            hasattr(Target, HiddenPointer._meta.get_field('target').remote_field.get_accessor_name())
        )
    def test_related_object(self):
        """
        One-to-one traversal uses the base manager, so objects hidden by a
        restrictive default manager stay reachable through the relation --
        unless the default manager opts in via use_for_related_fields.
        """
        public_school = School.objects.create(is_public=True)
        public_director = Director.objects.create(school=public_school, is_temp=False)
        private_school = School.objects.create(is_public=False)
        private_director = Director.objects.create(school=private_school, is_temp=True)
        # Only one school is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            School.objects.all(),
            ["<School: School object>"]
        )
        # Only one director is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            Director.objects.all(),
            ["<Director: Director object>"]
        )
        self.assertEqual(public_director.school, public_school)
        self.assertEqual(public_school.director, public_director)
        # Make sure the base manager is used so that the related objects
        # is still accessible even if the default manager doesn't normally
        # allow it.
        self.assertEqual(private_director.school, private_school)
        # Make sure the base manager is used so that an student can still access
        # its related school even if the default manager doesn't normally
        # allow it.
        self.assertEqual(private_school.director, private_director)
        # If the manager is marked "use_for_related_fields", it'll get used instead
        # of the "bare" queryset. Usually you'd define this as a property on the class,
        # but this approximates that in a way that's easier in tests.
        School.objects.use_for_related_fields = True
        try:
            private_director = Director._base_manager.get(pk=private_director.pk)
            with self.assertRaises(School.DoesNotExist):
                private_director.school
        finally:
            School.objects.use_for_related_fields = False
        Director.objects.use_for_related_fields = True
        try:
            private_school = School._base_manager.get(pk=private_school.pk)
            with self.assertRaises(Director.DoesNotExist):
                private_school.director
        finally:
            Director.objects.use_for_related_fields = False
    def test_hasattr_related_object(self):
        # The exception raised on attribute access when a related object
        # doesn't exist should be an instance of a subclass of `AttributeError`
        # refs #21563
        # (hasattr() only returns False for AttributeError subclasses, so
        # these checks fail if DoesNotExist stops being one.)
        self.assertFalse(hasattr(Director(), 'director'))
        self.assertFalse(hasattr(School(), 'school'))
def test_update_one_to_one_pk(self):
p1 = Place.objects.create()
p2 = Place.objects.create()
r1 = Restaurant.objects.create(place=p1)
r2 = Restaurant.objects.create(place=p2)
w = Waiter.objects.create(restaurant=r1)
Waiter.objects.update(restaurant=r2)
w.refresh_from_db()
self.assertEqual(w.restaurant, r2)
    def test_rel_pk_subquery(self):
        """Subqueries over the one-to-one PK column resolve correctly."""
        r = Restaurant.objects.first()
        q1 = Restaurant.objects.filter(place_id=r.pk)
        # Test that subquery using primary key and a query against the
        # same model works correctly.
        q2 = Restaurant.objects.filter(place_id__in=q1)
        self.assertQuerysetEqual(q2, [r], lambda x: x)
        # Test that subquery using 'pk__in' instead of 'place_id__in' work, too.
        q2 = Restaurant.objects.filter(
            pk__in=Restaurant.objects.filter(place__id=r.place.pk)
        )
        self.assertQuerysetEqual(q2, [r], lambda x: x)
def test_rel_pk_exact(self):
r = Restaurant.objects.first()
r2 = Restaurant.objects.filter(pk__exact=r).first()
self.assertEqual(r, r2)
| bsd-3-clause |
lgrootnoob/android_kernel_asus_grouper | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
    # Called by perf once before any events are processed.
    print "trace_begin"
    pass
def trace_end():
    # Called by perf after the last event; dump the unhandled-event tally.
    print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    vec):
    # Handler for irq:softirq_entry: print the common fields, then the
    # symbolic name registered for the softirq vector number.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    print_uncommon(context)
    print "vec=%s\n" % \
    (symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    call_site, ptr, bytes_req, bytes_alloc,
    gfp_flags):
    # Handler for kmem:kmalloc: print the allocation site, returned
    # pointer, requested vs. actually allocated sizes, and the GFP flags
    # decoded into their symbolic names via flag_str().
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    print_uncommon(context)
    print "call_site=%u, ptr=%u, bytes_req=%u, " \
    "bytes_alloc=%u, gfp_flags=%s\n" % \
    (call_site, ptr, bytes_req, bytes_alloc,
    flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
    # Count events that have no dedicated handler.  `unhandled` is an
    # autodict, so the first access yields an empty defaultdict; += on it
    # raises TypeError, which seeds the counter at 1.
    try:
        unhandled[event_name] += 1
    except TypeError:
        unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
    # Print the fields common to every event.  The trailing comma keeps
    # the cursor on the same line so handlers can append their own fields.
    print "%-20s %5u %05u.%09u %8u %-20s " % \
    (event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
    # Print the common_* fields that are not passed as handler arguments;
    # they are fetched back from perf through the context helpers.
    print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
        % (common_pc(context), trace_flag_str(common_flags(context)), \
        common_lock_depth(context))
def print_unhandled():
    # Print a two-column event/count table of events that had no handler;
    # silently return if every event was handled.
    keys = unhandled.keys()
    if not keys:
        return
    print "\nunhandled events:\n\n",
    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "-----------"),
    for event_name in keys:
        print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
nrc/rustc-perf | collector/benchmarks/style-servo/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py | 53 | 1193 | # Import the WebIDL module, so we can do isinstance checks and whatnot
import WebIDL
def WebIDLTest(parser, harness):
    # Each case feeds the parser two top-level definitions that share the
    # name "Foo" and expects finish() to raise; the harness then checks
    # only the exception message.
    try:
        parser.parse("""
            enum Foo { "a" };
            interface Foo;
        """)
        results = parser.finish()
        harness.ok(False, "Should fail to parse")
    except Exception, e:
        harness.ok("Name collision" in e.message,
                   "Should have name collision for interface")

    parser = parser.reset()
    try:
        parser.parse("""
            dictionary Foo { long x; };
            enum Foo { "a" };
        """)
        results = parser.finish()
        harness.ok(False, "Should fail to parse")
    except Exception, e:
        harness.ok("Name collision" in e.message,
                   "Should have name collision for dictionary")

    parser = parser.reset()
    try:
        parser.parse("""
            enum Foo { "a" };
            enum Foo { "b" };
        """)
        results = parser.finish()
        harness.ok(False, "Should fail to parse")
    except Exception, e:
        # NOTE(review): this case tests duplicate enums, yet the harness
        # message still says "dictionary" -- looks like a copy-paste slip
        # in the description string; confirm before changing.
        harness.ok("Multiple unresolvable definitions" in e.message,
                   "Should have name collision for dictionary")
| mit |
CybOXProject/python-cybox | cybox/test/objects/win_driver_test.py | 1 | 1972 | # Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from cybox.objects.win_driver_object import WinDriver
from cybox.compat import long
from cybox.test import EntityTestCase, round_trip
from cybox.test.objects import ObjectTestCase
class TestWinDriver(ObjectTestCase, unittest.TestCase):
    """Round-trip (dict -> object -> dict) test for the CybOX
    WindowsDriverObjectType binding.

    `_full_dict` exercises every simple field once; ObjectTestCase
    provides the actual test methods that serialize and compare it.
    """
    object_type = "WindowsDriverObjectType"
    klass = WinDriver

    # One representative value per field.  IRP major-function codes use
    # `long` (via cybox.compat) so the fixture behaves the same on
    # Python 2 and Python 3.
    _full_dict = {
        'driver_init': 123,
        'driver_name': "A driver name",
        'driver_object_address': "abcde12345",
        'driver_start_io': "abcce4321",
        'driver_unload': "ab3234dec",
        'image_base': "12345abc",
        'image_size': "12ff",
        'irp_mj_cleanup': long(1),
        'irp_mj_close': long(2),
        'irp_mj_create': long(3),
        'irp_mj_create_mailslot': long(4),
        'irp_mj_create_named_pipe': long(5),
        'irp_mj_device_change': long(6),
        'irp_mj_device_control': long(7),
        'irp_mj_directory_control': long(8),
        'irp_mj_file_system_control': long(9),
        'irp_mj_flush_buffers': long(11),
        'irp_mj_internal_device_control': long(12),
        'irp_mj_lock_control': long(13),
        'irp_mj_pnp': long(14),
        'irp_mj_power': long(15),
        'irp_mj_query_ea': long(16),
        'irp_mj_query_information': long(17),
        'irp_mj_query_quota': long(22),
        'irp_mj_query_security': long(23),
        'irp_mj_query_volume_information': long(24),
        'irp_mj_read': long(25),
        'irp_mj_set_ea': long(26),
        'irp_mj_set_information': long(27),
        'irp_mj_set_quota': long(33),
        'irp_mj_set_security': long(34),
        'irp_mj_set_volume_information': long(35),
        'irp_mj_shutdown': long(36),
        'irp_mj_system_control': long(37),
        'irp_mj_write': long(38),
        #TODO: add 'device_object_list'
        'xsi:type': object_type,
    }
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
Vachounet/acer_iconia_tab_kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 183 | 3245 | # Core.py - Python extension for perf trace, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return an auto-vivifying dictionary: accessing a missing key
    creates (and stores) another autodict, to arbitrary depth."""
    return defaultdict(autodict)
# Per-event registries populated by the define_* callbacks that perf
# invokes while loading trace format descriptions; read back by
# flag_str() / symbol_str().
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
    # Register the delimiter used when joining multiple flag names for
    # this event/field (e.g. "|").
    flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
    # Map one bit value of a flag field to its human-readable name.
    flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
    # nothing to do, really -- the field springs into existence when the
    # first value is registered, thanks to autodict.
    pass
def define_symbolic_value(event_name, field_name, value, field_str):
    # Map one exact value of a symbolic field to its display name.
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Decode a bitmask *value* into a delimiter-joined string of the
    flag names registered for (event_name, field_name).

    Returns "" when nothing is registered or no bit matches.  A name
    registered for 0 is used only when the whole value is 0.
    """
    string = ""
    # Hoist the nested autodict lookup; repeated indexing also creates
    # empty registries as a side effect, so do it once.
    field = flag_fields[event_name][field_name]
    if field:
        print_delim = 0
        # sorted() instead of list.sort() so this also works on Python 3,
        # where dict.keys() returns a view with no sort() method.
        for idx in sorted(field['values'].keys()):
            if not value and not idx:
                string += field['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and field['delim']:
                    string += " " + field['delim'] + " "
                string += field['values'][idx]
                print_delim = 1
                value &= ~idx
    return string
def symbol_str(event_name, field_name, value):
    """Return the display name registered for the exact *value* of a
    symbolic field, or "" if none was registered."""
    string = ""
    field = symbolic_fields[event_name][field_name]
    if field:
        # sorted() instead of list.sort() so this also works on Python 3,
        # where dict.keys() returns a view with no sort() method.
        for idx in sorted(field['values'].keys()):
            if not value and not idx:
                string = field['values'][idx]
                break
            if value == idx:
                string = field['values'][idx]
                break
    return string
# Bit -> name mapping for the common_flags field of a trace record;
# decoded by trace_flag_str() below.
trace_flags = { 0x00: "NONE", \
    0x01: "IRQS_OFF", \
    0x02: "IRQS_NOSUPPORT", \
    0x04: "NEED_RESCHED", \
    0x08: "HARDIRQ", \
    0x10: "SOFTIRQ" }
def trace_flag_str(value):
    """Decode the common_flags bitmask into a ' | '-separated string of
    flag names; a zero value decodes to "NONE"."""
    names = []
    remaining = value
    for bit in trace_flags.keys():
        if not remaining and not bit:
            names.append("NONE")
            break
        if bit and (remaining & bit) == bit:
            names.append(trace_flags[bit])
            remaining &= ~bit
    return " | ".join(names)
def taskState(state):
    """Map a scheduler task-state code to its single-letter display
    string; unknown codes map to "Unknown"."""
    states = {
        0: "R",
        1: "S",
        2: "D",
        64: "DEAD",
    }
    return states.get(state, "Unknown")
class EventHeaders:
    """Bundle of the common_* header fields carried by every trace event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        # Store under the short names the rest of the scripts expect.
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        """Event timestamp in nanoseconds."""
        return self.nsecs + self.secs * (10 ** 9)

    def ts_format(self):
        """Event timestamp formatted as 'seconds.microseconds'."""
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
dylan-reeves/home_backup | backupclient-env/Lib/site-packages/pip/vcs/subversion.py | 280 | 10468 | from __future__ import absolute_import
import logging
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl
# Patterns for scraping URL/revision information out of `svn info`,
# `svn info --xml`, and .svn/entries output.  Raw strings fix the
# invalid escape sequences (\d, \s) that warn on Python 3.6+.
_svn_xml_url_re = re.compile(r'url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
    """VersionControl backend for Subversion working copies
    (``svn+http``, ``svn+ssh``, ... URLs)."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        # LANG=C so the `svn info` output is parseable regardless of locale.
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            self.run_command(
                ['export'] + rev_options + [url, location],
                show_stdout=False)

    def switch(self, dest, url, rev_options):
        """Point the checkout at *dest* to a different repository URL."""
        self.run_command(['switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        """Update the checkout at *dest* to the requested revision."""
        self.run_command(['update'] + rev_options + [dest])

    def obtain(self, dest):
        """Check the repository out into *dest*, honouring any pinned
        revision from the URL fragment."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        """Find the dependency link whose egg fragment matches *dist*."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0
        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue
            dirurl, localrev = self._get_svn_url_rev(base)
            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Return (url, max_revision) for one working-copy directory,
        supporting both pre-1.7 .svn/entries formats and `svn info --xml`."""
        from pip.exceptions import InstallationError

        with open(os.path.join(location, self.dirname, 'entries')) as f:
            data = f.read()
        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            # svn <= 1.6: plain-text entries file, records split by \n\x0c\n.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_tag_revs(self, svn_tag_url):
        """List (tag_name, revision) pairs under the repository's tags URL."""
        stdout = self.run_command(['ls', '-v', svn_tag_url], show_stdout=False)
        results = []
        for line in stdout.splitlines():
            parts = line.split()
            rev = int(parts[0])
            tag = parts[-1].strip('/')
            results.append((tag, rev))
        return results

    def find_tag_match(self, rev, tag_revs):
        """Return the tag with the smallest revision later than *rev*,
        or None if no tag is newer."""
        best_match_rev = None
        best_tag = None
        for tag, tag_rev in tag_revs:
            if (tag_rev > rev and
                    (best_match_rev is None or best_match_rev > tag_rev)):
                # FIXME: Is best_match > tag_rev really possible?
                # or is it a sign something is wacky?
                best_match_rev = tag_rev
                best_tag = tag
        return best_tag

    def get_src_requirement(self, dist, location, find_tags=False):
        """Build the pinned 'svn+URL@rev#egg=name' requirement string for
        the checkout at *location*."""
        repo = self.get_url(location)
        if repo is None:
            return None
        parts = repo.split('/')
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        if parts[-2] in ('tags', 'tag'):
            # It's a tag, perfect!
            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
        elif parts[-2] in ('branches', 'branch'):
            # It's a branch :(
            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
        elif parts[-1] == 'trunk':
            # Trunk :-/
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
            if find_tags:
                tag_url = '/'.join(parts[:-1]) + '/tags'
                tag_revs = self.get_tag_revs(tag_url)
                match = self.find_tag_match(rev, tag_revs)
                if match:
                    # Bug fix: the message has two %s placeholders but only
                    # one argument was supplied, which corrupts the log
                    # record.  Pass the checkout location as the first arg.
                    logger.info(
                        'trunk checkout %s seems to be equivalent to tag %s',
                        location,
                        match,
                    )
                    repo = '%s/%s' % (tag_url, match)
                    full_egg_name = '%s-%s' % (egg_project_name, match)
        else:
            # Don't know what it is
            logger.warning(
                'svn URL does not fit normal structure (tags/branches/trunk): '
                '%s',
                repo,
            )
            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def get_rev_options(url, rev):
    """Build the svn command-line options for pinning *rev* and for any
    credentials embedded in *url* (e.g. svn+http://user:pass@host/...)."""
    rev_options = ['-r', rev] if rev else []

    parts = urllib_parse.urlsplit(url)
    if hasattr(parts, 'username'):
        # >= Python-2.5
        username, password = parts.username, parts.password
    else:
        # Ancient interpreters: pick the credentials out of the netloc
        # component by hand.
        username = password = None
        netloc = parts[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username = auth

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
vcs.register(Subversion)
| gpl-3.0 |
jaingaurav/ansible | examples/scripts/yaml_to_ini.py | 133 | 7634 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible import errors
from ansible import utils
import os
import yaml
import sys
from six import iteritems
class InventoryParserYaml(object):
    ''' Host inventory parser for ansible '''

    def __init__(self, filename=C.DEFAULT_HOST_LIST):
        # Deprecated format: warn loudly, then slurp and parse the file.
        sys.stderr.write("WARNING: YAML inventory files are deprecated in 0.6 and will be removed in 0.7, to migrate" +
            " download and run https://github.com/ansible/ansible/blob/devel/examples/scripts/yaml_to_ini.py\n")

        fh = open(filename)
        data = fh.read()
        fh.close()
        self._hosts = {}
        self._parse(data)

    def _make_host(self, hostname):
        # Return the cached Host for hostname, creating it on first use so
        # every mention of a host shares one object.
        if hostname in self._hosts:
            return self._hosts[hostname]
        else:
            host = Host(hostname)
            self._hosts[hostname] = host
            return host

    # see file 'test/yaml_hosts' for syntax

    def _parse(self, data):
        # FIXME: refactor into subfunctions
        # Two passes over the YAML document: first create all groups (and
        # the hosts mentioned inside them), then process bare host entries.

        all = Group('all')
        ungrouped = Group('ungrouped')
        all.add_child_group(ungrouped)

        self.groups = dict(all=all, ungrouped=ungrouped)
        grouped_hosts = []

        yaml = utils.parse_yaml(data)

        # first add all groups
        for item in yaml:
            if type(item) == dict and 'group' in item:
                group = Group(item['group'])

                for subresult in item.get('hosts',[]):

                    if type(subresult) in [ str, unicode ]:
                        host = self._make_host(subresult)
                        group.add_host(host)
                        grouped_hosts.append(host)
                    elif type(subresult) == dict:
                        host = self._make_host(subresult['host'])
                        # host vars may be a dict, or a list of
                        # single-entry dicts; both are flattened here.
                        vars = subresult.get('vars',{})
                        if type(vars) == list:
                            for subitem in vars:
                                for (k,v) in subitem.items():
                                    host.set_variable(k,v)
                        elif type(vars) == dict:
                            for (k,v) in subresult.get('vars',{}).items():
                                host.set_variable(k,v)
                        else:
                            raise errors.AnsibleError("unexpected type for variable")
                        group.add_host(host)
                        grouped_hosts.append(host)

                # group vars follow the same dict-or-list-of-dicts shape.
                vars = item.get('vars',{})
                if type(vars) == dict:
                    for (k,v) in item.get('vars',{}).items():
                        group.set_variable(k,v)
                elif type(vars) == list:
                    for subitem in vars:
                        if type(subitem) != dict:
                            raise errors.AnsibleError("expected a dictionary")
                        for (k,v) in subitem.items():
                            group.set_variable(k,v)

                self.groups[group.name] = group
                all.add_child_group(group)

        # add host definitions
        for item in yaml:
            if type(item) in [ str, unicode ]:
                host = self._make_host(item)
                if host not in grouped_hosts:
                    ungrouped.add_host(host)

            elif type(item) == dict and 'host' in item:
                host = self._make_host(item['host'])

                vars = item.get('vars', {})
                if type(vars)==list:
                    varlist, vars = vars, {}
                    for subitem in varlist:
                        vars.update(subitem)
                for (k,v) in vars.items():
                    host.set_variable(k,v)

                groups = item.get('groups', {})
                if type(groups) in [ str, unicode ]:
                    groups = [ groups ]
                if type(groups)==list:
                    for subitem in groups:
                        if subitem in self.groups:
                            group = self.groups[subitem]
                        else:
                            group = Group(subitem)
                            self.groups[group.name] = group
                            all.add_child_group(group)
                        group.add_host(host)
                        grouped_hosts.append(host)
                if host not in grouped_hosts:
                    ungrouped.add_host(host)

        # make sure ungrouped.hosts is the complement of grouped_hosts
        ungrouped_hosts = [host for host in ungrouped.hosts if host not in grouped_hosts]
if __name__ == "__main__":
    # Convert a YAML inventory into INI format: write one [group] section
    # per group, spill group/host variables into group_vars/ and
    # host_vars/ files, and save the result next to the original as
    # hosts.new for manual review.

    if len(sys.argv) != 2:
        print "usage: yaml_to_ini.py /path/to/ansible/hosts"
        sys.exit(1)

    result = ""

    original = sys.argv[1]
    yamlp = InventoryParserYaml(filename=sys.argv[1])
    dirname = os.path.dirname(original)

    group_names = [ g.name for g in yamlp.groups.values() ]

    for group_name in sorted(group_names):

        record = yamlp.groups[group_name]

        if group_name == 'all':
            continue

        hosts = record.hosts
        result = result + "[%s]\n" % record.name
        for h in hosts:
            result = result + "%s\n" % h.name
        result = result + "\n"

        groupfiledir = os.path.join(dirname, "group_vars")
        if not os.path.exists(groupfiledir):
            print "* creating: %s" % groupfiledir
            os.makedirs(groupfiledir)
        groupfile = os.path.join(groupfiledir, group_name)
        print "* writing group variables for %s into %s" % (group_name, groupfile)
        groupfh = open(groupfile, 'w')
        groupfh.write(yaml.dump(record.get_variables()))
        groupfh.close()

    for (host_name, host_record) in iteritems(yamlp._hosts):
        hostfiledir = os.path.join(dirname, "host_vars")
        if not os.path.exists(hostfiledir):
            print "* creating: %s" % hostfiledir
            os.makedirs(hostfiledir)
        hostfile = os.path.join(hostfiledir, host_record.name)
        print "* writing host variables for %s into %s" % (host_record.name, hostfile)
        hostfh = open(hostfile, 'w')
        hostfh.write(yaml.dump(host_record.get_variables()))
        hostfh.close()

    # also need to keep a hash of variables per each host
    # and variables per each group
    # and write those to disk

    newfilepath = os.path.join(dirname, "hosts.new")
    fdh = open(newfilepath, 'w')
    fdh.write(result)
    fdh.close()

    print "* COMPLETE: review your new inventory file and replace your original when ready"
    print "* new inventory file saved as %s" % newfilepath
    print "* edit group specific variables in %s/group_vars/" % dirname
    print "* edit host specific variables in %s/host_vars/" % dirname

    # now need to write this to disk as (oldname).new
    # and inform the user
| gpl-3.0 |
shakalaca/ASUS_ZenFone_A500CG_A600CG | linux/kernel/tools/perf/python/twatch.py | 7370 | 1334 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
	"""Open a counting event on every CPU/thread and dump task/comm events forever."""
	cpus = perf.cpu_map()
	threads = perf.thread_map()
	# NOTE(review): SAMPLE_TID appears twice in the sample_type mask below; the
	# duplicate is harmless (bitwise OR) but looks unintended -- confirm upstream.
	evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
			   wakeup_events = 1, watermark = 1,
			   sample_id_all = 1,
			   sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
	evsel.open(cpus = cpus, threads = threads);
	evlist = perf.evlist(cpus, threads)
	evlist.add(evsel)
	evlist.mmap()
	# Block until the kernel signals data, then drain one event per CPU per pass.
	while True:
		evlist.poll(timeout = -1)
		for cpu in cpus:
			event = evlist.read_on_cpu(cpu)
			if not event:
				continue
			# Trailing comma: Python 2 print without newline, so the event
			# repr printed next lands on the same output line.
			print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
			                                        event.sample_pid,
			                                        event.sample_tid),
			print event
if __name__ == '__main__':
main()
| gpl-2.0 |
keithroe/vtkoptix | ThirdParty/Twisted/twisted/conch/test/keydata.py | 47 | 12370 | # -*- test-case-name: twisted.conch.test.test_keys -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Data used by test_keys as well as others.
"""
RSAData = {
'n':long('1062486685755247411169438309495398947372127791189432809481'
'382072971106157632182084539383569281493520117634129557550415277'
'516685881326038852354459895734875625093273594925884531272867425'
'864910490065695876046999646807138717162833156501L'),
'e':35L,
'd':long('6678487739032983727350755088256793383481946116047863373882'
'973030104095847973715959961839578340816412167985957218887914482'
'713602371850869127033494910375212470664166001439410214474266799'
'85974425203903884190893469297150446322896587555L'),
'q':long('3395694744258061291019136154000709371890447462086362702627'
'9704149412726577280741108645721676968699696898960891593323L'),
'p':long('3128922844292337321766351031842562691837301298995834258844'
'4720539204069737532863831050930719431498338835415515173887L')}
DSAData = {
'y':long('2300663509295750360093768159135720439490120577534296730713'
'348508834878775464483169644934425336771277908527130096489120714'
'610188630979820723924744291603865L'),
'g':long('4451569990409370769930903934104221766858515498655655091803'
'866645719060300558655677517139568505649468378587802312867198352'
'1161998270001677664063945776405L'),
'p':long('7067311773048598659694590252855127633397024017439939353776'
'608320410518694001356789646664502838652272205440894335303988504'
'978724817717069039110940675621677L'),
'q':1184501645189849666738820838619601267690550087703L,
'x':863951293559205482820041244219051653999559962819L}
publicRSA_openssh = ("ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBE"
"vLi8DVPrJ3/c9k2I/Az64fxjHf9imyRJbixtQhlH9lfNjUIx+4LmrJH5QNRsFporcHDKOTwTTYL"
"h5KmRpslkYHRivcJSkbh/C+BR3utDS555mV comment")
privateRSA_openssh = """-----BEGIN RSA PRIVATE KEY-----
MIIByAIBAAJhAK8ycfDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW
4sbUIZR/ZXzY1CMfuC5qyR+UDUbBaaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fw
vgUd7rQ0ueeZlQIBIwJgbh+1VZfr7WftK5lu7MHtqE1S1vPWZQYE3+VUn8yJADyb
Z4fsZaCrzW9lkIqXkE3GIY+ojdhZhkO1gbG0118sIgphwSWKRxK0mvh6ERxKqIt1
xJEJO74EykXZV4oNJ8sjAjEA3J9r2ZghVhGN6V8DnQrTk24Td0E8hU8AcP0FVP+8
PQm/g/aXf2QQkQT+omdHVEJrAjEAy0pL0EBH6EVS98evDCBtQw22OZT52qXlAwZ2
gyTriKFVoqjeEjt3SZKKqXHSApP/AjBLpF99zcJJZRq2abgYlf9lv1chkrWqDHUu
DZttmYJeEfiFBBavVYIF1dOlZT0G8jMCMBc7sOSZodFnAiryP+Qg9otSBjJ3bQML
pSTqy7c3a2AScC/YyOwkDaICHnnD3XyjMwIxALRzl0tQEKMXs6hH8ToUdlLROCrP
EhQ0wahUTCk1gKA4uPD6TMTChavbh4K63OvbKg==
-----END RSA PRIVATE KEY-----"""
# some versions of OpenSSH generate these (slightly different keys)
privateRSA_openssh_alternate = """-----BEGIN RSA PRIVATE KEY-----
MIIBzjCCAcgCAQACYQCvMnHw5g6cmbN/i18ES8uLwNU+snf9z2TYj8DPrh/GMd/2
KbJEluLG1CGUf2V82NQjH7guaskflA1GwWmitwcMo5PBNNguHkqZGmyWRgdGK9wl
KRuH8L4FHe60NLnnmZUCASMCYG4ftVWX6+1n7SuZbuzB7ahNUtbz1mUGBN/lVJ/M
iQA8m2eH7GWgq81vZZCKl5BNxiGPqI3YWYZDtYGxtNdfLCIKYcElikcStJr4ehEc
SqiLdcSRCTu+BMpF2VeKDSfLIwIxANyfa9mYIVYRjelfA50K05NuE3dBPIVPAHD9
BVT/vD0Jv4P2l39kEJEE/qJnR1RCawIxAMtKS9BAR+hFUvfHrwwgbUMNtjmU+dql
5QMGdoMk64ihVaKo3hI7d0mSiqlx0gKT/wIwS6Rffc3CSWUatmm4GJX/Zb9XIZK1
qgx1Lg2bbZmCXhH4hQQWr1WCBdXTpWU9BvIzAjAXO7DkmaHRZwIq8j/kIPaLUgYy
d20DC6Uk6su3N2tgEnAv2MjsJA2iAh55w918ozMCMQC0c5dLUBCjF7OoR/E6FHZS
0TgqzxIUNMGoVEwpNYCgOLjw+kzEwoWr24eCutzr2yowAA==
------END RSA PRIVATE KEY------"""
# encrypted with the passphrase 'encrypted'
privateRSA_openssh_encrypted = """-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: DES-EDE3-CBC,FFFFFFFFFFFFFFFF
30qUR7DYY/rpVJu159paRM1mUqt/IMibfEMTKWSjNhCVD21hskftZCJROw/WgIFt
ncusHpJMkjgwEpho0KyKilcC7zxjpunTex24Meb5pCdXCrYft8AyUkRdq3dugMqT
4nuWuWxziluBhKQ2M9tPGcEOeulU4vVjceZt2pZhZQVBf08o3XUv5/7RYd24M9md
WIo+5zdj2YQkI6xMFTP954O/X32ME1KQt98wgNEy6mxhItbvf00mH3woALwEKP3v
PSMxxtx3VKeDKd9YTOm1giKkXZUf91vZWs0378tUBrU4U5qJxgryTjvvVKOtofj6
4qQy6+r6M6wtwVlXBgeRm2gBPvL3nv6MsROp3E6ztBd/e7A8fSec+UTq3ko/EbGP
0QG+IG5tg8FsdITxQ9WAIITZL3Rc6hA5Ymx1VNhySp3iSiso8Jof27lku4pyuvRV
ko/B3N2H7LnQrGV0GyrjeYocW/qZh/PCsY48JBFhlNQexn2mn44AJW3y5xgbhvKA
3mrmMD1hD17ZvZxi4fPHjbuAyM1vFqhQx63eT9ijbwJ91svKJl5O5MIv41mCRonm
hxvOXw8S0mjSasyofptzzQCtXxFLQigXbpQBltII+Ys=
-----END RSA PRIVATE KEY-----"""
# encrypted with the passphrase 'testxp'. NB: this key was generated by
# OpenSSH, so it doesn't use the same key data as the other keys here.
privateRSA_openssh_encrypted_aes = """-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: AES-128-CBC,0673309A6ACCAB4B77DEE1C1E536AC26
4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
-----END RSA PRIVATE KEY-----"""
publicRSA_lsh = ("{KDEwOnB1YmxpYy1rZXkoMTQ6cnNhLXBrY3MxLXNoYTEoMTpuOTc6AK8yc"
"fDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW4sbUIZR/ZXzY1CMfuC5qyR+UDUbB"
"aaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fwvgUd7rQ0ueeZlSkoMTplMTojKSkp}")
privateRSA_lsh = ("(11:private-key(9:rsa-pkcs1(1:n97:\x00\xaf2q\xf0\xe6\x0e"
"\x9c\x99\xb3\x7f\x8b_\x04K\xcb\x8b\xc0\xd5>\xb2w\xfd\xcfd\xd8\x8f\xc0\xcf"
"\xae\x1f\xc61\xdf\xf6)\xb2D\x96\xe2\xc6\xd4!\x94\x7fe|\xd8\xd4#\x1f\xb8.j"
"\xc9\x1f\x94\rF\xc1i\xa2\xb7\x07\x0c\xa3\x93\xc14\xd8.\x1eJ\x99\x1al\x96F"
"\x07F+\xdc%)\x1b\x87\xf0\xbe\x05\x1d\xee\xb44\xb9\xe7\x99\x95)(1:e1:#)(1:d9"
"6:n\x1f\xb5U\x97\xeb\xedg\xed+\x99n\xec\xc1\xed\xa8MR\xd6\xf3\xd6e\x06\x04"
"\xdf\xe5T\x9f\xcc\x89\x00<\x9bg\x87\xece\xa0\xab\xcdoe\x90\x8a\x97\x90M\xc6"
'!\x8f\xa8\x8d\xd8Y\x86C\xb5\x81\xb1\xb4\xd7_,"\na\xc1%\x8aG\x12\xb4\x9a\xf8'
"z\x11\x1cJ\xa8\x8bu\xc4\x91\t;\xbe\x04\xcaE\xd9W\x8a\r\'\xcb#)(1:p49:\x00"
"\xdc\x9fk\xd9\x98!V\x11\x8d\xe9_\x03\x9d\n\xd3\x93n\x13wA<\x85O\x00p\xfd"
"\x05T\xff\xbc=\t\xbf\x83\xf6\x97\x7fd\x10\x91\x04\xfe\xa2gGTBk)(1:q49:\x00"
"\xcbJK\xd0@G\xe8ER\xf7\xc7\xaf\x0c mC\r\xb69\x94\xf9\xda\xa5\xe5\x03\x06v"
"\x83$\xeb\x88\xa1U\xa2\xa8\xde\x12;wI\x92\x8a\xa9q\xd2\x02\x93\xff)(1:a48:K"
"\xa4_}\xcd\xc2Ie\x1a\xb6i\xb8\x18\x95\xffe\xbfW!\x92\xb5\xaa\x0cu.\r\x9bm"
"\x99\x82^\x11\xf8\x85\x04\x16\xafU\x82\x05\xd5\xd3\xa5e=\x06\xf23)(1:b48:"
"\x17;\xb0\xe4\x99\xa1\xd1g\x02*\xf2?\xe4 \xf6\x8bR\x062wm\x03\x0b\xa5$\xea"
"\xcb\xb77k`\x12p/\xd8\xc8\xec$\r\xa2\x02\x1ey\xc3\xdd|\xa33)(1:c49:\x00\xb4"
"s\x97KP\x10\xa3\x17\xb3\xa8G\xf1:\x14vR\xd18*\xcf\x12\x144\xc1\xa8TL)5\x80"
"\xa08\xb8\xf0\xfaL\xc4\xc2\x85\xab\xdb\x87\x82\xba\xdc\xeb\xdb*)))")
privateRSA_agentv3 = ("\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01#\x00\x00\x00`"
"n\x1f\xb5U\x97\xeb\xedg\xed+\x99n\xec\xc1\xed\xa8MR\xd6\xf3\xd6e\x06\x04"
"\xdf\xe5T\x9f\xcc\x89\x00<\x9bg\x87\xece\xa0\xab\xcdoe\x90\x8a\x97\x90M\xc6"
'!\x8f\xa8\x8d\xd8Y\x86C\xb5\x81\xb1\xb4\xd7_,"\na\xc1%\x8aG\x12\xb4\x9a\xf8'
"z\x11\x1cJ\xa8\x8bu\xc4\x91\t;\xbe\x04\xcaE\xd9W\x8a\r\'\xcb#\x00\x00\x00a"
"\x00\xaf2q\xf0\xe6\x0e\x9c\x99\xb3\x7f\x8b_\x04K\xcb\x8b\xc0\xd5>\xb2w\xfd"
"\xcfd\xd8\x8f\xc0\xcf\xae\x1f\xc61\xdf\xf6)\xb2D\x96\xe2\xc6\xd4!\x94\x7fe|"
"\xd8\xd4#\x1f\xb8.j\xc9\x1f\x94\rF\xc1i\xa2\xb7\x07\x0c\xa3\x93\xc14\xd8."
"\x1eJ\x99\x1al\x96F\x07F+\xdc%)\x1b\x87\xf0\xbe\x05\x1d\xee\xb44\xb9\xe7"
"\x99\x95\x00\x00\x001\x00\xb4s\x97KP\x10\xa3\x17\xb3\xa8G\xf1:\x14vR\xd18*"
"\xcf\x12\x144\xc1\xa8TL)5\x80\xa08\xb8\xf0\xfaL\xc4\xc2\x85\xab\xdb\x87\x82"
"\xba\xdc\xeb\xdb*\x00\x00\x001\x00\xcbJK\xd0@G\xe8ER\xf7\xc7\xaf\x0c mC\r"
"\xb69\x94\xf9\xda\xa5\xe5\x03\x06v\x83$\xeb\x88\xa1U\xa2\xa8\xde\x12;wI\x92"
"\x8a\xa9q\xd2\x02\x93\xff\x00\x00\x001\x00\xdc\x9fk\xd9\x98!V\x11\x8d\xe9_"
"\x03\x9d\n\xd3\x93n\x13wA<\x85O\x00p\xfd\x05T\xff\xbc=\t\xbf\x83\xf6\x97"
"\x7fd\x10\x91\x04\xfe\xa2gGTBk")
publicDSA_openssh = ("ssh-dss AAAAB3NzaC1kc3MAAABBAIbwTOSsZ7Bl7U1KyMNqV13Tu7"
"yRAtTr70PVI3QnfrPumf2UzCgpL1ljbKxSfAi05XvrE/1vfCFAsFYXRZLhQy0AAAAVAM965Akmo"
"6eAi7K+k9qDR4TotFAXAAAAQADZlpTW964haQWS4vC063NGdldT6xpUGDcDRqbm90CoPEa2RmNO"
"uOqi8lnbhYraEzypYH3K4Gzv/bxCBnKtHRUAAABAK+1osyWBS0+P90u/rAuko6chZ98thUSY2kL"
"SHp6hLKyy2bjnT29h7haELE+XHfq2bM9fckDx2FLOSIJzy83VmQ== comment")
privateDSA_openssh = """-----BEGIN DSA PRIVATE KEY-----
MIH4AgEAAkEAhvBM5KxnsGXtTUrIw2pXXdO7vJEC1OvvQ9UjdCd+s+6Z/ZTMKCkv
WWNsrFJ8CLTle+sT/W98IUCwVhdFkuFDLQIVAM965Akmo6eAi7K+k9qDR4TotFAX
AkAA2ZaU1veuIWkFkuLwtOtzRnZXU+saVBg3A0am5vdAqDxGtkZjTrjqovJZ24WK
2hM8qWB9yuBs7/28QgZyrR0VAkAr7WizJYFLT4/3S7+sC6SjpyFn3y2FRJjaQtIe
nqEsrLLZuOdPb2HuFoQsT5cd+rZsz19yQPHYUs5IgnPLzdWZAhUAl1TqdmlAG/b4
nnVchGiO9sML8MM=
-----END DSA PRIVATE KEY-----"""
publicDSA_lsh = ("{KDEwOnB1YmxpYy1rZXkoMzpkc2EoMTpwNjU6AIbwTOSsZ7Bl7U1KyMNqV"
"13Tu7yRAtTr70PVI3QnfrPumf2UzCgpL1ljbKxSfAi05XvrE/1vfCFAsFYXRZLhQy0pKDE6cTIx"
"OgDPeuQJJqOngIuyvpPag0eE6LRQFykoMTpnNjQ6ANmWlNb3riFpBZLi8LTrc0Z2V1PrGlQYNwN"
"Gpub3QKg8RrZGY0646qLyWduFitoTPKlgfcrgbO/9vEIGcq0dFSkoMTp5NjQ6K+1osyWBS0+P90"
"u/rAuko6chZ98thUSY2kLSHp6hLKyy2bjnT29h7haELE+XHfq2bM9fckDx2FLOSIJzy83VmSkpK"
"Q==}")
privateDSA_lsh = ("(11:private-key(3:dsa(1:p65:\x00\x86\xf0L\xe4\xacg\xb0e"
"\xedMJ\xc8\xc3jW]\xd3\xbb\xbc\x91\x02\xd4\xeb\xefC\xd5#t'~\xb3\xee\x99\xfd"
"\x94\xcc()/Ycl\xacR|\x08\xb4\xe5{\xeb\x13\xfdo|!@\xb0V\x17E\x92\xe1C-)(1:q2"
"1:\x00\xcfz\xe4\t&\xa3\xa7\x80\x8b\xb2\xbe\x93\xda\x83G\x84\xe8\xb4P\x17)(1"
":g64:\x00\xd9\x96\x94\xd6\xf7\xae!i\x05\x92\xe2\xf0\xb4\xebsFvWS\xeb\x1aT"
"\x187\x03F\xa6\xe6\xf7@\xa8<F\xb6FcN\xb8\xea\xa2\xf2Y\xdb\x85\x8a\xda\x13<"
"\xa9`}\xca\xe0l\xef\xfd\xbcB\x06r\xad\x1d\x15)(1:y64:+\xedh\xb3%\x81KO\x8f"
"\xf7K\xbf\xac\x0b\xa4\xa3\xa7!g\xdf-\x85D\x98\xdaB\xd2\x1e\x9e\xa1,\xac\xb2"
"\xd9\xb8\xe7Ooa\xee\x16\x84,O\x97\x1d\xfa\xb6l\xcf_r@\xf1\xd8R\xceH\x82s"
"\xcb\xcd\xd5\x99)(1:x21:\x00\x97T\xeavi@\x1b\xf6\xf8\x9eu\\\x84h\x8e\xf6"
"\xc3\x0b\xf0\xc3)))")
privateDSA_agentv3 = ("\x00\x00\x00\x07ssh-dss\x00\x00\x00A\x00\x86\xf0L\xe4"
"\xacg\xb0e\xedMJ\xc8\xc3jW]\xd3\xbb\xbc\x91\x02\xd4\xeb\xefC\xd5#t'~\xb3"
"\xee\x99\xfd\x94\xcc()/Ycl\xacR|\x08\xb4\xe5{\xeb\x13\xfdo|!@\xb0V\x17E\x92"
"\xe1C-\x00\x00\x00\x15\x00\xcfz\xe4\t&\xa3\xa7\x80\x8b\xb2\xbe\x93\xda\x83G"
"\x84\xe8\xb4P\x17\x00\x00\x00@\x00\xd9\x96\x94\xd6\xf7\xae!i\x05\x92\xe2"
"\xf0\xb4\xebsFvWS\xeb\x1aT\x187\x03F\xa6\xe6\xf7@\xa8<F\xb6FcN\xb8\xea\xa2"
"\xf2Y\xdb\x85\x8a\xda\x13<\xa9`}\xca\xe0l\xef\xfd\xbcB\x06r\xad\x1d\x15\x00"
"\x00\x00@+\xedh\xb3%\x81KO\x8f\xf7K\xbf\xac\x0b\xa4\xa3\xa7!g\xdf-\x85D\x98"
"\xdaB\xd2\x1e\x9e\xa1,\xac\xb2\xd9\xb8\xe7Ooa\xee\x16\x84,O\x97\x1d\xfa\xb6"
"l\xcf_r@\xf1\xd8R\xceH\x82s\xcb\xcd\xd5\x99\x00\x00\x00\x15\x00\x97T\xeavi@"
"\x1b\xf6\xf8\x9eu\\\x84h\x8e\xf6\xc3\x0b\xf0\xc3")
# Public fixture names exported to test modules; the encrypted private-key
# variants defined above are deliberately not listed here.
__all__ = ['DSAData', 'RSAData', 'privateDSA_agentv3', 'privateDSA_lsh',
    'privateDSA_openssh', 'privateRSA_agentv3', 'privateRSA_lsh',
    'privateRSA_openssh', 'publicDSA_lsh', 'publicDSA_openssh',
    'publicRSA_lsh', 'publicRSA_openssh', 'privateRSA_openssh_alternate']
| bsd-3-clause |
tchernomax/ansible | lib/ansible/modules/cloud/ovirt/ovirt_quota_facts.py | 16 | 3955 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_quota_facts
short_description: Retrieve facts about one or more oVirt/RHV quotas
version_added: "2.3"
author: "Red Hat"
description:
- "Retrieve facts about one or more oVirt/RHV quotas."
notes:
- "This module creates a new top-level C(ovirt_quotas) fact, which
contains a list of quotas."
options:
data_center:
description:
- "Name of the datacenter where quota resides."
required: true
name:
description:
- "Name of the quota, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about quota named C<myquota> in Default datacenter:
- ovirt_quota_facts:
data_center: Default
name: myquota
- debug:
var: ovirt_quotas
'''
RETURN = '''
ovirt_quotas:
description: "List of dictionaries describing the quotas. Quota attributes are mapped to dictionary keys,
all quotas attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/quota."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
search_by_name,
)
def main():
    """Module entry point: collect oVirt/RHV quota facts and exit via Ansible.

    Looks up the requested datacenter, lists its quotas (optionally filtered
    by a glob pattern in ``name``), and publishes them as the
    ``ovirt_quotas`` fact. Errors are reported through ``module.fail_json``.
    """
    spec = ovirt_facts_full_argument_spec(
        data_center=dict(required=True),
        name=dict(default=None),
    )
    module = AnsibleModule(spec)
    check_sdk(module)
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        dcs_service = connection.system_service().data_centers_service()
        dc_name = module.params['data_center']
        datacenter = search_by_name(dcs_service, dc_name)
        if datacenter is None:
            raise Exception("Datacenter '%s' was not found." % dc_name)
        quotas_service = dcs_service.service(datacenter.id).quotas_service()
        # Optional glob filter on the quota name.
        pattern = module.params['name']
        all_quotas = quotas_service.list()
        if pattern:
            selected = [q for q in all_quotas if fnmatch.fnmatch(q.name, pattern)]
        else:
            selected = all_quotas
        facts = [
            get_dict_of_struct(
                struct=quota,
                connection=connection,
                fetch_nested=module.params.get('fetch_nested'),
                attributes=module.params.get('nested_attributes'),
            )
            for quota in selected
        ]
        module.exit_json(
            changed=False,
            ansible_facts=dict(ovirt_quotas=facts),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
| gpl-3.0 |
synconics/odoo | addons/payment_buckaroo/models/buckaroo.py | 33 | 9169 | # -*- coding: utf-'8' "-*-"
from hashlib import sha1
import logging
import urllib
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_buckaroo.controllers.main import BuckarooController
from openerp.osv import osv, fields
from openerp.tools.float_utils import float_compare
_logger = logging.getLogger(__name__)
def normalize_keys_upper(data):
    """Return a copy of *data* whose keys are all upper-cased.

    Buckaroo parameter names are case insensitive; normalizing everything
    to upper case lets callers test for a parameter's presence by checking
    the upper-case key only.
    """
    return {key.upper(): value for key, value in data.items()}
class AcquirerBuckaroo(osv.Model):
    # Payment acquirer extension adding Buckaroo gateway support.
    _inherit = 'payment.acquirer'
    def _get_buckaroo_urls(self, cr, uid, environment, context=None):
        """Return the Buckaroo HTML checkout URL for the given environment
        ('prod' uses the live gateway, anything else the test gateway)."""
        if environment == 'prod':
            return {
                'buckaroo_form_url': 'https://checkout.buckaroo.nl/html/',
            }
        else:
            return {
                'buckaroo_form_url': 'https://testcheckout.buckaroo.nl/html/',
            }
    def _get_providers(self, cr, uid, context=None):
        # Register 'buckaroo' in the list of selectable payment providers.
        providers = super(AcquirerBuckaroo, self)._get_providers(cr, uid, context=context)
        providers.append(['buckaroo', 'Buckaroo'])
        return providers
    # Gateway credentials, only visible to internal users.
    _columns = {
        'brq_websitekey': fields.char('WebsiteKey', required_if_provider='buckaroo', groups='base.group_user'),
        'brq_secretkey': fields.char('SecretKey', required_if_provider='buckaroo', groups='base.group_user'),
    }
    def _buckaroo_generate_digital_sign(self, acquirer, inout, values):
        """ Generate the shasign for incoming or outgoing communications.
        :param browse acquirer: the payment.acquirer browse record. It should
                                have a shakey in shaky out
        :param string inout: 'in' (openerp contacting buckaroo) or 'out' (buckaroo
                             contacting openerp).
        :param dict values: transaction values
        :return string: shasign
        """
        assert inout in ('in', 'out')
        assert acquirer.provider == 'buckaroo'
        # Fixed, ordered parameter list used for outgoing ('in') signatures.
        keys = "add_returndata Brq_amount Brq_culture Brq_currency Brq_invoicenumber Brq_return Brq_returncancel Brq_returnerror Brq_returnreject brq_test Brq_websitekey".split()
        def get_value(key):
            # Missing/falsy values sign as the empty string.
            if values.get(key):
                return values[key]
            return ''
        values = dict(values or {})
        if inout == 'out':
            # Incoming data: drop the signature itself before recomputing.
            for key in values.keys():
                # case insensitive keys
                if key.upper() == 'BRQ_SIGNATURE':
                    del values[key]
                    break
            # NOTE: tuple-unpacking lambda below is Python 2-only syntax.
            items = sorted(values.items(), key=lambda (x, y): x.lower())
            sign = ''.join('%s=%s' % (k, urllib.unquote_plus(v)) for k, v in items)
        else:
            sign = ''.join('%s=%s' % (k,get_value(k)) for k in keys)
        #Add the pre-shared secret key at the end of the signature
        sign = sign + acquirer.brq_secretkey
        if isinstance(sign, str):
            # TODO: remove me? should not be used
            sign = urlparse.parse_qsl(sign)
        shasign = sha1(sign.encode('utf-8')).hexdigest()
        return shasign
    def buckaroo_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
        """Build the Brq_* form fields (including the signature) posted to
        Buckaroo's hosted checkout page."""
        base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
        acquirer = self.browse(cr, uid, id, context=context)
        buckaroo_tx_values = dict(tx_values)
        buckaroo_tx_values.update({
            'Brq_websitekey': acquirer.brq_websitekey,
            'Brq_amount': tx_values['amount'],
            'Brq_currency': tx_values['currency'] and tx_values['currency'].name or '',
            'Brq_invoicenumber': tx_values['reference'],
            'brq_test': False if acquirer.environment == 'prod' else True,
            'Brq_return': '%s' % urlparse.urljoin(base_url, BuckarooController._return_url),
            'Brq_returncancel': '%s' % urlparse.urljoin(base_url, BuckarooController._cancel_url),
            'Brq_returnerror': '%s' % urlparse.urljoin(base_url, BuckarooController._exception_url),
            'Brq_returnreject': '%s' % urlparse.urljoin(base_url, BuckarooController._reject_url),
            # Buckaroo expects BCP-47 style culture tags ('en-US', not 'en_US').
            'Brq_culture': (partner_values.get('lang') or 'en_US').replace('_', '-'),
        })
        if buckaroo_tx_values.get('return_url'):
            buckaroo_tx_values['add_returndata'] = buckaroo_tx_values.pop('return_url')
        else:
            buckaroo_tx_values['add_returndata'] = ''
        # Sign last so every field above is covered by the signature.
        buckaroo_tx_values['Brq_signature'] = self._buckaroo_generate_digital_sign(acquirer, 'in', buckaroo_tx_values)
        return partner_values, buckaroo_tx_values
    def buckaroo_get_form_action_url(self, cr, uid, id, context=None):
        # Resolve the checkout URL matching this acquirer's environment.
        acquirer = self.browse(cr, uid, id, context=context)
        return self._get_buckaroo_urls(cr, uid, acquirer.environment, context=context)['buckaroo_form_url']
class TxBuckaroo(osv.Model):
    # Payment transaction extension handling Buckaroo feedback data.
    _inherit = 'payment.transaction'
    # Buckaroo status codes, grouped by the transaction state they map to.
    _buckaroo_valid_tx_status = [190]
    _buckaroo_pending_tx_status = [790, 791, 792, 793]
    _buckaroo_cancel_tx_status = [890, 891]
    _buckaroo_error_tx_status = [490, 491, 492]
    _buckaroo_reject_tx_status = [690]
    # Buckaroo-side transaction identifier, stored on validation.
    _columns = {
        'buckaroo_txnid': fields.char('Transaction ID'),
    }
    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------
    def _buckaroo_form_get_tx_from_data(self, cr, uid, data, context=None):
        """ Given a data dict coming from buckaroo, verify it and find the related
        transaction record.

        Raises ValidationError when mandatory fields are missing, when the
        reference matches zero or several transactions, or when the recomputed
        signature does not match the one received. """
        origin_data = dict(data)
        data = normalize_keys_upper(data)
        reference, pay_id, shasign = data.get('BRQ_INVOICENUMBER'), data.get('BRQ_PAYMENT'), data.get('BRQ_SIGNATURE')
        if not reference or not pay_id or not shasign:
            error_msg = 'Buckaroo: received data with missing reference (%s) or pay_id (%s) or shashign (%s)' % (reference, pay_id, shasign)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        tx_ids = self.search(cr, uid, [('reference', '=', reference)], context=context)
        if not tx_ids or len(tx_ids) > 1:
            error_msg = 'Buckaroo: received data for reference %s' % (reference)
            if not tx_ids:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        tx = self.pool['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
        # Verify the signature against the un-normalized data (signature
        # computation is sensitive to the original key casing).
        shasign_check = self.pool['payment.acquirer']._buckaroo_generate_digital_sign(tx.acquirer_id, 'out', origin_data)
        if shasign_check.upper() != shasign.upper():
            error_msg = 'Buckaroo: invalid shasign, received %s, computed %s, for data %s' % (shasign, shasign_check, data)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return tx
    def _buckaroo_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Compare feedback fields against the stored transaction; return a
        list of (name, received, expected) tuples for any mismatch."""
        invalid_parameters = []
        data = normalize_keys_upper(data)
        if tx.acquirer_reference and data.get('BRQ_TRANSACTIONS') != tx.acquirer_reference:
            invalid_parameters.append(('Transaction Id', data.get('BRQ_TRANSACTIONS'), tx.acquirer_reference))
        # Check the amount paid (2-decimal currency comparison).
        if float_compare(float(data.get('BRQ_AMOUNT', '0.0')), tx.amount, 2) != 0:
            invalid_parameters.append(('Amount', data.get('BRQ_AMOUNT'), '%.2f' % tx.amount))
        if data.get('BRQ_CURRENCY') != tx.currency_id.name:
            invalid_parameters.append(('Currency', data.get('BRQ_CURRENCY'), tx.currency_id.name))
        return invalid_parameters
    def _buckaroo_form_validate(self, cr, uid, tx, data, context=None):
        """Map the Buckaroo status code onto the transaction state.

        Returns True for done/pending/cancel transitions, False when the
        code is unknown or an error/reject code (state set to 'error')."""
        data = normalize_keys_upper(data)
        status_code = int(data.get('BRQ_STATUSCODE','0'))
        if status_code in self._buckaroo_valid_tx_status:
            tx.write({
                'state': 'done',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        elif status_code in self._buckaroo_pending_tx_status:
            tx.write({
                'state': 'pending',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        elif status_code in self._buckaroo_cancel_tx_status:
            tx.write({
                'state': 'cancel',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        else:
            error = 'Buckaroo: feedback error'
            _logger.info(error)
            tx.write({
                'state': 'error',
                'state_message': error,
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return False
| agpl-3.0 |
CapOM/ChromiumGStreamerBackend | build/android/buildbot/bb_host_steps.py | 43 | 4328 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import json
import sys
import bb_utils
import bb_annotations
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
VALID_HOST_TESTS = set(['check_webview_licenses'])
DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT)
# Short hand for RunCmd which is used extensively in this file.
RunCmd = bb_utils.RunCmd
def SrcPath(*path):
  # Join *path components beneath the Chromium source root.
  return os.path.join(constants.DIR_SOURCE_ROOT, *path)
def CheckWebViewLicenses(_):
  # Scan android_webview third-party licenses; warning_code=1 downgrades a
  # failed scan to a buildbot warning instead of a step failure.
  bb_annotations.PrintNamedStep('check_licenses')
  RunCmd([SrcPath('android_webview', 'tools', 'webview_licenses.py'), 'scan'],
         warning_code=1)
def RunHooks(build_type):
  # Refresh landmine markers, clobber the output directory when either the
  # bot requested it (BUILDBOT_CLOBBER) or a landmine was triggered, then
  # run gclient hooks.
  RunCmd([SrcPath('build', 'landmines.py')])
  build_path = SrcPath('out', build_type)
  landmine_path = os.path.join(build_path, '.landmines_triggered')
  clobber_env = os.environ.get('BUILDBOT_CLOBBER')
  if clobber_env or os.path.isfile(landmine_path):
    bb_annotations.PrintNamedStep('Clobber')
    if not clobber_env:
      # Only landmines triggered the clobber; say which ones.
      print 'Clobbering due to triggered landmines:'
      with open(landmine_path) as f:
        print f.read()
    RunCmd(['rm', '-rf', build_path])
  bb_annotations.PrintNamedStep('runhooks')
  RunCmd(['gclient', 'runhooks'], halt_on_failure=True)
def Compile(options):
  """Run gclient hooks for the target, then compile it with ninja + goma.

  Optional comma-separated targets in options.build_targets are forwarded
  to the slave compile script as --build-args.
  """
  RunHooks(options.target)
  compile_cmd = [
      os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'),
      '--build-tool=ninja',
      '--compiler=goma',
      '--target=%s' % options.target,
      '--goma-dir=%s' % bb_utils.GOMA_DIR,
  ]
  bb_annotations.PrintNamedStep('compile')
  if options.build_targets:
    targets = options.build_targets.split(',')
    compile_cmd += ['--build-args', ' '.join(targets)]
  RunCmd(compile_cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT)
def ZipBuild(options):
  # Archive build output via the slave zip_build.py helper, excluding bulky
  # intermediates; buildbot properties are forwarded on the command line.
  bb_annotations.PrintNamedStep('zip_build')
  RunCmd([
      os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'),
      '--src-dir', constants.DIR_SOURCE_ROOT,
      '--exclude-files', 'lib.target,gen,android_webview,jingle_unittests']
      + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
def ExtractBuild(options):
  # Unpack a previously archived build via the slave extract_build.py helper.
  bb_annotations.PrintNamedStep('extract_build')
  RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')]
         + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
def BisectPerfRegression(options):
  # Prepare then run the perf-regression bisect scripts one level above the
  # source root, optionally pointing them at an extra source override file.
  args = []
  if options.extra_src:
    args = ['--extra_src', options.extra_src]
  RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'),
          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)])
  RunCmd([SrcPath('tools', 'run-bisect-perf-regression.py'),
          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir),
          '--build-properties=%s' % json.dumps(options.build_properties)] +
          args)
def GetHostStepCmds():
  # Registry of (step name, handler) pairs selectable through --steps.
  return [
      ('compile', Compile),
      ('extract_build', ExtractBuild),
      ('check_webview_licenses', CheckWebViewLicenses),
      ('bisect_perf_regression', BisectPerfRegression),
      ('zip_build', ZipBuild)
  ]
def GetHostStepsOptParser():
  # Build the option parser for host-side steps on top of the shared
  # buildbot parser from bb_utils.
  parser = bb_utils.GetParser()
  parser.add_option('--steps', help='Comma separated list of host tests.')
  parser.add_option('--build-targets', default='',
                    help='Comma separated list of build targets.')
  parser.add_option('--experimental', action='store_true',
                    help='Indicate whether to compile experimental targets.')
  parser.add_option('--extra_src', default='',
                    help='Path to extra source file. If this is supplied, '
                    'bisect script will use it to override default behavior.')
  return parser
def main(argv):
  """Parse host-step options from *argv* and run the requested steps."""
  parser = GetHostStepsOptParser()
  options, leftover = parser.parse_args(argv[1:])
  if leftover:
    return sys.exit('Unused args %s' % leftover)
  # Factory properties supply the defaults for target and extra_src.
  options.target = options.factory_properties.get('target', 'Debug')
  options.extra_src = options.factory_properties.get('extra_src', '')
  if options.steps:
    bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
ophiry/dvc | dvc/data_cloud.py | 1 | 16388 | import base64
import hashlib
import os
import threading
import configparser
import tempfile
from boto.s3.connection import S3Connection
from google.cloud import storage as gc
import dvc
from dvc.logger import Logger
from dvc.exceptions import DvcException
from dvc.config import ConfigError
from dvc.progress import progress
from dvc.utils import copyfile
from dvc.utils import cached_property
from dvc.system import System
from dvc.utils import map_progress
class DataCloudError(DvcException):
    # Cloud-sync failure; prefixes every message with 'Data sync error:'.
    def __init__(self, msg):
        super(DataCloudError, self).__init__('Data sync error: {}'.format(msg))
def sizeof_fmt(num, suffix='B'):
    """Render a byte count as a human-readable string, e.g. 1536 -> '1.5KB'.

    Scales by powers of 1024 through the standard binary prefixes; anything
    beyond zetta falls through to the 'Y' (yotta) suffix.
    """
    value = num
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, 'Y', suffix)
def percent_cb(name, complete, total):
    # Transfer-progress callback: log the byte counts and move the progress
    # bar entry keyed by the file's basename.
    Logger.debug('{}: {} transferred out of {}'.format(
        name,
        sizeof_fmt(complete),
        sizeof_fmt(total)))
    progress.update_target(os.path.basename(name), complete, total)
def create_cb(name):
    """Return a boto-style (current, total) progress callback bound to *name*."""
    def _callback(current, total):
        return percent_cb(name, current, total)
    return _callback
def file_md5(fname):
    """Return the MD5 of file *fname* as a (hexdigest, digest) pair.

    Reads in ~1 MB chunks so arbitrarily large files hash in constant memory.
    """
    digest = hashlib.md5()
    with open(fname, "rb") as stream:
        while True:
            chunk = stream.read(1024 * 1000)
            if not chunk:
                break
            digest.update(chunk)
    return (digest.hexdigest(), digest.digest())
class DataCloudBase(object):
    """ Base class for DataCloud backends.

    Subclasses implement push/pull/remove; this class provides storage-path
    parsing and the sync/create_directory plumbing shared by all backends.
    """
    def __init__(self, settings, config, cloud_config):
        self._settings = settings
        self._config = config            # global dvc config section
        self._cloud_config = cloud_config  # cloud-specific config section
        self._lock = threading.Lock()    # serializes directory creation
    @property
    def storage_path(self):
        """ get storage path
        Precedence: Global StoragePath first, then the cloud-specific one.
        Raises ConfigError when neither is set.
        """
        path = self._config['Global'].get('StoragePath', None)
        if path:
            return path
        path = self._cloud_config.get('StoragePath', None)
        if path is None:
            raise ConfigError('invalid StoragePath: not set for Data or cloud specific')
        return path
    def _storage_path_parts(self):
        # Split 'bucket/prefix...' into at most two parts.
        return self.storage_path.strip('/').split('/', 1)
    @property
    def storage_bucket(self):
        """ First component of StoragePath: the bucket name. """
        return self._storage_path_parts()[0]
    @property
    def storage_prefix(self):
        # Everything after the bucket, or '' when the path is bucket-only.
        parts = self._storage_path_parts()
        if len(parts) > 1:
            return parts[1]
        return ''
    def cache_file_key(self, file):
        # Object key for a cache file: '<prefix>/<file>' without edge slashes.
        return '{}/{}'.format(self.storage_prefix, file).strip('/')
    def sanity_check(self):
        # Hook for subclasses to validate credentials/config; no-op here.
        pass
    def push(self, item):
        # Upload hook; implemented by subclasses.
        pass
    def pull(self, item):
        # Download hook; implemented by subclasses.
        pass
    def sync(self, item):
        # Push when the cache file exists locally, otherwise pull it down.
        if os.path.isfile(item.resolved_cache.dvc):
            self.push(item)
        else:
            self.create_directory(item)
            self.pull(item)
    def create_directory(self, item):
        # Ensure the parent directory of the cache file exists; locked so
        # concurrent syncs don't race on makedirs.
        self._lock.acquire()
        try:
            dir = os.path.dirname(item.cache.relative)
            if not os.path.exists(dir):
                Logger.debug(u'Creating directory {}'.format(dir))
                try:
                    os.makedirs(dir)
                except OSError as ex:
                    raise DataCloudError(u'Cannot create directory {}: {}'.format(dir, ex))
            elif not os.path.isdir(dir):
                msg = u'File {} cannot be synced because {} is not a directory'
                raise DataCloudError(msg.format(item.cache.relative, dir))
        finally:
            self._lock.release()
    def remove(self, item):
        # Deletion hook; implemented by subclasses.
        pass
class DataCloudLOCAL(DataCloudBase):
    # "Cloud" backend that simply copies cache files to/from a local path.
    def push(self, item):
        # Copy the resolved cache file into the configured storage path.
        Logger.debug('sync to cloud ' + item.resolved_cache.dvc + " " + self.storage_path)
        copyfile(item.resolved_cache.dvc, self.storage_path)
    def pull(self, item):
        # Copy from the storage path back into the local cache.
        Logger.debug('sync from cloud ' + self.storage_path + " " + item.resolved_cache.dvc)
        copyfile(self.storage_path, item.resolved_cache.dvc)
    def remove(self, item):
        # NOTE(review): this deletes the local cache file, not anything under
        # storage_path, despite the 'rm from cloud' log line -- confirm intent.
        Logger.debug('rm from cloud ' + item.resolved_cache.dvc)
        os.remove(item.resolved_cache.dvc)
class DataCloudAWS(DataCloudBase):
    """ DataCloud class for Amazon Web Services """

    @property
    def aws_access_key_id(self):
        # Access-key half of the credential pair, or None when no
        # credentials were found.
        if self.aws_creds:
            return self.aws_creds[0]
        return None

    @property
    def aws_secret_access_key(self):
        # Secret-key half of the credential pair, or None when no
        # credentials were found.
        if self.aws_creds:
            return self.aws_creds[1]
        return None

    @cached_property
    def aws_creds(self):
        # Resolved once per instance; see get_aws_credentials() for the
        # search order.
        return self.get_aws_credentials()

    @property
    def aws_region_host(self):
        """ get the region host needed for s3 access
        See notes http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
        """
        region = self._cloud_config['Region']
        if region is None or region == '':
            return 's3.amazonaws.com'
        if region == 'us-east-1':
            return 's3.amazonaws.com'
        return 's3.%s.amazonaws.com' % region

    def get_aws_credentials(self):
        """ gets aws credentials, looking in various places
        Params:
        Searches:
        1 any override in dvc.conf [AWS] CredentialPath;
        2 ~/.aws/credentials
        Returns:
        if successfully found, (access_key_id, secret)
        None otherwise
        """
        # FIX: It won't work in Windows.
        default = os.path.expanduser('~/.aws/credentials')
        paths = self.credential_paths(default)
        for path in paths:
            try:
                cc = configparser.SafeConfigParser()
                # use readfp(open( ... to aid mocking.
                cc.readfp(open(path, 'r'))
                if 'default' in cc.keys():
                    access_key = cc['default'].get('aws_access_key_id', None)
                    secret = cc['default'].get('aws_secret_access_key', None)
                    if access_key is not None and secret is not None:
                        return (access_key, secret)
            except Exception as e:
                # A missing/unreadable candidate just means we try the next
                # path; log it instead of swallowing the error silently.
                Logger.debug('Unable to read AWS credentials from "{}": {}'.format(path, e))
        return None

    def credential_paths(self, default):
        """Return candidate credential-file paths: the configured
        CredentialPath override when it exists, otherwise *default*."""
        paths = []
        credpath = self._cloud_config.get('CredentialPath', None)
        if credpath is not None and len(credpath) > 0:
            credpath = os.path.expanduser(credpath)
            if os.path.isfile(credpath):
                paths.append(credpath)
            else:
                Logger.warn('AWS CredentialPath "%s" not found; falling back to default "%s"' % (credpath, default))
                paths.append(default)
        else:
            paths.append(default)
        return paths

    def sanity_check(self):
        """Warn (do not fail) when no credential file is found; boto can
        still authenticate via environment variables or an IAM role."""
        creds = self.get_aws_credentials()
        if creds is None:
            Logger.info("can't find aws credentials, assuming environment variables or iam role")
        self._aws_creds = creds

    def _get_bucket_aws(self):
        """ get a bucket object, aws """
        if all([self.aws_access_key_id, self.aws_secret_access_key, self.aws_region_host]):
            conn = S3Connection(self.aws_access_key_id, self.aws_secret_access_key, host=self.aws_region_host)
        else:
            # Fall back to boto's own credential resolution (env vars,
            # IAM role, etc.).
            conn = S3Connection()
        bucket_name = self.storage_bucket
        bucket = conn.lookup(bucket_name)
        if bucket is None:
            raise DataCloudError('Storage path is not setup correctly')
        return bucket

    def _cmp_checksum(self, key, fname):
        # The ETag (quotes stripped) equals the plain md5 only for
        # non-multipart uploads, which is what push() produces.
        md5_cloud = key.etag[1:-1]
        md5_local = file_md5(fname)[0]
        return md5_cloud == md5_local

    def pull(self, item):
        """ sync from cloud, aws version """
        bucket = self._get_bucket_aws()
        fname = item.resolved_cache.dvc
        key_name = self.cache_file_key(fname)
        key = bucket.get_key(key_name)
        if not key:
            Logger.error('File "{}" does not exist in the cloud'.format(key_name))
            return
        if self._cmp_checksum(key, fname):
            Logger.debug('File "{}" matches with "{}".'.format(fname, key_name))
            return
        Logger.info('Downloading cache file from S3 "{}/{}"'.format(bucket.name, key_name))
        # Download into a temp file and rename, so a failed transfer never
        # leaves a truncated cache file behind.  Initialize temp_file
        # before the try block: the old code raised NameError in the
        # except handler when NamedTemporaryFile itself failed.
        temp_file = None
        try:
            temp_file = tempfile.NamedTemporaryFile(dir=item.resolved_cache.dirname, delete=False)
            key.get_contents_to_filename(temp_file.name,
                                         cb=create_cb(item.resolved_cache.relative))
            os.rename(temp_file.name, item.resolved_cache.relative)
        except Exception as exc:
            Logger.error('Failed to download "{}": {}'.format(key_name, exc))
            if temp_file and os.path.exists(temp_file.name):
                os.remove(temp_file.name)
            return
        progress.finish_target(os.path.basename(item.resolved_cache.relative))
        Logger.info('Downloading completed')

    def push(self, data_item):
        """ push, aws version """
        aws_key = self.cache_file_key(data_item.resolved_cache.dvc)
        bucket = self._get_bucket_aws()
        key = bucket.get_key(aws_key)
        if key:
            Logger.debug('File already uploaded to the cloud. Checksum validation...')
            if self._cmp_checksum(key, data_item.resolved_cache.dvc):
                Logger.debug('File checksum matches. No uploading is needed.')
                return
            Logger.debug('Checksum miss-match. Re-uploading is required.')
        key = bucket.new_key(aws_key)
        try:
            key.set_contents_from_filename(data_item.resolved_cache.relative,
                                           cb=create_cb(data_item.resolved_cache.relative))
        except Exception as exc:
            Logger.error('Failed to upload "{}": {}'.format(data_item.resolved_cache.relative, exc))
            return
        progress.finish_target(os.path.basename(data_item.resolved_cache.relative))

    def remove(self, data_item):
        """Delete the item's file from S3.  Best effort: logs and returns
        when credentials, the bucket, or the key are missing."""
        aws_file_name = self.cache_file_key(data_item.cache.dvc)
        Logger.debug(u'[Cmd-Remove] Remove from cloud {}.'.format(aws_file_name))
        if not self.aws_access_key_id or not self.aws_secret_access_key:
            Logger.debug('[Cmd-Remove] Unable to check cache file in the cloud')
            return
        conn = S3Connection(self.aws_access_key_id, self.aws_secret_access_key)
        bucket_name = self.storage_bucket
        bucket = conn.lookup(bucket_name)
        if bucket:
            key = bucket.get_key(aws_file_name)
            if not key:
                Logger.warn('[Cmd-Remove] S3 remove warning: file "{}" does not exist in S3'.format(aws_file_name))
            else:
                key.delete()
                Logger.info('[Cmd-Remove] File "{}" was removed from S3'.format(aws_file_name))
class DataCloudGCP(DataCloudBase):
    """ DataCloud class for Google Cloud Platform """

    @property
    def gc_project_name(self):
        """Project name read from the cloud section of the config."""
        return self._cloud_config.get('ProjectName', None)

    def sanity_check(self):
        """Fail early when no usable GCP project name is configured."""
        project = self.gc_project_name
        if project is None or len(project) < 1:
            raise ConfigError('can\'t read google cloud project name. Please set ProjectName in section GC.')

    def _get_bucket_gc(self):
        """ get a bucket object, gc """
        client = gc.Client(project=self.gc_project_name)
        bucket = client.bucket(self.storage_bucket)
        if bucket.exists():
            return bucket
        raise DataCloudError('sync up: google cloud bucket {} doesn\'t exist'.format(self.storage_bucket))

    def pull(self, item):
        """ sync from cloud, gcp version """
        bucket = self._get_bucket_gc()
        target = item.resolved_cache.dvc
        key = self.cache_file_key(target)
        blob = bucket.get_blob(key)
        if not blob:
            Logger.error('File "{}" does not exist in the cloud'.format(key))
            return
        Logger.info('Downloading cache file from gc "{}/{}"'.format(bucket.name, key))
        blob.download_to_filename(target)
        Logger.info('Downloading completed')

    def push(self, data_item):
        """ push, gcp version """
        bucket = self._get_bucket_gc()
        local_file = data_item.resolved_cache.dvc
        blob_name = self.cache_file_key(local_file)
        existing = bucket.get_blob(blob_name)
        if existing is not None and existing.exists():
            # Skip the upload when the remote md5 already matches.
            b64_encoded_md5 = base64.b64encode(file_md5(local_file)[1])
            if existing.md5_hash == b64_encoded_md5:
                Logger.debug('checksum %s matches. Skipping upload' % data_item.cache.relative)
                return
            Logger.debug('checksum %s mismatch. re-uploading' % data_item.cache.relative)
        blob = bucket.blob(blob_name)
        blob.upload_from_filename(data_item.resolved_cache.relative)
        Logger.info('uploading %s completed' % data_item.resolved_cache.relative)

    def remove(self, item):
        raise Exception('NOT IMPLEMENTED YET')
class DataCloud(object):
    """ Generic class to do initial config parsing and redirect to proper DataCloud methods """

    # Maps the [Global] Cloud config value to its backend implementation.
    CLOUD_MAP = {
        'AWS': DataCloudAWS,
        'GCP': DataCloudGCP,
        'LOCAL': DataCloudLOCAL,
    }

    def __init__(self, settings):
        assert isinstance(settings, dvc.settings.Settings)
        # To handle ConfigI case
        if not hasattr(settings.config, '_config'):
            self._cloud = DataCloudBase(None, None, None)
            return
        self._settings = settings
        self._config = self._settings.config._config
        self.typ = self._config['Global'].get('Cloud', '').strip().upper()
        if self.typ not in self.CLOUD_MAP:
            raise ConfigError('Wrong cloud type %s specified' % self.typ)
        if self.typ not in self._config:
            raise ConfigError('Can\'t find cloud section \'[%s]\' in config' % self.typ)
        self._cloud = self.CLOUD_MAP[self.typ](self._settings, self._config, self._config[self.typ])
        self.sanity_check()

    def sanity_check(self):
        """ sanity check a config
        check that we have a cloud and storagePath
        if aws, check can read credentials
        if google, check ProjectName
        Returns:
        (T,) if good
        (F, issues) if bad
        """
        key = 'Cloud'
        # NOTE: compare with the same capitalization __init__ uses; the old
        # key.lower() membership test could never match a case-preserving
        # config.  Also drop the references to nonexistent self.file /
        # self.CONFIG, which turned this ConfigError into an AttributeError.
        if key not in self._config['Global'] or len(self._config['Global'][key]) < 1:
            raise ConfigError('Please set %s in section Global in the config file' % key)
        # now that a cloud is chosen, can check StoragePath
        sp = self._cloud.storage_path
        if sp is None or len(sp) == 0:
            raise ConfigError('Please set StoragePath = bucket/{optional path} '
                              'either in Global or a cloud specific section of the config file')
        self._cloud.sanity_check()

    def _collect_dir(self, d):
        """Build a data item for every file underneath directory *d*."""
        targets = []
        for root, dirs, files in os.walk(d):
            for f in files:
                path = os.path.join(root, f)
                item = self._settings.path_factory.data_item(path)
                targets.append(item)
        return targets

    def _collect_target(self, target):
        """Resolve one target (data-file symlink or directory) into a list
        of data items; unknown targets are logged and yield []."""
        if System.islink(target):
            item = self._settings.path_factory.data_item(target)
            return [item]
        elif os.path.isdir(target):
            return self._collect_dir(target)
        Logger.warn('Target "{}" does not exist'.format(target))
        return []

    def _collect_targets(self, targets):
        collected = []
        for t in targets:
            collected += self._collect_target(t)
        return collected

    def _map_targets(self, f, targets, jobs):
        """Apply *f* to every collected data item with up to *jobs* workers."""
        collected = self._collect_targets(targets)
        map_progress(f, collected, jobs)

    def sync(self, targets, jobs=1):
        self._map_targets(self._cloud.sync, targets, jobs)

    def push(self, targets, jobs=1):
        self._map_targets(self._cloud.push, targets, jobs)

    def pull(self, targets, jobs=1):
        self._map_targets(self._cloud.pull, targets, jobs)

    def remove(self, item):
        return self._cloud.remove(item)
| apache-2.0 |
asnorkin/sentiment_analysis | site/lib/python2.7/site-packages/numpy/distutils/command/__init__.py | 264 | 1098 | """distutils.command
Package containing implementation of all the standard Distutils
commands.
"""
from __future__ import division, absolute_import, print_function
def test_na_writable_attributes_deletion():
    # Neither attribute of an NA instance may be deleted.
    a = np.NA(2)
    for name in ('payload', 'dtype'):
        assert_raises(AttributeError, delattr, a, name)
# Historical CVS revision marker.
__revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $"
# Stock distutils commands that numpy re-exports without overriding.
distutils_all = [ #'build_py',
'clean',
'install_clib',
'install_scripts',
'bdist',
'bdist_dumb',
'bdist_wininst',
]
# Pre-import the plain distutils command modules listed above so they are
# available as attributes of distutils.command.
__import__('distutils.command', globals(), locals(), distutils_all)
# numpy-specific command overrides plus the pass-through distutils ones.
__all__ = ['build',
'config_compiler',
'config',
'build_src',
'build_py',
'build_ext',
'build_clib',
'build_scripts',
'install',
'install_data',
'install_headers',
'install_lib',
'bdist_rpm',
'sdist',
] + distutils_all
| mit |
fnordahl/nova | nova/tests/unit/objects/test_objects.py | 1 | 61175 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
import contextlib
import copy
import datetime
import hashlib
import inspect
import os
import pprint
import fixtures
import mock
from oslo_log import log
from oslo_utils import timeutils
from oslo_versionedobjects import base as ovo_base
from oslo_versionedobjects import exception as ovo_exc
from oslo_versionedobjects import fixture
import six
from testtools import matchers
from nova import context
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit import fake_notifier
from nova import utils
LOG = log.getLogger(__name__)
class MyOwnedObject(base.NovaPersistentObject, base.NovaObject):
    """Minimal owned object used as the target of MyObj's object fields."""
    VERSION = '1.0'
    fields = {'baz': fields.IntegerField()}
class MyObj(base.NovaPersistentObject, base.NovaObject,
            base.NovaObjectDictCompat):
    """Fully-featured fake object used throughout these tests.

    Exercises field defaults, read-only fields, object-valued fields,
    lazy loading, remotable methods and version compatibility.
    """
    VERSION = '1.6'
    fields = {'foo': fields.IntegerField(default=1),
              'bar': fields.StringField(),
              'missing': fields.StringField(),
              'readonly': fields.IntegerField(read_only=True),
              'rel_object': fields.ObjectField('MyOwnedObject', nullable=True),
              'rel_objects': fields.ListOfObjectsField('MyOwnedObject',
                                                       nullable=True),
              'mutable_default': fields.ListOfStringsField(default=[]),
              }

    @staticmethod
    def _from_db_object(context, obj, db_obj):
        # Hydrate a fresh object from a db row; 'readonly' is fixed at 1.
        self = MyObj()
        self.foo = db_obj['foo']
        self.bar = db_obj['bar']
        self.missing = db_obj['missing']
        self.readonly = 1
        self._context = context
        return self

    def obj_load_attr(self, attrname):
        # Lazy-load hook: any unset attribute materializes as 'loaded!'.
        setattr(self, attrname, 'loaded!')

    @base.remotable_classmethod
    def query(cls, context):
        # Return a pristine (no pending changes) object.
        obj = cls(context=context, foo=1, bar='bar')
        obj.obj_reset_changes()
        return obj

    @base.remotable
    def marco(self):
        return 'polo'

    @base.remotable
    def _update_test(self):
        self.bar = 'updated'

    @base.remotable
    def save(self):
        self.obj_reset_changes()

    @base.remotable
    def refresh(self):
        self.foo = 321
        self.bar = 'refreshed'
        self.obj_reset_changes()

    @base.remotable
    def modify_save_modify(self):
        # Mutate, persist (clearing changes), then mutate again so tests
        # can observe exactly which changes survive a save.
        self.bar = 'meow'
        self.save()
        self.foo = 42
        self.rel_object = MyOwnedObject(baz=42)

    def obj_make_compatible(self, primitive, target_version):
        super(MyObj, self).obj_make_compatible(primitive, target_version)
        # NOTE(danms): Simulate an older version that had a different
        # format for the 'bar' attribute
        if target_version == '1.1' and 'bar' in primitive:
            primitive['bar'] = 'old%s' % primitive['bar']
class MyObjDiffVers(MyObj):
    """Same object as MyObj but registered at an older version."""
    VERSION = '1.5'

    @classmethod
    def obj_name(cls):
        # Report the canonical name so this registers as a 'MyObj' version.
        return 'MyObj'
class MyObj2(base.NovaObject):
    """Alternate 'MyObj' flavor (no VERSION override) for name/version tests."""
    fields = {
        'bar': fields.StringField(),
    }

    @classmethod
    def obj_name(cls):
        # Masquerade as 'MyObj' in the registry.
        return 'MyObj'

    @base.remotable_classmethod
    def query(cls, *args, **kwargs):
        pass
class RandomMixInWithNoFields(object):
    """Used to test object inheritance using a mixin that has no fields."""
    # Intentionally empty: TestSubclassedObject mixes this in to prove that
    # field inheritance ignores ancestors without a 'fields' dict.
    pass
@base.NovaObjectRegistry.register_if(False)
class TestSubclassedObject(RandomMixInWithNoFields, MyObj):
    """MyObj subclass (plus a field-less mixin) adding one extra field."""
    fields = {'new_field': fields.StringField()}
class TestObjToPrimitive(test.NoDBTestCase):
    """Tests for base.obj_to_primitive() serialization."""

    def test_obj_to_primitive_list(self):
        # A list-of-objects container serializes to a list of the
        # contained objects' primitives.
        @base.NovaObjectRegistry.register_if(False)
        class MyObjElement(base.NovaObject):
            fields = {'foo': fields.IntegerField()}

            def __init__(self, foo):
                super(MyObjElement, self).__init__()
                self.foo = foo

        @base.NovaObjectRegistry.register_if(False)
        class MyList(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('MyObjElement')}

        mylist = MyList()
        mylist.objects = [MyObjElement(1), MyObjElement(2), MyObjElement(3)]
        self.assertEqual([1, 2, 3],
                         [x['foo'] for x in base.obj_to_primitive(mylist)])

    def test_obj_to_primitive_dict(self):
        # A plain object serializes to a dict of its set fields.
        base.NovaObjectRegistry.register(MyObj)
        myobj = MyObj(foo=1, bar='foo')
        self.assertEqual({'foo': 1, 'bar': 'foo'},
                         base.obj_to_primitive(myobj))

    def test_obj_to_primitive_recursive(self):
        # Objects nested inside a list are primitivized recursively.
        base.NovaObjectRegistry.register(MyObj)

        class MyList(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('MyObj')}

        mylist = MyList(objects=[MyObj(), MyObj()])
        for i, value in enumerate(mylist):
            value.foo = i
        self.assertEqual([{'foo': 0}, {'foo': 1}],
                         base.obj_to_primitive(mylist))

    def test_obj_to_primitive_with_ip_addr(self):
        # IP address/network fields serialize to their string forms.
        @base.NovaObjectRegistry.register_if(False)
        class TestObject(base.NovaObject):
            fields = {'addr': fields.IPAddressField(),
                      'cidr': fields.IPNetworkField()}

        obj = TestObject(addr='1.2.3.4', cidr='1.1.1.1/16')
        self.assertEqual({'addr': '1.2.3.4', 'cidr': '1.1.1.1/16'},
                         base.obj_to_primitive(obj))
class TestObjMakeList(test.NoDBTestCase):
    """Tests for base.obj_make_list()."""

    def test_obj_make_list(self):
        class MyList(base.ObjectListBase, base.NovaObject):
            fields = {
                'objects': fields.ListOfObjectsField('MyObj'),
            }

        db_objs = [{'foo': 1, 'bar': 'baz', 'missing': 'banana'},
                   {'foo': 2, 'bar': 'bat', 'missing': 'apple'},
                   ]
        mylist = base.obj_make_list('ctxt', MyList(), MyObj, db_objs)
        # Each db row becomes one hydrated MyObj carrying the context.
        self.assertEqual(2, len(mylist))
        self.assertEqual('ctxt', mylist._context)
        for index, item in enumerate(mylist):
            self.assertEqual(db_objs[index]['foo'], item.foo)
            self.assertEqual(db_objs[index]['bar'], item.bar)
            self.assertEqual(db_objs[index]['missing'], item.missing)
def compare_obj(test, obj, db_obj, subs=None, allow_missing=None,
                comparators=None):
    """Compare a NovaObject and a dict-like database object.

    This automatically converts TZ-aware datetimes and iterates over
    the fields of the object.

    :param:test: The TestCase doing the comparison
    :param:obj: The NovaObject to examine
    :param:db_obj: The dict-like database object to use as reference
    :param:subs: A dict of objkey=dbkey field substitutions
    :param:allow_missing: A list of fields that may not be in db_obj
    :param:comparators: Map of comparator functions to use for certain fields
    """
    subs = {} if subs is None else subs
    allow_missing = [] if allow_missing is None else allow_missing
    comparators = {} if comparators is None else comparators

    for field_name in obj.fields:
        # Fields listed in allow_missing may legitimately be unset.
        if field_name in allow_missing and not obj.obj_attr_is_set(field_name):
            continue
        actual = getattr(obj, field_name)
        db_val = db_obj[subs.get(field_name, field_name)]
        if isinstance(actual, datetime.datetime):
            # DB values are naive; drop tzinfo before comparing.
            actual = actual.replace(tzinfo=None)
        if field_name in comparators:
            comparators[field_name](db_val, actual)
        else:
            test.assertEqual(db_val, actual)
class _BaseTestCase(test.TestCase):
    """Shared setup for the local and remote object test variants."""

    def setUp(self):
        super(_BaseTestCase, self).setUp()
        self.remote_object_calls = list()
        self.user_id = 'fake-user'
        self.project_id = 'fake-project'
        self.context = context.RequestContext(self.user_id, self.project_id)
        fake_notifier.stub_notifier(self.stubs)
        self.addCleanup(fake_notifier.reset)
        # NOTE(danms): register these here instead of at import time
        # so that they're not always present
        base.NovaObjectRegistry.register(MyObj)
        base.NovaObjectRegistry.register(MyObjDiffVers)
        base.NovaObjectRegistry.register(MyOwnedObject)

    def compare_obj(self, obj, db_obj, subs=None, allow_missing=None,
                    comparators=None):
        # Thin wrapper binding the module-level compare_obj() to this case.
        compare_obj(self, obj, db_obj, subs=subs, allow_missing=allow_missing,
                    comparators=comparators)

    def str_comparator(self, expected, obj_val):
        """Compare an object field to a string in the db by performing
        a simple coercion on the object field value.
        """
        self.assertEqual(expected, str(obj_val))

    def assertNotIsInstance(self, obj, cls, msg=None):
        """Python < v2.7 compatibility. Assert 'not isinstance(obj, cls)."""
        try:
            f = super(_BaseTestCase, self).assertNotIsInstance
        except AttributeError:
            # Older unittest lacks the assertion; emulate it via matchers.
            self.assertThat(obj,
                            matchers.Not(matchers.IsInstance(cls)),
                            message=msg or '')
        else:
            f(obj, cls, msg=msg)
class _LocalTest(_BaseTestCase):
    """Runs the shared tests with object indirection (conductor) disabled."""

    def setUp(self):
        super(_LocalTest, self).setUp()
        # Just in case
        self.useFixture(nova_fixtures.IndirectionAPIFixture(None))
@contextlib.contextmanager
def things_temporarily_local():
    """Context manager disabling object indirection for its body.

    The previous indirection API is restored on exit even when the body
    raises, so a failing test cannot leak the disabled state into later
    tests (the original version skipped restoration on exceptions).
    """
    # Temporarily go non-remote so the conductor handles
    # this request directly
    _api = base.NovaObject.indirection_api
    base.NovaObject.indirection_api = None
    try:
        yield
    finally:
        base.NovaObject.indirection_api = _api
class FakeIndirectionHack(fixture.FakeIndirectionAPI):
    """Indirection API that round-trips everything through serialization.

    Forces objects, args and kwargs through serialize/deserialize before
    dispatching locally, approximating what real RPC indirection does.
    """

    def object_action(self, context, objinst, objmethod, args, kwargs):
        # Round-trip the target object and arguments so version/backport
        # handling is exercised exactly as it would be over the wire.
        objinst = self._ser.deserialize_entity(
            context, self._ser.serialize_entity(
                context, objinst))
        objmethod = six.text_type(objmethod)
        args = self._ser.deserialize_entity(
            None, self._ser.serialize_entity(None, args))
        kwargs = self._ser.deserialize_entity(
            None, self._ser.serialize_entity(None, kwargs))
        original = objinst.obj_clone()
        # Invoke the method with indirection disabled to avoid recursing
        # back into this fake API.
        with mock.patch('nova.objects.base.NovaObject.'
                        'indirection_api', new=None):
            result = getattr(objinst, objmethod)(*args, **kwargs)
        updates = self._get_changes(original, objinst)
        updates['obj_what_changed'] = objinst.obj_what_changed()
        return updates, result

    def object_class_action(self, context, objname, objmethod, objver,
                            args, kwargs):
        objname = six.text_type(objname)
        objmethod = six.text_type(objmethod)
        objver = six.text_type(objver)
        args = self._ser.deserialize_entity(
            None, self._ser.serialize_entity(None, args))
        kwargs = self._ser.deserialize_entity(
            None, self._ser.serialize_entity(None, kwargs))
        cls = base.NovaObject.obj_class_from_name(objname, objver)
        with mock.patch('nova.objects.base.NovaObject.'
                        'indirection_api', new=None):
            result = getattr(cls, objmethod)(context, *args, **kwargs)
        # Backport any object result to the requested version before
        # handing it back, mirroring the real conductor behavior.
        manifest = ovo_base.obj_tree_get_versions(objname)
        return (base.NovaObject.obj_from_primitive(
            result.obj_to_primitive(target_version=objver,
                                    version_manifest=manifest),
            context=context)
            if isinstance(result, base.NovaObject) else result)
class IndirectionFixture(fixtures.Fixture):
    """Fixture that installs FakeIndirectionHack as the indirection API."""

    def setUp(self):
        super(IndirectionFixture, self).setUp()
        ser = base.NovaObjectSerializer()
        self.indirection_api = FakeIndirectionHack(serializer=ser)
        # MonkeyPatch restores the original attribute on fixture cleanup.
        self.useFixture(fixtures.MonkeyPatch(
            'nova.objects.base.NovaObject.indirection_api',
            self.indirection_api))
class _RemoteTest(_BaseTestCase):
    """Runs the shared tests with fake RPC indirection enabled."""

    def setUp(self):
        super(_RemoteTest, self).setUp()
        self.useFixture(IndirectionFixture())
class _TestObject(object):
def test_object_attrs_in_init(self):
# Spot check a few
objects.Instance
objects.InstanceInfoCache
objects.SecurityGroup
# Now check the test one in this file. Should be newest version
self.assertEqual('1.6', objects.MyObj.VERSION)
def test_hydration_type_error(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.5',
'nova_object.data': {'foo': 'a'}}
self.assertRaises(ValueError, MyObj.obj_from_primitive, primitive)
def test_hydration(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.5',
'nova_object.data': {'foo': 1}}
real_method = MyObj._obj_from_primitive
def _obj_from_primitive(*args):
return real_method(*args)
with mock.patch.object(MyObj, '_obj_from_primitive') as ofp:
ofp.side_effect = _obj_from_primitive
obj = MyObj.obj_from_primitive(primitive)
ofp.assert_called_once_with(None, '1.5', primitive)
self.assertEqual(obj.foo, 1)
def test_hydration_version_different(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.2',
'nova_object.data': {'foo': 1}}
obj = MyObj.obj_from_primitive(primitive)
self.assertEqual(obj.foo, 1)
self.assertEqual('1.2', obj.VERSION)
def test_hydration_bad_ns(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'foo',
'nova_object.version': '1.5',
'nova_object.data': {'foo': 1}}
self.assertRaises(ovo_exc.UnsupportedObjectError,
MyObj.obj_from_primitive, primitive)
def test_hydration_additional_unexpected_stuff(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.5.1',
'nova_object.data': {
'foo': 1,
'unexpected_thing': 'foobar'}}
obj = MyObj.obj_from_primitive(primitive)
self.assertEqual(1, obj.foo)
self.assertFalse(hasattr(obj, 'unexpected_thing'))
# NOTE(danms): If we call obj_from_primitive() directly
# with a version containing .z, we'll get that version
# in the resulting object. In reality, when using the
# serializer, we'll get that snipped off (tested
# elsewhere)
self.assertEqual('1.5.1', obj.VERSION)
def test_dehydration(self):
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.data': {'foo': 1}}
obj = MyObj(foo=1)
obj.obj_reset_changes()
self.assertEqual(obj.obj_to_primitive(), expected)
def test_object_property(self):
obj = MyObj(foo=1)
self.assertEqual(obj.foo, 1)
def test_object_property_type_error(self):
obj = MyObj()
def fail():
obj.foo = 'a'
self.assertRaises(ValueError, fail)
def test_load(self):
obj = MyObj()
self.assertEqual(obj.bar, 'loaded!')
def test_load_in_base(self):
@base.NovaObjectRegistry.register_if(False)
class Foo(base.NovaObject):
fields = {'foobar': fields.IntegerField()}
obj = Foo()
with self.assertRaisesRegex(NotImplementedError, ".*foobar.*"):
obj.foobar
def test_loaded_in_primitive(self):
obj = MyObj(foo=1)
obj.obj_reset_changes()
self.assertEqual(obj.bar, 'loaded!')
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.changes': ['bar'],
'nova_object.data': {'foo': 1,
'bar': 'loaded!'}}
self.assertEqual(obj.obj_to_primitive(), expected)
def test_changes_in_primitive(self):
obj = MyObj(foo=123)
self.assertEqual(obj.obj_what_changed(), set(['foo']))
primitive = obj.obj_to_primitive()
self.assertIn('nova_object.changes', primitive)
obj2 = MyObj.obj_from_primitive(primitive)
self.assertEqual(obj2.obj_what_changed(), set(['foo']))
obj2.obj_reset_changes()
self.assertEqual(obj2.obj_what_changed(), set())
def test_obj_class_from_name(self):
obj = base.NovaObject.obj_class_from_name('MyObj', '1.5')
self.assertEqual('1.5', obj.VERSION)
def test_obj_class_from_name_latest_compatible(self):
obj = base.NovaObject.obj_class_from_name('MyObj', '1.1')
self.assertEqual('1.6', obj.VERSION)
def test_unknown_objtype(self):
self.assertRaises(ovo_exc.UnsupportedObjectError,
base.NovaObject.obj_class_from_name, 'foo', '1.0')
def test_obj_class_from_name_supported_version(self):
error = None
try:
base.NovaObject.obj_class_from_name('MyObj', '1.25')
except ovo_exc.IncompatibleObjectVersion as ex:
error = ex
self.assertIsNotNone(error)
self.assertEqual('1.6', error.kwargs['supported'])
def test_orphaned_object(self):
obj = MyObj.query(self.context)
obj._context = None
self.assertRaises(ovo_exc.OrphanedObjectError,
obj._update_test)
def test_changed_1(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj._update_test()
self.assertEqual(obj.obj_what_changed(), set(['foo', 'bar']))
self.assertEqual(obj.foo, 123)
def test_changed_2(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj.save()
self.assertEqual(obj.obj_what_changed(), set([]))
self.assertEqual(obj.foo, 123)
def test_changed_3(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj.refresh()
self.assertEqual(obj.obj_what_changed(), set([]))
self.assertEqual(obj.foo, 321)
self.assertEqual(obj.bar, 'refreshed')
def test_changed_4(self):
obj = MyObj.query(self.context)
obj.bar = 'something'
self.assertEqual(obj.obj_what_changed(), set(['bar']))
obj.modify_save_modify()
self.assertEqual(obj.obj_what_changed(), set(['foo', 'rel_object']))
self.assertEqual(obj.foo, 42)
self.assertEqual(obj.bar, 'meow')
self.assertIsInstance(obj.rel_object, MyOwnedObject)
def test_changed_with_sub_object(self):
@base.NovaObjectRegistry.register_if(False)
class ParentObject(base.NovaObject):
fields = {'foo': fields.IntegerField(),
'bar': fields.ObjectField('MyObj'),
}
obj = ParentObject()
self.assertEqual(set(), obj.obj_what_changed())
obj.foo = 1
self.assertEqual(set(['foo']), obj.obj_what_changed())
bar = MyObj()
obj.bar = bar
self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
obj.obj_reset_changes()
self.assertEqual(set(), obj.obj_what_changed())
bar.foo = 1
self.assertEqual(set(['bar']), obj.obj_what_changed())
def test_static_result(self):
obj = MyObj.query(self.context)
self.assertEqual(obj.bar, 'bar')
result = obj.marco()
self.assertEqual(result, 'polo')
def test_updates(self):
obj = MyObj.query(self.context)
self.assertEqual(obj.foo, 1)
obj._update_test()
self.assertEqual(obj.bar, 'updated')
def test_base_attributes(self):
dt = datetime.datetime(1955, 11, 5)
obj = MyObj(created_at=dt, updated_at=dt, deleted_at=None,
deleted=False)
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.changes':
['deleted', 'created_at', 'deleted_at', 'updated_at'],
'nova_object.data':
{'created_at': timeutils.isotime(dt),
'updated_at': timeutils.isotime(dt),
'deleted_at': None,
'deleted': False,
}
}
actual = obj.obj_to_primitive()
self.assertJsonEqual(actual, expected)
def test_contains(self):
obj = MyObj()
self.assertNotIn('foo', obj)
obj.foo = 1
self.assertIn('foo', obj)
self.assertNotIn('does_not_exist', obj)
def test_obj_attr_is_set(self):
obj = MyObj(foo=1)
self.assertTrue(obj.obj_attr_is_set('foo'))
self.assertFalse(obj.obj_attr_is_set('bar'))
self.assertRaises(AttributeError, obj.obj_attr_is_set, 'bang')
def test_obj_reset_changes_recursive(self):
obj = MyObj(rel_object=MyOwnedObject(baz=123),
rel_objects=[MyOwnedObject(baz=456)])
self.assertEqual(set(['rel_object', 'rel_objects']),
obj.obj_what_changed())
obj.obj_reset_changes()
self.assertEqual(set(['rel_object']), obj.obj_what_changed())
self.assertEqual(set(['baz']), obj.rel_object.obj_what_changed())
self.assertEqual(set(['baz']), obj.rel_objects[0].obj_what_changed())
obj.obj_reset_changes(recursive=True, fields=['foo'])
self.assertEqual(set(['rel_object']), obj.obj_what_changed())
self.assertEqual(set(['baz']), obj.rel_object.obj_what_changed())
self.assertEqual(set(['baz']), obj.rel_objects[0].obj_what_changed())
obj.obj_reset_changes(recursive=True)
self.assertEqual(set([]), obj.rel_object.obj_what_changed())
self.assertEqual(set([]), obj.obj_what_changed())
def test_get(self):
obj = MyObj(foo=1)
# Foo has value, should not get the default
self.assertEqual(obj.get('foo', 2), 1)
# Foo has value, should return the value without error
self.assertEqual(obj.get('foo'), 1)
# Bar is not loaded, so we should get the default
self.assertEqual(obj.get('bar', 'not-loaded'), 'not-loaded')
# Bar without a default should lazy-load
self.assertEqual(obj.get('bar'), 'loaded!')
# Bar now has a default, but loaded value should be returned
self.assertEqual(obj.get('bar', 'not-loaded'), 'loaded!')
# Invalid attribute should raise AttributeError
self.assertRaises(AttributeError, obj.get, 'nothing')
# ...even with a default
self.assertRaises(AttributeError, obj.get, 'nothing', 3)
def test_object_inheritance(self):
base_fields = base.NovaPersistentObject.fields.keys()
myobj_fields = (['foo', 'bar', 'missing',
'readonly', 'rel_object',
'rel_objects', 'mutable_default'] +
list(base_fields))
myobj3_fields = ['new_field']
self.assertTrue(issubclass(TestSubclassedObject, MyObj))
self.assertEqual(len(myobj_fields), len(MyObj.fields))
self.assertEqual(set(myobj_fields), set(MyObj.fields.keys()))
self.assertEqual(len(myobj_fields) + len(myobj3_fields),
len(TestSubclassedObject.fields))
self.assertEqual(set(myobj_fields) | set(myobj3_fields),
set(TestSubclassedObject.fields.keys()))
def test_obj_as_admin(self):
obj = MyObj(context=self.context)
def fake(*args, **kwargs):
self.assertTrue(obj._context.is_admin)
with mock.patch.object(obj, 'obj_reset_changes') as mock_fn:
mock_fn.side_effect = fake
with obj.obj_as_admin():
obj.save()
self.assertTrue(mock_fn.called)
self.assertFalse(obj._context.is_admin)
def test_obj_as_admin_orphaned(self):
def testme():
obj = MyObj()
with obj.obj_as_admin():
pass
self.assertRaises(exception.OrphanedObjectError, testme)
def test_obj_alternate_context(self):
obj = MyObj(context=self.context)
with obj.obj_alternate_context(mock.sentinel.alt_ctx):
self.assertEqual(mock.sentinel.alt_ctx,
obj._context)
self.assertEqual(self.context, obj._context)
def test_get_changes(self):
obj = MyObj()
self.assertEqual({}, obj.obj_get_changes())
obj.foo = 123
self.assertEqual({'foo': 123}, obj.obj_get_changes())
obj.bar = 'test'
self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
obj.obj_reset_changes()
self.assertEqual({}, obj.obj_get_changes())
def test_obj_fields(self):
@base.NovaObjectRegistry.register_if(False)
class TestObj(base.NovaObject):
fields = {'foo': fields.IntegerField()}
obj_extra_fields = ['bar']
@property
def bar(self):
return 'this is bar'
obj = TestObj()
self.assertEqual(['foo', 'bar'], obj.obj_fields)
def test_obj_constructor(self):
obj = MyObj(context=self.context, foo=123, bar='abc')
self.assertEqual(123, obj.foo)
self.assertEqual('abc', obj.bar)
self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
def test_obj_read_only(self):
obj = MyObj(context=self.context, foo=123, bar='abc')
obj.readonly = 1
self.assertRaises(ovo_exc.ReadOnlyFieldError, setattr,
obj, 'readonly', 2)
def test_obj_mutable_default(self):
obj = MyObj(context=self.context, foo=123, bar='abc')
obj.mutable_default = None
obj.mutable_default.append('s1')
self.assertEqual(obj.mutable_default, ['s1'])
obj1 = MyObj(context=self.context, foo=123, bar='abc')
obj1.mutable_default = None
obj1.mutable_default.append('s2')
self.assertEqual(obj1.mutable_default, ['s2'])
def test_obj_mutable_default_set_default(self):
obj1 = MyObj(context=self.context, foo=123, bar='abc')
obj1.obj_set_defaults('mutable_default')
self.assertEqual(obj1.mutable_default, [])
obj1.mutable_default.append('s1')
self.assertEqual(obj1.mutable_default, ['s1'])
obj2 = MyObj(context=self.context, foo=123, bar='abc')
obj2.obj_set_defaults('mutable_default')
self.assertEqual(obj2.mutable_default, [])
obj2.mutable_default.append('s2')
self.assertEqual(obj2.mutable_default, ['s2'])
def test_obj_repr(self):
    """repr() lists all fields alphabetically; unset ones render as <?>."""
    obj = MyObj(foo=123)
    expected = ('MyObj(bar=<?>,created_at=<?>,deleted=<?>,'
                'deleted_at=<?>,foo=123,missing=<?>,'
                'mutable_default=<?>,readonly=<?>,rel_object=<?>,'
                'rel_objects=<?>,updated_at=<?>)')
    self.assertEqual(expected, repr(obj))
def test_obj_make_obj_compatible(self):
    """_obj_make_obj_compatible() backports, downgrades or drops a
    sub-object according to the parent's obj_relationships map.
    """
    subobj = MyOwnedObject(baz=1)
    subobj.VERSION = '1.2'
    obj = MyObj(rel_object=subobj)
    # rel_object appeared at parent 1.5 carrying child 1.1, and was
    # bumped to child 1.2 at parent 1.7.
    obj.obj_relationships = {
        'rel_object': [('1.5', '1.1'), ('1.7', '1.2')],
    }
    orig_primitive = obj.obj_to_primitive()['nova_object.data']

    def backport(parent_version):
        # Run one backport against a fresh copy of the primitive and
        # return (mock, primitive) for the caller to inspect.
        with mock.patch.object(subobj, 'obj_make_compatible') as compat:
            prim = copy.deepcopy(orig_primitive)
            obj._obj_make_obj_compatible(prim, parent_version,
                                         'rel_object')
        return compat, prim

    # At 1.8 the child already matches: nothing to do.
    compat, prim = backport('1.8')
    self.assertFalse(compat.called)

    # At 1.7 the child is made compatible with 1.2.
    compat, prim = backport('1.7')
    compat.assert_called_once_with(
        prim['rel_object']['nova_object.data'], '1.2')

    # At 1.6 and 1.5 the child drops to 1.1 and its advertised
    # version is rewritten as well.
    for parent_version in ('1.6', '1.5'):
        compat, prim = backport(parent_version)
        compat.assert_called_once_with(
            prim['rel_object']['nova_object.data'], '1.1')
        self.assertEqual(
            '1.1', prim['rel_object']['nova_object.version'])

    # Before 1.5 the relationship did not exist, so the sub-object is
    # removed from the primitive entirely.
    compat, prim = backport('1.4')
    self.assertFalse(compat.called)
    self.assertNotIn('rel_object', prim)
def test_obj_make_compatible_hits_sub_objects(self):
    """obj_make_compatible() delegates set sub-objects to the helper."""
    owned = MyOwnedObject(baz=1)
    obj = MyObj(foo=123, rel_object=owned)
    obj.obj_relationships = {'rel_object': [('1.0', '1.0')]}
    with mock.patch.object(obj, '_obj_make_obj_compatible') as compat:
        obj.obj_make_compatible({'rel_object': 'foo'}, '1.10')
        # The helper gets the primitive, target version and field name.
        compat.assert_called_once_with({'rel_object': 'foo'}, '1.10',
                                       'rel_object')
def test_obj_make_compatible_skips_unset_sub_objects(self):
    """Sub-objects that were never set are left alone on backport."""
    obj = MyObj(foo=123)  # rel_object deliberately not set
    obj.obj_relationships = {'rel_object': [('1.0', '1.0')]}
    with mock.patch.object(obj, '_obj_make_obj_compatible') as compat:
        obj.obj_make_compatible({'rel_object': 'foo'}, '1.10')
        self.assertFalse(compat.called)
def test_obj_make_compatible_doesnt_skip_falsey_sub_objects(self):
    """An empty (falsey) list sub-object must still survive a backport.

    Regression guard: a set-but-empty ObjectListBase evaluates falsey,
    and a naive truthiness check would wrongly drop it from the
    primitive during obj_to_primitive() with a target version.
    """
    @base.NovaObjectRegistry.register_if(False)
    class MyList(base.ObjectListBase, base.NovaObject):
        VERSION = '1.2'
        fields = {'objects': fields.ListOfObjectsField('MyObjElement')}
        obj_relationships = {
            'objects': [('1.1', '1.1'), ('1.2', '1.2')],
        }

    # Empty list: truthiness is False, but the field *is* set.
    mylist = MyList(objects=[])

    @base.NovaObjectRegistry.register_if(False)
    class MyOwner(base.NovaObject):
        VERSION = '1.2'
        fields = {'mylist': fields.ObjectField('MyList')}
        obj_relationships = {
            'mylist': [('1.1', '1.1')],
        }

    myowner = MyOwner(mylist=mylist)
    primitive = myowner.obj_to_primitive('1.1')
    # The empty list must still be present after the backport.
    self.assertIn('mylist', primitive['nova_object.data'])
def test_obj_make_compatible_handles_list_of_objects(self):
    """Every element of a list-of-objects field gets backported."""
    subobj = MyOwnedObject(baz=1)
    obj = MyObj(rel_objects=[subobj])
    obj.obj_relationships = {'rel_objects': [('1.0', '1.123')]}

    def check_compat(primitive, version):
        # Each element must be asked for the mapped child version and
        # handed its own primitive data.
        self.assertEqual('1.123', version)
        self.assertIn('baz', primitive)

    with mock.patch.object(subobj, 'obj_make_compatible') as compat:
        compat.side_effect = check_compat
        obj.obj_to_primitive('1.0')
        self.assertTrue(compat.called)
def test_delattr(self):
    """Deleting a set field unsets it and re-arms lazy loading."""
    obj = MyObj(bar='foo')
    del obj.bar
    # The field should now appear unset...
    self.assertFalse(obj.obj_attr_is_set('bar'))
    # ...and the next read goes back through the lazy-load path.
    self.assertEqual('loaded!', obj.bar)
def test_delattr_unset(self):
    """Deleting a field that was never set raises AttributeError."""
    self.assertRaises(AttributeError, delattr, MyObj(), 'bar')
class TestObject(_LocalTest, _TestObject):
    """Runs _TestObject in-process, plus obj_set_defaults() coverage."""

    def test_set_defaults(self):
        obj = MyObj()
        obj.obj_set_defaults('foo')
        # The named field is now set to its declared default value.
        self.assertTrue(obj.obj_attr_is_set('foo'))
        self.assertEqual(1, obj.foo)

    def test_set_defaults_no_default(self):
        # A field without a declared default cannot be defaulted.
        self.assertRaises(ovo_exc.ObjectActionError,
                          MyObj().obj_set_defaults, 'bar')

    def test_set_all_defaults(self):
        obj = MyObj()
        obj.obj_set_defaults()
        # Exactly the fields that declare defaults get marked changed.
        self.assertEqual({'deleted', 'foo', 'mutable_default'},
                         obj.obj_what_changed())
        self.assertEqual(1, obj.foo)

    def test_set_defaults_not_overwrite(self):
        # NOTE(danms): deleted defaults to False, so verify that it does
        # not get reset by obj_set_defaults()
        obj = MyObj(deleted=True)
        obj.obj_set_defaults()
        self.assertEqual(1, obj.foo)
        self.assertTrue(obj.deleted)
class TestRemoteObject(_RemoteTest, _TestObject):
    """Runs _TestObject over the remote path, plus version negotiation."""

    def test_major_version_mismatch(self):
        # A different major version can never be reconciled.
        MyObj2.VERSION = '2.0'
        self.assertRaises(ovo_exc.IncompatibleObjectVersion,
                          MyObj2.query, self.context)

    def test_minor_version_greater(self):
        # A client newer than the server is also incompatible.
        MyObj2.VERSION = '1.7'
        self.assertRaises(ovo_exc.IncompatibleObjectVersion,
                          MyObj2.query, self.context)

    def test_minor_version_less(self):
        # A client older than the server works as-is.
        MyObj2.VERSION = '1.2'
        self.assertEqual('bar', MyObj2.query(self.context).bar)

    def test_compat(self):
        # The payload is backported for a sufficiently old client.
        MyObj2.VERSION = '1.1'
        self.assertEqual('oldbar', MyObj2.query(self.context).bar)

    def test_revision_ignored(self):
        # The .z revision component never triggers a backport.
        MyObj2.VERSION = '1.1.456'
        self.assertEqual('bar', MyObj2.query(self.context).bar)
class TestObjectSerializer(_BaseTestCase):
    """Tests for base.NovaObjectSerializer (de)serialization behavior."""

    def test_serialize_entity_primitive(self):
        # Primitive values pass through serialization untouched.
        ser = base.NovaObjectSerializer()
        for thing in (1, 'foo', [1, 2], {'foo': 'bar'}):
            self.assertEqual(thing, ser.serialize_entity(None, thing))

    def test_deserialize_entity_primitive(self):
        # Primitive values pass through deserialization untouched.
        ser = base.NovaObjectSerializer()
        for thing in (1, 'foo', [1, 2], {'foo': 'bar'}):
            self.assertEqual(thing, ser.deserialize_entity(None, thing))

    def test_serialize_set_to_list(self):
        # Sets are not a wire type; they serialize as lists.
        ser = base.NovaObjectSerializer()
        self.assertEqual([1, 2], ser.serialize_entity(None, set([1, 2])))

    def _test_deserialize_entity_newer(self, obj_version, backported_to,
                                       my_version='1.6'):
        # Helper: deserialize a primitive claiming ``obj_version`` while
        # the locally registered class is at ``my_version``.
        # ``backported_to`` is the version the conductor should have been
        # asked to backport to, or None if no backport is expected.
        ser = base.NovaObjectSerializer()
        ser._conductor = mock.Mock()
        ser._conductor.object_backport_versions.return_value = 'backported'

        class MyTestObj(MyObj):
            VERSION = my_version

        base.NovaObjectRegistry.register(MyTestObj)
        obj = MyTestObj()
        obj.VERSION = obj_version
        primitive = obj.obj_to_primitive()
        result = ser.deserialize_entity(self.context, primitive)
        if backported_to is None:
            self.assertFalse(ser._conductor.object_backport_versions.called)
        else:
            # The serializer returns whatever the conductor produced and
            # passes it the full version manifest of the object tree.
            self.assertEqual('backported', result)
            versions = ovo_base.obj_tree_get_versions('MyTestObj')
            ser._conductor.object_backport_versions.assert_called_with(
                self.context, primitive, versions)

    def test_deserialize_entity_newer_version_backports(self):
        self._test_deserialize_entity_newer('1.25', '1.6')

    def test_deserialize_entity_newer_revision_does_not_backport_zero(self):
        self._test_deserialize_entity_newer('1.6.0', None)

    def test_deserialize_entity_newer_revision_does_not_backport(self):
        self._test_deserialize_entity_newer('1.6.1', None)

    def test_deserialize_entity_newer_version_passes_revision(self):
        self._test_deserialize_entity_newer('1.7', '1.6.1', '1.6.1')

    def test_deserialize_dot_z_with_extra_stuff(self):
        # Unknown attributes in a .z-revision primitive are dropped
        # rather than triggering a round trip to conductor.
        primitive = {'nova_object.name': 'MyObj',
                     'nova_object.namespace': 'nova',
                     'nova_object.version': '1.6.1',
                     'nova_object.data': {
                         'foo': 1,
                         'unexpected_thing': 'foobar'}}
        ser = base.NovaObjectSerializer()
        obj = ser.deserialize_entity(self.context, primitive)
        self.assertEqual(1, obj.foo)
        self.assertFalse(hasattr(obj, 'unexpected_thing'))
        # NOTE(danms): The serializer is where the logic lives that
        # avoids backports for cases where only a .z difference in
        # the received object version is detected. As a result, we
        # end up with a version of what we expected, effectively the
        # .0 of the object.
        self.assertEqual('1.6', obj.VERSION)

    @mock.patch('oslo_versionedobjects.base.obj_tree_get_versions')
    def test_object_tree_backport(self, mock_get_versions):
        # Test the full client backport path all the way from the serializer
        # to the conductor and back.
        self.start_service('conductor',
                           manager='nova.conductor.manager.ConductorManager')

        # NOTE(danms): Actually register a complex set of objects,
        # two versions of the same parent object which contain a
        # child sub object.
        @base.NovaObjectRegistry.register
        class Child(base.NovaObject):
            VERSION = '1.10'

        @base.NovaObjectRegistry.register
        class Parent(base.NovaObject):
            VERSION = '1.0'
            fields = {
                'child': fields.ObjectField('Child'),
            }

        @base.NovaObjectRegistry.register  # noqa
        class Parent(base.NovaObject):
            VERSION = '1.1'
            fields = {
                'child': fields.ObjectField('Child'),
            }

        # NOTE(danms): Since we're on the same node as conductor,
        # return a fake version manifest so that we confirm that it
        # actually honors what the client asked for and not just what
        # it sees in the local machine state.
        mock_get_versions.return_value = {
            'Parent': '1.0',
            'Child': '1.5',
        }

        call_context = {}
        real_ofp = base.NovaObject.obj_from_primitive

        def fake_obj_from_primitive(*a, **k):
            # NOTE(danms): We need the first call to this to report an
            # incompatible object version, but subsequent calls must
            # succeed. Since we're testing the backport path all the
            # way through conductor and RPC, we can't fully break this
            # method, we just need it to fail once to trigger the
            # backport.
            if 'run' in call_context:
                return real_ofp(*a, **k)
            else:
                call_context['run'] = True
                raise ovo_exc.IncompatibleObjectVersion('foo')

        child = Child()
        parent = Parent(child=child)
        prim = parent.obj_to_primitive()
        ser = base.NovaObjectSerializer()

        with mock.patch('nova.objects.base.NovaObject.'
                        'obj_from_primitive') as mock_ofp:
            mock_ofp.side_effect = fake_obj_from_primitive
            result = ser.deserialize_entity(self.context, prim)
            # Our newest version (and what we passed back) of Parent
            # is 1.1, make sure that the manifest version is honored
            self.assertEqual('1.0', result.VERSION)
            # Our newest version (and what we passed back) of Child
            # is 1.10, make sure that the manifest version is honored
            self.assertEqual('1.5', result.child.VERSION)

    def test_object_serialization(self):
        # Round-trip a single object and confirm the context sticks.
        ser = base.NovaObjectSerializer()
        obj = MyObj()
        primitive = ser.serialize_entity(self.context, obj)
        self.assertIn('nova_object.name', primitive)
        obj2 = ser.deserialize_entity(self.context, primitive)
        self.assertIsInstance(obj2, MyObj)
        self.assertEqual(self.context, obj2._context)

    def test_object_serialization_iterables(self):
        # Objects embedded in lists/tuples/sets/dicts are handled
        # element-by-element in both directions.
        ser = base.NovaObjectSerializer()
        obj = MyObj()
        for iterable in (list, tuple, set):
            thing = iterable([obj])
            primitive = ser.serialize_entity(self.context, thing)
            self.assertEqual(1, len(primitive))
            for item in primitive:
                self.assertNotIsInstance(item, base.NovaObject)
            thing2 = ser.deserialize_entity(self.context, primitive)
            self.assertEqual(1, len(thing2))
            for item in thing2:
                self.assertIsInstance(item, MyObj)
        # dict case
        thing = {'key': obj}
        primitive = ser.serialize_entity(self.context, thing)
        self.assertEqual(1, len(primitive))
        for item in six.itervalues(primitive):
            self.assertNotIsInstance(item, base.NovaObject)
        thing2 = ser.deserialize_entity(self.context, primitive)
        self.assertEqual(1, len(thing2))
        for item in six.itervalues(thing2):
            self.assertIsInstance(item, MyObj)
        # object-action updates dict case
        thing = {'foo': obj.obj_to_primitive()}
        primitive = ser.serialize_entity(self.context, thing)
        self.assertEqual(thing, primitive)
        thing2 = ser.deserialize_entity(self.context, thing)
        self.assertIsInstance(thing2['foo'], base.NovaObject)
class TestArgsSerializer(test.NoDBTestCase):
    """Verify base.serialize_args stringifies datetimes in args/kwargs."""

    def setUp(self):
        super(TestArgsSerializer, self).setUp()
        self.now = timeutils.utcnow()
        self.str_now = timeutils.strtime(at=self.now)
        self.unicode_str = u'\xF0\x9F\x92\xA9'

    @base.serialize_args
    def _test_serialize_args(self, *args, **kwargs):
        # By the time this body runs, the decorator has already
        # serialized everything, so datetimes arrive in string form.
        expected_args = ('untouched', self.str_now, self.str_now)
        for position, actual in enumerate(args):
            self.assertEqual(expected_args[position], actual)

        expected_kwargs = {'a': 'untouched', 'b': self.str_now,
                           'c': self.str_now, 'exc_val': self.unicode_str}
        for name, actual in six.iteritems(kwargs):
            self.assertEqual(expected_kwargs[name], actual)

    def test_serialize_args(self):
        self._test_serialize_args('untouched', self.now, self.now,
                                  a='untouched', b=self.now, c=self.now,
                                  exc_val=self.unicode_str)
class TestRegistry(test.NoDBTestCase):
    """The registration hook should always expose the newest version of
    each object name on the nova.objects module.
    """

    @mock.patch('nova.objects.base.objects')
    def test_hook_chooses_newer_properly(self, mock_objects):
        reg = base.NovaObjectRegistry()
        reg.registration_hook(MyObj, 0)

        class MyNewerObj(object):
            VERSION = '1.123'

            @classmethod
            def obj_name(cls):
                return 'MyObj'

        # The first registration set the attribute...
        self.assertEqual(MyObj, mock_objects.MyObj)
        reg.registration_hook(MyNewerObj, 0)
        # ...and the newer version replaces it.
        self.assertEqual(MyNewerObj, mock_objects.MyObj)

    @mock.patch('nova.objects.base.objects')
    def test_hook_keeps_newer_properly(self, mock_objects):
        reg = base.NovaObjectRegistry()
        reg.registration_hook(MyObj, 0)

        class MyOlderObj(object):
            VERSION = '1.1'

            @classmethod
            def obj_name(cls):
                return 'MyObj'

        self.assertEqual(MyObj, mock_objects.MyObj)
        reg.registration_hook(MyOlderObj, 0)
        # An older registration must not displace the newer one.
        self.assertEqual(MyObj, mock_objects.MyObj)
# NOTE(danms): The hashes in this list should only be changed if
# they come with a corresponding version bump in the affected
# objects
# Each entry is '<VERSION>-<md5 hexdigest>', computed by
# TestObjectVersions._get_fingerprint from the object's fields and
# remotable method signatures.
object_data = {
    'Agent': '1.0-c0c092abaceb6f51efe5d82175f15eba',
    'AgentList': '1.0-5a7380d02c3aaf2a32fc8115ae7ca98c',
    'Aggregate': '1.1-1ab35c4516f71de0bef7087026ab10d1',
    'AggregateList': '1.2-fb6e19f3c3a3186b04eceb98b5dadbfa',
    'BandwidthUsage': '1.2-c6e4c779c7f40f2407e3d70022e3cd1c',
    'BandwidthUsageList': '1.2-5fe7475ada6fe62413cbfcc06ec70746',
    'BlockDeviceMapping': '1.15-d44d8d694619e79c172a99b3c1d6261d',
    'BlockDeviceMappingList': '1.16-6fa262c059dad1d519b9fe05b9e4f404',
    'CellMapping': '1.0-7f1a7e85a22bbb7559fc730ab658b9bd',
    'ComputeNode': '1.14-a396975707b66281c5f404a68fccd395',
    'ComputeNodeList': '1.14-3b6f4f5ade621c40e70cb116db237844',
    'DNSDomain': '1.0-7b0b2dab778454b6a7b6c66afe163a1a',
    'DNSDomainList': '1.0-4ee0d9efdfd681fed822da88376e04d2',
    'EC2Ids': '1.0-474ee1094c7ec16f8ce657595d8c49d9',
    'EC2InstanceMapping': '1.0-a4556eb5c5e94c045fe84f49cf71644f',
    'EC2SnapshotMapping': '1.0-47e7ddabe1af966dce0cfd0ed6cd7cd1',
    'EC2VolumeMapping': '1.0-5b713751d6f97bad620f3378a521020d',
    'FixedIP': '1.14-53e1c10b539f1a82fe83b1af4720efae',
    'FixedIPList': '1.14-87a39361c8f08f059004d6b15103cdfd',
    'Flavor': '1.1-b6bb7a730a79d720344accefafacf7ee',
    'FlavorList': '1.1-52b5928600e7ca973aa4fc1e46f3934c',
    'FloatingIP': '1.10-52a67d52d85eb8b3f324a5b7935a335b',
    'FloatingIPList': '1.11-7f2ba670714e1b7bab462ab3290f7159',
    'HostMapping': '1.0-1a3390a696792a552ab7bd31a77ba9ac',
    'HVSpec': '1.1-6b4f7c0f688cbd03e24142a44eb9010d',
    'ImageMeta': '1.7-642d1b2eb3e880a367f37d72dd76162d',
    'ImageMetaProps': '1.7-f12fc4cf3e25d616f69a66fb9d2a7aa6',
    'Instance': '2.0-ff56804dce87d81d9a04834d4bd1e3d2',
    # NOTE(danms): Reviewers: do not approve changes to the Instance1
    # object schema. It is frozen for Liberty and will be removed in
    # Mitaka.
    'Instance1': '1.23-4e68422207667f4abff5fa730a5edc98',
    'InstanceAction': '1.1-f9f293e526b66fca0d05c3b3a2d13914',
    'InstanceActionEvent': '1.1-e56a64fa4710e43ef7af2ad9d6028b33',
    'InstanceActionEventList': '1.1-13d92fb953030cdbfee56481756e02be',
    'InstanceActionList': '1.0-4a53826625cc280e15fae64a575e0879',
    'InstanceExternalEvent': '1.1-6e446ceaae5f475ead255946dd443417',
    'InstanceFault': '1.2-7ef01f16f1084ad1304a513d6d410a38',
    'InstanceFaultList': '1.1-f8ec07cbe3b60f5f07a8b7a06311ac0d',
    'InstanceGroup': '1.10-1a0c8c7447dc7ecb9da53849430c4a5f',
    'InstanceGroupList': '1.7-be18078220513316abd0ae1b2d916873',
    'InstanceInfoCache': '1.5-cd8b96fefe0fc8d4d337243ba0bf0e1e',
    'InstanceList': '2.0-6c8ba6147cca3082b1e4643f795068bf',
    # NOTE(danms): Reviewers: do not approve changes to the InstanceList1
    # object schema. It is frozen for Liberty and will be removed in
    # Mitaka.
    'InstanceList1': '1.22-6c8ba6147cca3082b1e4643f795068bf',
    'InstanceMapping': '1.0-47ef26034dfcbea78427565d9177fe50',
    'InstanceMappingList': '1.0-9e982e3de1613b9ada85e35f69b23d47',
    'InstanceNUMACell': '1.2-535ef30e0de2d6a0d26a71bd58ecafc4',
    'InstanceNUMATopology': '1.2-d944a7d6c21e1c773ffdf09c6d025954',
    'InstancePCIRequest': '1.1-b1d75ebc716cb12906d9d513890092bf',
    'InstancePCIRequests': '1.1-65e38083177726d806684cb1cc0136d2',
    'KeyPair': '1.3-bfaa2a8b148cdf11e0c72435d9dd097a',
    'KeyPairList': '1.2-58b94f96e776bedaf1e192ddb2a24c4e',
    'Migration': '1.2-8784125bedcea0a9227318511904e853',
    'MigrationContext': '1.0-d8c2f10069e410f639c49082b5932c92',
    'MigrationList': '1.2-02c0ec0c50b75ca86a2a74c5e8c911cc',
    'MonitorMetric': '1.1-53b1db7c4ae2c531db79761e7acc52ba',
    'MonitorMetricList': '1.1-15ecf022a68ddbb8c2a6739cfc9f8f5e',
    'NUMACell': '1.2-74fc993ac5c83005e76e34e8487f1c05',
    'NUMAPagesTopology': '1.0-c71d86317283266dc8364c149155e48e',
    'NUMATopology': '1.2-c63fad38be73b6afd04715c9c1b29220',
    'NUMATopologyLimits': '1.0-9463e0edd40f64765ae518a539b9dfd2',
    'Network': '1.2-a977ab383aa462a479b2fae8211a5dde',
    'NetworkList': '1.2-69eca910d8fa035dfecd8ba10877ee59',
    'NetworkRequest': '1.1-7a3e4ca2ce1e7b62d8400488f2f2b756',
    'NetworkRequestList': '1.1-15ecf022a68ddbb8c2a6739cfc9f8f5e',
    'PciDevice': '1.3-d92e0b17bbed61815b919af6b8d8998e',
    'PciDeviceList': '1.2-3757458c45591cbc92c72ee99e757c98',
    'PciDevicePool': '1.1-3f5ddc3ff7bfa14da7f6c7e9904cc000',
    'PciDevicePoolList': '1.1-15ecf022a68ddbb8c2a6739cfc9f8f5e',
    'Quotas': '1.2-1fe4cd50593aaf5d36a6dc5ab3f98fb3',
    'QuotasNoOp': '1.2-e041ddeb7dc8188ca71706f78aad41c1',
    'RequestSpec': '1.4-6922fe208b5d1186bdd825513f677921',
    'S3ImageMapping': '1.0-7dd7366a890d82660ed121de9092276e',
    'SchedulerLimits': '1.0-249c4bd8e62a9b327b7026b7f19cc641',
    'SchedulerRetries': '1.1-3c9c8b16143ebbb6ad7030e999d14cc0',
    'SecurityGroup': '1.1-0e1b9ba42fe85c13c1437f8b74bdb976',
    'SecurityGroupList': '1.0-dc8bbea01ba09a2edb6e5233eae85cbc',
    'SecurityGroupRule': '1.1-ae1da17b79970012e8536f88cb3c6b29',
    'SecurityGroupRuleList': '1.1-674b323c9ccea02e93b1b40e7fd2091a',
    'Service': '1.19-8914320cbeb4ec29f252d72ce55d07e1',
    'ServiceList': '1.17-b767102cba7cbed290e396114c3f86b3',
    'TaskLog': '1.0-78b0534366f29aa3eebb01860fbe18fe',
    'TaskLogList': '1.0-cc8cce1af8a283b9d28b55fcd682e777',
    'Tag': '1.1-8b8d7d5b48887651a0e01241672e2963',
    'TagList': '1.1-55231bdb671ecf7641d6a2e9109b5d8e',
    'VirtCPUFeature': '1.0-3310718d8c72309259a6e39bdefe83ee',
    'VirtCPUModel': '1.0-6a5cc9f322729fc70ddc6733bacd57d3',
    'VirtCPUTopology': '1.0-fc694de72e20298f7c6bab1083fd4563',
    'VirtualInterface': '1.0-19921e38cba320f355d56ecbf8f29587',
    'VirtualInterfaceList': '1.0-9750e2074437b3077e46359102779fc6',
    'VolumeUsage': '1.0-6c8190c46ce1469bb3286a1f21c2e475',
}
class TestObjectVersions(test.NoDBTestCase):
    """Guard tests: fingerprint every registered object and compare
    against the frozen hashes in object_data, forcing a version bump
    whenever an object's schema or remotable signatures change.
    """

    @staticmethod
    def _is_method(thing):
        # NOTE(dims): In Python3, The concept of 'unbound methods' has
        # been removed from the language. When referencing a method
        # as a class attribute, you now get a plain function object.
        # so let's check for both
        return inspect.isfunction(thing) or inspect.ismethod(thing)

    def _find_remotable_method(self, cls, thing, parent_was_remotable=False):
        """Follow a chain of remotable things down to the original function."""
        if isinstance(thing, classmethod):
            # Unwrap the classmethod to get at the underlying function.
            return self._find_remotable_method(cls, thing.__get__(None, cls))
        elif self._is_method(thing) and hasattr(thing, 'remotable'):
            return self._find_remotable_method(cls, thing.original_fn,
                                               parent_was_remotable=True)
        elif parent_was_remotable:
            # We must be the first non-remotable thing underneath a stack of
            # remotable things (i.e. the actual implementation method)
            return thing
        else:
            # This means the top-level thing never hit a remotable layer
            return None

    def _un_unicodify_enum_valid_values(self, _fields):
        # Coerce Enum valid_values back to UTF-8 byte strings so the
        # repr() used for hashing is identical before and after oslo's
        # unicode coercion change (see the NOTE in _get_fingerprint).
        for name, field in _fields:
            if not isinstance(field, (fields.BaseEnumField,
                                      fields.EnumField)):
                continue
            orig_type = type(field._type._valid_values)
            field._type._valid_values = orig_type(
                [x.encode('utf-8') for x in
                 field._type._valid_values])

    def _get_fingerprint(self, obj_class):
        """Return '<VERSION>-<md5>' over the class's fields and the
        signatures of its remotable methods.
        """
        fields = list(obj_class.fields.items())
        # NOTE(danms): We store valid_values in the enum as strings,
        # but oslo is working to make these coerced to unicode (which
        # is the right thing to do). The functionality will be
        # unchanged, but the repr() result that we use for calculating
        # the hashes will be different. This helper method coerces all
        # Enum valid_values elements to UTF-8 string before we make the
        # repr() call so that it is consistent before and after the
        # unicode change, and on py2 and py3.
        if six.PY2:
            self._un_unicodify_enum_valid_values(fields)
        fields.sort()
        methods = []
        for name in dir(obj_class):
            thing = getattr(obj_class, name)
            if self._is_method(thing) or isinstance(thing, classmethod):
                method = self._find_remotable_method(obj_class, thing)
                if method:
                    methods.append((name, inspect.getargspec(method)))
        methods.sort()
        # NOTE(danms): Things that need a version bump are any fields
        # and their types, or the signatures of any remotable methods.
        # Of course, these are just the mechanical changes we can detect,
        # but many other things may require a version bump (method behavior
        # and return value changes, for example).
        if hasattr(obj_class, 'child_versions'):
            relevant_data = (fields, methods,
                             OrderedDict(
                                 sorted(obj_class.child_versions.items())))
        else:
            relevant_data = (fields, methods)
        relevant_data = repr(relevant_data)
        if six.PY3:
            relevant_data = relevant_data.encode('utf-8')
        fingerprint = '%s-%s' % (
            obj_class.VERSION, hashlib.md5(relevant_data).hexdigest())
        return fingerprint

    def test_find_remotable_method(self):
        class MyObject(object):
            @base.remotable
            def my_method(self):
                return 'Hello World!'
        thing = self._find_remotable_method(MyObject,
                                            getattr(MyObject, 'my_method'))
        self.assertIsNotNone(thing)

    def test_versions(self):
        # Fingerprint every registered object and diff against the
        # frozen object_data table above.
        fingerprints = {}
        obj_classes = base.NovaObjectRegistry.obj_classes()
        for obj_name in sorted(obj_classes, key=lambda x: x[0]):
            index = 0
            for version_cls in obj_classes[obj_name]:
                if len(obj_classes[obj_name]) > 1 and index != 0:
                    # Older pinned versions get a major-version suffix,
                    # e.g. 'Instance1' alongside 'Instance'.
                    name = '%s%s' % (obj_name,
                                     version_cls.VERSION.split('.')[0])
                else:
                    name = obj_name
                fingerprints[name] = self._get_fingerprint(version_cls)
                index += 1

        if os.getenv('GENERATE_HASHES'):
            # Developer convenience: dump the freshly computed hashes
            # instead of failing.  NOTE: file() is Python 2 only.
            file('object_hashes.txt', 'w').write(
                pprint.pformat(fingerprints))
            raise test.TestingException(
                'Generated hashes in object_hashes.txt')

        stored = set(object_data.items())
        computed = set(fingerprints.items())
        changed = stored.symmetric_difference(computed)
        expected = {}
        actual = {}
        for name, hash in changed:
            expected[name] = object_data.get(name)
            actual[name] = fingerprints.get(name)
        self.assertEqual(expected, actual,
                         'Some objects have changed; please make sure the '
                         'versions have been bumped, and then update their '
                         'hashes here.')

    def _get_object_field_name(self, field):
        # Resolve the target object name for Object and ListOfObjects
        # fields; returns None for non-object fields.
        if isinstance(field._type, fields.Object):
            return field._type._obj_name
        if isinstance(field, fields.ListOfObjectsField):
            return field._type._element_type._type._obj_name
        return None

    def test_obj_make_compatible(self):
        # Iterate all object classes and verify that we can run
        # obj_make_compatible with every older version than current.
        # This doesn't actually test the data conversions, but it at least
        # makes sure the method doesn't blow up on something basic like
        # expecting the wrong version format.
        obj_classes = base.NovaObjectRegistry.obj_classes()
        for obj_name in obj_classes:
            versions = ovo_base.obj_tree_get_versions(obj_name)
            obj_class = obj_classes[obj_name][0]
            version = utils.convert_version_to_tuple(obj_class.VERSION)
            for n in range(version[1]):
                test_version = '%d.%d' % (version[0], n)
                LOG.info('testing obj: %s version: %s' %
                         (obj_name, test_version))
                obj_class().obj_to_primitive(target_version=test_version,
                                             version_manifest=versions)

    def test_list_obj_make_compatible(self):
        @base.NovaObjectRegistry.register_if(False)
        class TestObj(base.NovaObject):
            VERSION = '1.4'
            fields = {'foo': fields.IntegerField()}

        @base.NovaObjectRegistry.register_if(False)
        class TestListObj(base.ObjectListBase, base.NovaObject):
            VERSION = '1.5'
            fields = {'objects': fields.ListOfObjectsField('TestObj')}
            obj_relationships = {
                'objects': [('1.0', '1.1'), ('1.1', '1.2'),
                            ('1.3', '1.3'), ('1.5', '1.4')]
            }

        my_list = TestListObj()
        my_obj = TestObj(foo=1)
        my_list.objects = [my_obj]
        primitive = my_list.obj_to_primitive(target_version='1.5')
        primitive_data = primitive['nova_object.data']
        obj_primitive = my_obj.obj_to_primitive(target_version='1.4')
        obj_primitive_data = obj_primitive['nova_object.data']
        with mock.patch.object(TestObj, 'obj_make_compatible') as comp:
            # Backporting the list to 1.1 should backport each element
            # to the mapped child version (1.2).
            my_list.obj_make_compatible(primitive_data, '1.1')
            comp.assert_called_with(obj_primitive_data,
                                    '1.2')

    def test_list_obj_make_compatible_when_no_objects(self):
        # Test to make sure obj_make_compatible works with no 'objects'
        # If a List object ever has a version that did not contain the
        # 'objects' key, we need to make sure converting back to that version
        # doesn't cause backporting problems.
        @base.NovaObjectRegistry.register_if(False)
        class TestObj(base.NovaObject):
            VERSION = '1.1'
            fields = {'foo': fields.IntegerField()}

        @base.NovaObjectRegistry.register_if(False)
        class TestListObj(base.ObjectListBase, base.NovaObject):
            VERSION = '1.1'
            fields = {'objects': fields.ListOfObjectsField('TestObj')}
            # pretend that version 1.0 didn't have 'objects'
            obj_relationships = {
                'objects': [('1.1', '1.1')]
            }

        my_list = TestListObj()
        my_list.objects = [TestObj(foo=1)]
        primitive = my_list.obj_to_primitive(target_version='1.1')
        primitive_data = primitive['nova_object.data']
        my_list.obj_make_compatible(primitive_data,
                                    target_version='1.0')
        self.assertNotIn('objects', primitive_data,
                         "List was backported to before 'objects' existed."
                         " 'objects' should not be in the primitive.")
class TestObjEqualPrims(_BaseTestCase):
    """Tests for base.obj_equal_prims(), which compares object data while
    ignoring change markers and any explicitly listed fields.
    """

    def test_object_equal(self):
        obj1 = MyObj(foo=1, bar='goodbye')
        obj1.obj_reset_changes()
        obj2 = MyObj(foo=1, bar='goodbye')
        obj2.obj_reset_changes()
        obj2.bar = 'goodbye'
        # obj2 will be marked with field 'bar' updated (the value is the
        # same; only the change tracking differs between the objects)
        self.assertTrue(base.obj_equal_prims(obj1, obj2),
                        "Objects that differ only because one a is marked "
                        "as updated should be equal")

    def test_object_not_equal(self):
        obj1 = MyObj(foo=1, bar='goodbye')
        obj1.obj_reset_changes()
        obj2 = MyObj(foo=1, bar='hello')
        obj2.obj_reset_changes()
        self.assertFalse(base.obj_equal_prims(obj1, obj2),
                         "Objects that differ in any field "
                         "should not be equal")

    def test_object_ignore_equal(self):
        obj1 = MyObj(foo=1, bar='goodbye')
        obj1.obj_reset_changes()
        obj2 = MyObj(foo=1, bar='hello')
        obj2.obj_reset_changes()
        # 'bar' differs but is in the ignore list, so they compare equal.
        self.assertTrue(base.obj_equal_prims(obj1, obj2, ['bar']),
                        "Objects that only differ in an ignored field "
                        "should be equal")
class TestObjMethodOverrides(test.NoDBTestCase):
    """Ensure subclasses do not change inherited method signatures."""

    def test_obj_reset_changes(self):
        base_args = inspect.getargspec(base.NovaObject.obj_reset_changes)
        registry = base.NovaObjectRegistry.obj_classes()
        for obj_name in registry:
            # Only the newest registered version of each object matters.
            obj_class = registry[obj_name][0]
            self.assertEqual(
                base_args,
                inspect.getargspec(obj_class.obj_reset_changes))
| apache-2.0 |
nikitabiradar/student_registration | janastu/lib/python2.7/site-packages/pip/commands/freeze.py | 45 | 4647 | import re
import sys
import pip
from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
import pkg_resources
class FreezeCommand(Command):
    """Output installed packages in requirements format.

    Writes one ``name==version`` line per installed distribution to
    stdout.  With ``-r FILE``, the output follows the order and comments
    of an existing requirements file, appending anything installed but
    not listed under a trailing marker comment.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirement',
            action='store',
            default=None,
            metavar='file',
            help="Use the order in the given requirements file and it's comments when generating output.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output globally-installed packages.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def setup_logging(self):
        # The frozen requirement list is the command's product and goes
        # to stdout; push log chatter to stderr so output can be piped
        # straight into a requirements file.
        logger.move_stdout_to_stderr()

    def run(self, options, args):
        """Write the frozen requirement set to stdout.

        :param options: parsed optparse options (requirement, find_links,
                        local, skip_requirements_regex, default_vcs)
        :param args: positional args (unused)
        """
        requirement = options.requirement
        find_links = options.find_links or []
        local_only = options.local
        ## FIXME: Obviously this should be settable:
        find_tags = False
        skip_match = None

        skip_regex = options.skip_requirements_regex
        if skip_regex:
            skip_match = re.compile(skip_regex)

        dependency_links = []

        f = sys.stdout

        # Collect dependency links advertised by installed distributions
        # and by any --find-links URLs that pin a specific egg.
        for dist in pkg_resources.working_set:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
        for link in find_links:
            if '#egg=' in link:
                dependency_links.append(link)
        for link in find_links:
            f.write('-f %s\n' % link)

        # Map of project name -> FrozenRequirement for everything installed.
        installations = {}
        for dist in get_installed_distributions(local_only=local_only):
            req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
            installations[req.name] = req

        if requirement:
            # Echo the reference requirements file in order, replacing
            # each requirement line with the installed version.
            # BUGFIX: open the file with a context manager so the handle
            # is closed even if a line fails to parse (previously the
            # bare open() was never closed).
            with open(requirement) as req_f:
                for line in req_f:
                    # Blank lines and comments pass through untouched.
                    if not line.strip() or line.strip().startswith('#'):
                        f.write(line)
                        continue
                    # Lines matching --skip-requirements-regex also pass
                    # through untouched.
                    if skip_match and skip_match.search(line):
                        f.write(line)
                        continue
                    elif line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
                    elif (line.startswith('-r') or line.startswith('--requirement')
                          or line.startswith('-Z') or line.startswith('--always-unzip')
                          or line.startswith('-f') or line.startswith('-i')
                          or line.startswith('--extra-index-url')
                          or line.startswith('--find-links')
                          or line.startswith('--index-url')):
                        # Option lines are not requirements; copy them out.
                        f.write(line)
                        continue
                    else:
                        line_req = InstallRequirement.from_line(line)
                    if not line_req.name:
                        logger.notify("Skipping line because it's not clear what it would install: %s"
                                      % line.strip())
                        logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
                        continue
                    if line_req.name not in installations:
                        logger.warn("Requirement file contains %s, but that package is not installed"
                                    % line.strip())
                        continue
                    f.write(str(installations[line_req.name]))
                    del installations[line_req.name]
            f.write('## The following requirements were added by pip --freeze:\n')
        # Whatever was not consumed by the requirements file (or all of
        # it, when no file was given) is emitted in name order.
        for installation in sorted(installations.values(), key=lambda x: x.name):
            f.write(str(installation))
| mit |
sekaiamber/PredictionIO | examples/scala-parallel-similarproduct/filterbyyear/data/import_eventserver.py | 142 | 1844 | """
Import sample data for similar product engine
"""
import predictionio
import argparse
import random
SEED = 3
def import_events(client):
    """Seed the event server with demo users, items and view events.

    Uses a fixed random seed so repeated runs send identical data.
    NOTE: the exact data depends on the *order* of random.sample /
    random.randint calls below; do not reorder them.
    NOTE(review): this is Python 2 code (print statements).
    """
    random.seed(SEED)
    count = 0
    print client.get_status()
    print "Importing data..."

    # generate 10 users, with user ids u1,u2,....,u10
    user_ids = ["u%s" % i for i in range(1, 11)]
    for user_id in user_ids:
        print "Set user", user_id
        client.create_event(
            event="$set",
            entity_type="user",
            entity_id=user_id
        )
        count += 1

    # generate 50 items, with item ids i1,i2,....,i50
    # random assign 1 to 4 categories among c1-c6 to items
    categories = ["c%s" % i for i in range(1, 7)]
    item_ids = ["i%s" % i for i in range(1, 51)]
    for item_id in item_ids:
        print "Set item", item_id
        client.create_event(
            event="$set",
            entity_type="item",
            entity_id=item_id,
            properties={
                "categories" : random.sample(categories, random.randint(1, 4))
            }
        )
        count += 1

    # each user randomly viewed 10 items
    for user_id in user_ids:
        for viewed_item in random.sample(item_ids, 10):
            print "User", user_id ,"views item", viewed_item
            client.create_event(
                event="view",
                entity_type="user",
                entity_id=user_id,
                target_entity_type="item",
                target_entity_id=viewed_item
            )
            count += 1

    print "%s events are imported." % count
if __name__ == '__main__':
    # Parse the CLI, build an EventClient and run the import.
    parser = argparse.ArgumentParser(
        description="Import sample data for similar product engine")
    # NOTE(review): 'invald_access_key' looks like a typo for
    # 'invalid_access_key' -- presumably an intentional placeholder that
    # forces users to pass their own key; confirm before changing.
    parser.add_argument('--access_key', default='invald_access_key')
    parser.add_argument('--url', default="http://localhost:7070")

    args = parser.parse_args()
    print args

    client = predictionio.EventClient(
        access_key=args.access_key,
        url=args.url,
        threads=5,
        qsize=500)
    import_events(client)
| apache-2.0 |
igemsoftware/SYSU-Software2013 | project/Python27/Lib/warnings.py | 265 | 14044 | """Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import linecache
import sys
import types
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings", "catch_warnings"]
def warnpy3k(message, category=None, stacklevel=1):
    """Issue a deprecation warning for Python 3.x related changes.

    Warnings are omitted unless Python is started with the -3 option.
    """
    if not sys.py3kwarning:
        return
    # DeprecationWarning is the default category for py3k warnings.
    warn(message,
         DeprecationWarning if category is None else category,
         stacklevel + 1)
def _show_warning(message, category, filename, lineno, file=None, line=None):
    """Hook to write a warning to a file; replace if you like."""
    target = sys.stderr if file is None else file
    try:
        target.write(formatwarning(message, category, filename, lineno, line))
    except IOError:
        # The stream (probably stderr) is unusable; the warning is lost.
        pass
# Keep a working version around in case the deprecation of the old API is
# triggered.
showwarning = _show_warning
def formatwarning(message, category, filename, lineno, line=None):
    """Function to format a warning the standard way."""
    formatted = "%s:%s: %s: %s\n" % (filename, lineno,
                                     category.__name__, message)
    if line is None:
        # Fall back to reading the source line from the file itself.
        line = linecache.getline(filename, lineno)
    if line:
        formatted += "  %s\n" % line.strip()
    return formatted
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
                   append=0):
    """Install a warnings filter entry.

    'action'   -- one of "error", "ignore", "always", "default",
                  "module" or "once"
    'message'  -- regex source the warning message must match
    'category' -- Warning subclass the warning must belong to
    'module'   -- regex source the issuing module name must match
    'lineno'   -- line number to match; 0 matches every line
    'append'   -- if true, append to the filter list instead of
                  inserting at the front
    """
    import re
    valid = ("error", "ignore", "always", "default", "module", "once")
    assert action in valid, "invalid action: %r" % (action,)
    assert isinstance(message, basestring), "message must be a string"
    assert isinstance(category, (type, types.ClassType)), \
           "category must be a class"
    assert issubclass(category, Warning), "category must be a Warning subclass"
    assert isinstance(module, basestring), "module must be a string"
    assert isinstance(lineno, int) and lineno >= 0, \
           "lineno must be an int >= 0"
    # Message matching is case-insensitive, mirroring the -W option.
    entry = (action, re.compile(message, re.I), category,
             re.compile(module), lineno)
    if append:
        filters.append(entry)
    else:
        filters.insert(0, entry)
def simplefilter(action, category=Warning, lineno=0, append=0):
    """Install a filter entry that matches all modules and messages.

    'action'   -- one of "error", "ignore", "always", "default",
                  "module" or "once"
    'category' -- Warning subclass the warning must belong to
    'lineno'   -- line number to match; 0 matches every line
    'append'   -- if true, append to the filter list instead of
                  inserting at the front
    """
    valid = ("error", "ignore", "always", "default", "module", "once")
    assert action in valid, "invalid action: %r" % (action,)
    assert isinstance(lineno, int) and lineno >= 0, \
           "lineno must be an int >= 0"
    # None in the message/module slots means "match anything".
    entry = (action, None, category, None, lineno)
    if append:
        filters.append(entry)
    else:
        filters.insert(0, entry)
def resetwarnings():
    """Clear the list of warning filters, so that no filters are active."""
    # Mutate in place so every holder of a reference sees the reset.
    del filters[:]
# Raised by the -W parsing helpers below and caught in _processoptions().
class _OptionError(Exception):
    """Exception used by option processing helpers."""
    pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
    # Each arg is one -W specification; a malformed one is reported on
    # stderr and skipped rather than aborting interpreter start-up.
    for arg in args:
        try:
            _setoption(arg)
        except _OptionError, msg:
            print >>sys.stderr, "Invalid -W option ignored:", msg
# Helper for _processoptions()
def _setoption(arg):
    """Parse one -W option ("action:message:category:module:lineno")
    and install the corresponding warnings filter."""
    import re
    fields = arg.split(':')
    if len(fields) > 5:
        raise _OptionError("too many fields (max 5): %r" % (arg,))
    # Missing trailing fields default to empty strings.
    fields.extend([''] * (5 - len(fields)))
    action, message, category, module, lineno = [f.strip()
                                                 for f in fields]
    action = _getaction(action)
    message = re.escape(message)
    category = _getcategory(category)
    module = re.escape(module)
    if module:
        # Anchor so the module regex must match the whole module name.
        module = module + '$'
    if lineno:
        try:
            lineno = int(lineno)
            if lineno < 0:
                raise ValueError
        except (ValueError, OverflowError):
            raise _OptionError("invalid lineno %r" % (lineno,))
    else:
        lineno = 0
    filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception."""
    # Check if message is already a Warning object
    if isinstance(message, Warning):
        category = message.__class__
    # Check category argument
    if category is None:
        category = UserWarning
    assert issubclass(category, Warning)
    # Get context information
    try:
        caller = sys._getframe(stacklevel)
    except ValueError:
        # Not enough frames on the stack; attribute the warning to sys.
        globals = sys.__dict__
        lineno = 1
    else:
        globals = caller.f_globals
        lineno = caller.f_lineno
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith((".pyc", ".pyo")):
            # Point at the .py source rather than the compiled file.
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv, see bug #839151
                filename = '__main__'
        if not filename:
            filename = module
    # Per-module registry used to suppress duplicate warnings.
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals)
def warn_explicit(message, category, filename, lineno,
                  module=None, registry=None, module_globals=None):
    """Low-level warning dispatch: match the filters and act accordingly.

    All context (filename, lineno, module, registry) is supplied
    explicitly instead of being derived from the call stack.
    """
    lineno = int(lineno)
    if module is None:
        module = filename or "<unknown>"
        if module[-3:].lower() == ".py":
            module = module[:-3] # XXX What about leading pathname?
    if registry is None:
        registry = {}
    if isinstance(message, Warning):
        text = str(message)
        category = message.__class__
    else:
        text = message
        message = category(message)
    key = (text, category, lineno)
    # Quick test for common case
    if registry.get(key):
        return
    # Search the filters
    for item in filters:
        action, msg, cat, mod, ln = item
        if ((msg is None or msg.match(text)) and
            issubclass(category, cat) and
            (mod is None or mod.match(module)) and
            (ln == 0 or lineno == ln)):
            break
    else:
        # No filter matched; fall back to the global default action.
        action = defaultaction
    # Early exit actions
    if action == "ignore":
        registry[key] = 1
        return
    # Prime the linecache for formatting, in case the
    # "file" is actually in a zipfile or something.
    linecache.getlines(filename, module_globals)
    if action == "error":
        raise message
    # Other actions
    if action == "once":
        # Suppress repeats of this (text, category) process-wide.
        registry[key] = 1
        oncekey = (text, category)
        if onceregistry.get(oncekey):
            return
        onceregistry[oncekey] = 1
    elif action == "always":
        pass
    elif action == "module":
        # Suppress repeats within this module regardless of line number.
        registry[key] = 1
        altkey = (text, category, 0)
        if registry.get(altkey):
            return
        registry[altkey] = 1
    elif action == "default":
        registry[key] = 1
    else:
        # Unrecognized actions are errors
        raise RuntimeError(
              "Unrecognized action (%r) in warnings.filters:\n %s" %
              (action, item))
    # Print message and context
    showwarning(message, category, filename, lineno)
class WarningMessage(object):
    """Holds the result of a single showwarning() call."""

    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")

    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Store each constructor argument under the matching attribute.
        self.message = message
        self.category = category
        self.filename = filename
        self.lineno = lineno
        self.file = file
        self.line = line
        self._category_name = category.__name__ if category else None

    def __str__(self):
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % (self.message, self._category_name,
                                self.filename, self.lineno, self.line))
class catch_warnings(object):
    """A context manager that copies and restores the warnings filter upon
    exiting the context.

    With 'record' true, warnings are captured by a replacement
    showwarning() and appended to the list returned from __enter__;
    otherwise __enter__ returns None.  Each captured object's attributes
    mirror the arguments to showwarning().

    'module' selects an alternative module standing in for the one
    imported as 'warnings'; this is only useful when testing the
    warnings module itself.
    """

    def __init__(self, record=False, module=None):
        """Specify whether to record warnings and if an alternative module
        should be used other than sys.modules['warnings'].

        For compatibility with Python 3.0, please consider all arguments to be
        keyword-only.
        """
        self._record = record
        self._module = sys.modules['warnings'] if module is None else module
        self._entered = False

    def __repr__(self):
        pieces = []
        if self._record:
            pieces.append("record=True")
        if self._module is not sys.modules['warnings']:
            pieces.append("module=%r" % self._module)
        return "%s(%s)" % (type(self).__name__, ", ".join(pieces))

    def __enter__(self):
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the module's state so __exit__ can restore it verbatim;
        # the active filter list becomes a private copy.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if not self._record:
            return None
        log = []
        def showwarning(*args, **kwargs):
            log.append(WarningMessage(*args, **kwargs))
        self._module.showwarning = showwarning
        return log

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either of the compiled regexs is None, match anything.
_warnings_defaults = False
try:
    # Prefer the C implementation; it owns the shared filter list and
    # default-action/once state that this module must reuse.
    from _warnings import (filters, default_action, once_registry,
                           warn, warn_explicit)
    defaultaction = default_action
    onceregistry = once_registry
    _warnings_defaults = True
except ImportError:
    # Pure-Python fallback: the definitions above stay in effect.
    filters = []
    defaultaction = "default"
    onceregistry = {}
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
    # Install the default silencing filters that the C implementation
    # would otherwise provide.
    silence = [ImportWarning, PendingDeprecationWarning]
    # Don't silence DeprecationWarning if -3 or -Q was used.
    if not sys.py3kwarning and not sys.flags.division_warning:
        silence.append(DeprecationWarning)
    for cls in silence:
        simplefilter("ignore", category=cls)
    # -b shows bytes/str comparison warnings, -bb turns them into errors.
    bytes_warning = sys.flags.bytes_warning
    if bytes_warning > 1:
        bytes_action = "error"
    elif bytes_warning:
        bytes_action = "default"
    else:
        bytes_action = "ignore"
    simplefilter(bytes_action, category=BytesWarning, append=1)
del _warnings_defaults
| mit |
eonpatapon/nova | nova/objects/instance.py | 3 | 57365 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from nova.cells import opts as cells_opts
from nova.cells import rpcapi as cells_rpcapi
from nova.cells import utils as cells_utils
from nova.compute import flavors
from nova import context
from nova import db
from nova import exception
from nova.i18n import _LE
from nova import notifications
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# List of fields that can be joined in DB layer.
_INSTANCE_OPTIONAL_JOINED_FIELDS = ['metadata', 'system_metadata',
                                    'info_cache', 'security_groups',
                                    'pci_devices', 'tags']
# These are fields that are optional but don't translate to db columns
_INSTANCE_OPTIONAL_NON_COLUMN_FIELDS = ['fault', 'flavor', 'old_flavor',
                                        'new_flavor', 'ec2_ids']
# These are fields that are optional and in instance_extra
_INSTANCE_EXTRA_FIELDS = ['numa_topology', 'pci_requests',
                          'flavor', 'vcpu_model']
# These are fields that can be specified as expected_attrs
# (union of the three lists above).
INSTANCE_OPTIONAL_ATTRS = (_INSTANCE_OPTIONAL_JOINED_FIELDS +
                           _INSTANCE_OPTIONAL_NON_COLUMN_FIELDS +
                           _INSTANCE_EXTRA_FIELDS)
# These are fields that most query calls load by default
INSTANCE_DEFAULT_FIELDS = ['metadata', 'system_metadata',
                           'info_cache', 'security_groups']
def _expected_cols(expected_attrs):
    """Return expected_attrs that are columns needing joining.

    NB: This function may modify expected_attrs if one
    requested attribute requires another.
    """
    if not expected_attrs:
        return expected_attrs
    # Requesting sysmeta implies flavor, for flavor-in-sysmeta compat.
    if ('system_metadata' in expected_attrs and
            'flavor' not in expected_attrs):
        expected_attrs.append('flavor')
    simple_cols = []
    for attr in expected_attrs:
        if attr in _INSTANCE_OPTIONAL_JOINED_FIELDS:
            simple_cols.append(attr)
    complex_cols = []
    for field in _INSTANCE_EXTRA_FIELDS:
        if field in expected_attrs:
            complex_cols.append('extra.%s' % field)
    if complex_cols:
        simple_cols.append('extra')
    simple_cols = [col for col in simple_cols
                   if col not in _INSTANCE_EXTRA_FIELDS]
    wants_flavor = any(attr in expected_attrs
                       for attr in ('flavor', 'old_flavor', 'new_flavor'))
    if wants_flavor and 'system_metadata' not in simple_cols:
        # While flavor data may still live in system_metadata, that
        # column must be joined whenever any flavor attr is requested.
        simple_cols.append('system_metadata')
        expected_attrs.append('system_metadata')
    return simple_cols + complex_cols
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class Instance(base.NovaPersistentObject, base.NovaObject,
               base.NovaObjectDictCompat):
    # Wire-format version history of this versioned object:
    # Version 1.0: Initial version
    # Version 1.1: Added info_cache
    # Version 1.2: Added security_groups
    # Version 1.3: Added expected_vm_state and admin_state_reset to
    #              save()
    # Version 1.4: Added locked_by and deprecated locked
    # Version 1.5: Added cleaned
    # Version 1.6: Added pci_devices
    # Version 1.7: String attributes updated to support unicode
    # Version 1.8: 'security_groups' and 'pci_devices' cannot be None
    # Version 1.9: Make uuid a non-None real string
    # Version 1.10: Added use_slave to refresh and get_by_uuid
    # Version 1.11: Update instance from database during destroy
    # Version 1.12: Added ephemeral_key_uuid
    # Version 1.13: Added delete_metadata_key()
    # Version 1.14: Added numa_topology
    # Version 1.15: PciDeviceList 1.1
    # Version 1.16: Added pci_requests
    # Version 1.17: Added tags
    # Version 1.18: Added flavor, old_flavor, new_flavor
    # Version 1.19: Added vcpu_model
    # Version 1.20: Added ec2_ids
    # Version 1.21: TagList 1.1
    VERSION = '1.21'

    fields = {
        'id': fields.IntegerField(),
        'user_id': fields.StringField(nullable=True),
        'project_id': fields.StringField(nullable=True),
        'image_ref': fields.StringField(nullable=True),
        'kernel_id': fields.StringField(nullable=True),
        'ramdisk_id': fields.StringField(nullable=True),
        'hostname': fields.StringField(nullable=True),
        'launch_index': fields.IntegerField(nullable=True),
        'key_name': fields.StringField(nullable=True),
        'key_data': fields.StringField(nullable=True),
        'power_state': fields.IntegerField(nullable=True),
        'vm_state': fields.StringField(nullable=True),
        'task_state': fields.StringField(nullable=True),
        'memory_mb': fields.IntegerField(nullable=True),
        'vcpus': fields.IntegerField(nullable=True),
        'root_gb': fields.IntegerField(nullable=True),
        'ephemeral_gb': fields.IntegerField(nullable=True),
        'ephemeral_key_uuid': fields.UUIDField(nullable=True),
        'host': fields.StringField(nullable=True),
        'node': fields.StringField(nullable=True),
        'instance_type_id': fields.IntegerField(nullable=True),
        'user_data': fields.StringField(nullable=True),
        'reservation_id': fields.StringField(nullable=True),
        'scheduled_at': fields.DateTimeField(nullable=True),
        'launched_at': fields.DateTimeField(nullable=True),
        'terminated_at': fields.DateTimeField(nullable=True),
        'availability_zone': fields.StringField(nullable=True),
        'display_name': fields.StringField(nullable=True),
        'display_description': fields.StringField(nullable=True),
        'launched_on': fields.StringField(nullable=True),
        # NOTE(jdillaman): locked deprecated in favor of locked_by,
        # to be removed in Icehouse
        'locked': fields.BooleanField(default=False),
        'locked_by': fields.StringField(nullable=True),
        'os_type': fields.StringField(nullable=True),
        'architecture': fields.StringField(nullable=True),
        'vm_mode': fields.StringField(nullable=True),
        'uuid': fields.UUIDField(),
        'root_device_name': fields.StringField(nullable=True),
        'default_ephemeral_device': fields.StringField(nullable=True),
        'default_swap_device': fields.StringField(nullable=True),
        'config_drive': fields.StringField(nullable=True),
        'access_ip_v4': fields.IPV4AddressField(nullable=True),
        'access_ip_v6': fields.IPV6AddressField(nullable=True),
        'auto_disk_config': fields.BooleanField(default=False),
        'progress': fields.IntegerField(nullable=True),
        'shutdown_terminate': fields.BooleanField(default=False),
        'disable_terminate': fields.BooleanField(default=False),
        'cell_name': fields.StringField(nullable=True),
        'metadata': fields.DictOfStringsField(),
        'system_metadata': fields.DictOfNullableStringsField(),
        'info_cache': fields.ObjectField('InstanceInfoCache',
                                         nullable=True),
        'security_groups': fields.ObjectField('SecurityGroupList'),
        'fault': fields.ObjectField('InstanceFault', nullable=True),
        'cleaned': fields.BooleanField(default=False),
        'pci_devices': fields.ObjectField('PciDeviceList', nullable=True),
        'numa_topology': fields.ObjectField('InstanceNUMATopology',
                                            nullable=True),
        'pci_requests': fields.ObjectField('InstancePCIRequests',
                                           nullable=True),
        'tags': fields.ObjectField('TagList'),
        'flavor': fields.ObjectField('Flavor'),
        'old_flavor': fields.ObjectField('Flavor', nullable=True),
        'new_flavor': fields.ObjectField('Flavor', nullable=True),
        'vcpu_model': fields.ObjectField('VirtCPUModel', nullable=True),
        'ec2_ids': fields.ObjectField('EC2Ids'),
        }

    # 'name' is computed (see the property below), not a real field.
    obj_extra_fields = ['name']

    # Maps each child-object field to (Instance version, child version)
    # pairs, used when backlevelling for older RPC clients.
    obj_relationships = {
        'fault': [('1.0', '1.0'), ('1.13', '1.2')],
        'info_cache': [('1.1', '1.0'), ('1.9', '1.4'), ('1.10', '1.5')],
        'security_groups': [('1.2', '1.0')],
        'pci_devices': [('1.6', '1.0'), ('1.15', '1.1')],
        'numa_topology': [('1.14', '1.0'), ('1.16', '1.1')],
        'pci_requests': [('1.16', '1.1')],
        'tags': [('1.17', '1.0'), ('1.21', '1.1')],
        'flavor': [('1.18', '1.1')],
        'old_flavor': [('1.18', '1.1')],
        'new_flavor': [('1.18', '1.1')],
        'vcpu_model': [('1.19', '1.0')],
        'ec2_ids': [('1.20', '1.0')],
        }
def __init__(self, *args, **kwargs):
    super(Instance, self).__init__(*args, **kwargs)
    # Snapshot the metadata dicts so in-place mutations can be detected
    # later by obj_what_changed().
    self._reset_metadata_tracking()
def _reset_metadata_tracking(self, fields=None):
    """Re-snapshot the metadata dicts so obj_what_changed() can detect
    subsequent in-place mutation of either one.

    With fields=None both snapshots are refreshed; otherwise only the
    ones named in 'fields'.
    """
    track_all = fields is None
    if track_all or 'system_metadata' in fields:
        self._orig_system_metadata = (
            dict(self.system_metadata)
            if 'system_metadata' in self else {})
    if track_all or 'metadata' in fields:
        self._orig_metadata = (
            dict(self.metadata) if 'metadata' in self else {})
def obj_reset_changes(self, fields=None):
    super(Instance, self).obj_reset_changes(fields)
    # Also refresh the metadata snapshots so later in-place edits are
    # reported as new changes.
    self._reset_metadata_tracking(fields=fields)
def obj_what_changed(self):
    """Report changed fields, additionally treating in-place mutation of
    the metadata dicts (vs. their load-time snapshots) as changes."""
    changes = super(Instance, self).obj_what_changed()
    if 'metadata' in self and self.metadata != self._orig_metadata:
        changes.add('metadata')
    sysmeta_loaded = 'system_metadata' in self
    if sysmeta_loaded and self.system_metadata != self._orig_system_metadata:
        changes.add('system_metadata')
    return changes
@classmethod
def _obj_from_primitive(cls, context, objver, primitive):
    self = super(Instance, cls)._obj_from_primitive(context, objver,
                                                    primitive)
    # Deserialized metadata becomes the new baseline for change tracking.
    self._reset_metadata_tracking()
    return self
def obj_make_compatible(self, primitive, target_version):
    """Downgrade 'primitive' in place for a client at target_version."""
    super(Instance, self).obj_make_compatible(primitive, target_version)
    target_version = utils.convert_version_to_tuple(target_version)
    unicode_attributes = ['user_id', 'project_id', 'image_ref',
                          'kernel_id', 'ramdisk_id', 'hostname',
                          'key_name', 'key_data', 'host', 'node',
                          'user_data', 'availability_zone',
                          'display_name', 'display_description',
                          'launched_on', 'locked_by', 'os_type',
                          'architecture', 'vm_mode', 'root_device_name',
                          'default_ephemeral_device',
                          'default_swap_device', 'config_drive',
                          'cell_name']
    if target_version < (1, 7):
        # NOTE(danms): Before 1.7, we couldn't handle unicode in
        # string fields, so squash it here
        for field in [x for x in unicode_attributes if x in primitive
                      and primitive[x] is not None]:
            primitive[field] = primitive[field].encode('ascii', 'replace')
    if target_version < (1, 18):
        if 'system_metadata' in primitive:
            # Pre-1.18 clients expect flavor data embedded in
            # system_metadata rather than as separate flavor fields.
            for ftype in ('', 'old_', 'new_'):
                attrname = '%sflavor' % ftype
                primitive.pop(attrname, None)
                if self[attrname] is not None:
                    flavors.save_flavor_info(
                        primitive['system_metadata'],
                        getattr(self, attrname), ftype)
@property
def name(self):
    """Instance name rendered from CONF.instance_name_template."""
    try:
        base_name = CONF.instance_name_template % self.id
    except TypeError:
        # Support templates like "uuid-%(uuid)s", etc.
        info = {}
        # NOTE(russellb): Don't use self.iteritems() here, as it will
        # result in infinite recursion on the name property.
        for key in self.fields:
            if key == 'name':
                # NOTE(danms): prevent recursion
                continue
            elif not self.obj_attr_is_set(key):
                # NOTE(danms): Don't trigger lazy-loads
                continue
            info[key] = self[key]
        try:
            base_name = CONF.instance_name_template % info
        except KeyError:
            # Template referenced an unset field; fall back to the uuid.
            base_name = self.uuid
    return base_name
@staticmethod
def _migrate_flavor(instance):
    """Migrate a fractional flavor to a full one stored in extra.

    This method migrates flavor information stored in an instance's
    system_metadata to instance_extra. Since the information in the
    former is not complete, we must attempt to fetch the original
    flavor by id to merge its extra_specs with what we store.

    This is a transitional tool and can be removed in a later release
    once we can ensure that everyone has migrated their instances
    (likely the L release).
    """
    # NOTE(danms): Always use admin context and read_deleted=yes here
    # because we need to make sure we can look up our original flavor
    # and try to reconstruct extra_specs, even if it has been deleted
    ctxt = context.get_admin_context(read_deleted='yes')
    instance.flavor = flavors.extract_flavor(instance)
    flavors.delete_flavor_info(instance.system_metadata, '')
    # old_/new_ flavors only exist during a resize; tolerate absence.
    for ftype in ('old', 'new'):
        attrname = '%s_flavor' % ftype
        prefix = '%s_' % ftype
        try:
            flavor = flavors.extract_flavor(instance, prefix)
            setattr(instance, attrname, flavor)
            flavors.delete_flavor_info(instance.system_metadata, prefix)
        except KeyError:
            setattr(instance, attrname, None)
    # NOTE(danms): Merge in the extra_specs from the original flavor
    # since they weren't stored with the instance.
    for flv in (instance.flavor, instance.new_flavor, instance.old_flavor):
        if flv is not None:
            try:
                db_flavor = objects.Flavor.get_by_flavor_id(ctxt,
                                                            flv.flavorid)
            except exception.FlavorNotFound:
                continue
            extra_specs = dict(db_flavor.extra_specs)
            extra_specs.update(flv.get('extra_specs', {}))
            flv.extra_specs = extra_specs
def _flavor_from_db(self, db_flavor):
    """Load instance flavor information from instance_extra."""
    info = jsonutils.loads(db_flavor)
    self.flavor = objects.Flavor.obj_from_primitive(info['cur'])
    old_prim = info['old']
    self.old_flavor = (objects.Flavor.obj_from_primitive(old_prim)
                       if old_prim else None)
    new_prim = info['new']
    self.new_flavor = (objects.Flavor.obj_from_primitive(new_prim)
                       if new_prim else None)
    # Loading from the DB is not a local modification; don't mark dirty.
    self.obj_reset_changes(['flavor', 'old_flavor', 'new_flavor'])
def _maybe_migrate_flavor(self, db_inst, expected_attrs):
    """Determine the proper place and format for flavor loading.

    This method loads the flavor information into the instance. If
    the information is already migrated to instance_extra, then we
    load that. If it is in system_metadata, we migrate it to extra.
    If, however, we're loading an instance for an older client and
    the flavor has already been migrated, we need to stash it back
    into system metadata, which we do here.

    This is transitional and can be removed when we remove
    _migrate_flavor().

    Returns True only when a system_metadata -> extra migration was
    actually performed.
    """
    version = utils.convert_version_to_tuple(self.VERSION)
    flavor_requested = any(
        [flavor in expected_attrs
         for flavor in ('flavor', 'old_flavor', 'new_flavor')])
    flavor_implied = (version < (1, 18) and
                      'system_metadata' in expected_attrs)
    # NOTE(danms): This is compatibility logic. If the flavor
    # attributes were requested, then we do this load/migrate
    # logic. However, if the instance is old, we might need to
    # do it anyway in order to satisfy our sysmeta-based contract.
    if not (flavor_requested or flavor_implied):
        return False
    migrated_flavor = False
    if flavor_implied:
        # This instance is from before flavors were migrated out of
        # system_metadata. Make sure that we honor that.
        instance_extra = db_inst.get('extra') or {}
        if instance_extra.get('flavor') is not None:
            # Stash the migrated flavor back into sysmeta for the
            # backlevel client, then drop the object-level attrs.
            self._flavor_from_db(instance_extra['flavor'])
            sysmeta = self.system_metadata
            flavors.save_flavor_info(sysmeta, self.flavor)
            del self.flavor
            if self.old_flavor:
                flavors.save_flavor_info(sysmeta, self.old_flavor, 'old_')
            del self.old_flavor
            if self.new_flavor:
                flavors.save_flavor_info(sysmeta, self.new_flavor, 'new_')
            del self.new_flavor
            self.system_metadata = sysmeta
    else:
        # Migrate the flavor from system_metadata to extra,
        # if needed
        instance_extra = db_inst.get('extra') or {}
        if instance_extra.get('flavor') is not None:
            self._flavor_from_db(db_inst['extra']['flavor'])
        elif 'instance_type_id' in self.system_metadata:
            self._migrate_flavor(self)
            migrated_flavor = True
    return migrated_flavor
@staticmethod
def _from_db_object(context, instance, db_inst, expected_attrs=None):
    """Method to help with migration to objects.

    Converts a database entity to a formal object.  Only the optional
    attributes named in expected_attrs are populated; everything else
    listed in INSTANCE_OPTIONAL_ATTRS is left unset (lazy-loadable).
    """
    instance._context = context
    if expected_attrs is None:
        expected_attrs = []
    # Most of the field names match right now, so be quick
    for field in instance.fields:
        if field in INSTANCE_OPTIONAL_ATTRS:
            continue
        elif field == 'deleted':
            # The DB marks deletion by setting deleted == id.
            instance.deleted = db_inst['deleted'] == db_inst['id']
        elif field == 'cleaned':
            instance.cleaned = db_inst['cleaned'] == 1
        else:
            instance[field] = db_inst[field]
    # NOTE(danms): We can be called with a dict instead of a
    # SQLAlchemy object, so we have to be careful here
    if hasattr(db_inst, '__dict__'):
        have_extra = 'extra' in db_inst.__dict__ and db_inst['extra']
    else:
        have_extra = 'extra' in db_inst and db_inst['extra']
    if 'metadata' in expected_attrs:
        instance['metadata'] = utils.instance_meta(db_inst)
    if 'system_metadata' in expected_attrs:
        instance['system_metadata'] = utils.instance_sys_meta(db_inst)
    if 'fault' in expected_attrs:
        instance['fault'] = (
            objects.InstanceFault.get_latest_for_instance(
                context, instance.uuid))
    if 'numa_topology' in expected_attrs:
        if have_extra:
            instance._load_numa_topology(
                db_inst['extra'].get('numa_topology'))
        else:
            instance.numa_topology = None
    if 'pci_requests' in expected_attrs:
        if have_extra:
            instance._load_pci_requests(
                db_inst['extra'].get('pci_requests'))
        else:
            instance.pci_requests = None
    if 'vcpu_model' in expected_attrs:
        if have_extra:
            instance._load_vcpu_model(
                db_inst['extra'].get('vcpu_model'))
        else:
            instance.vcpu_model = None
    if 'ec2_ids' in expected_attrs:
        instance._load_ec2_ids()
    if 'info_cache' in expected_attrs:
        if db_inst['info_cache'] is None:
            instance.info_cache = None
        elif not instance.obj_attr_is_set('info_cache'):
            # TODO(danms): If this ever happens on a backlevel instance
            # passed to us by a backlevel service, things will break
            instance.info_cache = objects.InstanceInfoCache(context)
        if instance.info_cache is not None:
            instance.info_cache._from_db_object(context,
                                                instance.info_cache,
                                                db_inst['info_cache'])
    migrated_flavor = instance._maybe_migrate_flavor(db_inst,
                                                     expected_attrs)
    # TODO(danms): If we are updating these on a backlevel instance,
    # we'll end up sending back new versions of these objects (see
    # above note for new info_caches
    if 'pci_devices' in expected_attrs:
        pci_devices = base.obj_make_list(
                context, objects.PciDeviceList(context),
                objects.PciDevice, db_inst['pci_devices'])
        instance['pci_devices'] = pci_devices
    if 'security_groups' in expected_attrs:
        sec_groups = base.obj_make_list(
                context, objects.SecurityGroupList(context),
                objects.SecurityGroup, db_inst['security_groups'])
        instance['security_groups'] = sec_groups
    if 'tags' in expected_attrs:
        tags = base.obj_make_list(
            context, objects.TagList(context),
            objects.Tag, db_inst['tags'])
        instance['tags'] = tags
    instance.obj_reset_changes()
    if migrated_flavor:
        # NOTE(danms): If we migrated the flavor above, we need to make
        # sure we know that flavor and system_metadata have been
        # touched so that the next save will update them. We can remove
        # this when we remove _migrate_flavor().
        instance._changed_fields.add('system_metadata')
        instance._changed_fields.add('flavor')
    return instance
@base.remotable_classmethod
def get_by_uuid(cls, context, uuid, expected_attrs=None, use_slave=False):
    """Fetch one instance by uuid, joining the requested optional attrs.

    Defaults to joining info_cache and security_groups when
    expected_attrs is not given.
    """
    if expected_attrs is None:
        expected_attrs = ['info_cache', 'security_groups']
    columns_to_join = _expected_cols(expected_attrs)
    db_inst = db.instance_get_by_uuid(context, uuid,
                                      columns_to_join=columns_to_join,
                                      use_slave=use_slave)
    return cls._from_db_object(context, cls(), db_inst,
                               expected_attrs)
@base.remotable_classmethod
def get_by_id(cls, context, inst_id, expected_attrs=None):
    """Fetch one instance by integer id, joining the requested attrs.

    Defaults to joining info_cache and security_groups when
    expected_attrs is not given.
    """
    if expected_attrs is None:
        expected_attrs = ['info_cache', 'security_groups']
    columns_to_join = _expected_cols(expected_attrs)
    db_inst = db.instance_get(context, inst_id,
                              columns_to_join=columns_to_join)
    return cls._from_db_object(context, cls(), db_inst,
                               expected_attrs)
@base.remotable
def create(self):
    """Insert this instance into the database.

    Fields that live in instance_extra (numa_topology, pci_requests,
    flavor, vcpu_model) are serialized into the 'extra' column as part
    of the same create.  Raises ObjectActionError if already created.
    """
    if self.obj_attr_is_set('id'):
        raise exception.ObjectActionError(action='create',
                                          reason='already created')
    updates = self.obj_get_changes()
    expected_attrs = [attr for attr in INSTANCE_DEFAULT_FIELDS
                      if attr in updates]
    if 'security_groups' in updates:
        # The DB layer takes group names, not objects.
        updates['security_groups'] = [x.name for x in
                                      updates['security_groups']]
    if 'info_cache' in updates:
        updates['info_cache'] = {
            'network_info': updates['info_cache'].network_info.json()
            }
    updates['extra'] = {}
    numa_topology = updates.pop('numa_topology', None)
    if numa_topology:
        expected_attrs.append('numa_topology')
        updates['extra']['numa_topology'] = numa_topology._to_json()
    pci_requests = updates.pop('pci_requests', None)
    if pci_requests:
        expected_attrs.append('pci_requests')
        updates['extra']['pci_requests'] = (
            pci_requests.to_json())
    flavor = updates.pop('flavor', None)
    if flavor:
        expected_attrs.append('flavor')
        # old_/new_flavor may legitimately be unset or None outside of
        # a resize; store None in those slots in that case.
        old = ((self.obj_attr_is_set('old_flavor') and
                self.old_flavor) and
               self.old_flavor.obj_to_primitive() or None)
        new = ((self.obj_attr_is_set('new_flavor') and
                self.new_flavor) and
               self.new_flavor.obj_to_primitive() or None)
        flavor_info = {
            'cur': self.flavor.obj_to_primitive(),
            'old': old,
            'new': new,
            }
        updates['extra']['flavor'] = jsonutils.dumps(flavor_info)
    vcpu_model = updates.pop('vcpu_model', None)
    if vcpu_model:
        expected_attrs.append('vcpu_model')
        updates['extra']['vcpu_model'] = (
            jsonutils.dumps(vcpu_model.obj_to_primitive()))
    db_inst = db.instance_create(self._context, updates)
    self._from_db_object(self._context, self, db_inst, expected_attrs)
@base.remotable
def destroy(self):
    """Delete this instance from the database.

    :raises: ObjectActionError if the object has no DB id (never
        created / already destroyed), has no uuid, or if the host
        changed underneath us while deleting.
    """
    if not self.obj_attr_is_set('id'):
        raise exception.ObjectActionError(action='destroy',
                                          reason='already destroyed')
    if not self.obj_attr_is_set('uuid'):
        raise exception.ObjectActionError(action='destroy',
                                          reason='no uuid')
    if not self.obj_attr_is_set('host') or not self.host:
        # NOTE(danms): If our host is not set, avoid a race
        constraint = db.constraint(host=db.equal_any(None))
    else:
        constraint = None
    cell_type = cells_opts.get_cell_type()
    if cell_type is not None:
        # Snapshot pre-delete state so the cells sync below sends what
        # this object looked like before _from_db_object mutates it.
        stale_instance = self.obj_clone()
    try:
        db_inst = db.instance_destroy(self._context, self.uuid,
                                      constraint=constraint)
        self._from_db_object(self._context, self, db_inst)
    except exception.ConstraintNotMet:
        # The host constraint above failed: someone scheduled us.
        raise exception.ObjectActionError(action='destroy',
                                          reason='host changed')
    if cell_type == 'compute':
        cells_api = cells_rpcapi.CellsAPI()
        cells_api.instance_destroy_at_top(self._context, stale_instance)
    # The row is gone; unset 'id' so the object reads as not-created.
    delattr(self, base.get_attrname('id'))
def _save_info_cache(self, context):
    """Persist the nested info_cache object, if one is attached."""
    cache = self.info_cache
    if not cache:
        return
    with cache.obj_alternate_context(context):
        cache.save()
def _save_security_groups(self, context):
    """Persist every attached security group under the given context."""
    for group in (self.security_groups or []):
        with group.obj_alternate_context(context):
            group.save()
    self.security_groups.obj_reset_changes()
def _save_fault(self, context):
    """Intentional no-op: faults are written elsewhere, not via save()."""
    # NOTE(danms): I don't think we need to worry about this, do we?
    pass
def _save_numa_topology(self, context):
    """Persist the instance's NUMA topology, or delete the record when
    the field is empty/None.
    """
    topology = self.numa_topology
    if not topology:
        objects.InstanceNUMATopology.delete_by_instance_uuid(
            context, self.uuid)
        return
    topology.instance_uuid = self.uuid
    with topology.obj_alternate_context(context):
        topology._save()
def _save_pci_requests(self, context):
    """Intentional no-op: PCI requests are not saved from the object yet."""
    # NOTE(danms): No need for this yet.
    pass
def _save_pci_devices(self, context):
    """Intentional no-op: only the PCI tracker may write PCI devices."""
    # NOTE(yjiang5): All devices held by PCI tracker, only PCI tracker
    # permitted to update the DB. all change to devices from here will
    # be dropped.
    pass
def _save_flavor(self, context):
    """Write the cur/old/new flavor triple into the instance_extra blob.

    No-op unless at least one of flavor/old_flavor/new_flavor is dirty.
    After a successful write the three fields are marked clean.
    """
    # Probe the changed-set once instead of once per field (the original
    # built a throwaway list and called obj_what_changed() three times).
    if not {'flavor', 'old_flavor', 'new_flavor'} & set(self.obj_what_changed()):
        return
    # FIXME(danms): We can do this smarterly by updating this
    # with all the other extra things at the same time
    flavor_info = {
        'cur': self.flavor.obj_to_primitive(),
        'old': (self.old_flavor and
                self.old_flavor.obj_to_primitive() or None),
        'new': (self.new_flavor and
                self.new_flavor.obj_to_primitive() or None),
    }
    db.instance_extra_update_by_uuid(
        context, self.uuid,
        {'flavor': jsonutils.dumps(flavor_info)})
    self.obj_reset_changes(['flavor', 'old_flavor', 'new_flavor'])
def _save_old_flavor(self, context):
    """Delegate: old_flavor is persisted with the whole flavor blob."""
    if 'old_flavor' not in self.obj_what_changed():
        return
    self._save_flavor(context)
def _save_new_flavor(self, context):
    """Delegate: new_flavor is persisted with the whole flavor blob."""
    if 'new_flavor' not in self.obj_what_changed():
        return
    self._save_flavor(context)
def _save_vcpu_model(self, context):
    """Serialize and persist the vcpu_model field when it is dirty.

    Stores NULL when the field is empty/None.
    """
    # TODO(yjiang5): should merge the db accesses for all the extra
    # fields
    if 'vcpu_model' not in self.obj_what_changed():
        return
    serialized = (jsonutils.dumps(self.vcpu_model.obj_to_primitive())
                  if self.vcpu_model else None)
    db.instance_extra_update_by_uuid(
        context, self.uuid,
        {'vcpu_model': serialized})
def _save_ec2_ids(self, context):
    """Intentional no-op: ec2_ids is derived/read-only, never persisted."""
    # NOTE(hanlind): Read-only so no need to save this.
    pass
def _maybe_upgrade_flavor(self):
    """Merge legacy sysmeta-stored flavor data into the flavor fields.

    NOTE(danms): We may have regressed to flavors stored in sysmeta,
    so we have to merge back in here. That could happen if we pass
    a converted instance to an older node, which still stores the
    flavor in sysmeta, which then calls save(). We need to not
    store that flavor info back into sysmeta after we've already
    converted it.
    """
    if (not self.obj_attr_is_set('system_metadata') or
            'instance_type_id' not in self.system_metadata):
        # No legacy flavor data present; nothing to migrate.
        return
    LOG.debug('Transforming legacy flavors on save', instance=self)
    # '' / 'old_' / 'new_' map to flavor / old_flavor / new_flavor.
    for ftype in ('', 'old_', 'new_'):
        attr = '%sflavor' % ftype
        try:
            flavor = flavors.extract_flavor(self, prefix=ftype)
            flavors.delete_flavor_info(self.system_metadata, ftype)
            # NOTE(danms): This may trigger a lazy-load of the flavor
            # information, but only once and it avoids re-fetching and
            # re-migrating the original flavor.
            getattr(self, attr).update(flavor)
        except AttributeError:
            # NOTE(review): presumably the current value is None (None
            # has no .update) — replace it outright. Confirm.
            setattr(self, attr, flavor)
        except KeyError:
            # No flavor data with this prefix in sysmeta.
            setattr(self, attr, None)
@base.remotable
def save(self, expected_vm_state=None,
         expected_task_state=None, admin_state_reset=False):
    """Save updates to this instance

    Column-wise updates will be made based on the result of
    self.what_changed(). If expected_task_state is provided,
    it will be checked against the in-database copy of the
    instance before updates are made.

    :param:context: Security context
    :param:expected_task_state: Optional tuple of valid task states
                                for the instance to be in
    :param:expected_vm_state: Optional tuple of valid vm states
                              for the instance to be in
    :param admin_state_reset: True if admin API is forcing setting
                              of task_state/vm_state
    """
    # Stash this on the instance because _cell_name_blocks_sync() is
    # useless after the db update call below.
    self._sync_cells = not self._cell_name_blocks_sync()
    context = self._context
    cell_type = cells_opts.get_cell_type()
    if cell_type is not None:
        # NOTE(comstud): We need to stash a copy of ourselves
        # before any updates are applied. When we call the save
        # methods on nested objects, we will lose any changes to
        # them. But we need to make sure child cells can tell
        # what is changed.
        #
        # We also need to nuke any updates to vm_state and task_state
        # unless admin_state_reset is True. compute cells are
        # authoritative for their view of vm_state and task_state.
        stale_instance = self.obj_clone()
    cells_update_from_api = (cell_type == 'api' and self.cell_name and
                             self._sync_cells)
    if cells_update_from_api:
        # Deferred so it can run either before or after the DB write
        # depending on whether there were any column updates.
        def _handle_cell_update_from_api():
            cells_api = cells_rpcapi.CellsAPI()
            cells_api.instance_update_from_api(context, stale_instance,
                                               expected_vm_state,
                                               expected_task_state,
                                               admin_state_reset)
    self._maybe_upgrade_flavor()
    updates = {}
    changes = self.obj_what_changed()
    for field in self.fields:
        # NOTE(danms): For object fields, we construct and call a
        # helper method like self._save_$attrname()
        if (self.obj_attr_is_set(field) and
                isinstance(self.fields[field], fields.ObjectField)):
            try:
                getattr(self, '_save_%s' % field)(context)
            except AttributeError:
                LOG.exception(_LE('No save handler for %s'), field,
                              instance=self)
            except db_exc.DBReferenceError:
                # NOTE(melwitt): This will happen if we instance.save()
                # before an instance.create() and FK constraint fails.
                # In practice, this occurs in cells during a delete of
                # an unscheduled instance. Otherwise, it could happen
                # as a result of bug.
                raise exception.InstanceNotFound(instance_id=self.uuid)
        elif field in changes:
            # Strip the cells block-sync flag before it hits the DB.
            if (field == 'cell_name' and self[field] is not None and
                    self[field].startswith(cells_utils.BLOCK_SYNC_FLAG)):
                updates[field] = self[field].replace(
                    cells_utils.BLOCK_SYNC_FLAG, '', 1)
            else:
                updates[field] = self[field]
    if not updates:
        # No column changes; still propagate to the parent cell if asked.
        if cells_update_from_api:
            _handle_cell_update_from_api()
        return
    # Cleaned needs to be turned back into an int here
    if 'cleaned' in updates:
        if updates['cleaned']:
            updates['cleaned'] = 1
        else:
            updates['cleaned'] = 0
    if expected_task_state is not None:
        if (self.VERSION == '1.9' and
                expected_task_state == 'image_snapshot'):
            # NOTE(danms): Icehouse introduced a pending state which
            # Havana doesn't know about. If we're an old instance,
            # tolerate the pending state as well
            expected_task_state = [
                expected_task_state, 'image_snapshot_pending']
        updates['expected_task_state'] = expected_task_state
    if expected_vm_state is not None:
        updates['expected_vm_state'] = expected_vm_state
    expected_attrs = [attr for attr in _INSTANCE_OPTIONAL_JOINED_FIELDS
                      if self.obj_attr_is_set(attr)]
    if 'pci_devices' in expected_attrs:
        # NOTE(danms): We don't refresh pci_devices on save right now
        expected_attrs.remove('pci_devices')
    # NOTE(alaski): We need to pull system_metadata for the
    # notification.send_update() below. If we don't there's a KeyError
    # when it tries to extract the flavor.
    # NOTE(danms): If we have sysmeta, we need flavor since the caller
    # might be expecting flavor information as a result
    if 'system_metadata' not in expected_attrs:
        expected_attrs.append('system_metadata')
        expected_attrs.append('flavor')
    old_ref, inst_ref = db.instance_update_and_get_original(
        context, self.uuid, updates,
        columns_to_join=_expected_cols(expected_attrs))
    self._from_db_object(context, self, inst_ref,
                         expected_attrs=expected_attrs)
    if cells_update_from_api:
        _handle_cell_update_from_api()
    elif cell_type == 'compute':
        if self._sync_cells:
            cells_api = cells_rpcapi.CellsAPI()
            cells_api.instance_update_at_top(context, stale_instance)
    # NOTE(danms): We have to be super careful here not to trigger
    # any lazy-loads that will unmigrate or unbackport something. So,
    # make a copy of the instance for notifications first.
    new_ref = self.obj_clone()
    notifications.send_update(context, old_ref, new_ref)
    self.obj_reset_changes()
@base.remotable
def refresh(self, use_slave=False):
    """Re-read this instance from the DB and update changed fields in place.

    Only fields already set on this object are refreshed; info_cache is
    refreshed through its own refresh() method.

    :param use_slave: if True, perform the read against a DB slave
    """
    extra = [field for field in INSTANCE_OPTIONAL_ATTRS
             if self.obj_attr_is_set(field)]
    current = self.__class__.get_by_uuid(self._context, uuid=self.uuid,
                                         expected_attrs=extra,
                                         use_slave=use_slave)
    # NOTE(danms): We orphan the instance copy so we do not unexpectedly
    # trigger a lazy-load (which would mean we failed to calculate the
    # expected_attrs properly)
    current._context = None
    for field in self.fields:
        if self.obj_attr_is_set(field):
            if field == 'info_cache':
                self.info_cache.refresh()
            elif self[field] != current[field]:
                # Only copy values that actually differ to avoid dirtying
                # unchanged fields.
                self[field] = current[field]
    self.obj_reset_changes()
def _load_generic(self, attrname):
    """Load a single optional attribute via a full get_by_uuid fetch."""
    fetched = self.__class__.get_by_uuid(self._context,
                                         uuid=self.uuid,
                                         expected_attrs=[attrname])
    # NOTE(danms): Never allow us to recursively-load
    if not fetched.obj_attr_is_set(attrname):
        raise exception.ObjectActionError(
            action='obj_load_attr',
            reason='loading %s requires recursion' % attrname)
    self[attrname] = fetched[attrname]
def _load_fault(self):
    """Populate self.fault with the most recent fault for this instance."""
    latest = objects.InstanceFault.get_latest_for_instance(
        self._context, self.uuid)
    self.fault = latest
def _load_numa_topology(self, db_topology=None):
    """Populate numa_topology, from *db_topology* when supplied,
    otherwise via a DB lookup (None when no topology exists).
    """
    if db_topology is not None:
        self.numa_topology = objects.InstanceNUMATopology.obj_from_db_obj(
            self.uuid, db_topology)
        return
    try:
        self.numa_topology = (
            objects.InstanceNUMATopology.get_by_instance_uuid(
                self._context, self.uuid))
    except exception.NumaTopologyNotFound:
        self.numa_topology = None
def _load_pci_requests(self, db_requests=None):
    """Populate pci_requests, from the serialized *db_requests* when
    supplied, otherwise via a DB lookup by instance uuid.
    """
    # FIXME: also do this if none!
    if db_requests is not None:
        self.pci_requests = objects.InstancePCIRequests.obj_from_db(
            self._context, self.uuid, db_requests)
    else:
        self.pci_requests = \
            objects.InstancePCIRequests.get_by_instance_uuid(
                self._context, self.uuid)
def _load_flavor(self):
    """Load flavor, old_flavor and new_flavor (plus a refreshed
    system_metadata) through a full instance fetch, with a legacy
    fallback for deleted instances that predate sysmeta flavors.
    """
    try:
        instance = self.__class__.get_by_uuid(
            self._context, uuid=self.uuid,
            expected_attrs=['flavor', 'system_metadata'])
    except exception.InstanceNotFound:
        # NOTE(danms): Before we had instance types in system_metadata,
        # we just looked up the instance_type_id. Since we could still
        # have an instance in the database that doesn't have either
        # newer setup, mirror the original behavior here if the instance
        # is deleted
        if not self.deleted:
            raise
        self.flavor = objects.Flavor.get_by_id(self._context,
                                               self.instance_type_id)
        self.old_flavor = None
        self.new_flavor = None
        return
    # NOTE(danms): Orphan the instance to make sure we don't lazy-load
    # anything below
    instance._context = None
    self.flavor = instance.flavor
    self.old_flavor = instance.old_flavor
    self.new_flavor = instance.new_flavor
    # NOTE(danms): The query above may have migrated the flavor from
    # system_metadata. Since we have it anyway, go ahead and refresh
    # our system_metadata from it so that a save will be accurate.
    instance.system_metadata.update(self.get('system_metadata', {}))
    self.system_metadata = instance.system_metadata
def _load_vcpu_model(self, db_vcpu_model=None):
    """Populate vcpu_model, deserializing *db_vcpu_model* (a JSON string
    from the DB row) when given, otherwise via a DB lookup.
    """
    if db_vcpu_model is None:
        self.vcpu_model = objects.VirtCPUModel.get_by_instance_uuid(
            self._context, self.uuid)
        return
    primitive = jsonutils.loads(db_vcpu_model)
    self.vcpu_model = objects.VirtCPUModel.obj_from_primitive(primitive)
def _load_ec2_ids(self):
    """Populate ec2_ids with the EC2-style identifiers for this instance."""
    self.ec2_ids = objects.EC2Ids.get_by_instance(self._context, self)
def obj_load_attr(self, attrname):
    """Lazy-load optional attribute *attrname* on first access.

    Only names in INSTANCE_OPTIONAL_ATTRS are lazy-loadable; several
    attributes have dedicated loaders, everything else falls back to a
    generic full re-fetch.

    :raises: ObjectActionError for non-lazy-loadable attributes
    :raises: OrphanedObjectError when no context is available
    """
    if attrname not in INSTANCE_OPTIONAL_ATTRS:
        raise exception.ObjectActionError(
            action='obj_load_attr',
            reason='attribute %s not lazy-loadable' % attrname)
    # 'flavor' in attrname matches flavor, old_flavor and new_flavor.
    if ('flavor' in attrname and
            self.obj_attr_is_set('system_metadata') and
            'instance_type_id' in self.system_metadata):
        # NOTE(danms): Looks like we're loading a flavor, and that
        # should be doable without a context, so do this before the
        # orphan check below.
        self._migrate_flavor(self)
        if self.obj_attr_is_set(attrname):
            return
    if not self._context:
        raise exception.OrphanedObjectError(method='obj_load_attr',
                                            objtype=self.obj_name())
    LOG.debug("Lazy-loading `%(attr)s' on %(name)s uuid %(uuid)s",
              {'attr': attrname,
               'name': self.obj_name(),
               'uuid': self.uuid,
               })
    # NOTE(danms): We handle some fields differently here so that we
    # can be more efficient
    if attrname == 'fault':
        self._load_fault()
    elif attrname == 'numa_topology':
        self._load_numa_topology()
    elif attrname == 'pci_requests':
        self._load_pci_requests()
    elif attrname == 'vcpu_model':
        self._load_vcpu_model()
    elif attrname == 'ec2_ids':
        self._load_ec2_ids()
    elif 'flavor' in attrname:
        self._load_flavor()
    else:
        # FIXME(comstud): This should be optimized to only load the attr.
        self._load_generic(attrname)
    # The freshly loaded value should not read as a pending change.
    self.obj_reset_changes([attrname])
def get_flavor(self, namespace=None):
    """Return the flavor stored under *namespace*.

    :param namespace: None for the current flavor, or a prefix such as
        'old'/'new' selecting <namespace>_flavor
    :returns: the flavor object, or None for very-legacy instances with
        no stored flavor information
    """
    attr = 'flavor' if namespace is None else '%s_flavor' % namespace
    try:
        return getattr(self, attr)
    except exception.FlavorNotFound:
        # NOTE(danms): This only happens in the case where we don't
        # have flavor information in sysmeta or extra, and doing
        # this triggers a lookup based on our instance_type_id for
        # (very) legacy instances. That legacy code expects a None here,
        # so emulate it for this helper, even though the actual attribute
        # is not nullable.
        return None
def set_flavor(self, flavor, namespace=None):
    """Attach *flavor* under *namespace* and persist immediately.

    :param flavor: a Flavor object, or a dict of Flavor constructor kwargs
    :param namespace: None for the current flavor, or a prefix such as
        'old'/'new' selecting <namespace>_flavor
    """
    attr = 'flavor' if namespace is None else '%s_flavor' % namespace
    if not isinstance(flavor, objects.Flavor):
        flavor = objects.Flavor(**flavor)
    setattr(self, attr, flavor)
    self.save()
def delete_flavor(self, namespace):
    """Clear the flavor stored under *namespace* and persist the change.

    :param namespace: falsy for the current flavor, or a prefix such as
        'old'/'new' selecting <namespace>_flavor
    """
    attr = '%s_flavor' % namespace if namespace else 'flavor'
    setattr(self, attr, None)
    self.save()
@base.remotable
def delete_metadata_key(self, key):
    """Optimized metadata delete method.

    This provides a more efficient way to delete a single metadata
    key, instead of just calling instance.save(). This should be called
    with the key still present in self.metadata, which it will update
    after completion.
    """
    db.instance_metadata_delete(self._context, self.uuid, key)
    # Remember whether metadata was already dirty before we touch it,
    # so change-tracking state can be restored below.
    md_was_changed = 'metadata' in self.obj_what_changed()
    del self.metadata[key]
    # Keep the baseline copy in sync so only pre-existing edits remain
    # flagged as changes.
    self._orig_metadata.pop(key, None)
    notifications.send_update(self._context, self, self)
    if not md_was_changed:
        self.obj_reset_changes(['metadata'])
def _cell_name_blocks_sync(self):
    """Return True when cell_name carries the BLOCK_SYNC_FLAG prefix,
    i.e. cells syncing is suppressed for this save (see skip_cells_sync).
    """
    # Return the condition directly instead of the if/return-True/
    # return-False anti-pattern; every operand already yields a bool.
    return (self.obj_attr_is_set('cell_name') and
            self.cell_name is not None and
            self.cell_name.startswith(cells_utils.BLOCK_SYNC_FLAG))
def _normalize_cell_name(self):
    """Undo skip_cell_sync()'s cell_name modification if applied"""
    if not self.obj_attr_is_set('cell_name') or self.cell_name is None:
        return
    # Preserve the pre-existing change-tracking state for cell_name.
    cn_changed = 'cell_name' in self.obj_what_changed()
    if self.cell_name.startswith(cells_utils.BLOCK_SYNC_FLAG):
        self.cell_name = self.cell_name.replace(
            cells_utils.BLOCK_SYNC_FLAG, '', 1)
        # cell_name is not normally an empty string, this means it was None
        # or unset before cells_utils.BLOCK_SYNC_FLAG was applied.
        if len(self.cell_name) == 0:
            self.cell_name = None
    if not cn_changed:
        self.obj_reset_changes(['cell_name'])
@contextlib.contextmanager
def skip_cells_sync(self):
    """Context manager to save an instance without syncing cells.

    Temporarily disables the cells syncing logic, if enabled. This should
    only be used when saving an instance that has been passed down/up from
    another cell in order to avoid passing it back to the originator to be
    re-saved.
    """
    # Prefix cell_name with the flag; save() checks it through
    # _cell_name_blocks_sync() and suppresses propagation.
    cn_changed = 'cell_name' in self.obj_what_changed()
    if not self.obj_attr_is_set('cell_name') or self.cell_name is None:
        self.cell_name = ''
    self.cell_name = '%s%s' % (cells_utils.BLOCK_SYNC_FLAG, self.cell_name)
    if not cn_changed:
        self.obj_reset_changes(['cell_name'])
    try:
        yield
    finally:
        # Always strip the flag again, even when the body raised.
        self._normalize_cell_name()
def _make_instance_list(context, inst_list, db_inst_list, expected_attrs):
    """Hydrate *inst_list* with Instance objects built from DB rows.

    :param context: security context used to build the Instance objects
    :param inst_list: an (empty) InstanceList-like object to populate
    :param db_inst_list: iterable of DB instance rows
    :param expected_attrs: optional attributes to hydrate; 'fault' is
        handled specially via one batched fault query
    :returns: inst_list, populated and with changes reset
    """
    get_fault = expected_attrs and 'fault' in expected_attrs
    inst_faults = {}
    if get_fault:
        # Build an instance_uuid:latest-fault mapping. Work on a copy of
        # expected_attrs so the caller's list is not mutated (the original
        # .remove() leaked this side effect to callers).
        expected_attrs = [attr for attr in expected_attrs
                          if attr != 'fault']
        instance_uuids = [inst['uuid'] for inst in db_inst_list]
        faults = objects.InstanceFaultList.get_by_instance_uuids(
            context, instance_uuids)
        for fault in faults:
            # First fault per uuid wins (assumed newest — matches the
            # original "latest-fault" comment; confirm ordering).
            inst_faults.setdefault(fault.instance_uuid, fault)
    inst_list.objects = []
    for db_inst in db_inst_list:
        inst_obj = objects.Instance._from_db_object(
            context, objects.Instance(context), db_inst,
            expected_attrs=expected_attrs)
        if get_fault:
            inst_obj.fault = inst_faults.get(inst_obj.uuid, None)
        inst_list.objects.append(inst_obj)
    inst_list.obj_reset_changes()
    return inst_list
@base.NovaObjectRegistry.register
class InstanceList(base.ObjectListBase, base.NovaObject):
    """A versioned list of Instance objects plus bulk DB query helpers."""
    # Version 1.0: Initial version
    # Version 1.1: Added use_slave to get_by_host
    #              Instance <= version 1.9
    # Version 1.2: Instance <= version 1.11
    # Version 1.3: Added use_slave to get_by_filters
    # Version 1.4: Instance <= version 1.12
    # Version 1.5: Added method get_active_by_window_joined.
    # Version 1.6: Instance <= version 1.13
    # Version 1.7: Added use_slave to get_active_by_window_joined
    # Version 1.8: Instance <= version 1.14
    # Version 1.9: Instance <= version 1.15
    # Version 1.10: Instance <= version 1.16
    # Version 1.11: Added sort_keys and sort_dirs to get_by_filters
    # Version 1.12: Pass expected_attrs to instance_get_active_by_window_joined
    # Version 1.13: Instance <= version 1.17
    # Version 1.14: Instance <= version 1.18
    # Version 1.15: Instance <= version 1.19
    # Version 1.16: Added get_all() method
    # Version 1.17: Instance <= version 1.20
    # Version 1.18: Instance <= version 1.21
    # Version 1.19: Removed get_hung_in_rebooting()
    VERSION = '1.19'

    fields = {
        'objects': fields.ListOfObjectsField('Instance'),
    }
    # Map of this list's version -> maximum child Instance version it
    # may carry, for RPC backlevel compatibility.
    child_versions = {
        '1.1': '1.9',
        # NOTE(danms): Instance was at 1.9 before we added this
        '1.2': '1.11',
        '1.3': '1.11',
        '1.4': '1.12',
        '1.5': '1.12',
        '1.6': '1.13',
        '1.7': '1.13',
        '1.8': '1.14',
        '1.9': '1.15',
        '1.10': '1.16',
        '1.11': '1.16',
        '1.12': '1.16',
        '1.13': '1.17',
        '1.14': '1.18',
        '1.15': '1.19',
        '1.16': '1.19',
        '1.17': '1.20',
        '1.18': '1.21',
        '1.19': '1.21',
    }

    @base.remotable_classmethod
    def get_by_filters(cls, context, filters,
                       sort_key='created_at', sort_dir='desc', limit=None,
                       marker=None, expected_attrs=None, use_slave=False,
                       sort_keys=None, sort_dirs=None):
        """Return instances matching *filters*.

        When sort_keys/sort_dirs are given they take precedence over the
        single sort_key/sort_dir pair.
        """
        if sort_keys or sort_dirs:
            db_inst_list = db.instance_get_all_by_filters_sort(
                context, filters, limit=limit, marker=marker,
                columns_to_join=_expected_cols(expected_attrs),
                use_slave=use_slave, sort_keys=sort_keys, sort_dirs=sort_dirs)
        else:
            db_inst_list = db.instance_get_all_by_filters(
                context, filters, sort_key, sort_dir, limit=limit,
                marker=marker, columns_to_join=_expected_cols(expected_attrs),
                use_slave=use_slave)
        return _make_instance_list(context, cls(), db_inst_list,
                                   expected_attrs)

    @base.remotable_classmethod
    def get_by_host(cls, context, host, expected_attrs=None, use_slave=False):
        """Return all instances on the given compute host."""
        db_inst_list = db.instance_get_all_by_host(
            context, host, columns_to_join=_expected_cols(expected_attrs),
            use_slave=use_slave)
        return _make_instance_list(context, cls(), db_inst_list,
                                   expected_attrs)

    @base.remotable_classmethod
    def get_by_host_and_node(cls, context, host, node, expected_attrs=None):
        """Return all instances on a specific host/node pair."""
        db_inst_list = db.instance_get_all_by_host_and_node(
            context, host, node,
            columns_to_join=_expected_cols(expected_attrs))
        return _make_instance_list(context, cls(), db_inst_list,
                                   expected_attrs)

    @base.remotable_classmethod
    def get_by_host_and_not_type(cls, context, host, type_id=None,
                                 expected_attrs=None):
        """Return instances on *host* that are not of flavor *type_id*.

        NOTE(review): unlike the other queries here, expected_attrs is not
        translated into DB-side joins for this call — confirm intended.
        """
        db_inst_list = db.instance_get_all_by_host_and_not_type(
            context, host, type_id=type_id)
        return _make_instance_list(context, cls(), db_inst_list,
                                   expected_attrs)

    @base.remotable_classmethod
    def get_all(cls, context, expected_attrs=None):
        """Returns all instances on all nodes."""
        db_instances = db.instance_get_all(
            context, columns_to_join=_expected_cols(expected_attrs))
        return _make_instance_list(context, cls(), db_instances,
                                   expected_attrs)

    @base.remotable_classmethod
    def _get_active_by_window_joined(cls, context, begin, end=None,
                                     project_id=None, host=None,
                                     expected_attrs=None,
                                     use_slave=False):
        """Remotable worker for get_active_by_window_joined; *begin*/*end*
        arrive as ISO timestamp strings (RPC-safe primitives).

        NOTE(review): use_slave is accepted but not forwarded to the DB
        call — confirm intended.
        """
        # NOTE(mriedem): We need to convert the begin/end timestamp strings
        # to timezone-aware datetime objects for the DB API call.
        begin = timeutils.parse_isotime(begin)
        end = timeutils.parse_isotime(end) if end else None
        db_inst_list = db.instance_get_active_by_window_joined(
            context, begin, end, project_id, host,
            columns_to_join=_expected_cols(expected_attrs))
        return _make_instance_list(context, cls(), db_inst_list,
                                   expected_attrs)

    @classmethod
    def get_active_by_window_joined(cls, context, begin, end=None,
                                    project_id=None, host=None,
                                    expected_attrs=None,
                                    use_slave=False):
        """Get instances and joins active during a certain time window.

        :param:context: nova request context
        :param:begin: datetime for the start of the time window
        :param:end: datetime for the end of the time window
        :param:project_id: used to filter instances by project
        :param:host: used to filter instances on a given compute host
        :param:expected_attrs: list of related fields that can be joined
        in the database layer when querying for instances
        :param use_slave if True, ship this query off to a DB slave
        :returns: InstanceList
        """
        # NOTE(mriedem): We have to convert the datetime objects to string
        # primitives for the remote call.
        begin = timeutils.isotime(begin)
        end = timeutils.isotime(end) if end else None
        return cls._get_active_by_window_joined(context, begin, end,
                                                project_id, host,
                                                expected_attrs,
                                                use_slave=use_slave)

    @base.remotable_classmethod
    def get_by_security_group_id(cls, context, security_group_id):
        """Return all instances belonging to a security group (by id)."""
        db_secgroup = db.security_group_get(
            context, security_group_id,
            columns_to_join=['instances.info_cache',
                             'instances.system_metadata'])
        return _make_instance_list(context, cls(), db_secgroup['instances'],
                                   ['info_cache', 'system_metadata'])

    @classmethod
    def get_by_security_group(cls, context, security_group):
        """Convenience wrapper around get_by_security_group_id."""
        return cls.get_by_security_group_id(context, security_group.id)

    def fill_faults(self):
        """Batch query the database for our instances' faults.

        :returns: A list of instance uuids for which faults were found.
        """
        uuids = [inst.uuid for inst in self]
        faults = objects.InstanceFaultList.get_by_instance_uuids(
            self._context, uuids)
        faults_by_uuid = {}
        for fault in faults:
            # Keep only the first (latest) fault per instance uuid.
            if fault.instance_uuid not in faults_by_uuid:
                faults_by_uuid[fault.instance_uuid] = fault

        for instance in self:
            if instance.uuid in faults_by_uuid:
                instance.fault = faults_by_uuid[instance.uuid]
            else:
                # NOTE(danms): Otherwise the caller will cause a lazy-load
                # when checking it, and we know there are none
                instance.fault = None
            instance.obj_reset_changes(['fault'])

        # NOTE: on Python 3 this is a dict view, not a list.
        return faults_by_uuid.keys()
| apache-2.0 |
mrtnrdl/.macdots | scripts/bin/platform-tools/systrace/catapult/devil/devil/android/tools/flash_device.py | 6 | 2471 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', '..')))
from devil.android import device_blacklist
from devil.android import device_utils
from devil.android import fastboot_utils
from devil.android.tools import script_common
from devil.constants import exit_codes
from devil.utils import run_tests_helper
logger = logging.getLogger(__name__)
def main():
    """Parse CLI args and flash the requested (or all attached) devices.

    :returns: process exit code — 0 on success, exit_codes.INFRA when
        the blacklist blocks flashing or any device fails to flash.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('build_path', help='Path to android build.')
    parser.add_argument('-d', '--device', dest='devices', action='append',
                        help='Device(s) to flash.')
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='Verbose level (multiple times for more)')
    parser.add_argument('-w', '--wipe', action='store_true',
                        help='If set, wipes user data')
    parser.add_argument('--blacklist-file', help='Device blacklist file.')
    args = parser.parse_args()
    run_tests_helper.SetLogLevel(args.verbose)

    # Refuse to flash anything at all if any device is blacklisted.
    if args.blacklist_file:
        blacklist = device_blacklist.Blacklist(args.blacklist_file).Read()
        if blacklist:
            logger.critical('Device(s) in blacklist, not flashing devices:')
            for key in blacklist:
                logger.critical(' %s', key)
            return exit_codes.INFRA

    flashed_devices = []
    failed_devices = []

    def flash(device):
        # Runs once per device, in parallel, via pMap below; a failure on
        # one device is recorded and does not abort the others.
        fastboot = fastboot_utils.FastbootUtils(device)
        try:
            fastboot.FlashDevice(args.build_path, wipe=args.wipe)
            flashed_devices.append(device)
        except Exception:  # pylint: disable=broad-except
            logger.exception('Device %s failed to flash.', str(device))
            failed_devices.append(device)

    devices = script_common.GetDevices(args.devices, args.blacklist_file)
    device_utils.DeviceUtils.parallel(devices).pMap(flash)

    if flashed_devices:
        logger.info('The following devices were flashed:')
        logger.info(' %s', ' '.join(str(d) for d in flashed_devices))
    if failed_devices:
        logger.critical('The following devices failed to flash:')
        logger.critical(' %s', ' '.join(str(d) for d in failed_devices))
        return exit_codes.INFRA
    return 0
# Script entry point: propagate main()'s exit code so callers/CI can
# detect flashing failures.
if __name__ == '__main__':
    sys.exit(main())
| unlicense |
quinot/ansible | lib/ansible/modules/system/aix_inittab.py | 46 | 7274 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Joris Weijters <joris.weijters@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
author:
- Joris Weijters (@molekuul)
module: aix_inittab
short_description: Manages the inittab on AIX
description:
- Manages the inittab on AIX.
version_added: "2.3"
options:
name:
description:
- Name of the inittab entry.
required: yes
aliases: ['service']
runlevel:
description:
- Runlevel of the entry.
required: yes
action:
description:
- Action what the init has to do with this entry.
required: yes
choices:
- boot
- bootwait
- hold
- initdefault
- off
- once
- ondemand
- powerfail
- powerwait
- respawn
- sysinit
- wait
command:
description:
- What command has to run.
required: yes
insertafter:
description:
- After which inittabline should the new entry inserted.
state:
description:
- Whether the entry should be present or absent in the inittab file.
choices: [ absent, present ]
default: present
notes:
- The changes are persistent across reboots, you need root rights to read or adjust the inittab with the C(lsitab), chitab,
C(mkitab) or C(rmitab) commands.
- Tested on AIX 7.1.
requirements:
- itertools
'''
EXAMPLES = '''
# Add service startmyservice to the inittab, directly after service existingservice.
- name: Add startmyservice to inittab
aix_inittab:
name: startmyservice
runlevel: 4
action: once
command: echo hello
insertafter: existingservice
state: present
become: yes
# Change inittab entry startmyservice to runlevel "2" and processaction "wait".
- name: Change startmyservice to inittab
aix_inittab:
name: startmyservice
runlevel: 2
action: wait
command: echo hello
state: present
become: yes
- name: Remove startmyservice from inittab
aix_inittab:
name: startmyservice
runlevel: 2
action: wait
command: echo hello
state: absent
become: yes
'''
RETURN = '''
name:
description: name of the adjusted inittab entry
returned: always
type: string
sample: startmyservice
msg:
description: action done with the inittab entry
returned: changed
type: string
sample: changed inittab entry startmyservice
changed:
description: whether the inittab changed or not
returned: always
type: boolean
sample: true
'''
# Import necessary libraries
import itertools
from ansible.module_utils.basic import AnsibleModule
# end import modules
# start defining the functions
def check_current_entry(module):
    """Look up the existing inittab entry for module.params['name'].

    :param module: AnsibleModule exposing params, get_bin_path and
        run_command
    :returns: dict with key 'exist' (bool); when the entry exists, the
        parsed 'name', 'runlevel', 'action' and 'command' fields are
        included as well.
    """
    existsdict = {'exist': False}
    lsitab = module.get_bin_path('lsitab')
    (rc, out, err) = module.run_command([lsitab, module.params['name']])
    if rc == 0:
        keys = ('name', 'runlevel', 'action', 'command')
        # lsitab prints "name:runlevel:action:command". Split on at most
        # three colons so a command containing ':' stays intact, and
        # strip the trailing newline/whitespace from every field.
        values = [value.strip() for value in out.split(":", 3)]
        # NOTE: the original used itertools.izip, which does not exist on
        # Python 3; the builtin zip works on both Python 2 and 3.
        existsdict = dict(zip(keys, values))
        existsdict.update({'exist': True})
    return existsdict
def main():
    """Module entry point: ensure an AIX inittab entry is present or
    absent, honoring check mode, and exit via AnsibleModule.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True, aliases=['service']),
            runlevel=dict(type='str', required=True),
            action=dict(type='str', choices=[
                'boot',
                'bootwait',
                'hold',
                'initdefault',
                'off',
                'once',
                'ondemand',
                'powerfail',
                'powerwait',
                'respawn',
                'sysinit',
                'wait',
            ]),
            command=dict(type='str', required=True),
            insertafter=dict(type='str'),
            state=dict(type='str', required=True, choices=['absent', 'present']),
        ),
        supports_check_mode=True,
    )

    result = {
        'name': module.params['name'],
        'changed': False,
        'msg': ""
    }

    # Locate the AIX inittab maintenance commands.
    mkitab = module.get_bin_path('mkitab')
    rmitab = module.get_bin_path('rmitab')
    chitab = module.get_bin_path('chitab')
    # rc stays 0 in check mode, so the rc checks below are no-ops then.
    rc = 0

    # Look up the entry as it currently exists (if at all).
    current_entry = check_current_entry(module)

    if module.params['state'] == 'present':
        # Build the "name:runlevel:action:command" line for (mk|ch)itab.
        new_entry = module.params['name'] + ":" + module.params['runlevel'] + \
            ":" + module.params['action'] + ":" + module.params['command']
        # Create the entry if it is missing, or change it when any of the
        # runlevel/action/command fields differ from the current entry.
        if (not current_entry['exist']) or (
                module.params['runlevel'] != current_entry['runlevel'] or
                module.params['action'] != current_entry['action'] or
                module.params['command'] != current_entry['command']):

            # The entry exists but differs: change it in place.
            if current_entry['exist']:
                if not module.check_mode:
                    (rc, out, err) = module.run_command([chitab, new_entry])
                if rc != 0:
                    module.fail_json(
                        msg="could not change inittab", rc=rc, err=err)
                result['msg'] = "changed inittab entry" + " " + current_entry['name']
                result['changed'] = True

            # The entry does not exist: create it, optionally right after
            # another entry.
            elif not current_entry['exist']:
                if module.params['insertafter']:
                    if not module.check_mode:
                        (rc, out, err) = module.run_command(
                            [mkitab, '-i', module.params['insertafter'], new_entry])
                else:
                    if not module.check_mode:
                        (rc, out, err) = module.run_command(
                            [mkitab, new_entry])
                if rc != 0:
                    module.fail_json(msg="could not adjust inittab", rc=rc, err=err)
                result['msg'] = "add inittab entry" + " " + module.params['name']
                result['changed'] = True

    elif module.params['state'] == 'absent':
        # Remove the entry only when it actually exists.
        if current_entry['exist']:
            if not module.check_mode:
                (rc, out, err) = module.run_command(
                    [rmitab, module.params['name']])
            if rc != 0:
                # Typo fix: was "could not remove entry grom inittab)".
                module.fail_json(
                    msg="could not remove entry from inittab", rc=rc, err=err)
            result['msg'] = "removed inittab entry" + " " + current_entry['name']
            result['changed'] = True

    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
tedi3231/openerp | build/lib/openerp/addons/project_issue/res_config.py | 441 | 1492 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class project_issue_settings(osv.osv_memory):
    # Transient (wizard) settings model that merges the project settings
    # screen with the fetchmail settings so issue creation from incoming
    # email can be toggled from the configuration UI.
    _name = 'project.config.settings'
    _inherit = ['project.config.settings', 'fetchmail.config.settings']
    _columns = {
        # Boolean toggle; the extra fetchmail_* keyword arguments are
        # presumably consumed by 'fetchmail.config.settings' to bind an
        # incoming mail server to the 'project.issue' model — verify
        # against the fetchmail settings wizard implementation.
        'fetchmail_issue': fields.boolean("Create issues from an incoming email account ",
            fetchmail_model='project.issue', fetchmail_name='Incoming Issues',
            help="""Allows you to configure your incoming mail server, and create issues from incoming emails."""),
    }
| agpl-3.0 |
mikeh77/mi-instrument | mi/idk/dataset_agent.py | 4 | 4826 | #!/usr/bin/env python
"""
@package mi.idk.instrument_agent IDK Instrument resource agent
@file mi/idk/instrument_agent.py
@author Bill French
@brief Specialized instrument agent for the IDK to trap events and publish.
In the IDK we don't test all the way to a data granule, but stop at the
data particle. This is so that if the IA changes its publishing format or
the parameter definition for the stream changes, our driver tests are
still valid. So we short circuit the publication handler so that it
just passes through data particles.
"""
__author__ = 'Bill French'
__license__ = 'Apache 2.0'
# ION imports.
import ion.agents.instrument.instrument_agent
from ion.agents.agent_stream_publisher import AgentStreamPublisher
from pyon.public import log
import json
import uuid
class IDKAgentStreamPublisher(AgentStreamPublisher):
    # Stream publisher used by IDK qualification tests: flushes buffered
    # data particles directly instead of packaging them into granules.
    def _publish_stream_buffer(self, stream_name):
        """
        overloaded so that data particles are published not granules

        :param stream_name: name of the stream whose buffered particles
            should be flushed to the stream's publisher
        """
        try:
            # Snapshot the buffer length up front; exactly this many
            # particles are drained even if the buffer changes later.
            buf_len = len(self._stream_buffers[stream_name])
            if buf_len == 0:
                return
            publisher = self._publishers[stream_name]
            # pop() drains from the tail of the buffer; each particle is
            # published as-is (no granule wrapping).
            for x in range(buf_len):
                particle = self._stream_buffers[stream_name].pop()
                publisher.publish(particle)
                log.info('Outgoing particle: %s', particle)
            log.info('Instrument agent %s published data particle on stream %s.',
                     self._agent._proc_name, stream_name)
            log.info('Connection id: %s, connection index: %i.',
                     self._connection_ID.hex, self._connection_index[stream_name])
        except:
            # Best-effort publish: any failure is logged, never raised.
            log.exception('Instrument agent %s could not publish data on stream %s.',
                          self._agent._proc_name, stream_name)
class InstrumentAgent(ion.agents.instrument.instrument_agent.InstrumentAgent):
    """
    This instrument agent is used in qualification tests. It overrides the
    default publishing mechanism so the agent publishes data particles.
    """
    def __init__(self, *args, **kwargs):
        # Plain pass-through to the base agent constructor.
        ion.agents.instrument.instrument_agent.InstrumentAgent.__init__(self, *args, **kwargs)
    def on_init(self):
        """
        overloaded so we can change the stream publisher object
        (IDKAgentStreamPublisher publishes particles, not granules)
        """
        super(InstrumentAgent, self).on_init()
        # Set up streams.
        self._asp = IDKAgentStreamPublisher(self)
    def _async_driver_event_sample(self, val, ts):
        '''
        Overload this method to change what is published. For driver tests we will verify that
        Data particles are built to spec so we just pass through data particles here.

        :param val: sample event, either a dict or its JSON-encoded string
        :param ts: event timestamp (unused here — presumably kept for base
            class signature compatibility; confirm against the base agent)
        '''
        # If the sample event is encoded, load it back to a dict.
        if isinstance(val, str):
            val = json.loads(val)
        try:
            stream_name = val['stream_name']
            self._asp.on_sample(val)
            log.debug('Instrument agent %s published data particle on stream %s.' % (self._proc_name, stream_name))
            log.trace('Published value: %s' % str(val))
        except Exception as e:
            # Publication is best-effort; failures are logged, not raised.
            log.error('Instrument agent %s could not publish data. Exception caught was %s',
                      self._proc_name, e)
    def _construct_packet_factories(self):
        '''
        We don't need the packet factories because we just pass the data particles through.
        Overloading this method clears some factory creation error messages.
        '''
        pass
class PublisherInstrumentAgent(ion.agents.instrument.instrument_agent.InstrumentAgent):
    """
    Overrides the default go-active process of the agent so it is forced into
    command mode. This is used for publication tests that mock input into the
    port agent and therefore have no command/response behavior.
    """
    def _handler_inactive_go_active(self, *args, **kwargs):
        """
        Overload the default go active handler so it doesn't do a discover, but instead
        forces the agent into command mode.

        :param args: optional; args[0], when present, is the comms config dict
        :return: (next_state, result) tuple expected by the agent FSM
        """
        # Skip discovery entirely: go straight to COMMAND.
        next_state = ion.agents.instrument.instrument_agent.ResourceAgentState.COMMAND
        result = None
        # Set the driver config if passed as a parameter.
        try:
            self._dvr_config['comms_config'] = args[0]
        except IndexError:
            # No comms config passed; keep whatever is already configured.
            pass
        # Connect to the device.
        dvr_comms = self._dvr_config.get('comms_config', None)
        self._dvr_client.cmd_dvr('configure', dvr_comms)
        self._dvr_client.cmd_dvr('connect')
        # Reset the connection id and index.
        #self._connection_ID = uuid.uuid4()
        #self._connection_index = {key : 0 for key in self.aparam_streams.keys()}
        self._asp.reset_connection()
        return (next_state, result)
| bsd-2-clause |
swaroop-sridhar/llilc | test/llilc_checkpass.py | 9 | 24936 | #!/usr/bin/env python
#
#title :llilc_checkpass.py
#description :
#
# llilc_checkpass checks one or two test results, no matter summary or verbose,
# and checks if any function successfully compiled by LLILC in base result (if any)
# failed to compile in the target result or if any function that was newly
# submitted for compilation failed to compile. Either of these situations
# is considered an unexpected failure. The script returns the number of
# unexpected failures, where 0 indicates that all tests passed. If the script
# fails due to an unexpected environmental issue, the return value will be
# negative, and has no relation to the number of unexpected failures.
#
# Whether or not all tests passed, the set of methods submitted for compilation
# may differ between the base result and the target result. The number of
# methods that were only submitted to the base compiler and the number of
# methods that were only submitted to the target compiler are reported on
# stdout.
#
# usage: llilc_checkpass.py [-h] -d DIFF_RESULT_PATH [-b BASE_RESULT_PATH] [-v]
# [--bs] [--ts]
#
# Check the output of a LLILC test run (optionally against a prior baseline
# run), for each test looking to see which methods LLILC reported as passing or
# failing. Reports the reason for any failures and the failing methods and
# tests. Return code is the number of unexpected failures, i.e. failures which
# did not already occur in the baseline (if given).
#
# optional arguments:
# -h, --help show this help message and exit
# -b BASE_RESULT_PATH, --base-result-path BASE_RESULT_PATH
# full path to base result
# -v, --verbose Show target failures by test
# --bs, --baseline-summary
# Show baseline failure summary
# --ts, --target-summary
# Show target failure summary. This has all target
# failures, even those that failed in the baseline (if
# any).
#
# required arguments:
# -d DIFF_RESULT_PATH, --diff-result-path DIFF_RESULT_PATH
# full path to target result
#
#==========================================================================================
import argparse
import re
import sys
import os
import const
import traceback
def collect_summary_files(root_dir):
    """Return the relative paths of all summary result files under root_dir.

    A summary result file is any file whose name ends with 'sum.txt'.
    Paths are returned relative to root_dir.
    """
    summaries = []
    for dir_path, _dir_names, file_names in os.walk(root_dir):
        rel_dir = os.path.relpath(dir_path, root_dir)
        summaries.extend(
            os.path.join(rel_dir, name)
            for name in file_names
            if name.endswith('sum.txt')
        )
    return summaries
def analyze(file_path, pattern_reg_exp):
    """Scan one summary result file.

    Returns a triple (passing, failing, fail_reason_map) where passing and
    failing are sets of method names and fail_reason_map maps each failing
    method to the reason it failed.

    pattern_reg_exp is a pre-compiled regular expression (passed in so it is
    not recompiled per file); for reference it is:

        r'^(Successfully read (.*))|(Failed to read (.*)\[(.*)\])$'
    """
    passing = set()
    failing = set()
    fail_reason_map = {}
    with open(file_path, 'r') as result_file:
        for line in result_file:
            match = pattern_reg_exp.match(line)
            if not match:
                continue
            if match.group(1):
                # "Successfully read <method>"
                passing.add(match.group(2))
            else:
                # "Failed to read <method>[<reason>]"
                method = match.group(4)
                reason = match.group(5)
                failing.add(method)
                fail_reason_map[method] = reason
    return passing, failing, fail_reason_map
def get_fail_reason_diff(target_fail_reason_map, base_fail_reason_map):
    """Compute fail reason map for target, but not including failures from base.

    :param target_fail_reason_map: dict mapping failing method -> reason (target run)
    :param base_fail_reason_map: dict mapping failing method -> reason (base run)
    :return: dict of target failures whose method does not also fail in base
    """
    # Dict comprehension replaces the original manual loop (same semantics).
    return {
        method: reason
        for method, reason in target_fail_reason_map.items()
        if method not in base_fail_reason_map
    }
def print_failures(failed_set, caption):
    """If failed_set is non-empty, print the caption followed by each method."""
    if not failed_set:
        return
    print(' ' + caption)
    for method in failed_set:
        print(' ' + method)
def report(value, description):
    """Print a 'CheckPass:'-prefixed line, formatting description with value."""
    message = ('CheckPass: ' + description).format(value)
    print(message)
def tallyfailures(method_to_reason_to_tests, reason_to_method_to_tests, test, failure_reason_map):
    """ Record failures in summary maps.

    Given a test name and a map from failing methods to reasons for the failure,
    record the failure in two maps (both mutated in place):
    - A map from method -> reason -> set(test)
    - A map from reason -> method -> set(test)

    :param method_to_reason_to_tests: dict, updated in place
    :param reason_to_method_to_tests: dict, updated in place
    :param test: name of the test the failures belong to
    :param failure_reason_map: dict mapping failing method -> reason
    """
    for method, reason in failure_reason_map.items():
        # setdefault replaces the original check-then-insert chains; the
        # resulting nested structures are identical.
        method_to_reason_to_tests \
            .setdefault(method, {}) \
            .setdefault(reason, set()) \
            .add(test)
        reason_to_method_to_tests \
            .setdefault(reason, {}) \
            .setdefault(method, set()) \
            .add(test)
def print_map_to_set(the_map, description1, description2):
    """ Print a map whose range values are sets.
    the_map is the map to print.
    description1 and description2 are format descriptors.
    description1 is used to print the key and can reference
    the key number (key_num), the key (key), and the
    number of elements in the corresponding set (value).
    description2 is used to print the elements of the set and
    can reference the item number (item_num), the item (item),
    as well as the key_num and key.
    """
    # enumerate replaces the original start-at-minus-one manual counters.
    for key_num, key in enumerate(the_map):
        the_set = the_map[key]
        print(description1.format(key_num=key_num, key=key, value=len(the_set)))
        for item_num, item in enumerate(the_set):
            print(description2.format(key_num=key_num, key=key, item_num=item_num, item=item))
def print_method_to_reason_to_tests(method_to_reason_to_tests):
    """ Print a map that is a map from method -> reason -> set(tests) """
    # enumerate replaces the original start-at-minus-one manual counter.
    for method_num, method in enumerate(method_to_reason_to_tests):
        reason_to_tests = method_to_reason_to_tests[method]
        print((' '*4 + 'method[{method_num}]={method}').format(method=method, method_num=method_num))
        print_map_to_set(reason_to_tests,
                         (' '*8 + 'reason[{key_num}]={key}, len(tests)={value}'),
                         (' '*12 + 'test[{item_num}]={item}'))
def print_reason_to_method_to_tests(reason_to_method_to_tests):
    """ Print a map that is a map from reason -> method -> set(tests) """
    # enumerate replaces the original start-at-minus-one manual counter.
    for reason_num, reason in enumerate(reason_to_method_to_tests):
        method_to_tests = reason_to_method_to_tests[reason]
        print((' '*4 + 'reason[{reason_num}]={reason}').format(reason=reason, reason_num=reason_num))
        print_map_to_set(method_to_tests,
                         (' '*8 + 'method[{key_num}]={key}'),
                         (' '*12 + 'test[{item_num}]={item}'))
def get_union_over_keys(map_to_set):
    """Compute the union of the map range values.
    For a map whose range is a set, union the range
    over keys in the map and return the resulting set.

    :param map_to_set: dict mapping keys to sets
    :return: a new set, the union of all value sets (empty set for empty map)
    """
    # set().union(*values) replaces the original manual |= fold (which also
    # carried a stray trailing semicolon); an empty map yields an empty set.
    return set().union(*map_to_set.values())
def get_reason_to_tests(reason_to_method_to_tests):
    """ Union over methods to get tests failing for a reason.

    :param reason_to_method_to_tests: dict reason -> (method -> set(tests))
    :return: dict reason -> set(tests), unioned over methods
    """
    # Dict comprehension replaces the original manual loop (same semantics).
    return {
        reason: get_union_over_keys(method_to_tests)
        for reason, method_to_tests in reason_to_method_to_tests.items()
    }
def get_tests(reason_to_tests):
    """ Get set of failing tests from a failure_to_tests map.
    Union over failure reasons given a map from reasons to test sets,
    returning the set of failing tests.

    :param reason_to_tests: dict mapping failure reason -> set(tests)
    :return: set of all failing tests, unioned over reasons
    """
    return get_union_over_keys(reason_to_tests)
def print_reason_to_tests(reason_to_tests):
    """ Print a map from reasons to sets of tests failing for that reason."""
    # Name the format strings before delegating to the generic printer.
    key_format = ' ' * 4 + 'reason[{key_num}]={key}, len(tests)={value}'
    item_format = ' ' * 8 + 'test[{item_num}]={item}'
    print_map_to_set(reason_to_tests, key_format, item_format)
def print_set(the_set, description):
    """ Print a set, using the given descriptor to caption each item.
    Description can reference the item number (item_num) and the
    item (item).
    """
    # enumerate replaces the original start-at-minus-one manual counter.
    for item_num, item in enumerate(the_set):
        print(description.format(item_num=item_num, item=item))
def main(argv):
    """ Main program for checking one or two test runs.

    :param argv: command line arguments, excluding the program name
    :return: the number of unexpected failures (0 means all passed), or a
        negative constant if the script failed for environmental reasons.
    """
    # Define return code constants
    const.GeneralError = -1
    const.UnknownArguments = -2
    const.MissingResult = -3
    # Parse the command line
    parser = argparse.ArgumentParser(description='''Check the output of a LLILC test run
    (optionally against a prior baseline run), for each test looking to see which methods LLILC
    reported as passing or failing. Reports the reason for any failures and the failing
    methods and tests. Return code is the number of unexpected failures, i.e.
    failures which did not already occur in the baseline (if given).''')
    required = parser.add_argument_group('required arguments')
    required.add_argument('-d', '--diff-result-path', type=str, required=True,
                          help='full path to target result')
    parser.add_argument('-b', '--base-result-path', type=str,
                        help='full path to base result')
    parser.add_argument('-v', '--verbose', default=False, action="store_true",
                        help='Show target failures by test')
    parser.add_argument('--bs', '--baseline-summary', default=False, action="store_true",
                        help='Show baseline failure summary')
    parser.add_argument('--ts', '--target-summary', default=False, action="store_true",
                        help='''Show target failure summary. This has all target failures,
                        even those that failed in the baseline (if any).''')
    args, unknown = parser.parse_known_args(argv)
    if unknown:
        print('Unknown argument(s): ', ', '.join(unknown))
        return const.UnknownArguments
    verbose = args.verbose
    base_summary = args.bs
    target_summary = args.ts
    # Identity test against None ('is not None') instead of '!= None'.
    have_base = args.base_result_path is not None
    try:
        # Collect a list of summary result file relative paths in base result
        if have_base:
            base_files = collect_summary_files(args.base_result_path)
        else:
            base_files = []
        # Collect a list of summary result file relative paths in target result
        target_files = collect_summary_files(args.diff_result_path)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are not swallowed; everything else is reported as GeneralError.
        e = sys.exc_info()[0]
        print('Error: CheckPass failed due to ', e)
        traceback.print_exc()
        return const.GeneralError
    # Check if results are empty
    if len(base_files) == 0:
        print('Warning: no base files, will analyze only the target')
    if len(target_files) == 0:
        print('Error: target result is empty')
        return const.MissingResult
    # We are comparing two test runs, the baseline and the "target".
    # For a given test, let
    #    - bp be the set of passing methods for the baseline, i.e. those successfully read.
    #    - bf be the set of failing methods for the baseline, i.e. those that failed to be read.
    #    - tp be the set of passing methods for the target, i.e. those successfully read.
    #    - tf be the set of failing methods for the target, i.e. those that failed to be read.
    #
    # Then the universe, u, of methods for the test is given by
    #
    #     u = bp | bf | tp | tf
    #
    # Let
    #    - bm be methods of the universe missing from the baseline, i.e. bm = u - bp - bf.
    #    - tm be methods of the universe missing from the target, i.e. tm = u - tp - tf.
    #
    # Then for the baseline {bp, bf, bm} partitions the universe, and
    # for the target {tp, tf, tm} partitions the universe.
    #
    # We can then categorize the methods by taking the cross-intersections of these partitions
    # as shown by this diagram:
    #
    #        +--------------+--------------+--------------+
    #        |      bp      |      bf      |      bm      |
    #    +---+--------------+--------------+--------------+
    #    |tp | pp = bp & tp | fp = bf & tp | mp = bm & tp |
    #    +---+--------------+--------------+--------------+
    #    |tf | pf = bp & tf | ff = bf & tf | mf = bm & tf |
    #    +---+--------------+--------------+--------------+
    #    |tm | pm = bp & tm | fm = bf & tm | mm = bm & tm |
    #    +---+--------------+--------------+--------------+
    #
    # The set mm is the methods missing from both the baseline and the
    # target, so they will also be missing from the universe, so
    # mm is empty. We here describe the remaining sets and give them
    # short nicknames
    #
    # 1. pp passed in baseline and target: "old pass"
    # 2. fp failed in baseline but passed in target: "fix pass"
    # 3. mp was missing in baseline but passed in target: "new pass"
    # 4. pf passed in baseline but failed in target: "regressing fail"
    # 5. ff failed in baseline and target: "old fail"
    # 6. mf was missing in baseline but failed in target: "new failure"
    # 7. pm passed in baseline but is missing in target: "missing pass"
    # 8. fm failed in baseline but is missing in target: "missing fail"
    #
    # The sum of categories 4 to 8 is the number of unexpected failures.
    # Clear counters for each category.
    num_pp_methods = 0
    num_fp_methods = 0
    num_mp_methods = 0
    num_pf_methods = 0
    num_ff_methods = 0
    num_mf_methods = 0
    num_pm_methods = 0
    num_fm_methods = 0
    # Maps to summarize failure tests and reasons.
    # method -> (reason -> set(tests))
    base_method_to_reason_to_tests = {}
    target_method_to_reason_to_tests = {}
    diff_method_to_reason_to_tests = {}
    # Maps reason -> (method -> set(tests))
    base_reason_to_method_to_tests = {}
    target_reason_to_method_to_tests = {}
    diff_reason_to_method_to_tests = {}
    # reason -> set(tests), summed over methods.
    base_reason_to_tests = {}
    target_reason_to_tests = {}
    diff_reason_to_tests = {}
    # This script expects input that contains lines of the form:
    # - "Successfully read <method name>" to indicate successsful compilation of
    #   a method
    # - "Failed to read <method name>[<failure reason>]" to indicate that a
    #   method failed to compile
    #
    # All other lines are ignored.
    pattern_reg_exp = re.compile(r'^(Successfully read (.*))|(Failed to read (.*)\[(.*)\])$')
    print('Checking started.')
    # Tag each summary file with where it was found:
    # 1 = base only, 2 = target only, 3 = both.
    all_files = {}
    for file in base_files:
        all_files[file] = 1
    for file in target_files:
        if file in all_files:
            all_files[file] += 2
        else:
            all_files[file] = 2
    for file in all_files:
        try:
            # Every test now has its own result directory, so use that directory to
            # identify the test.
            test = os.path.dirname(file)
            in_which = all_files[file]
            if in_which == 1:
                # Only in base files.
                missing_result_file = os.path.join(args.diff_result_path, file)
                print('Warning: diff directory does not have result file ' + missing_result_file)
                continue
            # If we get here in_which must be 2 or 3, which means we have the target file.
            target_file_path = os.path.join(args.diff_result_path, file)
            target_passing_methods, target_failing_methods, target_fail_reason_map = \
                analyze(target_file_path, pattern_reg_exp)
            if in_which == 3:
                # file processed by both old and new.
                base_file_path = os.path.join(args.base_result_path, file)
                base_passing_methods, base_failing_methods, base_fail_reason_map = \
                    analyze(base_file_path, pattern_reg_exp)
            else:
                # in_which must be 2. No base file, new result file.
                if have_base:
                    # If we do not have a base, no point in printing this out.
                    print('Info: diff directory has new result file ', file)
                base_passing_methods, base_failing_methods = set(), set()
                base_fail_reason_map = {}
            tallyfailures(target_method_to_reason_to_tests,
                          target_reason_to_method_to_tests,
                          test,
                          target_fail_reason_map)
            if have_base:
                diff_fail_reason_map = get_fail_reason_diff(target_fail_reason_map,
                                                            base_fail_reason_map)
                # Compute failure summary maps.
                tallyfailures(base_method_to_reason_to_tests,
                              base_reason_to_method_to_tests,
                              test,
                              base_fail_reason_map)
                tallyfailures(diff_method_to_reason_to_tests,
                              diff_reason_to_method_to_tests,
                              test,
                              diff_fail_reason_map)
            # All methods processed in base, whether passing or failing.
            base_processed_methods = base_passing_methods | base_failing_methods
            # All methods processed in target, whether passing or failing.
            target_processed_methods = target_passing_methods | target_failing_methods
            universe_methods = base_processed_methods | target_processed_methods
            base_missing_methods = universe_methods - base_processed_methods
            target_missing_methods = universe_methods - target_processed_methods
            # Compute the grid
            pp_methods = base_passing_methods & target_passing_methods
            fp_methods = base_failing_methods & target_passing_methods
            mp_methods = base_missing_methods & target_passing_methods
            pf_methods = base_passing_methods & target_failing_methods
            ff_methods = base_failing_methods & target_failing_methods
            mf_methods = base_missing_methods & target_failing_methods
            pm_methods = base_passing_methods & target_missing_methods
            fm_methods = base_failing_methods & target_missing_methods
            pp_methods_len = len(pp_methods)
            fp_methods_len = len(fp_methods)
            mp_methods_len = len(mp_methods)
            pf_methods_len = len(pf_methods)
            ff_methods_len = len(ff_methods)
            mf_methods_len = len(mf_methods)
            pm_methods_len = len(pm_methods)
            fm_methods_len = len(fm_methods)
            test_unexpected_failures = (pf_methods_len + mf_methods_len +
                                        pm_methods_len + fm_methods_len)
            # Boolean 'and' replaces the original bitwise '&'; the result is
            # the same for bools but this is the idiomatic connective.
            if test_unexpected_failures > 0 and verbose:
                print('Unexpected failure(s) in test: ' + test)
                if have_base:
                    print_failures(pf_methods, ' regressing fail methods: passed in baseline but failed in target')
                    print_failures(mf_methods, ' new failure methods: was missing in baseline but failed in target')
                    print_failures(pm_methods, ' missing pass methods: passed in baseline but is missing in target')
                    print_failures(fm_methods, ' missing fail methods: failed in baseline but is missing in target')
                else:
                    print_failures(mf_methods, ' failed in target')
            num_pp_methods += pp_methods_len
            num_fp_methods += fp_methods_len
            num_mp_methods += mp_methods_len
            num_pf_methods += pf_methods_len
            num_ff_methods += ff_methods_len
            num_mf_methods += mf_methods_len
            num_pm_methods += pm_methods_len
            num_fm_methods += fm_methods_len
        except Exception:
            # Narrowed from a bare 'except:'; see the earlier handler.
            e = sys.exc_info()[0]
            print('Error: CheckPass failed due to ', e)
            traceback.print_exc()
            return const.GeneralError
    num_target_failures = num_pf_methods + num_ff_methods + num_mf_methods
    target_reason_to_tests = get_reason_to_tests(target_reason_to_method_to_tests)
    target_failed_tests = get_tests(target_reason_to_tests)
    if ((not have_base) or target_summary) and (num_target_failures > 0):
        print("Target failures summaries")
        print("")
        print("  method -> reason -> set(test)")
        print_method_to_reason_to_tests(target_method_to_reason_to_tests)
        print("")
        print("  reason -> method -> set(test)")
        print_reason_to_method_to_tests(target_reason_to_method_to_tests)
        print("  reason -> set(test)")
        print_reason_to_tests(target_reason_to_tests)
        print("  Failing tests")
        print_set(target_failed_tests, (' '*4 + 'test[{item_num}]={item}'))
    if have_base:
        num_base_failures = num_fp_methods + num_ff_methods + num_fm_methods
        num_diff_failures = num_pf_methods + num_mf_methods
        base_reason_to_tests = get_reason_to_tests(base_reason_to_method_to_tests)
        base_failed_tests = get_tests(base_reason_to_tests)
        diff_reason_to_tests = get_reason_to_tests(diff_reason_to_method_to_tests)
        diff_failed_tests = get_tests(diff_reason_to_tests)
        # Boolean 'and' replaces the original bitwise '&' (same result for bools).
        if base_summary and num_base_failures > 0:
            print("Base failures summaries")
            print("")
            print("  method -> reason -> set(test)")
            print_method_to_reason_to_tests(base_method_to_reason_to_tests)
            print("")
            print("  reason -> method -> set(test)")
            print_reason_to_method_to_tests(base_reason_to_method_to_tests)
            print("  reason -> set(test)")
            print_reason_to_tests(base_reason_to_tests)
            print("  Failing tests")
            print_set(base_failed_tests, (' '*4 + 'test[{item_num}]={item}'))
        if num_diff_failures > 0:
            print("Diff failures summaries")
            print("")
            print("  method -> reason -> set(test)")
            print_method_to_reason_to_tests(diff_method_to_reason_to_tests)
            print("")
            print("  reason -> method -> set(test)")
            print_reason_to_method_to_tests(diff_reason_to_method_to_tests)
            print("  reason -> set(test)")
            print_reason_to_tests(diff_reason_to_tests)
            print("  Failing tests")
            print_set(diff_failed_tests, (' '*4 + 'test[{item_num}]={item}'))
    if have_base:
        report(num_pp_methods, '{0} old pass methods: passed in baseline and target')
        report(num_fp_methods, '{0} fix pass methods: failed in baseline but passed in target')
        report(num_mp_methods, '{0} new pass methods: was missing in baseline but passed in target')
        report(num_pf_methods, '{0} regressing fail methods: passed in baseline but failed in target')
        report(num_ff_methods, '{0} old fail methods: failed in baseline and target')
        report(num_mf_methods, '{0} new failure methods: was missing in baseline but failed in target')
        report(num_pm_methods, '{0} missing pass methods: passed in baseline but is missing in target')
        report(num_fm_methods, '{0} missing fail methods: failed in baseline but is missing in target')
    else:
        report(num_mp_methods, '{0} pass methods')
        report(num_mf_methods, '{0} failure methods')
    # NOTE(review): only categories 4 (pf) and 6 (mf) are counted here,
    # although the grid comment above says categories 4 to 8 are unexpected
    # failures; pm/fm are reported but excluded from the return value.
    # Preserved as-is because callers may depend on the exit code.
    unexpected_failures = (num_pf_methods + num_mf_methods)
    if have_base:
        report(unexpected_failures, '{0} total failing methods excluding old fails')
    if unexpected_failures == 0:
        print('There were no unexpected failures.')
    else:
        print('There were unexpected failures.')
    return unexpected_failures
if __name__ == '__main__':
    # Propagate main()'s unexpected-failure count (or negative error code)
    # as the process exit status.
    return_code = main(sys.argv[1:])
    sys.exit(return_code)
| mit |
eoyilmaz/anima | anima/env/mayaEnv/reference.py | 1 | 28756 | # -*- coding: utf-8 -*-
from anima.utils import do_db_setup
from pymel import core as pm
class Reference(object):
"""supplies reference related tools
"""
    @classmethod
    def select_reference_in_reference_editor(cls):
        """selects the reference node in the reference editor related to the
        scene selection
        """
        selection = pm.selected()
        if not selection:
            return
        node = selection[0]
        ref_file = node.referenceFile()
        if ref_file is None:
            # selected node is not referenced; nothing to select
            return
        ref_node = ref_file.refNode
        # gather reference editor data by probing item indices until the
        # sceneEditor raises RuntimeError (no more items)
        ref_editor_panel = pm.mel.globals['gReferenceEditorPanel']
        ref_editor_data = {}
        i = 0 # be safe
        while True and i < 1000:  # hard cap guards against an endless probe
            try:
                pm.sceneEditor(ref_editor_panel, e=1, selectItem=i)
            except RuntimeError:
                break
            sel_ref_node_name = \
                pm.sceneEditor(ref_editor_panel, q=1, selectReference=1)[0]
            ref_editor_data[i] = pm.PyNode(sel_ref_node_name)
            i += 1
        # select the editor item whose reference node matches the selection
        for i, ref_editor_ref_node in ref_editor_data.items():
            if ref_editor_ref_node == ref_node:
                pm.sceneEditor(ref_editor_panel, e=1, selectItem=i)
@classmethod
def get_no_parent_transform(cls, ref):
"""returns the top most parent node in the given subReferences
:param ref: pm.nt.FileReference instance
"""
all_referenced_nodes = ref.nodes()
for node in all_referenced_nodes:
if isinstance(node, pm.nt.Transform):
#print('%s has parent' % node.name())
parent_node = node.getParent()
if parent_node not in all_referenced_nodes:
return node
# check sub references
sub_refs = pm.listReferences(ref)
for sub_ref in sub_refs:
no_parent_transform = cls.get_no_parent_transform(sub_ref)
if no_parent_transform:
return no_parent_transform
    @classmethod
    def duplicate_selected_reference(cls):
        """duplicates the selected referenced object as reference

        :return: list of top-most transform nodes of the duplicated
            references (also left as the scene selection)
        """
        # collect the distinct references behind the current selection
        all_selected_refs = []
        for sel_node in pm.ls(sl=1):
            ref = sel_node.referenceFile()
            if ref not in all_selected_refs:
                all_selected_refs.append(ref)
        select_list = []
        for ref in all_selected_refs:
            # get the highest parent ref
            if ref.parent():
                while ref.parent():
                    ref = ref.parent()
            namespace = ref.namespace
            # create a second reference to the same file, same namespace
            # (Maya will uniquify it)
            dup_ref = pm.createReference(
                ref.path,
                gl=True,
                namespace=namespace,
                options='v=0'
            )
            top_parent = cls.get_no_parent_transform(ref)
            if top_parent:
                node = top_parent
                # copy world-space transform from the original top node
                tra = pm.xform(node, q=1, ws=1, t=1)
                rot = pm.xform(node, q=1, ws=1, ro=1)
                sca = pm.xform(node, q=1, ws=1, s=1)
                new_top_parent_node = cls.get_no_parent_transform(dup_ref)
                pm.xform(new_top_parent_node, ws=1, t=tra)
                pm.xform(new_top_parent_node, ws=1, ro=rot)
                pm.xform(new_top_parent_node, ws=1, s=sca)
                # parent to the same group
                group = node.getParent()
                if group:
                    pm.parent(new_top_parent_node, group)
                # select the top node
                select_list.append(new_top_parent_node)
        pm.select(select_list)
        return select_list
# @classmethod
# def duplicate_rs_proxy(cls):
# """duplicates the given rs proxy object
# """
# for node in pm.selected(type='transform'):
# shape = node.getShape()
# if not shape:
# continue
# # get the rs proxy node
# shape_inputs = shape.inMesh.inputs()
# proxy_node = None
# for n in shape_inputs:
# if isinstance(n, pm.nt.RedshiftProxyMesh):
# proxy_node = n
# break
# # now duplicate the hierarchy
# dup = pm.duplicate(node)
# # this will create the shape but will not create the proxy
# new_proxy_node = pm.nt.RedshiftProxyMesh()
# new_proxy_node.
    @classmethod
    def publish_model_as_look_dev(cls):
        """Publishes Model versions as LookDev versions of the same task.
        Also handles references etc.

        Raises RuntimeError when: the user is not logged in to Stalker, the
        current scene is not a published 'Main'-take Model version, or the
        sibling LookDev task does not exist.
        """
        #
        # Create LookDev for Current Model Task
        #
        from stalker import Task, Version, Type, LocalSession
        from stalker.db.session import DBSession
        from anima import defaults
        from anima.env import mayaEnv
        do_db_setup()
        m = mayaEnv.Maya()
        local_session = LocalSession()
        logged_in_user = local_session.logged_in_user
        if not logged_in_user:
            raise RuntimeError('Please login to Stalker')
        model_type = Type.query.filter(Type.name=="Model").first()
        look_dev_type = \
            Type.query.filter(Type.name=="Look Development").first()
        # preconditions: current scene must be a published Main-take Model
        current_version = m.get_current_version()
        model_task = current_version.task
        if model_task.type != model_type:
            raise RuntimeError('This is not a Model version')
        if not current_version.is_published:
            raise RuntimeError('Please Publish this maya scene')
        if current_version.take_name != 'Main':
            raise RuntimeError('This is not the Main take')
        # find lookDev task: sibling of the model task (same parent)
        look_dev = Task.query\
            .filter(Task.parent == model_task.parent)\
            .filter(Task.type == look_dev_type).first()
        if not look_dev:
            raise RuntimeError(
                'There is no LookDev task, please inform your Stalker admin'
            )
        previous_look_dev_version = \
            Version.query\
                .filter(Version.task == look_dev)\
                .filter(Version.take_name == 'Main')\
                .first()
        description = 'Auto Created By %s ' % logged_in_user.name
        take_name = defaults.version_take_name
        if not previous_look_dev_version:
            # no LookDev version yet: build the first one in a fresh scene
            # do the trick
            pm.newFile(f=1)
            # create a new version
            new_version = Version(
                task=look_dev,
                description=description,
                take_name=take_name,
                created_by=logged_in_user
            )
            new_version.is_published = True
            m.save_as(new_version)
            # reference the model version
            pm.createReference(
                current_version.absolute_full_path,
                gl=True,
                namespace=current_version.nice_name,
                options='v=0'
            )
            pm.saveFile()
            DBSession.add(new_version)
        else:
            # LookDev exists: open its latest version and refresh the
            # referenced model; only save a new version if anything changed
            latest_look_dev_version = previous_look_dev_version.latest_version
            reference_resolution = m.open(latest_look_dev_version, force=True,
                                          skip_update_check=True)
            m.update_versions(reference_resolution)
            if reference_resolution['update'] \
               or reference_resolution['create']:
                # create a new version
                new_version = Version(
                    task=look_dev,
                    description=description,
                    take_name=take_name,
                    created_by=logged_in_user,
                    parent=latest_look_dev_version
                )
                new_version.is_published = True
                m.save_as(new_version)
        # reopen model scene
        m.open(current_version, force=True, skip_update_check=True)
@classmethod
def get_selected_reference_path(cls):
"""prints the path of the selected reference path
"""
selection = pm.ls(sl=1)
if len(selection):
node = selection[0]
ref = node.referenceFile()
if ref:
print(ref.path)
parent_ref = ref.parent()
while parent_ref is not None:
print(parent_ref.path)
parent_ref = parent_ref.parent()
@classmethod
def open_reference_in_new_maya(cls):
"""opens the selected references in new maya session
"""
import subprocess
selection = pm.ls(sl=1)
if len(selection):
node = selection[0]
ref = node.referenceFile()
if ref:
process = subprocess.Popen(
['maya', ref.path],
stderr=subprocess.PIPE
)
    @classmethod
    def fix_reference_namespace(cls):
        """fixes reference namespace

        Asks for confirmation when the scene holds many references (the
        operation is slow), requires a logged-in Stalker user, and commits
        the versions touched by the fix to the database.
        """
        ref_count = len(pm.listReferences(recursive=True))
        if ref_count > 25:
            # warn the user: fixing this many references takes a long time
            result = pm.windows.confirmBox(
                'Fix Reference Namespace',
                'You have %s references in your scene,\n'
                'this will take too much time\n\nIs that Ok?' % ref_count
            )
            if not result:
                return
        from stalker import LocalSession
        from anima.env import mayaEnv
        m = mayaEnv.Maya()
        local_session = LocalSession()
        logged_in_user = local_session.logged_in_user
        if not logged_in_user:
            raise RuntimeError('Please login before running the script')
        versions = m.fix_reference_namespaces()
        # attribute the newly created versions to the current user
        for version in versions:
            version.created_by = logged_in_user
        from stalker.db.session import DBSession
        DBSession.commit()
@classmethod
def fix_reference_paths(cls):
    """Fixes reference paths that are not using environment vars

    Finds parent references that contain a child whose unresolved path
    is not anchored to the ``$REPO`` environment variable, re-opens and
    re-saves each of them through the pipeline (which rewrites the
    paths), then restores the originally open scene.
    """
    # list current scene references
    from anima.env import mayaEnv
    m_env = mayaEnv.Maya()
    current_version = m_env.get_current_version()
    all_refs = pm.listReferences(recursive=True)
    # Collect the *parents* of offending references: the parent scene
    # is the file that must be re-saved to fix its child's path.
    refs_with_wrong_prefix = []
    for ref in all_refs:
        if '$REPO' not in ref.unresolvedPath():
            parent = ref.parent()
            if parent:
                refs_with_wrong_prefix.append(parent)
    ref_paths = [ref.path for ref in refs_with_wrong_prefix]
    for ref_path in ref_paths:
        version = m_env.get_version_from_full_path(ref_path)
        if version:
            # Opening through the pipeline normalizes the reference
            # paths; saving persists the fix into the referenced file.
            m_env.open(version, force=True, skip_update_check=True)
            pm.saveFile()
    # Restore the scene the user started in, if we navigated away.
    if pm.env.sceneName() != current_version.absolute_full_path:
        m_env.open(current_version, force=True, skip_update_check=True)
@classmethod
def fix_student_license_on_references(cls):
    """Strip the student-license marker from every top level referenced
    file, reloading only the references that were actually modified.
    """
    for reference in pm.listReferences():
        if cls.fix_student_license(reference.path):
            # Unload/load cycle so Maya picks up the cleaned file.
            reference.unload()
            reference.load()
@classmethod
def fix_student_license_on_selected_file(cls):
    """Fix the student license error on a user-selected Maya scene file.

    Pops up a file dialog, runs :meth:`fix_student_license` on the
    chosen file and reports the outcome to the user.  Returns silently
    when the user cancels the dialog.
    """
    # pm.fileDialog2 returns None when the dialog is cancelled; guard
    # before indexing the result (the old code raised TypeError here).
    # The dead chained assignment to an unused ``texture_path`` local
    # was also removed.
    dialog_result = pm.fileDialog2(
        cap="Choose Maya Scene", okc="Choose", fm=1
    )
    if not dialog_result:
        return
    file_path = dialog_result[0]
    result = cls.fix_student_license(file_path)
    if result:
        pm.informBox('Done!', 'Fixed:\n\n%s' % file_path)
    else:
        pm.informBox(
            'Fail!', 'No Student License Found on\n\n%s' % file_path
        )
@classmethod
def fix_student_license(cls, path):
    """Remove the Maya student-license marker line from a scene file.

    Only the first 200 lines are scanned (the marker lives in the file
    header).  The original file is backed up next to itself with an
    ``.orig`` suffix before being rewritten.

    :param str path: Path to an ASCII Maya scene file.
    :return: True when a student license line was found and removed,
        False otherwise.
    """
    import shutil
    with open(path, 'r') as f:
        data = f.readlines()
    # Clamp the scan range so files shorter than 200 lines do not
    # raise an IndexError (the old code always scanned range(200)).
    for i in range(min(200, len(data))):
        if 'student' in data[i].lower():
            # backup the file before touching it
            shutil.copy(path, '%s.orig' % path)
            data.pop(i)
            print('Fixed: %s' % path)
            with open(path, 'w') as f:
                f.writelines(data)
            return True
    return False
@classmethod
def archive_current_scene(cls):
    """Create an archive of the currently open scene.

    Delegates the work to ``anima.utils.archive.archive_current_scene``,
    feeding it the current version and a Maya specific archiver.
    """
    from anima.env.mayaEnv import Maya
    from anima.env.mayaEnv.archive import Archiver
    from anima.utils.archive import archive_current_scene
    archive_current_scene(Maya().get_current_version(), Archiver())
@classmethod
def bind_to_original(cls):
    """binds the current scene references to original references from the
    repository

    For every top level reference, looks up a Version in the current
    project whose ``full_path`` ends with the reference's file name and
    repoints the reference at that repository copy, using an OS
    independent ($REPO based) path.
    """
    # get all reference paths
    import os
    from anima.env import mayaEnv
    from stalker import Repository, Version, Task
    m = mayaEnv.Maya()
    current_version = m.get_current_version()

    # get the current project
    project = None
    if current_version:
        project = current_version.task.project

    # no project then do nothing
    if project:
        for ref in pm.listReferences():
            unresolved_path = ref.unresolvedPath()
            filename = os.path.basename(unresolved_path)
            # find the corresponding version
            # NOTE(review): matching is purely on the file-name suffix
            # within the project; identically named files would match
            # whichever row the query returns first.
            v = Version.query\
                .join(Version.task, Task.versions)\
                .filter(Task.project == project)\
                .filter(Version.full_path.endswith(filename)).first()
            if v:
                ref.replaceWith(
                    Repository.to_os_independent_path(
                        v.absolute_full_path
                    )
                )
@classmethod
def unload_selected_references(cls):
    """Unload the topmost ancestor reference of every selected object.

    Non-referenced nodes in the selection are ignored; each topmost
    reference is unloaded only once.
    """
    to_unload = []
    for node in pm.ls(sl=1):
        ref = node.referenceFile()
        if not ref:
            # plain scene node, nothing to unload
            continue
        # Climb to the topmost ancestor reference; the counter guards
        # against pathological (overly deep or cyclic) parent chains.
        guard = 0
        parent = ref.parent()
        while parent and guard < 100:
            ref = parent
            parent = ref.parent()
            guard += 1
        if ref not in to_unload:
            to_unload.append(ref)
    for ref in to_unload:
        ref.unload()
@classmethod
def remove_selected_references(cls):
    """Remove, after user confirmation, the topmost ancestor reference
    of every selected object.

    Non-referenced nodes in the selection are ignored; each topmost
    reference is removed only once.
    """
    to_remove = []
    for node in pm.ls(sl=1):
        ref = node.referenceFile()
        if not ref:
            # plain scene node, nothing to remove
            continue
        # Climb to the topmost ancestor reference; the counter guards
        # against pathological (overly deep or cyclic) parent chains.
        guard = 0
        parent = ref.parent()
        while parent and guard < 100:
            ref = parent
            parent = ref.parent()
            guard += 1
        if ref not in to_remove:
            to_remove.append(ref)
    # Removing a reference is destructive, so ask first.
    response = pm.confirmDialog(
        title='Remove Selected References?',
        message="Remove selected references\n\n%s" % "\n".join(
            str(r) for r in to_remove
        ),
        button=['Yes', 'No'],
        defaultButton='No',
        cancelButton='No',
        dismissString='No'
    )
    if response == 'No':
        return
    for ref in to_remove:
        ref.remove()
@classmethod
def unload_unselected_references(cls):
    """unloads the references that is not related to the selected objects

    Keeps loaded: every reference a selected node comes from, plus all
    of their ancestor references.  Everything else is unloaded.
    """
    import copy
    selected_references = []

    # store selected references
    for node in pm.ls(sl=1):
        ref = node.referenceFile()
        if ref is not None and ref not in selected_references:
            selected_references.append(ref)

    # Snapshot before extending, so the ancestor walk below does not
    # iterate over entries appended during the walk.
    temp_selected_references = copy.copy(selected_references)

    # store parent references
    # Ancestors must stay loaded too: unloading a parent would
    # implicitly unload the selected child reference.
    for ref in temp_selected_references:
        parent_ref = ref.parent()
        if parent_ref is not None \
           and parent_ref not in selected_references:
            while parent_ref is not None:
                if parent_ref not in selected_references:
                    selected_references.append(parent_ref)
                parent_ref = parent_ref.parent()

    # now unload all the other references
    # reversed() visits deepest references first, so children are
    # unloaded before their parents.
    for ref in reversed(pm.listReferences(recursive=1)):
        if ref not in selected_references:
            ref.unload()
@classmethod
def to_base(cls):
    """Switch the related references to the Base representation."""
    cls.to_repr('Base')

@classmethod
def to_gpu(cls):
    """Switch the related references to the GPU representation."""
    cls.to_repr('GPU')

@classmethod
def to_ass(cls):
    """Switch the related references to the ASS representation."""
    cls.to_repr('ASS')

@classmethod
def to_rs(cls):
    """Switch the related references to the RS representation."""
    cls.to_repr('RS')
@classmethod
def to_repr(cls, repr_name):
    """replaces the related references with the given representation

    :param str repr_name: Desired representation name
    """
    # get apply to: 1 = selected objects, 2 = all references
    apply_to = \
        pm.radioButtonGrp('repr_apply_to_radio_button_grp', q=1, sl=1)
    if apply_to == 1:
        # work on every selected object
        selection = pm.ls(sl=1)

        # collect reference files first
        references = []
        for node in selection:
            ref = node.referenceFile()
            if ref is None:
                # the node is not coming from a reference; skip it
                # instead of crashing on ``None.topmost_parent()``
                continue
            # get the topmost parent
            ref = ref.topmost_parent()
            if ref not in references:
                references.append(ref)

        from anima.env.mayaEnv.repr_tools import RepresentationGenerator

        # now go over each reference
        for ref in references:
            if not ref.is_repr(repr_name):
                # Walk up the parent chain until a look-dev or
                # vegetation task is found, and convert that one.
                parent_ref = ref
                while parent_ref is not None:
                    # check if it is a look dev node
                    v = parent_ref.version
                    if v:
                        task = v.task
                        if RepresentationGenerator.is_look_dev_task(task) \
                           or RepresentationGenerator.is_vegetation_task(task):
                            # convert it to repr
                            parent_ref.to_repr(repr_name)
                            break
                        else:
                            # go to parent ref
                            parent_ref = parent_ref.parent()
                    else:
                        parent_ref = parent_ref.parent()
    elif apply_to == 2:
        # apply to all references
        for ref in pm.listReferences():
            ref.to_repr(repr_name)
@classmethod
def generate_repr_of_scene_caller(cls):
    """helper method to call Reference.generate_repr_of_scene() with data
    coming from UI
    """
    # Pass genuine booleans: generate_repr_of_scene() compares the
    # flags with ``is True``, and the previous int 1 fails that
    # identity check, silently disabling the skip_existing logic.
    generate_gpu = bool(
        pm.checkBoxGrp('generate_repr_types_checkbox_grp', q=1, v1=1))
    generate_ass = bool(
        pm.checkBoxGrp('generate_repr_types_checkbox_grp', q=1, v2=1))
    generate_rs = bool(
        pm.checkBoxGrp('generate_repr_types_checkbox_grp', q=1, v3=1))
    skip_existing = \
        pm.checkBox('generate_repr_skip_existing_checkBox', q=1, v=1)
    cls.generate_repr_of_scene(
        generate_gpu,
        generate_ass,
        generate_rs,
        skip_existing
    )
@classmethod
def generate_repr_of_scene(cls,
                           generate_gpu=True,
                           generate_ass=True,
                           generate_rs=True,
                           skip_existing=False):
    """generates desired representations of this scene

    :param generate_gpu: Generate the GPU representation.
    :param generate_ass: Generate the ASS representation.
    :param generate_rs: Generate the RS representation.
    :param skip_existing: Skip representation types that already exist
        as child versions of the current version.
    """
    from anima.ui.progress_dialog import ProgressDialogManager
    from anima.env.mayaEnv import Maya, repr_tools, auxiliary
    reload(auxiliary)
    reload(repr_tools)

    response = pm.confirmDialog(
        title='Do Create Representations?',
        message='Create all Repr. for this scene?',
        button=['Yes', 'No'],
        defaultButton='No',
        cancelButton='No',
        dismissString='No'
    )
    if response == 'No':
        return

    # register a new caller
    from anima.env.mayaEnv import MayaMainProgressBarWrapper
    wrp = MayaMainProgressBarWrapper()
    pdm = ProgressDialogManager(dialog=wrp)

    m_env = Maya()
    source_version = m_env.get_current_version()
    gen = repr_tools.RepresentationGenerator()

    # open each version
    from stalker import Version
    if skip_existing:
        # check if there is a GPU or ASS repr
        # generated from this version
        child_versions = \
            Version.query.filter(Version.parent == source_version).all()
        for cv in child_versions:
            # Use truthiness instead of ``is True`` so integer flags
            # (e.g. 1 coming from UI code) are handled correctly.
            if generate_gpu and '@GPU' in cv.take_name:
                generate_gpu = False
            if generate_ass and '@ASS' in cv.take_name:
                generate_ass = False
            if generate_rs and '@RS' in cv.take_name:
                generate_rs = False

    # bools (and 0/1 ints) sum to the number of enabled steps
    total_number_of_reprs = generate_gpu + generate_ass + generate_rs
    caller = pdm.register(total_number_of_reprs, title='Generate Reprs')

    gen.version = source_version

    # generate representations
    if generate_gpu:
        gen.generate_gpu()
        caller.step()
    if generate_ass:
        gen.generate_ass()
        caller.step()
    if generate_rs:
        gen.generate_rs()
        caller.step()

    # now open the source version again
    m_env.open(source_version, force=True, skip_update_check=True)
@classmethod
def generate_repr_of_all_references_caller(cls):
    """Collect representation options from the UI widgets and forward
    them to ``generate_repr_of_all_references()``.
    """
    type_grp = 'generate_repr_types_checkbox_grp'
    cls.generate_repr_of_all_references(
        pm.checkBoxGrp(type_grp, q=1, v1=1),
        pm.checkBoxGrp(type_grp, q=1, v2=1),
        pm.checkBoxGrp(type_grp, q=1, v3=1),
        pm.checkBox('generate_repr_skip_existing_checkBox', q=1, v=1)
    )
@classmethod
def generate_repr_of_all_references(cls,
                                    generate_gpu=True,
                                    generate_ass=True,
                                    generate_rs=True,
                                    skip_existing=False):
    """generates all representations of all references of this scene

    :param generate_gpu: Generate the GPU representations.
    :param generate_ass: Generate the ASS representations.
    :param generate_rs: Generate the RS representations.
    :param skip_existing: Per reference, skip representation types that
        already exist as child versions.
    """
    from anima.ui.progress_dialog import ProgressDialogManager
    from anima.env.mayaEnv import Maya, repr_tools, auxiliary
    reload(auxiliary)
    reload(repr_tools)

    paths_visited = []
    versions_to_visit = []
    versions_cannot_be_published = []

    # generate a sorted version list
    # and visit each reference only once
    from anima.env.mayaEnv import MayaMainProgressBarWrapper
    wrp = MayaMainProgressBarWrapper()
    pdm = ProgressDialogManager(dialog=wrp)

    # A progress window is only possible outside of batch mode.
    use_progress_window = False
    if not pm.general.about(batch=1):
        use_progress_window = True

    all_refs = pm.listReferences(recursive=True)
    pdm.use_ui = use_progress_window
    caller = pdm.register(len(all_refs), 'List References')

    # Deepest references first, deduplicated by file path.
    for ref in reversed(all_refs):
        ref_path = str(ref.path)
        caller.step(message=ref_path)
        if ref_path not in paths_visited:
            v = ref.version
            if v is not None:
                paths_visited.append(ref_path)
                versions_to_visit.append(v)

    response = pm.confirmDialog(
        title='Do Create Representations?',
        message='Create all Repr. for all %s FileReferences?'
                % len(versions_to_visit),
        button=['Yes', 'No'],
        defaultButton='No',
        cancelButton='No',
        dismissString='No'
    )
    if response == 'No':
        return

    # register a new caller
    caller = pdm.register(max_iteration=len(versions_to_visit),
                          title='Generate Reprs')

    m_env = Maya()
    source_version = m_env.get_current_version()
    gen = repr_tools.RepresentationGenerator()

    # open each version
    from stalker import Version
    for v in versions_to_visit:
        local_generate_gpu = generate_gpu
        local_generate_ass = generate_ass
        local_generate_rs = generate_rs

        # check if this is a repr
        if '@' in v.take_name:
            # use the parent
            v = v.parent
            if not v:
                continue

        if skip_existing:
            # check if there is a GPU or ASS repr
            # generated from this version
            child_versions = Version.query.filter(Version.parent == v).all()
            for cv in child_versions:
                if local_generate_gpu is True and '@GPU' in cv.take_name:
                    local_generate_gpu = False
                if local_generate_ass is True and '@ASS' in cv.take_name:
                    local_generate_ass = False
                if local_generate_rs is True and '@RS' in cv.take_name:
                    local_generate_rs = False

        gen.version = v
        # generate representations
        # A RuntimeError from a generator marks the version as
        # unpublishable but does not stop the batch.
        if local_generate_gpu:
            try:
                gen.generate_gpu()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)

        if local_generate_ass:
            try:
                gen.generate_ass()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)

        if local_generate_rs:
            try:
                gen.generate_rs()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)

        caller.step()

    # now open the source version again
    m_env.open(source_version, force=True, skip_update_check=True)

    # and generate representation for the source
    gen.version = source_version

    # generate representations
    # Only touch the source scene when every reference succeeded.
    if not versions_cannot_be_published:
        if generate_gpu:
            gen.generate_gpu()
        if generate_ass:
            gen.generate_ass()
        if generate_rs:
            gen.generate_rs()
    else:
        pm.confirmDialog(
            title='Error',
            message='The following versions can not be published '
                    '(check script editor):\n\n%s' % (
                '\n'.join(
                    map(lambda x: x.nice_name,
                        versions_cannot_be_published)
                )
            ),
            button=['OK'],
            defaultButton='OK',
            cancelButton='OK',
            dismissString='OK'
        )
pm.error(
'\n'.join(
map(lambda x: x.absolute_full_path,
versions_cannot_be_published)
)
) | mit |
111t8e/h2o-2 | py/testdir_single_jvm/test_delete_all_keys.py | 9 | 1634 | import unittest, time, sys, random
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
    """H2O key-removal test: parse datasets, then verify that removing
    all keys leaves the cloud with nothing to delete.

    NOTE(review): Python 2 source (print statements); loop nesting was
    reconstructed from a whitespace-mangled dump -- confirm against the
    original file.
    """

    def tearDown(self):
        # Fail the test if H2O logged any errors during the run.
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        # Single-node cloud with a 10 GB heap (the 20x covtype parse
        # presumably needs the head room).
        h2o.init(1,java_heap_GB=10)

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()

    def test_delete_all_keys(self):
        # FIX! should have some model keys in here too, from RF etc.
        importFolderPath = 'standard'
        timeoutSecs = 500
        csvFilenameAll = [
            "covtype.data",
            "covtype20x.data",
        ]
        # csvFilenameList = random.sample(csvFilenameAll,1)
        csvFilenameList = csvFilenameAll

        for trial in range(3):
            for csvFilename in csvFilenameList:
                csvPathname = importFolderPath + "/" + csvFilename
                start = time.time()
                parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=csvPathname, timeoutSecs=500)
                elapsed = time.time() - start
                print csvFilename, "parsed in", elapsed, "seconds.", "%d pct. of timeout" % ((elapsed*100)/timeoutSecs), "\n"
                print "Parse result['destination_key']:", parseResult['destination_key']
                print "\n" + csvFilename
                # Delete everything after each parse, then verify the
                # node-level sweep finds nothing left.
                print "Delete all keys"
                h2o.nodes[0].remove_all_keys()
                print "This shouldn't see any keys"
                h2i.delete_keys_at_all_nodes()
            print "\nTrial", trial, "completed\n"
# Allow running this test module directly via the h2o test bootstrap.
if __name__ == '__main__':
    h2o.unit_main()
| apache-2.0 |
orchidinfosys/odoo | addons/purchase/__openerp__.py | 19 | 2592 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Purchase Management',
'version': '1.2',
'category': 'Purchase Management',
'sequence': 60,
'summary': 'Purchase Orders, Receipts, Vendor Bills',
'description': """
Manage goods requirement by Purchase Orders easily
==================================================
Purchase management enables you to track your vendors' price quotations and convert them into purchase orders if necessary.
Odoo has several methods of monitoring invoices and tracking the receipt of ordered goods. You can handle partial deliveries in Odoo, so you can keep track of items that are still to be delivered in your orders, and you can issue reminders automatically.
Odoo's replenishment management rules enable the system to generate draft purchase orders automatically, or you can configure it to run a lean process driven entirely by current production needs.
Dashboard / Reports for Purchase Management will include:
---------------------------------------------------------
* Request for Quotations
* Purchase Orders Waiting Approval
* Monthly Purchases by Category
* Receipt Analysis
* Purchase Analysis
""",
'website': 'https://www.odoo.com/page/purchase',
'depends': ['stock_account', 'report', 'web_tip'],
'data': [
'security/purchase_security.xml',
'security/ir.model.access.csv',
'invoice_view.xml',
'purchase_sequence.xml',
'company_view.xml',
'purchase_data.xml',
'purchase_data.yml',
'purchase_report.xml',
'purchase_view.xml',
'stock_view.xml',
'partner_view.xml',
'report/purchase_report_view.xml',
'data/mail_template_data.xml',
'res_config_view.xml',
'purchase_tip_data.xml',
'views/report_purchaseorder.xml',
'views/report_purchasequotation.xml',
],
'test': [
'../account/test/account_minimal_test.xml',
'test/stock_valuation_account.xml',
'test/ui/purchase_users.yml',
'test/process/run_scheduler.yml',
'test/fifo_price.yml',
'test/fifo_returns.yml',
# 'test/costmethodchange.yml',
'test/process/cancel_order.yml',
'test/ui/duplicate_order.yml',
'test/ui/delete_order.yml',
'test/average_price.yml',
],
'demo': [
'purchase_order_demo.yml',
'purchase_demo.xml',
'purchase_stock_demo.yml',
],
'installable': True,
'auto_install': False,
'application': True,
}
| gpl-3.0 |
joke2k/faker | faker/providers/geo/__init__.py | 1 | 70104 | from decimal import Decimal
from .. import BaseProvider
localized = True
class Provider(BaseProvider):
"""
land_coords data extracted from geonames.org, under the Creative Commons Attribution 3.0 License.
Coordinates are in decimal format for mapping purposes.
Country code is in Alpha 2 format (https://www.nationsonline.org/oneworld/country_code_list.htm).
Timezones are canonical (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones).
"""
land_coords = (
("42.50729", "1.53414", "les Escaldes", "AD", "Europe/Andorra"),
("36.21544", "65.93249", "Sar-e Pul", "AF", "Asia/Kabul"),
("40.49748", "44.7662", "Hrazdan", "AM", "Asia/Yerevan"),
("-11.78333", "19.91667", "Luena", "AO", "Africa/Luanda"),
("-37.32167", "-59.13316", "Tandil", "AR", "America/Argentina/Buenos_Aires"),
("-34.74785", "-58.70072", "Pontevedra", "AR", "America/Argentina/Buenos_Aires"),
("-34.64966", "-58.38341", "Barracas", "AR", "America/Argentina/Buenos_Aires"),
("-54.8", "-68.3", "Ushuaia", "AR", "America/Argentina/Ushuaia"),
("-31.25033", "-61.4867", "Rafaela", "AR", "America/Argentina/Cordoba"),
("-31.4488", "-60.93173", "Esperanza", "AR", "America/Argentina/Cordoba"),
("-34.64167", "-60.47389", "Chacabuco", "AR", "America/Argentina/Buenos_Aires"),
("-27.4338", "-65.61427", "Aguilares", "AR", "America/Argentina/Tucuman"),
("47.05", "15.46667", "Sankt Peter", "AT", "Europe/Vienna"),
("48.25", "16.4", "Floridsdorf", "AT", "Europe/Vienna"),
("-31.95224", "115.8614", "Perth", "AU", "Australia/Perth"),
("-37.9", "145.18333", "Wheelers Hill", "AU", "Australia/Melbourne"),
("-33.88096", "151.07986", "Strathfield", "AU", "Australia/Sydney"),
("-34.88422", "150.60036", "Nowra", "AU", "Australia/Sydney"),
("-25.54073", "152.70493", "Maryborough", "AU", "Australia/Brisbane"),
("-34.28853", "146.05093", "Griffith", "AU", "Australia/Sydney"),
("-33.79176", "151.08057", "Eastwood", "AU", "Australia/Sydney"),
("-37.88333", "145.06667", "Carnegie", "AU", "Australia/Melbourne"),
("-33.75881", "150.99292", "Baulkham Hills", "AU", "Australia/Sydney"),
("-27.50578", "153.10236", "Carindale", "AU", "Australia/Brisbane"),
("-32.05251", "115.88782", "Willetton", "AU", "Australia/Perth"),
("-38.16604", "145.13643", "Frankston South", "AU", "Australia/Melbourne"),
("38.45598", "48.87498", "Astara", "AZ", "Asia/Baku"),
("41.09246", "45.36561", "Qazax", "AZ", "Asia/Baku"),
("44.75874", "19.21437", "Bijeljina", "BA", "Europe/Sarajevo"),
("23.9028", "89.11943", "Kushtia", "BD", "Asia/Dhaka"),
("22.83957", "91.84128", "Manikchari", "BD", "Asia/Dhaka"),
("50.8", "3.16667", "Wevelgem", "BE", "Europe/Brussels"),
("51.12794", "4.21372", "Temse", "BE", "Europe/Brussels"),
("50.71229", "4.52529", "Rixensart", "BE", "Europe/Brussels"),
("50.74497", "3.20639", "Mouscron", "BE", "Europe/Brussels"),
("51.24197", "4.82313", "Lille", "BE", "Europe/Brussels"),
("51.03427", "5.37429", "Houthalen", "BE", "Europe/Brussels"),
("50.56149", "4.69889", "Gembloux", "BE", "Europe/Brussels"),
("50.88506", "4.07601", "Denderleeuw", "BE", "Europe/Brussels"),
("51.21187", "4.25633", "Beveren", "BE", "Europe/Brussels"),
("41.57439", "24.71204", "Smolyan", "BG", "Europe/Sofia"),
("43.4125", "23.225", "Montana", "BG", "Europe/Sofia"),
("42.7", "27.25", "Aytos", "BG", "Europe/Sofia"),
("8.88649", "2.59753", "Tchaourou", "BJ", "Africa/Porto-Novo"),
("-21.44345", "-65.71875", "Tupiza", "BO", "America/La_Paz"),
("-0.71667", "-48.52333", "Soure", "BR", "America/Belem"),
("-8.05389", "-34.88111", "Recife", "BR", "America/Recife"),
("-4.42472", "-41.45861", "Pedro II", "BR", "America/Fortaleza"),
("-3.14306", "-58.44417", "Itacoatiara", "BR", "America/Manaus"),
("-4.16694", "-40.7475", "Guaraciaba do Norte", "BR", "America/Fortaleza"),
("-8.66667", "-35.71667", "Catende", "BR", "America/Recife"),
("-8.28333", "-35.03333", "Cabo", "BR", "America/Recife"),
("-4.24444", "-42.29444", "Barras", "BR", "America/Fortaleza"),
("-3.20333", "-52.20639", "Altamira", "BR", "America/Santarem"),
("-20.87306", "-48.29694", "Viradouro", "BR", "America/Sao_Paulo"),
("-22.97056", "-46.99583", "Valinhos", "BR", "America/Sao_Paulo"),
("-10.95817", "-38.79084", "Tucano", "BR", "America/Bahia"),
("-28.81833", "-52.51028", "Soledade", "BR", "America/Sao_Paulo"),
("-23.44361", "-51.87389", "Sarandi", "BR", "America/Sao_Paulo"),
("-22.45667", "-47.53028", "Santa Gertrudes", "BR", "America/Sao_Paulo"),
("-11.48472", "-37.93278", "Rio Real", "BR", "America/Bahia"),
("-19.32556", "-41.25528", "Resplendor", "BR", "America/Sao_Paulo"),
("-26.22861", "-52.67056", "Pato Branco", "BR", "America/Sao_Paulo"),
("-25.42944", "-50.00639", "Palmeira", "BR", "America/Sao_Paulo"),
("-12.91667", "-39.25", "Muritiba", "BR", "America/Bahia"),
("-21.41222", "-42.19667", "Miracema", "BR", "America/Sao_Paulo"),
("-28.44917", "-52.2", "Marau", "BR", "America/Sao_Paulo"),
("-22.92306", "-53.13722", "Loanda", "BR", "America/Sao_Paulo"),
("-10.91722", "-37.65", "Lagarto", "BR", "America/Maceio"),
("-19.72806", "-50.19556", "Iturama", "BR", "America/Sao_Paulo"),
("-21.205", "-41.88778", "Itaperuna", "BR", "America/Sao_Paulo"),
("-20.25333", "-43.80139", "Itabirito", "BR", "America/Sao_Paulo"),
("-28.24", "-48.67028", "Imbituba", "BR", "America/Sao_Paulo"),
("-22.53722", "-42.98194", "Guapimirim", "BR", "America/Sao_Paulo"),
("-19.7625", "-44.31389", "Esmeraldas", "BR", "America/Sao_Paulo"),
("-25.42778", "-49.27306", "Curitiba", "BR", "America/Sao_Paulo"),
("-14.66463", "-52.35558", "Nova Xavantina", "BR", "America/Cuiaba"),
("-29.2975", "-51.50361", "Carlos Barbosa", "BR", "America/Sao_Paulo"),
("-15.675", "-38.94722", "Canavieiras", "BR", "America/Bahia"),
("-17.74431", "-48.62789", "Caldas Novas", "BR", "America/Sao_Paulo"),
("-23.7975", "-48.59278", "Buri", "BR", "America/Sao_Paulo"),
("-10.90889", "-37.03861", "Barra dos Coqueiros", "BR", "America/Maceio"),
("-22.57306", "-47.1725", "Artur Nogueira", "BR", "America/Sao_Paulo"),
("-10.91111", "-37.07167", "Aracaju", "BR", "America/Maceio"),
("-21.42917", "-45.94722", "Alfenas", "BR", "America/Sao_Paulo"),
("-8.76194", "-63.90389", "Porto Velho", "BR", "America/Porto_Velho"),
("-21.44236", "27.46153", "Tonota", "BW", "Africa/Gaborone"),
("55.1904", "30.2049", "Vitebsk", "BY", "Europe/Minsk"),
("53.5942", "25.8191", "Novogrudok", "BY", "Europe/Minsk"),
("52.4089", "31.3237", "Dobrush", "BY", "Europe/Minsk"),
("45.43341", "-73.86586", "Beaconsfield", "CA", "America/Toronto"),
("46.23899", "-63.13414", "Charlottetown", "CA", "America/Halifax"),
("45.4473", "-73.75335", "Dorval", "CA", "America/Toronto"),
("49.88307", "-119.48568", "Kelowna", "CA", "America/Vancouver"),
("43.86682", "-79.2663", "Markham", "CA", "America/Toronto"),
("42.8334", "-80.38297", "Norfolk County", "CA", "America/Toronto"),
("45.44868", "-73.81669", "Pointe-Claire", "CA", "America/Toronto"),
("45.40008", "-73.58248", "Sainte-Catherine", "CA", "America/Toronto"),
("53.51684", "-113.3187", "Sherwood Park", "CA", "America/Edmonton"),
("50.26729", "-119.27337", "Vernon", "CA", "America/Vancouver"),
("46.1351", "-60.1831", "Sydney", "CA", "America/Glace_Bay"),
("0.76755", "24.43973", "Yangambi", "CD", "Africa/Lubumbashi"),
("-8.73508", "24.99798", "Kamina", "CD", "Africa/Lubumbashi"),
("0.49113", "29.47306", "Beni", "CD", "Africa/Lubumbashi"),
("-4.5833", "15.16554", "Kasangulu", "CD", "Africa/Kinshasa"),
("4.94273", "15.87735", "Carnot", "CF", "Africa/Bangui"),
("-4.26613", "15.28318", "Brazzaville", "CG", "Africa/Brazzaville"),
("46.18396", "6.10237", "Onex", "CH", "Europe/Zurich"),
("47.30997", "8.52462", "Adliswil", "CH", "Europe/Zurich"),
("5.84752", "-5.682", "Lakota", "CI", "Africa/Abidjan"),
("5.27247", "-3.59625", "Bonoua", "CI", "Africa/Abidjan"),
("-33.59217", "-70.6996", "San Bernardo", "CL", "America/Santiago"),
("-30.60106", "-71.19901", "Ovalle", "CL", "America/Santiago"),
("-32.45242", "-71.23106", "La Ligua", "CL", "America/Santiago"),
("-36.9256", "-73.02841", "Chiguayante", "CL", "America/Santiago"),
("4.96667", "10.7", "Tonga", "CM", "Africa/Douala"),
("3.51667", "11.5", "Mbalmayo", "CM", "Africa/Douala"),
("4.2475", "9.00472", "Idenao", "CM", "Africa/Douala"),
("46.51872", "86.00214", "Hoxtolgay", "CN", "Asia/Urumqi"),
("36.81667", "117.81667", "Zhoucun", "CN", "Asia/Shanghai"),
("34.86472", "117.55417", "Zaozhuang", "CN", "Asia/Shanghai"),
("23.73333", "114.68333", "Heyuan", "CN", "Asia/Shanghai"),
("34.65918", "109.22921", "Yanliang", "CN", "Asia/Shanghai"),
("38.40917", "112.73333", "Xinzhou", "CN", "Asia/Shanghai"),
("33.78333", "114.51667", "Wacheng", "CN", "Asia/Shanghai"),
("27.85", "112.9", "Xiangtan", "CN", "Asia/Shanghai"),
("37.19723", "122.05228", "Tianfu", "CN", "Asia/Shanghai"),
("34.85", "117.33333", "Taozhuang", "CN", "Asia/Shanghai"),
("35.64889", "117.27583", "Sishui", "CN", "Asia/Shanghai"),
("27.34089", "117.4831", "Shaowu", "CN", "Asia/Shanghai"),
("37.30553", "120.82747", "Zhuangyuan", "CN", "Asia/Shanghai"),
("35.50056", "117.63083", "Pingyi", "CN", "Asia/Shanghai"),
("27.92333", "118.53333", "Pucheng", "CN", "Asia/Shanghai"),
("24.28859", "116.11768", "Meizhou", "CN", "Asia/Shanghai"),
("37.65181", "120.33063", "Longgang", "CN", "Asia/Shanghai"),
("23.29549", "113.82465", "Licheng", "CN", "Asia/Shanghai"),
("36.19278", "117.65694", "Laiwu", "CN", "Asia/Shanghai"),
("30.35028", "112.19028", "Jingzhou", "CN", "Asia/Shanghai"),
("32.50611", "120.14278", "Jiangyan", "CN", "Asia/Shanghai"),
("30.24706", "115.04814", "Huangshi", "CN", "Asia/Shanghai"),
("37.73222", "115.70111", "Hengshui", "CN", "Asia/Shanghai"),
("28.88162", "120.03308", "Guli", "CN", "Asia/Shanghai"),
("23.02677", "113.13148", "Foshan", "CN", "Asia/Shanghai"),
("35.85", "117.7", "Dongdu", "CN", "Asia/Shanghai"),
("32.54278", "111.50861", "Danjiangkou", "CN", "Asia/Shanghai"),
("35.20889", "111.73861", "Changzhi", "CN", "Asia/Shanghai"),
("34.56861", "105.89333", "Beidao", "CN", "Asia/Shanghai"),
("29.98869", "122.20488", "Zhoushan", "CN", "Asia/Shanghai"),
("40.66482", "122.22833", "Yingkou", "CN", "Asia/Shanghai"),
("46.08333", "122.08333", "Ulanhot", "CN", "Asia/Shanghai"),
("45.35", "126.28333", "Shuangcheng", "CN", "Asia/Shanghai"),
("41.09822", "120.74792", "Nanpiao", "CN", "Asia/Shanghai"),
("41.27194", "123.17306", "Liaoyang", "CN", "Asia/Shanghai"),
("41.94175", "123.50266", "Hushitai", "CN", "Asia/Shanghai"),
("40.85158", "122.74754", "Haicheng", "CN", "Asia/Shanghai"),
("42.64031", "125.51176", "Dongfeng", "CN", "Asia/Shanghai"),
("45.75279", "130.57211", "Boli", "CN", "Asia/Shanghai"),
("31.64615", "120.74221", "Changshu City", "CN", "Asia/Shanghai"),
("7.83389", "-72.47417", "Villa del Rosario", "CO", "America/Bogota"),
("6.46838", "-73.26022", "Socorro", "CO", "America/Bogota"),
("8.79577", "-75.69947", "San Carlos", "CO", "America/Bogota"),
("10.98778", "-74.95472", "Puerto Colombia", "CO", "America/Bogota"),
("4.73245", "-74.26419", "Madrid", "CO", "America/Bogota"),
("5.20856", "-74.73584", "Honda", "CO", "America/Bogota"),
("10.15031", "-73.9614", "El Copey", "CO", "America/Bogota"),
("3.8801", "-77.03116", "Buenaventura", "CO", "America/Bogota"),
("5.6561", "-75.87877", "Andes", "CO", "America/Bogota"),
("9.92787", "-84.13722", "San Rafael", "CR", "America/Costa_Rica"),
("10.63504", "-85.43772", "Liberia", "CR", "America/Costa_Rica"),
("23.15678", "-81.24441", "Varadero", "CU", "America/Havana"),
("20.14298", "-77.43532", "Media Luna", "CU", "America/Havana"),
("23.04419", "-82.00919", "Jaruco", "CU", "America/Havana"),
("22.98212", "-80.58556", "Corralillo", "CU", "America/Havana"),
("23.0072", "-82.4017", "Boyeros", "CU", "America/Havana"),
("50.50301", "13.63617", "Most", "CZ", "Europe/Prague"),
("50.23271", "12.87117", "Karlovy Vary", "CZ", "Europe/Prague"),
("51.04962", "12.1369", "Zeitz", "DE", "Europe/Berlin"),
("52.59319", "13.32127", "Wittenau", "DE", "Europe/Berlin"),
("50.82709", "6.9747", "Wesseling", "DE", "Europe/Berlin"),
("50.9803", "11.32903", "Weimar", "DE", "Europe/Berlin"),
("52.86147", "9.5926", "Walsrode", "DE", "Europe/Berlin"),
("51.88333", "8.51667", "Verl", "DE", "Europe/Berlin"),
("48.07667", "8.64409", "Trossingen", "DE", "Europe/Berlin"),
("48.78232", "9.17702", "Stuttgart", "DE", "Europe/Berlin"),
("53.59337", "9.47629", "Stade", "DE", "Europe/Berlin"),
("50.80019", "7.20769", "Siegburg", "DE", "Europe/Berlin"),
("51.21667", "6.26667", "Schwalmtal", "DE", "Europe/Berlin"),
("54.52156", "9.5586", "Schleswig", "DE", "Europe/Berlin"),
("50.72043", "11.34046", "Rudolstadt", "DE", "Europe/Berlin"),
("48.49144", "9.20427", "Reutlingen", "DE", "Europe/Berlin"),
("51.20219", "7.36027", "Radevormwald", "DE", "Europe/Berlin"),
("48.46458", "9.22796", "Pfullingen", "DE", "Europe/Berlin"),
("51.30001", "13.10984", "Oschatz", "DE", "Europe/Berlin"),
("51.47805", "6.8625", "Oberhausen", "DE", "Europe/Berlin"),
("50.23805", "8.86704", "Nidderau", "DE", "Europe/Berlin"),
("48.73218", "11.18709", "Neuburg an der Donau", "DE", "Europe/Berlin"),
("47.98372", "10.18527", "Memmingen", "DE", "Europe/Berlin"),
("50.80904", "8.77069", "Marburg an der Lahn", "DE", "Europe/Berlin"),
("49.5099", "6.74549", "Losheim", "DE", "Europe/Berlin"),
("48.52961", "12.16179", "Landshut", "DE", "Europe/Berlin"),
("51.19139", "6.51352", "Korschenbroich", "DE", "Europe/Berlin"),
("52.2", "8.63333", "Kirchlengern", "DE", "Europe/Berlin"),
("50.23019", "8.77155", "Karben", "DE", "Europe/Berlin"),
("50.09019", "8.4493", "Hofheim am Taunus", "DE", "Europe/Berlin"),
("52.61131", "13.31783", "Hermsdorf", "DE", "Europe/Berlin"),
("48.35149", "8.96317", "Hechingen", "DE", "Europe/Berlin"),
("53.63333", "9.85", "Halstenbek", "DE", "Europe/Berlin"),
("52.21099", "7.02238", "Gronau", "DE", "Europe/Berlin"),
("52.47774", "10.5511", "Gifhorn", "DE", "Europe/Berlin"),
("48.06919", "11.37703", "Gauting", "DE", "Europe/Berlin"),
("48.35693", "10.98461", "Friedberg", "DE", "Europe/Berlin"),
("51.168", "7.973", "Finnentrop", "DE", "Europe/Berlin"),
("49.13645", "8.91229", "Eppingen", "DE", "Europe/Berlin"),
("48.28259", "9.72749", "Ehingen", "DE", "Europe/Berlin"),
("52.4581", "13.28702", "Dahlem", "DE", "Europe/Berlin"),
("51.08468", "7.11393", "Burscheid", "DE", "Europe/Berlin"),
("49.03685", "8.70745", "Bretten", "DE", "Europe/Berlin"),
("49.68369", "8.61839", "Bensheim", "DE", "Europe/Berlin"),
("53.94313", "10.30215", "Bad Segeberg", "DE", "Europe/Berlin"),
("50.64336", "7.2278", "Bad Honnef", "DE", "Europe/Berlin"),
("49.97704", "9.15214", "Aschaffenburg", "DE", "Europe/Berlin"),
("48.21644", "9.02596", "Albstadt", "DE", "Europe/Berlin"),
("52.53048", "13.29371", "Charlottenburg-Nord", "DE", "Europe/Berlin"),
("53.6052", "10.03988", "Barmbek-Nord", "DE", "Europe/Berlin"),
("11.15583", "42.7125", "'Ali Sabieh", "DJ", "Africa/Djibouti"),
("55.67938", "12.53463", "Frederiksberg", "DK", "Europe/Copenhagen"),
("18.20854", "-71.10077", "Santa Cruz de Barahona", "DO", "America/Santo_Domingo"),
("36.76639", "3.47717", "Boumerdas", "DZ", "Africa/Algiers"),
("36.72544", "3.55665", "Thenia", "DZ", "Africa/Algiers"),
("34.15429", "3.50309", "Messaad", "DZ", "Africa/Algiers"),
("35.21222", "2.31889", "Ksar Chellala", "DZ", "Africa/Algiers"),
("35.06544", "1.04945", "Frenda", "DZ", "Africa/Algiers"),
("36.06386", "4.62744", "El Achir", "DZ", "Africa/Algiers"),
("36.76775", "2.95924", "Cheraga", "DZ", "Africa/Algiers"),
("36.27462", "4.85668", "Bordj Zemoura", "DZ", "Africa/Algiers"),
("36.61954", "4.08282", "Beni Douala", "DZ", "Africa/Algiers"),
("-2.13404", "-79.59415", "Milagro", "EC", "America/Guayaquil"),
("-2.90055", "-79.00453", "Cuenca", "EC", "America/Guayaquil"),
("59.37722", "28.19028", "Narva", "EE", "Europe/Tallinn"),
("26.67319", "31.4976", "Juhaynah", "EG", "Africa/Cairo"),
("31.20176", "29.91582", "Alexandria", "EG", "Africa/Cairo"),
("39.96348", "-4.83076", "Talavera de la Reina", "ES", "Europe/Madrid"),
("37.35813", "-6.03731", "San Juan de Aznalfarache", "ES", "Europe/Madrid"),
("38.68712", "-4.10734", "Puertollano", "ES", "Europe/Madrid"),
("38.38479", "-0.76773", "Novelda", "ES", "Europe/Madrid"),
("27.76056", "-15.58602", "Maspalomas", "ES", "Atlantic/Canary"),
("38.47917", "-1.325", "Jumilla", "ES", "Europe/Madrid"),
("38.96667", "-0.18333", "Gandia", "ES", "Europe/Madrid"),
("38.10558", "-1.86343", "Caravaca", "ES", "Europe/Madrid"),
("37.49073", "-2.77259", "Baza", "ES", "Europe/Madrid"),
("42.64685", "-5.55835", "Villaquilambre", "ES", "Europe/Madrid"),
("42.06166", "-1.60452", "Tudela", "ES", "Europe/Madrid"),
("40.42386", "-3.53261", "San Fernando de Henares", "ES", "Europe/Madrid"),
("41.15612", "1.10687", "Reus", "ES", "Europe/Madrid"),
("41.91738", "3.1631", "Palafrugell", "ES", "Europe/Madrid"),
("43.32686", "-2.98884", "Leioa", "ES", "Europe/Madrid"),
("43.31667", "-2.68333", "Gernika-Lumo", "ES", "Europe/Madrid"),
("43.48961", "-8.2194", "Ferrol", "ES", "Europe/Madrid"),
("41.63976", "2.35739", "Cardedeu", "ES", "Europe/Madrid"),
("40.70995", "0.57856", "Amposta", "ES", "Europe/Madrid"),
("37.13548", "-3.67029", "Las Gabias", "ES", "Europe/Madrid"),
("42.8139", "-1.64295", "Segundo Ensanche", "ES", "Europe/Madrid"),
("41.41204", "2.18247", "el Camp de l'Arpa del Clot", "ES", "Europe/Madrid"),
("11.85", "38.01667", "Debre Tabor", "ET", "Africa/Addis_Ababa"),
("6.03333", "37.55", "Arba Minch", "ET", "Africa/Addis_Ababa"),
("65.84811", "24.14662", "Tornio", "FI", "Europe/Helsinki"),
("60.18427", "24.95034", "Kallio", "FI", "Europe/Helsinki"),
("60.2052", "24.6522", "Espoo", "FI", "Europe/Helsinki"),
("45.51667", "4.86667", "Vienne", "FR", "Europe/Paris"),
("44.92801", "4.8951", "Valence", "FR", "Europe/Paris"),
("44.80477", "-0.59543", "Talence", "FR", "Europe/Paris"),
("48.77644", "2.29026", "Sceaux", "FR", "Europe/Paris"),
("50.75", "2.25", "Saint-Omer", "FR", "Europe/Paris"),
("45.69558", "4.7934", "Saint-Genis-Laval", "FR", "Europe/Paris"),
("48.8765", "2.18967", "Rueil-Malmaison", "FR", "Europe/Paris"),
("48", "-4.1", "Quimper", "FR", "Europe/Paris"),
("43.11667", "1.6", "Pamiers", "FR", "Europe/Paris"),
("46.32313", "-0.45877", "Niort", "FR", "Europe/Paris"),
("43.61092", "3.87723", "Montpellier", "FR", "Europe/Paris"),
("48.98333", "2.61667", "Mitry-Mory", "FR", "Europe/Paris"),
("48.86667", "2.08333", "Marly-le-Roi", "FR", "Europe/Paris"),
("46.67535", "5.55575", "Lons-le-Saunier", "FR", "Europe/Paris"),
("43.32393", "5.4584", "Les Olives", "FR", "Europe/Paris"),
("48.8222", "2.12213", "Le Chesnay", "FR", "Europe/Paris"),
("48.90472", "2.2469", "La Garenne-Colombes", "FR", "Europe/Paris"),
("48.98994", "2.1699", "Herblay", "FR", "Europe/Paris"),
("48.98693", "2.44892", "Gonesse", "FR", "Europe/Paris"),
("48.79325", "2.29275", "Fontenay-aux-Roses", "FR", "Europe/Paris"),
("49.28669", "1.00288", "Elbeuf", "FR", "Europe/Paris"),
("43.71032", "-1.05366", "Dax", "FR", "Europe/Paris"),
("43.61058", "1.33467", "Colomiers", "FR", "Europe/Paris"),
("43.83125", "5.03586", "Cavaillon", "FR", "Europe/Paris"),
("45.73333", "4.91667", "Bron", "FR", "Europe/Paris"),
("48.90982", "2.45012", "Bobigny", "FR", "Europe/Paris"),
("48.77275", "5.16108", "Bar-le-Duc", "FR", "Europe/Paris"),
("43.67681", "4.63031", "Arles", "FR", "Europe/Paris"),
("41.91886", "8.73812", "Ajaccio", "FR", "Europe/Paris"),
("43.2907", "5.4384", "Marseille 11", "FR", "Europe/Paris"),
("-1.63333", "13.58357", "Franceville", "GA", "Africa/Libreville"),
("53.19146", "-2.52398", "Winsford", "GB", "Europe/London"),
("51.26", "-2.1875", "Westbury", "GB", "Europe/London"),
("51.84819", "1.26738", "Walton-on-the-Naze", "GB", "Europe/London"),
("52.41667", "0.75", "Thetford", "GB", "Europe/London"),
("51.39323", "0.47713", "Strood", "GB", "Europe/London"),
("50.79205", "-1.08593", "Southsea", "GB", "Europe/London"),
("53.78333", "-1.06667", "Selby", "GB", "Europe/London"),
("55.82885", "-4.21376", "Rutherglen", "GB", "Europe/London"),
("53.00974", "-3.05814", "Rhosllanerchrugog", "GB", "Europe/London"),
("53.83333", "-2.98333", "Poulton-le-Fylde", "GB", "Europe/London"),
("50.11861", "-5.53715", "Penzance", "GB", "Europe/London"),
("50.82882", "-0.32247", "Lancing", "GB", "Europe/London"),
("51.40148", "-1.32471", "Newbury", "GB", "Europe/London"),
("53.49389", "-1.29243", "Mexborough", "GB", "Europe/London"),
("50.75767", "-1.5443", "Lymington", "GB", "Europe/London"),
("53.69786", "-2.68758", "Leyland", "GB", "Europe/London"),
("53.7446", "-0.33525", "Kingston upon Hull", "GB", "Europe/London"),
("57.47908", "-4.22398", "Inverness", "GB", "Europe/London"),
("51.62907", "-0.74934", "High Wycombe", "GB", "Europe/London"),
("51.38673", "0.30367", "Hartley", "GB", "Europe/London"),
("52.66277", "-2.01111", "Great Wyrley", "GB", "Europe/London"),
("53.38333", "-0.76667", "Gainsborough", "GB", "Europe/London"),
("50.7236", "-3.52751", "Exeter", "GB", "Europe/London"),
("52.68333", "0.93333", "East Dereham", "GB", "Europe/London"),
("51.35084", "-1.99421", "Devizes", "GB", "Europe/London"),
("50.76306", "-1.29772", "Cowes", "GB", "Europe/London"),
("51.78967", "1.15597", "Clacton-on-Sea", "GB", "Europe/London"),
("53.46506", "-1.47217", "Chapletown", "GB", "Europe/London"),
("51.64316", "-0.36053", "Bushey", "GB", "Europe/London"),
("52.48173", "-2.12139", "Brierley Hill", "GB", "Europe/London"),
("53.81667", "-3.05", "Blackpool", "GB", "Europe/London"),
("53.0233", "-1.48119", "Belper", "GB", "Europe/London"),
("51.65", "-0.2", "Barnet", "GB", "Europe/London"),
("56.56317", "-2.58736", "Arbroath", "GB", "Europe/London"),
("57.14369", "-2.09814", "Aberdeen", "GB", "Europe/London"),
("51.39148", "-0.29825", "Surbiton", "GB", "Europe/London"),
("51.42708", "-0.91979", "Lower Earley", "GB", "Europe/London"),
("55.82737", "-4.0573", "Viewpark", "GB", "Europe/London"),
("41.82143", "41.77921", "Kobuleti", "GE", "Asia/Tbilisi"),
("5.30383", "-1.98956", "Tarkwa", "GH", "Africa/Accra"),
("7.06273", "-1.4001", "Mampong", "GH", "Africa/Accra"),
("6.46346", "-2.31938", "Bibiani", "GH", "Africa/Accra"),
("13.56667", "-15.6", "Farafenni", "GM", "Africa/Banjul"),
("9.535", "-13.68778", "Camayenne", "GN", "Africa/Conakry"),
("14.93333", "-91.11667", "Chichicastenango", "GT", "America/Guatemala"),
("22.37066", "114.10479", "Tsuen Wan", "HK", "Asia/Hong_Kong"),
("15.48131", "-86.57415", "Olanchito", "HN", "America/Tegucigalpa"),
("43.50891", "16.43915", "Split", "HR", "Europe/Zagreb"),
("18.65297", "-72.09391", "Thomazeau", "HT", "America/Port-au-Prince"),
("18.57677", "-72.22625", "Croix-des-Bouquets", "HT", "America/Port-au-Prince"),
("3.3285", "99.1625", "Tebingtinggi", "ID", "Asia/Jakarta"),
("3.7278", "98.6738", "Labuhan Deli", "ID", "Asia/Jakarta"),
("-7.51611", "109.05389", "Wangon", "ID", "Asia/Jakarta"),
("3.31332", "117.59152", "Tarakan", "ID", "Asia/Makassar"),
("-6.91806", "106.92667", "Sukabumi", "ID", "Asia/Jakarta"),
("-1.26424", "104.09701", "Simpang", "ID", "Asia/Jakarta"),
("-7.0981", "109.3243", "Randudongkal", "ID", "Asia/Jakarta"),
("0.51667", "101.44167", "Pekanbaru", "ID", "Asia/Jakarta"),
("-7.01833", "107.60389", "Pameungpeuk", "ID", "Asia/Jakarta"),
("-8.43333", "114.33333", "Muncar", "ID", "Asia/Jakarta"),
("-3.5403", "118.9707", "Majene", "ID", "Asia/Makassar"),
("-6.8048", "110.8405", "Kudus", "ID", "Asia/Jakarta"),
("-7.81667", "112.01667", "Kediri", "ID", "Asia/Jakarta"),
("-1.6", "103.61667", "Jambi City", "ID", "Asia/Jakarta"),
("-7.57897", "112.23109", "Diwek", "ID", "Asia/Jakarta"),
("-6.48167", "106.85417", "Cibinong", "ID", "Asia/Jakarta"),
("-7.73379", "113.69785", "Besuki", "ID", "Asia/Jakarta"),
("-1.26753", "116.82887", "Balikpapan", "ID", "Asia/Makassar"),
("-7.54972", "110.71639", "Ngemplak", "ID", "Asia/Jakarta"),
("53.53333", "-7.35", "An Muileann gCearr", "IE", "Europe/Dublin"),
("53.43333", "-7.95", "Athlone", "IE", "Europe/Dublin"),
("31.92923", "34.86563", "Ramla", "IL", "Asia/Jerusalem"),
("32.05971", "34.8732", "Ganei Tikva", "IL", "Asia/Jerusalem"),
("31.39547", "34.75699", "Rahat", "IL", "Asia/Jerusalem"),
("18.87813", "72.93924", "Uran", "IN", "Asia/Kolkata"),
("10.58806", "77.24779", "Udumalaippettai", "IN", "Asia/Kolkata"),
("9.82564", "78.25795", "Tiruppuvanam", "IN", "Asia/Kolkata"),
("25.49043", "85.94001", "Teghra", "IN", "Asia/Kolkata"),
("12.04161", "75.35927", "Talipparamba", "IN", "Asia/Kolkata"),
("26.11527", "86.59509", "Supaul", "IN", "Asia/Kolkata"),
("34.08565", "74.80555", "Srinagar", "IN", "Asia/Kolkata"),
("25.92493", "73.66633", "Sojat", "IN", "Asia/Kolkata"),
("14.62072", "74.83554", "Sirsi", "IN", "Asia/Kolkata"),
("25.13915", "73.06784", "Sheoganj", "IN", "Asia/Kolkata"),
("11.50526", "77.23826", "Sathyamangalam", "IN", "Asia/Kolkata"),
("21.46527", "83.97573", "Sambalpur", "IN", "Asia/Kolkata"),
("25.87498", "86.59611", "Saharsa", "IN", "Asia/Kolkata"),
("12.95629", "78.27539", "Robertsonpet", "IN", "Asia/Kolkata"),
("26.44931", "91.61356", "Rangia", "IN", "Asia/Kolkata"),
("33.37526", "74.3092", "Rajaori", "IN", "Asia/Kolkata"),
("24.81757", "84.63445", "Rafiganj", "IN", "Asia/Kolkata"),
("18.51957", "73.85535", "Pune", "IN", "Asia/Kolkata"),
("11.93381", "79.82979", "Puducherry", "IN", "Asia/Kolkata"),
("28.71271", "77.656", "Pilkhua", "IN", "Asia/Kolkata"),
("10.12268", "77.54372", "Periyakulam", "IN", "Asia/Kolkata"),
("31.28092", "74.85849", "Patti", "IN", "Asia/Kolkata"),
("20.88098", "75.11937", "Parola", "IN", "Asia/Kolkata"),
("23.07492", "88.28637", "Pandua", "IN", "Asia/Kolkata"),
("18.18158", "76.03889", "Osmanabad", "IN", "Asia/Kolkata"),
("25.6439", "77.9129", "Narwar", "IN", "Asia/Kolkata"),
("30.81383", "75.16878", "Moga", "IN", "Asia/Kolkata"),
("28.98002", "77.70636", "Meerut", "IN", "Asia/Kolkata"),
("11.12018", "76.11996", "Manjeri", "IN", "Asia/Kolkata"),
("30.21121", "74.4818", "Malaut", "IN", "Asia/Kolkata"),
("25.92127", "86.79271", "Madhipura", "IN", "Asia/Kolkata"),
("24.05979", "77.40858", "Leteri", "IN", "Asia/Kolkata"),
("21.34222", "71.30633", "Kundla", "IN", "Asia/Kolkata"),
("22.75218", "72.68533", "Kheda", "IN", "Asia/Kolkata"),
("23.1959", "86.51499", "Kenda", "IN", "Asia/Kolkata"),
("29.21399", "78.95693", "Kashipur", "IN", "Asia/Kolkata"),
("11.00599", "77.5609", "Kangayam", "IN", "Asia/Kolkata"),
("22.88783", "84.13864", "Jashpurnagar", "IN", "Asia/Kolkata"),
("26.2649", "81.54855", "Jais", "IN", "Asia/Kolkata"),
("16.06213", "76.0586", "Hungund", "IN", "Asia/Kolkata"),
("29.22254", "79.5286", "Haldwani", "IN", "Asia/Kolkata"),
("26.76628", "83.36889", "Gorakhpur", "IN", "Asia/Kolkata"),
("12.25282", "79.41727", "Gingee", "IN", "Asia/Kolkata"),
("21.53889", "71.57737", "Gariadhar", "IN", "Asia/Kolkata"),
("15.73628", "75.96976", "Gajendragarh", "IN", "Asia/Kolkata"),
("17.54907", "82.85749", "Elamanchili", "IN", "Asia/Kolkata"),
("19.21667", "73.08333", "Dombivli", "IN", "Asia/Kolkata"),
("22.19303", "88.18466", "Diamond Harbour", "IN", "Asia/Kolkata"),
("12.1277", "78.15794", "Dharmapuri", "IN", "Asia/Kolkata"),
("25.75728", "75.37991", "Deoli", "IN", "Asia/Kolkata"),
("14.46693", "75.92694", "Davangere", "IN", "Asia/Kolkata"),
("25.66795", "85.83636", "Dalsingh Sarai", "IN", "Asia/Kolkata"),
("15.5439", "73.7553", "Calangute", "IN", "Asia/Kolkata"),
("27.9247", "78.40102", "Chharra", "IN", "Asia/Kolkata"),
("32.55531", "76.12647", "Chamba", "IN", "Asia/Kolkata"),
("20.88197", "85.83334", "Bhuban", "IN", "Asia/Kolkata"),
("19.30157", "72.85107", "Bhayandar", "IN", "Asia/Kolkata"),
("15.45144", "78.14797", "Betamcherla", "IN", "Asia/Kolkata"),
("26.32293", "91.00632", "Barpeta", "IN", "Asia/Kolkata"),
("28.92694", "78.23456", "Bachhraon", "IN", "Asia/Kolkata"),
("21.59983", "71.21169", "Amreli", "IN", "Asia/Kolkata"),
("10.10649", "76.35484", "Alwaye", "IN", "Asia/Kolkata"),
("24.41288", "76.56719", "Aklera", "IN", "Asia/Kolkata"),
("23.49668", "86.68363", "Adra", "IN", "Asia/Kolkata"),
("22.4711", "88.1453", "Pujali", "IN", "Asia/Kolkata"),
("22.10194", "85.37752", "Barbil", "IN", "Asia/Kolkata"),
("17.34769", "78.55757", "Lal Bahadur Nagar", "IN", "Asia/Kolkata"),
("23.18", "88.58", "Aistala", "IN", "Asia/Kolkata"),
("9.57046", "76.32756", "Kalavoor", "IN", "Asia/Kolkata"),
("32.61603", "44.02488", "Karbala", "IQ", "Asia/Baghdad"),
("35.6803", "51.0193", "Shahre Jadide Andisheh", "IR", "Asia/Tehran"),
("36.64852", "51.49621", "Nowshahr", "IR", "Asia/Tehran"),
("33.14447", "47.3799", "Darreh Shahr", "IR", "Asia/Tehran"),
("33.86419", "48.26258", "Aleshtar", "IR", "Asia/Tehran"),
("32.65246", "51.67462", "Isfahan", "IR", "Asia/Tehran"),
("38.07789", "13.44275", "Villabate", "IT", "Europe/Rome"),
("36.92574", "14.72443", "Ragusa", "IT", "Europe/Rome"),
("37.51803", "15.00913", "Misterbianco", "IT", "Europe/Rome"),
("37.49223", "15.07041", "Catania", "IT", "Europe/Rome"),
("37.31065", "13.57661", "Agrigento", "IT", "Europe/Rome"),
("43.78956", "7.60872", "Ventimiglia", "IT", "Europe/Rome"),
("44.89784", "8.86374", "Tortona", "IT", "Europe/Rome"),
("40.87329", "14.43865", "Somma Vesuviana", "IT", "Europe/Rome"),
("40.72586", "8.55552", "Sassari", "IT", "Europe/Rome"),
("45.39402", "9.29109", "San Giuliano Milanese", "IT", "Europe/Rome"),
("42.67164", "14.01481", "Roseto degli Abruzzi", "IT", "Europe/Rome"),
("45.78071", "12.84052", "Portogruaro", "IT", "Europe/Rome"),
("43.1122", "12.38878", "Perugia", "IT", "Europe/Rome"),
("45.44694", "8.62118", "Novara", "IT", "Europe/Rome"),
("45.50369", "11.412", "Montecchio Maggiore-Alte Ceccato", "IT", "Europe/Rome"),
("40.55851", "17.80774", "Mesagne", "IT", "Europe/Rome"),
("45.79377", "8.88104", "Malnate", "IT", "Europe/Rome"),
("42.22718", "14.39024", "Lanciano", "IT", "Europe/Rome"),
("45.53069", "9.40531", "Gorgonzola", "IT", "Europe/Rome"),
("40.53123", "17.58522", "Francavilla Fontana", "IT", "Europe/Rome"),
("43.62558", "13.39954", "Falconara Marittima", "IT", "Europe/Rome"),
("45.9836", "12.70038", "Cordenons", "IT", "Europe/Rome"),
("44.31771", "9.32241", "Chiavari", "IT", "Europe/Rome"),
("44.59445", "11.04979", "Castelfranco Emilia", "IT", "Europe/Rome"),
("41.55947", "14.66737", "Campobasso", "IT", "Europe/Rome"),
("41.24264", "16.50104", "Bisceglie", "IT", "Europe/Rome"),
("41.72063", "12.6723", "Ariccia", "IT", "Europe/Rome"),
("40.92298", "14.30935", "Afragola", "IT", "Europe/Rome"),
("40.87363", "14.34085", "Volla", "IT", "Europe/Rome"),
("18.00747", "-76.78319", "New Kingston", "JM", "America/Jamaica"),
("35.8", "137.23333", "Gero", "JP", "Asia/Tokyo"),
("34.61667", "135.6", "Yao", "JP", "Asia/Tokyo"),
("34.75856", "136.13108", "Ueno-ebisumachi", "JP", "Asia/Tokyo"),
("34.81667", "137.4", "Toyokawa", "JP", "Asia/Tokyo"),
("34.4833", "136.84186", "Toba", "JP", "Asia/Tokyo"),
("36.65", "138.31667", "Suzaka", "JP", "Asia/Tokyo"),
("34.9", "137.5", "Shinshiro", "JP", "Asia/Tokyo"),
("35.06667", "135.21667", "Sasayama", "JP", "Asia/Tokyo"),
("36", "139.55722", "Okegawa", "JP", "Asia/Tokyo"),
("36.53333", "136.61667", "Nonoichi", "JP", "Asia/Tokyo"),
("36.75965", "137.36215", "Namerikawa", "JP", "Asia/Tokyo"),
("35", "136.51667", "Komono", "JP", "Asia/Tokyo"),
("33.4425", "129.96972", "Karatsu", "JP", "Asia/Tokyo"),
("35.30889", "139.55028", "Kamakura", "JP", "Asia/Tokyo"),
("34.25", "135.31667", "Iwade", "JP", "Asia/Tokyo"),
("35.82756", "137.95378", "Ina", "JP", "Asia/Tokyo"),
("33.3213", "130.94098", "Hita", "JP", "Asia/Tokyo"),
("36.24624", "139.07204", "Fujioka", "JP", "Asia/Tokyo"),
("36.33011", "138.89585", "Annaka", "JP", "Asia/Tokyo"),
("35.815", "139.6853", "Shimotoda", "JP", "Asia/Tokyo"),
("39.46667", "141.95", "Yamada", "JP", "Asia/Tokyo"),
("37.56667", "140.11667", "Inawashiro", "JP", "Asia/Tokyo"),
("43.82634", "144.09638", "Motomachi", "JP", "Asia/Tokyo"),
("44.35056", "142.45778", "Nayoro", "JP", "Asia/Tokyo"),
("41.77583", "140.73667", "Hakodate", "JP", "Asia/Tokyo"),
("35.48199", "137.02166", "Minokamo", "JP", "Asia/Tokyo"),
("0.03813", "36.36339", "Nyahururu", "KE", "Africa/Nairobi"),
("3.11988", "35.59642", "Lodwar", "KE", "Africa/Nairobi"),
("0.46005", "34.11169", "Busia", "KE", "Africa/Nairobi"),
("40.93333", "73", "Jalal-Abad", "KG", "Asia/Bishkek"),
("13.65805", "102.56365", "Paoy Paet", "KH", "Asia/Phnom_Penh"),
("36.82167", "128.63083", "Eisen", "KR", "Asia/Seoul"),
("37.1759", "128.9889", "T’aebaek", "KR", "Asia/Seoul"),
("36.20389", "127.08472", "Nonsan", "KR", "Asia/Seoul"),
("37.65639", "126.835", "Goyang-si", "KR", "Asia/Seoul"),
("36.6009", "126.665", "Hongseong", "KR", "Asia/Seoul"),
("34.8825", "128.62667", "Sinhyeon", "KR", "Asia/Seoul"),
("47.83333", "59.6", "Shalqar", "KZ", "Asia/Aqtobe"),
("47.46657", "84.87144", "Zaysan", "KZ", "Asia/Almaty"),
("44.85278", "65.50917", "Kyzylorda", "KZ", "Asia/Qyzylorda"),
("43.41949", "77.0202", "Otegen Batyra", "KZ", "Asia/Almaty"),
("6.84019", "79.87116", "Dehiwala-Mount Lavinia", "LK", "Asia/Colombo"),
("6.9909", "79.883", "Hendala", "LK", "Asia/Colombo"),
("7.57944", "-8.53778", "New Yekepa", "LR", "Africa/Monrovia"),
("55.25", "24.75", "Ukmerge", "LT", "Europe/Vilnius"),
("54.39635", "24.04142", "Alytus", "LT", "Europe/Vilnius"),
("30.75545", "20.22625", "Ajdabiya", "LY", "Africa/Tripoli"),
("24.96334", "10.18003", "Ghat", "LY", "Africa/Tripoli"),
("33.92866", "-6.90656", "Temara", "MA", "Africa/Casablanca"),
("33.42585", "-6.00137", "Oulmes", "MA", "Africa/Casablanca"),
("34.31", "-2.16", "Jerada", "MA", "Africa/Casablanca"),
("33.43443", "-5.22126", "Azrou", "MA", "Africa/Casablanca"),
("48.15659", "28.28489", "Soroca", "MD", "Europe/Chisinau"),
("42.28639", "18.84", "Budva", "ME", "Europe/Podgorica"),
("-22.9", "44.53333", "Sakaraha", "MG", "Indian/Antananarivo"),
("-21.15", "46.58333", "Ikalamavony", "MG", "Indian/Antananarivo"),
("-19.65", "47.31667", "Antanifotsy", "MG", "Indian/Antananarivo"),
("-17.83333", "48.41667", "Ambatondrazaka", "MG", "Indian/Antananarivo"),
("42", "21.32778", "Saraj", "MK", "Europe/Skopje"),
("41.92361", "20.91361", "Bogovinje", "MK", "Europe/Skopje"),
("12.74409", "-8.07257", "Kati", "ML", "Africa/Bamako"),
("14.0823", "98.19151", "Dawei", "MM", "Asia/Yangon"),
("16.68911", "98.50893", "Myawadi", "MM", "Asia/Yangon"),
("17.30858", "97.01124", "Kyaikto", "MM", "Asia/Yangon"),
("47.90771", "106.88324", "Ulan Bator", "MN", "Asia/Ulaanbaatar"),
("14.67751", "-60.94228", "Le Robert", "MQ", "America/Martinique"),
("35.89972", "14.51472", "Valletta", "MT", "Europe/Malta"),
("-13.7804", "34.4587", "Salima", "MW", "Africa/Blantyre"),
("16.75973", "-93.11308", "Tuxtla", "MX", "America/Mexico_City"),
("19.8173", "-97.35992", "Teziutlan", "MX", "America/Mexico_City"),
("21.28306", "-89.66123", "Progreso", "MX", "America/Merida"),
("17.06542", "-96.72365", "Oaxaca", "MX", "America/Mexico_City"),
("25.87972", "-97.50417", "Heroica Matamoros", "MX", "America/Matamoros"),
("19.32932", "-98.1664", "Contla", "MX", "America/Mexico_City"),
("17.94979", "-94.91386", "Acayucan", "MX", "America/Mexico_City"),
("19.32889", "-99.32556", "San Lorenzo Acopilco", "MX", "America/Mexico_City"),
("20.22816", "-103.5687", "Zacoalco de Torres", "MX", "America/Mexico_City"),
("20.74122", "-100.44843", "Santa Rosa Jauregui", "MX", "America/Mexico_City"),
("20.21322", "-100.88023", "Salvatierra", "MX", "America/Mexico_City"),
("19.64745", "-102.04897", "Paracho de Verduzco", "MX", "America/Mexico_City"),
("20.28527", "-103.42897", "Jocotepec", "MX", "America/Mexico_City"),
("21.01858", "-101.2591", "Guanajuato", "MX", "America/Mexico_City"),
("22.49396", "-105.36369", "Acaponeta", "MX", "America/Mazatlan"),
("19.04222", "-98.11889", "Casa Blanca", "MX", "America/Mexico_City"),
("1.6561", "103.6032", "Kulai", "MY", "Asia/Kuala_Lumpur"),
("5.90702", "116.10146", "Donggongon", "MY", "Asia/Kuching"),
("4.88441", "101.96857", "Gua Musang", "MY", "Asia/Kuala_Lumpur"),
("5.4709", "100.24529", "Batu Feringgi", "MY", "Asia/Kuala_Lumpur"),
("4.02219", "101.02083", "Teluk Intan", "MY", "Asia/Kuala_Lumpur"),
("1.6", "103.81667", "Ulu Tiram", "MY", "Asia/Kuala_Lumpur"),
("2.2139", "102.3278", "Kampung Ayer Molek", "MY", "Asia/Kuala_Lumpur"),
("-23.85972", "35.34722", "Maxixe", "MZ", "Africa/Maputo"),
("-21.98333", "16.91667", "Okahandja", "NA", "Africa/Windhoek"),
("13.70727", "9.15013", "Mirriah", "NE", "Africa/Niamey"),
("4.92675", "6.26764", "Yenagoa", "NG", "Africa/Lagos"),
("6.8485", "3.64633", "Shagamu", "NG", "Africa/Lagos"),
("7.6", "4.18333", "Olupona", "NG", "Africa/Lagos"),
("6.15038", "6.83042", "Nkpor", "NG", "Africa/Lagos"),
("6.45407", "3.39467", "Lagos", "NG", "Africa/Lagos"),
("9.58126", "8.2926", "Kafanchan", "NG", "Africa/Lagos"),
("7.62789", "4.74161", "Ilesa", "NG", "Africa/Lagos"),
("7.50251", "5.06258", "Igbara-Odo", "NG", "Africa/Lagos"),
("11.86064", "9.0027", "Gaya", "NG", "Africa/Lagos"),
("7.65649", "4.92235", "Efon-Alaaye", "NG", "Africa/Lagos"),
("10.61285", "12.19458", "Biu", "NG", "Africa/Lagos"),
("12.74482", "4.52514", "Argungu", "NG", "Africa/Lagos"),
("13.48082", "-86.58208", "Somoto", "NI", "America/Managua"),
("11.84962", "-86.19903", "Jinotepe", "NI", "America/Managua"),
("52.09", "5.23333", "Zeist", "NL", "Europe/Amsterdam"),
("51.65333", "5.2875", "Vught", "NL", "Europe/Amsterdam"),
("51.44889", "5.51978", "Tongelre", "NL", "Europe/Amsterdam"),
("51.95838", "4.47124", "Schiebroek", "NL", "Europe/Amsterdam"),
("52.31333", "6.92917", "Oldenzaal", "NL", "Europe/Amsterdam"),
("52.26083", "7.00417", "Losser", "NL", "Europe/Amsterdam"),
("53.16167", "6.76111", "Hoogezand", "NL", "Europe/Amsterdam"),
("52.57583", "6.61944", "Hardenberg", "NL", "Europe/Amsterdam"),
("52.71083", "5.74861", "Emmeloord", "NL", "Europe/Amsterdam"),
("51.955", "5.22778", "Culemborg", "NL", "Europe/Amsterdam"),
("52.14", "5.58472", "Barneveld", "NL", "Europe/Amsterdam"),
("68.79833", "16.54165", "Harstad", "NO", "Europe/Oslo"),
("-44.39672", "171.25364", "Timaru", "NZ", "Pacific/Auckland"),
("-38.65333", "178.00417", "Gisborne", "NZ", "Pacific/Auckland"),
("8.88988", "-79.62603", "Veracruz", "PA", "America/Panama"),
("9.15093", "-79.62098", "Chilibre", "PA", "America/Panama"),
("-3.74912", "-73.25383", "Iquitos", "PE", "America/Lima"),
("-16.25", "-69.08333", "Yunguyo", "PE", "America/Lima"),
("-15.21194", "-75.11028", "Minas de Marcona", "PE", "America/Lima"),
("-11.94306", "-76.70944", "Chosica", "PE", "America/Lima"),
("-5.85746", "144.23058", "Mount Hagen", "PG", "Pacific/Port_Moresby"),
("6.33444", "124.95278", "Tupi", "PH", "Asia/Manila"),
("10.7375", "122.9666", "Talisay", "PH", "Asia/Manila"),
("12.97389", "123.99333", "Sorsogon", "PH", "Asia/Manila"),
("9.3337", "122.8637", "Santa Catalina", "PH", "Asia/Manila"),
("12.35275", "121.06761", "San Jose", "PH", "Asia/Manila"),
("6.95194", "121.96361", "Recodo", "PH", "Asia/Manila"),
("14.66", "120.56528", "Pilar", "PH", "Asia/Manila"),
("10.20898", "123.758", "Naga", "PH", "Asia/Manila"),
("12.37169", "123.62494", "Masbate", "PH", "Asia/Manila"),
("16.0438", "120.4861", "Manaoag", "PH", "Asia/Manila"),
("10.13361", "124.84472", "Maasin", "PH", "Asia/Manila"),
("16.455", "120.5875", "La Trinidad", "PH", "Asia/Manila"),
("9.6531", "124.3697", "Jagna", "PH", "Asia/Manila"),
("14.8361", "120.97844", "Guyong", "PH", "Asia/Manila"),
("8.56697", "123.33471", "Dipolog", "PH", "Asia/Manila"),
("10.31672", "123.89071", "Cebu City", "PH", "Asia/Manila"),
("14.14989", "121.3152", "Calauan", "PH", "Asia/Manila"),
("15.72892", "120.57224", "Burgos", "PH", "Asia/Manila"),
("14.95472", "120.89694", "Baliuag", "PH", "Asia/Manila"),
("14.62578", "121.12251", "Antipolo", "PH", "Asia/Manila"),
("27.52948", "68.75915", "Khairpur Mir’s", "PK", "Asia/Karachi"),
("26.9423", "68.11759", "Tharu Shah", "PK", "Asia/Karachi"),
("31.82539", "72.54064", "Sillanwali", "PK", "Asia/Karachi"),
("31.71667", "73.38333", "Sangla Hill", "PK", "Asia/Karachi"),
("30.29184", "71.67164", "Qadirpur Ran", "PK", "Asia/Karachi"),
("31.96258", "73.97117", "Naushahra Virkan", "PK", "Asia/Karachi"),
("32.57756", "71.52847", "Mianwali", "PK", "Asia/Karachi"),
("27.55898", "68.21204", "Larkana", "PK", "Asia/Karachi"),
("30.46907", "70.96699", "Kot Addu", "PK", "Asia/Karachi"),
("30.76468", "74.12286", "Kanganpur", "PK", "Asia/Karachi"),
("25.95533", "68.88871", "Jhol", "PK", "Asia/Karachi"),
("29.69221", "72.54566", "Hasilpur", "PK", "Asia/Karachi"),
("32.17629", "75.06583", "Fazilpur", "PK", "Asia/Karachi"),
("32.87533", "71.57118", "Daud Khel", "PK", "Asia/Karachi"),
("25.80565", "68.49143", "Bhit Shah", "PK", "Asia/Karachi"),
("29.38242", "70.91106", "Alipur", "PK", "Asia/Karachi"),
("51.14942", "15.00835", "Zgorzelec", "PL", "Europe/Warsaw"),
("54.58048", "16.86194", "Ustka", "PL", "Europe/Warsaw"),
("50.5107", "18.30056", "Strzelce Opolskie", "PL", "Europe/Warsaw"),
("54.60528", "18.34717", "Reda", "PL", "Europe/Warsaw"),
("50.20528", "19.27498", "Jaworzno", "PL", "Europe/Warsaw"),
("50.86079", "17.4674", "Brzeg", "PL", "Europe/Warsaw"),
("18.42745", "-67.15407", "Aguadilla", "PR", "America/Puerto_Rico"),
("18.03496", "-66.8499", "Yauco", "PR", "America/Puerto_Rico"),
("31.78336", "35.23388", "East Jerusalem", "PS", "Asia/Hebron"),
("38.72706", "-9.24671", "Carnaxide", "PT", "Europe/Lisbon"),
("37.08819", "-8.2503", "Albufeira", "PT", "Europe/Lisbon"),
("41.20485", "-8.33147", "Paredes", "PT", "Europe/Lisbon"),
("41.1053", "-7.32097", "Custoias", "PT", "Europe/Lisbon"),
("37.74615", "-25.66689", "Ponta Delgada", "PT", "Atlantic/Azores"),
("-20.88231", "55.4504", "Saint-Denis", "RE", "Indian/Reunion"),
("44.43579", "26.01649", "Sector 6", "RO", "Europe/Bucharest"),
("44.22639", "22.53083", "Negotin", "RS", "Europe/Belgrade"),
("44.97639", "19.61222", "Sremska Mitrovica", "RS", "Europe/Belgrade"),
("53.53395", "33.72798", "Zhukovka", "RU", "Europe/Moscow"),
("46.7055", "38.2739", "Yeysk", "RU", "Europe/Moscow"),
("44.98901", "38.94324", "Yablonovskiy", "RU", "Europe/Moscow"),
("56.03361", "35.96944", "Volokolamsk", "RU", "Europe/Moscow"),
("57.97472", "33.2525", "Valday", "RU", "Europe/Moscow"),
("56.85836", "35.90057", "Tver", "RU", "Europe/Moscow"),
("55.62047", "37.49338", "Tyoply Stan", "RU", "Europe/Moscow"),
("54.90083", "38.07083", "Stupino", "RU", "Europe/Moscow"),
("55.63711", "37.38115", "Solntsevo", "RU", "Europe/Moscow"),
("59.80917", "30.38167", "Shushary", "RU", "Europe/Moscow"),
("64.5635", "39.8302", "Severodvinsk", "RU", "Europe/Moscow"),
("51.78771", "56.36091", "Saraktash", "RU", "Asia/Yekaterinburg"),
("53.95278", "32.86389", "Roslavl’", "RU", "Europe/Moscow"),
("51.40944", "46.04833", "Privolzhskiy", "RU", "Europe/Saratov"),
("61.78491", "34.34691", "Petrozavodsk", "RU", "Europe/Moscow"),
("53.37596", "51.3452", "Otradnyy", "RU", "Europe/Samara"),
("54.48147", "53.47103", "Oktyabr’skiy", "RU", "Asia/Yekaterinburg"),
("43.96222", "43.63417", "Novopavlovsk", "RU", "Europe/Moscow"),
("53.53041", "43.67663", "Nizhniy Lomov", "RU", "Europe/Moscow"),
("55.38752", "36.73307", "Naro-Fominsk", "RU", "Europe/Moscow"),
("50.06", "43.2379", "Mikhaylovka", "RU", "Europe/Volgograd"),
("55.64776", "38.02486", "Malakhovka", "RU", "Europe/Moscow"),
("55.85", "37.56667", "Likhobory", "RU", "Europe/Moscow"),
("51.4781", "57.3552", "Kuvandyk", "RU", "Asia/Yekaterinburg"),
("44.92934", "37.99117", "Krymsk", "RU", "Europe/Moscow"),
("54.03876", "43.91385", "Kovylkino", "RU", "Europe/Moscow"),
("60.02427", "30.28491", "Kolomyagi", "RU", "Europe/Moscow"),
("53.93361", "37.92792", "Kireyevsk", "RU", "Europe/Moscow"),
("54.84444", "38.16694", "Kashira", "RU", "Europe/Moscow"),
("58.7002", "59.4839", "Kachkanar", "RU", "Asia/Yekaterinburg"),
("43.35071", "46.10925", "Gudermes", "RU", "Europe/Moscow"),
("57.30185", "39.85331", "Gavrilov-Yam", "RU", "Europe/Moscow"),
("53.59782", "34.33825", "Dyat’kovo", "RU", "Europe/Moscow"),
("58.1908", "40.17171", "Danilov", "RU", "Europe/Moscow"),
("42.819", "47.1192", "Buynaksk", "RU", "Europe/Moscow"),
("53.77166", "38.12408", "Bogoroditsk", "RU", "Europe/Moscow"),
("54.39304", "53.26023", "Bavly", "RU", "Europe/Moscow"),
("55.39485", "43.83992", "Arzamas", "RU", "Europe/Moscow"),
("54.8421", "46.5813", "Alatyr’", "RU", "Europe/Moscow"),
("58.63667", "59.80222", "Lesnoy", "RU", "Asia/Yekaterinburg"),
("55.8736", "85.4265", "Yashkino", "RU", "Asia/Novokuznetsk"),
("58.04254", "65.27258", "Tavda", "RU", "Asia/Yekaterinburg"),
("55.54028", "89.20083", "Sharypovo", "RU", "Asia/Krasnoyarsk"),
("53.30972", "83.62389", "Novosilikatnyy", "RU", "Asia/Barnaul"),
("58.23583", "92.48278", "Lesosibirsk", "RU", "Asia/Krasnoyarsk"),
("56.11281", "69.49015", "Ishim", "RU", "Asia/Yekaterinburg"),
("56.9083", "60.8019", "Beryozovsky", "RU", "Asia/Yekaterinburg"),
("55.75556", "60.70278", "Ozersk", "RU", "Asia/Yekaterinburg"),
("51.82721", "107.60627", "Ulan-Ude", "RU", "Asia/Irkutsk"),
("45.47885", "133.42825", "Lesozavodsk", "RU", "Asia/Vladivostok"),
("65.93381", "111.4834", "Aykhal", "RU", "Asia/Yakutsk"),
("53.14657", "140.72287", "Nikolayevsk-on-Amure", "RU", "Asia/Vladivostok"),
("60.97944", "76.92421", "Izluchinsk", "RU", "Asia/Yekaterinburg"),
("-1.9487", "30.4347", "Rwamagana", "RW", "Africa/Kigali"),
("27.0174", "49.62251", "Al Jubayl", "SA", "Asia/Riyadh"),
("11.8659", "34.3869", "Ar Ruseris", "SD", "Africa/Khartoum"),
("61.72744", "17.10558", "Hudiksvall", "SE", "Europe/Stockholm"),
("59.33333", "18.28333", "Boo", "SE", "Europe/Stockholm"),
("48.8449", "17.22635", "Skalica", "SK", "Europe/Bratislava"),
("48.43174", "17.8031", "Hlohovec", "SK", "Europe/Bratislava"),
("8.48714", "-13.2356", "Freetown", "SL", "Africa/Freetown"),
("-0.35817", "42.54536", "Kismayo", "SO", "Africa/Mogadishu"),
("9.89206", "43.38531", "Baki", "SO", "Africa/Mogadishu"),
("13.73417", "-89.71472", "Sonzacate", "SV", "America/El_Salvador"),
("13.70167", "-89.10944", "Ilopango", "SV", "America/El_Salvador"),
("34.5624", "38.28402", "Tadmur", "SY", "Asia/Damascus"),
("35.95664", "36.7138", "Binnish", "SY", "Asia/Damascus"),
("12.18441", "18.69303", "Mongo", "TD", "Africa/Ndjamena"),
("15.46063", "99.89166", "Thap Than", "TH", "Asia/Bangkok"),
("8.43333", "99.96667", "Nakhon Si Thammarat", "TH", "Asia/Bangkok"),
("13.51825", "99.95469", "Damnoen Saduak", "TH", "Asia/Bangkok"),
("15.79408", "104.1451", "Yasothon", "TH", "Asia/Bangkok"),
("6.25947", "102.05461", "Tak Bai", "TH", "Asia/Bangkok"),
("16.0567", "103.65309", "Roi Et", "TH", "Asia/Bangkok"),
("13.44581", "101.18445", "Phanat Nikhom", "TH", "Asia/Bangkok"),
("13.8196", "100.04427", "Nakhon Pathom", "TH", "Asia/Bangkok"),
("14.64056", "104.64992", "Kantharalak", "TH", "Asia/Bangkok"),
("15.58552", "102.42587", "Bua Yai", "TH", "Asia/Bangkok"),
("14.37395", "100.48528", "Bang Ban", "TH", "Asia/Bangkok"),
("38.55632", "69.01354", "Vahdat", "TJ", "Asia/Dushanbe"),
("-8.99167", "125.21972", "Maliana", "TL", "Asia/Dili"),
("36.08497", "9.37082", "Siliana", "TN", "Africa/Tunis"),
("35.72917", "10.58082", "Msaken", "TN", "Africa/Tunis"),
("36.46917", "10.78222", "Beni Khiar", "TN", "Africa/Tunis"),
("37.16911", "10.03478", "El Alia", "TN", "Africa/Tunis"),
("38.13708", "41.00817", "Silvan", "TR", "Europe/Istanbul"),
("39.22493", "42.85693", "Patnos", "TR", "Europe/Istanbul"),
("37.31309", "40.74357", "Mardin", "TR", "Europe/Istanbul"),
("37.58105", "29.26639", "Serinhisar", "TR", "Europe/Istanbul"),
("37.05944", "37.3825", "Gaziantep", "TR", "Europe/Istanbul"),
("39.59611", "27.02444", "Edremit", "TR", "Europe/Istanbul"),
("39.12074", "27.18052", "Bergama", "TR", "Europe/Istanbul"),
("38.37255", "34.02537", "Aksaray", "TR", "Europe/Istanbul"),
("40.98894", "28.67582", "Yakuplu", "TR", "Europe/Istanbul"),
("40.1675", "34.37389", "Sungurlu", "TR", "Europe/Istanbul"),
("40.37528", "28.88222", "Mudanya", "TR", "Europe/Istanbul"),
("10.66668", "-61.51889", "Port of Spain", "TT", "America/Port_of_Spain"),
("23.5654", "119.58627", "Magong", "TW", "Asia/Taipei"),
("-2.68333", "33", "Usagara", "TZ", "Africa/Dar_es_Salaam"),
("-4.06667", "37.73333", "Same", "TZ", "Africa/Dar_es_Salaam"),
("-6.25", "38.66667", "Mvomero", "TZ", "Africa/Dar_es_Salaam"),
("-4.83", "29.65806", "Mwandiga", "TZ", "Africa/Dar_es_Salaam"),
("-6.8", "39.25", "Magomeni", "TZ", "Africa/Dar_es_Salaam"),
("-7.60361", "37.00438", "Kidodi", "TZ", "Africa/Dar_es_Salaam"),
("-7.76667", "35.7", "Iringa", "TZ", "Africa/Dar_es_Salaam"),
("-5.41667", "38.01667", "Chanika", "TZ", "Africa/Dar_es_Salaam"),
("-10.33333", "39.28333", "Nyangao", "TZ", "Africa/Dar_es_Salaam"),
("49.07866", "30.96755", "Zvenihorodka", "UA", "Europe/Kiev"),
("47.56494", "31.33078", "Voznesensk", "UA", "Europe/Kiev"),
("49.41029", "38.15035", "Svatove", "UA", "Europe/Zaporozhye"),
("50.18545", "27.06365", "Shepetivka", "UA", "Europe/Kiev"),
("47.48444", "36.25361", "Polohy", "UA", "Europe/Zaporozhye"),
("46.75451", "33.34864", "Nova Kakhovka", "UA", "Europe/Kiev"),
("50.75932", "25.34244", "Lutsk", "UA", "Europe/Kiev"),
("49.65186", "26.97253", "Krasyliv", "UA", "Europe/Kiev"),
("46.65581", "32.6178", "Kherson", "UA", "Europe/Kiev"),
("51.67822", "33.9162", "Hlukhiv", "UA", "Europe/Kiev"),
("45.99194", "29.41824", "Artsyz", "UA", "Europe/Kiev"),
("2.41669", "30.98551", "Paidha", "UG", "Africa/Kampala"),
("3.27833", "32.88667", "Kitgum", "UG", "Africa/Kampala"),
("3.02013", "30.91105", "Arua", "UG", "Africa/Kampala"),
("33.45122", "-86.99666", "Hueytown", "US", "America/Chicago"),
("33.44872", "-86.78777", "Vestavia Hills", "US", "America/Chicago"),
("35.25064", "-91.73625", "Searcy", "US", "America/Chicago"),
("26.68451", "-80.66756", "Belle Glade", "US", "America/New_York"),
("28.54944", "-81.77285", "Clermont", "US", "America/New_York"),
("28.90054", "-81.26367", "Deltona", "US", "America/New_York"),
("29.65163", "-82.32483", "Gainesville", "US", "America/New_York"),
("25.67927", "-80.31727", "Kendall", "US", "America/New_York"),
("28.15112", "-82.46148", "Lutz", "US", "America/New_York"),
("26.2173", "-80.22588", "North Lauderdale", "US", "America/New_York"),
("30.17746", "-81.38758", "Palm Valley", "US", "America/New_York"),
("26.91756", "-82.07842", "Punta Gorda Isles", "US", "America/New_York"),
("27.71809", "-82.35176", "Sun City Center", "US", "America/New_York"),
("27.09978", "-82.45426", "Venice", "US", "America/New_York"),
("34.06635", "-84.67837", "Acworth", "US", "America/New_York"),
("32.54044", "-82.90375", "Dublin", "US", "America/New_York"),
("33.08014", "-83.2321", "Milledgeville", "US", "America/New_York"),
("33.54428", "-84.23381", "Stockbridge", "US", "America/New_York"),
("38.58894", "-89.99038", "Fairview Heights", "US", "America/Chicago"),
("39.78504", "-85.76942", "Greenfield", "US", "America/Indiana/Indianapolis"),
("38.06084", "-97.92977", "Hutchinson", "US", "America/Chicago"),
("39.08367", "-84.50855", "Covington", "US", "America/New_York"),
("36.61033", "-88.31476", "Murray", "US", "America/Chicago"),
("29.84576", "-90.10674", "Estelle", "US", "America/Chicago"),
("32.52515", "-93.75018", "Shreveport", "US", "America/Chicago"),
("38.96372", "-76.99081", "Chillum", "US", "America/New_York"),
("38.70734", "-77.02303", "Fort Washington", "US", "America/New_York"),
("39.33427", "-76.43941", "Middle River", "US", "America/New_York"),
("39.32011", "-76.51552", "Rosedale", "US", "America/New_York"),
("39.32288", "-76.72803", "Woodlawn", "US", "America/New_York"),
("39.09112", "-94.41551", "Independence", "US", "America/Chicago"),
("37.95143", "-91.77127", "Rolla", "US", "America/Chicago"),
("33.41012", "-91.06177", "Greenville", "US", "America/Chicago"),
("34.25807", "-88.70464", "Tupelo", "US", "America/Chicago"),
("35.05266", "-78.87836", "Fayetteville", "US", "America/New_York"),
("34.25628", "-78.04471", "Leland", "US", "America/New_York"),
("35.88264", "-80.08199", "Thomasville", "US", "America/New_York"),
("39.71734", "-74.96933", "Sicklerville", "US", "America/New_York"),
("39.43534", "-84.20299", "Lebanon", "US", "America/New_York"),
("34.77453", "-96.67834", "Ada", "US", "America/Chicago"),
("35.74788", "-95.36969", "Muskogee", "US", "America/Chicago"),
("39.96097", "-75.60804", "West Chester", "US", "America/New_York"),
("33.98154", "-81.23621", "Lexington", "US", "America/New_York"),
("36.02506", "-86.77917", "Brentwood Estates", "US", "America/Chicago"),
("35.61452", "-88.81395", "Jackson", "US", "America/Chicago"),
("32.44874", "-99.73314", "Abilene", "US", "America/Chicago"),
("30.16688", "-96.39774", "Brenham", "US", "America/Chicago"),
("31.12406", "-97.90308", "Copperas Cove", "US", "America/Chicago"),
("29.53885", "-95.44744", "Fresno", "US", "America/Chicago"),
("30.5427", "-97.54667", "Hutto", "US", "America/Chicago"),
("32.5007", "-94.74049", "Longview", "US", "America/Chicago"),
("31.76212", "-95.63079", "Palestine", "US", "America/Chicago"),
("26.18924", "-98.15529", "San Juan", "US", "America/Chicago"),
("32.35126", "-95.30106", "Tyler", "US", "America/Chicago"),
("37.52487", "-77.55777", "Bon Air", "US", "America/New_York"),
("38.91817", "-78.19444", "Front Royal", "US", "America/New_York"),
("37.60876", "-77.37331", "Mechanicsville", "US", "America/New_York"),
("39.00622", "-77.4286", "Sterling", "US", "America/New_York"),
("39.45621", "-77.96389", "Martinsburg", "US", "America/New_York"),
("41.27621", "-72.86843", "East Haven", "US", "America/New_York"),
("41.14676", "-73.49484", "New Canaan", "US", "America/New_York"),
("41.55815", "-73.0515", "Waterbury", "US", "America/New_York"),
("41.6764", "-91.58045", "Coralville", "US", "America/Chicago"),
("41.57721", "-93.71133", "West Des Moines", "US", "America/Chicago"),
("41.15376", "-87.88754", "Bourbonnais", "US", "America/Chicago"),
("42.24113", "-88.3162", "Crystal Lake", "US", "America/Chicago"),
("41.72059", "-87.70172", "Evergreen Park", "US", "America/Chicago"),
("42.16808", "-88.42814", "Huntley", "US", "America/Chicago"),
("41.8542", "-87.66561", "Lower West Side", "US", "America/Chicago"),
("41.80753", "-87.65644", "New City", "US", "America/Chicago"),
("40.56754", "-89.64066", "Pekin", "US", "America/Chicago"),
("41.84364", "-87.71255", "South Lawndale", "US", "America/Chicago"),
("41.85059", "-87.882", "Westchester", "US", "America/Chicago"),
("41.75338", "-86.11084", "Granger", "US", "America/Indiana/Indianapolis"),
("41.47892", "-87.45476", "Schererville", "US", "America/Chicago"),
("42.35843", "-71.05977", "Boston", "US", "America/New_York"),
("42.58342", "-71.8023", "Fitchburg", "US", "America/New_York"),
("42.4251", "-71.06616", "Malden", "US", "America/New_York"),
("42.52787", "-70.92866", "Peabody", "US", "America/New_York"),
("41.9001", "-71.08977", "Taunton", "US", "America/New_York"),
("43.91452", "-69.96533", "Brunswick", "US", "America/New_York"),
("42.30865", "-83.48216", "Canton", "US", "America/Detroit"),
("46.09273", "-88.64235", "Iron River", "US", "America/Menominee"),
("42.97086", "-82.42491", "Port Huron", "US", "America/Detroit"),
("42.7392", "-84.62081", "Waverly", "US", "America/Detroit"),
("45.0408", "-93.263", "Columbia Heights", "US", "America/Chicago"),
("45.16024", "-93.08883", "Lino Lakes", "US", "America/Chicago"),
("44.73941", "-93.12577", "Rosemount", "US", "America/Chicago"),
("47.92526", "-97.03285", "Grand Forks", "US", "America/Chicago"),
("42.93369", "-72.27814", "Keene", "US", "America/New_York"),
("40.94065", "-73.99681", "Dumont", "US", "America/New_York"),
("40.72816", "-74.07764", "Jersey City", "US", "America/New_York"),
("40.82232", "-74.15987", "Nutley", "US", "America/New_York"),
("40.65538", "-74.38987", "Scotch Plains", "US", "America/New_York"),
("40.5576", "-74.28459", "Woodbridge", "US", "America/New_York"),
("40.57788", "-73.95958", "Brighton Beach", "US", "America/New_York"),
("40.67705", "-73.89125", "Cypress Hills", "US", "America/New_York"),
("40.60538", "-73.75513", "Far Rockaway", "US", "America/New_York"),
("40.72371", "-73.95097", "Greenpoint", "US", "America/New_York"),
("40.64621", "-73.97069", "Kensington", "US", "America/New_York"),
("40.68066", "-73.47429", "Massapequa", "US", "America/New_York"),
("41.50343", "-74.01042", "Newburgh", "US", "America/New_York"),
("40.63316", "-74.13653", "Port Richmond", "US", "America/New_York"),
("41.0051", "-73.78458", "Scarsdale", "US", "America/New_York"),
("43.1009", "-75.23266", "Utica", "US", "America/New_York"),
("40.93121", "-73.89875", "Yonkers", "US", "America/New_York"),
("41.55838", "-81.56929", "Collinwood", "US", "America/New_York"),
("41.48199", "-81.79819", "Lakewood", "US", "America/New_York"),
("41.24255", "-82.61573", "Norwalk", "US", "America/New_York"),
("41.66394", "-83.55521", "Toledo", "US", "America/New_York"),
("40.2737", "-76.88442", "Harrisburg", "US", "America/New_York"),
("40.24537", "-75.64963", "Pottstown", "US", "America/New_York"),
("41.54566", "-71.29144", "Middletown", "US", "America/New_York"),
("43.61062", "-72.97261", "Rutland", "US", "America/New_York"),
("44.27804", "-88.27205", "Kaukauna", "US", "America/Chicago"),
("42.55308", "-87.93341", "Pleasant Prairie", "US", "America/Chicago"),
("41.16704", "-73.20483", "Bridgeport", "US", "America/New_York"),
("33.35283", "-111.78903", "Gilbert", "US", "America/Phoenix"),
("33.50921", "-111.89903", "Scottsdale", "US", "America/Phoenix"),
("38.17492", "-122.2608", "American Canyon", "US", "America/Los_Angeles"),
("33.92946", "-116.97725", "Beaumont", "US", "America/Los_Angeles"),
("34.21639", "-119.0376", "Camarillo", "US", "America/Los_Angeles"),
("34.09668", "-117.71978", "Claremont", "US", "America/Los_Angeles"),
("38.54491", "-121.74052", "Davis", "US", "America/Los_Angeles"),
("33.03699", "-117.29198", "Encinitas", "US", "America/Los_Angeles"),
("34.14251", "-118.25508", "Glendale", "US", "America/Los_Angeles"),
("33.7207", "-116.21677", "Indio", "US", "America/Los_Angeles"),
("33.52253", "-117.70755", "Laguna Niguel", "US", "America/Los_Angeles"),
("34.63915", "-120.45794", "Lompoc", "US", "America/Los_Angeles"),
("32.9156", "-117.14392", "Mira Mesa", "US", "America/Los_Angeles"),
("33.93113", "-117.54866", "Norco", "US", "America/Los_Angeles"),
("33.72255", "-116.37697", "Palm Desert", "US", "America/Los_Angeles"),
("36.06523", "-119.01677", "Porterville", "US", "America/Los_Angeles"),
("37.73604", "-120.93549", "Riverbank", "US", "America/Los_Angeles"),
("34.09611", "-118.10583", "San Gabriel", "US", "America/Los_Angeles"),
("34.95303", "-120.43572", "Santa Maria", "US", "America/Los_Angeles"),
("33.95015", "-118.03917", "South Whittier", "US", "America/Los_Angeles"),
("33.76446", "-117.79394", "North Tustin", "US", "America/Los_Angeles"),
("36.91023", "-121.75689", "Watsonville", "US", "America/Los_Angeles"),
("39.72943", "-104.83192", "Aurora", "US", "America/Denver"),
("39.57582", "-105.11221", "Ken Caryl", "US", "America/Denver"),
("32.42067", "-104.22884", "Carlsbad", "US", "America/Denver"),
("36.20829", "-115.98391", "Pahrump", "US", "America/Los_Angeles"),
("31.84568", "-102.36764", "Odessa", "US", "America/Chicago"),
("40.58654", "-122.39168", "Redding", "US", "America/Los_Angeles"),
("43.54072", "-116.56346", "Nampa", "US", "America/Boise"),
("45.49428", "-122.86705", "Aloha", "US", "America/Los_Angeles"),
("44.99012", "-123.02621", "Keizer", "US", "America/Los_Angeles"),
("45.53929", "-122.38731", "Troutdale", "US", "America/Los_Angeles"),
("40.65995", "-111.99633", "Kearns", "US", "America/Denver"),
("40.34912", "-111.90466", "Saratoga Springs", "US", "America/Denver"),
("47.76232", "-122.2054", "Bothell", "US", "America/Los_Angeles"),
("47.38093", "-122.23484", "Kent", "US", "America/Los_Angeles"),
("47.64995", "-117.23991", "Opportunity", "US", "America/Los_Angeles"),
("46.32374", "-120.00865", "Sunnyside", "US", "America/Los_Angeles"),
("20.88953", "-156.47432", "Kahului", "US", "Pacific/Honolulu"),
("40.81", "-73.9625", "Morningside Heights", "US", "America/New_York"),
("43.16547", "-77.70066", "Gates-North Gates", "US", "America/New_York"),
("47.4943", "-122.24092", "Bryn Mawr-Skyway", "US", "America/Los_Angeles"),
("47.80527", "-122.24064", "Bothell West", "US", "America/Los_Angeles"),
("37.71715", "-122.40433", "Visitacion Valley", "US", "America/Los_Angeles"),
("-33.38056", "-56.52361", "Durazno", "UY", "America/Montevideo"),
("41.29444", "69.67639", "Parkent", "UZ", "Asia/Tashkent"),
("40.11583", "67.84222", "Jizzax", "UZ", "Asia/Samarkand"),
("40.78206", "72.34424", "Andijon", "UZ", "Asia/Tashkent"),
("9.91861", "-68.30472", "Tinaquillo", "VE", "America/Caracas"),
("10.22677", "-67.33122", "La Victoria", "VE", "America/Caracas"),
("8.35122", "-62.64102", "Ciudad Guayana", "VE", "America/Caracas"),
("8.62261", "-70.20749", "Barinas", "VE", "America/Caracas"),
("10.29085", "105.75635", "Sa Dec", "VN", "Asia/Ho_Chi_Minh"),
("-17.73648", "168.31366", "Port-Vila", "VU", "Pacific/Efate"),
("42.62833", "20.89389", "Glogovac", "XK", "Europe/Belgrade"),
("14.53767", "46.83187", "Ataq", "YE", "Asia/Aden"),
("-27.76952", "30.79165", "Vryheid", "ZA", "Africa/Johannesburg"),
("-26.93366", "29.24152", "Standerton", "ZA", "Africa/Johannesburg"),
("-24.19436", "29.00974", "Mokopane", "ZA", "Africa/Johannesburg"),
)
def coordinate(self, center=None, radius=0.001):
    """Return a geo coordinate as a ``Decimal`` with six decimal places.

    Without a ``center``, a uniformly random value in [-180, 180] is
    produced. With a ``center``, a point is drawn uniformly from
    ``[center - radius, center + radius]``.
    """
    if center is not None:
        # Pick a point within `radius` of the requested center.
        lo = float(center) - float(radius)
        hi = float(center) + float(radius)
        value = self.generator.random.uniform(lo, hi)
    else:
        # Full coordinate range, expressed in millionths of a degree.
        value = self.generator.random.randint(-180000000, 180000000) / 1000000
    return Decimal(str(value)).quantize(Decimal(".000001"))
def latitude(self):
    """Return a random latitude.

    ``coordinate()`` spans [-180, 180]; halving maps the result into
    the valid latitude range of [-90, 90].
    """
    full_range = self.coordinate()
    return full_range / 2
def longitude(self):
    """Return a random longitude in the full coordinate range [-180, 180]."""
    return self.coordinate()
def latlng(self):
    """Return a ``(latitude, longitude)`` pair."""
    lat = self.latitude()
    lng = self.longitude()
    return lat, lng
def local_latlng(self, country_code='US', coords_only=False):
    """Return a random location known to exist on land in a given country.

    :param country_code: two-letter country code used to filter
        ``land_coords`` (default ``'US'``); see the ``land_coords`` list
        for available locations/countries.
    :param coords_only: when true, return only ``(latitude, longitude)``
        instead of the full record.
    :returns: ``(latitude, longitude, place name, country code, timezone)``
        (or just the coordinate pair with ``coords_only``), or ``None``
        when no location is known for ``country_code``.
    """
    results = [loc for loc in self.land_coords if loc[3] == country_code]
    if results:
        place = self.random_element(results)
        return (place[0], place[1]) if coords_only else place
    # No land coordinates recorded for this country code.
    return None
def location_on_land(self, coords_only=False):
    """Return a random coordinate set guaranteed to exist on land.

    The full record is ``(latitude, longitude, place name, two-letter
    country code, timezone)``; with ``coords_only`` set, only the
    ``(latitude, longitude)`` pair is returned.
    """
    record = self.random_element(self.land_coords)
    if coords_only:
        return record[0], record[1]
    return record
| mit |
laiqiqi886/kbengine | kbe/src/lib/python/Lib/multiprocessing/popen_spawn_win32.py | 102 | 2998 | import os
import msvcrt
import signal
import sys
import _winapi
from . import context
from . import spawn
from . import reduction
from . import util
__all__ = ['Popen']
#
#
#
# Exit code used by terminate(); wait() maps it back to -signal.SIGTERM.
TERMINATE = 0x10000
# True when running as a frozen executable on Windows (py2exe and friends).
WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False))
# True when running under pythonservice.exe (Windows service host).
WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
#
# We define a Popen class similar to the one from subprocess, but
# whose constructor takes a process object as its argument.
#
class Popen(object):
    '''
    Start a subprocess to run the code of a process object
    '''
    method = 'spawn'

    def __init__(self, process_obj):
        # Snapshot of interpreter state (sys.path, cwd, log level, ...)
        # that the child must restore before unpickling process_obj.
        prep_data = spawn.get_preparation_data(process_obj._name)

        # read end of pipe will be "stolen" by the child process
        # -- see spawn_main() in spawn.py.
        rhandle, whandle = _winapi.CreatePipe(None, 0)
        # Wrap the write handle in a CRT file descriptor so it can be
        # used with the ordinary open() below.
        wfd = msvcrt.open_osfhandle(whandle, 0)
        cmd = spawn.get_command_line(parent_pid=os.getpid(),
                                     pipe_handle=rhandle)
        cmd = ' '.join('"%s"' % x for x in cmd)

        with open(wfd, 'wb', closefd=True) as to_child:
            # start process
            try:
                hp, ht, pid, tid = _winapi.CreateProcess(
                    spawn.get_executable(), cmd,
                    None, None, False, 0, None, None, None)
                # Thread handle is never needed; close it immediately.
                _winapi.CloseHandle(ht)
            except:
                # Creation failed: reclaim the read handle the child
                # would have inherited, then propagate.
                _winapi.CloseHandle(rhandle)
                raise

            # set attributes of self
            self.pid = pid
            self.returncode = None
            self._handle = hp
            self.sentinel = int(hp)
            # Ensure the process handle is closed when self is collected.
            util.Finalize(self, _winapi.CloseHandle, (self.sentinel,))

            # send information to child
            context.set_spawning_popen(self)
            try:
                reduction.dump(prep_data, to_child)
                reduction.dump(process_obj, to_child)
            finally:
                context.set_spawning_popen(None)

    def duplicate_for_child(self, handle):
        """Duplicate *handle* into the child; only valid while spawning."""
        assert self is context.get_spawning_popen()
        return reduction.duplicate(handle, self.sentinel)

    def wait(self, timeout=None):
        """Wait up to *timeout* seconds (forever if None) for the child;
        return its exit code, or None if it is still running."""
        if self.returncode is None:
            if timeout is None:
                msecs = _winapi.INFINITE
            else:
                # Convert seconds to milliseconds, rounding to nearest.
                msecs = max(0, int(timeout * 1000 + 0.5))

            res = _winapi.WaitForSingleObject(int(self._handle), msecs)
            if res == _winapi.WAIT_OBJECT_0:
                code = _winapi.GetExitCodeProcess(self._handle)
                if code == TERMINATE:
                    # Child was killed via terminate(); report as SIGTERM.
                    code = -signal.SIGTERM
                self.returncode = code

        return self.returncode

    def poll(self):
        """Non-blocking check: return exit code or None."""
        return self.wait(timeout=0)

    def terminate(self):
        """Forcibly stop the child with the TERMINATE exit code."""
        if self.returncode is None:
            try:
                _winapi.TerminateProcess(int(self._handle), TERMINATE)
            except OSError:
                # Process may already be gone; only re-raise if it is
                # apparently still alive after a grace period.
                if self.wait(timeout=1.0) is None:
                    raise
| lgpl-3.0 |
bobisme/odoo | addons/board/board.py | 70 | 6623 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2013 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from operator import itemgetter
from textwrap import dedent
from openerp import tools
from openerp.osv import fields, osv
class board_board(osv.osv):
    """Virtual model backing user-configurable dashboards.

    Boards are not stored as regular records (``_auto = False``); the
    interesting behaviour is serving a (possibly user-customized) form
    view assembled from other actions.
    """
    _name = 'board.board'
    _description = "Board"
    _auto = False
    _columns = {}

    @tools.cache()
    def list(self, cr, uid, context=None):
        """Return ``[{'id': menu_id, 'name': menu_name}]`` for every menu
        entry that opens a board action.  The result is cached; call
        :meth:`_clear_list_cache` after boards are added or removed."""
        Actions = self.pool.get('ir.actions.act_window')
        Menus = self.pool.get('ir.ui.menu')
        IrValues = self.pool.get('ir.values')

        act_ids = Actions.search(cr, uid, [('res_model', '=', self._name)], context=context)
        refs = ['%s,%s' % (Actions._name, act_id) for act_id in act_ids]
        # cannot search "action" field on menu (non stored function field without search_fnct)
        irv_ids = IrValues.search(cr, uid, [
            ('model', '=', 'ir.ui.menu'),
            ('key', '=', 'action'),
            ('key2', '=', 'tree_but_open'),
            ('value', 'in', refs),
        ], context=context)
        menu_ids = map(itemgetter('res_id'), IrValues.read(cr, uid, irv_ids, ['res_id'], context=context))
        menu_names = Menus.name_get(cr, uid, menu_ids, context=context)
        return [dict(id=m[0], name=m[1]) for m in menu_names]

    def _clear_list_cache(self):
        # Invalidate the @tools.cache()'d result of list().
        self.list.clear_cache(self)

    def create(self, cr, user, vals, context=None):
        # Boards are not real records; creation is deliberately a no-op.
        return 0

    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Overrides orm field_view_get.

        Substitutes the user's customized board layout (ir.ui.view.custom)
        for the requested view when one exists, strips unauthorized
        actions from the arch and disables the toolbar.
        @return: Dictionary of Fields, arch and toolbar.
        """
        # NOTE: the original assigned res = {} here first; that value was
        # immediately discarded, so the dead assignment has been removed.
        res = super(board_board, self).fields_view_get(cr, user, view_id, view_type,
                                                       context, toolbar=toolbar, submenu=submenu)

        CustView = self.pool.get('ir.ui.view.custom')
        vids = CustView.search(cr, user, [('user_id', '=', user), ('ref_id', '=', view_id)], context=context)
        if vids:
            view_id = vids[0]
            arch = CustView.browse(cr, user, view_id, context=context)
            res['custom_view_id'] = view_id
            res['arch'] = arch.arch
        res['arch'] = self._arch_preprocessing(cr, user, res['arch'], context=context)
        res['toolbar'] = {'print': [], 'action': [], 'relate': []}
        return res

    def _arch_preprocessing(self, cr, user, arch, context=None):
        """Parse *arch* and drop <action> nodes the user may not see."""
        from lxml import etree

        def remove_unauthorized_children(node):
            # Recursively strip <action> nodes flagged invisible.
            for child in node.iterchildren():
                if child.tag == 'action' and child.get('invisible'):
                    node.remove(child)
                else:
                    child = remove_unauthorized_children(child)
            return node

        def encode(s):
            # Python 2: lxml wants byte strings.
            if isinstance(s, unicode):
                return s.encode('utf8')
            return s

        archnode = etree.fromstring(encode(arch))
        return etree.tostring(remove_unauthorized_children(archnode), pretty_print=True)
class board_create(osv.osv_memory):
    """Wizard that creates a new, empty dashboard: a form view, a window
    action pointing at board.board, and a menu entry under the chosen
    parent menu."""

    def board_create(self, cr, uid, ids, context=None):
        """Create view + action + menu for the new board, clear the
        cached board list, then ask the web client to reload so the
        new menu entry becomes visible."""
        assert len(ids) == 1
        this = self.browse(cr, uid, ids[0], context=context)

        # Empty two-column board layout (style "2-1").
        view_arch = dedent("""<?xml version="1.0"?>
            <form string="%s" version="7.0">
            <board style="2-1">
            <column/>
            <column/>
            </board>
            </form>
        """.strip() % (this.name,))

        view_id = self.pool.get('ir.ui.view').create(cr, uid, {
            'name': this.name,
            'model': 'board.board',
            'priority': 16,
            'type': 'form',
            'arch': view_arch,
        }, context=context)

        action_id = self.pool.get('ir.actions.act_window').create(cr, uid, {
            'name': this.name,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'board.board',
            'usage': 'menu',
            'view_id': view_id,
            # Placeholder shown while the dashboard has no content yet.
            'help': dedent('''<div class="oe_empty_custom_dashboard">
                <p>
                    <b>This dashboard is empty.</b>
                </p><p>
                    To add the first report into this dashboard, go to any
                    menu, switch to list or graph view, and click <i>'Add to
                    Dashboard'</i> in the extended search options.
                </p><p>
                    You can filter and group data before inserting into the
                    dashboard using the search options.
                </p>
            </div>
            ''')
        }, context=context)

        menu_id = self.pool.get('ir.ui.menu').create(cr, uid, {
            'name': this.name,
            'parent_id': this.menu_parent_id.id,
            'action': 'ir.actions.act_window,%s' % (action_id,)
        }, context=context)

        # The list of boards is cached; invalidate so the new one shows up.
        self.pool.get('board.board')._clear_list_cache()

        return {
            'type': 'ir.actions.client',
            'tag': 'reload',
            'params': {
                'menu_id': menu_id
            },
        }

    def _default_menu_parent_id(self, cr, uid, context=None):
        # Default parent: Reporting / Dashboard menu from the base module.
        _, menu_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'menu_reporting_dashboard')
        return menu_id

    _name = "board.create"
    _description = "Board Creation"
    _columns = {
        'name': fields.char('Board Name', required=True),
        'menu_parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
    }
    _defaults = {
        'menu_parent_id': _default_menu_parent_id,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bruce2728/kernel_lge_d802 | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Read packed little-endian u32 values from stdin and emit them as
# space-separated "index=value" pairs (index in hex) on a single line,
# as expected by the cxacru adsl_config sysfs attribute.
for i, chunk in enumerate(iter(lambda: sys.stdin.read(4), "")):
    if len(chunk) != 4:
        # Trailing partial word: terminate the output line, report, bail.
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(chunk)))
        sys.exit(1)
    if i > 0:
        sys.stdout.write(" ")
    sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", chunk)[0]))
sys.stdout.write("\n")
| gpl-2.0 |
joone/chromium-crosswalk | tools/perf/page_sets/startup_pages.py | 18 | 2069 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
class BrowserStartupSharedState(shared_page_state.SharedPageState):
  """Shared state that restarts the browser for every single story."""

  def __init__(self, test, finder_options, story_set):
    super(BrowserStartupSharedState, self).__init__(
        test, finder_options, story_set)

  def DidRunStory(self, results):
    super(BrowserStartupSharedState, self).DidRunStory(results)
    # Stop the browser after every story so the next story triggers a
    # fresh browser launch and measures full startup.
    self._StopBrowser()
class StartedPage(page_module.Page):
  """A page opened via the browser's startup URL instead of an explicit
  navigation, so startup cost is part of what gets measured."""

  def __init__(self, url, page_set):
    super(StartedPage, self).__init__(
        url=url, page_set=page_set, startup_url=url,
        shared_page_state_class=BrowserStartupSharedState)
    self.archive_data_file = 'data/startup_pages.json'

  def RunNavigateSteps(self, action_runner):
    # Do not call super.RunNavigateSteps() to avoid reloading the page that has
    # already been opened with startup_url.
    # TODO(gabadie): Get rid of this (crbug.com/555504)
    action_runner.Wait(10)

  def RunPageInteractions(self, action_runner):
    self.RunNavigateSteps(action_runner)
class StartupPagesPageSet(story.StorySet):
  """Pages for testing starting Chrome with a URL.

  Note that this file can't be used with record_wpr, since record_wpr requires
  a true navigate step, which we do not want for startup testing. Instead use
  record_wpr startup_pages_record to record data for this test."""

  def __init__(self):
    super(StartupPagesPageSet, self).__init__(
        archive_data_file='data/startup_pages.json',
        cloud_storage_bucket=story.PARTNER_BUCKET)

    # Blank page - minimal startup baseline.
    self.AddStory(StartedPage('about:blank', self))
    # Typical page.
    self.AddStory(StartedPage('http://bbc.co.uk', self))
    # Horribly complex page - stress test!
    self.AddStory(StartedPage('http://kapook.com', self))
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.