repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
MartynShaw/audacity | refs/heads/master | lib-src/lv2/sord/waflib/ConfigSet.py | 266 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import copy,re,os
from waflib import Logs,Utils
# Matches "KEY = value" lines in a stored ConfigSet file: group 1 is an optional
# leading comment marker, group 2 the key, group 3 the repr()'d value.
# Raw string replaces the original "\ " escapes, which are invalid escape
# sequences in a normal string literal (DeprecationWarning since Python 3.6,
# a SyntaxWarning/error on newer interpreters); the matched text is identical.
re_imp = re.compile(r'^(#)*?([^#=]*?) = (.*?)$', re.M)
class ConfigSet(object):
"""A dict-like set of configuration values with single-parent delegation.

Lookups fall back along the `parent` chain (see derive()), while writes
always land in this instance's own `table`.  Missing keys yield [] rather
than raising, and attribute access is aliased to item access, so
`env.CFLAGS` and `env['CFLAGS']` are interchangeable.
"""
__slots__=('table','parent')
def __init__(self,filename=None):
self.table={}
if filename:
self.load(filename)
def __contains__(self,key):
# Check this table first, then recurse through the parent chain (if any).
if key in self.table:return True
try:return self.parent.__contains__(key)
except AttributeError:return False
def keys(self):
# Union of keys over the whole parent chain, returned sorted.
keys=set()
cur=self
while cur:
keys.update(cur.table.keys())
cur=getattr(cur,'parent',None)
keys=list(keys)
keys.sort()
return keys
def __str__(self):
return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
def __getitem__(self,key):
# Walk up the parent chain; the AttributeError raised by the missing
# `parent` slot terminates the walk, and unknown keys default to [].
try:
while 1:
x=self.table.get(key,None)
if not x is None:
return x
self=self.parent
except AttributeError:
return[]
def __setitem__(self,key,value):
self.table[key]=value
def __delitem__(self,key):
# "Deletion" stores [] locally so any value in a parent stays shadowed.
self[key]=[]
def __getattr__(self,name):
if name in self.__slots__:
return object.__getattr__(self,name)
else:
return self[name]
def __setattr__(self,name,value):
if name in self.__slots__:
object.__setattr__(self,name,value)
else:
self[name]=value
def __delattr__(self,name):
if name in self.__slots__:
object.__delattr__(self,name)
else:
del self[name]
def derive(self):
# Child ConfigSet that reads through to self but keeps its own writes.
newenv=ConfigSet()
newenv.parent=self
return newenv
def detach(self):
# Break the link to the parent; when a parent existed, deep-copy the
# merged values so later mutations cannot leak between the two sets.
tbl=self.get_merged_dict()
try:
delattr(self,'parent')
except AttributeError:
pass
else:
keys=tbl.keys()
for x in keys:
tbl[x]=copy.deepcopy(tbl[x])
self.table=tbl
def get_flat(self,key):
# Value as a single space-joined string (typically lists of flags).
s=self[key]
if isinstance(s,str):return s
return' '.join(s)
def _get_list_value_for_modification(self,key):
# Return a list stored in *this* table, safe for in-place mutation:
# values inherited from the parent are copied so the parent is untouched.
try:
value=self.table[key]
except KeyError:
try:value=self.parent[key]
except AttributeError:value=[]
if isinstance(value,list):
value=value[:]
else:
value=[value]
else:
if not isinstance(value,list):
value=[value]
self.table[key]=value
return value
def append_value(self,var,val):
current_value=self._get_list_value_for_modification(var)
if isinstance(val,str):
val=[val]
current_value.extend(val)
def prepend_value(self,var,val):
if isinstance(val,str):
val=[val]
self.table[var]=val+self._get_list_value_for_modification(var)
def append_unique(self,var,val):
# Append only the elements not already present, preserving order.
if isinstance(val,str):
val=[val]
current_value=self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
# Flatten the parent chain; nearest table is applied last so it wins.
table_list=[]
env=self
while 1:
table_list.insert(0,env.table)
try:env=env.parent
except AttributeError:break
merged_table={}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self,filename):
# Serialize the merged configuration as "KEY = repr(value)" lines.
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf=[]
merged_table=self.get_merged_dict()
keys=list(merged_table.keys())
keys.sort()
# ascii() only exists on Python 3; fall back to repr() on Python 2.
try:
fun=ascii
except NameError:
fun=repr
for k in keys:
# undo_stack is transient bookkeeping (see stash/revert); never persist it.
if k!='undo_stack':
buf.append('%s = %s\n'%(k,fun(merged_table[k])))
Utils.writef(filename,''.join(buf))
def load(self,filename):
# Re-read a file produced by store(); values are rebuilt with eval(),
# so only files written by waf itself should ever be loaded.
tbl=self.table
code=Utils.readf(filename,m='rU')
for m in re_imp.finditer(code):
g=m.group
tbl[g(2)]=eval(g(3))
Logs.debug('env: %s'%str(self.table))
def update(self,d):
for k,v in d.items():
self[k]=v
def stash(self):
# Snapshot the current table (deep-copied) so revert() can roll back.
orig=self.table
tbl=self.table=self.table.copy()
for x in tbl.keys():
tbl[x]=copy.deepcopy(tbl[x])
self.undo_stack=self.undo_stack+[orig]
def revert(self):
self.table=self.undo_stack.pop(-1)
|
diogenesjf/userinfuser | refs/heads/master | fantasm/action.py | 28 | """ Fantasm: A taskqueue-based Finite State Machine for App Engine Python
Docs and examples: http://code.google.com/p/fantasm/
Copyright 2010 VendAsta Technologies Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class FSMAction(object):
    """Interface implemented by all user-supplied state-machine actions."""

    def execute(self, context, obj):
        """Perform the action.

        @param context: The FSMContext (i.e., machine); context.get() and
            context.put() move data in and out of the context.
        @param obj: An object which the action can operate on.

        For the main state action the return value is a string naming the
        event to dispatch next; for every other action it is ignored.
        Because of retry mechanisms (notably with TaskQueueFSMContext) an
        execute() may run more than once with exactly the same context, so
        implementations should take care to be idempotent.
        """
        raise NotImplementedError()
class ContinuationFSMAction(FSMAction):
    """Interface for actions that drive a continuation (batched/iterated work)."""

    def continuation(self, context, obj, token=None):
        """Consume an optional token and return the next continuation token.

        @param token: the current continuation token (may be None)
        @param context: The FSMContext (i.e., machine); context.get() and
            context.put() move data in and out of the context.
        @param obj: An object which the action can operate on.
        """
        raise NotImplementedError()
class DatastoreContinuationFSMAction(ContinuationFSMAction):
""" A datastore continuation. """
def continuation(self, context, obj, token=None):
""" Accepts a token (an optional datastore cursor) and returns the next token for the continuation.
The query results are placed on obj['results'] (and the deprecated obj.results);
the first result, or None, is placed on obj['result'] for batch-size-1 convenience.
"""
# Build the continuation query; the token, when present, is the datastore
# cursor from which this batch should resume.
query = self.getQuery(context, obj)
cursor = token
if cursor:
query.with_cursor(cursor)
limit = self.getBatchSize(context, obj)
# place results on obj.results
obj['results'] = query.fetch(limit)
obj.results = obj['results'] # deprecated interface
# add first obj.results item on obj.result - convenient for batch size 1
if obj['results'] and len(obj['results']) > 0:
obj['result'] = obj['results'][0]
else:
obj['result'] = None
obj.result = obj['result'] # deprecated interface
# A full batch implies more rows may remain: hand back the new cursor so
# another continuation gets scheduled; returning None ends the iteration.
if len(obj['results']) == limit:
return query.cursor()
def getQuery(self, context, obj):
""" Returns a GqlQuery """
raise NotImplementedError()
# W0613: 78:DatastoreContinuationFSMAction.getBatchSize: Unused argument 'obj'
def getBatchSize(self, context, obj): # pylint: disable-msg=W0613
""" Returns a batch size, default 1. Override for different values. """
return 1
|
ychen820/microblog | refs/heads/master | y/google-cloud-sdk/platform/google_appengine/lib/django-1.3/django/contrib/gis/tests/geogapp/tests.py | 222 | """
Tests for geography support in PostGIS 1.5+
"""
import os
from django.contrib.gis import gdal
from django.contrib.gis.measure import D
from django.test import TestCase
from models import City, County, Zipcode
class GeographyTest(TestCase):
# Exercises PostGIS 1.5+ geography-column support through the GeoDjango ORM.
# Relies on the geogapp fixtures (8 cities, Texas zipcodes) and, for test05,
# the shared counties shapefile.
def test01_fixture_load(self):
"Ensure geography features loaded properly."
self.assertEqual(8, City.objects.count())
def test02_distance_lookup(self):
"Testing GeoQuerySet distance lookup support on non-point geography fields."
# Both distance_lte and dwithin must accept a geography polygon + Distance.
z = Zipcode.objects.get(code='77002')
cities1 = list(City.objects
.filter(point__distance_lte=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
cities2 = list(City.objects
.filter(point__dwithin=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
for cities in [cities1, cities2]:
self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)
def test03_distance_method(self):
"Testing GeoQuerySet.distance() support on non-point geography fields."
# `GeoQuerySet.distance` is not allowed on geometry fields.
# NOTE(review): the queryset is built but never evaluated or asserted --
# this only checks that constructing the distance() query does not raise.
htown = City.objects.get(name='Houston')
qs = Zipcode.objects.distance(htown.point)
def test04_invalid_operators_functions(self):
"Ensuring exceptions are raised for operators & functions invalid on geography fields."
# Only a subset of the geometry functions & operator are available
# to PostGIS geography types. For more information, visit:
# http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
z = Zipcode.objects.get(code='77002')
# ST_Within not available.
self.assertRaises(ValueError, City.objects.filter(point__within=z.poly).count)
# `@` operator not available.
self.assertRaises(ValueError, City.objects.filter(point__contained=z.poly).count)
# Regression test for #14060, `~=` was never really implemented for PostGIS.
htown = City.objects.get(name='Houston')
self.assertRaises(ValueError, City.objects.get, point__exact=htown.point)
def test05_geography_layermapping(self):
"Testing LayerMapping support on models with geography fields."
# There is a similar test in `layermap` that uses the same data set,
# but the County model here is a bit different.
if not gdal.HAS_GDAL: return
from django.contrib.gis.utils import LayerMapping
# Getting the shapefile and mapping dictionary.
shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
co_mapping = {'name' : 'Name',
'state' : 'State',
'mpoly' : 'MULTIPOLYGON',
}
# Reference county names, number of polygons, and state names.
names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
lm.save(silent=True, strict=True)
# Geography fields normalize geometries to WGS84 (SRID 4326) on save.
for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
self.assertEqual(4326, c.mpoly.srid)
self.assertEqual(num_poly, len(c.mpoly))
self.assertEqual(name, c.name)
self.assertEqual(state, c.state)
def test06_geography_area(self):
"Testing that Area calculations work on geography columns."
from django.contrib.gis.measure import A
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
ref_area = 5439084.70637573
tol = 5
z = Zipcode.objects.area().get(code='77002')
self.assertAlmostEqual(z.area.sq_m, ref_area, tol)
|
mancoast/CPythonPyc_test | refs/heads/master | cpython/263_test_future3.py | 238 | from __future__ import nested_scopes
from __future__ import division
import unittest
from test import test_support
x = 2
def nester():
    """Return 3 via a closure, exercising nested-scope name resolution."""
    local_val = 3
    def fetch():
        # resolves local_val in nester's enclosing scope, not at module level
        return local_val
    return fetch()
class TestFuture(unittest.TestCase):
def test_floor_div_operator(self):
self.assertEqual(7 // 2, 3)
def test_true_div_as_default(self):
self.assertAlmostEqual(7 / 2, 3.5)
def test_nested_scopes(self):
self.assertEqual(nester(), 3)
def test_main():
# regrtest entry point: run every TestFuture case via the test_support driver.
test_support.run_unittest(TestFuture)
if __name__ == "__main__":
test_main()
|
helldorado/ansible | refs/heads/devel | lib/ansible/modules/network/fortios/fortios_firewall_interface_policy6.py | 24 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_interface_policy6
short_description: Configure IPv6 interface policies in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by
allowing the user to configure firewall feature and interface_policy6 category.
Examples include all options and need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
firewall_interface_policy6:
description:
- Configure IPv6 interface policies.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
address-type:
description:
- Policy address type (IPv4 or IPv6).
choices:
- ipv4
- ipv6
application-list:
description:
- Application list name. Source application.list.name.
application-list-status:
description:
- Enable/disable application control.
choices:
- enable
- disable
av-profile:
description:
- Antivirus profile. Source antivirus.profile.name.
av-profile-status:
description:
- Enable/disable antivirus.
choices:
- enable
- disable
comments:
description:
- Comments.
dlp-sensor:
description:
- DLP sensor name. Source dlp.sensor.name.
dlp-sensor-status:
description:
- Enable/disable DLP.
choices:
- enable
- disable
dsri:
description:
- Enable/disable DSRI.
choices:
- enable
- disable
dstaddr6:
description:
- IPv6 address object to limit traffic monitoring to network traffic sent to the specified address or range.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
interface:
description:
- Monitored interface name from available interfaces. Source system.zone.name system.interface.name.
ips-sensor:
description:
- IPS sensor name. Source ips.sensor.name.
ips-sensor-status:
description:
- Enable/disable IPS.
choices:
- enable
- disable
label:
description:
- Label.
logtraffic:
description:
- "Logging type to be used in this policy (Options: all | utm | disable, Default: utm)."
choices:
- all
- utm
- disable
policyid:
description:
- Policy ID.
required: true
scan-botnet-connections:
description:
- Enable/disable scanning for connections to Botnet servers.
choices:
- disable
- block
- monitor
service6:
description:
- Service name.
suboptions:
name:
description:
- Address name. Source firewall.service.custom.name firewall.service.group.name.
required: true
spamfilter-profile:
description:
- Antispam profile. Source spamfilter.profile.name.
spamfilter-profile-status:
description:
- Enable/disable antispam.
choices:
- enable
- disable
srcaddr6:
description:
- IPv6 address object to limit traffic monitoring to network traffic sent from the specified address or range.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
status:
description:
- Enable/disable this policy.
choices:
- enable
- disable
webfilter-profile:
description:
- Web filter profile. Source webfilter.profile.name.
webfilter-profile-status:
description:
- Enable/disable web filtering.
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure IPv6 interface policies.
fortios_firewall_interface_policy6:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
firewall_interface_policy6:
state: "present"
address-type: "ipv4"
application-list: "<your_own_value> (source application.list.name)"
application-list-status: "enable"
av-profile: "<your_own_value> (source antivirus.profile.name)"
av-profile-status: "enable"
comments: "<your_own_value>"
dlp-sensor: "<your_own_value> (source dlp.sensor.name)"
dlp-sensor-status: "enable"
dsri: "enable"
dstaddr6:
-
name: "default_name_13 (source firewall.address6.name firewall.addrgrp6.name)"
interface: "<your_own_value> (source system.zone.name system.interface.name)"
ips-sensor: "<your_own_value> (source ips.sensor.name)"
ips-sensor-status: "enable"
label: "<your_own_value>"
logtraffic: "all"
policyid: "19"
scan-botnet-connections: "disable"
service6:
-
name: "default_name_22 (source firewall.service.custom.name firewall.service.group.name)"
spamfilter-profile: "<your_own_value> (source spamfilter.profile.name)"
spamfilter-profile-status: "enable"
srcaddr6:
-
name: "default_name_26 (source firewall.address6.name firewall.addrgrp6.name)"
status: "enable"
webfilter-profile: "<your_own_value> (source webfilter.profile.name)"
webfilter-profile-status: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
    """Authenticate the module-level FortiOS API handle against the device.

    *data* must provide 'host', 'username' and 'password'; the optional
    'https' flag selects the transport (HTTPS is used when absent or true).
    """
    target_host = data['host']
    user = data['username']
    secret = data['password']
    fos.debug('on')
    # HTTPS is the default; only an explicit falsy flag switches it off.
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')
    fos.login(target_host, user, secret)
def filter_firewall_interface_policy6_data(json):
    """Project *json* onto the keys understood by the interface-policy6 API.

    Keys that are absent or explicitly None are dropped so they are never
    sent to the device.
    """
    option_list = ['address-type', 'application-list', 'application-list-status',
                   'av-profile', 'av-profile-status', 'comments',
                   'dlp-sensor', 'dlp-sensor-status', 'dsri',
                   'dstaddr6', 'interface', 'ips-sensor',
                   'ips-sensor-status', 'label', 'logtraffic',
                   'policyid', 'scan-botnet-connections', 'service6',
                   'spamfilter-profile', 'spamfilter-profile-status', 'srcaddr6',
                   'status', 'webfilter-profile', 'webfilter-profile-status']
    return {key: json[key]
            for key in option_list
            if key in json and json[key] is not None}
def firewall_interface_policy6(data, fos):
    """Create/update or delete an interface-policy6 entry on the device.

    The payload's 'state' selects the operation; the filtered payload is
    sent for "present", and the policyid is used as the key for "absent".
    """
    vdom = data['vdom']
    payload = data['firewall_interface_policy6']
    filtered_data = filter_firewall_interface_policy6_data(payload)
    if payload['state'] == "present":
        return fos.set('firewall',
                       'interface-policy6',
                       data=filtered_data,
                       vdom=vdom)
    if payload['state'] == "absent":
        return fos.delete('firewall',
                          'interface-policy6',
                          mkey=filtered_data['policyid'],
                          vdom=vdom)
def fortios_firewall(data, fos):
    """Log in, dispatch the selected firewall method, and log out.

    Returns a (is_error, has_changed, result) triple derived from the last
    response's 'status' field.
    """
    login(data)

    # Look the handler up by name in the module globals instead of eval():
    # same resolution, but without evaluating an arbitrary expression string.
    methodlist = ['firewall_interface_policy6']
    for method in methodlist:
        if data[method]:
            resp = globals()[method](data, fos)
            break

    fos.logout()
    # NOTE(review): assumes one methodlist key is present in data (enforced by
    # the module argument spec); otherwise `resp` would be unbound, as before.
    return not resp['status'] == "success", resp['status'] == "success", resp
def main():
# Ansible entry point: build the argument spec, validate the task parameters,
# push the interface-policy6 configuration to the FortiGate and report back.
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": "False"},
"firewall_interface_policy6": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"address-type": {"required": False, "type": "str",
"choices": ["ipv4", "ipv6"]},
"application-list": {"required": False, "type": "str"},
"application-list-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"av-profile": {"required": False, "type": "str"},
"av-profile-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"comments": {"required": False, "type": "str"},
"dlp-sensor": {"required": False, "type": "str"},
"dlp-sensor-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dsri": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dstaddr6": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"interface": {"required": False, "type": "str"},
"ips-sensor": {"required": False, "type": "str"},
"ips-sensor-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"label": {"required": False, "type": "str"},
"logtraffic": {"required": False, "type": "str",
"choices": ["all", "utm", "disable"]},
"policyid": {"required": True, "type": "int"},
"scan-botnet-connections": {"required": False, "type": "str",
"choices": ["disable", "block", "monitor"]},
"service6": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"spamfilter-profile": {"required": False, "type": "str"},
"spamfilter-profile-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"srcaddr6": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"webfilter-profile": {"required": False, "type": "str"},
"webfilter-profile-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# fortiosapi is an optional runtime dependency: fail the task with a clear
# message instead of a raw ImportError when it is missing.
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
# login() reaches the API handle through the module-level `fos` global.
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_firewall(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
redhat-openstack/horizon | refs/heads/mitaka-patches | openstack_dashboard/dashboards/project/network_topology/instances/tables.py | 23 | # Copyright 2013 NTT Innovation Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project.instances import tables
class InstancesTable(tables.InstancesTable):
# Network-topology variant of the project Instances table: inherits all
# columns and behavior, but restricts per-row actions to deletion only.
class Meta(object):
name = "instances"
verbose_name = _("Instances")
row_actions = (
tables.DeleteInstance,
)
|
RichHelle/data-science-from-scratch | refs/heads/master | first-edition/code/charts.py | 12133432 | |
garnertb/geonode | refs/heads/master | geonode/upload/templatetags/__init__.py | 12133432 | |
yousharizvi1/django_workshop_poll_app | refs/heads/master | polls/__init__.py | 12133432 | |
perezg/infoxchange | refs/heads/master | BASE/lib/python2.7/site-packages/django/conf/locale/he/__init__.py | 12133432 | |
jcfr/mystic | refs/heads/master | models/venkataraman.py | 1 | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Author: Alta Fang (altafang @caltech and alta @princeton)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
__doc__ = _doc = """
This is drawn from examples in Applied Optimization with MATLAB programming,
with the function definition found in [1].
References::
[1] Venkataraman, P. "Applied Optimization with MATLAB Programming",
John Wiley and Sons, Hoboken NJ, 2nd Edition, 2009.
"""
from abstract_model import AbstractFunction
from math import sin
class Sinc(AbstractFunction):
    __doc__ = \
    """a Venkataraman's sinc function generator

Venkataraman's sinc function [1] has the global minimum at the center
of concentric rings of local minima, with well depth decreasing with
distance from center.

The generated function f(x) is identical to equation (9.5) of example
9.1 of [1], and requires len(x) == 2.
""" + _doc

    def __init__(self, ndim=2):  # the model is defined on 2 dimensions
        AbstractFunction.__init__(self, ndim=ndim)
        return

    def function(self, coeffs):
        """evaluates Venkataraman's sinc function for a list of coeffs

f(x) = -20 * \sin(r(x))/r(x)

Where:
r(x) = \sqrt((x_0 - 4)^2 + (x_1 - 4)^2 + 0.1)

Inspect with mystic_model_plotter using::
mystic.models.venkat91 -b "-10:10:.1, -10:10:.1" -d

The minimum is f(x)=-19.668329370585823 at x=(4.0, 4.0)"""
        u, v = coeffs
        # radial term, offset by 0.1 so it never reaches zero at (4, 4)
        r = ((u - 4.)**2 + (v - 4.)**2 + 0.1)**.5
        return -20. * sin(r) / r

    minimizers = None  # XXX: there are many local minima

    pass
# cleanup
del _doc
# prepared instances
venkat91 = Sinc().function
# End of file
|
gkarlin/django-jenkins | refs/heads/master | build/Django/tests/regressiontests/i18n/commands/__init__.py | 149 | from django.utils.translation import ugettext as _
# Translators: This comment should be extracted
dummy1 = _("This is a translatable string.")
# This comment should not be extracted
dummy2 = _("This is another translatable string.")
|
blueboxgroup/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/test_microversions.py | 42 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
from nova.api.openstack import api_version_request as api_version
from nova import test
from nova.tests.unit.api.openstack import fakes
CONF = cfg.CONF
class MicroversionsTest(test.NoDBTestCase):
header_name = 'X-OpenStack-Nova-API-Version'
def _test_microversions(self, app, req, ret_code, ret_header=None):
req.environ['CONTENT_TYPE'] = "application/json"
res = req.get_response(app)
self.assertEqual(ret_code, res.status_int)
if ret_header:
self.assertEqual(ret_header,
res.headers[self.header_name])
return res
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def test_microversions_no_header(self, mock_namespace):
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank('/v2/fake/microversions')
res = req.get_response(app)
self.assertEqual(200, res.status_int)
resp_json = jsonutils.loads(res.body)
self.assertEqual('val', resp_json['param'])
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def test_microversions_return_header(self, mock_namespace):
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank('/v2/fake/microversions')
res = req.get_response(app)
self.assertEqual(200, res.status_int)
resp_json = jsonutils.loads(res.body)
self.assertEqual('val', resp_json['param'])
self.assertEqual("2.1", res.headers[self.header_name])
self.assertEqual(self.header_name, res.headers['Vary'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def test_microversions_return_header_non_default(self, mock_namespace,
mock_maxver):
mock_maxver.return_value = api_version.APIVersionRequest("2.3")
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank('/v2/fake/microversions')
req.headers = {self.header_name: '2.3'}
res = req.get_response(app)
self.assertEqual(200, res.status_int)
resp_json = jsonutils.loads(res.body)
self.assertEqual('val2', resp_json['param'])
self.assertEqual("2.3", res.headers[self.header_name])
self.assertEqual(self.header_name, res.headers['Vary'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def test_microversions_return_header_fault(self, mock_namespace,
mock_maxver):
mock_maxver.return_value = api_version.APIVersionRequest("3.0")
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank('/v2/fake/microversions')
req.headers = {self.header_name: '3.0'}
res = req.get_response(app)
self.assertEqual(400, res.status_int)
self.assertEqual("3.0", res.headers[self.header_name])
self.assertEqual(self.header_name, res.headers['Vary'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def _check_microversion_response(self, url, req_version, resp_param,
mock_namespace, mock_maxver):
mock_maxver.return_value = api_version.APIVersionRequest('2.3')
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank(url)
req.headers = {self.header_name: req_version}
res = req.get_response(app)
self.assertEqual(200, res.status_int)
resp_json = jsonutils.loads(res.body)
self.assertEqual(resp_param, resp_json['param'])
def test_microversions_with_header(self):
self._check_microversion_response('/v2/fake/microversions',
'2.3', 'val2')
def test_microversions_with_header_exact_match(self):
self._check_microversion_response('/v2/fake/microversions',
'2.2', 'val2')
def test_microversions2_no_2_1_version(self):
self._check_microversion_response('/v2/fake/microversions2',
'2.3', 'controller2_val1')
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
return_value='nova.api.v3.test_extensions')
def test_microversions2_later_version(self, mock_namespace, mock_maxver):
mock_maxver.return_value = api_version.APIVersionRequest("3.1")
app = fakes.wsgi_app_v21(init_only='test-microversions')
req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
req.headers = {self.header_name: '3.0'}
res = req.get_response(app)
self.assertEqual(202, res.status_int)
resp_json = jsonutils.loads(res.body)
self.assertEqual('controller2_val2', resp_json['param'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions2_version_too_high(self, mock_namespace,
                                         mock_maxver):
    """3.2 is inside the global range but above this resource's: 404."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.5")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
    req.headers = {self.header_name: '3.2'}
    res = req.get_response(app)
    self.assertEqual(404, res.status_int)
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions2_version_too_low(self, mock_namespace):
    """Requesting 2.1 on a resource whose support starts later: 404."""
    application = fakes.wsgi_app_v21(init_only='test-microversions')
    request = fakes.HTTPRequest.blank('/v2/fake/microversions2')
    request.headers = {self.header_name: '2.1'}
    response = request.get_response(application)
    self.assertEqual(404, response.status_int)
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions_global_version_too_high(self, mock_namespace,
                                               mock_maxver):
    """A version above the global maximum yields 406 with a fault body."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.5")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
    req.headers = {self.header_name: '3.7'}
    res = req.get_response(app)
    self.assertEqual(406, res.status_int)
    res_json = jsonutils.loads(res.body)
    # The fault message must spell out the supported range.
    self.assertEqual("Version 3.7 is not supported by the API. "
                     "Minimum is 2.1 and maximum is 3.5.",
                     res_json['computeFault']['message'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions_schema(self, mock_namespace, mock_maxver):
    """A POST body valid against the 2.2 schema passes validation."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.3")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions3')
    req.method = 'POST'
    req.headers = {self.header_name: '2.2'}
    req.environ['CONTENT_TYPE'] = "application/json"
    req.body = jsonutils.dumps({'dummy': {'val': 'foo'}})
    res = req.get_response(app)
    self.assertEqual(200, res.status_int)
    resp_json = jsonutils.loads(res.body)
    self.assertEqual('create_val1', resp_json['param'])
    # Response must echo the negotiated version and advertise Vary.
    self.assertEqual("2.2", res.headers[self.header_name])
    self.assertEqual(self.header_name, res.headers['Vary'])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions_schema_fail(self, mock_namespace, mock_maxver):
    """A POST body violating the 2.2 schema is rejected with 400."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.3")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions3')
    req.method = 'POST'
    req.headers = {self.header_name: '2.2'}
    req.environ['CONTENT_TYPE'] = "application/json"
    # 'invalid_param' is not an allowed property of 'dummy'.
    req.body = jsonutils.dumps({'dummy': {'invalid_param': 'foo'}})
    res = req.get_response(app)
    self.assertEqual(400, res.status_int)
    resp_json = jsonutils.loads(res.body)
    self.assertTrue(resp_json['badRequest']['message'].startswith(
        "Invalid input for field/attribute dummy."))
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions_schema_out_of_version_check(self, mock_namespace,
                                                   mock_maxver):
    """A body invalid for other version ranges still passes (200) when
    the schema attached to those ranges does not apply at 2.2."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.3")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1')
    req.method = 'PUT'
    req.headers = {self.header_name: '2.2'}
    req.body = jsonutils.dumps({'dummy': {'inv_val': 'foo'}})
    req.environ['CONTENT_TYPE'] = "application/json"
    res = req.get_response(app)
    self.assertEqual(200, res.status_int)
    resp_json = jsonutils.loads(res.body)
    self.assertEqual('update_val1', resp_json['param'])
    self.assertEqual("2.2", res.headers[self.header_name])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_microversions_schema_second_version(self, mock_namespace,
                                             mock_maxver):
    """At 2.10 the second schema applies: 'val2' is accepted on PUT."""
    mock_maxver.return_value = api_version.APIVersionRequest("3.3")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1')
    req.headers = {self.header_name: '2.10'}
    req.environ['CONTENT_TYPE'] = "application/json"
    req.method = 'PUT'
    req.body = jsonutils.dumps({'dummy': {'val2': 'foo'}})
    res = req.get_response(app)
    self.assertEqual(200, res.status_int)
    resp_json = jsonutils.loads(res.body)
    self.assertEqual('update_val1', resp_json['param'])
    self.assertEqual("2.10", res.headers[self.header_name])
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def _test_microversions_inner_function(self, version, expected_resp,
                                       mock_namespace,
                                       mock_maxver):
    """POST to microversions4 at `version`; assert 200, the expected
    'param' value, and that the response echoes the request version."""
    mock_maxver.return_value = api_version.APIVersionRequest("2.2")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions4')
    req.headers = {self.header_name: version}
    req.environ['CONTENT_TYPE'] = "application/json"
    req.method = 'POST'
    res = req.get_response(app)
    self.assertEqual(200, res.status_int)
    resp_json = jsonutils.loads(res.body)
    self.assertEqual(expected_resp, resp_json['param'])
    self.assertEqual(version, res.headers[self.header_name])
def test_microversions_inner_function_v22(self):
    # Version 2.2 selects the second inner-function implementation.
    self._test_microversions_inner_function('2.2', 'controller4_val2')

def test_microversions_inner_function_v21(self):
    # Version 2.1 selects the base implementation.
    self._test_microversions_inner_function('2.1', 'controller4_val1')
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def test_with_extends_decorator(self, mock_namespace, mock_maxver):
    """Both extension controllers augment the base response exactly once."""
    mock_maxver.return_value = api_version.APIVersionRequest('2.4')
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions5/item')
    req.headers = {'X-OpenStack-Nova-API-Version': '2.4'}
    res = req.get_response(app)
    self.assertEqual(200, res.status_int)
    expected_res = {
        "extend_ctrlr2": "val_2",
        "extend_ctrlr1": "val_1",
        "base_param": "base_val"}
    resp_json = jsonutils.loads(res.body)
    # Every returned key must be expected, with the expected value...
    for param in resp_json:
        self.assertIn(param, expected_res)
        self.assertEqual(expected_res[param], resp_json[param])
    # ...and nothing extra or missing.
    self.assertEqual(3, len(resp_json))
@mock.patch("nova.api.openstack.api_version_request.max_api_version")
@mock.patch("nova.api.openstack.APIRouterV21.api_extension_namespace",
            return_value='nova.api.v3.test_extensions')
def _test_microversions_actions(self, ret_code, ret_header, req_header,
                                mock_namespace,
                                mock_maxver):
    """POST an action body; delegate status/header checks to
    self._test_microversions (defined earlier in this class -- outside
    this excerpt) and, on 202, verify the action's JSON payload.

    A None req_header exercises the default-version path.
    """
    mock_maxver.return_value = api_version.APIVersionRequest("2.3")
    app = fakes.wsgi_app_v21(init_only='test-microversions')
    req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1/action')
    if req_header:
        req.headers = {self.header_name: req_header}
    req.method = 'POST'
    req.body = jsonutils.dumps({'foo': None})
    res = self._test_microversions(app, req, ret_code,
                                   ret_header=ret_header)
    if ret_code == 202:
        resp_json = jsonutils.loads(res.body)
        self.assertEqual({'foo': 'bar'}, resp_json)
def test_microversions_actions(self):
    # 2.1 action request succeeds (202) and echoes 2.1.
    self._test_microversions_actions(202, "2.1", "2.1")

def test_microversions_actions_too_high(self):
    # 2.3 is above the action's supported range: 404.
    self._test_microversions_actions(404, "2.3", "2.3")

def test_microversions_actions_no_header(self):
    # No version header: defaults apply, request succeeds at 2.1.
    self._test_microversions_actions(202, "2.1", None)
|
gmission/gmission | refs/heads/remaster | hkust-gmission/test/concurrent_test.py | 1 | __author__ = 'chenzhao'
# coding:utf8
import gevent
from gevent import monkey; monkey.patch_all()  # must run before the other imports

import datetime
import functools
import json
import os
import random
import sys
import time
import urllib2

import flask
import requests
from termcolor import colored
from geventhttpclient import HTTPClient
from geventhttpclient.url import URL
def color_print(color, *args):
    """Print args space-joined, colored with the given termcolor name."""
    colored_str = colored(' '.join(map(str, args)), color)
    # print type(colored_str)
    # print repr(colored_str)
    print colored_str
def http_debug(*args):
    """HTTP trace helper; currently a no-op because of the early return."""
    return
    # NOTE(review): the lines below are intentionally dead code; delete the
    # bare `return` above to re-enable bracketed grey trace output.
    newargs = list(args)
    newargs.insert(0, '[')
    newargs.append(']')
    color_print('grey', *newargs)
# Base URL of the service under test; overridden in main() for remote runs.
url_root = 'http://127.0.0.1:5000/'
def post(urlpath, **kw):
    """POST kw as a JSON body to url_root+urlpath; return the response.

    NOTE(review): uses the `requests` package, which is not imported at the
    top of this file as shown -- confirm the import exists in the real module.
    """
    url = url_root+urlpath
    json_data = json.dumps(kw)
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    http_debug('POST', url, json_data)
    resp = requests.post(url, data=json_data, headers=headers)
    http_debug('Response:', resp.status_code, resp.content[:60], '...')
    return resp
def put(urlpath, **kw):
    """PUT kw as a JSON body to url_root+urlpath; return the response."""
    url = url_root + urlpath
    json_data = json.dumps(kw)
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    # Bug fix: the debug label previously said 'POST' for PUT requests.
    http_debug('PUT', url, json_data)
    resp = requests.put(url, data=json_data, headers=headers)
    http_debug('Response:', resp.status_code, resp.content[:60], '...')
    return resp
def upload(urlpath, filename):
    """POST the named local file as multipart form data; return the response."""
    url = url_root+urlpath
    # NOTE(review): the file handle is never explicitly closed.
    files = {'file': open(filename, 'rb')}
    resp = requests.post(url, files=files)
    return resp
def get(urlpath):
    """GET url_root+urlpath; return the response."""
    url = url_root+urlpath
    # headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    # json_params = json.dumps(kw) if kw else ''
    http_debug('GET', url)
    resp = requests.get(url)
    http_debug('Response:', resp.status_code, resp.content[:60], '...')
    return resp
def delete(urlpath):
    """DELETE url_root+urlpath; assert 204 No Content; return the response."""
    url = url_root+urlpath
    http_debug('DELETE', url)
    resp = requests.delete(url)
    http_debug('Response:', resp.status_code, resp.content[:60], '...')
    assert resp.status_code==204
    return resp
def rest_post(name, obj_dict):
    """Create an object of type `name` via POST /rest/<name>."""
    endpoint = 'rest/' + name
    return post(endpoint, **obj_dict)
def rest_get_many(name, filter_dict={}):
    """Fetch a collection, optionally filtered by column equality.

    (The mutable default is safe here: filter_dict is only read.)
    """
    filters = [dict(name=col, op='==', val=val)
               for col, val in filter_dict.items()]
    if not filters:
        return get('rest/' + name)
    query = json.dumps(dict(filters=filters))
    return get('rest/' + name + '?q=' + query)
def rest_get(name, _id):
    """Fetch a single object by id; return its parsed JSON."""
    urlpath = 'rest/%s/%d' % (name, _id)
    return get(urlpath).json()
def rest_get_list(name, filter_dict={}):
    """Return the 'objects' list from a filtered collection GET."""
    resp = rest_get_many(name, filter_dict)
    return resp.json()['objects']
def rest_put(name, _id, data):
    """Update object `_id` of type `name` with `data` via PUT."""
    urlpath = 'rest/%s/%d' % (name, _id)
    return put(urlpath, **data)
def rest_delete_all(name, filter_dict):
    """Delete every object matching filter_dict; return how many there were."""
    doomed = rest_get_list(name, filter_dict)
    for obj in doomed:
        delete('rest/%s/%d' % (name, obj['id']))
    return len(doomed)
def test_case(func, **kw):
def run_test_case(**kw):
print '-'*80+'\nTest case begin:', func.__name__
r = func(**kw)
# try:
# r = func(**kw)
# except Exception as e:
# exc_type, exc_j, exc_tb = sys.exc_info()
# fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
# color_print('red', e, exc_type, fname, exc_tb.tb_lineno)
# r = False
color_print('green' if r else 'red', 'Test case end :', func.__name__, 'OK' if r else 'failed')
return r
return run_test_case
# Pre-baked login credentials shared by the multi-user test cases.
users = [dict(email='test1@xxx.com', password='111111'),
         dict(email='test2@xxx.com', password='111111'),
         dict(email='test3@xxx.com', password='111111'),
         dict(email='test4@xxx.com', password='111111'),
         dict(email='test5@xxx.com', password='111111')]
# Parsed login responses for `users`; populated by test_user_created().
users_j = []
@test_case
def test_user_created():
    """Log in every predefined user and cache their JSON profiles."""
    global users, users_j
    users_j = [post('user/login', **u).json() for u in users]
    # Every login response must carry a user id.
    assert all(u['id'] for u in users_j)
    return True
@test_case
def user_register_new():
    """Register a brand-new user and check the full registration payload."""
    new_user = dict(email='testcase_new_user@test.com', password='1234567', name='testcase_new')
    # Make the case repeatable: purge any leftover from a prior run.
    rest_delete_all('user', dict(email=new_user['email']))
    r = post('user/register', **new_user)
    assert r.status_code == 200
    rjson = r.json()
    assert rjson['res'] == 0
    assert rjson['email'] == new_user['email']
    assert rjson['name'] == new_user['name']
    # New accounts get requester+worker roles, never admin.
    roles = rjson['roles']
    assert 'requester' in roles and 'worker' in roles and 'admin' not in roles
    assert rjson['token'] and rjson['id'] and rjson['credit']
    return True
@test_case
def user_register_existing():
    """Registering an already-existing email must fail (non-zero res)."""
    existing_user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    # Ensure the account exists before attempting to re-register it.
    rest_post('user', existing_user)
    r = post('user/register', **existing_user)
    assert r.status_code == 200
    assert r.json()['res'] != 0
    return True
@test_case
def user_login_success():
    """Login succeeds both by email and by user name."""
    user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    rest_post('user', user)
    # Login by email.
    r = post('user/login', email=user['email'], password=user['password'])
    assert r.status_code == 200
    rjson = r.json()
    assert rjson['res'] == 0
    assert rjson['email'] == user['email']
    assert rjson['name'] == user['name']
    assert rjson['token'] and rjson['id'] and rjson['credit']
    # Login by name must behave identically.
    r = post('user/login', name=user['name'], password=user['password'])
    assert r.status_code == 200
    rjson = r.json()
    assert rjson['res'] == 0
    assert rjson['email'] == user['email']
    assert rjson['name'] == user['name']
    assert rjson['token'] and rjson['id'] and rjson['credit']
    return True
@test_case
def user_login_fail():
    """Login with a wrong password returns 200 but a non-zero res code."""
    user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    rest_post('user', user)
    r = post('user/login', email=user['email'], password=user['password']+'asdf')
    assert r.status_code == 200
    rjson = r.json()
    assert rjson['res'] != 0
    return True
@test_case
def new_image_task():
    """Create an image task with an inline location; verify the echo."""
    user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    r = post('user/login', email=user['email'], password=user['password'])
    user_j = r.json()
    bound = dict(left_top_longitude=100,
                 left_top_latitude=120,
                 right_bottom_longitude=101,
                 right_bottom_latitude=121)
    location = dict(name='testlocation', longitude=110, latitude=119, bound=bound)
    new_task = dict(type='image',
                    brief='test new image task',
                    credit=10,
                    required_answer_count=3,
                    requester_id=user_j['id'],
                    location=location)
    r = rest_post('task', new_task)
    task_j = r.json()
    # Server must populate id, the time window, and the resolved location.
    assert task_j['id'] and task_j['begin_time'] and task_j['end_time'] and task_j['location_id']
    assert task_j['status'] == 'open'
    assert task_j['type'] == new_task['type']
    assert task_j['credit'] == new_task['credit']
    assert task_j['requester_id'] == new_task['requester_id']
    return True
@test_case
def new_task_with_existing_location():
    """Posting a task whose location already exists must reuse that
    location record (task_j['location_id'] == location_j['id'])."""
    user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    r = post('user/login', email=user['email'], password=user['password'])
    user_j = r.json()
    bound = dict(left_top_longitude=100, left_top_latitude=120, right_bottom_longitude=101, right_bottom_latitude=121)
    location = dict(name='testlocation', longitude=110, latitude=119, bound=bound)
    # Create the location first, then post the task with the same payload.
    r = rest_post('location', location)
    location_j = r.json()
    new_task = dict(type='image',
                    brief='test new image task',
                    credit=10,
                    required_answer_count=3,
                    requester_id=user_j['id'],
                    location=location)
    r = rest_post('task', new_task)
    task_j = r.json()
    assert task_j['id'] and task_j['begin_time'] and task_j['end_time'] and task_j['location_id']
    assert task_j['status'] == 'open'
    assert task_j['type'] == new_task['type']
    assert task_j['credit'] == new_task['credit']
    assert task_j['requester_id'] == new_task['requester_id']
    assert task_j['location_id'] == location_j['id']
    return True
@test_case
def upload_image():
    """Upload a local image; verify original and thumbnail are retrievable.

    Requires HKUST.jpeg to exist in the working directory.
    """
    filename = 'HKUST.jpeg'
    file_j = upload('image/upload', filename).json()
    assert filename in file_j['filename']
    assert file_j['size'] == os.path.getsize(filename)
    r = get('image/original/'+file_j['filename'])
    assert int(r.headers['content-length']) == os.path.getsize(filename)
    # The thumbnail must be non-empty and smaller than the original.
    r = get('image/thumb/'+file_j['filename'])
    assert 0 < int(r.headers['content-length']) < os.path.getsize(filename)
    return True
@test_case
def new_answer_with_image():
    """Submit an image answer; verify its attachment and that the task
    requester receives a notification message referencing the task."""
    requester = dict(email='testcase_requester@test.com', password='1234567', name='testcase_requester')
    requester_j = rest_post('user', requester).json()
    worker = dict(email='testcase_worker@test.com', password='1234567', name='testcase_worker')
    worker_j = rest_post('user', worker).json()
    bound = dict(left_top_longitude=100, left_top_latitude=120, right_bottom_longitude=101, right_bottom_latitude=121)
    location = dict(name='testlocation', longitude=110, latitude=119, bound=bound)
    task = dict(type='image', brief='test new image task', requester_id=requester_j['id'], location=location, credit=10)
    task_j = rest_post('task', task).json()
    original_filename = 'HKUST.jpeg'
    filename = upload('image/upload', original_filename).json()['filename']
    attachment = dict(name='image name', type='image', value=filename)
    answer = dict(task_id=task_j['id'], brief='test image answer', type='image', location=location,
                  worker_id=worker_j['id'], attachment=attachment)
    r = rest_post('answer', answer)
    answer_j = r.json()
    assert answer_j['task_id'] == task_j['id']
    # The attachment record must point at the uploaded file.
    att = rest_get('attachment', answer_j['attachment_id'])
    assert att['value'] == filename
    answers_of_task = rest_get_list('answer', dict(task_id=task_j['id']))
    assert answer_j['id'] in (a['id'] for a in answers_of_task)
    # The requester is notified; the message attachment carries the task id.
    messages = rest_get_list('message', dict(receiver_id=requester_j['id']))
    assert str(task_j['id']) in [m['attachment'] for m in messages]
    return True
@test_case
def comment_answer():
    """Comment on an answer; the worker must receive a message whose
    attachment references that answer."""
    requester = dict(email='testcase_requester@test.com', password='1234567', name='testcase_requester')
    requester_j = rest_post('user', requester).json()
    worker = dict(email='testcase_worker@test.com', password='1234567', name='testcase_worker')
    worker_j = rest_post('user', worker).json()
    bound = dict(left_top_longitude=100, left_top_latitude=120, right_bottom_longitude=101, right_bottom_latitude=121)
    location = dict(name='testlocation', longitude=110, latitude=119, bound=bound)
    task = dict(type='image', brief='test answer comment task', requester_id=requester_j['id'], location=location, credit=10)
    task_j = rest_post('task', task).json()
    original_filename = 'HKUST.jpeg'
    filename = upload('image/upload', original_filename).json()['filename']
    attachment = dict(name='image name', type='image', value=filename)
    answer = dict(task_id=task_j['id'], brief='test image answer', type='image', location=location,
                  worker_id=worker_j['id'], attachment=attachment)
    r = rest_post('answer', answer)
    answer_j = r.json()
    comment = dict(answer_id=answer_j['id'], content='this is a comment', sender_id=requester_j['id'],
                   receiver_id=worker_j['id'])
    comment_j = rest_post('answer_comment', comment)
    messages = rest_get_list('message', dict(receiver_id=worker_j['id']))
    assert str(answer_j['id']) in [m['attachment'] for m in messages]
    return True
# NOTE(review): unlike its siblings this case is NOT wrapped in @test_case,
# so run_all_cases() runs it without the banner/colored report -- confirm
# whether the missing decorator is intentional.
def enough_answer():
    """When required_answer_count answers arrive, each worker gains the
    task credit and the requester is debited credit * answer count."""
    requester = dict(email='testcase_requester@test.com', password='1234567', name='testcase_requester')
    requester_j = rest_post('user', requester).json()
    location = dict(name='testlocation', longitude=110, latitude=119)
    task = dict(type='image', brief='test answer', requester_id=requester_j['id'],
                credit=2, required_answer_count=3, location=location)
    task_j = rest_post('task', task).json()
    global users
    workers = []
    for u in users[:task['required_answer_count']]:
        worker = rest_post('user', u).json()
        workers.append(worker)
        answer = dict(task_id=task_j['id'], brief='test answer', type='image', worker_id=worker['id'])
        rest_post('answer', answer)
    # Re-fetch each worker to compare credit before/after.
    for u, worker_before in zip(users[:task['required_answer_count']], workers):
        worker_after = rest_post('user', u).json()
        assert worker_after['credit'] - worker_before['credit'] == task['credit']
    requester_j_after = rest_post('user', requester).json()
    assert requester_j['credit'] - requester_j_after['credit'] == task['credit']*task['required_answer_count']
    return True
@test_case
def user_last_location():
    """Posting a position trace updates the user's single last-position row."""
    global users
    user = rest_post('user', users[0]).json()
    trace = dict(longitude=random.random(), latitude=random.random(), z=random.randint(-9, 9), user_id=user['id'])
    trace_j = rest_post('position_trace', trace).json()
    position_l = rest_get_list('user_last_position', {'user_id':user['id']})
    # Exactly one last-position record per user.
    assert len(position_l) == 1
    position_j = position_l[0]
    assert position_j['longitude'] == trace_j['longitude']
    assert position_j['latitude'] == trace_j['latitude']
    assert position_j['z'] == trace_j['z']
    return True
@test_case
def noti_nearby_user_for_new_task():
    """A worker whose last position matches a new task's location must
    receive a notification message referencing the task."""
    global users
    nearby_worker = rest_post('user', users[0]).json()
    requester = rest_post('user', users[1]).json()
    trace = dict(longitude=random.random(), latitude=random.random(), z=random.randint(-9, 9), user_id=nearby_worker['id'])
    # print trace
    trace_j = rest_post('position_trace', trace).json()
    # print trace_j
    # Put the task exactly where the worker last reported.
    location = dict(name='testlocation with someone nearby %f, %f'%(trace['longitude'], trace['latitude']),
                    longitude=trace['longitude'], latitude=trace['latitude'])
    new_task = dict(type='mix', brief='test new image task', requester_id=requester['id'], location=location)
    task_j = rest_post('task', new_task).json()
    messages = rest_get_list('message', dict(receiver_id=nearby_worker['id']))
    assert str(task_j['id']) in [m['attachment'] for m in messages]
    return True
@test_case
def new_text_task():
    """A text task created with preset answer options must expose every
    option as an answer of the task."""
    global users
    requester = rest_post('user', users[1]).json()
    location = dict(name='testlocation', longitude=110, latitude=119)
    options = [{'type': 'text', 'title':'text answer %d'%(i), "worker_id":requester['id']} for i in range(3)]
    new_task = dict(type='text', brief='test new choice task', requester_id=requester['id'], location=location, answers=options)
    task_j = rest_post('task', new_task).json()
    answers_of_task = rest_get_list('answer', dict(task_id=task_j['id']))
    for opt in options:
        assert opt['title'] in [a['title'] for a in answers_of_task]
    # print task_j
    return True
@test_case
def put_existing_answer():
    """Create a text answer, then modify its title via PUT and verify."""
    global users
    requester = rest_post('user', users[1]).json()
    worker = dict(email='testcase_worker@test.com', password='1234567', name='testcase_worker')
    worker_j = rest_post('user', worker).json()
    location = dict(name='testlocation', longitude=110, latitude=119)
    new_task = dict(type='text', brief='test new text task', requester_id=requester['id'], location=location)
    task_j = rest_post('task', new_task).json()
    # Bug fix: the original answer dict had no 'title' key, so the
    # assertion `answer_j['title'] == answer['title']` raised KeyError.
    answer = dict(task_id=task_j['id'], brief='test text answer', type='text', location=location,
                  worker_id=worker_j['id'], title='original text answer')
    r = rest_post('answer', answer)
    answer_j = r.json()
    assert answer_j['task_id'] == task_j['id']
    assert answer_j['title'] == answer['title']
    # Now update the title and confirm the PUT took effect.
    answer['title'] = 'modified text answer'
    r = rest_put('answer', answer_j['id'], answer)
    answer_j = r.json()
    assert answer_j['title'] == answer['title']
    return True
@test_case
def new_checkin():
    """Smoke test: posting a checkin with an inline location succeeds."""
    global users
    user = rest_post('user', users[1]).json()
    location = dict(name='testlocation', longitude=110, latitude=119)
    new_checkin = dict(type='text', content='test new checkin', user_id=user['id'], location=location)
    checkin_j = rest_post('checkin', new_checkin).json()
    # print checkin_j
    return True
# def new_task_with_random_location_from_random_use():
def rand_xy():
x = random.randint(100, 110)+random.random()
y = random.randint(100, 110)+random.random()
return x, y
# Candidate location names (zh-CN, Window-of-the-World theme-park spots).
# Currently unused: new_task_concurrent hard-codes its location name and
# keeps the random pick commented out.
all_names = ["世界之窗", "前广场", "世界之窗北门", "世界之窗南门", "世界之窗北侧门", "大胡子艺术", "世界文化浮雕墙", "世界地图喷泉", "环球舞台",
             "亚马逊丛林穿越", "巴西耶稣山", "墨西哥巨石头像", "秘鲁那斯卡图线壁", "印加迷城攀岩", "墨西哥凝神武士像", "印第安射箭场", "印第安图腾柱",
             "印第安民居", "巴西议会大厦", "美国自由女神像", "美国白宫", "美国国会大厦", "美国林肯纪念堂", "美国总统山", "科罗拉多峡谷漂流", "科罗拉多大峡谷",
             "金字塔探秘", "非洲民居", "埃及基萨金字塔群", "埃及阿布辛伯勒神庙", "玻璃凯旋门", "阿尔卑斯冰雪世界"]
def new_task_concurrent(thread_id):
    """Worker body for concurrent task creation: post one task, time it,
    and validate the response.

    NOTE(review): uses time.time() but `time` is not imported at the top
    of this file as shown -- confirm the import exists in the real module.
    """
    begin_time = time.time()
    location_name = u'西贡区大学道'
    # location_name = all_names[random.randint(0, len(all_names)-1)]
    # Hard-coded requester; assumes user 40 exists on the target server.
    requester_id = 40
    bound = dict(left_top_longitude=100, left_top_latitude=120, right_bottom_longitude=101, right_bottom_latitude=121)
    location = dict(name=location_name, longitude=110, latitude=119, bound=bound)
    new_task = dict(type='image',
                    brief='test concurrent',
                    credit=10,
                    required_answer_count=3,
                    requester_id=requester_id,
                    location=location)
    r = rest_post('task', new_task)
    task_j = r.json()
    end_time = time.time()
    print 'single cost', thread_id, end_time-begin_time
    assert task_j['id'] and task_j['begin_time'] and task_j['end_time'] and task_j['location_id']
    assert task_j['status'] == 'open'
    assert task_j['type'] == new_task['type']
    assert task_j['credit'] == new_task['credit']
    assert task_j['requester_id'] == new_task['requester_id']
# @test_case  (decorator intentionally disabled -- runs without reporting)
def new_task_with_random_location():
    """Create a task at a freshly-generated random location and verify
    the task binds to that location record."""
    user = dict(email='testcase_existing_user@test.com', password='1234567', name='testcase_existing')
    r = post('user/login', email=user['email'], password=user['password'])
    user_j = r.json()
    bound = dict(left_top_longitude=100, left_top_latitude=120, right_bottom_longitude=101, right_bottom_latitude=121)
    x,y = rand_xy()
    location = dict(name='testlocation(%.4f,%.4f)'%(x,y), longitude=x, latitude=y, bound=bound)
    r = rest_post('location', location)
    location_j = r.json()
    new_task = dict(type='image',
                    brief='test random location task',
                    credit=10,
                    required_answer_count=3,
                    requester_id=user_j['id'],
                    location=location)
    r = rest_post('task', new_task)
    task_j = r.json()
    assert task_j['id'] and task_j['begin_time'] and task_j['end_time'] and task_j['location_id']
    assert task_j['status'] == 'open'
    assert task_j['type'] == new_task['type']
    assert task_j['credit'] == new_task['credit']
    assert task_j['requester_id'] == new_task['requester_id']
    assert task_j['location_id'] == location_j['id']
    return True
def run_all_cases(n):
    """Run the functional suite once; `n` is just a run tag printed at the end.

    upload_image / new_answer_with_image are disabled (need a local
    HKUST.jpeg fixture).
    """
    test_user_created()
    user_register_new()
    user_register_existing()
    user_login_success()
    user_login_fail()
    new_image_task()
    new_task_with_existing_location()
    # upload_image()
    # new_answer_with_image()
    comment_answer()
    enough_answer()
    user_last_location()
    noti_nearby_user_for_new_task()
    new_text_task()
    put_existing_answer()
    new_checkin()
    print n
def get_simple(n):
    """Fetch the fixed load-test URL once; `n` is ignored (worker tag)."""
    url = 'http://gmission-asia.cloudapp.net/gmission_szww/test_data?size=10000'
    data = urllib2.urlopen(url).read()
# Shared across greenlets: requests issued so far, and the stop threshold.
req_counter = 0
total_limit = 50000

def concurrent_test_unit(seq):
    """Greenlet worker: keep issuing requests until total_limit is reached.

    NOTE(review): the check-then-increment on req_counter is not atomic;
    under gevent, greenlets normally only switch on I/O, so the counter
    may only slightly overshoot total_limit -- confirm that is acceptable.
    """
    global req_counter, total_limit
    while req_counter < total_limit:
        req_counter += 1
        get_simple(seq)
        # print req_counter
def gevent_test():
    """Spawn `con` greenlets hammering get_simple; report elapsed time
    and requests-per-second over total_limit requests."""
    con = 500
    print 'concurrent begin', con
    begin = datetime.datetime.now()
    jobs = [gevent.spawn(concurrent_test_unit, i) for i in range(con)]
    gevent.joinall(jobs)
    end = datetime.datetime.now()
    print 'time:', (end-begin).total_seconds()
    print 'rps:', total_limit/(end-begin).total_seconds()
def main():
    """Point the helpers at the remote deployment and run the load test."""
    global url_root
    url_root = 'http://gmission-asia.cloudapp.net/gmission_szww/'
    gevent_test()

if __name__=='__main__':
    main()
|
anurag-ks/eden | refs/heads/master | modules/s3db/msg.py | 7 | # -*- coding: utf-8 -*-
""" Sahana Eden Messaging Model
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3ChannelModel",
"S3MessageModel",
"S3MessageAttachmentModel",
"S3EmailModel",
"S3FacebookModel",
"S3MCommonsModel",
"S3ParsingModel",
"S3RSSModel",
"S3SMSModel",
"S3SMSOutboundModel",
"S3TropoModel",
"S3TwilioModel",
"S3TwitterModel",
"S3TwitterSearchModel",
"S3XFormsModel",
"S3BaseStationModel",
)
from gluon import *
from gluon.storage import Storage
from ..s3 import *
# Compact JSON encoding
SEPARATORS = (",", ":")
# =============================================================================
class S3ChannelModel(S3Model):
"""
Messaging Channels
- all Inbound & Outbound channels for messages are instances of this
super-entity
"""
# Names this model publishes into the s3db namespace; model() returns
# the corresponding callables/fields at its end.
names = ("msg_channel",
         "msg_channel_limit",
         "msg_channel_status",
         "msg_channel_id",
         "msg_channel_enable",
         "msg_channel_disable",
         "msg_channel_enable_interactive",
         "msg_channel_disable_interactive",
         "msg_channel_onaccept",
         )
def model(self):
    """Define the msg_channel super-entity plus its limit/status tables,
    and return the reusable field and channel enable/disable helpers."""

    T = current.T
    db = current.db

    define_table = self.define_table

    #----------------------------------------------------------------------
    # Super entity: msg_channel
    # Each key below is an instance table of this super-entity.
    #
    channel_types = Storage(msg_email_channel = T("Email (Inbound)"),
                            msg_facebook_channel = T("Facebook"),
                            msg_mcommons_channel = T("Mobile Commons (Inbound)"),
                            msg_rss_channel = T("RSS Feed"),
                            msg_sms_modem_channel = T("SMS Modem"),
                            msg_sms_webapi_channel = T("SMS WebAPI (Outbound)"),
                            msg_sms_smtp_channel = T("SMS via SMTP (Outbound)"),
                            msg_tropo_channel = T("Tropo"),
                            msg_twilio_channel = T("Twilio (Inbound)"),
                            msg_twitter_channel = T("Twitter"),
                            )

    tablename = "msg_channel"
    self.super_entity(tablename, "channel_id",
                      channel_types,
                      Field("name",
                            #label = T("Name"),
                            ),
                      Field("description",
                            #label = T("Description"),
                            ),
                      Field("enabled", "boolean",
                            default = True,
                            #label = T("Enabled?")
                            #represent = s3_yes_no_represent,
                            ),
                      # @ToDo: Indicate whether channel can be used for Inbound or Outbound
                      #Field("inbound", "boolean",
                      #      label = T("Inbound?")),
                      #Field("outbound", "boolean",
                      #      label = T("Outbound?")),
                      )

    # @todo: make lazy_table
    table = db[tablename]
    table.instance_type.readable = True

    # Reusable Field for foreign keys referencing a channel
    channel_id = S3ReusableField("channel_id", "reference %s" % tablename,
                                 label = T("Channel"),
                                 ondelete = "SET NULL",
                                 represent = S3Represent(lookup=tablename),
                                 requires = IS_EMPTY_OR(
                                                IS_ONE_OF_EMPTY(db, "msg_channel.id")),
                                 )

    self.add_components(tablename,
                        msg_channel_status = "channel_id",
                        )

    # ---------------------------------------------------------------------
    # Channel Limit
    # Used to limit the number of emails sent from the system
    # - works by simply recording an entry for the timestamp to be checked against
    #
    # - currently just used by msg.send_email()
    #
    tablename = "msg_channel_limit"
    define_table(tablename,
                 # @ToDo: Make it per-channel
                 #channel_id(),
                 *s3_timestamp())

    # ---------------------------------------------------------------------
    # Channel Status
    # Used to record errors encountered in the Channel
    #
    tablename = "msg_channel_status"
    define_table(tablename,
                 channel_id(),
                 Field("status",
                       #label = T("Status"),
                       #represent = s3_yes_no_represent,
                       represent = lambda v: v or current.messages["NONE"],
                       ),
                 *s3_meta_fields())

    # ---------------------------------------------------------------------
    # Pass names back to global scope (s3.*)
    return dict(msg_channel_id = channel_id,
                msg_channel_enable = self.channel_enable,
                msg_channel_disable = self.channel_disable,
                msg_channel_enable_interactive = self.channel_enable_interactive,
                msg_channel_disable_interactive = self.channel_disable_interactive,
                msg_channel_onaccept = self.channel_onaccept,
                msg_channel_poll = self.channel_poll,
                )
# -------------------------------------------------------------------------
@staticmethod
def channel_enable(tablename, channel_id):
    """
        Enable a Channel
        - Schedule a Poll for new messages
        - Enable all associated Parsers

        CLI API for shell scripts & to be called by S3Method

        @param tablename: the channel instance table (e.g. "msg_rss_channel")
        @param channel_id: the msg_channel super-entity key

        NOTE(review): assumes a record exists for channel_id; if none
        does, record.enabled below raises AttributeError -- confirm
        callers guarantee existence.
    """

    db = current.db
    s3db = current.s3db
    table = s3db.table(tablename)
    record = db(table.channel_id == channel_id).select(table.id, # needed for update_record
                                                       table.enabled,
                                                       limitby=(0, 1),
                                                       ).first()
    if not record.enabled:
        # Flag it as enabled
        # Update Instance
        record.update_record(enabled = True)
        # Update Super
        s3db.update_super(table, record)

    # Enable all Parser tasks on this channel
    ptable = s3db.msg_parser
    query = (ptable.channel_id == channel_id) & \
            (ptable.deleted == False)
    parsers = db(query).select(ptable.id)
    for parser in parsers:
        s3db.msg_parser_enable(parser.id)

    # Do we have an existing scheduled poll Task for this channel?
    ttable = db.scheduler_task
    args = '["%s", %s]' % (tablename, channel_id)
    query = ((ttable.function_name == "msg_poll") & \
             (ttable.args == args) & \
             (ttable.status.belongs(["RUNNING", "QUEUED", "ALLOCATED"])))
    exists = db(query).select(ttable.id,
                              limitby=(0, 1)).first()
    if exists:
        return "Channel already enabled"
    else:
        # Poll every 5 minutes, forever.
        current.s3task.schedule_task("msg_poll",
                                     args = [tablename, channel_id],
                                     period = 300,  # seconds
                                     timeout = 300, # seconds
                                     repeats = 0    # unlimited
                                     )
        return "Channel enabled"
# -------------------------------------------------------------------------
@staticmethod
def channel_enable_interactive(r, **attr):
    """
        Enable a Channel
        - Schedule a Poll for new messages

        S3Method for interactive requests: delegates to
        msg_channel_enable, stores its result as the session
        confirmation, then redirects to the channel's controller.
    """

    tablename = r.tablename
    result = current.s3db.msg_channel_enable(tablename, r.record.channel_id)
    current.session.confirmation = result
    # e.g. "msg_rss_channel" -> controller function "rss_channel"
    fn = tablename.split("_", 1)[1]
    redirect(URL(f=fn))
# -------------------------------------------------------------------------
@staticmethod
def channel_disable(tablename, channel_id):
"""
Disable a Channel
- Remove schedule for Polling for new messages
- Disable all associated Parsers
CLI API for shell scripts & to be called by S3Method
"""
db = current.db
s3db = current.s3db
table = s3db.table(tablename)
record = db(table.channel_id == channel_id).select(table.id, # needed for update_record
table.enabled,
limitby=(0, 1),
).first()
if record.enabled:
# Flag it as disabled
# Update Instance
record.update_record(enabled = False)
# Update Super
s3db.update_super(table, record)
# Disable all Parser tasks on this channel
ptable = s3db.msg_parser
parsers = db(ptable.channel_id == channel_id).select(ptable.id)
for parser in parsers:
s3db.msg_parser_disable(parser.id)
# Do we have an existing Task?
ttable = db.scheduler_task
args = '["%s", %s]' % (tablename, channel_id)
query = ((ttable.function_name == "msg_poll") & \
(ttable.args == args) & \
(ttable.status.belongs(["RUNNING", "QUEUED", "ALLOCATED"])))
exists = db(query).select(ttable.id,
limitby=(0, 1)).first()
if exists:
# Disable all
db(query).update(status="STOPPED")
return "Channel disabled"
else:
return "Channel already disabled"
# --------------------------------------------------------------------------
@staticmethod
def channel_disable_interactive(r, **attr):
"""
Disable a Channel
- Remove schedule for Polling for new messages
S3Method for interactive requests
"""
tablename = r.tablename
result = current.s3db.msg_channel_disable(tablename, r.record.channel_id)
current.session.confirmation = result
fn = tablename.split("_", 1)[1]
redirect(URL(f=fn))
# -------------------------------------------------------------------------
@staticmethod
def channel_onaccept(form):
"""
Process the Enabled Flag
"""
if form.record:
# Update form
# Process if changed
if form.record.enabled and not form.vars.enabled:
current.s3db.msg_channel_disable(form.table._tablename,
form.vars.channel_id)
elif form.vars.enabled and not form.record.enabled:
current.s3db.msg_channel_enable(form.table._tablename,
form.vars.channel_id)
else:
# Create form
# Process only if enabled
if form.vars.enabled:
current.s3db.msg_channel_enable(form.table._tablename,
form.vars.channel_id)
# -------------------------------------------------------------------------
@staticmethod
def channel_poll(r, **attr):
"""
Poll a Channel for new messages
S3Method for interactive requests
"""
tablename = r.tablename
current.s3task.async("msg_poll", args=[tablename, r.record.channel_id])
current.session.confirmation = \
current.T("The poll request has been submitted, so new messages should appear shortly - refresh to see them")
if tablename == "msg_email_channel":
fn = "email_inbox"
elif tablename == "msg_mcommons_channel":
fn = "sms_inbox"
elif tablename == "msg_rss_channel":
fn = "rss"
elif tablename == "msg_twilio_channel":
fn = "sms_inbox"
elif tablename == "msg_twitter_channel":
fn = "twitter_inbox"
else:
return "Unsupported channel: %s" % tablename
redirect(URL(f=fn))
# =============================================================================
class S3MessageModel(S3Model):
    """
        Messages
        - the msg_message super-entity (all inbound & outbound messages)
          and the msg_outbox table (one entry per recipient)
    """

    names = ("msg_message",
             "msg_message_id",
             "msg_message_represent",
             "msg_outbox",
             )

    def model(self):
        """
            Define the message super-entity & the outbox table

            @return: dict of names for global scope (s3.*)
        """

        T = current.T
        db = current.db

        UNKNOWN_OPT = current.messages.UNKNOWN_OPT

        configure = self.configure
        define_table = self.define_table

        # Message priority
        # NOTE(review): msg_priority_opts appears unused within this method
        # - confirm whether it is still needed before removing
        msg_priority_opts = {3 : T("High"),
                             2 : T("Medium"),
                             1 : T("Low"),
                             }

        # ---------------------------------------------------------------------
        # Message Super Entity - all Inbound & Outbound Messages
        #
        message_types = Storage(msg_email = T("Email"),
                                msg_facebook = T("Facebook"),
                                msg_rss = T("RSS"),
                                msg_sms = T("SMS"),
                                msg_twitter = T("Twitter"),
                                msg_twitter_result = T("Twitter Search Results"),
                                )

        tablename = "msg_message"
        self.super_entity(tablename, "message_id",
                          message_types,
                          # Knowing which Channel Incoming Messages
                          # came in on allows correlation to Outbound
                          # messages (campaign_message, deployment_alert, etc)
                          self.msg_channel_id(),
                          s3_datetime(default="now"),
                          Field("body", "text",
                                label = T("Message"),
                                ),
                          Field("from_address",
                                label = T("From"),
                                ),
                          Field("to_address",
                                label = T("To"),
                                ),
                          Field("inbound", "boolean",
                                default = False,
                                label = T("Direction"),
                                represent = lambda direction: \
                                            (direction and [T("In")] or \
                                                           [T("Out")])[0],
                                ),
                          )

        # @todo: make lazy_table
        # Expose the instance type (which message subtype each record is)
        table = db[tablename]
        table.instance_type.readable = True
        table.instance_type.writable = True

        configure(tablename,
                  list_fields = ["instance_type",
                                 "from_address",
                                 "to_address",
                                 "body",
                                 "inbound",
                                 ],
                  )

        # Reusable Field
        # - represent messages by their body text
        message_represent = S3Represent(lookup=tablename, fields=["body"])
        message_id = S3ReusableField("message_id", "reference %s" % tablename,
                                     ondelete = "RESTRICT",
                                     represent = message_represent,
                                     requires = IS_EMPTY_OR(
                                        IS_ONE_OF_EMPTY(db, "msg_message.id")),
                                     )

        self.add_components(tablename,
                            msg_attachment = "message_id",
                            deploy_response = "message_id",
                            )

        # ---------------------------------------------------------------------
        # Outbound Messages
        #
        # Show only the supported messaging methods
        MSG_CONTACT_OPTS = current.msg.MSG_CONTACT_OPTS

        # Maximum number of retries to send a message
        MAX_SEND_RETRIES = current.deployment_settings.get_msg_max_send_retries()

        # Valid message outbox statuses
        MSG_STATUS_OPTS = {1 : T("Unsent"),
                           2 : T("Sent"),
                           3 : T("Draft"),
                           4 : T("Invalid"),
                           5 : T("Failed"),
                           }

        opt_msg_status = S3ReusableField("status", "integer",
                                         notnull=True,
                                         requires = IS_IN_SET(MSG_STATUS_OPTS,
                                                              zero=None),
                                         default = 1,
                                         label = T("Status"),
                                         represent = lambda opt: \
                                                     MSG_STATUS_OPTS.get(opt,
                                                                 UNKNOWN_OPT))

        # Outbox - needs to be separate to Message since a single message
        # sent needs different outbox entries for each recipient
        tablename = "msg_outbox"
        define_table(tablename,
                     # FK not instance
                     message_id(),
                     # Person/Group to send the message out to:
                     self.super_link("pe_id", "pr_pentity"),
                     # If set used instead of picking up from pe_id:
                     Field("address"),
                     Field("contact_method", length=32,
                           default = "EMAIL",
                           label = T("Contact Method"),
                           represent = lambda opt: \
                                       MSG_CONTACT_OPTS.get(opt, UNKNOWN_OPT),
                           requires = IS_IN_SET(MSG_CONTACT_OPTS,
                                                zero=None),
                           ),
                     opt_msg_status(),
                     # Used to loop through a PE to get it's members
                     Field("system_generated", "boolean",
                           default = False,
                           ),
                     # Give up if we can't send after MAX_RETRIES
                     Field("retries", "integer",
                           default = MAX_SEND_RETRIES,
                           readable = False,
                           writable = False,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  list_fields = ["id",
                                 "message_id",
                                 "pe_id",
                                 "status",
                                 ],
                  orderby = "msg_outbox.created_on desc",
                  )

        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        return dict(msg_message_id = message_id,
                    msg_message_represent = message_represent,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Return safe defaults in case the model has been deactivated.
        """

        dummy = S3ReusableField("dummy_id", "integer",
                                readable = False,
                                writable = False)

        return dict(msg_message_id = lambda **attr: dummy("message_id"),
                    )
# =============================================================================
class S3MessageAttachmentModel(S3Model):
    """
        Message Attachments
        - link table between msg_message & doc_document
    """

    names = ("msg_attachment",)

    def model(self):

        # ---------------------------------------------------------------------
        # Link each message to any number of documents
        #
        self.define_table("msg_attachment",
                          # FK not instance
                          self.msg_message_id(ondelete="CASCADE"),
                          self.doc_document_id(),
                          *s3_meta_fields())

        # ---------------------------------------------------------------------
        # No names to pass back to global scope (s3.*)
        return {}
# =============================================================================
class S3EmailModel(S3ChannelModel):
    """
        Email
            InBound Channels
                Outbound Email is currently handled via deployment_settings
            InBox/OutBox
    """

    names = ("msg_email_channel",
             "msg_email",
             )

    def model(self):
        """
            Define the email channel & email message tables

            @return: dict of names for global scope (empty)
        """

        T = current.T

        configure = self.configure
        define_table = self.define_table
        set_method = self.set_method
        super_link = self.super_link

        # ---------------------------------------------------------------------
        # Email Inbound Channels
        # - IMAP/POP3 account settings for fetching mail
        #
        tablename = "msg_email_channel"
        define_table(tablename,
                     # Instance
                     super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("enabled", "boolean",
                           default = True,
                           label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("server"),
                     Field("protocol",
                           requires = IS_IN_SET(["imap", "pop3"],
                                                zero=None),
                           ),
                     Field("use_ssl", "boolean"),
                     Field("port", "integer"),
                     Field("username"),
                     Field("password", "password", length=64,
                           readable = False,
                           requires = IS_NOT_EMPTY(),
                           ),
                     # Set true to delete messages from the remote
                     # inbox after fetching them.
                     Field("delete_from_server", "boolean"),
                     *s3_meta_fields())

        configure(tablename,
                  onaccept = self.msg_channel_onaccept,
                  super_entity = "msg_channel",
                  )

        # Enable/Disable/Poll methods (handlers inherited via S3ChannelModel)
        set_method("msg", "email_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "email_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "email_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # Email Messages: InBox & Outbox
        #
        sender = current.deployment_settings.get_mail_sender()

        tablename = "msg_email"
        define_table(tablename,
                     # Instance
                     super_link("message_id", "msg_message"),
                     self.msg_channel_id(),
                     s3_datetime(default = "now"),
                     Field("subject", length=78,    # RFC 2822
                           label = T("Subject"),
                           ),
                     Field("body", "text",
                           label = T("Message"),
                           ),
                     Field("from_address", #notnull=True,
                           default = sender,
                           label = T("Sender"),
                           requires = IS_EMAIL(),
                           ),
                     Field("to_address",
                           label = T("To"),
                           requires = IS_EMAIL(),
                           ),
                     # Raw message source (hidden from users)
                     Field("raw", "text",
                           label = T("Message Source"),
                           readable = False,
                           writable = False,
                           ),
                     Field("inbound", "boolean",
                           default = False,
                           label = T("Direction"),
                           represent = lambda direction: \
                                       (direction and [T("In")] or [T("Out")])[0],
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  orderby = "msg_email.date desc",
                  super_entity = "msg_message",
                  )

        # Components
        self.add_components(tablename,
                            # Used to link to custom tab deploy_response_select_mission:
                            deploy_mission = {"name": "select",
                                              "link": "deploy_response",
                                              "joinby": "message_id",
                                              "key": "mission_id",
                                              "autodelete": False,
                                              },
                            )

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3FacebookModel(S3ChannelModel):
    """
        Facebook
            Channels
            InBox/OutBox

        https://developers.facebook.com/docs/graph-api
    """

    names = ("msg_facebook_channel",
             "msg_facebook",
             "msg_facebook_login",
             )

    def model(self):
        """
            Define the Facebook channel & message tables

            @return: dict of names for global scope (s3.*)
        """

        T = current.T

        configure = self.configure
        define_table = self.define_table
        set_method = self.set_method
        super_link = self.super_link

        # ---------------------------------------------------------------------
        # Facebook Channels
        #
        tablename = "msg_facebook_channel"
        define_table(tablename,
                     # Instance
                     super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("enabled", "boolean",
                           default = True,
                           label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     # Only one channel may be flagged for Login
                     # (enforced in msg_facebook_channel_onaccept)
                     Field("login", "boolean",
                           default = False,
                           label = T("Use for Login?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("app_id", "bigint",
                           requires = IS_INT_IN_RANGE(0, +1e16)
                           ),
                     Field("app_secret", "password", length=64,
                           readable = False,
                           requires = IS_NOT_EMPTY(),
                           ),
                     # Optional
                     Field("page_id", "bigint",
                           requires = IS_INT_IN_RANGE(0, +1e16)
                           ),
                     Field("page_access_token"),
                     *s3_meta_fields())

        configure(tablename,
                  onaccept = self.msg_facebook_channel_onaccept,
                  super_entity = "msg_channel",
                  )

        set_method("msg", "facebook_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "facebook_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        # Polling not (yet) supported for Facebook channels
        #set_method("msg", "facebook_channel",
        #           method = "poll",
        #           action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # Facebook Messages: InBox & Outbox
        #
        tablename = "msg_facebook"
        define_table(tablename,
                     # Instance
                     super_link("message_id", "msg_message"),
                     self.msg_channel_id(),
                     s3_datetime(default = "now"),
                     Field("body", "text",
                           label = T("Message"),
                           ),
                     # @ToDo: Are from_address / to_address relevant in Facebook?
                     Field("from_address", #notnull=True,
                           #default = sender,
                           label = T("Sender"),
                           ),
                     Field("to_address",
                           label = T("To"),
                           ),
                     Field("inbound", "boolean",
                           default = False,
                           label = T("Direction"),
                           represent = lambda direction: \
                                       (direction and [T("In")] or [T("Out")])[0],
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  orderby = "msg_facebook.date desc",
                  super_entity = "msg_message",
                  )

        # ---------------------------------------------------------------------
        return dict(msg_facebook_login = self.msg_facebook_login,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """ Safe defaults for model-global names if module is disabled """

        return dict(msg_facebook_login = lambda: False,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def msg_facebook_channel_onaccept(form):
        """
            Ensure that only a single channel is flagged for Login,
            then run the normal channel onaccept processing
        """

        if form.vars.login:
            # Ensure only a single account used for Login
            current.db(current.s3db.msg_facebook_channel.id != form.vars.id).update(login = False)

        # Normal onaccept processing
        S3ChannelModel.channel_onaccept(form)

    # -------------------------------------------------------------------------
    @staticmethod
    def msg_facebook_login():
        """
            Lookup the credentials of the channel flagged for Login

            @return: Row with app_id & app_secret, or None if there is
                     no (undeleted) channel flagged for Login
        """

        table = current.s3db.msg_facebook_channel
        query = (table.login == True) & \
                (table.deleted == False)
        c = current.db(query).select(table.app_id,
                                     table.app_secret,
                                     limitby=(0, 1)
                                     ).first()
        return c
# =============================================================================
class S3MCommonsModel(S3ChannelModel):
    """
        Mobile Commons Inbound SMS Settings
        - Outbound can use Web API
    """

    names = ("msg_mcommons_channel",)

    def model(self):
        """
            Define the Mobile Commons channel table

            @return: dict of names for global scope (empty)
        """

        #T = current.T

        define_table = self.define_table
        set_method = self.set_method

        # ---------------------------------------------------------------------
        tablename = "msg_mcommons_channel"
        define_table(tablename,
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("enabled", "boolean",
                           default = True,
                           #label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("campaign_id", length=128, unique=True,
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("url",
                           default = \
                               "https://secure.mcommons.com/api/messages",
                           requires = IS_URL()
                           ),
                     Field("username",
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("password", "password",
                           readable = False,
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("query"),
                     # NOTE(review): presumably the time of the last poll,
                     # set by the poll task - confirm against msg_poll
                     Field("timestmp", "datetime",
                           writable = False,
                           ),
                     *s3_meta_fields())

        self.configure(tablename,
                       onaccept = self.msg_channel_onaccept,
                       super_entity = "msg_channel",
                       )

        set_method("msg", "mcommons_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "mcommons_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "mcommons_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3ParsingModel(S3Model):
    """
        Message Parsing Model
        - links channels to parser functions & tracks parsing status
    """

    names = ("msg_parser",
             "msg_parsing_status",
             "msg_session",
             "msg_keyword",
             "msg_sender",
             "msg_parser_enabled",
             "msg_parser_enable",
             "msg_parser_disable",
             "msg_parser_enable_interactive",
             "msg_parser_disable_interactive",
             )

    def model(self):
        """
            Define the parser link, parsing status, session, keyword
            & sender tables

            @return: dict of names for global scope (s3.*)
        """

        T = current.T

        define_table = self.define_table
        set_method = self.set_method

        channel_id = self.msg_channel_id
        message_id = self.msg_message_id

        # ---------------------------------------------------------------------
        # Link between Message Channels and Parsers in parser.py
        #
        tablename = "msg_parser"
        define_table(tablename,
                     # Source
                     channel_id(ondelete = "CASCADE"),
                     Field("function_name",
                           label = T("Parser"),
                           ),
                     Field("enabled", "boolean",
                           default = True,
                           label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     *s3_meta_fields())

        self.configure(tablename,
                       onaccept = self.msg_parser_onaccept,
                       )

        set_method("msg", "parser",
                   method = "enable",
                   action = self.parser_enable_interactive)

        set_method("msg", "parser",
                   method = "disable",
                   action = self.parser_disable_interactive)

        set_method("msg", "parser",
                   method = "parse",
                   action = self.parser_parse)

        # ---------------------------------------------------------------------
        # Message parsing status
        # - component to core msg_message table
        #
        tablename = "msg_parsing_status"
        define_table(tablename,
                     # Component, not Instance
                     message_id(ondelete = "CASCADE"),
                     # Source
                     channel_id(ondelete = "CASCADE"),
                     Field("is_parsed", "boolean",
                           default = False,
                           label = T("Parsing Status"),
                           represent = lambda parsed: \
                                       (parsed and [T("Parsed")] or \
                                                   [T("Not Parsed")])[0],
                           ),
                     # The reply message generated by the parser, if any
                     message_id("reply_id",
                                label = T("Reply"),
                                ondelete = "CASCADE",
                                ),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        # Login sessions for Message Parsing
        # - links a from_address with a login until expiry
        #
        tablename = "msg_session"
        define_table(tablename,
                     Field("from_address"),
                     Field("email"),
                     Field("created_datetime", "datetime",
                           default = current.request.utcnow,
                           ),
                     Field("expiration_time", "integer"),
                     Field("is_expired", "boolean",
                           default = False,
                           ),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        # Keywords for Message Parsing
        #
        tablename = "msg_keyword"
        define_table(tablename,
                     Field("keyword",
                           label = T("Keyword"),
                           ),
                     # @ToDo: Move this to a link table
                     self.event_incident_type_id(),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        # Senders for Message Parsing
        # - whitelist / blacklist / prioritise
        #
        tablename = "msg_sender"
        define_table(tablename,
                     Field("sender",
                           label = T("Sender"),
                           ),
                     # @ToDo: Make pe_id work for this
                     #self.super_link("pe_id", "pr_pentity"),
                     Field("priority", "integer",
                           label = T("Priority"),
                           ),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        return dict(msg_parser_enabled = self.parser_enabled,
                    msg_parser_enable = self.parser_enable,
                    msg_parser_disable = self.parser_disable,
                    )

    # -----------------------------------------------------------------------------
    @staticmethod
    def parser_parse(r, **attr):
        """
            Parse unparsed messages

            S3Method for interactive requests
        """

        record = r.record
        # NOTE(review): "async" is a reserved word from Python 3.7 onwards;
        # retained here as the scheduler API of this codebase version
        current.s3task.async("msg_parse", args=[record.channel_id, record.function_name])
        current.session.confirmation = \
            current.T("The parse request has been submitted")
        redirect(URL(f="parser"))

    # -------------------------------------------------------------------------
    @staticmethod
    def parser_enabled(channel_id):
        """
            Helper function to see if there is a Parser connected to a Channel
            - used to determine whether to populate the msg_parsing_status table

            @param channel_id: the msg_channel super-entity key

            @return: True if an enabled parser is connected, else False
        """

        table = current.s3db.msg_parser
        record = current.db(table.channel_id == channel_id).select(table.enabled,
                                                                   limitby=(0, 1),
                                                                   ).first()
        if record and record.enabled:
            return True
        else:
            return False

    # -------------------------------------------------------------------------
    @staticmethod
    def parser_enable(id):
        """
            Enable a Parser
            - Connect a Parser to a Channel

            CLI API for shell scripts & to be called by S3Method

            @param id: the msg_parser record id

            @return: confirmation message (str)

            @ToDo: Ensure only 1 Parser is connected to any Channel at a time
        """

        db = current.db
        s3db = current.s3db
        table = s3db.msg_parser
        record = db(table.id == id).select(table.id, # needed for update_record
                                           table.enabled,
                                           table.channel_id,
                                           table.function_name,
                                           limitby=(0, 1),
                                           ).first()
        if not record.enabled:
            # Flag it as enabled
            record.update_record(enabled = True)

        channel_id = record.channel_id
        function_name = record.function_name

        # Do we have an existing Task?
        ttable = db.scheduler_task
        args = '[%s, "%s"]' % (channel_id, function_name)
        query = ((ttable.function_name == "msg_parse") & \
                 (ttable.args == args) & \
                 (ttable.status.belongs(["RUNNING", "QUEUED", "ALLOCATED"])))
        exists = db(query).select(ttable.id,
                                  limitby=(0, 1)).first()
        if exists:
            return "Parser already enabled"
        else:
            current.s3task.schedule_task("msg_parse",
                                         args = [channel_id, function_name],
                                         period = 300,  # seconds
                                         timeout = 300, # seconds
                                         repeats = 0    # unlimited
                                         )
            return "Parser enabled"

    # -------------------------------------------------------------------------
    @staticmethod
    def parser_enable_interactive(r, **attr):
        """
            Enable a Parser
            - Connect a Parser to a Channel

            S3Method for interactive requests
        """

        result = current.s3db.msg_parser_enable(r.id)
        current.session.confirmation = result
        redirect(URL(f="parser"))

    # -------------------------------------------------------------------------
    @staticmethod
    def parser_disable(id):
        """
            Disable a Parser
            - Disconnect a Parser from a Channel

            CLI API for shell scripts & to be called by S3Method

            @param id: the msg_parser record id

            @return: confirmation message (str)
        """

        db = current.db
        s3db = current.s3db
        table = s3db.msg_parser
        record = db(table.id == id).select(table.id, # needed for update_record
                                           table.enabled,
                                           table.channel_id,
                                           table.function_name,
                                           limitby=(0, 1),
                                           ).first()
        if record.enabled:
            # Flag it as disabled
            record.update_record(enabled = False)

        # Do we have an existing Task?
        ttable = db.scheduler_task
        args = '[%s, "%s"]' % (record.channel_id, record.function_name)
        query = ((ttable.function_name == "msg_parse") & \
                 (ttable.args == args) & \
                 (ttable.status.belongs(["RUNNING", "QUEUED", "ALLOCATED"])))
        exists = db(query).select(ttable.id,
                                  limitby=(0, 1)).first()
        if exists:
            # Disable all
            db(query).update(status="STOPPED")
            return "Parser disabled"
        else:
            return "Parser already disabled"

    # -------------------------------------------------------------------------
    @staticmethod
    def parser_disable_interactive(r, **attr):
        """
            Disable a Parser
            - Disconnect a Parser from a Channel

            S3Method for interactive requests
        """

        result = current.s3db.msg_parser_disable(r.id)
        current.session.confirmation = result
        redirect(URL(f="parser"))

    # -------------------------------------------------------------------------
    @staticmethod
    def msg_parser_onaccept(form):
        """
            Process the Enabled Flag
            - enable/disable the parser's scheduled task when the flag changes
        """

        if form.record:
            # Update form
            # Process only if changed
            if form.record.enabled and not form.vars.enabled:
                current.s3db.msg_parser_disable(form.vars.id)
            elif form.vars.enabled and not form.record.enabled:
                current.s3db.msg_parser_enable(form.vars.id)
        else:
            # Create form
            # Process only if enabled
            if form.vars.enabled:
                current.s3db.msg_parser_enable(form.vars.id)
# =============================================================================
class S3RSSModel(S3ChannelModel):
    """
        RSS channel
        - feed subscriptions & the posts fetched from them
    """

    names = ("msg_rss_channel",
             "msg_rss",
             )

    def model(self):
        """
            Define the RSS channel & RSS post tables

            @return: dict of names for global scope (empty)
        """

        T = current.T

        define_table = self.define_table
        set_method = self.set_method
        super_link = self.super_link

        # ---------------------------------------------------------------------
        # RSS Settings for an account
        #
        tablename = "msg_rss_channel"
        define_table(tablename,
                     # Instance
                     super_link("channel_id", "msg_channel"),
                     Field("name", length=255, unique=True,
                           label = T("Name"),
                           ),
                     Field("description",
                           label = T("Description"),
                           ),
                     Field("enabled", "boolean",
                           default = True,
                           label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("url",
                           label = T("URL"),
                           requires = IS_URL(),
                           ),
                     s3_datetime(label = T("Last Polled"),
                                 writable = False,
                                 ),
                     # HTTP ETag for conditional GET of the feed
                     Field("etag",
                           label = T("ETag"),
                           writable = False
                           ),
                     *s3_meta_fields())

        self.configure(tablename,
                       list_fields = ["name",
                                      "description",
                                      "enabled",
                                      "url",
                                      "date",
                                      "channel_status.status",
                                      ],
                       onaccept = self.msg_channel_onaccept,
                       super_entity = "msg_channel",
                       )

        set_method("msg", "rss_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "rss_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "rss_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # RSS Feed Posts
        #
        tablename = "msg_rss"
        define_table(tablename,
                     # Instance
                     super_link("message_id", "msg_message"),
                     self.msg_channel_id(),
                     s3_datetime(default="now",
                                 label = T("Published on"),
                                 ),
                     Field("title",
                           label = T("Title"),
                           ),
                     Field("body", "text",
                           label = T("Content"),
                           ),
                     # The post's link URL (doubles as the dedup key)
                     Field("from_address",
                           label = T("Link"),
                           ),
                     # http://pythonhosted.org/feedparser/reference-feed-author_detail.html
                     Field("author",
                           label = T("Author"),
                           ),
                     # http://pythonhosted.org/feedparser/reference-entry-tags.html
                     Field("tags", "list:string",
                           label = T("Tags"),
                           ),
                     self.gis_location_id(),
                     # Just present for Super Entity
                     Field("inbound", "boolean",
                           default = True,
                           readable = False,
                           writable = False,
                           ),
                     *s3_meta_fields())

        self.configure(tablename,
                       deduplicate = self.msg_rss_duplicate,
                       list_fields = ["channel_id",
                                      "title",
                                      "from_address",
                                      "date",
                                      "body"
                                      ],
                       super_entity = current.s3db.msg_message,
                       )

        # ---------------------------------------------------------------------
        return {}

    # ---------------------------------------------------------------------
    @staticmethod
    def msg_rss_duplicate(item):
        """
            Import item deduplication, match by link (from_address)

            @param item: the S3ImportItem instance
        """

        from_address = item.data.get("from_address")
        table = item.table
        query = (table.from_address == from_address)
        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE
# =============================================================================
class S3SMSModel(S3Model):
    """
        SMS: Short Message Service

        These can be received through a number of different gateways
        - MCommons
        - Modem (@ToDo: Restore this)
        - Tropo
        - Twilio
    """

    names = ("msg_sms",)

    def model(self):
        """
            Define the SMS message table

            @return: dict of names for global scope (empty)
        """

        #T = current.T

        user = current.auth.user
        if user and user.organisation_id:
            # SMS Messages need to be tagged to their org so that they can be sent through the correct gateway
            default = user.organisation_id
        else:
            default = None

        # ---------------------------------------------------------------------
        # SMS Messages: InBox & Outbox
        #
        tablename = "msg_sms"
        self.define_table(tablename,
                          # Instance
                          self.super_link("message_id", "msg_message"),
                          self.msg_channel_id(),
                          self.org_organisation_id(default = default),
                          s3_datetime(default="now"),
                          Field("body", "text",
                                # Allow multi-part SMS
                                #length = 160,
                                #label = T("Message"),
                                ),
                          Field("from_address",
                                #label = T("Sender"),
                                ),
                          Field("to_address",
                                #label = T("To"),
                                ),
                          Field("inbound", "boolean",
                                default = False,
                                #represent = lambda direction: \
                                #            (direction and [T("In")] or \
                                #                           [T("Out")])[0],
                                #label = T("Direction")),
                                ),
                          # Used e.g. for Clickatell
                          Field("remote_id",
                                #label = T("Remote ID"),
                                ),
                          *s3_meta_fields())

        self.configure(tablename,
                       super_entity = "msg_message",
                       )

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3SMSOutboundModel(S3Model):
    """
        SMS: Short Message Service
        - Outbound Channels

        These can be sent through a number of different gateways
        - Modem
        - SMTP
        - Tropo
        - Web API (inc Clickatell, MCommons, mVaayoo)
    """

    names = ("msg_sms_outbound_gateway",
             "msg_sms_modem_channel",
             "msg_sms_smtp_channel",
             "msg_sms_webapi_channel",
             )

    def model(self):
        """
            Define the outbound SMS gateway & channel tables

            @return: dict of names for global scope (empty)
        """

        #T = current.T

        configure = self.configure
        define_table = self.define_table

        settings = current.deployment_settings

        # ---------------------------------------------------------------------
        # SMS Outbound Gateway
        # - select which gateway is in active use for which Organisation/Branch
        #
        country_code = settings.get_L10n_default_country_code()

        tablename = "msg_sms_outbound_gateway"
        define_table(tablename,
                     self.msg_channel_id(
                        requires = IS_ONE_OF(current.db, "msg_channel.channel_id",
                                             S3Represent(lookup="msg_channel"),
                                             instance_types = ("msg_sms_modem_channel",
                                                               "msg_sms_webapi_channel",
                                                               "msg_sms_smtp_channel",
                                                               ),
                                             sort = True,
                                             ),
                        ),
                     #Field("outgoing_sms_handler", length=32,
                     #      requires = IS_IN_SET(current.msg.GATEWAY_OPTS,
                     #                           zero = None),
                     #      ),
                     # Allow selection of different gateways based on Organisation/Branch
                     self.org_organisation_id(),
                     # @ToDo: Allow selection of different gateways based on destination Location
                     #self.gis_location_id(),
                     Field("default_country_code", "integer",
                           default = country_code,
                           ),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        # SMS Modem Channel
        #
        tablename = "msg_sms_modem_channel"
        define_table(tablename,
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("modem_port"),
                     Field("modem_baud", "integer",
                           default = 115200,
                           ),
                     Field("enabled", "boolean",
                           default = True,
                           ),
                     Field("max_length", "integer",
                           default = 160,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  super_entity = "msg_channel",
                  )

        # ---------------------------------------------------------------------
        # SMS via SMTP Channel
        #
        tablename = "msg_sms_smtp_channel"
        define_table(tablename,
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("address", length=64,
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("subject", length=64),
                     Field("enabled", "boolean",
                           default = True,
                           ),
                     Field("max_length", "integer",
                           default = 160,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  super_entity = "msg_channel",
                  )

        # ---------------------------------------------------------------------
        # Settings for Web API services
        #
        # @ToDo: Simplified dropdown of services which prepopulates entries & provides nice prompts for the config options
        #        + Advanced mode for raw access to real fields
        #
        # https://www.twilio.com/docs/api/rest/sending-messages
        #
        tablename = "msg_sms_webapi_channel"
        define_table(tablename,
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("url",
                           #default = "http://sms1.cardboardfish.com:9001/HTTPSMS?", # Cardboardfish
                           default = "https://api.clickatell.com/http/sendmsg", # Clickatell
                           #default = "https://secure.mcommons.com/api/send_message", # Mobile Commons
                           #default = "https://www.textmagic.com/app/api", # Text Magic
                           #default = "https://api.twilio.com/2010-04-01/Accounts/{AccountSid}/Messages", # Twilio (Untested)
                           requires = IS_URL(),
                           ),
                     Field("parameters",
                           #default = "S=H&UN=yourusername&P=yourpassword&SA=Sahana", # Cardboardfish
                           default = "user=yourusername&password=yourpassword&api_id=yourapiid", # Clickatell
                           #default = "campaign_id=yourid", # Mobile Commons
                           #default = "username=yourusername&password=yourpassword&cmd=send&unicode=1", # Text Magic
                           #default = "From={RegisteredTelNumber}", # Twilio (Untested)
                           ),
                     Field("message_variable", "string",
                           #default = "M", # Cardboardfish
                           default = "text", # Clickatell, Text Magic
                           #default = "body", # Mobile Commons
                           #default = "Body", # Twilio (Untested)
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("to_variable", "string",
                           #default = "DA", # Cardboardfish
                           default = "to", # Clickatell
                           #default = "phone_number", # Mobile Commons
                           #default = "phone", # Text Magic
                           #default = "To", # Twilio (Untested)
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("max_length", "integer",
                           default = 480, # Clickatell concat 3
                           ),
                     # If using HTTP Auth (e.g. Mobile Commons)
                     Field("username"),
                     Field("password"),
                     Field("enabled", "boolean",
                           default = True,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  super_entity = "msg_channel",
                  )

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3TropoModel(S3Model):
    """
        Tropo can be used to send & receive SMS, Twitter & XMPP

        https://www.tropo.com
    """

    names = ("msg_tropo_channel",
             "msg_tropo_scratch",
             )

    def model(self):
        """
            Define the Tropo channel & scratch-pad tables

            @return: dict of names for global scope (empty)
        """

        #T = current.T

        define_table = self.define_table
        set_method = self.set_method

        # ---------------------------------------------------------------------
        # Tropo Channels
        #
        tablename = "msg_tropo_channel"
        define_table(tablename,
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("enabled", "boolean",
                           default = True,
                           #label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("token_messaging"),
                     #Field("token_voice"),
                     *s3_meta_fields())

        self.configure(tablename,
                       super_entity = "msg_channel",
                       )

        set_method("msg", "tropo_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "tropo_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "tropo_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # Tropo Scratch pad for outbound messaging
        # - NOTE(review): no s3_meta_fields & not a super-entity instance,
        #   presumably a transient work queue - confirm against the Tropo task
        #
        tablename = "msg_tropo_scratch"
        define_table(tablename,
                     Field("row_id", "integer"),
                     Field("message_id", "integer"),
                     Field("recipient"),
                     Field("message"),
                     Field("network"),
                     )

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3TwilioModel(S3ChannelModel):
    """
    Twilio Inbound SMS channel
    - for Outbound, use Web API
    """

    # Table names exported to the s3db model registry
    names = ("msg_twilio_channel",
             "msg_twilio_sid",
             )

    def model(self):
        """
        Define the Twilio channel table & the message-SID tracking
        table, and register the enable/disable/poll REST methods.
        """

        #T = current.T

        define_table = self.define_table
        set_method = self.set_method

        # ---------------------------------------------------------------------
        # Twilio Channels
        #
        tablename = "msg_twilio_channel"
        define_table(tablename,
                     # Instance
                     self.super_link("channel_id", "msg_channel"),
                     Field("name"),
                     Field("description"),
                     Field("enabled", "boolean",
                           default = True,
                           #label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("account_name", length=255, unique=True),
                     # Twilio REST API base URL
                     Field("url",
                           default = \
                           "https://api.twilio.com/2010-04-01/Accounts"
                           ),
                     Field("account_sid", length=64,
                           requires = IS_NOT_EMPTY(),
                           ),
                     # Stored as password-type so it is masked in forms
                     Field("auth_token", "password", length=64,
                           readable = False,
                           requires = IS_NOT_EMPTY(),
                           ),
                     *s3_meta_fields())

        self.configure(tablename,
                       onaccept = self.msg_channel_onaccept,
                       super_entity = "msg_channel",
                       )

        set_method("msg", "twilio_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "twilio_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "twilio_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # Twilio Message extensions
        # - store message sid to know which ones we've already downloaded
        #
        tablename = "msg_twilio_sid"
        define_table(tablename,
                     # Component not Instance
                     self.msg_message_id(ondelete = "CASCADE"),
                     Field("sid"),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3TwitterModel(S3Model):
    """
    Twitter channels (authenticated accounts) and the InBox/Outbox
    of Tweets sent/received through them.
    """

    # Table names exported to the s3db model registry
    names = ("msg_twitter_channel",
             "msg_twitter",
             )

    def model(self):
        """
        Define the msg_twitter_channel & msg_twitter tables and
        register the enable/disable/poll REST methods for the channel.
        """

        T = current.T
        db = current.db  # NOTE(review): appears unused in this method

        configure = self.configure
        define_table = self.define_table
        set_method = self.set_method

        # ---------------------------------------------------------------------
        # Twitter Channel
        #
        password_widget = S3PasswordWidget()
        tablename = "msg_twitter_channel"
        define_table(tablename,
                     #Instance
                     self.super_link("channel_id", "msg_channel"),
                     # @ToDo: Allow different Twitter accounts for different PEs (Orgs / Teams)
                     #self.pr_pe_id(),
                     Field("name",
                           label = T("Name"),
                           ),
                     Field("description",
                           label = T("Description"),
                           ),
                     Field("enabled", "boolean",
                           default = True,
                           label = T("Enabled?"),
                           represent = s3_yes_no_represent,
                           ),
                     # Only one channel may be flagged for Login at a time
                     # (enforced in twitter_channel_onaccept)
                     Field("login", "boolean",
                           default = False,
                           label = T("Use for Login?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("twitter_account",
                           label = T("Twitter Account"),
                           ),
                     # Get these from https://apps.twitter.com
                     Field("consumer_key", "password",
                           label = T("Consumer Key"),
                           widget = password_widget,
                           ),
                     Field("consumer_secret", "password",
                           label = T("Consumer Secret"),
                           widget = password_widget,
                           ),
                     Field("access_token", "password",
                           label = T("Access Token"),
                           widget = password_widget,
                           ),
                     Field("access_token_secret", "password",
                           label = T("Access Token Secret"),
                           widget = password_widget,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  onaccept = self.twitter_channel_onaccept,
                  #onvalidation = self.twitter_channel_onvalidation
                  super_entity = "msg_channel",
                  )

        set_method("msg", "twitter_channel",
                   method = "enable",
                   action = self.msg_channel_enable_interactive)

        set_method("msg", "twitter_channel",
                   method = "disable",
                   action = self.msg_channel_disable_interactive)

        set_method("msg", "twitter_channel",
                   method = "poll",
                   action = self.msg_channel_poll)

        # ---------------------------------------------------------------------
        # Twitter Messages: InBox & Outbox
        #
        tablename = "msg_twitter"
        define_table(tablename,
                     # Instance
                     self.super_link("message_id", "msg_message"),
                     self.msg_channel_id(),
                     s3_datetime(default = "now",
                                 label = T("Posted on"),
                                 ),
                     # 140 chars = classic Tweet length limit
                     Field("body", length=140,
                           label = T("Message"),
                           ),
                     Field("from_address", #notnull=True,
                           label = T("From"),
                           represent = self.twitter_represent,
                           requires = IS_NOT_EMPTY(),
                           ),
                     Field("to_address",
                           label = T("To"),
                           represent = self.twitter_represent,
                           ),
                     Field("inbound", "boolean",
                           default = False,
                           label = T("Direction"),
                           represent = lambda direction: \
                               (direction and [T("In")] or \
                                [T("Out")])[0],
                           ),
                     Field("msg_id", # Twitter Message ID
                           readable = False,
                           writable = False,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  list_fields = ["id",
                                 #"priority",
                                 #"category",
                                 "body",
                                 "from_address",
                                 "date",
                                 #"location_id",
                                 ],
                  #orderby = ~table.priority,
                  super_entity = "msg_message",
                  )

        # ---------------------------------------------------------------------
        return {}

    # -------------------------------------------------------------------------
    @staticmethod
    def twitter_represent(nickname, show_link=True):
        """
        Represent a Twitter account

        @param nickname: the Twitter screen name to represent
        @param show_link: wrap the representation in a link to the
                          person record, if one can be found

        @return: pentity representation (possibly a link) if the
                 nickname is registered as a TWITTER contact,
                 otherwise the raw nickname
        """

        if not nickname:
            return current.messages["NONE"]

        db = current.db
        s3db = current.s3db
        table = s3db.pr_contact
        query = (table.contact_method == "TWITTER") & \
                (table.value == nickname)
        row = db(query).select(table.pe_id,
                               limitby=(0, 1)).first()
        if row:
            # NOTE(review): "repr" shadows the builtin of the same name
            repr = s3db.pr_pentity_represent(row.pe_id)
            if show_link:
                # Assume person
                ptable = s3db.pr_person
                row = db(ptable.pe_id == row.pe_id).select(ptable.id,
                                                           limitby=(0, 1)).first()
                if row:
                    link = URL(c="pr", f="person", args=[row.id])
                    return A(repr, _href=link)
            return repr
        else:
            return nickname

    # -------------------------------------------------------------------------
    @staticmethod
    def twitter_channel_onaccept(form):
        """
        Post-process channel create/update: ensure at most one channel
        is flagged for Login, then run the standard channel onaccept.
        """

        if form.vars.login:
            # Ensure only a single account used for Login
            current.db(current.s3db.msg_twitter_channel.id != form.vars.id).update(login = False)

        # Normal onaccept processing
        S3ChannelModel.channel_onaccept(form)

    # -------------------------------------------------------------------------
    @staticmethod
    def twitter_channel_onvalidation(form):
        """
        Complete oauth: take tokens from session + pin from form,
        and do the 2nd API call to Twitter
        """

        T = current.T
        session = current.session
        settings = current.deployment_settings.msg
        s3 = session.s3
        form_vars = form.vars

        if form_vars.pin and s3.twitter_request_key and s3.twitter_request_secret:
            try:
                import tweepy
            except:
                raise HTTP(501, body=T("Can't import tweepy"))

            oauth = tweepy.OAuthHandler(settings.twitter_oauth_consumer_key,
                                        settings.twitter_oauth_consumer_secret)
            oauth.set_request_token(s3.twitter_request_key,
                                    s3.twitter_request_secret)
            try:
                oauth.get_access_token(form_vars.pin)
                # NOTE(review): oauth.access_token.key/.secret is the
                # pre-3.0 tweepy API - confirm against installed version
                form_vars.oauth_key = oauth.access_token.key
                form_vars.oauth_secret = oauth.access_token.secret
                twitter = tweepy.API(oauth)
                form_vars.twitter_account = twitter.me().screen_name
                form_vars.pin = "" # we won't need it anymore
                return
            except tweepy.TweepError:
                session.error = T("Settings were reset because authenticating with Twitter failed")

        # Either user asked to reset, or error - clear everything
        for k in ["oauth_key", "oauth_secret", "twitter_account"]:
            form_vars[k] = None
        for k in ["twitter_request_key", "twitter_request_secret"]:
            s3[k] = ""
# =============================================================================
class S3TwitterSearchModel(S3ChannelModel):
    """
    Twitter Searches
    - results can be fed to KeyGraph

    https://dev.twitter.com/docs/api/1.1/get/search/tweets
    """

    # Table names exported to the s3db model registry
    names = ("msg_twitter_search",
             "msg_twitter_result",
             )

    def model(self):
        """
        Define the search-query & search-result tables and register
        the poll/keygraph/timeline REST methods.
        """

        T = current.T
        db = current.db

        configure = self.configure
        define_table = self.define_table
        set_method = self.set_method

        # ---------------------------------------------------------------------
        # Twitter Search Query
        #
        tablename = "msg_twitter_search"
        define_table(tablename,
                     Field("keywords", "text",
                           label = T("Keywords"),
                           ),
                     # @ToDo: Allow setting a Point & Radius for filtering by geocode
                     #self.gis_location_id(),
                     Field("lang",
                           # Set in controller
                           #default = current.response.s3.language,
                           label = T("Language"),
                           ),
                     Field("count", "integer",
                           default = 100,
                           label = T("# Results per query"),
                           ),
                     Field("include_entities", "boolean",
                           default = False,
                           label = T("Include Entity Information?"),
                           represent = s3_yes_no_represent,
                           comment = DIV(_class="tooltip",
                                         _title="%s|%s" % (T("Entity Information"),
                                                           T("This is required if analyzing with KeyGraph."))),
                           ),
                     # @ToDo: Rename or even move to Component Table
                     Field("is_processed", "boolean",
                           default = False,
                           label = T("Processed with KeyGraph?"),
                           represent = s3_yes_no_represent,
                           ),
                     Field("is_searched", "boolean",
                           default = False,
                           label = T("Searched?"),
                           represent = s3_yes_no_represent,
                           ),
                     *s3_meta_fields())

        configure(tablename,
                  list_fields = ["keywords",
                                 "lang",
                                 "count",
                                 #"include_entities",
                                 ],
                  )

        # Reusable Query ID
        represent = S3Represent(lookup=tablename, fields=["keywords"])
        search_id = S3ReusableField("search_id", "reference %s" % tablename,
                                    label = T("Search Query"),
                                    ondelete = "CASCADE",
                                    represent = represent,
                                    requires = IS_EMPTY_OR(
                                        IS_ONE_OF_EMPTY(db, "msg_twitter_search.id")
                                        ),
                                    )

        set_method("msg", "twitter_search",
                   method = "poll",
                   action = self.twitter_search_poll)

        set_method("msg", "twitter_search",
                   method = "keygraph",
                   action = self.twitter_keygraph)

        set_method("msg", "twitter_result",
                   method = "timeline",
                   action = self.twitter_timeline)

        # ---------------------------------------------------------------------
        # Twitter Search Results
        #
        # @ToDo: Store the places mentioned in the Tweet as linked Locations
        #
        tablename = "msg_twitter_result"
        define_table(tablename,
                     # Instance
                     self.super_link("message_id", "msg_message"),
                     # Just present for Super Entity
                     #self.msg_channel_id(),
                     search_id(),
                     s3_datetime(default="now",
                                 label = T("Tweeted on"),
                                 ),
                     Field("tweet_id",
                           label = T("Tweet ID")),
                     Field("lang",
                           label = T("Language")),
                     Field("from_address",
                           label = T("Tweeted by")),
                     Field("body",
                           label = T("Tweet")),
                     # @ToDo: Populate from Parser
                     #Field("category",
                     #      writable = False,
                     #      label = T("Category"),
                     #      ),
                     #Field("priority", "integer",
                     #      writable = False,
                     #      label = T("Priority"),
                     #      ),
                     self.gis_location_id(),
                     # Just present for Super Entity
                     #Field("inbound", "boolean",
                     #      default = True,
                     #      readable = False,
                     #      writable = False,
                     #      ),
                     *s3_meta_fields())

        configure(tablename,
                  list_fields = [#"category",
                                 #"priority",
                                 "body",
                                 "from_address",
                                 "date",
                                 "location_id",
                                 ],
                  #orderby=~table.priority,
                  super_entity = "msg_message",
                  )

        # ---------------------------------------------------------------------
        return {}

    # -----------------------------------------------------------------------------
    @staticmethod
    def twitter_search_poll(r, **attr):
        """
        Perform a Search of Twitter

        S3Method for interactive requests
        """

        id = r.id  # NOTE(review): shadows the builtin "id"
        tablename = r.tablename  # NOTE(review): appears unused
        # NOTE(review): "async" became a reserved word in Python 3.7;
        # s3task.async needs renaming for Python 3 support
        current.s3task.async("msg_twitter_search", args=[id])
        current.session.confirmation = \
            current.T("The search request has been submitted, so new messages should appear shortly - refresh to see them")
        # Filter results to this Search
        redirect(URL(f="twitter_result",
                     vars={"~.search_id": id}))

    # -----------------------------------------------------------------------------
    @staticmethod
    def twitter_keygraph(r, **attr):
        """
        Process Search Results with KeyGraph

        S3Method for interactive requests
        """

        tablename = r.tablename  # NOTE(review): appears unused
        current.s3task.async("msg_process_keygraph", args=[r.id])
        current.session.confirmation = \
            current.T("The search results are now being processed with KeyGraph")
        # @ToDo: Link to KeyGraph results
        redirect(URL(f="twitter_result"))

    # =============================================================================
    @staticmethod
    def twitter_timeline(r, **attr):
        """
        Display the Tweets on a Simile Timeline

        http://www.simile-widgets.org/wiki/Reference_Documentation_for_Timeline
        """

        if r.representation == "html" and r.name == "twitter_result":
            response = current.response
            s3 = response.s3
            appname = r.application

            # Add core Simile Code
            s3.scripts.append("/%s/static/scripts/simile/timeline/timeline-api.js" % appname)

            # Add our control script
            if s3.debug:
                s3.scripts.append("/%s/static/scripts/S3/s3.timeline.js" % appname)
            else:
                s3.scripts.append("/%s/static/scripts/S3/s3.timeline.min.js" % appname)

            # Add our data
            # @ToDo: Make this the initial data & then collect extra via REST with a stylesheet
            # add in JS using S3.timeline.eventSource.addMany(events) where events is a []
            if r.record:
                # Single record
                rows = [r.record]
            else:
                # Multiple records
                # @ToDo: Load all records & sort to closest in time
                # http://stackoverflow.com/questions/7327689/how-to-generate-a-sequence-of-future-datetimes-in-python-and-determine-nearest-d
                rows = r.resource.select(["date", "body"], limit=2000, as_rows=True)

            data = {"dateTimeFormat": "iso8601",
                    }

            now = r.utcnow
            tl_start = tl_end = now
            events = []
            import re
            for row in rows:
                # Dates: track earliest/latest for the timeline bounds
                start = row.date or ""
                if start:
                    if start < tl_start:
                        tl_start = start
                    if start > tl_end:
                        tl_end = start
                    start = start.isoformat()
                # Strip @mentions and "RT" to build a short title
                title = (re.sub(r"(?<=^|(?<=[^a-zA-Z0-9-_\.]))@([A-Za-z]+[A-Za-z0-9]+)|RT", "", row.body))
                if len(title) > 30:
                    title = title[:30]
                events.append({"start": start,
                               "title": title,
                               "description": row.body,
                               })
            data["events"] = events
            data = json.dumps(data, separators=SEPARATORS)

            # Inline JS globals consumed by s3.timeline.js
            code = "".join((
'''S3.timeline.data=''', data, '''
S3.timeline.tl_start="''', tl_start.isoformat(), '''"
S3.timeline.tl_end="''', tl_end.isoformat(), '''"
S3.timeline.now="''', now.isoformat(), '''"
'''))

            # Control our code in static/scripts/S3/s3.timeline.js
            s3.js_global.append(code)

            # Create the DIV
            item = DIV(_id="s3timeline", _class="s3-timeline")

            output = dict(item=item)

            # Maintain RHeader for consistency
            if attr.get("rheader"):
                rheader = attr["rheader"](r)
                if rheader:
                    output["rheader"] = rheader

            output["title"] = current.T("Twitter Timeline")
            response.view = "timeline.html"
            return output

        else:
            r.error(405, current.ERROR.BAD_METHOD)
# =============================================================================
class S3XFormsModel(S3Model):
    """
    XForms are used by the ODK Collect mobile client

    http://eden.sahanafoundation.org/wiki/BluePrint/Mobile#Android
    """

    names = ("msg_xforms_store",)

    def model(self):
        """
        Define the store for incoming xform chunks.
        """

        #T = current.T

        # ---------------------------------------------------------------------
        # SMS store for persistence and scratch pad for combining incoming xform chunks
        tablename = "msg_xforms_store"
        self.define_table(tablename,
                          # Sender phone number
                          Field("sender", length=20),
                          Field("fileno", "integer"),
                          Field("totalno", "integer"),
                          Field("partno", "integer"),
                          # One SMS-sized chunk of the xform
                          Field("message", length=160)
                          )

        # ---------------------------------------------------------------------
        return {}
# =============================================================================
class S3BaseStationModel(S3Model):
    """
    Base Stations (Cell Towers) are a type of Site

    @ToDo: Calculate Coverage from Antenna Height, Radio Power and Terrain
           - see RadioMobile
    """

    names = ("msg_basestation",)

    def model(self):
        """
        Define the msg_basestation table (an org_site instance) with
        CRUD strings and name-based import deduplication.
        """

        T = current.T

        # ---------------------------------------------------------------------
        # Base Stations (Cell Towers)
        #
        if current.deployment_settings.get_msg_basestation_code_unique():
            db = current.db
            code_unique = IS_EMPTY_OR(IS_NOT_IN_DB(db, "msg_basestation.code"))
        else:
            code_unique = None

        tablename = "msg_basestation"
        self.define_table(tablename,
                          self.super_link("site_id", "org_site"),
                          Field("name", notnull=True,
                                length=64, # Mayon Compatibility
                                label = T("Name"),
                                ),
                          Field("code", length=10, # Mayon compatibility
                                label = T("Code"),
                                requires = code_unique,
                                ),
                          self.org_organisation_id(
                                label = T("Operator"),
                                requires = self.org_organisation_requires(required=True,
                                                                          updateable=True),
                                #widget=S3OrganisationAutocompleteWidget(default_from_profile=True),
                                ),
                          self.gis_location_id(),
                          s3_comments(),
                          *s3_meta_fields())

        # CRUD strings
        current.response.s3.crud_strings[tablename] = Storage(
            label_create=T("Create Base Station"),
            title_display=T("Base Station Details"),
            title_list=T("Base Stations"),
            title_update=T("Edit Base Station"),
            title_upload=T("Import Base Stations"),
            title_map=T("Map of Base Stations"),
            label_list_button=T("List Base Stations"),
            label_delete_button=T("Delete Base Station"),
            msg_record_created=T("Base Station added"),
            msg_record_modified=T("Base Station updated"),
            msg_record_deleted=T("Base Station deleted"),
            msg_list_empty=T("No Base Stations currently registered"))

        self.configure(tablename,
                       deduplicate = self.msg_basestation_duplicate,
                       super_entity = "org_site",
                       )

        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return {}

    # ---------------------------------------------------------------------
    @staticmethod
    def msg_basestation_duplicate(item):
        """
        Import item deduplication, match by name
        (Adding location_id doesn't seem to be a good idea)

        @param item: the S3ImportItem instance
        """

        name = item.data.get("name")
        if not name:
            # No name in the import item => nothing to match on
            # (previously this raised AttributeError on None.lower())
            return

        table = item.table
        query = (table.name.lower() == name.lower())
        #location_id = None
        # if "location_id" in item.data:
        #     location_id = item.data.location_id
        ##   This doesn't find deleted records:
        #     query = query & (table.location_id == location_id)
        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        # if duplicate is None and location_id:
        ##   Search for deleted basestations with this name
        #     query = (table.name.lower() == name.lower()) & \
        #             (table.deleted == True)
        #     row = db(query).select(table.id, table.deleted_fk,
        #                            limitby=(0, 1)).first()
        #     if row:
        #         fkeys = json.loads(row.deleted_fk)
        #         if "location_id" in fkeys and \
        #            str(fkeys["location_id"]) == str(location_id):
        #             duplicate = row
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE
# END =========================================================================
|
syphar/django | refs/heads/master | tests/i18n/test_compilation.py | 16 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gettext as gettext_module
import os
import stat
import unittest
from subprocess import Popen
from django.core.management import (
CommandError, call_command, execute_from_command_line,
)
from django.core.management.commands.makemessages import \
Command as MakeMessagesCommand
from django.core.management.utils import find_command
from django.test import SimpleTestCase, mock, override_settings
from django.test.utils import captured_stderr, captured_stdout
from django.utils import six, translation
from django.utils.encoding import force_text
from django.utils.six import StringIO
from django.utils.translation import ugettext
from .utils import RunInTmpDirMixin, copytree
has_msgfmt = find_command('msgfmt')
@unittest.skipUnless(has_msgfmt, 'msgfmt is mandatory for compilation tests')
class MessageCompilationTests(RunInTmpDirMixin, SimpleTestCase):
    """Base class: each test runs in a throwaway copy of work_subdir."""

    work_subdir = 'commands'
class PoFileTests(MessageCompilationTests):
    """.po file handling: BOM rejection and unwritable .mo targets."""

    LOCALE = 'es_AR'
    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE

    def test_bom_rejection(self):
        # A .po file starting with a BOM must be rejected and no .mo written
        with self.assertRaises(CommandError) as cm:
            call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
        self.assertIn("file has a BOM (Byte Order Mark)", cm.exception.args[0])
        self.assertFalse(os.path.exists(self.MO_FILE))

    def test_no_write_access(self):
        mo_file_en = 'locale/en/LC_MESSAGES/django.mo'
        err_buffer = StringIO()
        # put file in read-only mode
        old_mode = os.stat(mo_file_en).st_mode
        os.chmod(mo_file_en, stat.S_IREAD)
        try:
            call_command('compilemessages', locale=['en'], stderr=err_buffer, verbosity=0)
            err = err_buffer.getvalue()
            self.assertIn("not writable location", force_text(err))
        finally:
            # Always restore permissions so the tmp dir can be cleaned up
            os.chmod(mo_file_en, old_mode)
class PoFileContentsTests(MessageCompilationTests):
    # Ticket #11240: '%' symbols in translations must not break msgfmt

    LOCALE = 'fr'
    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE

    def test_percent_symbol_in_po_file(self):
        call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE))
class MultipleLocaleCompilationTests(MessageCompilationTests):
    """compilemessages with one or several --locale values."""

    MO_FILE_HR = None
    MO_FILE_FR = None

    def setUp(self):
        super(MultipleLocaleCompilationTests, self).setUp()
        localedir = os.path.join(self.test_dir, 'locale')
        # Absolute paths of the expected .mo outputs
        self.MO_FILE_HR = os.path.join(localedir, 'hr/LC_MESSAGES/django.mo')
        self.MO_FILE_FR = os.path.join(localedir, 'fr/LC_MESSAGES/django.mo')

    def test_one_locale(self):
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=['hr'], stdout=StringIO())

            self.assertTrue(os.path.exists(self.MO_FILE_HR))

    def test_multiple_locales(self):
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=['hr', 'fr'], stdout=StringIO())

            self.assertTrue(os.path.exists(self.MO_FILE_HR))
            self.assertTrue(os.path.exists(self.MO_FILE_FR))
class ExcludedLocaleCompilationTests(MessageCompilationTests):
    """compilemessages --exclude, alone and combined with --locale."""

    work_subdir = 'exclude'

    MO_FILE = 'locale/%s/LC_MESSAGES/django.mo'

    def setUp(self):
        super(ExcludedLocaleCompilationTests, self).setUp()
        copytree('canned_locale', 'locale')

    def test_command_help(self):
        with captured_stdout(), captured_stderr():
            # `call_command` bypasses the parser; by calling
            # `execute_from_command_line` with the help subcommand we
            # ensure that there are no issues with the parser itself.
            execute_from_command_line(['django-admin', 'help', 'compilemessages'])

    def test_one_locale_excluded(self):
        call_command('compilemessages', exclude=['it'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertTrue(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_multiple_locales_excluded(self):
        call_command('compilemessages', exclude=['it', 'fr'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_one_locale_excluded_with_locale(self):
        # --exclude wins over an explicit --locale
        call_command('compilemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))

    def test_multiple_locales_excluded_with_locale(self):
        call_command('compilemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
                     stdout=StringIO())
        self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
        self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
class CompilationErrorHandling(MessageCompilationTests):
    """Error propagation from the msgfmt subprocess."""

    def test_error_reported_by_msgfmt(self):
        # po file contains wrong po formatting.
        with self.assertRaises(CommandError):
            call_command('compilemessages', locale=['ja'], verbosity=0)

    def test_msgfmt_error_including_non_ascii(self):
        # po file contains invalid msgstr content (triggers non-ascii error content).
        # Make sure the output of msgfmt is unaffected by the current locale.
        env = os.environ.copy()
        env.update({str('LANG'): str('C')})
        with mock.patch('django.core.management.utils.Popen', lambda *args, **kwargs: Popen(*args, env=env, **kwargs)):
            if six.PY2:
                # Various assertRaises on PY2 don't support unicode error messages.
                # NOTE(review): if no CommandError is raised here, this branch
                # passes silently - no self.fail() after the try block
                try:
                    call_command('compilemessages', locale=['ko'], verbosity=0)
                except CommandError as err:
                    self.assertIn("' cannot start a field name", six.text_type(err))
            else:
                cmd = MakeMessagesCommand()
                if cmd.gettext_version < (0, 18, 3):
                    self.skipTest("python-brace-format is a recent gettext addition.")
                with self.assertRaisesMessage(CommandError, "' cannot start a field name"):
                    call_command('compilemessages', locale=['ko'], verbosity=0)
class ProjectAndAppTests(MessageCompilationTests):
    """Base class with .mo paths for both project- and app-level locale dirs."""

    LOCALE = 'ru'
    PROJECT_MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE
    APP_MO_FILE = 'app_with_locale/locale/%s/LC_MESSAGES/django.mo' % LOCALE
class FuzzyTranslationTest(ProjectAndAppTests):
    """Behaviour of the --fuzzy flag of compilemessages."""

    def setUp(self):
        super(FuzzyTranslationTest, self).setUp()
        gettext_module._translations = {}  # flush cache or test will be useless

    def test_nofuzzy_compiling(self):
        # Without fuzzy=True, fuzzy entries ('Vodka') stay untranslated
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
            with translation.override(self.LOCALE):
                self.assertEqual(ugettext('Lenin'), force_text('Ленин'))
                self.assertEqual(ugettext('Vodka'), force_text('Vodka'))

    def test_fuzzy_compiling(self):
        # With fuzzy=True, fuzzy entries are compiled too
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
            call_command('compilemessages', locale=[self.LOCALE], fuzzy=True, stdout=StringIO())
            with translation.override(self.LOCALE):
                self.assertEqual(ugettext('Lenin'), force_text('Ленин'))
                self.assertEqual(ugettext('Vodka'), force_text('Водка'))
class AppCompilationTest(ProjectAndAppTests):

    def test_app_locale_compiled(self):
        # Both the project and the app locale dirs get compiled
        call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
        self.assertTrue(os.path.exists(self.PROJECT_MO_FILE))
        self.assertTrue(os.path.exists(self.APP_MO_FILE))
|
ak2703/edx-platform | refs/heads/master | lms/djangoapps/courseware/features/staff_debug_info.py | 173 | """
Steps for staff_debug_info.feature lettuce tests
"""
from django.contrib.auth.models import User
from lettuce import world, step
from common import create_course, course_id
from courseware.courses import get_course_by_id
from instructor.access import allow_access
@step(u'i am staff member for the course "([^"]*)"$')
def i_am_staff_member_for_the_course(step, course_number):
    """
    Lettuce step: create the course, create the 'robot' user, grant it
    staff access on the course and log it in.

    NOTE(review): the step pattern starts with lowercase 'i' - presumably
    matching the .feature file verbatim; verify before "fixing" the case.
    """
    # Create the course
    create_course(step, course_number)
    course = get_course_by_id(course_id(course_number))

    # Create the user
    world.create_user('robot', 'test')
    user = User.objects.get(username='robot')

    # Add user as a course staff.
    allow_access(course, user, "staff")

    world.log_in(username='robot', password='test')
@step(u'I can view staff debug info')
def view_staff_debug_info(step):
    """Open the Staff Debug Info modal and wait for it to appear."""
    css_selector = "a.instructor-info-action"
    world.css_click(css_selector)
    world.wait_for_visible("section.staff-modal")
@step(u'I can reset student attempts')
def reset_student_attempts(step):
    """
    Lettuce step: click the 'reset attempts' staff-debug link and wait
    for the AJAX call to finish.

    Renamed from view_staff_debug_info: this module defined several
    functions under that one name, so later defs shadowed earlier ones
    at module level (lettuce registers via the decorator, so behaviour
    is unchanged by the rename).
    """
    css_selector = "a.staff-debug-reset"
    world.css_click(css_selector)
    world.wait_for_ajax_complete()
@step(u'I cannot see delete student state link')
def cannot_see_delete_student_state_link(step):
    """
    Lettuce step: assert the 'delete student state' staff-debug link is
    absent. Renamed from view_staff_debug_info to stop shadowing the
    earlier step functions of the same name (registration is via the
    decorator, so behaviour is unchanged).
    """
    css_selector = "a.staff-debug-sdelete"
    world.is_css_not_present(css_selector)
@step(u'I cannot see rescore student submission link')
def cannot_see_rescore_student_submission_link(step):
    """
    Lettuce step: assert the 'rescore submission' staff-debug link is
    absent. Renamed from view_staff_debug_info to stop shadowing the
    earlier step functions of the same name (registration is via the
    decorator, so behaviour is unchanged).
    """
    css_selector = "a.staff-debug-rescore"
    world.is_css_not_present(css_selector)
|
nirmeshk/oh-mainline | refs/heads/master | vendor/packages/Django/django/conf/locale/vi/formats.py | 237 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Backslashes escape literal characters of the Vietnamese phrasing,
# e.g. "Ngày d tháng n năm Y" = "Day d month n year Y".
DATE_FORMAT = r'\N\gà\y d \t\há\n\g n \nă\m Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = r'H:i:s \N\gà\y d \t\há\n\g n \nă\m Y'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'H:i:s d-m-Y'
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
|
zaihui/wechatpy | refs/heads/master | wechatpy/client/api/group.py | 4 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from wechatpy.utils import to_text
from wechatpy.client.api.base import BaseWeChatAPI
class WeChatGroup(BaseWeChatAPI):
    """WeChat user-group management API."""

    def create(self, name):
        """
        Create a group

        Reference:
        http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

        :param name: Group name (30 characters max)
        :return: Response JSON data

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.group.create('New Group')

        """
        name = to_text(name)
        return self._post(
            'groups/create',
            data={'group': {'name': name}}
        )

    def get(self, user_id=None):
        """
        Query all groups, or the group ID a user belongs to

        Reference:
        http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

        :param user_id: User ID; when given, look up the group this
                        user belongs to, otherwise list all groups
        :return: List of all groups, or the user's group ID

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            group = client.group.get('openid')

        """
        if user_id is None:
            res = self._get(
                'groups/get',
                result_processor=lambda x: x['groups']
            )
        else:
            res = self._post(
                'groups/getid',
                data={'openid': user_id},
                result_processor=lambda x: x['groupid']
            )
        return res

    def update(self, group_id, name):
        """
        Rename a group

        Reference:
        http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

        :param group_id: Group ID, assigned by WeChat
        :param name: New group name (30 characters max)
        :return: Response JSON data

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.group.update(1234, 'New Name')

        """
        name = to_text(name)
        return self._post(
            'groups/update',
            data={
                'group': {
                    'id': int(group_id),
                    'name': name
                }
            }
        )

    def move_user(self, user_id, group_id):
        """
        Move user(s) to another group

        Reference:
        http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

        :param user_id: A single user ID, or a list/tuple of user IDs
                        for a batch move
        :param group_id: Target group ID
        :return: Response JSON data

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.group.move_user('openid', 1234)

        """
        data = {'to_groupid': group_id}
        # Batch endpoint for multiple user IDs, single-user endpoint otherwise
        if isinstance(user_id, (tuple, list)):
            endpoint = 'groups/members/batchupdate'
            data['openid_list'] = user_id
        else:
            endpoint = 'groups/members/update'
            data['openid'] = user_id
        return self._post(endpoint, data=data)

    def delete(self, group_id):
        """
        Delete a group

        Reference:
        http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

        :param group_id: Group ID
        :return: Response JSON data

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.group.delete(1234)

        """
        return self._post(
            'groups/delete',
            data={
                'group': {
                    'id': group_id
                }
            }
        )
mangadul/WhatsAppy | refs/heads/master | yowsup/layers/protocol_acks/protocolentities/__init__.py | 70 | from .ack import AckProtocolEntity
from .ack_incoming import IncomingAckProtocolEntity
from .ack_outgoing import OutgoingAckProtocolEntity |
mattvonrocketstein/smash | refs/heads/master | smashlib/ipy3x/qt/console/ansi_code_processor.py | 1 | """ Utilities for processing ANSI escape codes and special ASCII characters.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from collections import namedtuple
import re
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.utils.py3compat import string_types
#-----------------------------------------------------------------------------
# Constants and datatypes
#-----------------------------------------------------------------------------
# An action for erase requests (ED and EL commands).
EraseAction = namedtuple('EraseAction', ['action', 'area', 'erase_to'])

# An action for cursor move requests (CUU, CUD, CUF, CUB, CNL, CPL, CHA, CUP,
# and HVP commands).
# FIXME: Not implemented in AnsiCodeProcessor.
MoveAction = namedtuple('MoveAction', ['action', 'dir', 'unit', 'count'])

# An action for scroll requests (SU and ST) and form feeds.
ScrollAction = namedtuple('ScrollAction', ['action', 'dir', 'unit', 'count'])

# An action for the carriage return character
CarriageReturnAction = namedtuple('CarriageReturnAction', ['action'])

# An action for the \n character
NewLineAction = namedtuple('NewLineAction', ['action'])

# An action for the beep character
BeepAction = namedtuple('BeepAction', ['action'])

# An action for backspace
BackSpaceAction = namedtuple('BackSpaceAction', ['action'])

# Regular expressions.
# The literal '[' / ']' brackets are now properly escaped (raw string /
# doubled backslash).  The previous '\[' and '\]' forms relied on Python
# passing unknown escapes through verbatim, which raises an
# invalid-escape-sequence warning on modern interpreters; the resulting
# pattern bytes are unchanged.
CSI_COMMANDS = 'ABCDEFGHJKSTfmnsu'
CSI_SUBPATTERN = r'\[(.*?)([%s])' % CSI_COMMANDS
# Non-raw on purpose: \x07 (BEL) and \x1b (ESC) must be real control chars.
OSC_SUBPATTERN = '\\](.*?)[\x07\x1b]'
ANSI_PATTERN = ('\x01?\x1b(%s|%s)\x02?' %
                (CSI_SUBPATTERN, OSC_SUBPATTERN))
# Group layout: 1 = special char (\a, \b, \r, \n, \r\n); 2 = ANSI body;
# 3/4 = CSI parameters and final command char; 5 = OSC payload.
ANSI_OR_SPECIAL_PATTERN = re.compile(
    '(\a|\b|\r(?!\n)|\r?\n)|(?:%s)' % ANSI_PATTERN)
SPECIAL_PATTERN = re.compile('([\f])')
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class AnsiCodeProcessor(object):
    """ Translates special ASCII characters and ANSI escape codes into readable
        attributes. It also supports a few non-standard, xterm-specific codes.
    """

    # Whether to increase intensity or set boldness for SGR code 1.
    # (Different terminals handle this in different ways.)
    bold_text_enabled = False

    # We provide an empty default color map because subclasses will likely want
    # to use a custom color format.
    default_color_map = {}

    #---------------------------------------------------------------------------
    # AnsiCodeProcessor interface
    #---------------------------------------------------------------------------

    def __init__(self):
        # Pending parsed actions; consumers read this between the substrings
        # yielded by split_string().
        self.actions = []
        # Per-instance copy so runtime tweaks never mutate the class-level map.
        self.color_map = self.default_color_map.copy()
        self.reset_sgr()

    def reset_sgr(self):
        """ Reset graphics attributs to their default values.
        """
        self.intensity = 0
        self.italic = False
        self.bold = False
        self.underline = False
        # None means "use the terminal's default color".
        self.foreground_color = None
        self.background_color = None

    def split_string(self, string):
        """ Yields substrings for which the same escape code applies.

        A yield of None signals "no text, but actions are pending"; callers
        should inspect self.actions after each yield.
        """
        self.actions = []
        start = 0

        # strings ending with \r are assumed to be ending in \r\n since
        # \n is appended to output strings automatically. Accounting
        # for that, here.
        last_char = '\n' if len(string) > 0 and string[-1] == '\n' else None
        string = string[:-1] if last_char is not None else string

        for match in ANSI_OR_SPECIAL_PATTERN.finditer(string):
            # Text between the previous match and this one, with form feeds
            # converted into scroll actions.
            raw = string[start:match.start()]
            substring = SPECIAL_PATTERN.sub(self._replace_special, raw)
            if substring or self.actions:
                yield substring
                self.actions = []
            start = match.end()

            # Drop the unmatched alternation groups; groups[0] is then the
            # special character itself, or the ANSI sequence body.
            groups = [g for g in match.groups() if (g is not None)]
            g0 = groups[0]
            if g0 == '\a':
                self.actions.append(BeepAction('beep'))
                yield None
                self.actions = []
            elif g0 == '\r':
                self.actions.append(CarriageReturnAction('carriage-return'))
                yield None
                self.actions = []
            elif g0 == '\b':
                self.actions.append(BackSpaceAction('backspace'))
                yield None
                self.actions = []
            elif g0 == '\n' or g0 == '\r\n':
                self.actions.append(NewLineAction('newline'))
                yield g0
                self.actions = []
            else:
                # ANSI escape: groups[1] holds the ';'-separated parameters.
                params = [param for param in groups[1].split(';') if param]
                if g0.startswith('['):
                    # Case 1: CSI code.
                    try:
                        params = list(map(int, params))
                    except ValueError:
                        # Silently discard badly formed codes.
                        pass
                    else:
                        self.set_csi_code(groups[2], params)

                elif g0.startswith(']'):
                    # Case 2: OSC code.
                    self.set_osc_code(params)

        # Trailing text after the last escape sequence.
        raw = string[start:]
        substring = SPECIAL_PATTERN.sub(self._replace_special, raw)
        if substring or self.actions:
            yield substring

        # Re-emit the newline that was stripped at the top.
        if last_char is not None:
            self.actions.append(NewLineAction('newline'))
            yield last_char

    def set_csi_code(self, command, params=[]):
        """ Set attributes based on CSI (Control Sequence Introducer) code.

        Parameters
        ----------
        command : str
            The code identifier, i.e. the final character in the sequence.

        params : sequence of integers, optional
            The parameter codes for the command.
        """
        # NOTE(review): mutable default argument; harmless today only because
        # the default list is never mutated on any path -- worth replacing
        # with None eventually.
        if command == 'm':   # SGR - Select Graphic Rendition
            if params:
                self.set_sgr_code(params)
            else:
                # Empty SGR means "reset all attributes".
                self.set_sgr_code([0])

        elif (command == 'J' or # ED - Erase Data
              command == 'K'):  # EL - Erase in Line
            code = params[0] if params else 0
            if 0 <= code <= 2:
                area = 'screen' if command == 'J' else 'line'
                if code == 0:
                    erase_to = 'end'
                elif code == 1:
                    erase_to = 'start'
                elif code == 2:
                    erase_to = 'all'
                self.actions.append(EraseAction('erase', area, erase_to))

        elif (command == 'S' or # SU - Scroll Up
              command == 'T'):  # SD - Scroll Down
            dir = 'up' if command == 'S' else 'down'
            count = params[0] if params else 1
            self.actions.append(ScrollAction('scroll', dir, 'line', count))

    def set_osc_code(self, params):
        """ Set attributes based on OSC (Operating System Command) parameters.

        Parameters
        ----------
        params : sequence of str
            The parameters for the command.
        """
        try:
            command = int(params.pop(0))
        except (IndexError, ValueError):
            # No command, or a non-numeric one: ignore the sequence.
            return

        if command == 4:
            # xterm-specific: set color number to color spec.
            try:
                color = int(params.pop(0))
                spec = params.pop(0)
                self.color_map[color] = self._parse_xterm_color_spec(spec)
            except (IndexError, ValueError):
                pass

    def set_sgr_code(self, params):
        """ Set attributes based on SGR (Select Graphic Rendition) codes.

        Parameters
        ----------
        params : sequence of ints
            A list of SGR codes for one or more SGR commands. Usually this
            sequence will have one element per command, although certain
            xterm-specific commands requires multiple elements.

        Note: this method consumes (pops from) *params* and recurses until
        the sequence is exhausted.
        """
        # Always consume the first parameter.
        if not params:
            return
        code = params.pop(0)

        if code == 0:
            self.reset_sgr()
        elif code == 1:
            if self.bold_text_enabled:
                self.bold = True
            else:
                self.intensity = 1
        elif code == 2:
            self.intensity = 0
        elif code == 3:
            self.italic = True
        elif code == 4:
            self.underline = True
        elif code == 22:
            self.intensity = 0
            self.bold = False
        elif code == 23:
            self.italic = False
        elif code == 24:
            self.underline = False
        elif code >= 30 and code <= 37:
            self.foreground_color = code - 30
        elif code == 38 and params and params.pop(0) == 5:
            # xterm-specific: 256 color support.
            if params:
                self.foreground_color = params.pop(0)
        elif code == 39:
            self.foreground_color = None
        elif code >= 40 and code <= 47:
            self.background_color = code - 40
        elif code == 48 and params and params.pop(0) == 5:
            # xterm-specific: 256 color support.
            if params:
                self.background_color = params.pop(0)
        elif code == 49:
            self.background_color = None

        # Recurse with unconsumed parameters.
        self.set_sgr_code(params)

    #---------------------------------------------------------------------------
    # Protected interface
    #---------------------------------------------------------------------------

    def _parse_xterm_color_spec(self, spec):
        # 'rgb:<r>/<g>/<b>' with hex components -> int tuple.
        if spec.startswith('rgb:'):
            return tuple(map(lambda x: int(x, 16), spec[4:].split('/')))
        # 'rgbi:<r>/<g>/<b>' with 0.0-1.0 floats -> 0-255 int tuple.
        elif spec.startswith('rgbi:'):
            return tuple(map(lambda x: int(float(x) * 255),
                             spec[5:].split('/')))
        elif spec == '?':
            raise ValueError('Unsupported xterm color spec')
        # Anything else is assumed to be a color name.
        return spec

    def _replace_special(self, match):
        # SPECIAL_PATTERN.sub callback: turn a form feed into a page-scroll
        # action and remove it from the text.
        special = match.group(1)
        if special == '\f':
            self.actions.append(ScrollAction('scroll', 'down', 'page', 1))
        return ''
class QtAnsiCodeProcessor(AnsiCodeProcessor):
    """ Translates ANSI escape codes into QTextCharFormats.
    """

    # A map from ANSI color codes to SVG color names or RGB(A) tuples.
    darkbg_color_map = {
        0  : 'black',       # black
        1  : 'darkred',     # red
        2  : 'darkgreen',   # green
        3  : 'brown',       # yellow
        4  : 'darkblue',    # blue
        5  : 'darkviolet',  # magenta
        6  : 'steelblue',   # cyan
        7  : 'grey',        # white
        8  : 'grey',        # black (bright)
        9  : 'red',         # red (bright)
        10 : 'lime',        # green (bright)
        11 : 'yellow',      # yellow (bright)
        12 : 'deepskyblue', # blue (bright)
        13 : 'magenta',     # magenta (bright)
        14 : 'cyan',        # cyan (bright)
        15 : 'white'}       # white (bright)

    # Set the default color map for super class.
    default_color_map = darkbg_color_map.copy()

    def get_color(self, color, intensity=0):
        """ Return a QColor for the given ANSI color code, or None when no
            color can be constructed from the current color map.
        """
        if color is None:
            return None

        # Promote the basic colors (0-7) to their bright variants (8-15)
        # when intensity is requested.
        if intensity > 0 and color < 8:
            color += 8

        spec = self.color_map.get(color)
        if isinstance(spec, string_types):
            # If this is an X11 color name, we just hope there is a close SVG
            # color name. We could use QColor's static method
            # 'setAllowX11ColorNames()', but this is global and only available
            # on X11. It seems cleaner to aim for uniformity of behavior.
            return QtGui.QColor(spec)
        if isinstance(spec, (tuple, list)):
            return QtGui.QColor(*spec)
        return None

    def get_format(self):
        """ Build a QTextCharFormat that encodes the current style attributes.
        """
        fmt = QtGui.QTextCharFormat()

        # Foreground and background colors (either may be absent).
        fg = self.get_color(self.foreground_color, self.intensity)
        if fg is not None:
            fmt.setForeground(fg)
        bg = self.get_color(self.background_color, self.intensity)
        if bg is not None:
            fmt.setBackground(bg)

        # Font weight/style options.
        weight = QtGui.QFont.Bold if self.bold else QtGui.QFont.Normal
        fmt.setFontWeight(weight)
        fmt.setFontItalic(self.italic)
        fmt.setFontUnderline(self.underline)
        return fmt

    def set_background_color(self, color):
        """ Given a background color (a QColor), attempt to set a color map
            that will be aesthetically pleasing.
        """
        # Start from the dark-background defaults.
        self.default_color_map = self.darkbg_color_map.copy()

        if color.value() >= 127:
            # Colors appropriate for a terminal with a light background. For
            # now, only use non-bright colors...
            for idx in range(8):
                self.default_color_map[idx + 8] = self.default_color_map[idx]

            # ...and replace white with black.
            self.default_color_map[7] = self.default_color_map[15] = 'black'

        # Update the current color map with the new defaults.
        self.color_map.update(self.default_color_map)
|
hail-is/hail | refs/heads/master | hail/python/hail/ir/register_aggregators.py | 1 | from .ir import register_aggregator
def register_aggregators():
    """Register every aggregator signature visible from Python with the backend.

    Each ``register_aggregator(name, constructor_arg_types, value_arg_types,
    return_type)`` call declares one overload: the aggregator name, the types
    of its constructor arguments, the types of the values it consumes, and
    its result type.  '?'-prefixed names (e.g. ``?in``) are type variables.
    """
    from hail.expr.types import dtype

    # ApproxCDF: one overload per numeric stream type; the int32 constructor
    # argument is the sketch size.
    register_aggregator('ApproxCDF', (dtype('int32'),), (dtype('int32'),),
                        dtype('struct{values:array<int32>,ranks:array<int64>,_compaction_counts:array<int32>}'))
    register_aggregator('ApproxCDF', (dtype('int32'),), (dtype('int64'),),
                        dtype('struct{values:array<int64>,ranks:array<int64>,_compaction_counts:array<int32>}'))
    register_aggregator('ApproxCDF', (dtype('int32'),), (dtype('float32'),),
                        dtype('struct{values:array<float32>,ranks:array<int64>,_compaction_counts:array<int32>}'))
    register_aggregator('ApproxCDF', (dtype('int32'),), (dtype('float64'),),
                        dtype('struct{values:array<float64>,ranks:array<int64>,_compaction_counts:array<int32>}'))

    register_aggregator('Collect', (), (dtype("?in"),), dtype('array<?in>'))

    # NOTE(review): 'tint32' is inconsistent with the 'int32' spelling used in
    # every other dtype string in this file -- confirm the dtype parser
    # accepts it before changing anything.
    info_score_aggregator_type = dtype('struct{score:float64,n_included:tint32}')
    register_aggregator('InfoScore', (), (dtype('array<float64>'),), info_score_aggregator_type)

    # Sum/Product over scalars and (element-wise) arrays.
    register_aggregator('Sum', (), (dtype('int64'),), dtype('int64'))
    register_aggregator('Sum', (), (dtype('float64'),), dtype('float64'))
    register_aggregator('Sum', (), (dtype('array<int64>'),), dtype('array<int64>'))
    register_aggregator('Sum', (), (dtype('array<float64>'),), dtype('array<float64>'))
    register_aggregator('CollectAsSet', (), (dtype("?in"),), dtype('set<?in>'))
    register_aggregator('Product', (), (dtype('int64'),), dtype('int64'))
    register_aggregator('Product', (), (dtype('float64'),), dtype('float64'))

    hwe_aggregator_type = dtype('struct { het_freq_hwe: float64, p_value: float64 }')
    register_aggregator('HardyWeinberg', (), (dtype('call'),), hwe_aggregator_type)

    # Min/Max for every ordered primitive type.
    register_aggregator('Max', (), (dtype('bool'),), dtype('bool'))
    register_aggregator('Max', (), (dtype('int32'),), dtype('int32'))
    register_aggregator('Max', (), (dtype('int64'),), dtype('int64'))
    register_aggregator('Max', (), (dtype('float32'),), dtype('float32'))
    register_aggregator('Max', (), (dtype('float64'),), dtype('float64'))
    register_aggregator('Min', (), (dtype('bool'),), dtype('bool'))
    register_aggregator('Min', (), (dtype('int32'),), dtype('int32'))
    register_aggregator('Min', (), (dtype('int64'),), dtype('int64'))
    register_aggregator('Min', (), (dtype('float32'),), dtype('float32'))
    register_aggregator('Min', (), (dtype('float64'),), dtype('float64'))

    register_aggregator('Count', (), (), dtype('int64'))
    register_aggregator('Counter', (), (dtype('?in'),), dtype('dict<?in, int64>'))
    # Take/TakeBy: the int32 constructor argument is the number of elements.
    register_aggregator('Take', (dtype('int32'),), (dtype('?in'),), dtype('array<?in>'))
    register_aggregator('TakeBy', (dtype('int32'),), (dtype('?in'), dtype('?key'),), dtype('array<?in>'))

    downsample_aggregator_type = dtype('array<tuple(float64, float64, array<str>)>')
    register_aggregator('Downsample', (dtype('int32'),), (dtype('float64'), dtype('float64'), dtype('array<?T>'),), downsample_aggregator_type)

    # Genetics-specific aggregators.
    call_stats_aggregator_type = dtype('struct{AC: array<int32>,AF:array<float64>,AN:int32,homozygote_count:array<int32>}')
    register_aggregator('CallStats', (dtype('int32'),), (dtype('call'),), call_stats_aggregator_type)

    inbreeding_aggregator_type = dtype('struct{f_stat:float64,n_called:int64,expected_homs:float64,observed_homs:int64}')
    register_aggregator('Inbreeding', (), (dtype('call'), dtype('float64'),), inbreeding_aggregator_type)

    linreg_aggregator_type = dtype('struct{xty:array<float64>,beta:array<float64>,diag_inv:array<float64>,beta0:array<float64>}')
    register_aggregator('LinearRegression', (dtype('int32'), dtype('int32'),), (dtype('float64'), dtype('array<float64>'),), linreg_aggregator_type)

    register_aggregator('PrevNonnull', (), (dtype('?in'),), dtype('?in'))

    register_aggregator('ImputeType', (), (dtype('str'),),
                        dtype('struct{anyNonMissing: bool,'
                              'allDefined: bool,'
                              'supportsBool: bool,'
                              'supportsInt32: bool,'
                              'supportsInt64: bool,'
                              'supportsFloat64: bool}'))
|
pywinauto/pywinauto | refs/heads/atspi | pywinauto/windows/win32functions.py | 1 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines Windows(tm) functions"""
from ctypes import LibraryLoader
from ctypes import WinDLL
from ctypes import wintypes
from ctypes import c_short
from ctypes import WINFUNCTYPE
from ctypes import c_void_p
from ctypes import c_int
from ctypes import c_uint
from ctypes import byref
from ctypes import POINTER
from ctypes import c_ubyte
from ctypes import c_size_t
from . import win32defines, win32structures
from ..actionlogger import ActionLogger
# Quote: "If you want cached libs without polluting ctypes.cdll or
# ctypes.windll, just create your own instance such as
# windll = ctypes.LibraryLoader(ctypes.WinDLL)."
# see https://bugs.python.org/issue22552
windll = LibraryLoader(WinDLL)
SHORT = c_short
CreateBrushIndirect = windll.gdi32.CreateBrushIndirect
CreateBrushIndirect.restype = wintypes.HBRUSH
CreateBrushIndirect.argtypes = [
c_void_p,
]
CreateDC = windll.gdi32.CreateDCW
CreateDC.restype = wintypes.HDC
CreateDC.argtypes = [
wintypes.LPCWSTR,
wintypes.LPCWSTR,
wintypes.LPCWSTR,
c_void_p,
]
CreateFontIndirect = windll.gdi32.CreateFontIndirectW
CreateFontIndirect.restype = wintypes.HFONT
CreateFontIndirect.argtypes = [
POINTER(win32structures.LOGFONTW),
]
CreatePen = windll.gdi32.CreatePen
CreatePen.restype = wintypes.HPEN
CreatePen.argtypes = [
c_int,
c_int,
wintypes.COLORREF,
]
DeleteDC = windll.gdi32.DeleteDC
DeleteDC.restype = wintypes.BOOL
DeleteDC.argtypes = [
wintypes.HDC,
]
GetObject = windll.gdi32.GetObjectW
GetObject.restype = c_int
GetObject.argtypes = [
wintypes.HANDLE,
c_int,
wintypes.LPVOID,
]
DeleteObject = windll.gdi32.DeleteObject
DeleteObject.restype = wintypes.BOOL
DeleteObject.argtypes = [
wintypes.HGDIOBJ,
]
DrawText = windll.user32.DrawTextW
DrawText.restype = c_int
DrawText.argtypes = [
wintypes.HDC,
wintypes.LPCWSTR,
c_int,
POINTER(wintypes.RECT),
wintypes.UINT,
]
TextOut = windll.gdi32.TextOutW
TextOut.restype = wintypes.BOOL
TextOut.argtypes = [
wintypes.HDC,
c_int,
c_int,
wintypes.LPCWSTR,
c_int,
]
Rectangle = windll.gdi32.Rectangle
Rectangle.restype = wintypes.BOOL
Rectangle.argtypes = [
wintypes.HDC,
c_int,
c_int,
c_int,
c_int,
]
SelectObject = windll.gdi32.SelectObject
SelectObject.restype = wintypes.HGDIOBJ
SelectObject.argtypes = [
wintypes.HDC,
wintypes.HGDIOBJ,
]
GetStockObject = windll.gdi32.GetStockObject
GetStockObject.restype = wintypes.HGDIOBJ
GetStockObject.argtypes = [
c_int,
]
GetSystemMetrics = windll.user32.GetSystemMetrics
GetSystemMetrics.restype = c_int
GetSystemMetrics.argtypes = [
c_int,
]
GetTextMetrics = windll.gdi32.GetTextMetricsW
GetTextMetrics.restype = wintypes.BOOL
GetTextMetrics.argtypes = [
wintypes.HDC,
POINTER(win32structures.TEXTMETRICW),
]
EnumChildWindows = windll.user32.EnumChildWindows
EnumChildWindows.restype = wintypes.BOOL
EnumChildWindows.argtypes = [
wintypes.HWND,
WINFUNCTYPE(wintypes.BOOL, wintypes.HWND, wintypes.LPARAM),
wintypes.LPARAM,
]
EnumDesktopWindows = windll.user32.EnumDesktopWindows
EnumDesktopWindows.restype = wintypes.BOOL
EnumDesktopWindows.argtypes = [
wintypes.LPVOID,
WINFUNCTYPE(wintypes.BOOL, wintypes.HWND, wintypes.LPARAM),
wintypes.LPARAM,
]
EnumWindows = windll.user32.EnumWindows
EnumWindows.restype = wintypes.BOOL
EnumWindows.argtypes = [
WINFUNCTYPE(wintypes.BOOL, wintypes.HWND, wintypes.LPARAM),
wintypes.LPARAM,
]
GetDC = windll.user32.GetDC
GetDC.restype = wintypes.LPVOID
GetDC.argtypes = [
wintypes.HWND,
]
GetDesktopWindow = windll.user32.GetDesktopWindow
GetDesktopWindow.restype = wintypes.HWND
GetDesktopWindow.argtypes = [
]
SendInput = windll.user32.SendInput
SendInput.restype = wintypes.UINT
SendInput.argtypes = [
wintypes.UINT,
c_void_p, # using POINTER(win32structures.INPUT) needs rework in keyboard.py
c_int,
]
SetCursorPos = windll.user32.SetCursorPos
SetCursorPos.restype = wintypes.BOOL
SetCursorPos.argtypes = [
c_int,
c_int,
]
GetCursorPos = windll.user32.GetCursorPos
GetCursorPos.restype = wintypes.BOOL
GetCursorPos.argtypes = [
POINTER(wintypes.POINT),
]
GetCaretPos = windll.user32.GetCaretPos
GetCaretPos.restype = wintypes.BOOL
GetCaretPos.argtypes = [
POINTER(wintypes.POINT),
]
GetKeyboardState = windll.user32.GetKeyboardState
GetKeyboardState.restype = wintypes.BOOL
GetKeyboardState.argtypes = [
POINTER(c_ubyte),
]
SetKeyboardState = windll.user32.SetKeyboardState
SetKeyboardState.restype = wintypes.BOOL
SetKeyboardState.argtypes = [
POINTER(c_ubyte),
]
GetKeyboardLayout = windll.user32.GetKeyboardLayout
GetKeyboardLayout.restype = wintypes.HKL
GetKeyboardLayout.argtypes = [
wintypes.DWORD,
]
VkKeyScanW = windll.user32.VkKeyScanW
VkKeyScanW.restype = SHORT
VkKeyScanW.argtypes = [
wintypes.WCHAR,
]
VkKeyScanExW = windll.user32.VkKeyScanExW
VkKeyScanExW.restype = SHORT
VkKeyScanExW.argtypes = [
wintypes.WCHAR,
wintypes.HKL,
]
GetMessageExtraInfo = windll.user32.GetMessageExtraInfo
MapVirtualKeyW = windll.user32.MapVirtualKeyW
# menu functions
# DrawMenuBar redraws the menu bar of the specified window.
DrawMenuBar = windll.user32.DrawMenuBar
DrawMenuBar.restype = wintypes.BOOL
# Fixed typo: the prototype was assigned to 'argstype', an attribute ctypes
# ignores, so argument checking was silently disabled.
DrawMenuBar.argtypes = [
    wintypes.HWND,
]
GetMenu = windll.user32.GetMenu
GetMenu.restype = wintypes.HMENU
GetMenu.argtypes = [
wintypes.HWND,
]
GetMenuBarInfo = windll.user32.GetMenuBarInfo
GetMenuBarInfo.restype = wintypes.BOOL
GetMenuBarInfo.argtypes = [
wintypes.HWND,
wintypes.LONG,
wintypes.LONG,
POINTER(win32structures.MENUBARINFO),
]
GetMenuInfo = windll.user32.GetMenuInfo
GetMenuInfo.restype = wintypes.BOOL
GetMenuInfo.argtypes = [
wintypes.HWND,
POINTER(win32structures.MENUINFO),
]
GetMenuItemCount = windll.user32.GetMenuItemCount
GetMenuItemCount.restype = c_int
GetMenuItemCount.argtypes = [
wintypes.HMENU,
]
GetMenuItemInfo = windll.user32.GetMenuItemInfoW
GetMenuItemInfo.restype = wintypes.BOOL
GetMenuItemInfo.argtypes = [
wintypes.HMENU,
wintypes.UINT,
wintypes.BOOL,
POINTER(win32structures.MENUITEMINFOW),
]
SetMenuItemInfo = windll.user32.SetMenuItemInfoW
SetMenuItemInfo.restype = wintypes.BOOL
SetMenuItemInfo.argtypes = [
wintypes.HMENU,
wintypes.UINT,
wintypes.BOOL,
POINTER(win32structures.MENUITEMINFOW),
]
GetMenuItemRect = windll.user32.GetMenuItemRect
GetMenuItemRect.restype = wintypes.BOOL
GetMenuItemRect.argtypes = [
wintypes.HWND,
wintypes.HMENU,
wintypes.UINT,
POINTER(wintypes.RECT),
]
CheckMenuItem = windll.user32.CheckMenuItem
CheckMenuItem.restype = wintypes.DWORD
CheckMenuItem.argtypes = [
wintypes.HMENU,
wintypes.UINT,
wintypes.UINT,
]
GetMenuState = windll.user32.GetMenuState
GetMenuState.restype = wintypes.UINT
GetMenuState.argtypes = [
wintypes.HMENU,
wintypes.UINT,
wintypes.UINT,
]
GetSubMenu = windll.user32.GetSubMenu
GetSubMenu.restype = wintypes.HMENU
GetSubMenu.argtypes = [
wintypes.HMENU,
c_int,
]
GetSystemMenu = windll.user32.GetSystemMenu
GetSystemMenu.restype = wintypes.HMENU
GetSystemMenu.argtypes = [
wintypes.HWND,
wintypes.BOOL,
]
HiliteMenuItem = windll.user32.HiliteMenuItem
HiliteMenuItem.restype = wintypes.BOOL
HiliteMenuItem.argtypes = [
wintypes.HWND,
wintypes.HMENU,
wintypes.UINT,
wintypes.UINT,
]
IsMenu = windll.user32.IsMenu
IsMenu.restype = wintypes.BOOL
IsMenu.argtypes = [
wintypes.HMENU,
]
MenuItemFromPoint = windll.user32.MenuItemFromPoint
MenuItemFromPoint.restype = c_int
MenuItemFromPoint.argtypes = [
wintypes.HWND,
wintypes.HMENU,
POINTER(wintypes.POINT),
]
BringWindowToTop = windll.user32.BringWindowToTop
BringWindowToTop.restype = wintypes.BOOL
BringWindowToTop.argtypes = [
wintypes.HWND,
]
GetParent = windll.user32.GetParent
GetParent.restype = wintypes.HWND
GetParent.argtypes = [
wintypes.HWND,
]
GetWindow = windll.user32.GetWindow
GetWindow.restype = wintypes.HWND
GetWindow.argtypes = [
wintypes.HWND,
wintypes.UINT,
]
ShowWindow = windll.user32.ShowWindow
ShowWindow.restype = wintypes.BOOL
ShowWindow.argtypes = [
wintypes.HWND,
c_int,
]
GetWindowContextHelpId = windll.user32.GetWindowContextHelpId
GetWindowContextHelpId.restype = wintypes.DWORD
GetWindowContextHelpId.argtypes = [
wintypes.HWND,
]
GetWindowLong = windll.user32.GetWindowLongW
GetWindowLong.restype = wintypes.LONG
GetWindowLong.argtypes = [
wintypes.HWND,
c_int,
]
GetWindowPlacement = windll.user32.GetWindowPlacement
GetWindowPlacement.restype = wintypes.BOOL
GetWindowPlacement.argtypes = [
wintypes.HWND,
POINTER(win32structures.WINDOWPLACEMENT),
]
GetWindowRect = windll.user32.GetWindowRect
GetWindowRect.restype = wintypes.BOOL
GetWindowRect.argtypes = [
wintypes.HWND,
POINTER(wintypes.RECT),
]
GetWindowText = windll.user32.GetWindowTextW
GetWindowText.restype = c_int
GetWindowText.argtypes = [
wintypes.HWND,
wintypes.LPWSTR,
c_int,
]
GetWindowTextLength = windll.user32.GetWindowTextLengthW
GetWindowTextLength.restype = c_int
GetWindowTextLength.argtypes = [
wintypes.HWND,
]
GetClassName = windll.user32.GetClassNameW
GetClassName.restype = c_int
GetClassName.argtypes = [
wintypes.HWND,
wintypes.LPWSTR,
c_int,
]
GetClientRect = windll.user32.GetClientRect
GetClientRect.restype = wintypes.BOOL
GetClientRect.argtypes = [
wintypes.HWND,
POINTER(wintypes.RECT),
]
IsChild = windll.user32.IsChild
IsChild.restype = wintypes.BOOL
IsChild.argtypes = [
wintypes.HWND,
wintypes.HWND,
]
IsWindow = windll.user32.IsWindow
IsWindow.restype = wintypes.BOOL
IsWindow.argtypes = [
wintypes.HWND,
]
IsWindowUnicode = windll.user32.IsWindowUnicode
IsWindowUnicode.restype = wintypes.BOOL
IsWindowUnicode.argtypes = [
wintypes.HWND,
]
IsWindowVisible = windll.user32.IsWindowVisible
IsWindowVisible.restype = wintypes.BOOL
IsWindowVisible.argtypes = [
wintypes.HWND,
]
IsWindowEnabled = windll.user32.IsWindowEnabled
IsWindowEnabled.restype = wintypes.BOOL
IsWindowEnabled.argtypes = [
wintypes.HWND,
]
ClientToScreen = windll.user32.ClientToScreen
ClientToScreen.restype = wintypes.BOOL
ClientToScreen.argtypes = [
wintypes.HWND,
POINTER(wintypes.POINT),
]
ScreenToClient = windll.user32.ScreenToClient
ScreenToClient.restype = wintypes.BOOL
ScreenToClient.argtypes = [
wintypes.HWND,
POINTER(wintypes.POINT),
]
GetCurrentThreadId = windll.kernel32.GetCurrentThreadId
GetCurrentThreadId.restype = wintypes.DWORD
GetCurrentThreadId.argtypes = [
]
GetWindowThreadProcessId = windll.user32.GetWindowThreadProcessId
GetWindowThreadProcessId.restype = wintypes.DWORD
GetWindowThreadProcessId.argtypes = [
wintypes.HWND,
POINTER(wintypes.DWORD),
]
GetGUIThreadInfo = windll.user32.GetGUIThreadInfo
GetGUIThreadInfo.restype = wintypes.BOOL
GetGUIThreadInfo.argtypes = [
wintypes.DWORD,
POINTER(win32structures.GUITHREADINFO),
]
AttachThreadInput = windll.user32.AttachThreadInput
AttachThreadInput.restype = wintypes.BOOL
AttachThreadInput.argtypes = [
wintypes.DWORD,
wintypes.DWORD,
wintypes.BOOL
]
OpenProcess = windll.kernel32.OpenProcess
OpenProcess.restype = wintypes.HANDLE
OpenProcess.argtypes = [
wintypes.DWORD,
wintypes.BOOL,
wintypes.DWORD,
]
CloseHandle = windll.kernel32.CloseHandle
CloseHandle.restype = wintypes.BOOL
CloseHandle.argtypes = [
wintypes.HANDLE,
]
CreateProcess = windll.kernel32.CreateProcessW
CreateProcess.restype = wintypes.BOOL
CreateProcess.argtypes = [
wintypes.LPCWSTR,
wintypes.LPWSTR,
POINTER(win32structures.SECURITY_ATTRIBUTES),
POINTER(win32structures.SECURITY_ATTRIBUTES),
wintypes.BOOL,
wintypes.DWORD,
wintypes.LPVOID,
wintypes.LPCWSTR,
POINTER(win32structures.STARTUPINFOW),
POINTER(win32structures.PROCESS_INFORMATION),
]
TerminateProcess = windll.kernel32.TerminateProcess
TerminateProcess.restype = wintypes.BOOL
TerminateProcess.argtypes = [
wintypes.HANDLE,
wintypes.UINT,
]
ExitProcess = windll.kernel32.ExitProcess
ExitProcess.restype = None
ExitProcess.argtypes = [
wintypes.UINT,
]
ReadProcessMemory = windll.kernel32.ReadProcessMemory
ReadProcessMemory.restype = wintypes.BOOL
ReadProcessMemory.argtypes = [
wintypes.HANDLE,
wintypes.LPVOID,
wintypes.LPVOID,
c_size_t,
POINTER(c_size_t),
]
GlobalAlloc = windll.kernel32.GlobalAlloc
GlobalLock = windll.kernel32.GlobalLock
GlobalUnlock = windll.kernel32.GlobalUnlock
SendMessage = windll.user32.SendMessageW
SendMessage.restype = wintypes.LPARAM
SendMessage.argtypes = [
wintypes.HWND,
wintypes.UINT,
wintypes.WPARAM,
wintypes.LPVOID,
]
SendMessageTimeout = windll.user32.SendMessageTimeoutW
SendMessageTimeout.restype = wintypes.LPARAM
SendMessageTimeout.argtypes = [
wintypes.HWND,
wintypes.UINT,
wintypes.WPARAM,
wintypes.LPARAM,
wintypes.UINT,
wintypes.UINT,
win32structures.PDWORD_PTR,
]
PostMessage = windll.user32.PostMessageW
PostMessage.restype = wintypes.BOOL
PostMessage.argtypes = [
wintypes.HWND,
wintypes.UINT,
wintypes.WPARAM,
wintypes.LPARAM,
]
GetMessage = windll.user32.GetMessageW
GetMessage.restype = wintypes.BOOL
GetMessage.argtypes = [
POINTER(wintypes.MSG),
wintypes.HWND,
wintypes.UINT,
wintypes.UINT,
]
RegisterWindowMessage = windll.user32.RegisterWindowMessageW
RegisterWindowMessage.restype = wintypes.UINT
RegisterWindowMessage.argtypes = [
wintypes.LPCWSTR,
]
MoveWindow = windll.user32.MoveWindow
MoveWindow.restype = wintypes.BOOL
MoveWindow.argtypes = [
wintypes.HWND,
c_int,
c_int,
c_int,
c_int,
wintypes.BOOL,
]
EnableWindow = windll.user32.EnableWindow
EnableWindow.restype = wintypes.BOOL
EnableWindow.argtypes = [
wintypes.HWND,
wintypes.BOOL,
]
SetFocus = windll.user32.SetFocus
SetFocus.restype = wintypes.HWND
SetFocus.argtypes = [
wintypes.HWND,
]
SetWindowLong = windll.user32.SetWindowLongW
SetWindowLong.restype = wintypes.LONG
SetWindowLong.argtypes = [
wintypes.HWND,
c_int,
wintypes.LONG,
]
try:
SetWindowLongPtr = windll.user32.SetWindowLongPtrW
SetWindowLongPtr.argtypes = [wintypes.HWND, c_int, wintypes.LONG_PTR]
SetWindowLongPtr.restype = wintypes.LONG_PTR
except AttributeError:
SetWindowLongPtr = SetWindowLong
SystemParametersInfo = windll.user32.SystemParametersInfoW
SystemParametersInfo.restype = wintypes.UINT
SystemParametersInfo.argtypes = [
wintypes.UINT,
wintypes.UINT,
wintypes.LPVOID, # should map well to PVOID
wintypes.UINT,
]
VirtualAllocEx = windll.kernel32.VirtualAllocEx
VirtualAllocEx.restype = wintypes.LPVOID
VirtualAllocEx.argtypes = [
wintypes.HANDLE,
wintypes.LPVOID,
c_size_t,
wintypes.DWORD,
wintypes.DWORD,
]
VirtualFreeEx = windll.kernel32.VirtualFreeEx
VirtualFreeEx.restype = wintypes.BOOL
VirtualFreeEx.argtypes = [
wintypes.HANDLE,
wintypes.LPVOID,
c_size_t,
wintypes.DWORD,
]
VirtualAlloc = windll.kernel32.VirtualAlloc
VirtualAlloc.restype = wintypes.LPVOID
VirtualAlloc.argtypes = [
wintypes.LPVOID,
c_size_t,
wintypes.DWORD,
wintypes.DWORD,
]
VirtualFree = windll.kernel32.VirtualFree
# Fixed typo: 'retype' -> 'restype'.  The misspelled attribute was ignored by
# ctypes, so the return value defaulted to c_int instead of BOOL.
VirtualFree.restype = wintypes.BOOL
VirtualFree.argtypes = [
    wintypes.LPVOID,
    c_size_t,
    wintypes.DWORD,
]
WriteProcessMemory = windll.kernel32.WriteProcessMemory
WriteProcessMemory.restype = wintypes.BOOL
WriteProcessMemory.argtypes = [
wintypes.HANDLE,
wintypes.LPVOID,
wintypes.LPVOID,
c_size_t,
POINTER(c_size_t),
]
ReleaseCapture = windll.user32.ReleaseCapture
ReleaseCapture.restype = wintypes.BOOL
ReleaseCapture.argtypes = [
]
WindowFromPoint = windll.user32.WindowFromPoint
WindowFromPoint.restype = wintypes.HWND
WindowFromPoint.argtypes = [
wintypes.POINT,
]
WaitForSingleObject = windll.kernel32.WaitForSingleObject
WaitForSingleObject.restype = wintypes.DWORD
WaitForSingleObject.argtypes = [
wintypes.HANDLE,
wintypes.DWORD,
]
WaitForInputIdle = windll.user32.WaitForInputIdle
WaitForInputIdle.restype = wintypes.DWORD
WaitForInputIdle.argtypes = [
wintypes.HANDLE,
wintypes.DWORD,
]
IsHungAppWindow = windll.user32.IsHungAppWindow
IsHungAppWindow.restype = wintypes.BOOL
IsHungAppWindow.argtypes = [
wintypes.HWND,
]
GetModuleFileNameEx = windll.psapi.GetModuleFileNameExW
GetModuleFileNameEx.restype = wintypes.DWORD
GetModuleFileNameEx.argtypes = [
wintypes.HANDLE,
wintypes.HMODULE,
wintypes.LPWSTR,
wintypes.DWORD,
]
GetClipboardData = windll.user32.GetClipboardData
GetClipboardData.restype = wintypes.HANDLE
GetClipboardData.argtypes = [
wintypes.UINT,
]
OpenClipboard = windll.user32.OpenClipboard
OpenClipboard.restype = wintypes.BOOL
OpenClipboard.argtypes = [
wintypes.HWND,
]
EmptyClipboard = windll.user32.EmptyClipboard
EmptyClipboard.restype = wintypes.BOOL
EmptyClipboard.argtypes = [
]
CloseClipboard = windll.user32.CloseClipboard
CloseClipboard.restype = wintypes.BOOL
CloseClipboard.argtypes = [
]
CountClipboardFormats = windll.user32.CountClipboardFormats
CountClipboardFormats.restype = c_int
CountClipboardFormats.argtypes = [
]
EnumClipboardFormats = windll.user32.EnumClipboardFormats
EnumClipboardFormats.restype = wintypes.UINT
EnumClipboardFormats.argtypes = [
wintypes.UINT,
]
GetClipboardFormatName = windll.user32.GetClipboardFormatNameW
GetClipboardFormatName.restype = c_int
GetClipboardFormatName.argtypes = [
wintypes.UINT,
wintypes.LPWSTR,
c_int,
]
TranslateMessage = windll.user32.TranslateMessage
TranslateMessage.argtypes = [
POINTER(wintypes.MSG)
]
DispatchMessageW = windll.user32.DispatchMessageW
DispatchMessageW.argtypes = [
POINTER(wintypes.MSG)
]
PeekMessageW = windll.user32.PeekMessageW
# Fixed typo: 'restypes' -> 'restype'.  The plural attribute is ignored by
# ctypes, leaving the default c_int return type in effect.
PeekMessageW.restype = wintypes.BOOL
PeekMessageW.argtypes = [
    POINTER(wintypes.MSG),
    wintypes.HWND,
    c_uint,
    c_uint,
    c_uint,
]
# DPIAware API funcs are not available on WinXP
try:
IsProcessDPIAware = windll.user32.IsProcessDPIAware
SetProcessDPIAware = windll.user32.SetProcessDPIAware
except AttributeError:
IsProcessDPIAware = None
SetProcessDPIAware = None
# DpiAwareness API funcs are available only from win 8.1 and greater
# Supported types of DPI awareness described here:
# https://msdn.microsoft.com/en-us/library/windows/desktop/dn280512(v=vs.85).aspx
# typedef enum _Process_DPI_Awareness {
# Process_DPI_Unaware = 0,
# Process_System_DPI_Aware = 1,
# Process_Per_Monitor_DPI_Aware = 2
# } Process_DPI_Awareness;
try:
shcore = windll.LoadLibrary("Shcore.dll")
SetProcessDpiAwareness = shcore.SetProcessDpiAwareness
GetProcessDpiAwareness = shcore.GetProcessDpiAwareness
Process_DPI_Awareness = {
"PROCESS_DPI_UNAWARE" : 0,
"PROCESS_SYSTEM_DPI_AWARE" : 1,
"PROCESS_PER_MONITOR_DPI_AWARE" : 2,
}
except (OSError, AttributeError):
SetProcessDpiAwareness = None
GetProcessDpiAwareness = None
Process_DPI_Awareness = None
# SetProcessDpiAwarenessContext is available from
# Windows 10, version 1703 or Windows Server 2016
try:
# Notice that argument values for SetProcessDpiAwarenessContext are
# different from values returned by GetAwarenessFromDpiAwarenessContext
# GetAwarenessFromDpiAwarenessContext
# https://docs.microsoft.com/en-us/windows/win32/api/windef/ne-windef-dpi_awareness
# typedef enum DPI_AWARENESS {
# DPI_AWARENESS_INVALID,
# DPI_AWARENESS_UNAWARE,
# DPI_AWARENESS_SYSTEM_AWARE,
# DPI_AWARENESS_PER_MONITOR_AWARE
# };
# SetProcessDpiAwarenessContext
# https://docs.microsoft.com/en-au/windows/win32/hidpi/dpi-awareness-context
# #define DPI_AWARENESS_CONTEXT_UNAWARE ((DPI_AWARENESS_CONTEXT)-1)
# #define DPI_AWARENESS_CONTEXT_SYSTEM_AWARE ((DPI_AWARENESS_CONTEXT)-2)
# #define DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE ((DPI_AWARENESS_CONTEXT)-3)
# #define DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2 ((DPI_AWARENESS_CONTEXT)-4)
# #define DPI_AWARENESS_CONTEXT_UNAWARE_GDISCALED ((DPI_AWARENESS_CONTEXT)-5)
DPI_AWARENESS_CONTEXT = {
"UNAWARE": wintypes.HANDLE(-1),
"SYSTEM_AWARE": wintypes.HANDLE(-2),
"PER_MONITOR_AWARE": wintypes.HANDLE(-3),
"PER_MONITOR_AWARE_V2": wintypes.HANDLE(-4),
"UNAWARE_GDISCALED": wintypes.HANDLE(-5),
}
SetProcessDpiAwarenessContext = windll.user32.SetProcessDpiAwarenessContext
SetProcessDpiAwarenessContext.restype = wintypes.BOOL
SetProcessDpiAwarenessContext.argtypes = [
wintypes.HANDLE,
]
except (OSError, AttributeError):
SetProcessDpiAwarenessContext = None
# Setup DPI awareness for the python process if any is supported
if SetProcessDpiAwarenessContext:
ActionLogger().log("Call SetProcessDpiAwarenessContext with PROCESS_PER_MONITOR_DPI_AWARE")
SetProcessDpiAwarenessContext(
DPI_AWARENESS_CONTEXT["PER_MONITOR_AWARE"])
elif SetProcessDpiAwareness:
ActionLogger().log("Call SetProcessDpiAwareness with PROCESS_PER_MONITOR_DPI_AWARE")
SetProcessDpiAwareness(
Process_DPI_Awareness["PROCESS_PER_MONITOR_DPI_AWARE"])
elif SetProcessDPIAware:
ActionLogger().log("Call SetProcessDPIAware")
SetProcessDPIAware()
GetQueueStatus = windll.user32.GetQueueStatus
LoadString = windll.user32.LoadStringW
#def VkKeyScanW(p1):
# # C:/PROGRA~1/MICROS~4/VC98/Include/winuser.h 4225
# return VkKeyScanW._api_(p1)
#VkKeyScan = stdcall(SHORT, 'user32', [c_wchar]) (VkKeyScanW)
#
#def MapVirtualKeyExW(p1, p2, p3):
# # C:/PROGRA~1/MICROS~4/VC98/Include/winuser.h 4376
# return MapVirtualKeyExW._api_(p1, p2, p3)
#MapVirtualKeyEx = stdcall(
# UINT, 'user32', [c_uint, c_uint, c_long]) (MapVirtualKeyExW)
#
#def MapVirtualKeyW(p1, p2):
# # C:/PROGRA~1/MICROS~4/VC98/Include/winuser.h 4355
# return MapVirtualKeyW._api_(p1, p2)
#MapVirtualKey = stdcall(UINT, 'user32', [c_uint, c_uint]) (MapVirtualKeyW)
#====================================================================
def MakeLong(high, low):
    """Combine two 16-bit words into one 32-bit LONG value.

    ``high`` occupies bits 16-31 and ``low`` bits 0-15. Both inputs are
    masked to WORD size first so oversized values cannot bleed into the
    other half of the result.
    """
    high_word = (high & 0xFFFF) << 16
    low_word = low & 0xFFFF
    return high_word | low_word
#====================================================================
def HiWord(value):
    """Extract bits 16-31 of ``value`` as an unsigned 16-bit integer."""
    shifted = value >> 16
    return shifted & 0xFFFF
#====================================================================
def LoWord(value):
    """Extract bits 0-15 of ``value`` as an unsigned 16-bit integer."""
    # Python's % on a power of two is equivalent to masking with 0xFFFF,
    # including for negative inputs.
    return value % 0x10000
#====================================================================
def WaitGuiThreadIdle(handle):
    """Wait until the thread of the specified handle is ready.

    Raises RuntimeError when the window is reported hung by
    IsHungAppWindow; otherwise returns None.
    """
    process_id = wintypes.DWORD(0)
    GetWindowThreadProcessId(handle, byref(process_id))
    # ask the control if it has finished processing the message
    hprocess = OpenProcess(
        win32defines.PROCESS_QUERY_INFORMATION,
        0,
        process_id.value)
    # WaitForInputIdle call is removed because it's useful only
    # while an app is starting (should be called only once)
    # NOTE(review): hprocess is opened but never used before being closed
    # below; if the RuntimeError fires the handle leaks — consider a
    # try/finally (TODO confirm intended behavior before changing).
    if IsHungAppWindow(handle) == win32defines.TRUE:
        raise RuntimeError('Window (hwnd={0}) is not responding!'.format(handle))
    CloseHandle(hprocess)
#====================================================================
def GetDpiAwarenessByPid(pid):
    """Get DPI awareness properties of a process specified by ID.

    Parameters:
        pid: process ID to query; a falsy value skips the query.

    Returns the integer DPI_AWARENESS value reported by
    GetProcessDpiAwareness, or -1 when the API is unavailable, ``pid``
    is falsy, the process cannot be opened, or the query fails.
    """
    dpi_awareness = -1
    if GetProcessDpiAwareness and pid:
        hProcess = OpenProcess(
            win32defines.PROCESS_QUERY_INFORMATION,
            0,
            pid)
        if not hProcess:
            # process doesn't exist, exit with a default return value
            return dpi_awareness
        try:
            awareness = c_int()
            hRes = GetProcessDpiAwareness(
                hProcess,
                byref(awareness))
            if hRes == 0:
                return awareness.value
        finally:
            # Bug fix: the original also called CloseHandle inside the
            # try body, double-closing the handle when the call
            # succeeded. Close it exactly once here.
            CloseHandle(hProcess)
    # GetProcessDpiAwareness is not supported or pid is not specified,
    # return a default value
    return dpi_awareness
|
labcodes/django | refs/heads/master | tests/foreign_object/models/empty_join.py | 25 | from django.db import models
from django.db.models.fields.related import (
ForeignObjectRel, ReverseManyToOneDescriptor,
)
from django.db.models.lookups import StartsWith
from django.db.models.query_utils import PathInfo
class CustomForeignObjectRel(ForeignObjectRel):
    """
    Define some extra Field methods so this Rel acts more like a Field, which
    lets us use ReverseManyToOneDescriptor in both directions.
    """
    @property
    def foreign_related_fields(self):
        # Expose the local (lhs) side of each related-field pair, mirroring
        # what a real Field would report.
        return tuple(pair[0] for pair in self.field.related_fields)
    def get_attname(self):
        # Behave like a concrete field: the attribute name is the rel name.
        return self.name
class StartsWithRelation(models.ForeignObject):
    """
    A ForeignObject that uses StartsWith operator in its joins instead of
    the default equality operator. This is logically a many-to-many relation
    and creates a ReverseManyToOneDescriptor in both directions.
    """
    auto_created = False
    many_to_many = False
    many_to_one = True
    one_to_many = False
    one_to_one = False
    rel_class = CustomForeignObjectRel
    def __init__(self, *args, **kwargs):
        # This virtual relation owns no real column, so force DO_NOTHING
        # regardless of what the caller passed.
        kwargs['on_delete'] = models.DO_NOTHING
        super().__init__(*args, **kwargs)
    @property
    def field(self):
        """
        Makes ReverseManyToOneDescriptor work in both directions.
        """
        return self.remote_field
    def get_extra_restriction(self, where_class, alias, related_alias):
        # Replace the usual column-equality join condition with a
        # StartsWith lookup between the two configured fields.
        to_field = self.remote_field.model._meta.get_field(self.to_fields[0])
        from_field = self.model._meta.get_field(self.from_fields[0])
        return StartsWith(to_field.get_col(alias), from_field.get_col(related_alias))
    def get_joining_columns(self, reverse_join=False):
        # No direct column pairs; the join comes entirely from
        # get_extra_restriction() above.
        return tuple()
    def get_path_info(self):
        # Forward traversal: from this model to the remote model.
        to_opts = self.remote_field.model._meta
        from_opts = self.model._meta
        return [PathInfo(from_opts, to_opts, (to_opts.pk,), self, False, False)]
    def get_reverse_path_info(self):
        # Reverse traversal: remote model back to this model.
        to_opts = self.model._meta
        from_opts = self.remote_field.model._meta
        return [PathInfo(from_opts, to_opts, (to_opts.pk,), self.remote_field, False, False)]
    def contribute_to_class(self, cls, name, private_only=False):
        super().contribute_to_class(cls, name, private_only)
        # Also install the reverse descriptor on the forward side so the
        # relation is traversable in both directions.
        setattr(cls, self.name, ReverseManyToOneDescriptor(self))
class BrokenContainsRelation(StartsWithRelation):
    """
    This model is designed to yield no join conditions and
    raise an exception in ``Join.as_sql()``.
    """
    def get_extra_restriction(self, where_class, alias, related_alias):
        # Intentionally return no condition; combined with the inherited
        # empty get_joining_columns() the join ends up with no conditions
        # at all, which Join.as_sql() treats as an error.
        return None
class SlugPage(models.Model):
    """Pages related purely by slug prefix (e.g. 'a' contains 'ab')."""
    slug = models.CharField(max_length=20, unique=True)
    # descendants: every SlugPage whose slug starts with this page's slug;
    # the reverse accessor is 'ascendants'.
    descendants = StartsWithRelation(
        'self',
        from_fields=['slug'],
        to_fields=['slug'],
        related_name='ascendants',
    )
    # Deliberately broken relation (yields no join condition) used by tests.
    containers = BrokenContainsRelation(
        'self',
        from_fields=['slug'],
        to_fields=['slug'],
    )
    class Meta:
        ordering = ['slug']
    def __str__(self):
        return 'SlugPage %s' % self.slug
|
zozo123/buildbot | refs/heads/master | master/buildbot/test/unit/test_master.py | 1 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
import os
import signal
from buildbot import config
from buildbot import master
from buildbot import monkeypatches
from buildbot.changes.changes import Change
from buildbot.db import exceptions
from buildbot.test.fake import fakedata
from buildbot.test.fake import fakedb
from buildbot.test.fake import fakemq
from buildbot.test.util import compat
from buildbot.test.util import dirs
from buildbot.test.util import logging
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from twisted.trial import unittest
class OldTriggeringMethods(unittest.TestCase):
    """Tests for the legacy master.addChange() keyword/positional argument
    translation into data-API keyword arguments."""
    def setUp(self):
        # Prevent BuildMaster from building its whole service tree; these
        # tests only exercise the change-adding API.
        self.patch(master.BuildMaster, 'create_child_services',
                   lambda self: None)
        self.master = master.BuildMaster(basedir=None)
        self.master.data = fakedata.FakeDataConnector(self.master, self)
        self.master.db = fakedb.FakeDBConnector(self.master, self)
        self.master.db.insertTestData([
            fakedb.Change(changeid=1, author='this is a test'),
        ])
        self.fake_Change = mock.Mock(name='fake_Change')
        def fromChdict(master, chdict):
            # Return the sentinel Change only for the chdict inserted above.
            if chdict['author'] != 'this is a test':
                raise AssertionError("did not get expected chdict")
            return defer.succeed(self.fake_Change)
        self.patch(Change, 'fromChdict', staticmethod(fromChdict))
    def do_test_addChange_args(self, args=(), kwargs={}, exp_data_kwargs={}):
        """Call master.addChange(*args, **kwargs) and assert the data API
        received ``exp_data_kwargs`` merged over the defaults below."""
        # add default arguments
        default_data_kwargs = {
            'author': None,
            'branch': None,
            'category': None,
            'codebase': None,
            'comments': None,
            'files': None,
            'project': '',
            'properties': {},
            'repository': '',
            'revision': None,
            'revlink': '',
            'src': None,
            'when_timestamp': None,
        }
        default_data_kwargs.update(exp_data_kwargs)
        exp_data_kwargs = default_data_kwargs
        d = self.master.addChange(*args, **kwargs)
        @d.addCallback
        def check(change):
            self.assertIdentical(change, self.fake_Change)
            self.assertEqual(self.master.data.updates.changesAdded,
                             [exp_data_kwargs])
        return d
    def test_addChange_args_author(self):
        # who should come through as author
        return self.do_test_addChange_args(
            kwargs=dict(who='me'),
            exp_data_kwargs=dict(author='me'))
    def test_addChange_args_when(self):
        # when should come through as when_timestamp, as a datetime
        return self.do_test_addChange_args(
            kwargs=dict(when=892293875),
            exp_data_kwargs=dict(when_timestamp=892293875))
    def test_addChange_args_properties(self):
        # properties should not be qualified with a source
        return self.do_test_addChange_args(
            kwargs=dict(properties={'a': 'b'}),
            exp_data_kwargs=dict(properties={u'a': u'b'}))
    def test_addChange_args_properties_tuple(self):
        # properties should not be qualified with a source
        return self.do_test_addChange_args(
            kwargs=dict(properties={'a': ('b', 'Change')}),
            exp_data_kwargs=dict(properties={'a': ('b', 'Change')}))
    def test_addChange_args_positional(self):
        # master.addChange can take author, files, comments as positional
        # arguments
        return self.do_test_addChange_args(
            args=('me', ['a'], 'com'),
            exp_data_kwargs=dict(author='me', files=['a'], comments='com'))
class StartupAndReconfig(dirs.DirsMixin, logging.LoggingMixin, unittest.TestCase):
    """Tests for BuildMaster service startup, shutdown and reconfiguration,
    using fake db/mq/data connectors and a mock reactor."""
    def setUp(self):
        self.setUpLogging()
        self.basedir = os.path.abspath('basedir')
        d = self.setUpDirs(self.basedir)
        @d.addCallback
        def make_master(_):
            # don't create child services
            self.patch(master.BuildMaster, 'create_child_services',
                       lambda self: None)
            # patch out a few other annoying things the master likes to do
            self.patch(monkeypatches, 'patch_all', lambda: None)
            self.patch(signal, 'signal', lambda sig, hdlr: None)
            self.patch(master, 'Status', lambda master: mock.Mock())  # XXX temporary
            self.patch(config.MasterConfig, 'loadConfig',
                       classmethod(lambda cls, b, f: cls()))
            self.master = master.BuildMaster(self.basedir)
            self.db = self.master.db = fakedb.FakeDBConnector(self.master, self)
            self.mq = self.master.mq = fakemq.FakeMQConnector(self.master, self)
            self.data = self.master.data = fakedata.FakeDataConnector(self.master, self)
        return d
    def tearDown(self):
        return self.tearDownDirs()
    def make_reactor(self):
        # A mock reactor whose stop() calls we can observe; callWhenRunning
        # must be the real one so startService's callbacks actually fire.
        r = mock.Mock()
        r.callWhenRunning = reactor.callWhenRunning
        return r
    def patch_loadConfig_fail(self):
        # Make any subsequent config load raise via config.error().
        @classmethod
        def loadConfig(cls, b, f):
            config.error('oh noes')
        self.patch(config.MasterConfig, 'loadConfig', loadConfig)
    # tests
    def test_startup_bad_config(self):
        # a bad config on startup should stop the reactor and be logged
        reactor = self.make_reactor()
        self.patch_loadConfig_fail()
        d = self.master.startService(_reactor=reactor)
        @d.addCallback
        def check(_):
            reactor.stop.assert_called_with()
            self.assertLogged("oh noes")
        return d
    def test_startup_db_not_ready(self):
        # a not-yet-upgraded database should stop the reactor
        reactor = self.make_reactor()
        def db_setup():
            log.msg("GOT HERE")
            raise exceptions.DatabaseNotReadyError()
        self.db.setup = db_setup
        d = self.master.startService(_reactor=reactor)
        @d.addCallback
        def check(_):
            reactor.stop.assert_called_with()
            self.assertLogged("GOT HERE")
        return d
    @compat.usesFlushLoggedErrors
    def test_startup_error(self):
        # an unexpected exception during db setup stops the reactor and
        # is logged as an error
        reactor = self.make_reactor()
        def db_setup():
            raise RuntimeError("oh noes")
        self.db.setup = db_setup
        d = self.master.startService(_reactor=reactor)
        @d.addCallback
        def check(_):
            reactor.stop.assert_called_with()
            self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
        return d
    def test_startup_ok(self):
        # a clean startup marks the master active; stopping clears it
        reactor = self.make_reactor()
        d = self.master.startService(_reactor=reactor)
        @d.addCallback
        def check_started(_):
            self.assertTrue(self.master.data.updates.masterActive)
        d.addCallback(lambda _: self.master.stopService())
        @d.addCallback
        def check(_):
            self.failIf(reactor.stop.called)
            self.assertLogged("BuildMaster is running")
            # check started/stopped messages
            self.assertFalse(self.master.data.updates.masterActive)
        return d
    def test_reconfig(self):
        reactor = self.make_reactor()
        self.master.reconfigService = mock.Mock(
            side_effect=lambda n: defer.succeed(None))
        d = self.master.startService(_reactor=reactor)
        d.addCallback(lambda _: self.master.reconfig())
        d.addCallback(lambda _: self.master.stopService())
        @d.addCallback
        def check(_):
            self.master.reconfigService.assert_called_with(mock.ANY)
        return d
    @defer.inlineCallbacks
    def test_reconfig_bad_config(self):
        reactor = self.make_reactor()
        self.master.reconfigService = mock.Mock(
            side_effect=lambda n: defer.succeed(None))
        yield self.master.startService(_reactor=reactor)
        # reset, since startService called reconfigService
        self.master.reconfigService.reset_mock()
        # reconfig, with a failure
        self.patch_loadConfig_fail()
        yield self.master.reconfig()
        self.master.stopService()
        self.assertLogged("reconfig aborted without")
        self.failIf(self.master.reconfigService.called)
    @defer.inlineCallbacks
    def test_reconfigService_db_url_changed(self):
        # changing db_url at runtime is unsupported and must raise
        old = self.master.config = config.MasterConfig()
        old.db['db_url'] = 'aaaa'
        yield self.master.reconfigService(old)
        new = config.MasterConfig()
        new.db['db_url'] = 'bbbb'
        self.assertRaises(config.ConfigErrors, lambda:
                          self.master.reconfigService(new))
|
mtndesign/mtnvim | refs/heads/master | myvim/bundle/ropevim/ftplugin/python/libs/rope/base/oi/doa.py | 59 | import cPickle as pickle
import marshal
import os
import socket
import subprocess
import sys
import tempfile
import threading
class PythonFileRunner(object):
    """A class for running python project files.

    Spawns the target file in a subprocess (optionally through
    rope.base.oi.runmod so dynamic analysis data can be streamed back)
    and funnels received data items into the ``analyze_data`` callback.
    """
    def __init__(self, pycore, file_, args=None, stdin=None,
                 stdout=None, analyze_data=None):
        # analyze_data: optional callable invoked (on a background thread)
        # with each data item the child sends back; None disables the
        # runmod wrapper entirely.
        self.pycore = pycore
        self.file = file_
        self.analyze_data = analyze_data
        self.observers = []
        self.args = args
        self.stdin = stdin
        self.stdout = stdout
    def run(self):
        """Execute the process"""
        env = dict(os.environ)
        file_path = self.file.real_path
        # Make project sources importable inside the child process.
        path_folders = self.pycore.get_source_folders() + \
                       self.pycore.get_python_path_folders()
        env['PYTHONPATH'] = os.pathsep.join(folder.real_path
                                            for folder in path_folders)
        runmod_path = self.pycore.find_module('rope.base.oi.runmod').real_path
        self.receiver = None
        self._init_data_receiving()
        send_info = '-'
        if self.receiver:
            send_info = self.receiver.get_send_info()
        args = [sys.executable, runmod_path, send_info,
                self.pycore.project.address, self.file.real_path]
        if self.analyze_data is None:
            # No data collection requested: run the file directly by
            # dropping runmod_path, send_info and the project address.
            del args[1:4]
        if self.args is not None:
            args.extend(self.args)
        self.process = subprocess.Popen(
            executable=sys.executable, args=args, env=env,
            cwd=os.path.split(file_path)[0], stdin=self.stdin,
            stdout=self.stdout, stderr=self.stdout, close_fds=os.name != 'nt')
    def _init_data_receiving(self):
        # Create a receiver and a daemon thread that drains it; no-op when
        # analysis is disabled.
        if self.analyze_data is None:
            return
        # Disabling FIFO data transfer due to blocking when running
        # unittests in the GUI.
        # XXX: Handle FIFO data transfer for `rope.ui.testview`
        if True or os.name == 'nt':
            self.receiver = _SocketReceiver()
        else:
            self.receiver = _FIFOReceiver()
        self.receiving_thread = threading.Thread(target=self._receive_information)
        self.receiving_thread.setDaemon(True)
        self.receiving_thread.start()
    def _receive_information(self):
        # Runs on the daemon receiver thread; notifies observers at EOF.
        #temp = open('/dev/shm/info', 'w')
        for data in self.receiver.receive_data():
            self.analyze_data(data)
            #temp.write(str(data) + '\n')
        #temp.close()
        for observer in self.observers:
            observer()
    def wait_process(self):
        """Wait for the process to finish"""
        self.process.wait()
        if self.analyze_data:
            self.receiving_thread.join()
    def kill_process(self):
        """Stop the process"""
        if self.process.poll() is not None:
            # already exited
            return
        try:
            if hasattr(self.process, 'terminate'):
                self.process.terminate()
            elif os.name != 'nt':
                os.kill(self.process.pid, 9)
            else:
                # Fall back to the raw Win32 call on Pythons whose Popen
                # lacks terminate().
                import ctypes
                handle = int(self.process._handle)
                ctypes.windll.kernel32.TerminateProcess(handle, -1)
        except OSError:
            pass
    def add_finishing_observer(self, observer):
        """Notify this observer when execution finishes"""
        self.observers.append(observer)
class _MessageReceiver(object):
def receive_data(self):
pass
def get_send_info(self):
pass
class _SocketReceiver(_MessageReceiver):
    # TCP transport: the child connects back and streams pickled objects.
    def __init__(self):
        # Find a free port in [3037, 4000) by binding successive ports.
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.data_port = 3037
        while self.data_port < 4000:
            try:
                self.server_socket.bind(('', self.data_port))
                break
            except socket.error, e:
                self.data_port += 1
        self.server_socket.listen(1)
    def get_send_info(self):
        # The child only needs the port number to connect back.
        return str(self.data_port)
    def receive_data(self):
        # Accept a single connection, then unpickle objects until EOF.
        conn, addr = self.server_socket.accept()
        self.server_socket.close()
        my_file = conn.makefile('r')
        while True:
            try:
                yield pickle.load(my_file)
            except EOFError:
                break
        my_file.close()
        conn.close()
class _FIFOReceiver(_MessageReceiver):
    # Named-pipe transport: the child writes marshalled objects to a FIFO.
    def __init__(self):
        # XXX: this is insecure and might cause race conditions
        self.file_name = self._get_file_name()
        os.mkfifo(self.file_name)
    def _get_file_name(self):
        # First unused path of the form <tmpdir>/__rope_NNNN.
        prefix = tempfile.gettempdir() + '/__rope_'
        i = 0
        while os.path.exists(prefix + str(i).rjust(4, '0')):
            i += 1
        return prefix + str(i).rjust(4, '0')
    def get_send_info(self):
        # The child needs the FIFO path to open it for writing.
        return self.file_name
    def receive_data(self):
        # Unmarshal objects from the FIFO until EOF, then clean up.
        my_file = open(self.file_name, 'rb')
        while True:
            try:
                yield marshal.load(my_file)
            except EOFError:
                break
        my_file.close()
        os.remove(self.file_name)
|
hectord/lettuce | refs/heads/master | tests/integration/lib/Django-1.2.5/django/forms/models.py | 43 | """
Helper functions for creating Form classes from Django models
and database field objects.
"""
from django.db import connections
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.datastructures import SortedDict
from django.utils.text import get_text_list, capfirst
from django.utils.translation import ugettext_lazy as _, ugettext
from django.core.exceptions import ValidationError, NON_FIELD_ERRORS
from django.core.validators import EMPTY_VALUES
from util import ErrorList
from forms import BaseForm, get_declared_fields
from fields import Field, ChoiceField
from widgets import SelectMultiple, HiddenInput, MultipleHiddenInput
from widgets import media_property
from formsets import BaseFormSet, formset_factory, DELETION_FIELD_NAME
__all__ = (
'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
'save_instance', 'form_for_fields', 'ModelChoiceField',
'ModelMultipleChoiceField',
)
def construct_instance(form, instance, fields=None, exclude=None):
    """
    Constructs and returns a model instance from the bound ``form``'s
    ``cleaned_data``, but does not save the returned instance to the
    database.

    ``fields`` optionally restricts which field names may be copied onto
    ``instance``; ``exclude`` lists field names that are never copied.
    """
    from django.db import models
    opts = instance._meta
    cleaned_data = form.cleaned_data
    file_field_list = []
    for f in opts.fields:
        # Skip non-editable fields, AutoFields, and fields the form did
        # not actually clean.
        if not f.editable or isinstance(f, models.AutoField) \
                or not f.name in cleaned_data:
            continue
        if fields and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        # Defer saving file-type fields until after the other fields, so a
        # callable upload_to can use the values from other fields.
        if isinstance(f, models.FileField):
            file_field_list.append(f)
        else:
            f.save_form_data(instance, cleaned_data[f.name])
    for f in file_field_list:
        f.save_form_data(instance, cleaned_data[f.name])
    return instance
def save_instance(form, instance, fields=None, fail_message='saved',
                  commit=True, exclude=None, construct=True):
    """
    Saves bound Form ``form``'s cleaned_data into model instance ``instance``.

    If commit=True, then the changes to ``instance`` will be saved to the
    database. Returns ``instance``.

    If construct=False, assume ``instance`` has already been constructed and
    just needs to be saved.

    Raises ValueError when the form has validation errors.
    """
    if construct:
        instance = construct_instance(form, instance, fields, exclude)
    opts = instance._meta
    if form.errors:
        raise ValueError("The %s could not be %s because the data didn't"
                         " validate." % (opts.object_name, fail_message))
    # Wrap up the saving of m2m data as a function.
    def save_m2m():
        cleaned_data = form.cleaned_data
        for f in opts.many_to_many:
            if fields and f.name not in fields:
                continue
            if f.name in cleaned_data:
                f.save_form_data(instance, cleaned_data[f.name])
    if commit:
        # If we are committing, save the instance and the m2m data immediately.
        instance.save()
        save_m2m()
    else:
        # We're not committing. Add a method to the form to allow deferred
        # saving of m2m data.
        form.save_m2m = save_m2m
    return instance
def make_model_save(model, fields, fail_message):
    """Returns the save() method for a Form."""
    def save(self, commit=True):
        # Create a fresh model instance and persist the form data into it.
        instance = model()
        return save_instance(self, instance, fields, fail_message, commit)
    return save
def make_instance_save(instance, fields, fail_message):
    """Returns the save() method for a Form bound to an existing instance."""
    def save(self, commit=True):
        # Persist the form data into the captured instance.
        saved = save_instance(self, instance, fields, fail_message, commit)
        return saved
    return save
def form_for_fields(field_list):
    """
    Returns a Form class for the given list of Django database field instances.

    Non-editable fields are skipped; each remaining field contributes its
    default formfield under its model field name.
    """
    fields = SortedDict([(f.name, f.formfield())
                         for f in field_list if f.editable])
    return type('FormForFields', (BaseForm,), {'base_fields': fields})
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
    """
    Returns a dict containing the data in ``instance`` suitable for passing as
    a Form's ``initial`` keyword argument.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned dict.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned dict, even if they are listed in
    the ``fields`` argument.
    """
    # avoid a circular import
    from django.db.models.fields.related import ManyToManyField
    opts = instance._meta
    data = {}
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and not f.name in fields:
            continue
        if exclude and f.name in exclude:
            continue
        if isinstance(f, ManyToManyField):
            # If the object doesn't have a primary key yet, just use an empty
            # list for its m2m fields. Calling f.value_from_object will raise
            # an exception.
            if instance.pk is None:
                data[f.name] = []
            else:
                # MultipleChoiceWidget needs a list of pks, not object instances.
                data[f.name] = [obj.pk for obj in f.value_from_object(instance)]
        else:
            data[f.name] = f.value_from_object(instance)
    return data
def fields_for_model(model, fields=None, exclude=None, widgets=None, formfield_callback=None):
    """
    Returns a ``SortedDict`` containing form fields for the given model.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned fields.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned fields, even if they are listed
    in the ``fields`` argument.

    ``widgets`` is an optional dict mapping field names to widget overrides.

    ``formfield_callback`` is an optional callable taking a model field (and
    keyword arguments) and returning the form field to use, or None to skip it.
    """
    field_list = []
    ignored = []
    opts = model._meta
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and not f.name in fields:
            continue
        if exclude and f.name in exclude:
            continue
        if widgets and f.name in widgets:
            kwargs = {'widget': widgets[f.name]}
        else:
            kwargs = {}
        if formfield_callback is None:
            formfield = f.formfield(**kwargs)
        elif not callable(formfield_callback):
            raise TypeError('formfield_callback must be a function or callable')
        else:
            formfield = formfield_callback(f, **kwargs)
        # Track fields whose formfield is None so an explicit ``fields``
        # list below does not resurrect them.
        if formfield:
            field_list.append((f.name, formfield))
        else:
            ignored.append(f.name)
    field_dict = SortedDict(field_list)
    if fields:
        # Reorder to match the ``fields`` argument, dropping excluded and
        # ignored names.
        field_dict = SortedDict(
            [(f, field_dict.get(f)) for f in fields
             if ((not exclude) or (exclude and f not in exclude)) and (f not in ignored)]
        )
    return field_dict
class ModelFormOptions(object):
    """Collects the ``model``, ``fields``, ``exclude`` and ``widgets``
    options from a ModelForm's inner ``Meta`` object; any option missing
    on ``options`` becomes None."""
    def __init__(self, options=None):
        for name in ('model', 'fields', 'exclude', 'widgets'):
            setattr(self, name, getattr(options, name, None))
class ModelFormMetaclass(type):
    """Metaclass for ModelForm: collects declared form fields and, when the
    inner Meta names a model, merges in fields generated from that model."""
    def __new__(cls, name, bases, attrs):
        formfield_callback = attrs.pop('formfield_callback', None)
        try:
            parents = [b for b in bases if issubclass(b, ModelForm)]
        except NameError:
            # We are defining ModelForm itself.
            parents = None
        declared_fields = get_declared_fields(bases, attrs, False)
        new_class = super(ModelFormMetaclass, cls).__new__(cls, name, bases,
                                                           attrs)
        if not parents:
            return new_class
        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
        if opts.model:
            # If a model is defined, extract form fields from it.
            fields = fields_for_model(opts.model, opts.fields,
                                      opts.exclude, opts.widgets, formfield_callback)
            # Override default model fields with any custom declared ones
            # (plus, include all the other declared fields).
            fields.update(declared_fields)
        else:
            fields = declared_fields
        new_class.declared_fields = declared_fields
        new_class.base_fields = fields
        return new_class
class BaseModelForm(BaseForm):
    """A Form bound to a model instance: seeds initial data from the
    instance, runs model validation during cleaning, and can save the
    cleaned data back via save()."""
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        opts = self._meta
        if instance is None:
            if opts.model is None:
                raise ValueError('ModelForm has no model class specified.')
            # if we didn't get an instance, instantiate a new one
            self.instance = opts.model()
            object_data = {}
        else:
            self.instance = instance
            object_data = model_to_dict(instance, opts.fields, opts.exclude)
        # if initial was provided, it should override the values from instance
        if initial is not None:
            object_data.update(initial)
        # self._validate_unique will be set to True by BaseModelForm.clean().
        # It is False by default so overriding self.clean() and failing to call
        # super will stop validate_unique from being called.
        self._validate_unique = False
        super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
                                            error_class, label_suffix, empty_permitted)
    def _update_errors(self, message_dict):
        # Merge a model-validation message dict into this form's _errors.
        for k, v in message_dict.items():
            if k != NON_FIELD_ERRORS:
                self._errors.setdefault(k, self.error_class()).extend(v)
                # Remove the data from the cleaned_data dict since it was invalid
                if k in self.cleaned_data:
                    del self.cleaned_data[k]
        if NON_FIELD_ERRORS in message_dict:
            messages = message_dict[NON_FIELD_ERRORS]
            self._errors.setdefault(NON_FIELD_ERRORS, self.error_class()).extend(messages)
    def _get_validation_exclusions(self):
        """
        For backwards-compatibility, several types of fields need to be
        excluded from model validation. See the following tickets for
        details: #12507, #12521, #12553
        """
        exclude = []
        # Build up a list of fields that should be excluded from model field
        # validation and unique checks.
        for f in self.instance._meta.fields:
            field = f.name
            # Exclude fields that aren't on the form. The developer may be
            # adding these values to the model after form validation.
            if field not in self.fields:
                exclude.append(f.name)
            # Don't perform model validation on fields that were defined
            # manually on the form and excluded via the ModelForm's Meta
            # class. See #12901.
            elif self._meta.fields and field not in self._meta.fields:
                exclude.append(f.name)
            elif self._meta.exclude and field in self._meta.exclude:
                exclude.append(f.name)
            # Exclude fields that failed form validation. There's no need for
            # the model fields to validate them as well.
            elif field in self._errors.keys():
                exclude.append(f.name)
            # Exclude empty fields that are not required by the form, if the
            # underlying model field is required. This keeps the model field
            # from raising a required error. Note: don't exclude the field from
            # validation if the model field allows blanks. If it does, the blank
            # value may be included in a unique check, so cannot be excluded
            # from validation.
            else:
                form_field = self.fields[field]
                field_value = self.cleaned_data.get(field, None)
                if not f.blank and not form_field.required and field_value in EMPTY_VALUES:
                    exclude.append(f.name)
        return exclude
    def clean(self):
        # Flag that validate_unique() should run in _post_clean().
        self._validate_unique = True
        return self.cleaned_data
    def _post_clean(self):
        opts = self._meta
        # Update the model instance with self.cleaned_data.
        self.instance = construct_instance(self, self.instance, opts.fields, opts.exclude)
        exclude = self._get_validation_exclusions()
        # Foreign Keys being used to represent inline relationships
        # are excluded from basic field value validation. This is for two
        # reasons: firstly, the value may not be supplied (#12507; the
        # case of providing new values to the admin); secondly the
        # object being referred to may not yet fully exist (#12749).
        # However, these fields *must* be included in uniqueness checks,
        # so this can't be part of _get_validation_exclusions().
        for f_name, field in self.fields.items():
            if isinstance(field, InlineForeignKeyField):
                exclude.append(f_name)
        # Clean the model instance's fields.
        try:
            self.instance.clean_fields(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)
        # Call the model instance's clean method.
        try:
            self.instance.clean()
        except ValidationError, e:
            self._update_errors({NON_FIELD_ERRORS: e.messages})
        # Validate uniqueness if needed.
        if self._validate_unique:
            self.validate_unique()
    def validate_unique(self):
        """
        Calls the instance's validate_unique() method and updates the form's
        validation errors if any were raised.
        """
        exclude = self._get_validation_exclusions()
        try:
            self.instance.validate_unique(exclude=exclude)
        except ValidationError, e:
            self._update_errors(e.message_dict)
    def save(self, commit=True):
        """
        Saves this ``form``'s cleaned_data into model instance
        ``self.instance``.

        If commit=True, then the changes to ``instance`` will be saved to the
        database. Returns ``instance``.
        """
        if self.instance.pk is None:
            fail_message = 'created'
        else:
            fail_message = 'changed'
        return save_instance(self, self.instance, self._meta.fields,
                             fail_message, commit, construct=False)
    save.alters_data = True
class ModelForm(BaseModelForm):
    # The metaclass reads the inner ``Meta`` of each subclass and builds
    # ``base_fields`` from the declared model's fields.
    __metaclass__ = ModelFormMetaclass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
                      formfield_callback=None):
    """Return a ModelForm subclass generated for ``model``.

    ``form`` is the base form class; ``fields``/``exclude`` restrict which
    model fields become form fields; ``formfield_callback`` customizes how
    each model field is turned into a form field.
    """
    # Assemble the attributes of a temporary inner Meta class.
    # FIXME: ideally a ModelForm could be constructed without one.
    meta_attrs = {'model': model}
    if fields is not None:
        meta_attrs['fields'] = fields
    if exclude is not None:
        meta_attrs['exclude'] = exclude
    # When the parent form class already declares an inner Meta, inherit
    # from it so its options are preserved.
    if hasattr(form, 'Meta'):
        meta_bases = (form.Meta, object)
    else:
        meta_bases = (object,)
    meta_class = type('Meta', meta_bases, meta_attrs)
    # Name the generated class after the model and let the metaclass do
    # the actual field construction.
    return ModelFormMetaclass(model.__name__ + 'Form', (form,), {
        'Meta': meta_class,
        'formfield_callback': formfield_callback,
    })
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
    """
    A ``FormSet`` for editing a queryset and/or adding new objects to it.
    """
    model = None

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 queryset=None, **kwargs):
        self.queryset = queryset
        defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
        defaults.update(kwargs)
        super(BaseModelFormSet, self).__init__(**defaults)

    def initial_form_count(self):
        """Returns the number of forms that are required in this FormSet."""
        if not (self.data or self.files):
            # Unbound formset: one initial form per object in the queryset.
            return len(self.get_queryset())
        return super(BaseModelFormSet, self).initial_form_count()

    def _existing_object(self, pk):
        """Returns the queryset member with the given pk, or None."""
        if not hasattr(self, '_object_dict'):
            # Cache a pk -> instance map so repeated lookups are O(1).
            self._object_dict = dict([(o.pk, o) for o in self.get_queryset()])
        return self._object_dict.get(pk)

    def _construct_form(self, i, **kwargs):
        """Instantiates and returns the i-th form, attached to its instance."""
        if self.is_bound and i < self.initial_form_count():
            # Recover the model instance from the pk submitted with the form.
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            pk_field = self.model._meta.pk
            pk = pk_field.get_db_prep_lookup('exact', pk,
                connection=connections[self.get_queryset().db])
            if isinstance(pk, list):
                # get_db_prep_lookup may wrap the value in a list.
                pk = pk[0]
            kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and not kwargs.get('instance'):
            kwargs['instance'] = self.get_queryset()[i]
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)

    def get_queryset(self):
        """Returns the (cached, deterministically ordered) queryset."""
        if not hasattr(self, '_queryset'):
            if self.queryset is not None:
                qs = self.queryset
            else:
                qs = self.model._default_manager.get_query_set()
            # If the queryset isn't already ordered we need to add an
            # artificial ordering here to make sure that all formsets
            # constructed from this queryset have the same form order.
            if not qs.ordered:
                qs = qs.order_by(self.model._meta.pk.name)
            # Removed queryset limiting here. As per discussion re: #13023
            # on django-dev, max_num should not prevent existing
            # related objects/inlines from being displayed.
            self._queryset = qs
        return self._queryset

    def save_new(self, form, commit=True):
        """Saves and returns a new model instance for the given form."""
        return form.save(commit=commit)

    def save_existing(self, form, instance, commit=True):
        """Saves and returns an existing model instance for the given form."""
        return form.save(commit=commit)

    def save(self, commit=True):
        """Saves model instances for every form, adding and changing instances
        as necessary, and returns the list of instances.
        """
        if not commit:
            # Mirror the ModelForm commit=False contract: defer m2m saving
            # until the caller explicitly invokes formset.save_m2m().
            self.saved_forms = []
            def save_m2m():
                for form in self.saved_forms:
                    form.save_m2m()
            self.save_m2m = save_m2m
        return self.save_existing_objects(commit) + self.save_new_objects(commit)

    def clean(self):
        self.validate_unique()

    def validate_unique(self):
        """Checks unique/unique_together/unique_for_date constraints across
        all forms in the formset and raises ValidationError on duplicates."""
        # Collect unique_checks and date_checks to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            exclude = form._get_validation_exclusions()
            unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
            all_unique_checks = all_unique_checks.union(set(unique_checks))
            all_date_checks = all_date_checks.union(set(date_checks))

        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid
                if not hasattr(form, "cleaned_data"):
                    continue
                # get data for each field of each of unique_check
                row_data = tuple([form.cleaned_data[field] for field in unique_check if field in form.cleaned_data])
                if row_data and not None in row_data:
                    # if we've already seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    # mark the data as seen
                    seen_data.add(row_data)
        # iterate over each of the date checks now
        for date_check in all_date_checks:
            seen_data = set()
            uclass, lookup, field, unique_for = date_check
            for form in self.forms:
                # if the form doesn't have cleaned_data then we ignore it,
                # it's already invalid.
                # (Bug fix: this previously tested hasattr(self, ...), which
                # checked the formset itself rather than the form under
                # inspection, so invalid forms were never skipped here.)
                if not hasattr(form, 'cleaned_data'):
                    continue
                # see if we have data for both fields
                if (form.cleaned_data and form.cleaned_data[field] is not None
                        and form.cleaned_data[unique_for] is not None):
                    # if it's a date lookup we need to get the data for all the fields
                    if lookup == 'date':
                        date = form.cleaned_data[unique_for]
                        date_data = (date.year, date.month, date.day)
                    # otherwise it's just the attribute on the date/datetime
                    # object
                    else:
                        date_data = (getattr(form.cleaned_data[unique_for], lookup),)
                    data = (form.cleaned_data[field],) + date_data
                    # if we've already seen it then we have a uniqueness failure
                    if data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_date_error_message(date_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        del form.cleaned_data
                        break
                    seen_data.add(data)
        if errors:
            raise ValidationError(errors)

    def get_unique_error_message(self, unique_check):
        """Returns a user-facing message for a duplicate-row failure."""
        if len(unique_check) == 1:
            return ugettext("Please correct the duplicate data for %(field)s.") % {
                "field": unique_check[0],
            }
        else:
            return ugettext("Please correct the duplicate data for %(field)s, "
                "which must be unique.") % {
                    "field": get_text_list(unique_check, unicode(_("and"))),
                }

    def get_date_error_message(self, date_check):
        """Returns a user-facing message for a unique_for_date failure."""
        return ugettext("Please correct the duplicate data for %(field_name)s "
            "which must be unique for the %(lookup)s in %(date_field)s.") % {
            'field_name': date_check[2],
            'date_field': date_check[3],
            'lookup': unicode(date_check[1]),
        }

    def get_form_error(self):
        return ugettext("Please correct the duplicate values below.")

    def save_existing_objects(self, commit=True):
        """Saves (or deletes) the objects edited by the initial forms and
        returns the list of saved instances."""
        self.changed_objects = []
        self.deleted_objects = []
        if not self.get_queryset():
            return []

        saved_instances = []
        for form in self.initial_forms:
            pk_name = self._pk_field.name
            raw_pk_value = form._raw_value(pk_name)

            # clean() for different types of PK fields can sometimes return
            # the model instance, and sometimes the PK. Handle either.
            pk_value = form.fields[pk_name].clean(raw_pk_value)
            pk_value = getattr(pk_value, 'pk', pk_value)

            obj = self._existing_object(pk_value)
            if self.can_delete:
                raw_delete_value = form._raw_value(DELETION_FIELD_NAME)
                should_delete = form.fields[DELETION_FIELD_NAME].clean(raw_delete_value)
                if should_delete:
                    self.deleted_objects.append(obj)
                    obj.delete()
                    continue
            if form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances

    def save_new_objects(self, commit=True):
        """Saves instances for the extra forms and returns the new objects."""
        self.new_objects = []
        for form in self.extra_forms:
            if not form.has_changed():
                continue
            # If someone has marked an add form for deletion, don't save the
            # object.
            if self.can_delete:
                raw_delete_value = form._raw_value(DELETION_FIELD_NAME)
                should_delete = form.fields[DELETION_FIELD_NAME].clean(raw_delete_value)
                if should_delete:
                    continue
            self.new_objects.append(self.save_new(form, commit=commit))
            if not commit:
                self.saved_forms.append(form)
        return self.new_objects

    def add_fields(self, form, index):
        """Add a hidden field for the object's primary key."""
        from django.db.models import AutoField, OneToOneField, ForeignKey
        self._pk_field = pk = self.model._meta.pk
        # If a pk isn't editable, then it won't be on the form, so we need to
        # add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason, auto_created pk fields and AutoField's editable attribute is
        # True, so check for that as well.
        def pk_is_not_editable(pk):
            return ((not pk.editable) or (pk.auto_created or isinstance(pk, AutoField))
                or (pk.rel and pk.rel.parent_link and pk_is_not_editable(pk.rel.to._meta.pk)))
        if pk_is_not_editable(pk) or pk.name not in form.fields:
            if form.is_bound:
                pk_value = form.instance.pk
            else:
                try:
                    if index is not None:
                        pk_value = self.get_queryset()[index].pk
                    else:
                        pk_value = None
                except IndexError:
                    pk_value = None
            # For a FK/O2O pk, the hidden field's choices come from the
            # related model's manager rather than our own.
            if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
                qs = pk.rel.to._default_manager.get_query_set()
            else:
                qs = self.model._default_manager.get_query_set()
            qs = qs.using(form.instance._state.db)
            form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=HiddenInput)
        super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
                         formset=BaseModelFormSet,
                         extra=1, can_delete=False, can_order=False,
                         max_num=None, fields=None, exclude=None):
    """
    Returns a FormSet class for the given Django model class.
    """
    # First derive the ModelForm for the model, then wrap it in a formset.
    model_form = modelform_factory(model, form=form, fields=fields,
                                   exclude=exclude,
                                   formfield_callback=formfield_callback)
    formset_class = formset_factory(model_form, formset,
                                    extra=extra, max_num=max_num,
                                    can_order=can_order,
                                    can_delete=can_delete)
    # The formset needs to know its model for pk handling and querysets.
    formset_class.model = model
    return formset_class
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
    """A formset for child objects related to a parent."""
    def __init__(self, data=None, files=None, instance=None,
                 save_as_new=False, prefix=None, queryset=None):
        from django.db.models.fields.related import RelatedObject
        if instance is None:
            # No parent supplied yet: use an unsaved instance of the related
            # model so the attribute accesses below still work.
            self.instance = self.fk.rel.to()
        else:
            self.instance = instance
        self.save_as_new = save_as_new
        # is there a better way to get the object descriptor?
        self.rel_name = RelatedObject(self.fk.rel.to, self.model, self.fk).get_accessor_name()
        if queryset is None:
            queryset = self.model._default_manager
        # Restrict the queryset to children pointing at this parent.
        qs = queryset.filter(**{self.fk.name: self.instance})
        super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
                                                queryset=qs)

    def initial_form_count(self):
        # "Save as new" treats every form as an add form, so no initial forms.
        if self.save_as_new:
            return 0
        return super(BaseInlineFormSet, self).initial_form_count()

    def _construct_form(self, i, **kwargs):
        form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
        if self.save_as_new:
            # Remove the primary key from the form's data, we are only
            # creating new instances
            form.data[form.add_prefix(self._pk_field.name)] = None

            # Remove the foreign key from the form's data
            form.data[form.add_prefix(self.fk.name)] = None

        # Set the fk value here so that the form can do its validation.
        setattr(form.instance, self.fk.get_attname(), self.instance.pk)
        return form

    #@classmethod
    def get_default_prefix(cls):
        # Derive the prefix from the parent-to-child accessor name.
        from django.db.models.fields.related import RelatedObject
        return RelatedObject(cls.fk.rel.to, cls.model, cls.fk).get_accessor_name().replace('+','')
    get_default_prefix = classmethod(get_default_prefix)

    def save_new(self, form, commit=True):
        """Saves a new child, wiring its foreign key to the parent first."""
        # Use commit=False so we can assign the parent key afterwards, then
        # save the object.
        obj = form.save(commit=False)
        pk_value = getattr(self.instance, self.fk.rel.field_name)
        setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
        if commit:
            obj.save()
        # form.save_m2m() can be called via the formset later on if commit=False
        if commit and hasattr(form, 'save_m2m'):
            form.save_m2m()
        return obj

    def add_fields(self, form, index):
        """Replaces the FK (or FK-pk) field with an InlineForeignKeyField
        pinned to the parent instance."""
        super(BaseInlineFormSet, self).add_fields(form, index)
        if self._pk_field == self.fk:
            # The FK *is* the primary key (e.g. a parent-link OneToOneField).
            name = self._pk_field.name
            kwargs = {'pk_field': True}
        else:
            # The foreign key field might not be on the form, so we poke at the
            # Model field to get the label, since we need that for error messages.
            name = self.fk.name
            kwargs = {
                'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
            }
            if self.fk.rel.field_name != self.fk.rel.to._meta.pk.name:
                kwargs['to_field'] = self.fk.rel.field_name

        form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)

        # Add the generated field to form._meta.fields if it's defined to make
        # sure validation isn't skipped on that field.
        if form._meta.fields:
            if isinstance(form._meta.fields, tuple):
                form._meta.fields = list(form._meta.fields)
            form._meta.fields.append(self.fk.name)

    def get_unique_error_message(self, unique_check):
        # The FK is implied by the parent, so drop it from the message.
        unique_check = [field for field in unique_check if field != self.fk.name]
        return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
    """
    Finds and returns the ForeignKey from model to parent if there is one
    (returns None if can_fail is True and no such field exists). If fk_name is
    provided, assume it is the name of the ForeignKey field. Unless can_fail is
    True, an exception is raised if there is no ForeignKey from model to
    parent_model.
    """
    # avoid circular import
    from django.db.models import ForeignKey
    opts = model._meta
    if fk_name:
        # An explicit field name was given: validate it really is a FK to
        # parent_model (or one of its ancestors).
        fks_to_parent = [f for f in opts.fields if f.name == fk_name]
        if len(fks_to_parent) == 1:
            fk = fks_to_parent[0]
            if not isinstance(fk, ForeignKey) or \
                    (fk.rel.to != parent_model and
                     fk.rel.to not in parent_model._meta.get_parent_list()):
                raise Exception("fk_name '%s' is not a ForeignKey to %s" % (fk_name, parent_model))
        elif len(fks_to_parent) == 0:
            raise Exception("%s has no field named '%s'" % (model, fk_name))
    else:
        # Try to discover what the ForeignKey from model to parent_model is
        fks_to_parent = [
            f for f in opts.fields
            if isinstance(f, ForeignKey)
            and (f.rel.to == parent_model
                or f.rel.to in parent_model._meta.get_parent_list())
        ]
        if len(fks_to_parent) == 1:
            fk = fks_to_parent[0]
        elif len(fks_to_parent) == 0:
            if can_fail:
                return
            raise Exception("%s has no ForeignKey to %s" % (model, parent_model))
        else:
            # Ambiguous: the caller must disambiguate with fk_name.
            raise Exception("%s has more than 1 ForeignKey to %s" % (model, parent_model))
    return fk
def inlineformset_factory(parent_model, model, form=ModelForm,
                          formset=BaseInlineFormSet, fk_name=None,
                          fields=None, exclude=None,
                          extra=3, can_order=False, can_delete=True, max_num=None,
                          formfield_callback=None):
    """
    Returns an ``InlineFormSet`` for the given kwargs.

    You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
    to ``parent_model``.
    """
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    # enforce a max_num=1 when the foreign key to the parent model is unique.
    if fk.unique:
        max_num = 1
    inline_formset = modelformset_factory(
        model,
        form=form,
        formfield_callback=formfield_callback,
        formset=formset,
        extra=extra,
        can_delete=can_delete,
        can_order=can_order,
        fields=fields,
        exclude=exclude,
        max_num=max_num,
    )
    # The formset needs the FK so it can pin children to the parent.
    inline_formset.fk = fk
    return inline_formset
# Fields #####################################################################
class InlineForeignKeyHiddenInput(HiddenInput):
    """Hidden input whose value never registers as changed: the inline FK is
    fixed by the parent instance, so user edits to it are meaningless."""
    def _has_changed(self, initial, data):
        return False
class InlineForeignKeyField(Field):
    """
    A basic integer field that deals with validating the given value to a
    given parent instance in an inline.
    """
    default_error_messages = {
        'invalid_choice': _(u'The inline foreign key did not match the parent instance primary key.'),
    }

    def __init__(self, parent_instance, *args, **kwargs):
        self.parent_instance = parent_instance
        self.pk_field = kwargs.pop("pk_field", False)
        self.to_field = kwargs.pop("to_field", None)
        if self.parent_instance is not None:
            # Seed the initial value from the parent so the hidden widget
            # round-trips the correct key.
            if self.to_field:
                kwargs["initial"] = getattr(self.parent_instance, self.to_field)
            else:
                kwargs["initial"] = self.parent_instance.pk
        kwargs["required"] = False
        kwargs["widget"] = InlineForeignKeyHiddenInput
        super(InlineForeignKeyField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Validates that the submitted value matches the parent's key and
        returns the parent instance (or None for an empty pk field)."""
        if value in EMPTY_VALUES:
            if self.pk_field:
                return None
            # if there is no value act as we did before.
            return self.parent_instance
        # ensure that we compare the values as equal types.
        if self.to_field:
            expected = getattr(self.parent_instance, self.to_field)
        else:
            expected = self.parent_instance.pk
        if force_unicode(value) != force_unicode(expected):
            raise ValidationError(self.error_messages['invalid_choice'])
        return self.parent_instance
class ModelChoiceIterator(object):
    """Iterable of (value, label) choice pairs backed by a field's queryset."""
    def __init__(self, field):
        self.field = field
        self.queryset = field.queryset

    def __iter__(self):
        empty = self.field.empty_label
        if empty is not None:
            # The blank choice comes first when an empty label is configured.
            yield (u"", empty)
        if self.field.cache_choices:
            # Build the cache once per field and replay it on later passes.
            cache = self.field.choice_cache
            if cache is None:
                cache = [self.choice(item) for item in self.queryset.all()]
                self.field.choice_cache = cache
            for pair in cache:
                yield pair
        else:
            for item in self.queryset.all():
                yield self.choice(item)

    def __len__(self):
        return len(self.queryset)

    def choice(self, obj):
        return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
    """A ChoiceField whose choices are a model QuerySet."""
    # This class is a subclass of ChoiceField for purity, but it doesn't
    # actually use any of ChoiceField's implementation.
    default_error_messages = {
        'invalid_choice': _(u'Select a valid choice. That choice is not one of'
                            u' the available choices.'),
    }

    def __init__(self, queryset, empty_label=u"---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text=None, to_field_name=None, *args, **kwargs):
        # A required field with an initial value gets no blank choice: the
        # user can only switch between actual objects.
        if required and (initial is not None):
            self.empty_label = None
        else:
            self.empty_label = empty_label
        self.cache_choices = cache_choices

        # Call Field instead of ChoiceField __init__() because we don't need
        # ChoiceField.__init__().
        Field.__init__(self, required, widget, label, initial, help_text,
                       *args, **kwargs)
        self.queryset = queryset
        self.choice_cache = None
        self.to_field_name = to_field_name

    def __deepcopy__(self, memo):
        result = super(ChoiceField, self).__deepcopy__(memo)
        # Need to force a new ModelChoiceIterator to be created, bug #11183
        # (re-running the queryset setter below rebuilds widget.choices).
        result.queryset = result.queryset
        return result

    def _get_queryset(self):
        return self._queryset

    def _set_queryset(self, queryset):
        # Keep the widget's choices pointed at the (new) queryset.
        self._queryset = queryset
        self.widget.choices = self.choices

    queryset = property(_get_queryset, _set_queryset)

    # this method will be used to create object labels by the QuerySetIterator.
    # Override it to customize the label.
    def label_from_instance(self, obj):
        """
        This method is used to convert objects into strings; it's used to
        generate the labels for the choices presented by this object. Subclasses
        can override this method to customize the display of the choices.
        """
        return smart_unicode(obj)

    def _get_choices(self):
        # If self._choices is set, then somebody must have manually set
        # the property self.choices. In this case, just return self._choices.
        if hasattr(self, '_choices'):
            return self._choices

        # Otherwise, execute the QuerySet in self.queryset to determine the
        # choices dynamically. Return a fresh ModelChoiceIterator that has not been
        # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
        # time _get_choices() is called (and, thus, each time self.choices is
        # accessed) so that we can ensure the QuerySet has not been consumed. This
        # construct might look complicated but it allows for lazy evaluation of
        # the queryset.
        return ModelChoiceIterator(self)

    choices = property(_get_choices, ChoiceField._set_choices)

    def prepare_value(self, value):
        # Model instances are rendered by their to_field (or pk) value.
        if hasattr(value, '_meta'):
            if self.to_field_name:
                return value.serializable_value(self.to_field_name)
            else:
                return value.pk
        return super(ModelChoiceField, self).prepare_value(value)

    def to_python(self, value):
        """Converts a submitted value into a model instance, or None if empty.

        Raises ValidationError('invalid_choice') when no matching object
        exists or the value cannot be coerced (ValueError from the lookup).
        """
        if value in EMPTY_VALUES:
            return None
        try:
            key = self.to_field_name or 'pk'
            value = self.queryset.get(**{key: value})
        except (ValueError, self.queryset.model.DoesNotExist):
            raise ValidationError(self.error_messages['invalid_choice'])
        return value

    def validate(self, value):
        # Skip ChoiceField.validate(): membership was already verified by
        # the queryset lookup in to_python().
        return Field.validate(self, value)
class ModelMultipleChoiceField(ModelChoiceField):
    """A MultipleChoiceField whose choices are a model QuerySet."""
    widget = SelectMultiple
    hidden_widget = MultipleHiddenInput
    default_error_messages = {
        'list': _(u'Enter a list of values.'),
        'invalid_choice': _(u'Select a valid choice. %s is not one of the'
                            u' available choices.'),
        'invalid_pk_value': _(u'"%s" is not a valid value for a primary key.')
    }

    def __init__(self, queryset, cache_choices=False, required=True,
                 widget=None, label=None, initial=None,
                 help_text=None, *args, **kwargs):
        # empty_label is meaningless for a multi-select, so pass None.
        super(ModelMultipleChoiceField, self).__init__(queryset, None,
            cache_choices, required, widget, label, initial, help_text,
            *args, **kwargs)

    def clean(self, value):
        """Validates the submitted pk list and returns the matching queryset."""
        if not value:
            if self.required:
                raise ValidationError(self.error_messages['required'])
            return []
        if not isinstance(value, (list, tuple)):
            raise ValidationError(self.error_messages['list'])
        # First pass: reject values that cannot even be coerced to a pk.
        for candidate in value:
            try:
                self.queryset.filter(pk=candidate)
            except ValueError:
                raise ValidationError(
                    self.error_messages['invalid_pk_value'] % candidate)
        qs = self.queryset.filter(pk__in=value)
        # Second pass: every submitted pk must correspond to a real object.
        found = set(force_unicode(obj.pk) for obj in qs)
        for candidate in value:
            if force_unicode(candidate) not in found:
                raise ValidationError(
                    self.error_messages['invalid_choice'] % candidate)
        # Since this overrides the inherited ModelChoiceField.clean
        # we run custom validators here
        self.run_validators(value)
        return qs

    def prepare_value(self, value):
        if hasattr(value, '__iter__'):
            return [super(ModelMultipleChoiceField, self).prepare_value(v)
                    for v in value]
        return super(ModelMultipleChoiceField, self).prepare_value(value)
|
crichardson17/starburst_atlas | refs/heads/master | SFH_comparison/SFH_plotter_IR.py | 1 | ############################################################
########## Plotting File for SFH comparison Plots ##########
################## Data read from Cloudy ###################
################ Helen Meskhidze, Fall 2015 ################
#################### Elon University #######################
#------------------------------------------------------------------------------------------------------
'''
The inputs this code takes are the peaks files exported by my peaksreader.py
This code outputs IR SFH comparison plots, saved to the working directory
'''
#------------------------------------------------------------------------------------------------------
#Packages importing
import csv
import matplotlib.pyplot as plt
from numpy import *
import scipy.interpolate
import math
from pylab import *
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import matplotlib.patches as patches
from matplotlib.path import Path
import os
# ---------------------------------------------------
# Series colors: red for Padova tracks, black for Geneva tracks.
color1 = "#e50000" #red
color2 = "#000000" #black
plt.subplots_adjust(wspace=0, hspace=1) #remove space between plots
#subplot routine
def add_sub_plot(sub_num, desiredline):
    """Draw one panel of the 4x4 grid: the four SFH model curves for the
    emission line at index `desiredline`, in subplot `sub_num` (1-based).

    Relies on module globals: xvals, peakspadcont, peakspadinst,
    peaksgencont, peaksgeninst, headers, color1, color2.
    """
    plt.subplot(4, 4, sub_num)
    # (data, linestyle-or-None, color, legend label) for each model track;
    # None means matplotlib's default solid line, as in the original calls.
    series = (
        (peakspadcont, None,     color1, "Padova Continuous"),
        (peakspadinst, 'dotted', color1, "Padova Instantaneous"),
        (peaksgencont, None,     color2, "Geneva Continuous"),
        (peaksgeninst, 'dotted', color2, "Geneva Instantaneous"),
    )
    for data, style, col, lab in series:
        plt.scatter(xvals, data[desiredline], c=col, s=8)
        if style is None:
            plt.plot(xvals, data[desiredline], c=col, label=lab)
        else:
            plt.plot(xvals, data[desiredline], c=col, linestyle=style, label=lab)

    plt.xlim(min(xvals), max(xvals))
    plt.ylim(0, 3.5)
    plt.xticks(arange(0, 8, 1), fontsize=6)
    plt.yticks(arange(0, 3.5, .5), fontsize=6)

    # Common per-row decoration (rows are 1-4, 5-8, 9-12).
    if 1 <= sub_num <= 12:
        plt.tick_params(labelleft='off')
        if sub_num <= 4:
            plt.tick_params(labelbottom='on')
        plt.xlabel('Age (Myr)', fontsize=6)
        plt.annotate(headers[desiredline], xy=(0.1, 0.05),
                     xytext=(0.1, 0.05), fontsize=6)

    # Leftmost panels get the y label and visible y ticks.
    if sub_num in (1, 5, 9):
        if sub_num != 1:
            plt.xlabel('Age (Myr)', fontsize=6)
        plt.ylabel('log($W _{\lambda}$)', fontsize=6)
        plt.tick_params(labelleft='on')

    # Rightmost panels get one extra x tick.
    if sub_num in (4, 8, 12):
        plt.xticks(arange(0, 9, 1), fontsize=6)
    if sub_num == 6:
        figtext(.5, .95, 'IR Emission Lines', fontsize=8, ha='center')
    if sub_num in (1, 5, 9):
        plt.legend(bbox_to_anchor=(0., 1.2, 4., 0), loc=1, ncol=4,
                   mode="expand", prop={'size': 6}, borderaxespad=0.)
# ---------------------------------------------------
numFiles = 5
gridFiles = [None]*numFiles
emissionFiles = [None]*numFiles
os.chdir("./data")
#input files
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_0"):
inputfile0 = file
print file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_2"):
inputfile1 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_4"):
inputfile2 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_5"):
inputfile3 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_6"):
inputfile4 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_cont_8"):
inputfile20 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_0"):
inputfile5 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_2"):
inputfile6 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_4"):
inputfile7 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_5"):
inputfile8 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_6"):
inputfile9 = file
for file in os.listdir('./'):
if file.endswith("peaks_Geneva_inst_8"):
inputfile21 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_0"):
inputfile10 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_2"):
inputfile11 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_4"):
inputfile12 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_5"):
inputfile13 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_6"):
inputfile14 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_inst_8"):
inputfile22 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_0"):
inputfile15 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_2"):
inputfile16 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_4"):
inputfile17 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_5"):
inputfile18 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_6"):
inputfile19 = file
for file in os.listdir('./'):
if file.endswith("peaks_Padova_cont_8"):
inputfile23 = file
# importing headers file
for file in os.listdir('../'):
if file.endswith(".txt"):
headers = file
# ---------------------------------------------------
lines0 = [];
with open(inputfile0, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines0.append(row);
lines0 = asarray(lines0)
lines1 = [];
with open(inputfile1, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines1.append(row);
lines1 = asarray(lines1)
lines2 = [];
with open(inputfile2, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines2.append(row);
lines2 = asarray(lines2)
lines3 = [];
with open(inputfile3, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines3.append(row);
lines3 = asarray(lines3)
lines4 = [];
with open(inputfile4, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines4.append(row);
lines4 = asarray(lines4)
lines5 = [];
with open(inputfile5, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines5.append(row);
lines5 = asarray(lines5)
lines6 = [];
with open(inputfile6, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines6.append(row);
lines6 = asarray(lines6)
lines7 = [];
with open(inputfile7, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines7.append(row);
lines7 = asarray(lines7)
lines8 = [];
with open(inputfile8, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines8.append(row);
lines8 = asarray(lines8)
lines9 = [];
with open(inputfile9, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines9.append(row);
lines9 = asarray(lines9)
lines10 = [];
with open(inputfile10, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines10.append(row);
lines10 = asarray(lines10)
lines11 = [];
with open(inputfile11, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines11.append(row);
lines11 = asarray(lines11)
lines12 = [];
with open(inputfile12, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines12.append(row);
lines12 = asarray(lines12)
lines13 = [];
with open(inputfile13, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines13.append(row);
lines13 = asarray(lines13)
lines14 = [];
with open(inputfile14, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines14.append(row);
lines14 = asarray(lines14)
lines15 = [];
with open(inputfile15, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines15.append(row);
lines15 = asarray(lines15)
lines16 = [];
with open(inputfile16, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines16.append(row);
lines16 = asarray(lines16)
lines17 = [];
with open(inputfile17, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines17.append(row);
lines17 = asarray(lines17)
lines18 = [];
with open(inputfile18, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines18.append(row);
lines18 = asarray(lines18)
lines19 = [];
with open(inputfile19, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines19.append(row);
lines19 = asarray(lines19)
lines20 = [];
with open(inputfile20, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines20.append(row);
lines20 = asarray(lines20)
lines21 = [];
with open(inputfile21, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines21.append(row);
lines21 = asarray(lines21)
lines22 = [];
with open(inputfile22, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines22.append(row);
lines22 = asarray(lines22)
lines23 = [];
with open(inputfile23, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
for row in csvReader:
lines23.append(row);
lines23 = asarray(lines23)
dataEmissionlines = [];
os.chdir("../")
with open(headers, 'rb') as f:
csvReader = csv.reader(f,delimiter='\t')
headers = csvReader.next()
for row in csvReader:
dataEmissionlines.append(row);
dataEmissionlines = asarray(dataEmissionlines)
print "import files complete"
# ---------------------------------------------------
#create an array full of the peak values. the columns represent the times (0,2,4,5,6)
peakspadcont = zeros((len(lines6),6))
peakspadinst = zeros((len(lines6),6))
peaksgencont = zeros((len(lines6),6))
peaksgeninst = zeros((len(lines6),6))
peaksgencont[:,0] = lines0[:,0]
peaksgencont[:,1] = lines1[:,0]
peaksgencont[:,2] = lines2[:,0]
peaksgencont[:,3] = lines3[:,0]
peaksgencont[:,4] = lines4[:,0]
peaksgencont[:,5] = lines20[:,0]
peaksgeninst[:,0] = lines5[:,0]
peaksgeninst[:,1] = lines6[:,0]
peaksgeninst[:,2] = lines7[:,0]
peaksgeninst[:,3] = lines8[:,0]
peaksgeninst[:,4] = lines9[:,0]
peaksgeninst[:,5] = lines21[:,0]
peakspadinst[:,0] = lines10[:,0]
peakspadinst[:,1] = lines11[:,0]
peakspadinst[:,2] = lines12[:,0]
peakspadinst[:,3] = lines13[:,0]
peakspadinst[:,4] = lines14[:,0]
peakspadinst[:,5] = lines22[:,0]
peakspadcont[:,0] = lines15[:,0]
peakspadcont[:,1] = lines16[:,0]
peakspadcont[:,2] = lines17[:,0]
peakspadcont[:,3] = lines18[:,0]
peakspadcont[:,4] = lines19[:,0]
peakspadcont[:,5] = lines23[:,0]
headers = headers[1:] #the first is #linelist so let's make sure this will work
xvals = [0,2,4,5,6,8]
print "data arraged"
# ---------------------------------------------------
#below is where you should specify which lines you'd like to plot
desired = [73,75,76,78,
84,86,87,88]
plt.clf()
for i in range(8):
add_sub_plot(i+1,desired[i])
plt.savefig('SFH_Comp_IR.pdf')
print "plot saved and complete"
|
Limezero/libreoffice | refs/heads/master | helpcontent2/wiki-to-help/hhc.py | 2 | import platform, os
from executor import Executor
class HHC(object):
    """ Class for execution of the HTML Help Compiler (hhc.exe), either
    natively on Windows or through Wine elsewhere. """
    hhcexe = "c:\\htmlhelp\\hhc.exe"

    def __init__(self, **args):
        """
        @args Arguments for Executor.__init__()
        """
        self.args = args

    def exWindows(self, source):
        """ Private. Compile @source calling HHC natively under Windows.
        Returns the Executor result. """
        cmd = [self.hhcexe, os.path.join(source, "htmlhelp.hhp")]
        return Executor(**self.args).executor(*tuple(cmd))

    def exWine(self, source):
        """ Private. Compile @source calling HHC via Wine (mapping @source
        to drive j:). Returns the Executor result. """
        wine = Wine(source, "j:", self.args)
        result = wine(self.hhcexe, "j:\\htmlhelp.hhp")
        # Drop the Wine mapping immediately so the drive letter is restored.
        del wine
        return result

    def __call__(self, source):
        """
        Converts @source with HHC and returns the execution result.
        @source path to input directory that contains htmlhelp.hhp
        (Bug fix: the result of exWindows/exWine was previously discarded.)
        """
        if platform.system() == "Windows":
            return self.exWindows(source)
        return self.exWine(source)
class Wine(object):
    # TODO: this should be a singleton
    def __init__(self, workingDir, driveletter, args=None):
        """
        Set up the wine environment so that @workingDir is accessible to
        wine programs as @driveletter. Any pre-existing mapping for the
        letter is backed up and restored on deletion.
        @args Arguments for Executor as dict (**args)
        E.g. Wine("/tmp/dir","j:")
        """
        # Bug fixes: the module never imports shutil (the original raised
        # NameError on shutil.move), and args={} was a shared mutable default.
        import shutil
        homedir = os.path.expanduser('~')
        wineprefix = os.path.join(homedir, ".wine")
        drive = os.path.join(wineprefix, "dosdevices", driveletter)
        if os.path.lexists(drive):
            # Preserve the current mapping; __del__ restores it.
            self.driveBak = drive + ".bak"
            shutil.move(drive, self.driveBak)
        os.symlink(workingDir, drive)
        self.drive = drive
        self.executor = Executor(**(args or {}))

    def ex(self, *cmd):
        """ Execute something with wine; returns the Executor result. """
        cmd = ["/usr/bin/wine"] + list(cmd)
        return self.executor(*tuple(cmd))

    def __call__(self, *cmd):
        return self.ex(*cmd)

    def __del__(self):
        import shutil  # module header lacks the import; keep it local
        os.remove(self.drive)
        if hasattr(self, 'driveBak'):
            shutil.move(self.driveBak, self.drive)
|
yghannam/teuthology | refs/heads/master | teuthology/task/swift.py | 10 | """
Test Swfit api.
"""
from cStringIO import StringIO
from configobj import ConfigObj
import base64
import contextlib
import logging
import os
from teuthology import misc as teuthology
from teuthology import contextutil
from ..config import config as teuth_config
from ..orchestra import run
from ..orchestra.connection import split_user
log = logging.getLogger(__name__)
@contextlib.contextmanager
def download(ctx, config):
    """Clone the swift functional-test suite onto each client; remove it on exit."""
    testdir = teuthology.get_testdir(ctx)
    assert isinstance(config, list)
    log.info('Downloading swift...')
    dest = '{tdir}/swift'.format(tdir=testdir)
    for client in config:
        ctx.cluster.only(client).run(
            args=['git', 'clone',
                  teuth_config.ceph_git_base_url + 'swift.git',
                  dest],
        )
    try:
        yield
    finally:
        log.info('Removing swift...')
        testdir = teuthology.get_testdir(ctx)
        for client in config:
            ctx.cluster.only(client).run(
                args=['rm', '-rf', '{tdir}/swift'.format(tdir=testdir)],
            )
def _config_user(testswift_conf, account, user, suffix):
"""
Configure a swift user
:param account: Swift account
:param user: User name
:param suffix: user name and email suffixes.
"""
testswift_conf['func_test'].setdefault('account{s}'.format(s=suffix), account)
testswift_conf['func_test'].setdefault('username{s}'.format(s=suffix), user)
testswift_conf['func_test'].setdefault('email{s}'.format(s=suffix), '{account}+test@test.test'.format(account=account))
testswift_conf['func_test'].setdefault('display_name{s}'.format(s=suffix), 'Mr. {account} {user}'.format(account=account, user=user))
testswift_conf['func_test'].setdefault('password{s}'.format(s=suffix), base64.b64encode(os.urandom(40)))
@contextlib.contextmanager
def create_users(ctx, config):
    """
    Create rgw users to interact with the swift interface.

    Creates two swift subusers per client ('foo' and 'bar', stored under
    config-key suffixes '' and '2' respectively) via radosgw-admin, then
    removes them (and their data) on teardown.
    """
    assert isinstance(config, dict)
    log.info('Creating rgw users...')
    testdir = teuthology.get_testdir(ctx)
    # Maps the testswift.conf key suffix -> user name.
    users = {'': 'foo', '2': 'bar'}
    for client in config['clients']:
        testswift_conf = config['testswift_conf'][client]
        for suffix, user in users.iteritems():
            # Fill in account/username/email/password defaults first so the
            # radosgw-admin invocation below can read them back.
            _config_user(testswift_conf, '{user}.{client}'.format(user=user, client=client), user, suffix)
            ctx.cluster.only(client).run(
                args=[
                    'adjust-ulimits',
                    'ceph-coverage',
                    '{tdir}/archive/coverage'.format(tdir=testdir),
                    'radosgw-admin',
                    '-n', client,
                    'user', 'create',
                    '--subuser', '{account}:{user}'.format(account=testswift_conf['func_test']['account{s}'.format(s=suffix)],user=user),
                    '--display-name', testswift_conf['func_test']['display_name{s}'.format(s=suffix)],
                    '--secret', testswift_conf['func_test']['password{s}'.format(s=suffix)],
                    '--email', testswift_conf['func_test']['email{s}'.format(s=suffix)],
                    '--key-type', 'swift',
                    '--access', 'full',
                ],
            )
    try:
        yield
    finally:
        # Teardown: delete every created uid together with its stored data.
        for client in config['clients']:
            for user in users.itervalues():
                uid = '{user}.{client}'.format(user=user, client=client)
                ctx.cluster.only(client).run(
                    args=[
                        'adjust-ulimits',
                        'ceph-coverage',
                        '{tdir}/archive/coverage'.format(tdir=testdir),
                        'radosgw-admin',
                        '-n', client,
                        'user', 'rm',
                        '--uid', uid,
                        '--purge-data',
                    ],
                )
@contextlib.contextmanager
def configure(ctx, config):
    """
    Configure rgw and Swift

    Resolves the auth host for each client (the rgw_server's hostname, or
    localhost), bootstraps the swift test suite's virtualenv on the remote,
    and writes the per-client testswift config into the archive directory.
    """
    assert isinstance(config, dict)
    log.info('Configuring testswift...')
    testdir = teuthology.get_testdir(ctx)
    for client, properties in config['clients'].iteritems():
        log.info('client={c}'.format(c=client))
        log.info('config={c}'.format(c=config))
        testswift_conf = config['testswift_conf'][client]
        if properties is not None and 'rgw_server' in properties:
            host = None
            # Find the target whose role list contains the configured
            # rgw_server role; its hostname becomes auth_host.
            for target, roles in zip(ctx.config['targets'].iterkeys(), ctx.config['roles']):
                log.info('roles: ' + str(roles))
                log.info('target: ' + str(target))
                if properties['rgw_server'] in roles:
                    _, host = split_user(target)
            assert host is not None, "Invalid client specified as the rgw_server"
            testswift_conf['func_test']['auth_host'] = host
        else:
            testswift_conf['func_test']['auth_host'] = 'localhost'
        log.info(client)
        (remote,) = ctx.cluster.only(client).remotes.keys()
        # Build the suite's virtualenv in the checkout made by download().
        remote.run(
            args=[
                'cd',
                '{tdir}/swift'.format(tdir=testdir),
                run.Raw('&&'),
                './bootstrap',
            ],
        )
        # Serialize the ConfigObj and ship it to the remote archive dir,
        # where run_tests() points SWIFT_TEST_CONFIG_FILE at it.
        conf_fp = StringIO()
        testswift_conf.write(conf_fp)
        teuthology.write_file(
            remote=remote,
            path='{tdir}/archive/testswift.{client}.conf'.format(tdir=testdir, client=client),
            data=conf_fp.getvalue(),
        )
    yield
@contextlib.contextmanager
def run_tests(ctx, config):
    """
    Run an individual Swift test.
    """
    assert isinstance(config, dict)
    testdir = teuthology.get_testdir(ctx)
    for client, client_config in config.iteritems():
        # Point the swift functional suite at the config written by
        # configure(), and run nosetests from the suite's virtualenv.
        nose_args = [
            'SWIFT_TEST_CONFIG_FILE={tdir}/archive/testswift.{client}.conf'.format(tdir=testdir, client=client),
            '{tdir}/swift/virtualenv/bin/nosetests'.format(tdir=testdir),
            '-w', '{tdir}/swift/test/functional'.format(tdir=testdir),
            '-v',
            '-a', '!fails_on_rgw',
        ]
        if client_config is not None and 'extra_args' in client_config:
            nose_args.extend(client_config['extra_args'])
        ctx.cluster.only(client).run(args=nose_args)
    yield
@contextlib.contextmanager
def task(ctx, config):
    """
    Run the testswift suite against rgw.

    To run all tests on all clients::

        tasks:
        - ceph:
        - rgw:
        - testswift:

    To restrict testing to particular clients::

        tasks:
        - ceph:
        - rgw: [client.0]
        - testswift: [client.0]

    To run against a server on client.1::

        tasks:
        - ceph:
        - rgw: [client.1]
        - testswift:
            client.0:
              rgw_server: client.1

    To pass extra arguments to nose (e.g. to run a certain test)::

        tasks:
        - ceph:
        - rgw: [client.0]
        - testswift:
            client.0:
              extra_args: ['test.functional.tests:TestFileUTF8', '-m', 'testCopy']
            client.1:
              extra_args: ['--exclude', 'TestFile']
    """
    assert config is None or isinstance(config, list) \
        or isinstance(config, dict), \
        "task testswift only supports a list or dictionary for configuration"
    # Default: run the suite on every client role in the cluster.
    all_clients = ['client.{id}'.format(id=id_)
                   for id_ in teuthology.all_roles_of_type(ctx.cluster, 'client')]
    if config is None:
        config = all_clients
    if isinstance(config, list):
        # Normalize the list form into {client: None} (no per-client options).
        config = dict.fromkeys(config)
    clients = config.keys()
    log.info('clients={c}'.format(c=clients))
    # Seed one ConfigObj per client with the fixed auth endpoint settings;
    # create_users()/configure() fill in the rest.
    testswift_conf = {}
    for client in clients:
        testswift_conf[client] = ConfigObj(
            indent_type='',
            infile={
                'func_test':
                    {
                    'auth_port'      : 7280,
                    'auth_ssl' : 'no',
                    'auth_prefix' : '/auth/',
                    },
                }
            )
    # Nested context managers tear down in reverse order after run_tests.
    with contextutil.nested(
        lambda: download(ctx=ctx, config=clients),
        lambda: create_users(ctx=ctx, config=dict(
                clients=clients,
                testswift_conf=testswift_conf,
                )),
        lambda: configure(ctx=ctx, config=dict(
                clients=config,
                testswift_conf=testswift_conf,
                )),
        lambda: run_tests(ctx=ctx, config=config),
        ):
        pass
    yield
|
zhumingliang1209/Ardupilot | refs/heads/master | ardupilot/Tools/autotest/apm_unit_tests/dev/arducopter_climb_descend.py | 250 | import arducopter
def unit_test(mavproxy, mav):
    '''A scripted flight plan'''
    # Each step runs only if every previous one succeeded (short-circuit),
    # preserving the original calibrate -> arm -> takeoff -> climb -> descend order.
    flight_ok = (
        arducopter.calibrate_level(mavproxy, mav) and
        arducopter.arm_motors(mavproxy, mav) and
        arducopter.takeoff(mavproxy, mav, alt_min=30, takeoff_throttle=1510) and
        arducopter.change_alt(mavproxy, mav, alt_min=60) and
        arducopter.change_alt(mavproxy, mav, alt_min=20)
    )
    return bool(flight_ok)
|
pwong-mapr/private-hue | refs/heads/HUE-1096-abe | desktop/core/ext-py/Django-1.4.5/tests/regressiontests/context_processors/views.py | 200 | from django.core import context_processors
from django.shortcuts import render_to_response
from django.template.context import RequestContext
def request_processor(request):
    """Render request_attrs.html with only the ``request`` context processor active."""
    context = RequestContext(request, {}, processors=[context_processors.request])
    return render_to_response('context_processors/request_attrs.html', context)
|
bristy/login_app_flask | refs/heads/master | env/lib/python2.7/site-packages/werkzeug/debug/__init__.py | 280 | # -*- coding: utf-8 -*-
"""
werkzeug.debug
~~~~~~~~~~~~~~
WSGI application traceback debugger.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import json
import mimetypes
from os.path import join, dirname, basename, isfile
from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response
from werkzeug.debug.tbtools import get_current_traceback, render_console_html
from werkzeug.debug.console import Console
from werkzeug.security import gen_salt
#: import this here because it once was documented as being available
#: from this module. In case there are users left ...
from werkzeug.debug.repr import debug_repr
class _ConsoleFrame(object):
    """Helper class so that we can reuse the frame console code for the
    standalone console.
    """

    def __init__(self, namespace):
        # id 0 marks the synthetic frame that backs the standalone console.
        self.id = 0
        self.console = Console(namespace)
class DebuggedApplication(object):
    """Enables debugging support for a given application::
        from werkzeug.debug import DebuggedApplication
        from myapp import app
        app = DebuggedApplication(app, evalex=True)
    The `evalex` keyword argument allows evaluating expressions in a
    traceback's frame context.
    .. versionadded:: 0.9
       The `lodgeit_url` parameter was deprecated.
    :param app: the WSGI application to run debugged.
    :param evalex: enable exception evaluation feature (interactive
                   debugging). This requires a non-forking server.
    :param request_key: The key that points to the request object in ths
                        environment. This parameter is ignored in current
                        versions.
    :param console_path: the URL for a general purpose console.
    :param console_init_func: the function that is executed before starting
                              the general purpose console. The return value
                              is used as initial namespace.
    :param show_hidden_frames: by default hidden traceback frames are skipped.
                               You can show them by setting this parameter
                               to `True`.
    """
    # this class is public
    __module__ = 'werkzeug'
    def __init__(self, app, evalex=False, request_key='werkzeug.request',
                 console_path='/console', console_init_func=None,
                 show_hidden_frames=False, lodgeit_url=None):
        if lodgeit_url is not None:
            from warnings import warn
            warn(DeprecationWarning('Werkzeug now pastes into gists.'))
        if not console_init_func:
            console_init_func = dict
        self.app = app
        self.evalex = evalex
        # Frames and tracebacks are kept alive by id so the in-browser
        # debugger can evaluate expressions in them on later requests.
        self.frames = {}
        self.tracebacks = {}
        self.request_key = request_key
        self.console_path = console_path
        self.console_init_func = console_init_func
        self.show_hidden_frames = show_hidden_frames
        # Per-process random token; debugger commands must echo it back
        # before any code-executing endpoint is served.
        self.secret = gen_salt(20)
    def debug_application(self, environ, start_response):
        """Run the application and conserve the traceback frames."""
        app_iter = None
        try:
            app_iter = self.app(environ, start_response)
            for item in app_iter:
                yield item
            if hasattr(app_iter, 'close'):
                app_iter.close()
        except Exception:
            if hasattr(app_iter, 'close'):
                app_iter.close()
            traceback = get_current_traceback(skip=1, show_hidden_frames=
                                              self.show_hidden_frames,
                                              ignore_system_exceptions=True)
            # Remember every frame so the interactive debugger can find it.
            for frame in traceback.frames:
                self.frames[frame.id] = frame
            self.tracebacks[traceback.id] = traceback
            try:
                start_response('500 INTERNAL SERVER ERROR', [
                    ('Content-Type', 'text/html; charset=utf-8'),
                    # Disable Chrome's XSS protection, the debug
                    # output can cause false-positives.
                    ('X-XSS-Protection', '0'),
                ])
            except Exception:
                # if we end up here there has been output but an error
                # occurred.  in that situation we can do nothing fancy any
                # more, better log something into the error log and fall
                # back gracefully.
                environ['wsgi.errors'].write(
                    'Debugging middleware caught exception in streamed '
                    'response at a point where response headers were already '
                    'sent.\n')
            else:
                yield traceback.render_full(evalex=self.evalex,
                                            secret=self.secret) \
                    .encode('utf-8', 'replace')
            traceback.log(environ['wsgi.errors'])
    def execute_command(self, request, command, frame):
        """Execute a command in a console."""
        return Response(frame.console.eval(command), mimetype='text/html')
    def display_console(self, request):
        """Display a standalone shell."""
        if 0 not in self.frames:
            self.frames[0] = _ConsoleFrame(self.console_init_func())
        return Response(render_console_html(secret=self.secret),
                        mimetype='text/html')
    def paste_traceback(self, request, traceback):
        """Paste the traceback and return a JSON response."""
        rv = traceback.paste()
        return Response(json.dumps(rv), mimetype='application/json')
    def get_source(self, request, frame):
        """Render the source viewer."""
        return Response(frame.render_source(), mimetype='text/html')
    def get_resource(self, request, filename):
        """Return a static resource from the shared folder."""
        # basename() prevents path traversal out of the shared directory.
        filename = join(dirname(__file__), 'shared', basename(filename))
        if isfile(filename):
            mimetype = mimetypes.guess_type(filename)[0] \
                or 'application/octet-stream'
            f = open(filename, 'rb')
            try:
                return Response(f.read(), mimetype=mimetype)
            finally:
                f.close()
        return Response('Not Found', status=404)
    def __call__(self, environ, start_response):
        """Dispatch the requests."""
        # important: don't ever access a function here that reads the incoming
        # form data!  Otherwise the application won't have access to that data
        # any more!
        request = Request(environ)
        response = self.debug_application
        if request.args.get('__debugger__') == 'yes':
            cmd = request.args.get('cmd')
            arg = request.args.get('f')
            secret = request.args.get('s')
            traceback = self.tracebacks.get(request.args.get('tb', type=int))
            frame = self.frames.get(request.args.get('frm', type=int))
            # Everything except static resources requires the matching secret.
            if cmd == 'resource' and arg:
                response = self.get_resource(request, arg)
            elif cmd == 'paste' and traceback is not None and \
                 secret == self.secret:
                response = self.paste_traceback(request, traceback)
            elif cmd == 'source' and frame and self.secret == secret:
                response = self.get_source(request, frame)
            elif self.evalex and cmd is not None and frame is not None and \
                 self.secret == secret:
                response = self.execute_command(request, cmd, frame)
        elif self.evalex and self.console_path is not None and \
             request.path == self.console_path:
            response = self.display_console(request)
        return response(environ, start_response)
|
iamutkarshtiwari/sympy | refs/heads/master | sympy/plotting/pygletplot/plot_rotation.py | 94 | from __future__ import print_function, division
try:
from pyglet.gl.gl import c_float
except ImportError:
pass
from pyglet.gl import *
from math import sqrt as _sqrt, acos as _acos
def cross(a, b):
    """Cross product of two 3-vectors, returned as a tuple."""
    ax, ay, az = a[0], a[1], a[2]
    bx, by, bz = b[0], b[1], b[2]
    return (ay * bz - az * by,
            az * bx - ax * bz,
            ax * by - ay * bx)
def dot(a, b):
    """Dot product of two 3-vectors (first three components only)."""
    return sum(a[i] * b[i] for i in range(3))
def mag(a):
    """Euclidean length of a 3-vector."""
    x, y, z = a[0], a[1], a[2]
    return _sqrt(x * x + y * y + z * z)
def norm(a):
    """Return *a* scaled to unit length (length computed inline)."""
    length = _sqrt(a[0] ** 2 + a[1] ** 2 + a[2] ** 2)
    return (a[0] / length, a[1] / length, a[2] / length)
def get_sphere_mapping(x, y, width, height):
    """Map window coordinates onto the virtual-trackball sphere.

    Points whose projection falls outside the ball are pushed back onto
    the rim (the unit circle at z == 0).
    """
    # Clamp the cursor position to the window rectangle.
    x = min(max(x, 0), width)
    y = min(max(y, 0), height)
    sr = _sqrt((width / 2) ** 2 + (height / 2) ** 2)
    sx = (x - width / 2) / sr
    sy = (y - height / 2) / sr
    sz = 1.0 - sx ** 2 - sy ** 2
    if sz > 0.0:
        # Inside the ball: lift the point onto the sphere surface.
        return (sx, sy, _sqrt(sz))
    # On or beyond the rim: normalize with z forced to zero.
    return norm((sx, sy, 0))
rad2deg = 180.0 / 3.141592  # radians -> degrees (NOTE: truncated pi literal, not math.pi)
def get_spherical_rotatation(p1, p2, width, height, theta_multiplier):
    # Trackball rotation between two mouse positions, returned as a 4x4
    # OpenGL model-view matrix (or None for negligible movement).
    # NOTE(review): the "rotatation" typo is preserved -- callers use this name.
    v1 = get_sphere_mapping(p1[0], p1[1], width, height)
    v2 = get_sphere_mapping(p2[0], p2[1], width, height)
    # Clamp the cosine into [-1, 1] so acos() cannot hit a domain error.
    d = min(max([dot(v1, v2), -1]), 1)
    if abs(d - 1.0) < 0.000001:
        # Vectors (nearly) coincide: no rotation.
        return None
    raxis = norm( cross(v1, v2) )
    rtheta = theta_multiplier * rad2deg * _acos(d)
    # Let OpenGL build the rotation matrix on a scratch model-view stack,
    # then read it back and restore the stack.
    glPushMatrix()
    glLoadIdentity()
    glRotatef(rtheta, *raxis)
    mat = (c_float*16)()
    glGetFloatv(GL_MODELVIEW_MATRIX, mat)
    glPopMatrix()
    return mat
|
weese/seqan | refs/heads/master | misc/seqan_instrumentation/bin/classes/sync.py | 6 | import os
import subprocess
class Sync(object):
    """Mirrors a directory tree with robocopy on Windows, rsync elsewhere."""
    MAX_REVISION_LENGTH = 16

    def __init__(self, bin_dir):
        # Directory containing the bundled robocopy.exe (used on Windows only).
        self.bin_dir = bin_dir

    def make_comparison_copy(self, from_dir, to_dir, excluded_resources):
        """Mirror from_dir into to_dir, honouring the platform exclusion list."""
        if os.name == "nt":
            cmd = ([self.bin_dir + "/robocopy.exe", from_dir, to_dir, "/w:1", "/r:1", "/MIR"]
                   + excluded_resources.get_exclude_list_win32_robocopy())
        else:
            cmd = (["rsync", "-auv", "--delete", "--delete-excluded"]
                   + excluded_resources.get_exclude_list_other_rsync()
                   + [from_dir + "/", to_dir])
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        p.communicate()
|
ianmiell/shutit-test | refs/heads/master | test/bash_tests/2/test2.py | 1 | from shutit_module import ShutItModule
class test2(ShutItModule):

    def build(self, shutit):
        """Clone the git-101 tutorial and drive it, stepping with ^] presses."""
        shutit.send('rm -rf git-101-tutorial')
        shutit.send('git clone https://github.com/ianmiell/git-101-tutorial')
        shutit.send('pushd git-101-tutorial')
        orig_expect = shutit.get_default_shutit_pexpect_session_expect()
        shutit.set_default_shutit_pexpect_session_expect(':.*:.*# ')
        shutit.send('./run.sh -l DEBUG', check_exit=False)
        # Seven group-separator (^], \x1D) presses step through the tutorial...
        for _ in range(7):
            shutit.send('\x1D', nonewline=True, check_exit=False)
        # ...and the eighth returns control, expecting the module prompt.
        shutit.send('\x1D', nonewline=True, check_exit=False, expect='test2.test2')
        return True
def module():
    """Factory used by ShutIt to instantiate this test module."""
    return test2(
        'bash_tests.test2.test2',
        2010087968.0125012500112535246,
        description='',
        maintainer='',
        delivery_methods=['bash'],
        depends=['shutit.tk.setup'],
    )
|
AnderEnder/ansible-modules-extras | refs/heads/devel | notification/mail.py | 44 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
author: "Dag Wieers (@dagwieers)"
module: mail
short_description: Send an email
description:
- This module is useful for sending emails from playbooks.
- One may wonder why automate sending emails? In complex environments
there are from time to time processes that cannot be automated, either
because you lack the authority to make it so, or because not everyone
agrees to a common approach.
- If you cannot automate a specific step, but the step is non-blocking,
sending out an email to the responsible party to make him perform his
part of the bargain is an elegant way to put the responsibility in
someone else's lap.
- Of course sending out a mail can be equally useful as a way to notify
one or more people in a team that a specific action has been
(successfully) taken.
version_added: "0.8"
options:
from:
description:
- The email-address the mail is sent from. May contain address and phrase.
default: root
required: false
to:
description:
- The email-address(es) the mail is being sent to. This is
a comma-separated list, which may contain address and phrase portions.
default: root
required: false
cc:
description:
- The email-address(es) the mail is being copied to. This is
a comma-separated list, which may contain address and phrase portions.
required: false
bcc:
description:
- The email-address(es) the mail is being 'blind' copied to. This is
a comma-separated list, which may contain address and phrase portions.
required: false
subject:
description:
- The subject of the email being sent.
required: true
body:
description:
- The body of the email being sent.
default: $subject
required: false
username:
description:
- If SMTP requires username
default: null
required: false
version_added: "1.9"
password:
description:
- If SMTP requires password
default: null
required: false
version_added: "1.9"
host:
description:
- The mail server
default: 'localhost'
required: false
port:
description:
- The mail server port
default: '25'
required: false
version_added: "1.0"
attach:
description:
- A space-separated list of pathnames of files to attach to the message.
Attached files will have their content-type set to C(application/octet-stream).
default: null
required: false
version_added: "1.0"
headers:
description:
- A vertical-bar-separated list of headers which should be added to the message.
Each individual header is specified as C(header=value) (see example below).
default: null
required: false
version_added: "1.0"
charset:
description:
- The character set of email being sent
default: 'us-ascii'
required: false
subtype:
description:
- The minor mime type, can be either text or html. The major type is always text.
default: 'plain'
required: false
version_added: "2.0"
"""
EXAMPLES = '''
# Example playbook sending mail to root
- local_action: mail subject='System {{ ansible_hostname }} has been successfully provisioned.'
# Sending an e-mail using Gmail SMTP servers
- local_action: mail
host='smtp.gmail.com'
port=587
username=username@gmail.com
password='mysecret'
to="John Smith <john.smith@example.com>"
subject='Ansible-report'
body='System {{ ansible_hostname }} has been successfully provisioned.'
# Send e-mail to a bunch of users, attaching files
- local_action: mail
host='127.0.0.1'
port=2025
subject="Ansible-report"
body="Hello, this is an e-mail. I hope you like it ;-)"
from="jane@example.net (Jane Jolie)"
to="John Doe <j.d@example.org>, Suzie Something <sue@example.com>"
cc="Charlie Root <root@localhost>"
attach="/etc/group /tmp/pavatar2.png"
headers=Reply-To=john@example.com|X-Special="Something or other"
charset=utf8
# Sending an e-mail using the remote machine, not the Ansible controller node
- mail:
host='localhost'
port=25
to="John Smith <john.smith@example.com>"
subject='Ansible-report'
body='System {{ ansible_hostname }} has been successfully provisioned.'
'''
import os
import sys
import smtplib
import ssl
# Import the email helpers from their Python 3 (lower-case) module paths,
# falling back to the Python 2 names when they are unavailable.
try:
    from email import encoders
    import email.utils
    from email.utils import parseaddr, formataddr
    from email.mime.base import MIMEBase
    # BUGFIX: this read "from mail.mime.multipart ..." (typo for "email"),
    # which always raised ImportError, forced the Python 2 fallback path,
    # and broke the module entirely on Python 3.
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText
except ImportError:
    from email import Encoders as encoders
    import email.Utils
    from email.Utils import parseaddr, formataddr
    from email.MIMEBase import MIMEBase
    from email.MIMEMultipart import MIMEMultipart
    from email.MIMEText import MIMEText
def main():
    """Ansible 'mail' module entry point.

    Builds a MIME message from the module parameters (optionally with
    attachments and extra headers) and submits it over SMTP, trying an
    SSL connection first and falling back to plain SMTP (+ STARTTLS).
    """
    module = AnsibleModule(
        argument_spec = dict(
            username = dict(default=None),
            password = dict(default=None, no_log=True),
            host = dict(default='localhost'),
            port = dict(default='25'),
            sender = dict(default='root', aliases=['from']),
            to = dict(default='root', aliases=['recipients']),
            cc = dict(default=None),
            bcc = dict(default=None),
            subject = dict(required=True, aliases=['msg']),
            body = dict(default=None),
            attach = dict(default=None),
            headers = dict(default=None),
            charset = dict(default='us-ascii'),
            subtype = dict(default='plain')
        )
    )
    username = module.params.get('username')
    password = module.params.get('password')
    host = module.params.get('host')
    port = module.params.get('port')
    sender = module.params.get('sender')
    recipients = module.params.get('to')
    copies = module.params.get('cc')
    blindcopies = module.params.get('bcc')
    subject = module.params.get('subject')
    body = module.params.get('body')
    attach_files = module.params.get('attach')
    headers = module.params.get('headers')
    charset = module.params.get('charset')
    subtype = module.params.get('subtype')
    sender_phrase, sender_addr = parseaddr(sender)
    # The body defaults to the subject line when not supplied.
    if not body:
        body = subject
    try:
        try:
            smtp = smtplib.SMTP_SSL(host, port=int(port))
        except (smtplib.SMTPException, ssl.SSLError):
            # Endpoint is not SSL-capable; retry as plain SMTP.
            smtp = smtplib.SMTP(host, port=int(port))
    # BUGFIX: was `except Exception, e` -- Python-2-only syntax; the `as`
    # form works on Python 2.6+ and Python 3 (likewise below).
    except Exception as e:
        module.fail_json(rc=1, msg='Failed to send mail to server %s on port %s: %s' % (host, port, e))
    smtp.ehlo()
    if username and password:
        if smtp.has_extn('STARTTLS'):
            smtp.starttls()
        try:
            smtp.login(username, password)
        except smtplib.SMTPAuthenticationError:
            module.fail_json(msg="Authentication to %s:%s failed, please check your username and/or password" % (host, port))
    msg = MIMEMultipart()
    msg['Subject'] = subject
    msg['From'] = formataddr((sender_phrase, sender_addr))
    msg.preamble = "Multipart message"
    if headers is not None:
        # headers is a '|'-separated list of 'Header=value' entries.
        # NOTE(review): values containing '=' will fail the split and be
        # silently skipped (best-effort by design).
        for hdr in [x.strip() for x in headers.split('|')]:
            try:
                h_key, h_val = hdr.split('=')
                msg.add_header(h_key, h_val)
            except:
                pass
    if 'X-Mailer' not in msg:
        msg.add_header('X-Mailer', "Ansible")
    to_list = []
    cc_list = []
    addr_list = []
    if recipients is not None:
        for addr in [x.strip() for x in recipients.split(',')]:
            to_list.append( formataddr( parseaddr(addr)) )
            addr_list.append( parseaddr(addr)[1] )    # address only, w/o phrase
    if copies is not None:
        for addr in [x.strip() for x in copies.split(',')]:
            cc_list.append( formataddr( parseaddr(addr)) )
            addr_list.append( parseaddr(addr)[1] )    # address only, w/o phrase
    if blindcopies is not None:
        # Bcc addresses go only into the envelope, never into a header.
        for addr in [x.strip() for x in blindcopies.split(',')]:
            addr_list.append( parseaddr(addr)[1] )
    if len(to_list) > 0:
        msg['To'] = ", ".join(to_list)
    if len(cc_list) > 0:
        msg['Cc'] = ", ".join(cc_list)
    part = MIMEText(body + "\n\n", _subtype=subtype, _charset=charset)
    msg.attach(part)
    if attach_files is not None:
        # Renamed loop variable (was `file`, shadowing the builtin).
        for attach_file in attach_files.split():
            try:
                fp = open(attach_file, 'rb')
                part = MIMEBase('application', 'octet-stream')
                part.set_payload(fp.read())
                fp.close()
                encoders.encode_base64(part)
                part.add_header('Content-disposition', 'attachment', filename=os.path.basename(attach_file))
                msg.attach(part)
            except Exception as e:
                module.fail_json(rc=1, msg="Failed to send mail: can't attach file %s: %s" % (attach_file, e))
    composed = msg.as_string()
    try:
        # set() de-duplicates the envelope recipient list (to + cc + bcc).
        smtp.sendmail(sender_addr, set(addr_list), composed)
    except Exception as e:
        module.fail_json(rc=1, msg='Failed to send mail to %s: %s' % (", ".join(addr_list), e))
    smtp.quit()
    module.exit_json(changed=False)
# import module snippets
from ansible.module_utils.basic import *
# NOTE(review): Ansible modules of this era invoke main() unconditionally at
# execution time; newer modules guard this with `if __name__ == '__main__':`.
main()
|
waytai/odoo | refs/heads/8.0 | addons/l10n_de/__init__.py | 693 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
NeostreamTechnology/Microservices | refs/heads/master | venv/lib/python2.7/site-packages/setuptools/command/upload_docs.py | 173 | # -*- coding: utf-8 -*-
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import socket
import zipfile
import tempfile
import shutil
import itertools
import functools
from setuptools.extern import six
from setuptools.extern.six.moves import http_client, urllib
from pkg_resources import iter_entry_points
from .upload import upload
def _encode(s):
    """UTF-8 encode *s*, surrogateescaping undecodable bytes on Python 3."""
    return s.encode('utf-8', 'surrogateescape' if six.PY3 else 'strict')
class upload_docs(upload):
# override the default repository as upload_docs isn't
# supported by Warehouse (and won't be).
DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
description = 'Upload documentation to PyPI'
user_options = [
('repository=', 'r',
"url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
('show-response', None,
'display full response text from server'),
('upload-dir=', None, 'directory to upload'),
]
boolean_options = upload.boolean_options
def has_sphinx(self):
if self.upload_dir is None:
for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
return True
sub_commands = [('build_sphinx', has_sphinx)]
def initialize_options(self):
upload.initialize_options(self)
self.upload_dir = None
self.target_dir = None
def finalize_options(self):
upload.finalize_options(self)
if self.upload_dir is None:
if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx')
self.target_dir = build_sphinx.builder_target_dir
else:
build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs')
else:
self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir
if 'pypi.python.org' in self.repository:
log.warn("Upload_docs command is deprecated. Use RTD instead.")
self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename):
zip_file = zipfile.ZipFile(filename, "w")
try:
self.mkpath(self.target_dir) # just in case
for root, dirs, files in os.walk(self.target_dir):
if root == self.target_dir and not files:
tmpl = "no files found in upload directory '%s'"
raise DistutilsOptionError(tmpl % self.target_dir)
for name in files:
full = os.path.join(root, name)
relative = root[len(self.target_dir):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
finally:
zip_file.close()
def run(self):
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
tmp_dir = tempfile.mkdtemp()
name = self.distribution.metadata.get_name()
zip_file = os.path.join(tmp_dir, "%s.zip" % name)
try:
self.create_zipfile(zip_file)
self.upload_file(zip_file)
finally:
shutil.rmtree(tmp_dir)
@staticmethod
def _build_part(item, sep_boundary):
key, values = item
title = '\nContent-Disposition: form-data; name="%s"' % key
# handle multiple entries for the same name
if not isinstance(values, list):
values = [values]
for value in values:
if isinstance(value, tuple):
title += '; filename="%s"' % value[0]
value = value[1]
else:
value = _encode(value)
yield sep_boundary
yield _encode(title)
yield b"\n\n"
yield value
if value and value[-1:] == b'\r':
yield b'\n' # write an extra newline (lurve Macs)
@classmethod
def _build_multipart(cls, data):
"""
Build up the MIME payload for the POST data
"""
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary
end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n",
builder = functools.partial(
cls._build_part,
sep_boundary=sep_boundary,
)
part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items)
content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
return b''.join(body_items), content_type
def upload_file(self, filename):
    """POST *filename* to ``self.repository`` as a 'doc_upload' multipart
    request with HTTP Basic auth, and report the server's response."""
    with open(filename, 'rb') as f:
        content = f.read()
    meta = self.distribution.metadata
    data = {
        ':action': 'doc_upload',
        'name': meta.get_name(),
        'content': (os.path.basename(filename), content),
    }
    # set up the authentication
    credentials = _encode(self.username + ':' + self.password)
    credentials = standard_b64encode(credentials)
    if six.PY3:
        credentials = credentials.decode('ascii')
    auth = "Basic " + credentials
    body, ct = self._build_multipart(data)
    msg = "Submitting documentation to %s" % (self.repository)
    self.announce(msg, log.INFO)
    # build the Request
    # We can't use urllib2 since we need to send the Basic
    # auth right with the first request
    schema, netloc, url, params, query, fragments = \
        urllib.parse.urlparse(self.repository)
    assert not params and not query and not fragments
    if schema == 'http':
        conn = http_client.HTTPConnection(netloc)
    elif schema == 'https':
        conn = http_client.HTTPSConnection(netloc)
    else:
        raise AssertionError("unsupported schema " + schema)
    data = ''  # NOTE(review): dead assignment -- 'data' is never read below
    try:
        conn.connect()
        conn.putrequest("POST", url)
        content_type = ct
        conn.putheader('Content-type', content_type)
        conn.putheader('Content-length', str(len(body)))
        conn.putheader('Authorization', auth)
        conn.endheaders()
        conn.send(body)
    except socket.error as e:
        # Network-level failure: report the error and bail out without
        # attempting to read a response.
        self.announce(str(e), log.ERROR)
        return
    r = conn.getresponse()
    if r.status == 200:
        msg = 'Server response (%s): %s' % (r.status, r.reason)
        self.announce(msg, log.INFO)
    elif r.status == 301:
        # A redirect is treated as success; the Location header (when
        # present) points at the hosted documentation.
        location = r.getheader('Location')
        if location is None:
            location = 'https://pythonhosted.org/%s/' % meta.get_name()
        msg = 'Upload successful. Visit %s' % location
        self.announce(msg, log.INFO)
    else:
        msg = 'Upload failed (%s): %s' % (r.status, r.reason)
        self.announce(msg, log.ERROR)
    if self.show_response:
        print('-' * 75, r.read(), '-' * 75)
|
cpennington/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/exceptions.py | 22 | class InvalidDefinitionError(Exception):
pass
class NotFoundError(Exception):
    """Raised when a requested item cannot be found."""
    pass
class ProcessingError(Exception):
    '''
    An error occurred while processing a request to the XModule.

    For example: if an exception occurs while checking a capa problem.
    '''
    pass
class InvalidVersionError(Exception):
    """
    Tried to save an item with a location that a store cannot support (e.g., draft version
    for a non-leaf node).

    The offending location is available on the ``location`` attribute.
    """
    def __init__(self, location):
        # Pass a message to Exception so that str(exc) identifies the
        # offending location instead of being empty (the original called
        # super() with no arguments, losing that information in logs).
        super(InvalidVersionError, self).__init__(
            'Store cannot save item at location: {0!r}'.format(location)
        )
        self.location = location
class SerializationError(Exception):
    """
    Thrown when a module cannot be exported to XML.

    The message is whatever the caller supplied; the offending location is
    kept on the ``location`` attribute.
    """
    def __init__(self, location, msg):
        self.location = location
        super(SerializationError, self).__init__(msg)
class UndefinedContext(Exception):
    """
    Tried to access an xmodule field which needs a different context
    (runtime) to have a value.
    """
    pass
class HeartbeatFailure(Exception):
    """Raised when a heartbeat check fails.

    Carries the name of the failing service on the ``service`` attribute in
    addition to the human-readable message.
    """
    def __init__(self, msg, service):
        """Record *service* (the name of the failing service) with *msg*."""
        super(HeartbeatFailure, self).__init__(msg)
        self.service = service
|
nfco/netforce | refs/heads/master | netforce_messaging/netforce_messaging/__init__.py | 16 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from . import models
|
TathagataChakraborti/resource-conflicts | refs/heads/master | PLANROB-2015/seq-sat-lama/Python-2.5.2/Lib/test/test_imgfile.py | 43 | #! /usr/bin/env python
"""Simple test script for imgfile.c
Roger E. Masse
"""
from test.test_support import verbose, unlink, findfile
import imgfile, uu, os
def main():
    """Decode the bundled uuencoded test images, run the imgfile battery on
    each, and remove the decoded files afterwards."""
    uu.decode(findfile('testrgb.uue'), 'test.rgb')
    uu.decode(findfile('greyrgb.uue'), 'greytest.rgb')
    # Test a 3 byte color image
    testimage('test.rgb')
    # Test a 1 byte greyscale image
    testimage('greytest.rgb')
    unlink('test.rgb')
    unlink('greytest.rgb')
def testimage(name):
    """Run through the imgfile's battery of possible methods
    on the image passed in name.

    NOTE(review): ``imgfile`` is the SGI-only image module (Python 2 era);
    this exercises getsizes/read/write/readscaled/ttob and leaves no files
    behind on success.
    """
    import sys
    import os
    outputfile = '/tmp/deleteme'
    # try opening the name directly
    try:
        # This function returns a tuple (x, y, z) where x and y are the size
        # of the image in pixels and z is the number of bytes per pixel. Only
        # 3 byte RGB pixels and 1 byte greyscale pixels are supported.
        sizes = imgfile.getsizes(name)
    except imgfile.error:
        # get a more qualified path component of the script...
        if __name__ == '__main__':
            ourname = sys.argv[0]
        else:  # ...or the full path of the module
            ourname = sys.modules[__name__].__file__
        # Retry with the image file located next to this script/module.
        parts = ourname.split(os.sep)
        parts[-1] = name
        name = os.sep.join(parts)
        sizes = imgfile.getsizes(name)
    if verbose:
        print 'Opening test image: %s, sizes: %s' % (name, str(sizes))
    # This function reads and decodes the image on the specified file,
    # and returns it as a python string. The string has either 1 byte
    # greyscale pixels or 4 byte RGBA pixels. The bottom left pixel
    # is the first in the string. This format is suitable to pass
    # to gl.lrectwrite, for instance.
    image = imgfile.read(name)
    # This function writes the RGB or greyscale data in data to
    # image file file. x and y give the size of the image, z is
    # 1 for 1 byte greyscale images or 3 for RGB images (which
    # are stored as 4 byte values of which only the lower three
    # bytes are used). These are the formats returned by gl.lrectread.
    if verbose:
        print 'Writing output file'
    imgfile.write (outputfile, image, sizes[0], sizes[1], sizes[2])
    if verbose:
        print 'Opening scaled test image: %s, sizes: %s' % (name, str(sizes))
    # This function is identical to read but it returns an image that
    # is scaled to the given x and y sizes. If the filter and blur
    # parameters are omitted scaling is done by simply dropping
    # or duplicating pixels, so the result will be less than perfect,
    # especially for computer-generated images. Alternatively,
    # you can specify a filter to use to smoothen the image after
    # scaling. The filter forms supported are 'impulse', 'box',
    # 'triangle', 'quadratic' and 'gaussian'. If a filter is
    # specified blur is an optional parameter specifying the
    # blurriness of the filter. It defaults to 1.0. readscaled
    # makes no attempt to keep the aspect ratio correct, so that
    # is the users' responsibility.
    if verbose:
        print 'Filtering with "impulse"'
    simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'impulse', 2.0)
    # This function sets a global flag which defines whether the
    # scan lines of the image are read or written from bottom to
    # top (flag is zero, compatible with SGI GL) or from top to
    # bottom(flag is one, compatible with X). The default is zero.
    if verbose:
        print 'Switching to X compatibility'
    imgfile.ttob (1)
    if verbose:
        print 'Filtering with "triangle"'
    simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'triangle', 3.0)
    if verbose:
        print 'Switching back to SGI compatibility'
    imgfile.ttob (0)
    if verbose: print 'Filtering with "quadratic"'
    simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'quadratic')
    if verbose: print 'Filtering with "gaussian"'
    simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'gaussian', 1.0)
    if verbose:
        print 'Writing output file'
    imgfile.write (outputfile, simage, sizes[0]/2, sizes[1]/2, sizes[2])
    os.unlink(outputfile)
main()  # run the test battery at import time (old-style test module convention)
|
bop/foundation | refs/heads/master | lib/python2.7/site-packages/django/contrib/auth/context_processors.py | 57 | # PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
    """Dict-like proxy exposing one app's permissions for a user.

    ``perms.someapp.some_perm`` and ``'some_perm' in perms.someapp`` both
    route through ``__getitem__``; truthiness reflects module-level perms.
    """

    def __init__(self, user, module_name):
        self.user = user
        self.module_name = module_name

    def __repr__(self):
        return str(self.user.get_all_permissions())

    def __getitem__(self, perm_name):
        return self.user.has_perm("%s.%s" % (self.module_name, perm_name))

    def __iter__(self):
        # Iteration is deliberately unsupported so that
        # 'item in perms.someapp' falls back to __getitem__ rather than
        # iterating. See #18979 for details.
        raise TypeError("PermLookupDict is not iterable.")

    def __nonzero__(self):  # Python 2 truthiness hook
        return self.user.has_module_perms(self.module_name)
class PermWrapper(object):
    """Template-friendly wrapper mapping app labels to PermLookupDicts."""

    def __init__(self, user):
        self.user = user

    def __getitem__(self, module_name):
        # Each lookup yields a per-app permission proxy.
        return PermLookupDict(self.user, module_name)

    def __iter__(self):
        # I am large, I contain multitudes -- iteration is unsupported.
        raise TypeError("PermWrapper is not iterable.")
def auth(request):
    """
    Returns context variables required by apps that use Django's
    authentication system.

    If there is no 'user' attribute on the request, an AnonymousUser
    (from django.contrib.auth) is substituted.
    """
    _missing = object()
    user = getattr(request, 'user', _missing)
    if user is _missing:
        # Imported lazily so merely loading this module doesn't pull in
        # the auth models.
        from django.contrib.auth.models import AnonymousUser
        user = AnonymousUser()
    return {
        'user': user,
        'perms': PermWrapper(user),
    }
|
ubc/edx-platform | refs/heads/release | pavelib/utils/test/suites/bokchoy_suite.py | 57 | """
Class used for defining and running Bok Choy acceptance test suite
"""
from time import sleep
from paver.easy import sh
from pavelib.utils.test.suites.suite import TestSuite
from pavelib.utils.envs import Env
from pavelib.utils.test import bokchoy_utils
from pavelib.utils.test import utils as test_utils
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable=invalid-name
__test__ = False # do not collect
class BokChoyTestSuite(TestSuite):
    """
    TestSuite for running Bok Choy tests

    Properties (below is a subset):
    test_dir - parent directory for tests
    log_dir - directory for test output
    report_dir - directory for reports (e.g., coverage) related to test execution
    xunit_report - directory for xunit-style output (xml)
    fasttest - when set, skip various set-up tasks (e.g., collectstatic)
    serversonly - prepare and run the necessary servers, only stopping when interrupted with Ctrl-C
    testsonly - assume servers are running (as per above) and run tests with no setup or cleaning of environment
    test_spec - when set, specifies test files, classes, cases, etc. See platform doc.
    default_store - modulestore to use when running tests (split or draft)
    """
    def __init__(self, *args, **kwargs):
        super(BokChoyTestSuite, self).__init__(*args, **kwargs)
        # Paths come from the shared Env object; '/' is path-join (path.py style).
        self.test_dir = Env.BOK_CHOY_DIR / kwargs.get('test_dir', 'tests')
        self.log_dir = Env.BOK_CHOY_LOG_DIR
        self.report_dir = Env.BOK_CHOY_REPORT_DIR
        self.xunit_report = self.report_dir / "xunit.xml"
        self.cache = Env.BOK_CHOY_CACHE
        self.fasttest = kwargs.get('fasttest', False)
        self.serversonly = kwargs.get('serversonly', False)
        self.testsonly = kwargs.get('testsonly', False)
        self.test_spec = kwargs.get('test_spec', None)
        self.default_store = kwargs.get('default_store', None)
        self.verbosity = kwargs.get('verbosity', 2)
        self.extra_args = kwargs.get('extra_args', '')
        self.har_dir = self.log_dir / 'hars'
        self.imports_dir = kwargs.get('imports_dir', None)

    def __enter__(self):
        """Set up directories, verify backing services, and start servers
        unless running in tests-only mode."""
        super(BokChoyTestSuite, self).__enter__()
        # Ensure that we have a directory to put logs and reports
        self.log_dir.makedirs_p()
        self.har_dir.makedirs_p()
        self.report_dir.makedirs_p()
        test_utils.clean_reports_dir()
        if not (self.fasttest or self.skip_clean):
            test_utils.clean_test_files()
        # NOTE(review): 'memchache' typo is only in the displayed message.
        msg = colorize('green', "Checking for mongo, memchache, and mysql...")
        print msg
        bokchoy_utils.check_services()
        if not self.testsonly:
            self.prepare_bokchoy_run()
        msg = colorize('green', "Confirming servers have started...")
        print msg
        bokchoy_utils.wait_for_test_servers()
        if self.serversonly:
            # Blocks until interrupted; no tests are run in this mode.
            self.run_servers_continuously()

    def __exit__(self, exc_type, exc_value, traceback):
        """Tear down: flush test data from MySQL and Mongo."""
        super(BokChoyTestSuite, self).__exit__(exc_type, exc_value, traceback)
        msg = colorize('green', "Cleaning up databases...")
        print msg
        # Clean up data we created in the databases
        sh("./manage.py lms --settings bok_choy flush --traceback --noinput")
        bokchoy_utils.clear_mongo()

    def prepare_bokchoy_run(self):
        """
        Sets up and starts servers for a Bok Choy run. If --fasttest is not
        specified then static assets are collected
        """
        sh("{}/scripts/reset-test-db.sh".format(Env.REPO_ROOT))
        if not self.fasttest:
            self.generate_optimized_static_assets()
        # Clear any test data already in Mongo or MySQLand invalidate
        # the cache
        bokchoy_utils.clear_mongo()
        self.cache.flush_all()
        # Load the shared DB fixtures under the configured modulestore.
        sh(
            "DEFAULT_STORE={default_store}"
            " ./manage.py lms --settings bok_choy loaddata --traceback"
            " common/test/db_fixtures/*.json".format(
                default_store=self.default_store,
            )
        )
        if self.imports_dir:
            # Optionally import course data into the CMS before starting.
            sh(
                "DEFAULT_STORE={default_store}"
                " ./manage.py cms --settings=bok_choy import {import_dir}".format(
                    default_store=self.default_store,
                    import_dir=self.imports_dir
                )
            )
        # Ensure the test servers are available
        msg = colorize('green', "Confirming servers are running...")
        print msg
        bokchoy_utils.start_servers(self.default_store)

    def run_servers_continuously(self):
        """
        Infinite loop. Servers will continue to run in the current session unless interrupted.
        """
        print 'Bok-choy servers running. Press Ctrl-C to exit...\n'
        print 'Note: pressing Ctrl-C multiple times can corrupt noseid files and system state. Just press it once.\n'
        while True:
            try:
                sleep(10000)
            except KeyboardInterrupt:
                print "Stopping bok-choy servers.\n"
                break

    @property
    def cmd(self):
        """
        This method composes the nosetests command to send to the terminal. If nosetests aren't being run,
        the command returns an empty string.
        """
        # Default to running all tests if no specific test is specified
        if not self.test_spec:
            test_spec = self.test_dir
        else:
            test_spec = self.test_dir / self.test_spec
        # Skip any additional commands (such as nosetests) if running in
        # servers only mode
        if self.serversonly:
            return ""
        # Construct the nosetests command, specifying where to save
        # screenshots and XUnit XML reports
        cmd = [
            "DEFAULT_STORE={}".format(self.default_store),
            "SCREENSHOT_DIR='{}'".format(self.log_dir),
            "BOK_CHOY_HAR_DIR='{}'".format(self.har_dir),
            "SELENIUM_DRIVER_LOG_DIR='{}'".format(self.log_dir),
            "nosetests",
            test_spec,
            "--with-xunit",
            "--xunit-file={}".format(self.xunit_report),
            "--verbosity={}".format(self.verbosity),
        ]
        if self.pdb:
            cmd.append("--pdb")
        cmd.append(self.extra_args)
        cmd = (" ").join(cmd)
        return cmd
|
benrudolph/commcare-hq | refs/heads/master | corehq/apps/registration/forms.py | 1 | from corehq.apps.programs.models import Program
from corehq.apps.users.models import CouchUser
from django import forms
from django.contrib.auth.models import User
from corehq.apps.users.forms import RoleForm, SupplyPointSelectWidget
import re
from corehq.apps.domain.forms import clean_password, max_pwd
from django.core.validators import validate_email
from corehq.apps.domain.models import Domain
from corehq.apps.domain.utils import new_domain_re, new_org_re, website_re
from corehq.apps.orgs.models import Organization
from corehq.apps.style.forms.widgets import Select2Widget
from django.utils.encoding import smart_str
from django.utils.safestring import mark_safe
from corehq.util.timezones.fields import TimeZoneField
from corehq.util.timezones.forms import TimeZoneChoiceField
from django.utils.translation import ugettext_lazy as _
class NewWebUserRegistrationForm(forms.Form):
    """
    Form for a brand new user, before they've created a domain or done anything on CommCare HQ.
    """
    full_name = forms.CharField(label=_('Full Name'),
                                max_length=User._meta.get_field('first_name').max_length +
                                           User._meta.get_field('last_name').max_length + 1)
    email = forms.EmailField(label=_('Email Address'),
                             max_length=User._meta.get_field('email').max_length,
                             help_text=_('You will use this email to log in.'))
    password = forms.CharField(label=_('Create Password'),
                               max_length=max_pwd,
                               widget=forms.PasswordInput(render_value=False))
    email_opt_in = forms.BooleanField(required=False,
                                      initial=True,
                                      label="",
                                      help_text=_("Opt into emails about new features and other CommCare updates."))
    # Must be set to False to have the clean_*() routine called
    eula_confirmed = forms.BooleanField(required=False,
                                        label="",
                                        help_text=mark_safe(_(
                                            """I have read and agree to the
        <a data-toggle='modal'
           data-target='#eulaModal'
           href='#eulaModal'>
           CommCare HQ End User License Agreement
        </a>.""")))
    # not required for when a user accepts an invitation
    domain_type = forms.CharField(
        required=False, widget=forms.HiddenInput(), initial='commcare')

    def clean_full_name(self):
        # Split the single "Full Name" input into [first_name, rest-of-name].
        data = self.cleaned_data['full_name'].split()
        return [data.pop(0)] + [' '.join(data)]

    def clean_email(self):
        """Lower-case, validate, and reject addresses already in use."""
        data = self.cleaned_data['email'].strip().lower()
        validate_email(data)
        duplicate = CouchUser.get_by_username(data)
        if duplicate:
            # sync django user
            duplicate.save()
        if User.objects.filter(username__iexact=data).count() > 0 or duplicate:
            raise forms.ValidationError('Username already taken; please try another')
        return data

    def clean_password(self):
        # Delegates strength/validity rules to the shared domain helper.
        return clean_password(self.cleaned_data.get('password'))

    def clean(self):
        # Strip whitespace on every string field (Python 2: basestring).
        for field in self.cleaned_data:
            if isinstance(self.cleaned_data[field], basestring):
                self.cleaned_data[field] = self.cleaned_data[field].strip()
        return self.cleaned_data

    def clean_eula_confirmed(self):
        data = self.cleaned_data['eula_confirmed']
        if data is not True:
            raise forms.ValidationError('You must agree to our End User License Agreement in order to register an account.')
        return data
class OrganizationRegistrationForm(forms.Form):
    """
    Form for creating an organization for the first time.
    """
    org_title = forms.CharField(label='Organization Title:', max_length=25, help_text='e.g. - Dimagi Inc')
    org_name = forms.CharField(label='Organization ID:', max_length=25, help_text='e.g. - dimagi')
    email = forms.CharField(label='Organization Email:', max_length=35, required=False)
    url = forms.CharField(label='Organization Homepage:', max_length=35, required=False)
    location = forms.CharField(label='Organization Location:', max_length=25, required=False)
    # logo = forms.ImageField(label='Organization Logo:', required=False)

    def clean_org_name(self):
        """Validate the org slug format and reject names already taken."""
        data = self.cleaned_data['org_name'].strip().lower()
        if not re.match("^%s$" % new_org_re, data):
            raise forms.ValidationError('Only lowercase letters and numbers allowed. Single hyphens may be used to separate words.')
        # Also check the dotted variant of a hyphenated name for conflicts.
        if Organization.get_by_name(data) or Organization.get_by_name(data.replace('-', '.')):
            raise forms.ValidationError('Organization name already taken---please try another')
        return data

    def clean_org_title(self):
        data = self.cleaned_data['org_title'].strip()
        return data

    def clean_email(self):
        # Email is optional; only validate when non-empty.
        data = self.cleaned_data['email'].strip()
        if not data == '':
            validate_email(data)
        return data

    def clean_url(self):
        # URL is optional; only validate when non-empty.
        data = self.cleaned_data['url'].strip()
        if not re.match("^%s$" % website_re, data) and not data == '':
            raise forms.ValidationError('invalid url')
        return data

    def clean_location(self):
        data = self.cleaned_data['location']
        return data

    # def clean_logo(self):
    #     data = self.cleaned_data['logo']
    #     #resize image to fit in website nicely
    #     return data

    def clean(self):
        # Strip whitespace on every string field (Python 2: basestring).
        for field in self.cleaned_data:
            if isinstance(self.cleaned_data[field], basestring):
                self.cleaned_data[field] = self.cleaned_data[field].strip()
        return self.cleaned_data
class DomainRegistrationForm(forms.Form):
    """
    Form for creating a domain for the first time
    """
    org = forms.CharField(widget=forms.HiddenInput(), required=False)
    domain_name = forms.CharField(label=_('Project Name:'), max_length=25,
                                  help_text=_("Project name cannot contain spaces."))
    domain_type = forms.CharField(widget=forms.HiddenInput(), required=False,
                                  initial='commcare')
    domain_timezone = TimeZoneChoiceField(
        label=_("Time Zone:"), initial="UTC", required=False,
        widget=Select2Widget(attrs={'class': 'input-xlarge',
                                    'bindparent': 'visible: override_tz',
                                    'data-bind': 'event: {change: updateForm}'}))

    def clean_domain_name(self):
        """Validate the slug format and reject names already taken."""
        data = self.cleaned_data['domain_name'].strip().lower()
        if not re.match("^%s$" % new_domain_re, data):
            raise forms.ValidationError('Only lowercase letters and numbers allowed. Single hyphens may be used to separate words.')
        # Also check the dotted variant of a hyphenated name for conflicts.
        conflict = Domain.get_by_name(data) or Domain.get_by_name(data.replace('-', '.'))
        if conflict:
            raise forms.ValidationError('Project name already taken---please try another')
        return data

    def clean_domain_type(self):
        # Fall back to 'commcare' when the hidden field arrives empty.
        data = self.cleaned_data.get('domain_type', '').strip().lower()
        return data if data else 'commcare'

    def clean_domain_timezone(self):
        data = self.cleaned_data['domain_timezone']
        # Re-run the timezone validators explicitly before coercing to str.
        timezone_field = TimeZoneField()
        timezone_field.run_validators(data)
        return smart_str(data)

    def clean(self):
        # Strip whitespace on every string field (Python 2: basestring).
        for field in self.cleaned_data:
            if isinstance(self.cleaned_data[field], basestring):
                self.cleaned_data[field] = self.cleaned_data[field].strip()
        return self.cleaned_data
# From http://www.peterbe.com/plog/automatically-strip-whitespace-in-django-app_manager
#
# I'll put this in each app, so they can be standalone, but it should really go in some centralized
# part of the distro
class _BaseForm(object):
    """Mixin that strips leading/trailing whitespace from every string
    field during clean() (Python 2: basestring)."""
    def clean(self):
        for field in self.cleaned_data:
            if isinstance(self.cleaned_data[field], basestring):
                self.cleaned_data[field] = self.cleaned_data[field].strip()
        return self.cleaned_data
class AdminInvitesUserForm(RoleForm, _BaseForm, forms.Form):
    """Form an admin uses to invite a user to a project by email.

    Adds supply-point/program fields dynamically when the domain has
    CommTrack enabled.
    """
    # As above. Need email now; still don't need domain. Don't need TOS. Do need the is_active flag,
    # and do need to relabel some things.
    email = forms.EmailField(label="Email Address",
                             max_length=User._meta.get_field('email').max_length)
    # is_domain_admin = forms.BooleanField(label='User is a domain administrator', initial=False, required=False)
    role = forms.ChoiceField(choices=(), label="Project Role")

    def __init__(self, data=None, excluded_emails=None, *args, **kwargs):
        """Optionally accepts 'domain' in kwargs (popped before super) and a
        list of emails that may not be invited again."""
        domain = None
        if 'domain' in kwargs:
            domain = Domain.get_by_name(kwargs['domain'])
            del kwargs['domain']
        super(AdminInvitesUserForm, self).__init__(data=data, *args, **kwargs)
        if domain and domain.commtrack_enabled:
            # CommTrack domains get extra supply-point and program pickers.
            self.fields['supply_point'] = forms.CharField(label='Supply Point:', required=False, widget=SupplyPointSelectWidget(domain=domain.name))
            self.fields['program'] = forms.ChoiceField(label="Program", choices=(), required=False)
            programs = Program.by_domain(domain.name, wrap=False)
            choices = list((prog['_id'], prog['name']) for prog in programs)
            choices.insert(0, ('', ''))
            self.fields['program'].choices = choices
        self.excluded_emails = excluded_emails or []

    def clean_email(self):
        email = self.cleaned_data['email'].strip()
        if email in self.excluded_emails:
            raise forms.ValidationError(_("A user with this email address is already in "
                                          "this project or has a pending invitation."))
        return email
|
shsingh/ansible | refs/heads/devel | lib/ansible/modules/cloud/centurylink/clc_publicip.py | 47 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_publicip
short_description: Add and Delete public ips on servers in CenturyLink Cloud.
description:
- An Ansible module to add or delete public ip addresses on an existing server or servers in CenturyLink Cloud.
version_added: "2.0"
options:
protocol:
description:
- The protocol that the public IP will listen for.
default: TCP
choices: ['TCP', 'UDP', 'ICMP']
ports:
description:
- A list of ports to expose. This is required when state is 'present'
server_ids:
description:
- A list of servers to create public ips on.
required: True
state:
description:
- Determine whether to create or delete public IPs. If present module will not create a second public ip if one
already exists.
default: present
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the tasks to finish before returning.
type: bool
default: 'yes'
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
- name: Add Public IP to Server
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create Public IP For Servers
clc_publicip:
protocol: TCP
ports:
- 80
server_ids:
- UC1TEST-SVR01
- UC1TEST-SVR02
state: present
register: clc
- name: debug
debug:
var: clc
- name: Delete Public IP from Server
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create Public IP For Servers
clc_publicip:
server_ids:
- UC1TEST-SVR01
- UC1TEST-SVR02
state: absent
register: clc
- name: debug
debug:
var: clc
'''
RETURN = '''
server_ids:
description: The list of server ids that are changed
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
'''
__version__ = '${version}'
import os
import traceback
from distutils.version import LooseVersion
REQUESTS_IMP_ERR = None
try:
import requests
except ImportError:
REQUESTS_IMP_ERR = traceback.format_exc()
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
CLC_IMP_ERR = None
try:
import clc as clc_sdk
from clc import CLCException
except ImportError:
CLC_IMP_ERR = traceback.format_exc()
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
class ClcPublicIp(object):
    """Implements the clc_publicip Ansible module: add/remove public IPs on
    CenturyLink Cloud servers via the clc-sdk."""
    clc = clc_sdk
    module = None

    def __init__(self, module):
        """
        Construct module, failing fast when required libraries are missing
        or too old.
        """
        self.module = module
        if not CLC_FOUND:
            self.module.fail_json(msg=missing_required_lib('clc-sdk'), exception=CLC_IMP_ERR)
        if not REQUESTS_FOUND:
            self.module.fail_json(msg=missing_required_lib('requests'), exception=REQUESTS_IMP_ERR)
        if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'):
            self.module.fail_json(
                msg='requests library version should be >= 2.5.0')
        self._set_user_agent(self.clc)

    def process_request(self):
        """
        Process the request - Main Code Path
        :return: Returns with either an exit_json or fail_json
        """
        self._set_clc_credentials_from_env()
        params = self.module.params
        server_ids = params['server_ids']
        ports = params['ports']
        protocol = params['protocol']
        state = params['state']
        if state == 'present':
            changed, changed_server_ids, requests = self.ensure_public_ip_present(
                server_ids=server_ids, protocol=protocol, ports=ports)
        elif state == 'absent':
            changed, changed_server_ids, requests = self.ensure_public_ip_absent(
                server_ids=server_ids)
        else:
            return self.module.fail_json(msg="Unknown State: " + state)
        # NOTE(review): the local name 'requests' shadows the requests
        # library within this method.
        self._wait_for_requests_to_complete(requests)
        return self.module.exit_json(changed=changed,
                                     server_ids=changed_server_ids)

    @staticmethod
    def _define_module_argument_spec():
        """
        Define the argument spec for the ansible module
        :return: argument spec dictionary
        """
        argument_spec = dict(
            server_ids=dict(type='list', required=True),
            protocol=dict(default='TCP', choices=['TCP', 'UDP', 'ICMP']),
            ports=dict(type='list'),
            wait=dict(type='bool', default=True),
            state=dict(default='present', choices=['present', 'absent']),
        )
        return argument_spec

    def ensure_public_ip_present(self, server_ids, protocol, ports):
        """
        Ensures the given server ids having the public ip available
        :param server_ids: the list of server ids
        :param protocol: the ip protocol
        :param ports: the list of ports to expose
        :return: (changed, changed_server_ids, results)
                 changed: A flag indicating if there is any change
                 changed_server_ids : the list of server ids that are changed
                 results: The result list from clc public ip call
        """
        changed = False
        results = []
        changed_server_ids = []
        servers = self._get_servers_from_clc(
            server_ids,
            'Failed to obtain server list from the CLC API')
        # Only touch servers that have no public IP yet (idempotence).
        servers_to_change = [
            server for server in servers if len(
                server.PublicIPs().public_ips) == 0]
        ports_to_expose = [{'protocol': protocol, 'port': port}
                           for port in ports]
        for server in servers_to_change:
            if not self.module.check_mode:
                result = self._add_publicip_to_server(server, ports_to_expose)
                results.append(result)
            changed_server_ids.append(server.id)
            changed = True
        return changed, changed_server_ids, results

    def _add_publicip_to_server(self, server, ports_to_expose):
        # Wraps the SDK call so API errors become fail_json with context.
        result = None
        try:
            result = server.PublicIPs().Add(ports_to_expose)
        except CLCException as ex:
            self.module.fail_json(msg='Failed to add public ip to the server : {0}. {1}'.format(
                server.id, ex.response_text
            ))
        return result

    def ensure_public_ip_absent(self, server_ids):
        """
        Ensures the given server ids having the public ip removed if there is any
        :param server_ids: the list of server ids
        :return: (changed, changed_server_ids, results)
                 changed: A flag indicating if there is any change
                 changed_server_ids : the list of server ids that are changed
                 results: The result list from clc public ip call
        """
        changed = False
        results = []
        changed_server_ids = []
        servers = self._get_servers_from_clc(
            server_ids,
            'Failed to obtain server list from the CLC API')
        # Only touch servers that actually have a public IP (idempotence).
        servers_to_change = [
            server for server in servers if len(
                server.PublicIPs().public_ips) > 0]
        for server in servers_to_change:
            if not self.module.check_mode:
                result = self._remove_publicip_from_server(server)
                results.append(result)
            changed_server_ids.append(server.id)
            changed = True
        return changed, changed_server_ids, results

    def _remove_publicip_from_server(self, server):
        # Deletes every public IP on the server; returns the last request.
        result = None
        try:
            for ip_address in server.PublicIPs().public_ips:
                result = ip_address.Delete()
        except CLCException as ex:
            self.module.fail_json(msg='Failed to remove public ip from the server : {0}. {1}'.format(
                server.id, ex.response_text
            ))
        return result

    def _wait_for_requests_to_complete(self, requests_lst):
        """
        Waits until the CLC requests are complete if the wait argument is True
        :param requests_lst: The list of CLC request objects
        :return: none
        """
        if not self.module.params['wait']:
            return
        for request in requests_lst:
            request.WaitUntilComplete()
            for request_details in request.requests:
                if request_details.Status() != 'succeeded':
                    self.module.fail_json(
                        msg='Unable to process public ip request')

    def _set_clc_credentials_from_env(self):
        """
        Set the CLC Credentials on the sdk by reading environment variables
        :return: none
        """
        env = os.environ
        v2_api_token = env.get('CLC_V2_API_TOKEN', False)
        v2_api_username = env.get('CLC_V2_API_USERNAME', False)
        v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
        clc_alias = env.get('CLC_ACCT_ALIAS', False)
        api_url = env.get('CLC_V2_API_URL', False)
        if api_url:
            self.clc.defaults.ENDPOINT_URL_V2 = api_url
        # Token+alias takes precedence over username/password.
        if v2_api_token and clc_alias:
            self.clc._LOGIN_TOKEN_V2 = v2_api_token
            self.clc._V2_ENABLED = True
            self.clc.ALIAS = clc_alias
        elif v2_api_username and v2_api_passwd:
            self.clc.v2.SetCredentials(
                api_username=v2_api_username,
                api_passwd=v2_api_passwd)
        else:
            return self.module.fail_json(
                msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
                    "environment variables")

    def _get_servers_from_clc(self, server_ids, message):
        """
        Gets list of servers form CLC api
        """
        try:
            return self.clc.v2.Servers(server_ids).servers
        except CLCException as exception:
            self.module.fail_json(msg=message + ': %s' % exception)

    @staticmethod
    def _set_user_agent(clc):
        # Tag SDK requests so the CLC API can identify this Ansible module.
        if hasattr(clc, 'SetRequestsSession'):
            agent_string = "ClcAnsibleModule/" + __version__
            ses = requests.Session()
            ses.headers.update({"Api-Client": agent_string})
            ses.headers['User-Agent'] += " " + agent_string
            clc.SetRequestsSession(ses)
def main():
    """
    The main function. Instantiates the module and calls process_request.
    :return: none
    """
    argument_spec = ClcPublicIp._define_module_argument_spec()
    ansible_module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )
    ClcPublicIp(ansible_module).process_request()
# Entry point when Ansible executes this module as a script.
if __name__ == '__main__':
    main()
|
helldorado/ansible | refs/heads/devel | lib/ansible/module_utils/network/slxos/__init__.py | 12133432 | |
sjhewitt/graphene | refs/heads/master | examples/__init__.py | 12133432 | |
recto/lynda_uar_python_django | refs/heads/master | ch02/02_04/start/firstdjango/inventory/__init__.py | 12133432 | |
MechanisM/django-hosts | refs/heads/develop | docs/conf.py | 1 | # -*- coding: utf-8 -*-
#
# django-hosts documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 26 16:39:46 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-hosts'
copyright = u'2011, Jannis Leidel and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
release = '0.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-hostsdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-hosts.tex', u'django-hosts Documentation',
u'Jannis Leidel and contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-hosts', u'django-hosts Documentation',
[u'Jannis Leidel and contributors'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/v2.7.2/', None),
'django': (
'https://docs.djangoproject.com/en/dev/',
'https://docs.djangoproject.com/en/dev/_objects/',
),
}
|
rehandalal/morgoth | refs/heads/master | morgoth/addons/tests/__init__.py | 1 | import factory
from tempfile import NamedTemporaryFile
from unittest.mock import patch
from morgoth.addons.models import Addon, AddonGroup
from morgoth.base.tests import FuzzyUnicode, FuzzyVersionNumber
FAKE_XPI_HASH = ('c383ffa8d660821158c1313690e7676eaeb917ac12de0bde06e3059920d106e8'
'656a6273655fbc2bc28d694dce433d11784807c27065f6f7f6e83b276b1d2926')
FAKE_XPI_FILESIZE = 9
def mock_urlretrieve(*args, **kwargs):
    """Stand-in for ``urllib.request.urlretrieve`` used by the factories.

    Writes a small fixture file to disk and returns ``(path, headers)``
    with ``headers`` always ``None``, mirroring urlretrieve's return shape.
    The caller is responsible for removing the file.
    """
    with NamedTemporaryFile(delete=False) as handle:
        handle.write(b'Testfile\n')
        path = handle.name
    return path, None
class AddonFactory(factory.DjangoModelFactory):
    """Factory producing Addon instances with randomized name/version/url."""
    name = FuzzyUnicode()
    version = FuzzyVersionNumber()
    ftp_url = factory.Faker('url')
    class Meta:
        model = Addon
    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # save() presumably fetches the add-on file via urlretrieve (that is
        # why it is patched here) — keep tests offline by stubbing it.
        obj = model_class(*args, **kwargs)
        with patch('urllib.request.urlretrieve', mock_urlretrieve):
            obj.save()
        return obj
class AddonGroupFactory(factory.DjangoModelFactory):
    """Factory producing AddonGroup instances."""
    browser_version = FuzzyVersionNumber()
    class Meta:
        model = AddonGroup
    @factory.post_generation
    def addons(self, create, extracted, **kwargs):
        # Post-generation hook: AddonGroupFactory(addons=[...]) associates
        # the given addons with the freshly created group.
        if not create:
            # Simple build, do nothing.
            return
        if extracted:
            # A list of addons was passed in; attach each one to the group.
            for addon in extracted:
                self.addons.add(addon)
|
smendes/django-andablog | refs/heads/master | demo/common/migrations/0001_initial.py | 3 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the demo app's custom email-based User model.

    NOTE: auto-generated by Django's makemigrations; the field definitions
    are part of the recorded schema history and must not be edited casually.
    """
    dependencies = [
        ('auth', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(unique=True, max_length=255, verbose_name='email address', db_index=True)),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('name', models.CharField(max_length=100)),
                ('profile_name', models.CharField(unique=True, max_length=20, verbose_name=b'profile name')),
                ('slug', models.SlugField(unique=True)),
                ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
            ],
            options={
                'ordering': ['email'],
                'abstract': False,
            },
            bases=(models.Model,),
        ),
    ]
|
fnouama/intellij-community | refs/heads/master | python/testData/surround/SurroundWithIf.py | 150 | def foo():
<selection>print "hello"</selection>
|
bocaaust/FreshLife | refs/heads/master | django_project/env/lib/python2.7/site-packages/django/views/generic/detail.py | 108 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.db import models
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateResponseMixin, ContextMixin, View
class SingleObjectMixin(ContextMixin):
    """
    Provides the ability to retrieve a single object for further manipulation.
    """
    model = None  # Model class queried when no explicit ``queryset`` is set.
    queryset = None  # Explicit queryset; takes precedence over ``model``.
    slug_field = 'slug'  # Model field matched against the slug URL kwarg.
    context_object_name = None  # Template context key; falls back to model name.
    slug_url_kwarg = 'slug'  # URLconf kwarg carrying the slug value.
    pk_url_kwarg = 'pk'  # URLconf kwarg carrying the primary key.
    def get_object(self, queryset=None):
        """
        Returns the object the view is displaying.
        By default this requires `self.queryset` and a `pk` or `slug` argument
        in the URLconf, but subclasses can override this to return any object.
        """
        # Use a custom queryset if provided; this is required for subclasses
        # like DateDetailView
        if queryset is None:
            queryset = self.get_queryset()
        # Next, try looking up by primary key.
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        slug = self.kwargs.get(self.slug_url_kwarg, None)
        if pk is not None:
            queryset = queryset.filter(pk=pk)
        # Next, try looking up by slug (only when no pk was supplied).
        elif slug is not None:
            slug_field = self.get_slug_field()
            queryset = queryset.filter(**{slug_field: slug})
        # If none of those are defined, it's an error.
        else:
            raise AttributeError("Generic detail view %s must be called with "
                                 "either an object pk or a slug."
                                 % self.__class__.__name__)
        try:
            # Get the single item from the filtered queryset
            obj = queryset.get()
        except ObjectDoesNotExist:
            # Missing objects surface as HTTP 404, not as a server error.
            raise Http404(_("No %(verbose_name)s found matching the query") %
                          {'verbose_name': queryset.model._meta.verbose_name})
        return obj
    def get_queryset(self):
        """
        Get the queryset to look an object up against. May not be called if
        `get_object` is overridden.
        """
        if self.queryset is None:
            if self.model:
                return self.model._default_manager.all()
            else:
                raise ImproperlyConfigured("%(cls)s is missing a queryset. Define "
                                           "%(cls)s.model, %(cls)s.queryset, or override "
                                           "%(cls)s.get_queryset()." % {
                                               'cls': self.__class__.__name__
                                           })
        # _clone() returns a fresh queryset so per-request filtering does not
        # mutate the class-level attribute.
        return self.queryset._clone()
    def get_slug_field(self):
        """
        Get the name of a slug field to be used to look up by slug.
        """
        return self.slug_field
    def get_context_object_name(self, obj):
        """
        Get the name to use for the object.
        """
        if self.context_object_name:
            return self.context_object_name
        elif isinstance(obj, models.Model):
            # Fall back to the lower-cased model name for model instances.
            return obj._meta.object_name.lower()
        else:
            return None
    def get_context_data(self, **kwargs):
        """
        Insert the single object into the context dict.
        """
        context = {}
        context_object_name = self.get_context_object_name(self.object)
        if context_object_name:
            context[context_object_name] = self.object
        context.update(kwargs)
        return super(SingleObjectMixin, self).get_context_data(**context)
class BaseDetailView(SingleObjectMixin, View):
    """
    A base view for displaying a single object
    """
    def get(self, request, *args, **kwargs):
        """Fetch the object, build the template context and render it."""
        self.object = self.get_object()
        return self.render_to_response(
            self.get_context_data(object=self.object))
class SingleObjectTemplateResponseMixin(TemplateResponseMixin):
    """Chooses a template for rendering a single object."""
    template_name_field = None  # Object attribute that may carry a template name.
    template_name_suffix = '_detail'  # Appended to "<app>/<model>" default name.
    def get_template_names(self):
        """
        Return a list of template names to be used for the request. May not be
        called if render_to_response is overridden. Returns the following list:
        * the value of ``template_name`` on the view (if provided)
        * the contents of the ``template_name_field`` field on the
          object instance that the view is operating upon (if available)
        * ``<app_label>/<object_name><template_name_suffix>.html``
        """
        try:
            names = super(SingleObjectTemplateResponseMixin, self).get_template_names()
        except ImproperlyConfigured:
            # If template_name isn't specified, it's not a problem --
            # we just start with an empty list.
            names = []
        # If self.template_name_field is set, grab the value of the field
        # of that name from the object; this is the most specific template
        # name, if given.
        if self.object and self.template_name_field:
            name = getattr(self.object, self.template_name_field, None)
            if name:
                names.insert(0, name)
        # The least-specific option is the default <app>/<model>_detail.html;
        # only use this if the object in question is a model.
        if isinstance(self.object, models.Model):
            names.append("%s/%s%s.html" % (
                self.object._meta.app_label,
                self.object._meta.object_name.lower(),
                self.template_name_suffix
            ))
        elif hasattr(self, 'model') and self.model is not None and issubclass(self.model, models.Model):
            # No object (e.g. before get_object ran) — derive from the model.
            names.append("%s/%s%s.html" % (
                self.model._meta.app_label,
                self.model._meta.object_name.lower(),
                self.template_name_suffix
            ))
        return names
class DetailView(SingleObjectTemplateResponseMixin, BaseDetailView):
    """
    Render a "detail" view of an object.
    By default this is a model instance looked up from `self.queryset`, but the
    view will support display of *any* object by overriding `self.get_object()`.

    Pure composition of the template-selection mixin and the object-fetching
    base view; adds no behavior of its own.
    """
|
GuillaumeGomez/servo | refs/heads/master | tests/wpt/css-tests/tools/sslutils/openssl.py | 61 | import functools
import os
import random
import shutil
import subprocess
import tempfile
from datetime import datetime
class OpenSSL(object):
    """Context manager that runs openssl subcommands against a generated config."""
    def __init__(self, logger, binary, base_path, conf_path, hosts, duration,
                 base_conf_path=None):
        """Context manager for interacting with OpenSSL.
        Creates a config file for the duration of the context.
        :param logger: stdlib logger or python structured logger
        :param binary: path to openssl binary
        :param base_path: path to directory for storing certificates
        :param conf_path: path for configuration file storing configuration data
        :param hosts: list of hosts to include in configuration (or None if not
                      generating host certificates)
        :param duration: Certificate duration in days"""
        self.base_path = base_path
        self.binary = binary
        self.conf_path = conf_path
        self.base_conf_path = base_conf_path
        self.logger = logger
        self.proc = None  # most recent subprocess.Popen, while running
        self.cmd = []  # argv of the command currently (or last) executed
        self.hosts = hosts
        self.duration = duration
    def __enter__(self):
        # Materialise the openssl config for the lifetime of the context.
        with open(self.conf_path, "w") as f:
            f.write(get_config(self.base_path, self.hosts, self.duration))
        return self
    def __exit__(self, *args, **kwargs):
        # Remove the config written by __enter__.
        os.unlink(self.conf_path)
    def log(self, line):
        """Forward subprocess output to either a mozlog structured logger
        (process_output) or a plain stdlib logger (debug)."""
        if hasattr(self.logger, "process_output"):
            self.logger.process_output(self.proc.pid if self.proc is not None else None,
                                       line.decode("utf8", "replace"),
                                       command=" ".join(self.cmd))
        else:
            self.logger.debug(line)
    def __call__(self, cmd, *args, **kwargs):
        """Run a command using OpenSSL in the current context.
        :param cmd: The openssl subcommand to run
        :param *args: Additional arguments to pass to the command
        """
        self.cmd = [self.binary, cmd]
        # The x509 subcommand does not accept -config; all others get ours.
        if cmd != "x509":
            self.cmd += ["-config", self.conf_path]
        self.cmd += list(args)
        # Copy the environment, converting to plain strings. Windows
        # StartProcess is picky about all the keys/values being plain strings,
        # but at least in MSYS shells, the os.environ dictionary can be mixed.
        # NOTE(review): iteritems()/encode() is a Python 2 idiom; this module
        # predates Python 3 support — confirm before running under py3.
        env = {}
        for k, v in os.environ.iteritems():
            env[k.encode("utf8")] = v.encode("utf8")
        if self.base_conf_path is not None:
            env["OPENSSL_CONF"] = self.base_conf_path.encode("utf8")
        self.proc = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                     env=env)
        stdout, stderr = self.proc.communicate()
        self.log(stdout)
        if self.proc.returncode != 0:
            raise subprocess.CalledProcessError(self.proc.returncode, self.cmd,
                                                output=stdout)
        self.cmd = []
        self.proc = None
        return stdout
def make_subject(common_name,
                 country=None,
                 state=None,
                 locality=None,
                 organization=None,
                 organization_unit=None):
    """Build an OpenSSL ``-subj`` distinguished-name string.

    Fields are emitted in the conventional DN order (C, ST, L, O, OU, CN);
    any field left as None is omitted. Literal ``/`` characters inside a
    value are escaped so they cannot terminate the field.

    :param common_name: CN value (required)
    :return: string such as ``/C=US/CN=example``
    """
    # Pair each value with its DN key explicitly. The original implementation
    # looked parameters up via locals()[name], which silently breaks if a
    # parameter is ever renamed; this table keeps the mapping visible.
    fields = [("C", country),
              ("ST", state),
              ("L", locality),
              ("O", organization),
              ("OU", organization_unit),
              ("CN", common_name)]
    return "".join("/%s=%s" % (key, value.replace("/", "\\/"))
                   for key, value in fields
                   if value is not None)
def make_alt_names(hosts):
    """Return a subjectAltName value like ``DNS:a.example,DNS:b.example``."""
    return ",".join("DNS:%s" % host for host in hosts)
def get_config(root_dir, hosts, duration=30):
    """Render an openssl.cnf for a throwaway CA rooted at *root_dir*.

    :param root_dir: directory holding the CA database/serial/key files
    :param hosts: hostnames for the subjectAltName extension, or None to
        omit the extension entirely (used when generating the CA itself)
    :param duration: certificate/CRL validity in days
    :return: complete openssl configuration file contents as a string
    """
    if hosts is None:
        san_line = ""
    else:
        san_line = "subjectAltName = %s" % make_alt_names(hosts)
    if os.path.sep == "\\":
        # This seems to be needed for the Shining Light OpenSSL on
        # Windows, at least.
        root_dir = root_dir.replace("\\", "\\\\")
    # The template below is substituted with %-formatting; %(sep)s is the
    # (escaped) path separator so $dir-relative paths work on all platforms.
    rv = """[ ca ]
default_ca = CA_default
[ CA_default ]
dir = %(root_dir)s
certs = $dir
new_certs_dir = $certs
crl_dir = $dir%(sep)scrl
database = $dir%(sep)sindex.txt
private_key = $dir%(sep)scakey.pem
certificate = $dir%(sep)scacert.pem
serial = $dir%(sep)sserial
crldir = $dir%(sep)scrl
crlnumber = $dir%(sep)scrlnumber
crl = $crldir%(sep)scrl.pem
RANDFILE = $dir%(sep)sprivate%(sep)s.rand
x509_extensions = usr_cert
name_opt = ca_default
cert_opt = ca_default
default_days = %(duration)d
default_crl_days = %(duration)d
default_md = sha256
preserve = no
policy = policy_anything
copy_extensions = copy
[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 2048
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
attributes = req_attributes
x509_extensions = v3_ca
# Passwords for private keys if not present they will be prompted for
# input_password = secret
# output_password = secret
string_mask = utf8only
req_extensions = v3_req
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
countryName_default = AU
countryName_min = 2
countryName_max = 2
stateOrProvinceName = State or Province Name (full name)
stateOrProvinceName_default =
localityName = Locality Name (eg, city)
0.organizationName = Organization Name
0.organizationName_default = Web Platform Tests
organizationalUnitName = Organizational Unit Name (eg, section)
#organizationalUnitName_default =
commonName = Common Name (e.g. server FQDN or YOUR name)
commonName_max = 64
emailAddress = Email Address
emailAddress_max = 64
[ req_attributes ]
[ usr_cert ]
basicConstraints=CA:false
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer
[ v3_req ]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
%(san_line)s
[ v3_ca ]
basicConstraints = CA:true
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer:always
keyUsage = keyCertSign
""" % {"root_dir": root_dir,
       "san_line": san_line,
       "duration": duration,
       "sep": os.path.sep.replace("\\", "\\\\")}
    return rv
class OpenSSLEnvironment(object):
    """Manages a local CA plus per-host certificates via the openssl binary."""
    ssl_enabled = True  # Flag consumed by the server setup to enable TLS.
    def __init__(self, logger, openssl_binary="openssl", base_path=None,
                 password="web-platform-tests", force_regenerate=False,
                 duration=30, base_conf_path=None):
        """SSL environment that creates a local CA and host certificate using OpenSSL.
        By default this will look in base_path for existing certificates that are still
        valid and only create new certificates if there aren't any. This behaviour can
        be adjusted using the force_regenerate option.
        :param logger: a stdlib logging compatible logger or mozlog structured logger
        :param openssl_binary: Path to the OpenSSL binary
        :param base_path: Path in which certificates will be stored. If None, a temporary
                          directory will be used and removed when the server shuts down
        :param password: Password to use
        :param force_regenerate: Always create a new certificate even if one already exists.
        """
        self.logger = logger
        self.temporary = False  # True when base_path is a mkdtemp we must clean up
        if base_path is None:
            base_path = tempfile.mkdtemp()
            self.temporary = True
        self.base_path = os.path.abspath(base_path)
        self.password = password
        self.force_regenerate = force_regenerate
        self.duration = duration
        self.base_conf_path = base_conf_path
        self.path = None  # partial(os.path.join, base_path), set in __enter__
        self.binary = openssl_binary
        self.openssl = None
        self._ca_cert_path = None
        self._ca_key_path = None
        self.host_certificates = {}  # (hosts tuple) -> (key path, cert path)
    def __enter__(self):
        """Create the CA database files (index.txt, serial) openssl expects."""
        if not os.path.exists(self.base_path):
            os.makedirs(self.base_path)
        path = functools.partial(os.path.join, self.base_path)
        with open(path("index.txt"), "w"):
            pass
        with open(path("serial"), "w") as f:
            serial = "%x" % random.randint(0, 1000000)
            if len(serial) % 2:
                # Pad to an even number of hex digits — presumably openssl
                # requires whole bytes in the serial; confirm before changing.
                serial = "0" + serial
            f.write(serial)
        self.path = path
        return self
    def __exit__(self, *args, **kwargs):
        # Only remove the directory if we created it ourselves.
        if self.temporary:
            shutil.rmtree(self.base_path)
    def _config_openssl(self, hosts):
        """Return an OpenSSL context manager configured for *hosts*."""
        conf_path = self.path("openssl.cfg")
        return OpenSSL(self.logger, self.binary, self.base_path, conf_path, hosts,
                       self.duration, self.base_conf_path)
    def ca_cert_path(self):
        """Get the path to the CA certificate file, generating a
        new one if needed"""
        if self._ca_cert_path is None and not self.force_regenerate:
            self._load_ca_cert()
        if self._ca_cert_path is None:
            self._generate_ca()
        return self._ca_cert_path
    def _load_ca_cert(self):
        """Adopt an existing, still-valid CA key/cert pair if present."""
        key_path = self.path("cakey.pem")
        cert_path = self.path("cacert.pem")
        if self.check_key_cert(key_path, cert_path, None):
            self.logger.info("Using existing CA cert")
            self._ca_key_path, self._ca_cert_path = key_path, cert_path
    def check_key_cert(self, key_path, cert_path, hosts):
        """Check that a key and cert file exist and are valid"""
        if not os.path.exists(key_path) or not os.path.exists(cert_path):
            return False
        with self._config_openssl(hosts) as openssl:
            end_date_str = openssl("x509",
                                   "-noout",
                                   "-enddate",
                                   "-in", cert_path).split("=", 1)[1].strip()
        # Not sure if this works in other locales
        end_date = datetime.strptime(end_date_str, "%b %d %H:%M:%S %Y %Z")
        # Should have some buffer here e.g. 1 hr
        if end_date < datetime.now():
            return False
        #TODO: check the key actually signed the cert.
        return True
    def _generate_ca(self):
        """Create a fresh self-signed CA (key, request, certificate)."""
        path = self.path
        self.logger.info("Generating new CA in %s" % self.base_path)
        key_path = path("cakey.pem")
        req_path = path("careq.pem")
        cert_path = path("cacert.pem")
        with self._config_openssl(None) as openssl:
            openssl("req",
                    "-batch",
                    "-new",
                    "-newkey", "rsa:2048",
                    "-keyout", key_path,
                    "-out", req_path,
                    "-subj", make_subject("web-platform-tests"),
                    "-passout", "pass:%s" % self.password)
            openssl("ca",
                    "-batch",
                    "-create_serial",
                    "-keyfile", key_path,
                    "-passin", "pass:%s" % self.password,
                    "-selfsign",
                    "-extensions", "v3_ca",
                    "-in", req_path,
                    "-out", cert_path)
        # The request file is an intermediate artefact; drop it.
        os.unlink(req_path)
        self._ca_key_path, self._ca_cert_path = key_path, cert_path
    def host_cert_path(self, hosts):
        """Get a tuple of (private key path, certificate path) for a host,
        generating new ones if necessary.
        hosts must be a list of all hosts to appear on the certificate, with
        the primary hostname first."""
        hosts = tuple(hosts)
        if hosts not in self.host_certificates:
            if not self.force_regenerate:
                key_cert = self._load_host_cert(hosts)
            else:
                key_cert = None
            if key_cert is None:
                key, cert = self._generate_host_cert(hosts)
            else:
                key, cert = key_cert
            self.host_certificates[hosts] = key, cert
        return self.host_certificates[hosts]
    def _load_host_cert(self, hosts):
        """Return (key, cert) paths for an existing valid host cert, or None."""
        host = hosts[0]
        key_path = self.path("%s.key" % host)
        cert_path = self.path("%s.pem" % host)
        # TODO: check that this cert was signed by the CA cert
        if self.check_key_cert(key_path, cert_path, hosts):
            self.logger.info("Using existing host cert")
            return key_path, cert_path
    def _generate_host_cert(self, hosts):
        """Create a key and CA-signed certificate for the given hosts."""
        host = hosts[0]
        if self._ca_key_path is None:
            self._generate_ca()
        ca_key_path = self._ca_key_path
        assert os.path.exists(ca_key_path)
        path = self.path
        req_path = path("wpt.req")
        cert_path = path("%s.pem" % host)
        key_path = path("%s.key" % host)
        self.logger.info("Generating new host cert")
        with self._config_openssl(hosts) as openssl:
            openssl("req",
                    "-batch",
                    "-newkey", "rsa:2048",
                    "-keyout", key_path,
                    "-in", ca_key_path,
                    "-nodes",
                    "-out", req_path)
            openssl("ca",
                    "-batch",
                    "-in", req_path,
                    "-passin", "pass:%s" % self.password,
                    "-subj", make_subject(host),
                    "-out", cert_path)
        os.unlink(req_path)
        return key_path, cert_path
|
QQuick/Transcrypt | refs/heads/master | transcrypt/development/automated_tests/transcrypt/proxies/__init__.py | 1 | from org.transcrypt.stubs.browser import __pragma__
def run (autoTester):
    """Exercise __getattr__/__setattr__ proxying under Transcrypt vs CPython."""
    class CodedStore:
        # Stores values under '_'-prefixed keys, en/decoding on access, to
        # prove both dunder hooks fire.
        def __init__ (self):
            try:
                __pragma__ ('js', '{}', 'self ["__dict__"] = {}')
            except:
                pass
        def __setattr__ (self, name, message):
            self.__dict__ ['_' + name] = 'coded_' + message
        def __getattr__ (self, name):
            return 'decoded_' + self.__dict__ ['_' + name]
        def peek (self, name):
            # Bypass __getattr__ to inspect the raw stored value.
            return self.__dict__ ['_' + name]
    s = CodedStore ()
    s.john = 'brown'
    s.mary = 'white'
    autoTester.check (s.peek ('john'))
    autoTester.check (s.peek ('mary'))
    autoTester.check (s.john)
    autoTester.check (s.mary)
    '''
    The code above produces the following output:
    'coded_brown'
    'coded_white'
    'decoded_coded_brown'
    'decoded_coded_white'
    '''
    # Check that the hooks compose correctly across an inheritance diamond.
    class A:
        def __init__ (self):
            self.p = 1
            self.q = 2
    class B (A):
        def __getattr__ (self, name):
            return 'Faked {}'.format (name)
    class C (A):
        def __setattr__ (self, name, value):
            autoTester.check ('Set faked {}'.format (name))
            A.__setattr__ (self, name, value)
            # Needed for CPython, inherited from class 'object'
            # Transcrypt doesn't need it, if there's no __setattrib__ it will just use self [name] = value
    class D (B, C):
        pass
    a = A ()
    b = B ()
    c = C ()
    d = D ()
    autoTester.check (a.p, a.q)
    a.p = 3
    autoTester.check (a.p, a.q)
    autoTester.check (b.p, b.q, b.r, b.s)
    b.p = 4
    b.r = 5
    autoTester.check (b.p, b.q, b.r, b.s)
    autoTester.check (c.p, c.q)
    c.p = 6
    c.q = 7
    autoTester.check (c.p, c.q)
    autoTester.check (d.p, d.q, d.r, d.s)
    d.p = 8
    d.q = 9
    d.r = 10
    d.s = 11
    autoTester.check (d.p, d.q, d.r, d.s)
    # Issue 587, code as utilized by pjbonestro
    autoTester.check ("Issue 587")
    class Element():
        def __init__(self):
            self.message = "Goodbye"
        def sayBye(self):
            autoTester.check (self.message)
    class Wrapper():
        # Transparent proxy: reads/writes fall through to the wrapped element.
        def __init__ (self, element):
            self.element = element
        def __setattr__ (self, name, value):
            """ set attribute on element if it already has the attribute """
            if name != "element" and hasattr(self.element, name):
                setattr(self.element, name, value)
            else:
                self.__dict__[name] = value
        def __getattr__ (self, name):
            """ get attribute from element if this object doesn't have the attribute """
            result = getattr(self.element, name)
            # if result is a function, bind self.element to it
            if hasattr(result, 'call') and hasattr(result, 'bind'):
                result = result.bind(self.element)
            return result
        def sayHello(self):
            autoTester.check("Hello")
            return self
    e = Element()
    w = Wrapper(e)
    #
    # Usage
    #
    e.sayBye()
    w.sayBye() # call functions on e, using w
    # and method chaining should work:
    w.sayHello().sayBye()
    w.message = "Bye" # set attributes on e, using w
    e.sayBye()
    w.sayBye() # call functions on e, using w
    # and method chaining should work:
    w.sayHello().sayBye()
    autoTester.check ("End issue 587")
|
pdfminer/pdfminer.six | refs/heads/develop | pdfminer/converter.py | 1 | import io
import logging
import re
import sys
from .pdfdevice import PDFTextDevice
from .pdffont import PDFUnicodeNotDefined
from .layout import LTContainer
from .layout import LTPage
from .layout import LTText
from .layout import LTLine
from .layout import LTRect
from .layout import LTCurve
from .layout import LTFigure
from .layout import LTImage
from .layout import LTChar
from .layout import LTTextLine
from .layout import LTTextBox
from .layout import LTTextBoxVertical
from .layout import LTTextGroup
from .utils import apply_matrix_pt
from .utils import mult_matrix
from .utils import enc
from .utils import bbox2str
from . import utils
log = logging.getLogger(__name__)
class PDFLayoutAnalyzer(PDFTextDevice):
    def __init__(self, rsrcmgr, pageno=1, laparams=None):
        """Layout-analysing PDF device.

        :param rsrcmgr: resource manager shared with the interpreter
        :param pageno: number assigned to the first received page
        :param laparams: LAParams controlling layout analysis, or None to
            skip the analysis step entirely
        """
        PDFTextDevice.__init__(self, rsrcmgr)
        self.pageno = pageno
        self.laparams = laparams
        self._stack = []  # enclosing containers while nested inside figures
        return
    def begin_page(self, page, ctm):
        """Start a new LTPage sized from the page mediabox transformed by *ctm*."""
        (x0, y0, x1, y1) = page.mediabox
        (x0, y0) = apply_matrix_pt(ctm, (x0, y0))
        (x1, y1) = apply_matrix_pt(ctm, (x1, y1))
        # Normalise the bbox to origin (0, 0); abs() copes with flipped axes.
        mediabox = (0, 0, abs(x0-x1), abs(y0-y1))
        self.cur_item = LTPage(self.pageno, mediabox)
        return
    def end_page(self, page):
        """Finish the current page: run layout analysis (when configured),
        advance the page counter and pass the page to receive_layout()."""
        # All figures must have been closed by now.
        assert not self._stack, str(len(self._stack))
        assert isinstance(self.cur_item, LTPage), str(type(self.cur_item))
        if self.laparams is not None:
            self.cur_item.analyze(self.laparams)
        self.pageno += 1
        self.receive_layout(self.cur_item)
        return
def begin_figure(self, name, bbox, matrix):
self._stack.append(self.cur_item)
self.cur_item = LTFigure(name, bbox, mult_matrix(matrix, self.ctm))
return
def end_figure(self, _):
fig = self.cur_item
assert isinstance(self.cur_item, LTFigure), str(type(self.cur_item))
self.cur_item = self._stack.pop()
self.cur_item.add(fig)
return
def render_image(self, name, stream):
assert isinstance(self.cur_item, LTFigure), str(type(self.cur_item))
item = LTImage(name, stream,
(self.cur_item.x0, self.cur_item.y0,
self.cur_item.x1, self.cur_item.y1))
self.cur_item.add(item)
return
def paint_path(self, gstate, stroke, fill, evenodd, path):
"""Paint paths described in section 4.4 of the PDF reference manual"""
shape = ''.join(x[0] for x in path)
if shape.count('m') > 1:
# recurse if there are multiple m's in this shape
for m in re.finditer(r'm[^m]+', shape):
subpath = path[m.start(0):m.end(0)]
self.paint_path(gstate, stroke, fill, evenodd, subpath)
else:
if shape == 'ml':
# single line segment
(x0, y0) = apply_matrix_pt(self.ctm, path[0][1:])
(x1, y1) = apply_matrix_pt(self.ctm, path[1][1:])
if x0 == x1 or y0 == y1:
line = LTLine(gstate.linewidth, (x0, y0), (x1, y1), stroke,
fill, evenodd, gstate.scolor, gstate.ncolor)
self.cur_item.add(line)
elif shape == 'mlllh':
(x0, y0) = apply_matrix_pt(self.ctm, path[0][1:])
(x1, y1) = apply_matrix_pt(self.ctm, path[1][1:])
(x2, y2) = apply_matrix_pt(self.ctm, path[2][1:])
(x3, y3) = apply_matrix_pt(self.ctm, path[3][1:])
if (x0 == x1 and y1 == y2 and x2 == x3 and y3 == y0) or \
(y0 == y1 and x1 == x2 and y2 == y3 and x3 == x0):
rect = LTRect(gstate.linewidth, (x0, y0, x2, y2), stroke,
fill, evenodd, gstate.scolor, gstate.ncolor)
self.cur_item.add(rect)
else:
curve = self._create_curve(gstate, stroke, fill, evenodd,
path)
self.cur_item.add(curve)
else:
curve = self._create_curve(gstate, stroke, fill, evenodd, path)
self.cur_item.add(curve)
def _create_curve(self, gstate, stroke, fill, evenodd, path):
"""Create a `LTCurve` object for the paint path operator"""
pts = [
apply_matrix_pt(self.ctm, point)
for p in path
for point in zip(p[1::2], p[2::2])
]
curve = LTCurve(gstate.linewidth, pts, stroke, fill, evenodd,
gstate.scolor, gstate.ncolor)
return curve
def render_char(self, matrix, font, fontsize, scaling, rise, cid, ncs,
graphicstate):
try:
text = font.to_unichr(cid)
assert isinstance(text, str), str(type(text))
except PDFUnicodeNotDefined:
text = self.handle_undefined_char(font, cid)
textwidth = font.char_width(cid)
textdisp = font.char_disp(cid)
item = LTChar(matrix, font, fontsize, scaling, rise, text, textwidth,
textdisp, ncs, graphicstate)
self.cur_item.add(item)
return item.adv
def handle_undefined_char(self, font, cid):
log.info('undefined: %r, %r', font, cid)
return '(cid:%d)' % cid
def receive_layout(self, ltpage):
return
class PDFPageAggregator(PDFLayoutAnalyzer):
    """Layout analyzer that simply stores the most recent page layout."""
    def __init__(self, rsrcmgr, pageno=1, laparams=None):
        PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
                                   laparams=laparams)
        # Holds the LTPage delivered by the last receive_layout() call.
        self.result = None
    def receive_layout(self, ltpage):
        self.result = ltpage
    def get_result(self):
        """Return the most recently aggregated page layout (or None)."""
        return self.result
class PDFConverter(PDFLayoutAnalyzer):
    """Base class for converters that serialize layout results to a stream."""
    def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1,
                 laparams=None):
        PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
                                   laparams=laparams)
        self.outfp = outfp
        self.codec = codec
        # Remember once whether the output stream expects bytes or text.
        self.outfp_binary = self._is_binary_stream(self.outfp)
    @staticmethod
    def _is_binary_stream(outfp):
        """Test if a stream is binary or not."""
        if hasattr(outfp, 'mode'):
            # A stream with an explicit mode is binary iff it contains 'b'.
            return 'b' in outfp.mode
        if isinstance(outfp, io.BytesIO):
            return True
        if isinstance(outfp, io.StringIO):
            return False
        # Unknown stream types are assumed to be binary.
        return True
class TextConverter(PDFConverter):
    """Converter that writes plain text for every LTText node in the layout."""
    def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
                 showpageno=False, imagewriter=None):
        PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
                              laparams=laparams)
        self.showpageno = showpageno
        self.imagewriter = imagewriter
    def write_text(self, text):
        """Encode *text* with the configured codec and write it out."""
        encoded = utils.compatible_encode_method(text, self.codec, 'ignore')
        if self.outfp_binary:
            encoded = encoded.encode()
        self.outfp.write(encoded)
    def receive_layout(self, ltpage):
        def emit(item):
            # Depth-first walk: containers recurse, text leaves are written.
            if isinstance(item, LTContainer):
                for child in item:
                    emit(child)
            elif isinstance(item, LTText):
                self.write_text(item.get_text())
            # Separate chain: a text box also gets a trailing newline, and
            # images are exported when an image writer is configured.
            if isinstance(item, LTTextBox):
                self.write_text('\n')
            elif isinstance(item, LTImage):
                if self.imagewriter is not None:
                    self.imagewriter.export_image(item)
        if self.showpageno:
            self.write_text('Page %s\n' % ltpage.pageid)
        emit(ltpage)
        self.write_text('\f')
    # Dummy overrides to save memory/CPU when only text is wanted: image
    # and drawing output is not recorded at all.
    def render_image(self, name, stream):
        if self.imagewriter is None:
            return
        PDFConverter.render_image(self, name, stream)
    def paint_path(self, gstate, stroke, fill, evenodd, path):
        return
class HTMLConverter(PDFConverter):
    """Converter that renders the layout tree as absolutely-positioned HTML.

    Text, rectangles and images are emitted as inline-styled elements;
    ``rect_colors``/``text_colors`` control which item kinds get visible
    debug borders and colored text.
    """
    # Border colors used for the full debug overlay (enabled via debug=1).
    RECT_COLORS = {
        'figure': 'yellow',
        'textline': 'magenta',
        'textbox': 'cyan',
        'textgroup': 'red',
        'curve': 'black',
        'page': 'gray',
    }
    # Text colors used for the full debug overlay.
    TEXT_COLORS = {
        'textbox': 'blue',
        'char': 'black',
    }
    def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
                 scale=1, fontscale=1.0, layoutmode='normal', showpageno=True,
                 pagemargin=50, imagewriter=None, debug=0, rect_colors=None,
                 text_colors=None):
        PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
                              laparams=laparams)
        # Default color maps only paint curves/pages and plain characters.
        if text_colors is None:
            text_colors = {'char': 'black'}
        if rect_colors is None:
            rect_colors = {'curve': 'black', 'page': 'gray'}
        self.scale = scale
        self.fontscale = fontscale
        self.layoutmode = layoutmode
        self.showpageno = showpageno
        self.pagemargin = pagemargin
        self.imagewriter = imagewriter
        self.rect_colors = rect_colors
        self.text_colors = text_colors
        if debug:
            # Debug mode layers the full overlay palettes on top.
            self.rect_colors.update(self.RECT_COLORS)
            self.text_colors.update(self.TEXT_COLORS)
        # Running vertical offset: pages are stacked down the document.
        self._yoffset = self.pagemargin
        self._font = None
        self._fontstack = []
        self.write_header()
        return
    def write(self, text):
        # Encode for the configured codec; the py2 branch converts back to
        # the native str type expected by old file objects.
        if self.codec:
            text = text.encode(self.codec)
        if sys.version_info < (3, 0):
            text = str(text)
        self.outfp.write(text)
        return
    def write_header(self):
        """Emit the opening <html>/<head> boilerplate with charset meta."""
        self.write('<html><head>\n')
        if self.codec:
            s = '<meta http-equiv="Content-Type" content="text/html; ' \
                'charset=%s">\n' % self.codec
        else:
            s = '<meta http-equiv="Content-Type" content="text/html">\n'
        self.write(s)
        self.write('</head><body>\n')
        return
    def write_footer(self):
        """Emit the page-link index and closing tags; called from close()."""
        page_links = ['<a href="#{}">{}</a>'.format(i, i)
                      for i in range(1, self.pageno)]
        s = '<div style="position:absolute; top:0px;">Page: %s</div>\n' % \
            ', '.join(page_links)
        self.write(s)
        self.write('</body></html>\n')
        return
    def write_text(self, text):
        # enc() HTML-escapes the text before writing.
        self.write(enc(text))
        return
    def place_rect(self, color, borderwidth, x, y, w, h):
        """Emit a bordered span for the item kind *color* (a color-map key)."""
        color = self.rect_colors.get(color)
        # A missing color map entry suppresses the rectangle entirely.
        if color is not None:
            s = '<span style="position:absolute; border: %s %dpx solid; ' \
                'left:%dpx; top:%dpx; width:%dpx; height:%dpx;"></span>\n' % \
                (color, borderwidth, x * self.scale,
                 (self._yoffset - y) * self.scale, w * self.scale,
                 h * self.scale)
            self.write(
                s)
        return
    def place_border(self, color, borderwidth, item):
        """Draw a rectangle around *item*'s bounding box."""
        self.place_rect(color, borderwidth, item.x0, item.y1, item.width,
                        item.height)
        return
    def place_image(self, item, borderwidth, x, y, w, h):
        # Only emitted when an image writer exists to export the bitmap.
        if self.imagewriter is not None:
            name = self.imagewriter.export_image(item)
            s = '<img src="%s" border="%d" style="position:absolute; ' \
                'left:%dpx; top:%dpx;" width="%d" height="%d" />\n' % \
                (enc(name), borderwidth, x * self.scale,
                 (self._yoffset - y) * self.scale, w * self.scale,
                 h * self.scale)
            self.write(s)
        return
    def place_text(self, color, text, x, y, size):
        """Emit *text* as a positioned span if its kind has a text color."""
        color = self.text_colors.get(color)
        if color is not None:
            s = '<span style="position:absolute; color:%s; left:%dpx; ' \
                'top:%dpx; font-size:%dpx;">' % \
                (color, x * self.scale, (self._yoffset - y) * self.scale,
                 size * self.scale * self.fontscale)
            self.write(s)
            self.write_text(text)
            self.write('</span>\n')
        return
    def begin_div(self, color, borderwidth, x, y, w, h, writing_mode=False):
        # NOTE(review): when writing_mode is left at its default, the CSS
        # literally reads "writing-mode:False" — presumably callers always
        # pass a real mode string; confirm before relying on the default.
        self._fontstack.append(self._font)
        self._font = None
        s = '<div style="position:absolute; border: %s %dpx solid; ' \
            'writing-mode:%s; left:%dpx; top:%dpx; width:%dpx; ' \
            'height:%dpx;">' % \
            (color, borderwidth, writing_mode, x * self.scale,
             (self._yoffset - y) * self.scale, w * self.scale, h * self.scale)
        self.write(s)
        return
    def end_div(self, color):
        # Close any font span opened inside the div, then the div itself.
        if self._font is not None:
            self.write('</span>')
        self._font = self._fontstack.pop()
        self.write('</div>')
        return
    def put_text(self, text, fontname, fontsize):
        """Write *text*, opening a new font span only when the font changes."""
        font = (fontname, fontsize)
        if font != self._font:
            if self._font is not None:
                self.write('</span>')
            # Remove subset tag from fontname, see PDF Reference 5.5.3
            fontname_without_subset_tag = fontname.split('+')[-1]
            self.write('<span style="font-family: %s; font-size:%dpx">' %
                       (fontname_without_subset_tag,
                        fontsize * self.scale * self.fontscale))
            self._font = font
        self.write_text(text)
        return
    def put_newline(self):
        self.write('<br>')
        return
    def receive_layout(self, ltpage):
        """Render one analyzed LTPage into HTML."""
        def show_group(item):
            # Recursively outline text groups (debug overlay only).
            if isinstance(item, LTTextGroup):
                self.place_border('textgroup', 1, item)
                for child in item:
                    show_group(child)
            return
        def render(item):
            # Dispatch on layout item type; order of isinstance checks
            # matters because the classes form a hierarchy.
            if isinstance(item, LTPage):
                self._yoffset += item.y1
                self.place_border('page', 1, item)
                if self.showpageno:
                    self.write('<div style="position:absolute; top:%dpx;">' %
                               ((self._yoffset-item.y1)*self.scale))
                    self.write('<a name="{}">Page {}</a></div>\n'
                               .format(item.pageid, item.pageid))
                for child in item:
                    render(child)
                if item.groups is not None:
                    for group in item.groups:
                        show_group(group)
            elif isinstance(item, LTCurve):
                self.place_border('curve', 1, item)
            elif isinstance(item, LTFigure):
                self.begin_div('figure', 1, item.x0, item.y1, item.width,
                               item.height)
                for child in item:
                    render(child)
                self.end_div('figure')
            elif isinstance(item, LTImage):
                self.place_image(item, 1, item.x0, item.y1, item.width,
                                 item.height)
            else:
                # 'exact' mode places every char absolutely; other modes
                # emit flowing text inside positioned divs.
                if self.layoutmode == 'exact':
                    if isinstance(item, LTTextLine):
                        self.place_border('textline', 1, item)
                        for child in item:
                            render(child)
                    elif isinstance(item, LTTextBox):
                        self.place_border('textbox', 1, item)
                        self.place_text('textbox', str(item.index+1), item.x0,
                                        item.y1, 20)
                        for child in item:
                            render(child)
                    elif isinstance(item, LTChar):
                        self.place_border('char', 1, item)
                        self.place_text('char', item.get_text(), item.x0,
                                        item.y1, item.size)
                else:
                    if isinstance(item, LTTextLine):
                        for child in item:
                            render(child)
                        if self.layoutmode != 'loose':
                            self.put_newline()
                    elif isinstance(item, LTTextBox):
                        self.begin_div('textbox', 1, item.x0, item.y1,
                                       item.width, item.height,
                                       item.get_writing_mode())
                        for child in item:
                            render(child)
                        self.end_div('textbox')
                    elif isinstance(item, LTChar):
                        self.put_text(item.get_text(), item.fontname,
                                      item.size)
                    elif isinstance(item, LTText):
                        self.write_text(item.get_text())
            return
        render(ltpage)
        # Advance the offset so the next page is drawn below this one.
        self._yoffset += self.pagemargin
        return
    def close(self):
        self.write_footer()
        return
class XMLConverter(PDFConverter):
    """Converter that serializes the layout tree as a <pages> XML document."""
    # C0 control characters (minus tab/newline/CR) that are invalid in XML.
    CONTROL = re.compile('[\x00-\x08\x0b-\x0c\x0e-\x1f]')
    def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
                 imagewriter=None, stripcontrol=False):
        PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
                              laparams=laparams)
        self.imagewriter = imagewriter
        # When set, control characters are stripped from text content.
        self.stripcontrol = stripcontrol
        self.write_header()
        return
    def write(self, text):
        if self.codec:
            text = text.encode(self.codec)
        self.outfp.write(text)
        return
    def write_header(self):
        """Emit the XML declaration and the opening <pages> element."""
        if self.codec:
            self.write('<?xml version="1.0" encoding="%s" ?>\n' % self.codec)
        else:
            self.write('<?xml version="1.0" ?>\n')
        self.write('<pages>\n')
        return
    def write_footer(self):
        self.write('</pages>\n')
        return
    def write_text(self, text):
        # enc() XML-escapes the text before writing.
        if self.stripcontrol:
            text = self.CONTROL.sub('', text)
        self.write(enc(text))
        return
    def receive_layout(self, ltpage):
        """Serialize one analyzed LTPage as XML."""
        def show_group(item):
            # Emit the layout-analysis grouping tree under <layout>.
            if isinstance(item, LTTextBox):
                self.write('<textbox id="%d" bbox="%s" />\n' %
                           (item.index, bbox2str(item.bbox)))
            elif isinstance(item, LTTextGroup):
                self.write('<textgroup bbox="%s">\n' % bbox2str(item.bbox))
                for child in item:
                    show_group(child)
                self.write('</textgroup>\n')
            return
        def render(item):
            # Dispatch on layout item type; the isinstance order matters
            # because LTLine/LTRect are more specific than LTCurve.
            if isinstance(item, LTPage):
                s = '<page id="%s" bbox="%s" rotate="%d">\n' % \
                    (item.pageid, bbox2str(item.bbox), item.rotate)
                self.write(s)
                for child in item:
                    render(child)
                if item.groups is not None:
                    self.write('<layout>\n')
                    for group in item.groups:
                        show_group(group)
                    self.write('</layout>\n')
                self.write('</page>\n')
            elif isinstance(item, LTLine):
                s = '<line linewidth="%d" bbox="%s" />\n' % \
                    (item.linewidth, bbox2str(item.bbox))
                self.write(s)
            elif isinstance(item, LTRect):
                s = '<rect linewidth="%d" bbox="%s" />\n' % \
                    (item.linewidth, bbox2str(item.bbox))
                self.write(s)
            elif isinstance(item, LTCurve):
                s = '<curve linewidth="%d" bbox="%s" pts="%s"/>\n' % \
                    (item.linewidth, bbox2str(item.bbox), item.get_pts())
                self.write(s)
            elif isinstance(item, LTFigure):
                s = '<figure name="%s" bbox="%s">\n' % \
                    (item.name, bbox2str(item.bbox))
                self.write(s)
                for child in item:
                    render(child)
                self.write('</figure>\n')
            elif isinstance(item, LTTextLine):
                self.write('<textline bbox="%s">\n' % bbox2str(item.bbox))
                for child in item:
                    render(child)
                self.write('</textline>\n')
            elif isinstance(item, LTTextBox):
                wmode = ''
                if isinstance(item, LTTextBoxVertical):
                    wmode = ' wmode="vertical"'
                s = '<textbox id="%d" bbox="%s"%s>\n' %\
                    (item.index, bbox2str(item.bbox), wmode)
                self.write(s)
                for child in item:
                    render(child)
                self.write('</textbox>\n')
            elif isinstance(item, LTChar):
                s = '<text font="%s" bbox="%s" colourspace="%s" ' \
                    'ncolour="%s" size="%.3f">' % \
                    (enc(item.fontname), bbox2str(item.bbox),
                     item.ncs.name, item.graphicstate.ncolor, item.size)
                self.write(s)
                self.write_text(item.get_text())
                self.write('</text>\n')
            elif isinstance(item, LTText):
                # NOTE(review): this branch interpolates the raw text without
                # enc() escaping, unlike the LTChar branch above — confirm
                # whether markup-significant characters can occur here.
                self.write('<text>%s</text>\n' % item.get_text())
            elif isinstance(item, LTImage):
                if self.imagewriter is not None:
                    name = self.imagewriter.export_image(item)
                    self.write('<image src="%s" width="%d" height="%d" />\n' %
                               (enc(name), item.width, item.height))
                else:
                    self.write('<image width="%d" height="%d" />\n' %
                               (item.width, item.height))
            else:
                assert False, str(('Unhandled', item))
            return
        render(ltpage)
        return
    def close(self):
        self.write_footer()
        return
|
maheshakya/scikit-learn | refs/heads/master | examples/linear_model/plot_logistic_l1_l2_sparsity.py | 384 | """
==============================================
L1 Penalty and Sparsity in Logistic Regression
==============================================
Comparison of the sparsity (percentage of zero coefficients) of solutions when
L1 and L2 penalty are used for different values of C. We can see that large
values of C give more freedom to the model. Conversely, smaller values of C
constrain the model more. In the L1 penalty case, this leads to sparser
solutions.
We classify 8x8 images of digits into two classes: 0-4 against 5-9.
The visualization shows coefficients of the models for varying C.
"""
print(__doc__)
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
#          Mathieu Blondel <mathieu@mblondel.org>
#          Andreas Mueller <amueller@ais.uni-bonn.de>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
digits = datasets.load_digits()
X, y = digits.data, digits.target
X = StandardScaler().fit_transform(X)
# classify small against large digits
# Use the builtin int: the np.int alias was deprecated in NumPy 1.20 and
# removed in NumPy 1.24, so astype(np.int) raises AttributeError there.
y = (y > 4).astype(int)
# Set regularization parameter
for i, C in enumerate((100, 1, 0.01)):
    # turn down tolerance for short training time
    # solver='liblinear' is specified explicitly: it supports the L1
    # penalty (newer scikit-learn defaults to 'lbfgs', which is L2-only)
    # and matches this example's historical default solver.
    clf_l1_LR = LogisticRegression(C=C, penalty='l1', tol=0.01,
                                   solver='liblinear')
    clf_l2_LR = LogisticRegression(C=C, penalty='l2', tol=0.01,
                                   solver='liblinear')
    clf_l1_LR.fit(X, y)
    clf_l2_LR.fit(X, y)
    coef_l1_LR = clf_l1_LR.coef_.ravel()
    coef_l2_LR = clf_l2_LR.coef_.ravel()
    # coef_l1_LR contains zeros due to the
    # L1 sparsity inducing norm
    sparsity_l1_LR = np.mean(coef_l1_LR == 0) * 100
    sparsity_l2_LR = np.mean(coef_l2_LR == 0) * 100
    print("C=%.2f" % C)
    print("Sparsity with L1 penalty: %.2f%%" % sparsity_l1_LR)
    print("score with L1 penalty: %.4f" % clf_l1_LR.score(X, y))
    print("Sparsity with L2 penalty: %.2f%%" % sparsity_l2_LR)
    print("score with L2 penalty: %.4f" % clf_l2_LR.score(X, y))
    # Plot the absolute coefficient images side by side (L1 left, L2 right).
    l1_plot = plt.subplot(3, 2, 2 * i + 1)
    l2_plot = plt.subplot(3, 2, 2 * (i + 1))
    if i == 0:
        l1_plot.set_title("L1 penalty")
        l2_plot.set_title("L2 penalty")
    l1_plot.imshow(np.abs(coef_l1_LR.reshape(8, 8)), interpolation='nearest',
                   cmap='binary', vmax=1, vmin=0)
    l2_plot.imshow(np.abs(coef_l2_LR.reshape(8, 8)), interpolation='nearest',
                   cmap='binary', vmax=1, vmin=0)
    plt.text(-8, 3, "C = %.2f" % C)
    l1_plot.set_xticks(())
    l1_plot.set_yticks(())
    l2_plot.set_xticks(())
    l2_plot.set_yticks(())
plt.show()
|
M4sse/chromium.src | refs/heads/nw12 | build/android/pylib/utils/run_tests_helper.py | 113 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helper functions common to native, java and host-driven test runners."""
import logging
import sys
import time
class CustomFormatter(logging.Formatter):
  """Formatter prefixing the level initial and seconds since creation."""
  #override
  def __init__(self, fmt='%(threadName)-4s %(message)s'):
    # logging.Formatter is not a new-style class on older Pythons, so the
    # base initializer is called directly rather than through super().
    logging.Formatter.__init__(self, fmt=fmt)
    self._creation_time = time.time()
  #override
  def format(self, record):
    # Same old-style-class caveat as in __init__.
    rendered = logging.Formatter.format(self, record)
    # Abbreviate the main thread's name to keep the prefix compact.
    if 'MainThread' in rendered[:19]:
      rendered = rendered.replace('MainThread', 'Main', 1)
    elapsed = time.time() - self._creation_time
    return '%s %8.3fs %s' % (record.levelname[0], elapsed, rendered)
def SetLogLevel(verbose_count):
  """Sets log level as |verbose_count|."""
  if verbose_count >= 2:
    log_level = logging.DEBUG
  elif verbose_count == 1:
    log_level = logging.INFO
  else:
    # Default when no -v flags were given.
    log_level = logging.WARNING
  root = logging.getLogger()
  root.setLevel(log_level)
  # Route all records through stdout with the custom formatter.
  custom_handler = logging.StreamHandler(sys.stdout)
  custom_handler.setFormatter(CustomFormatter())
  root.addHandler(custom_handler)
|
dalegregory/odoo | refs/heads/8.0 | addons/account/test/test_parent_structure.py | 432 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# TODO: move this in a YAML test with !python tag
#
import xmlrpclib
# Connection settings for the local OpenERP server under test.
DB = 'training3'
USERID = 1
USERPASS = 'admin'
sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % ('localhost',8069))
# Fetch every account with its nested-set bounds (parent_left/parent_right).
ids = sock.execute(DB, USERID, USERPASS, 'account.account', 'search', [], {})
account_lists = sock.execute(DB, USERID, USERPASS, 'account.account', 'read', ids, ['parent_id','parent_left','parent_right'])
accounts = dict(map(lambda x: (x['id'],x), account_lists))
# Verify the nested-set invariants: each child's interval is strictly
# inside its parent's, every interval is well-formed, and no two
# intervals partially overlap.
for a in account_lists:
    if a['parent_id']:
        assert a['parent_left'] > accounts[a['parent_id'][0]]['parent_left']
        assert a['parent_right'] < accounts[a['parent_id'][0]]['parent_right']
    assert a['parent_left'] < a['parent_right']
    for a2 in account_lists:
        # No interval may straddle a boundary of another interval.
        assert not ((a2['parent_right']>a['parent_left']) and
            (a2['parent_left']<a['parent_left']) and
            (a2['parent_right']<a['parent_right']))
        if a2['parent_id']==a['id']:
            assert (a2['parent_left']>a['parent_left']) and (a2['parent_right']<a['parent_right'])
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
chokribr/invenio | refs/heads/master | invenio/ext/sqlalchemy/types/pickle_binary.py | 17 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012, 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Implement ZlibPickable column type."""
from sqlalchemy.types import TypeDecorator, LargeBinary
from invenio.utils.serializers import ZlibPickle
class PickleBinary(TypeDecorator):
    """Column type storing zlib-compressed pickles in a LargeBinary column."""
    impl = LargeBinary
    def process_bind_param(self, value, dialect):
        """Serialize *value* with ZlibPickle before writing to the column."""
        if value is None:
            return None
        return ZlibPickle.dumps(value)
    def process_result_value(self, value, dialect):
        """Deserialize column bytes with ZlibPickle when reading."""
        if value is None:
            return None
        return ZlibPickle.loads(value)
|
adlnet-archive/edx-platform | refs/heads/master | lms/djangoapps/django_comment_client/migrations/0001_initial.py | 188 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the Role/Permission tables and their M2Ms."""
    def forwards(self, orm):
        """Create Role, Permission and their many-to-many join tables."""
        # Adding model 'Role'
        db.create_table('django_comment_client_role', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=30)),
            ('course_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, blank=True)),
        ))
        db.send_create_signal('django_comment_client', ['Role'])
        # Adding M2M table for field users on 'Role'
        db.create_table('django_comment_client_role_users', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('role', models.ForeignKey(orm['django_comment_client.role'], null=False)),
            ('user', models.ForeignKey(orm['auth.user'], null=False))
        ))
        db.create_unique('django_comment_client_role_users', ['role_id', 'user_id'])
        # Adding model 'Permission'
        db.create_table('django_comment_client_permission', (
            ('name', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
        ))
        db.send_create_signal('django_comment_client', ['Permission'])
        # Adding M2M table for field roles on 'Permission'
        db.create_table('django_comment_client_permission_roles', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('permission', models.ForeignKey(orm['django_comment_client.permission'], null=False)),
            ('role', models.ForeignKey(orm['django_comment_client.role'], null=False))
        ))
        db.create_unique('django_comment_client_permission_roles', ['permission_id', 'role_id'])
    def backwards(self, orm):
        """Drop everything created by forwards(), in reverse order."""
        # Deleting model 'Role'
        db.delete_table('django_comment_client_role')
        # Removing M2M table for field users on 'Role'
        db.delete_table('django_comment_client_role_users')
        # Deleting model 'Permission'
        db.delete_table('django_comment_client_permission')
        # Removing M2M table for field roles on 'Permission'
        db.delete_table('django_comment_client_permission_roles')
    # Frozen ORM snapshot used by South at migration time (auto-generated;
    # do not edit by hand).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'django_comment_client.permission': {
            'Meta': {'object_name': 'Permission'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
            'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'permissions'", 'symmetrical': 'False', 'to': "orm['django_comment_client.Role']"})
        },
        'django_comment_client.role': {
            'Meta': {'object_name': 'Role'},
            'course_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'roles'", 'symmetrical': 'False', 'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['django_comment_client']
|
JohnTheodore/cloud-custodian | refs/heads/master | tests/test_s3.py | 2 | # Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import functools
import json
import os
import shutil
import tempfile
import time # NOQA needed for some recordings
from unittest import TestCase
from botocore.exceptions import ClientError
from dateutil.tz import tzutc
from c7n.executor import MainThreadExecutor
from c7n.resources import s3
from c7n.mu import LambdaManager
from c7n.ufuncs import s3crypt
from .common import (
BaseTest, ConfigTest, event_data, skip_if_not_validating, functional)
class RestoreCompletionTest(TestCase):
    """Checks s3.restore_complete's parsing of the x-amz-restore header."""
    def test_restore_complete(self):
        finished = ('ongoing-request="false", '
                    'expiry-date="Fri, 23 Dec 2012 00:00:00 GMT"')
        self.assertTrue(s3.restore_complete(finished))
        self.assertFalse(s3.restore_complete('ongoing-request="true"'))
class BucketScanLogTests(TestCase):
    """Exercises BucketScanLog's on-disk JSON batch format."""
    def setUp(self):
        # Each test gets a throwaway directory for the scan log.
        self.log_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.log_dir)
        self.log = s3.BucketScanLog(self.log_dir, 'test')
    def test_scan_log(self):
        batch_a = list(range(5))
        batch_b = list(range(5, 10))
        with self.log:
            self.log.add(batch_a)
            self.log.add(batch_b)
        # The log file holds the batches plus a terminating empty batch.
        with open(self.log.path) as fh:
            recorded = json.load(fh)
        self.assertEqual(recorded, [batch_a, batch_b, []])
def destroyBucket(client, bucket):
    """Delete every object in *bucket*, then the bucket itself."""
    listing = client.list_objects(Bucket=bucket)
    for obj in listing.get('Contents', []):
        client.delete_object(Bucket=bucket, Key=obj['Key'])
    client.delete_bucket(Bucket=bucket)
def destroyVersionedBucket(client, bucket):
    """Delete every object version in *bucket*, then the bucket itself."""
    versions = client.list_object_versions(Bucket=bucket).get('Versions', [])
    for version in versions:
        client.delete_object(
            Bucket=bucket, Key=version['Key'], VersionId=version['VersionId'])
    client.delete_bucket(Bucket=bucket)
def destroyBucketIfPresent(client, bucket):
    """Best-effort removal of a (possibly versioned) bucket.

    A 404 response (bucket does not exist) is swallowed; any other
    failure propagates to the caller.
    """
    try:
        destroyVersionedBucket(client, bucket)
    except Exception as exc:
        fallback = {'ResponseMetadata': {'HTTPStatusCode': None}}
        response = getattr(exc, 'response', fallback)
        status = response['ResponseMetadata']['HTTPStatusCode']
        if status != 404:
            raise
def generateBucketContents(s3, bucket, contents=None):
    """Write a set of text objects into *bucket*.

    :param s3: boto3 s3 service resource (the parameter intentionally
        shadows the module-level ``s3`` import; callers pass
        ``session.resource('s3')`` here).
    :param bucket: bucket name to populate.
    :param contents: optional mapping of key -> body text; a small
        default fixture is used when omitted.
    """
    default_contents = {
        'home.txt': 'hello',
        'AWSLogs/2015/10/10': 'out',
        'AWSLogs/2015/10/11': 'spot'}
    if contents is None:
        contents = default_contents
    # The original bound `b = s3.Bucket(bucket)` here but never used it;
    # the dead assignment has been removed.
    for k, v in contents.items():
        key = s3.Object(bucket, k)
        key.put(
            Body=v,
            ContentLength=len(v),
            ContentType='text/plain')
class BucketMetrics(BaseTest):
    """CloudWatch metrics filter against s3 buckets."""

    def test_metrics(self):
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_metrics')
        policy = self.load_policy({
            'name': 's3-obj-count',
            'resource': 's3',
            'filters': [
                {'type': 'metrics',
                 'value': 10000,
                 'name': 'NumberOfObjects',
                 'op': 'greater-than'}],
        }, session_factory=session_factory)
        matched = policy.run()
        self.assertEqual(len(matched), 1)
        bucket = matched[0]
        self.assertEqual(bucket['Name'], 'custodian-skunk-trails')
        # The metrics filter annotates matched resources.
        self.assertIn('c7n.metrics', bucket)
        self.assertIn(
            'AWS/S3.NumberOfObjects.Average', bucket['c7n.metrics'])
class BucketEncryption(BaseTest):
    """bucket-encryption filter coverage for SSE-S3 and SSE-KMS defaults."""

    def test_s3_bucket_encryption_filter(self):
        bname = 'c7n-bucket-with-encryption'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data(
            'test_s3_bucket_encryption_filter')
        client = session_factory().client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=bname)
        # Default bucket encryption with S3-managed keys.
        sse_config = {
            'Rules': [{
                'ApplyServerSideEncryptionByDefault': {
                    'SSEAlgorithm': 'AES256'
                }
            }]
        }
        client.put_bucket_encryption(
            Bucket=bname, ServerSideEncryptionConfiguration=sse_config)
        policy = self.load_policy({
            'name': 's3-enc',
            'resource': 's3',
            'filters': [
                {
                    'type': 'bucket-encryption',
                    'crypto': 'AES256'
                }
            ]
        }, session_factory=session_factory)
        matched = policy.run() or []
        self.assertEqual(len(matched), 1)
        self.assertEqual(matched[0]['Name'], bname)

    def test_s3_bucket_encryption_filter_kms(self):
        bname = 'c7n-bucket-with-encryption'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data(
            'test_s3_bucket_encryption_filter_kms')
        client = session_factory().client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=bname)
        aws_alias = 'arn:aws:kms:us-east-1:108891588060:key/079a6f7d-5f8a-4da1-a465-30aa099b9688'
        # Default bucket encryption with a KMS master key.
        sse_config = {
            'Rules': [{
                'ApplyServerSideEncryptionByDefault': {
                    'SSEAlgorithm': 'aws:kms',
                    'KMSMasterKeyID': aws_alias
                }
            }]
        }
        client.put_bucket_encryption(
            Bucket=bname, ServerSideEncryptionConfiguration=sse_config)
        policy = self.load_policy({
            'name': 's3-enc-kms',
            'resource': 's3',
            'filters': [
                {
                    'type': 'bucket-encryption',
                    'crypto': 'aws:kms',
                    'key': 'alias/aws/s3'
                }
            ]
        }, session_factory=session_factory)
        matched = policy.run() or []
        self.assertEqual(len(matched), 1)
        self.assertEqual(matched[0]['Name'], bname)
class BucketInventory(BaseTest):
    # Coverage for the `set-inventory` action: creating encrypted
    # inventory configurations (SSE-S3 / SSE-KMS), deleting them, and a
    # full create/update/delete round trip against recorded flight data.

    def test_s3_set_encrypted_inventory_sses3(self):
        """set-inventory with encryption: SSES3 stores an SSE-S3 encrypted
        destination and the requested optional fields."""
        bname = 'custodian-inventory-test'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_set_encrypted_inventory_sses3')
        client = session_factory().client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [
                {'Name': bname}
            ],
            'actions': [
                {
                    'type': 'set-inventory',
                    'destination': 'inv-dest',
                    'name': 'inv-name',
                    'state': 'enabled',
                    'encryption': 'SSES3',
                    'fields': ['Size', 'EncryptionStatus']
                }
            ]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        # Inspect the stored inventory configuration directly.
        invs = client.list_bucket_inventory_configurations(
            Bucket=bname).get('InventoryConfigurationList')
        self.assertTrue(invs)
        self.assertTrue('SSES3' in invs[0]['Destination']['S3BucketDestination']['Encryption'])
        self.assertTrue('EncryptionStatus' in invs[0]['OptionalFields'])

    def test_s3_set_encrypted_inventory_ssekms(self):
        """set-inventory with encryption: SSEKMS plus key_id stores an
        SSE-KMS encrypted destination."""
        bname = 'custodian-inventory-test'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_set_encrypted_inventory_ssekms')
        client = session_factory().client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [
                {'Name': bname}
            ],
            'actions': [
                {
                    'type': 'set-inventory',
                    'destination': 'inv-dest',
                    'name': 'inv-name',
                    'state': 'enabled',
                    'encryption': 'SSEKMS',
                    'key_id': 'arn:valid:kms',
                    'fields': ['Size', 'EncryptionStatus']
                }
            ]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        invs = client.list_bucket_inventory_configurations(
            Bucket=bname).get('InventoryConfigurationList')
        self.assertTrue(invs)
        self.assertTrue('SSEKMS' in invs[0]['Destination']['S3BucketDestination']['Encryption'])
        self.assertTrue('EncryptionStatus' in invs[0]['OptionalFields'])

    def test_s3_delete_inventory_inventory_not_set(self):
        """state: absent on a bucket with no inventory configuration
        leaves the bucket with none (and does not error)."""
        bname = 'delete_inventory'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_delete_inventory_inventory_not_set')
        client = session_factory().client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [
                {'Name': bname}
            ],
            'actions': [
                {
                    'type': 'set-inventory',
                    'destination': 'inv-dest',
                    'name': 'inv-name',
                    'state': 'absent',
                }
            ]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        inventoryConfigList = client.list_bucket_inventory_configurations(Bucket=bname).get('InventoryConfigurationList')
        self.assertFalse(inventoryConfigList)

    @functional
    def test_inventory(self):
        """Round trip: set-inventory merges into an existing inventory
        configuration, then state: absent removes it."""
        bname = 'custodian-test-data'
        inv_bname = 'custodian-inv'
        inv_name = 'something'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_inventory')
        client = session_factory().client('s3')
        if self.recording:
            destroyBucketIfPresent(client, bname)
            destroyBucketIfPresent(client, inv_bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        client.create_bucket(Bucket=inv_bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        self.addCleanup(client.delete_bucket, Bucket=bname)
        self.addCleanup(client.delete_bucket, Bucket=inv_bname)
        # Pre-existing inventory configuration the action should merge into.
        inv = {
            'Destination': {
                'S3BucketDestination': {
                    'Bucket': "arn:aws:s3:::%s" % inv_bname,
                    'Format': 'CSV',
                    'Prefix': 'abcdef'},
            },
            'IsEnabled': True,
            'Id': inv_name,
            'IncludedObjectVersions': 'All',
            'OptionalFields': ['LastModifiedDate'],
            'Schedule': {
                'Frequency': 'Daily'}
        }
        client.put_bucket_inventory_configuration(
            Bucket=bname,
            Id=inv_name,
            InventoryConfiguration=inv)
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [
                {'Name': 'custodian-test-data'}],
            'actions': [
                {'type': 'set-inventory',
                 'destination': inv_bname,
                 'name': inv_name}]
        }, session_factory=session_factory)
        self.assertEqual(len(p.run()), 1)
        invs = client.list_bucket_inventory_configurations(
            Bucket=bname).get('InventoryConfigurationList')
        self.assertTrue(invs)
        # Optional fields from the existing config and the action are merged.
        self.assertEqual(sorted(invs[0]['OptionalFields']), ['LastModifiedDate', 'Size'])
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [
                {'Name': 'custodian-test-data'}],
            'actions': [
                {'type': 'set-inventory',
                 'destination': inv_bname,
                 'state': 'absent',
                 'name': inv_name}]
        }, session_factory=session_factory)
        self.assertEqual(len(p.run()), 1)
        self.assertFalse(
            client.list_bucket_inventory_configurations(
                Bucket=bname).get('InventoryConfigurationList'))
class BucketDelete(BaseTest):
    # Coverage for the bucket `delete` action across replicated,
    # versioned, plain, and permission-denied buckets.

    def test_delete_replicated_bucket(self):
        """Delete the source bucket of a replication pair."""
        # the iam setup is a little for replication to duplicate in a test
        # preconditions - custodian-replicated and custodian-replicated-west
        # buckets setup with replication, we're deleting the custodian-replicated
        # bucket (source).
        bname = 'custodian-replicated'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3, 'S3_AUGMENT_TABLE',
            [('get_bucket_replication', 'Replication', None, None),
             ('get_bucket_versioning', 'Versioning', None, None)])
        session_factory = self.replay_flight_data(
            'test_s3_delete_replicated_bucket')
        p = self.load_policy({
            'name': 's3-delete-bucket',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{'type': 'delete', 'remove-contents': True}]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        session = session_factory()
        client = session.client('s3')
        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertFalse(bname in buckets)

    @functional
    def test_delete_versioned_bucket(self):
        """delete with remove-contents also clears object versions and a
        dangling multipart upload before removing the bucket."""
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE',
                   [('get_bucket_versioning', 'Versioning', None, None)])
        session_factory = self.replay_flight_data(
            'test_s3_delete_versioned_bucket')
        session = session_factory()
        client = session.client('s3')
        s3_resource = session.resource('s3')
        bname = 'custodian-byebye'
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        client.put_bucket_versioning(
            Bucket=bname,
            VersioningConfiguration={'Status': 'Enabled'})
        generateBucketContents(s3_resource, bname)
        # Generate some versions
        generateBucketContents(s3_resource, bname)
        # Leave an in-progress multipart upload behind as well.
        upload_info = client.create_multipart_upload(
            Bucket=bname, Key='abcdef12345')
        client.upload_part(
            Body='1' * 1024,
            Bucket=bname,
            Key='abcdef12345',
            PartNumber=1,
            UploadId=upload_info['UploadId'])
        p = self.load_policy({
            'name': 's3-delete-bucket',
            'resource': 's3',
            'filters': [
                {'Name': bname}],
            'actions': [{'type': 'delete', 'remove-contents': True}]
        }, session_factory=session_factory)
        resources = p.run()
        if self.recording:
            time.sleep(60)
        self.assertEqual(len(resources), 1)
        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertFalse(bname in buckets)

    @functional
    def test_delete_bucket(self):
        """delete with remove-contents on an unversioned bucket."""
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.DeleteBucket, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_delete_bucket')
        session = session_factory()
        client = session.client('s3')
        bname = 'custodian-byebye'
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        generateBucketContents(session.resource('s3'), bname)
        p = self.load_policy({
            'name': 's3-delete-bucket',
            'resource': 's3',
            'filters': [
                {'Name': bname}],
            'actions': [{'type': 'delete', 'remove-contents': True}]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertFalse(bname in buckets)

    def test_delete_bucket_with_failure(self):
        """A bucket whose policy denies s3:DeleteBucket survives the
        action; the denial is recorded to denied.json in the log dir."""
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3.DeleteBucket, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_delete_bucket_with_failure')
        session = session_factory()
        client = session.client('s3')
        bname = 'custodian-perm-denied'
        client.create_bucket(Bucket=bname)
        generateBucketContents(session.resource('s3'), bname)
        # This bucket policy prevents viewing contents
        policy = {
            "Version": "2012-10-17",
            "Id": "Policy1487359365244",
            "Statement": [{
                "Sid": "Stmt1487359361981",
                "Effect": "Deny",
                "Principal": "*",
                "Action": "s3:DeleteBucket",
                "Resource":"arn:aws:s3:::{}".format(bname)
            }]
        }
        client.put_bucket_policy(Bucket=bname, Policy=json.dumps(policy))
        p = self.load_policy({
            'name': 's3-delete-bucket',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{'type': 'delete', 'remove-contents': True}]
        }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertIn(bname, buckets)
        # Make sure file got written
        denied_file = os.path.join(p.resource_manager.log_dir, 'denied.json')
        self.assertIn(bname, open(denied_file).read())
        #
        # Now delete it for real
        #
        client.delete_bucket_policy(Bucket=bname)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertFalse(bname in buckets)
class BucketTag(BaseTest):
    """Tag action on s3 buckets merges new tags with existing ones."""

    @functional
    def test_tag_bucket(self):
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.EncryptExtantKeys, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_tagging', 'Tags', [], 'TagSet')])
        session_factory = self.replay_flight_data('test_s3_tag')
        session = session_factory()
        client = session.client('s3')
        bname = 'custodian-tagger'
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(
            Bucket=bname,
            CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        self.addCleanup(destroyBucket, client, bname)
        # Seed two tags; the policy should keep one, overwrite one, add one.
        client.put_bucket_tagging(
            Bucket=bname,
            Tagging={'TagSet': [
                {'Key': 'rudolph', 'Value': 'reindeer'},
                {'Key': 'platform', 'Value': 'lxwee'}]})
        policy = self.load_policy({
            'name': 's3-tagger',
            'resource': 's3',
            'filters': [
                {'Name': bname}],
            'actions': [
                {'type': 'tag', 'tags': {
                    'borrowed': 'new', 'platform': 'serverless'}}]
        }, session_factory=session_factory)
        matched = policy.run()
        self.assertEqual(len(matched), 1)
        tag_map = {
            t['Key']: t['Value']
            for t in client.get_bucket_tagging(Bucket=bname)['TagSet']}
        self.assertEqual(
            tag_map,
            {'rudolph': 'reindeer',
             'platform': 'serverless',
             'borrowed': 'new'})
class S3ConfigSource(ConfigTest):
    # Validates that buckets described via AWS Config (s3.ConfigS3)
    # normalize to the same shape as buckets described directly through
    # the s3 API augment pipeline.

    maxDiff = None

    @functional
    def test_normalize(self):
        """Configure a bucket every which way, then compare the
        API-described resource against the Config-described one."""
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        # Location is dropped from the augment table; the comparison
        # below only covers the keys both sources populate identically.
        augments = list(s3.S3_AUGMENT_TABLE)
        augments.remove(('get_bucket_location', 'Location', None, None))
        self.patch(s3, 'S3_AUGMENT_TABLE', augments)
        bname = 'custodian-test-data-23'
        session_factory = self.replay_flight_data('test_s3_normalize')
        session = session_factory()
        queue_url = self.initialize_config_subscriber(session)
        client = session.client('s3')
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        self.addCleanup(destroyBucket, client, bname)
        # Notification topic the bucket will publish events to.
        sns = session.client('sns')
        notify_topic = sns.create_topic(Name=bname).get('TopicArn')
        sns.set_topic_attributes(
            TopicArn=notify_topic,
            AttributeName='Policy',
            AttributeValue=json.dumps({
                'Statement': [{
                    'Action': 'SNS:Publish',
                    'Effect': 'Allow',
                    'Resource': notify_topic,
                    'Principal': {'Service': 's3.amazonaws.com'}}]}))
        self.addCleanup(sns.delete_topic, TopicArn=notify_topic)
        public = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
        client.put_bucket_acl(
            Bucket=bname,
            AccessControlPolicy={
                "Owner": {
                    "DisplayName": "mandeep.bal",
                    "ID": "e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15",
                },
                'Grants': [
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': public},
                     'Permission': 'READ'},
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
                     'Permission': 'WRITE'},
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
                     'Permission': 'READ_ACP'},
                ]})
        client.put_bucket_tagging(
            Bucket=bname,
            Tagging={'TagSet': [
                {'Key': 'rudolph', 'Value': 'rabbit'},
                {'Key': 'platform', 'Value': 'tyre'}]})
        client.put_bucket_logging(
            Bucket=bname,
            BucketLoggingStatus={
                'LoggingEnabled': {
                    'TargetBucket': bname,
                    'TargetPrefix': 's3-logs/'}})
        client.put_bucket_versioning(
            Bucket=bname,
            VersioningConfiguration={'Status': 'Enabled'})
        client.put_bucket_accelerate_configuration(
            Bucket=bname,
            AccelerateConfiguration={'Status': 'Enabled'})
        client.put_bucket_website(
            Bucket=bname,
            WebsiteConfiguration={
                'IndexDocument': {
                    'Suffix': 'index.html'}})
        client.put_bucket_policy(
            Bucket=bname,
            Policy=json.dumps({
                'Version': '2012-10-17',
                'Statement': [{
                    'Sid': 'Zebra',
                    'Effect': 'Deny',
                    'Principal': '*',
                    'Action': 's3:PutObject',
                    'Resource': 'arn:aws:s3:::%s/*' % bname,
                    'Condition': {
                        'StringNotEquals': {
                            's3:x-amz-server-side-encryption': [
                                'AES256', 'aws:kms']}}}]}))
        client.put_bucket_notification_configuration(
            Bucket=bname,
            NotificationConfiguration={
                'TopicConfigurations': [{
                    'Id': bname,
                    'TopicArn': notify_topic,
                    'Events': ['s3:ObjectCreated:*'],
                    'Filter': {
                        'Key': {
                            'FilterRules': [
                                {'Name': 'prefix',
                                 'Value': 's3-logs/'}
                            ]
                        }
                    }
                }]
            })
        p = self.load_policy({
            'name': 's3-inv',
            'resource': 's3',
            'filters': [{'Name': bname}]}, session_factory=session_factory)
        manager = p.get_resource_manager()
        # resource_a: described via the s3 API; resource_b: via Config.
        resource_a = manager.get_resources([bname])[0]
        results = self.wait_for_config(session, queue_url, bname)
        resource_b = s3.ConfigS3(manager).load_resource(results[0])
        self.maxDiff = None
        for k in ('Logging',
                  'Policy',
                  'Versioning',
                  'Name',
                  'Website'):
            self.assertEqual(resource_a[k], resource_b[k])
        self.assertEqual(
            {t['Key']: t['Value'] for t in resource_a.get('Tags')},
            {t['Key']: t['Value'] for t in resource_b.get('Tags')})

    def test_config_normalize_notification(self):
        """Config events normalize topic/queue/lambda notifications."""
        event = event_data('s3-rep-and-notify.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        resource = source.load_resource(event)
        self.assertEqual(
            resource['Notification'],
            {u'TopicConfigurations': [
                {u'Filter': {
                    u'Key': {
                        u'FilterRules': [
                            {u'Name': 'Prefix', u'Value': 'oids/'}]}},
                 u'Id': 'rabbit',
                 u'TopicArn': 'arn:aws:sns:us-east-1:644160558196:custodian-test-data-22',
                 u'Events': ['s3:ReducedRedundancyLostObject',
                             's3:ObjectCreated:CompleteMultipartUpload']}],
             u'LambdaFunctionConfigurations': [
                 {u'Filter': {
                     u'Key': {
                         u'FilterRules': [
                             {u'Name': 'Prefix', u'Value': 'void/'}]}},
                  u'LambdaFunctionArn': 'arn:aws:lambda:us-east-1:644160558196:function:lambdaenv',
                  u'Id': 'ZDAzZDViMTUtNGU3MS00ZWIwLWI0MzgtOTZiMWQ3ZWNkZDY1',
                  u'Events': ['s3:ObjectRemoved:Delete']}],
             u'QueueConfigurations': [
                 {u'Filter': {
                     u'Key': {
                         u'FilterRules': [
                             {u'Name': 'Prefix', u'Value': 'images/'}]}},
                  u'Id': 'OGQ5OTAyNjYtYjBmNy00ZTkwLWFiMjUtZjE4ODBmYTgwNTE0',
                  u'QueueArn': 'arn:aws:sqs:us-east-1:644160558196:test-queue',
                  u'Events': ['s3:ObjectCreated:*']}]})

    def test_config_normalize_lifecycle_null_predicate(self):
        """Lifecycle rules with a null filter/predicate still normalize."""
        event = event_data('s3-lifecycle-null-predicate.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        resource = source.load_resource(event)
        rule = resource['Lifecycle']['Rules'][0]
        self.assertEqual(
            rule,
            {'AbortIncompleteMultipartUpload': {'DaysAfterInitiation': 1},
             'Expiration': {'Days': 1},
             'ID': 'RemoveAbortedUploads',
             'NoncurrentVersionExpiration': {'NoncurrentDays': -1},
             'Status': 'Enabled'})

    def test_config_normalize_lifecycle_and_predicate(self):
        """Lifecycle `And` predicates keep their prefix and tag set."""
        event = event_data('s3-lifecycle-and-predicate.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        resource = source.load_resource(event)
        rfilter = resource['Lifecycle']['Rules'][0]['Filter']
        self.assertEqual(
            rfilter['And']['Prefix'],
            'docs/')
        self.assertEqual(
            rfilter['And']['Tags'],
            [{"Value": "Archive", "Key": "Workflow"},
             {"Value": "Complete", "Key": "State"}])

    def test_config_normalize_lifecycle(self):
        """A full lifecycle rule round-trips through normalization."""
        event = event_data('s3-lifecycle.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        resource = source.load_resource(event)
        self.assertEqual(
            resource['Lifecycle'], {
                "Rules": [
                    {
                        "Status": "Enabled",
                        "NoncurrentVersionExpiration": {
                            "NoncurrentDays": 545
                        },
                        "Filter": {
                            "Prefix": "docs/"
                        },
                        "Transitions": [{
                            "Days": 30,
                            "StorageClass": "STANDARD_IA"
                        }],
                        "Expiration": {
                            "ExpiredObjectDeleteMarker": True
                        },
                        "AbortIncompleteMultipartUpload": {
                            "DaysAfterInitiation": 7
                        },
                        "NoncurrentVersionTransitions": [{
                            "NoncurrentDays": 180,
                            "StorageClass": "GLACIER"
                        }],
                        "ID": "Docs"
                    }
                ]
            })

    def test_config_normalize_replication(self):
        """Replication configuration normalizes rules and role."""
        event = event_data('s3-rep-and-notify.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        resource = source.load_resource(event)
        self.assertEqual(
            resource['Replication'], {
                u'ReplicationConfiguration': {
                    u'Rules': [{u'Status': 'Enabled',
                                u'Prefix': '',
                                u'Destination': {
                                    u'Bucket': 'arn:aws:s3:::testing-west'},
                                u'ID': 'testing-99'}],
                    u'Role': (
                        'arn:aws:iam::644160558196:role'
                        '/custodian-replicated-custodian-replicated'
                        '-west-s3-repl-role')}})

    def test_config_normalize_website_redirect(self):
        """Redirect-all website configs normalize to RedirectAllRequestsTo."""
        event = event_data('s3-website-redirect.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        self.maxDiff = None
        resource = source.load_resource(event)
        self.assertEqual(
            resource['Website'],
            {'RedirectAllRequestsTo': {'HostName': 'www.google.com/', 'Protocol': 'https'}})

    def test_config_normalize_website(self):
        """Website configs with routing rules normalize intact."""
        event = event_data('s3-website.json', 'config')
        p = self.load_policy({'name': 's3cfg', 'resource': 's3'})
        source = p.resource_manager.get_source('config')
        self.maxDiff = None
        resource = source.load_resource(event)
        self.assertEqual(
            resource['Website'],
            {u'IndexDocument': {u'Suffix': 'index.html'},
             u'RoutingRules': [
                 {u'Redirect': {u'ReplaceKeyWith': 'error.html'},
                  u'Condition': {u'HttpErrorCodeReturnedEquals': '404',
                                 u'KeyPrefixEquals': 'docs/'}}]})

    def test_load_item_resource(self):
        """A Config inventory item loads with unset attributes as None."""
        event = event_data('s3.json', 'config')
        p = self.load_policy({
            'name': 's3cfg',
            'resource': 's3'})
        source = p.resource_manager.get_source('config')
        self.maxDiff = None
        resource = source.load_resource(event)
        resource.pop('CreationDate')
        self.assertEqual(
            {'Planet': 'Earth', 'Verbose': 'Game'},
            {t['Key']: t['Value'] for t in resource.pop('Tags')}
        )
        self.assertEqual(
            resource,
            {'Location': {'LocationConstraint': u'us-east-2'},
             'Name': u'config-rule-sanity',
             'Lifecycle': None,
             'Website': None,
             'Policy': None,
             'Replication': None,
             'Versioning': None,
             'Logging': None,
             'Notification': None,
             "Acl": {
                 "Owner": {
                     "ID": u"e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15"
                 },
                 "Grants": [
                     {
                         "Grantee": {
                             "Type": "CanonicalUser",
                             "ID": u"e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15"
                         },
                         "Permission": "FULL_CONTROL"
                     }
                 ]}
             })

    def test_load_item_resource_config_event(self):
        """A Config rule event's configurationItem loads directly."""
        event = event_data('s3-from-rule.json', 'config')
        p = self.load_policy({
            'name': 's3cfg',
            'resource': 's3'})
        source = p.resource_manager.get_source('config')
        self.maxDiff = None
        resource_config = json.loads(event['invokingEvent'])['configurationItem']
        resource = source.load_resource(resource_config)
        self.assertEqual(
            resource,
            {u'Acl': {
                u'Grants': [{
                    u'Grantee': {
                        u'ID': u'e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15',
                        u'Type': u'CanonicalUser'},
                    u'Permission': u'FULL_CONTROL'}],
                u'Owner': {u'DisplayName': u'mandeep.bal',
                           u'ID': u'e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15'}},
             u'CreationDate': datetime.datetime(2017, 9, 15, 2, 5, 40, tzinfo=tzutc()),
             u'Lifecycle': None,
             u'Location': None,
             u'Logging': None,
             u'Name': u'c7n-fire-logs',
             u'Notification': {},
             u'Policy': None,
             u'Replication': None,
             u'Tags': [],
             u'Versioning': None,
             u'Website': None})
class BucketPolicyStatements(BaseTest):
    # Coverage for the `set-statements` action, which writes policy
    # statements onto a bucket.

    @functional
    def test_policy(self):
        """set-statements adds the statement, expanding {bucket_name}
        in the Resource arn to the actual bucket name."""
        bname = 'custodian-test-data'
        sid = 'CustodianTest'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        session_factory = self.replay_flight_data('test_s3_policy_statements')
        client = session_factory().client('s3')
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-policy-statements',
            'resource': 's3',
            'filters': [
                {'Name': bname}],
            'actions': [
                {'type': 'set-statements',
                 'statements': [{
                     'Sid': sid,
                     'Effect': 'Deny',
                     'Action': 's3:GetObject',
                     'Principal': {
                         'AWS': '*'
                     },
                     'Resource': 'arn:aws:s3:::{bucket_name}/*',
                     'Condition': {
                         'Bool': {
                             'aws:SecureTransport': False
                         }
                     }
                 }]}]
        }, session_factory=session_factory)
        self.assertEqual(len(p.run()), 1)
        policy = client.get_bucket_policy(Bucket=bname).get('Policy')
        policy = json.loads(policy)
        self.assertTrue(len(policy['Statement']) > 0)
        # Exactly one statement with our sid and the interpolated arn.
        self.assertTrue(len([
            s for s in policy['Statement'] if s['Sid'] == sid and s['Resource'] == 'arn:aws:s3:::%s/*' % (bname)
        ]) == 1)

    @functional
    def test_policy_no_change(self):
        """set-statements with an empty statements list still processes
        the bucket without error."""
        bname = 'custodian-test-data'
        sid = 'CustodianTest'
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [('get_bucket_policy', 'Policy', None, 'Policy')])
        session_factory = self.replay_flight_data('test_s3_policy_statements_no_change')
        client = session_factory().client('s3')
        if self.recording:
            destroyBucketIfPresent(client, bname)
        client.create_bucket(Bucket=bname,
                             CreateBucketConfiguration={'LocationConstraint': 'us-east-2'})
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-policy-statements',
            'resource': 's3',
            'filters': [
                {'Name': bname}],
            'actions': [
                {'type': 'set-statements',
                 'statements': []}]
        }, session_factory=session_factory)
        self.assertEqual(len(p.run()), 1)
class S3Test(BaseTest):
def test_multipart_large_file(self):
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(
s3.EncryptExtantKeys, 'executor_factory', MainThreadExecutor)
self.patch(s3, 'S3_AUGMENT_TABLE', [])
self.patch(s3, 'MAX_COPY_SIZE', (1024 * 1024 * 6.1))
session_factory = self.replay_flight_data('test_s3_multipart_file')
session = session_factory()
client = session.client('s3')
bname = 'custodian-largef-test'
key = 'hello'
client.create_bucket(Bucket=bname)
self.addCleanup(destroyBucket, client, bname)
class wrapper(object):
def __init__(self, d, length):
self.d = d
self.len = length
self.counter = length
def read(self, size):
if self.counter == 0:
return ""
if size > self.counter:
size = self.counter
self.counter = 0
else:
self.counter -= size
return self.d.read(size)
def seek(self, offset, whence=0):
if whence == 2 and offset == 0:
self.counter = 0
elif whence == 0 and offset == 0:
self.counter = self.len
def tell(self):
return self.len - self.counter
size = 1024 * 1024 * 16
client.put_object(
Bucket=bname, Key=key,
Metadata={'planet': 'earth'},
Body=wrapper(open('/dev/zero'), size), ContentLength=size)
info = client.head_object(Bucket=bname, Key=key)
p = self.load_policy({
'name': 'encrypt-obj',
'resource': 's3',
'filters': [{"Name": bname}],
'actions': ['encrypt-keys']}, session_factory=session_factory)
p.run()
post_info = client.head_object(Bucket=bname, Key='hello')
self.assertTrue('ServerSideEncryption' in post_info)
self.assertEqual(post_info['Metadata'], {'planet': 'earth'})
# etags on multipart do not reflect md5 :-(
self.assertTrue(info['ContentLength'], post_info['ContentLength'])
def test_self_log(self):
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_logging', 'Logging', None, 'LoggingEnabled')])
session_factory = self.replay_flight_data('test_s3_self_log_target')
session = session_factory()
client = session.client('s3')
bname = 'custodian-log-test'
client.create_bucket(Bucket=bname)
self.addCleanup(client.delete_bucket, Bucket=bname)
client.put_bucket_acl(
Bucket=bname,
AccessControlPolicy={
"Owner": {
"DisplayName": "k_vertigo",
"ID": "904fc4c4790937100e9eb293a15e6a0a1f265a064888055b43d030034f8881ee"
},
'Grants': [
{'Grantee': {
'Type': 'Group',
'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
'Permission': 'WRITE'},
{'Grantee': {
'Type': 'Group',
'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
'Permission': 'READ_ACP'},
]})
client.put_bucket_logging(
Bucket=bname,
BucketLoggingStatus={
'LoggingEnabled': {
'TargetBucket': bname,
'TargetPrefix': 's3-logs/'}})
p = self.load_policy({
'name': 's3-log-targets',
'resource': 's3',
'filters': [
{'Name': bname},
{'type': 'is-log-target', 'self': True}]},
session_factory=session_factory)
resources = p.run()
names = [b['Name'] for b in resources]
self.assertEqual(names[0], bname)
self.assertEqual(len(names), 1)
def test_log_target(self):
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_logging', 'Logging', None, 'LoggingEnabled')])
session_factory = self.replay_flight_data('test_s3_log_target')
session = session_factory()
client = session.client('s3')
bname = 'custodian-log-test'
client.create_bucket(Bucket='custodian-log-test')
self.addCleanup(client.delete_bucket, Bucket=bname)
client.put_bucket_acl(
Bucket=bname,
AccessControlPolicy={
"Owner": {
"DisplayName": "k_vertigo",
"ID": "904fc4c4790937100e9eb293a15e6a0a1f265a064888055b43d030034f8881ee"
},
'Grants': [
{'Grantee': {
'Type': 'Group',
'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
'Permission': 'WRITE'},
{'Grantee': {
'Type': 'Group',
'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
'Permission': 'READ_ACP'},
]})
client.put_bucket_logging(
Bucket=bname,
BucketLoggingStatus={
'LoggingEnabled': {
'TargetBucket': bname,
'TargetPrefix': 's3-logs/'}})
p = self.load_policy({
'name': 's3-log-targets',
'resource': 's3',
'filters': ['is-log-target']}, session_factory=session_factory)
resources = p.run()
names = [b['Name'] for b in resources]
self.assertTrue(bname in names)
def test_has_statement(self):
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(
s3.MissingPolicyStatementFilter, 'executor_factory',
MainThreadExecutor)
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_policy', 'Policy', None, 'Policy'),
])
session_factory = self.replay_flight_data('test_s3_has_statement')
bname = "custodian-policy-test"
session = session_factory()
client = session.client('s3')
client.create_bucket(Bucket=bname)
self.addCleanup(destroyBucket, client, bname)
client.put_bucket_policy(
Bucket=bname,
Policy=json.dumps({
'Version': '2012-10-17',
'Statement': [{
'Sid': 'Zebra',
'Effect': 'Deny',
'Principal': '*',
'Action': 's3:PutObject',
'Resource': 'arn:aws:s3:::%s/*' % bname,
'Condition': {
'StringNotEquals': {
's3:x-amz-server-side-encryption': [
'AES256', 'aws:kms']}}}]}))
p = self.load_policy({
'name': 's3-has-policy',
'resource': 's3',
'filters': [
{'Name': bname},
{'type': 'has-statement',
'statement_ids': ['Zebra']}]},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
def test_has_statement_policy(self):
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(
s3.MissingPolicyStatementFilter, 'executor_factory',
MainThreadExecutor)
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_policy', 'Policy', None, 'Policy'),
])
session_factory = self.replay_flight_data('test_s3_has_statement')
bname = "custodian-policy-test"
session = session_factory()
p = self.load_policy({
'name': 's3-has-policy',
'resource': 's3',
'filters': [
{'Name': bname},
{'type': 'has-statement',
'statements': [
{'Effect': 'Deny',
'Action': 's3:PutObject',
'Principal': '*'}]}]},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
    def test_no_encryption_statement(self):
        """no-encryption-statement filter flags a policy that does not deny unencrypted puts.

        The fixture statement uses Effect 'Allow' (and a non-standard Version
        string), so it does not count as an encryption-enforcing deny and the
        bucket should match.
        """
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.MissingPolicyStatementFilter, 'executor_factory',
            MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        session_factory = self.replay_flight_data('test_s3_no_encryption_statement')
        bname = "custodian-encryption-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        client.put_bucket_policy(
            Bucket=bname,
            Policy=json.dumps({
                'Version': '2017-3-28',
                'Statement': [{
                    'Sid': 'RequiredEncryptedObject',
                    'Effect': 'Allow',
                    'Principal': '*',
                    'Action': 's3:PutObject',
                    'Resource': 'arn:aws:s3:::%s/*' % bname,
                    'Condition': {
                        'StringNotEquals': {
                            's3:x-amz-server-side-encryption': [
                                'AES256', 'aws:kms']}}}]}))
        p = self.load_policy({
            'name': 's3-no-encryption-policy',
            'resource': 's3',
            'filters': [
                {'Name': bname},
                {'type': 'no-encryption-statement'}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
    def test_missing_policy_statement(self):
        """missing-policy-statement filter matches when a required Sid is absent.

        The bucket policy only contains Sid 'Zebra', so filtering for
        'RequireEncryptedPutObject' selects the bucket.
        """
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.MissingPolicyStatementFilter, 'executor_factory',
            MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        session_factory = self.replay_flight_data('test_s3_missing_policy')
        bname = "custodian-encrypt-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        client.put_bucket_policy(
            Bucket=bname,
            Policy=json.dumps({
                'Version': '2012-10-17',
                'Statement': [{
                    'Sid': 'Zebra',
                    'Effect': 'Deny',
                    'Principal': '*',
                    'Action': 's3:PutObject',
                    'Resource': 'arn:aws:s3:::%s/*' % bname,
                    'Condition': {
                        'StringNotEquals': {
                            's3:x-amz-server-side-encryption': [
                                'AES256', 'aws:kms']}}}]}))
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [
                {'Name': bname},
                {'type': 'missing-policy-statement',
                 'statement_ids': ['RequireEncryptedPutObject']}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
    def test_enable_versioning(self):
        """toggle-versioning action enables, is idempotent, then suspends versioning."""
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_versioning', 'Versioning', None, None)])
        session_factory = self.replay_flight_data('test_s3_enable_versioning')
        bname = 'superduper-and-magic'
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 's3-version',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': ['toggle-versioning']
            }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0]['Name'], bname)
        # eventual consistency fun for recording
        #time.sleep(10)
        versioning = client.get_bucket_versioning(Bucket=bname)['Status']
        self.assertEqual('Enabled', versioning)
        # running against a bucket with versioning already on
        # is idempotent
        resources = p.run()
        self.assertEqual(len(resources), 1)
        # enabled: False flips versioning back to Suspended
        p = self.load_policy({
            'name': 's3-version',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'toggle-versioning', 'enabled': False}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        # eventual consistency fun for recording
        #time.sleep(10)
        versioning = client.get_bucket_versioning(Bucket=bname)['Status']
        self.assertEqual('Suspended', versioning)
    @functional
    def test_enable_logging(self):
        """toggle-logging action enables then disables bucket access logging.

        The bucket ACL must grant the LogDelivery group WRITE/READ_ACP before
        logging can target it.  The expected TargetPrefix below assumes the
        recording account resolves '{account}' to 'custodian-skunk-works' —
        NOTE(review): verify against the flight data if it changes.
        """
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_logging', 'Logging', None, None)])
        session_factory = self.replay_flight_data('test_s3_enable_logging')
        bname = 'superduper-and-magic'
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        client.put_bucket_acl(
            Bucket=bname,
            AccessControlPolicy={
                "Owner": {
                    "DisplayName": "mandeep.bal",
                    "ID": "e7c8bb65a5fc49cf906715eae09de9e4bb7861a96361ba79b833aa45f6833b15",
                },
                'Grants': [
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
                     'Permission': 'WRITE'},
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': 'http://acs.amazonaws.com/groups/s3/LogDelivery'},
                     'Permission': 'READ_ACP'}]})
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 's3-version',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'toggle-logging',
                 'target_bucket': bname,
                 'target_prefix': '{account}/{source_bucket_name}'}]
            }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0]['Name'], bname)
        # eventual consistency fun for recording
        if self.recording:
            time.sleep(5)
        logging = client.get_bucket_logging(Bucket=bname).get('LoggingEnabled')
        self.assertTrue(logging)
        self.assertEqual(logging['TargetPrefix'], 'custodian-skunk-works/superduper-and-magic')
        # Flip the switch
        p = self.load_policy({
            'name': 's3-version',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'toggle-logging', 'enabled': False}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        # eventual consistency fun for recording
        if self.recording:
            time.sleep(12)
        logging = client.get_bucket_logging(Bucket=bname).get('LoggingEnabled')
        self.assertFalse(logging)
    def test_encrypt_policy(self):
        """encryption-policy action installs a policy that denies unencrypted puts.

        After the policy runs, an unencrypted PutObject must be rejected with
        AccessDenied; the test fails if the put succeeds.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        session_factory = self.replay_flight_data('test_s3_encrypt_policy')
        bname = "custodian-encrypt-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': ['encryption-policy']}, session_factory=session_factory)
        resources = p.run()
        try:
            resource = session.resource('s3')
            key = resource.Object(bname, 'home.txt')
            key.put(Body='hello', ContentLength=5, ContentType='text/plain')
        except ClientError as e:
            self.assertEqual(e.response['Error']['Code'], 'AccessDenied')
        else:
            self.fail("Encryption required policy")
    def test_remove_policy_none_extant(self):
        """remove-statements deletes the whole policy when no statements remain.

        The bucket has no policy to begin with; after the action runs,
        get_bucket_policy must raise (no policy attached).
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_remove_empty_policy')
        bname = "custodian-policy-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'remove-policy',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'remove-statements', 'statement_ids': [
                    'Zebra', 'Moon']}],
            }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertRaises(ClientError, client.get_bucket_policy, Bucket=bname)
    def test_remove_policy(self):
        """remove-statements removes named Sids; an emptied policy is deleted entirely."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.RemovePolicyStatement, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data('test_s3_remove_policy')
        bname = "custodian-policy-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        client.put_bucket_policy(
            Bucket=bname,
            Policy=json.dumps({
                'Version': '2012-10-17',
                'Statement': [{
                    'Sid': 'Zebra',
                    'Effect': 'Deny',
                    'Principal': '*',
                    'Action': 's3:PutObject',
                    'Resource': 'arn:aws:s3:::%s/*' % bname,
                    'Condition': {
                        'StringNotEquals': {
                            's3:x-amz-server-side-encryption': [
                                'AES256', 'aws:kms']}}}]}))
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'remove-policy',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'remove-statements', 'statement_ids': [
                    'Zebra', 'Moon']}],
            }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        # Removing the only statement deletes the policy outright.
        self.assertRaises(ClientError, client.get_bucket_policy, Bucket=bname)
    def test_remove_policy_matched(self):
        """remove-statements with statement_ids 'matched' removes filter-matched statements.

        Monkey-patches RemovePolicyStatement.process to inject
        'CrossAccountViolations' (normally produced by an upstream filter)
        before delegating to the real implementation.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_policy', 'Policy', None, 'Policy'),
        ])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3.RemovePolicyStatement, 'executor_factory', MainThreadExecutor)
        self.patch(MainThreadExecutor, 'async', False)
        bname = "custodian-policy-test"
        statement = {
            'Sid': 'Zebra',
            'Effect': 'Deny',
            'Principal': '*',
            'Action': 's3:PutObject',
            'Resource': 'arn:aws:s3:::%s/*' % bname,
            'Condition': {
                'StringNotEquals': {
                    's3:x-amz-server-side-encryption': [
                        'AES256', 'aws:kms']}}}
        process_buckets = s3.RemovePolicyStatement.process
        # Shim that marks the matched statement then runs the real process.
        def enrich(self, buckets):
            buckets[0]['CrossAccountViolations'] = [statement]
            process_buckets(self, buckets)
        self.patch(s3.RemovePolicyStatement, 'process', enrich)
        session_factory = self.replay_flight_data('test_s3_remove_policy')
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        client.put_bucket_policy(
            Bucket=bname,
            Policy=json.dumps({
                'Version': '2012-10-17', 'Statement': [statement]}))
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'remove-policy',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [
                {'type': 'remove-statements', 'statement_ids': 'matched'}],
            }, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertRaises(ClientError, client.get_bucket_policy, Bucket=bname)
def test_attach_encrypt_requires_role(self):
self.assertRaises(
ValueError, self.load_policy,
{'name': 'attach-encrypt',
'resource': 's3',
'actions': [{'type': 'attach-encrypt'}]})
@skip_if_not_validating
def test_attach_encrypt_accepts_topic(self):
p = self.load_policy(
{'name': 'attach-encrypt',
'resource': 's3',
'actions': [{
'type': 'attach-encrypt', 'role': '-', 'topic': 'default'}]})
self.assertEqual(p.data['actions'][0]['topic'], 'default')
def test_create_bucket_event(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_policy', 'Policy', None, 'Policy'),
])
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
session_factory = self.replay_flight_data('test_s3_create')
bname = 'custodian-create-bucket-v4'
session = session_factory()
client = session.client('s3')
client.create_bucket(Bucket=bname)
self.addCleanup(destroyBucket, client, bname)
p = self.load_policy({
'name': 'bucket-create-v2',
'resource': 's3',
'mode': {
'type': 'cloudtrail',
'role': 'arn:aws:iam::619193117841:role/CustodianDemoRole',
'events': ['CreateBucket'],
},
'actions': [
'encryption-policy']}, session_factory=session_factory)
p.push(event_data('event-cloud-trail-create-bucket.json'), None)
try:
result = client.get_bucket_policy(Bucket=bname)
except:
self.fail("Could not get bucket policy")
self.assertTrue('Policy' in result)
policy = json.loads(result['Policy'])
self.assertEqual(
policy,
{u'Statement': [
{u'Action': u's3:PutObject',
u'Condition': {
u'StringNotEquals': {
u's3:x-amz-server-side-encryption': [
u'AES256',
u'aws:kms']}},
u'Effect': u'Deny',
u'Principal': u'*',
u'Resource': u'arn:aws:s3:::custodian-create-bucket-v4/*',
u'Sid': u'RequireEncryptedPutObject'}],
u'Version': u'2012-10-17'})
    def test_attach_encrypt_via_bucket_notification(self):
        """attach-encrypt wires a lambda directly via bucket notification config.

        Verifies the ObjectCreated notification points at the c7n-s3-encrypt
        function and that a subsequent put ends up server-side encrypted.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE',
                   [('get_bucket_location', 'Location', None, None)])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_attach_encrypt_via_bucket_notification')
        bname = "custodian-attach-encrypt-test"
        role = "arn:aws:iam::644160558196:role/custodian-mu"
        self.maxDiff = None
        session = session_factory(region='us-west-2')
        client = session.client('s3')
        client.create_bucket(
            Bucket=bname,
            CreateBucketConfiguration={
                'LocationConstraint': 'us-west-2'})
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'attach-encrypt',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{
                'type': 'attach-encrypt',
                'role': role}]
            }, session_factory=session_factory)
        self.addCleanup(
            LambdaManager(functools.partial(session_factory, region='us-west-2')).remove,
            s3crypt.get_function(None, role))
        resources = p.run()
        self.assertEqual(len(resources), 1)
        #time.sleep(10)
        notifications = client.get_bucket_notification_configuration(
            Bucket=bname)
        notifications.pop('ResponseMetadata')
        self.assertEqual(
            notifications,
            {'LambdaFunctionConfigurations': [{
                'Events': ['s3:ObjectCreated:*'],
                'Id': 'c7n-s3-encrypt',
                'LambdaFunctionArn':'arn:aws:lambda:us-west-2:644160558196:function:c7n-s3-encrypt'}]})
        client.put_object(
            Bucket=bname, Key='hello-world.txt',
            Body='hello world', ContentType='text/plain')
        #time.sleep(30)
        info = client.head_object(Bucket=bname, Key='hello-world.txt')
        self.assertTrue('ServerSideEncryption' in info)
    def test_attach_encrypt_via_new_topic(self):
        """attach-encrypt with topic 'default' creates a new SNS topic for fan-out.

        Confirms the bucket gains a TopicConfiguration for the new topic and
        that objects put afterwards come back encrypted.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [(
            'get_bucket_notification_configuration', 'Notification', None,
            None)])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_attach_encrypt_via_new_topic')
        bname = "custodian-attach-encrypt-test"
        role = "arn:aws:iam::644160558196:role/custodian-mu"
        self.maxDiff = None
        session = session_factory(region='us-east-1')
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'attach-encrypt',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{
                'type': 'attach-encrypt',
                'role': role,
                'topic': 'default'}]
            }, session_factory=session_factory)
        self.addCleanup(
            LambdaManager(
                functools.partial(session_factory, region='us-east-1')).remove,
            s3crypt.get_function(None, role))
        arn = 'arn:aws:sns:us-east-1:644160558196:custodian-attach-encrypt-test'
        self.addCleanup(session.client('sns').delete_topic, TopicArn=arn)
        self.addCleanup(session.client('logs').delete_log_group,
                        logGroupName='/aws/lambda/c7n-s3-encrypt')
        # Check that the policy sets stuff up properly.
        resources = p.run()
        self.assertEqual(len(resources), 1)
        #time.sleep(10)
        topic_notifications = client.get_bucket_notification_configuration(
            Bucket=bname).get('TopicConfigurations', [])
        us = [t for t in topic_notifications if t.get('TopicArn') == arn]
        self.assertEqual(len(us), 1)
        # Check that the stuff behaves properly.
        client.put_object(
            Bucket=bname, Key='hello-world.txt',
            Body='hello world', ContentType='text/plain')
        #time.sleep(30)
        info = client.head_object(Bucket=bname, Key='hello-world.txt')
        self.assertTrue('ServerSideEncryption' in info)
    def test_attach_encrypt_via_implicit_existing_topic(self):
        """attach-encrypt with topic 'default' preserves pre-existing topic configs.

        Pre-creates two suffix-filtered SNS topic notifications; after the
        policy runs both must still be attached to the bucket.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [(
            'get_bucket_notification_configuration', 'Notification', None,
            None)])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_attach_encrypt_via_implicit_existing_topic')
        bname = "custodian-attach-encrypt-test"
        role = "arn:aws:iam::644160558196:role/custodian-mu"
        self.maxDiff = None
        session = session_factory(region='us-east-1')
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        # Create two sns topics
        topic_configs = []
        for suffix in ('.jpg', '.txt'):
            sns = session.client('sns')
            existing_topic_arn = sns.create_topic(
                Name='existing-{}-{}'.format(bname, suffix[1:]))['TopicArn']
            # Allow S3 to publish to the topic.
            policy = {
                'Statement': [{
                    'Action': 'SNS:Publish',
                    'Effect': 'Allow',
                    'Resource': existing_topic_arn,
                    'Principal': {'Service': 's3.amazonaws.com'}}]}
            sns.set_topic_attributes(
                TopicArn=existing_topic_arn,
                AttributeName='Policy',
                AttributeValue=json.dumps(policy))
            self.addCleanup(session.client('sns').delete_topic,
                            TopicArn=existing_topic_arn)
            topic_configs.append({
                'TopicArn': existing_topic_arn,
                'Events': ['s3:ObjectCreated:*'],
                'Filter': {'Key': {'FilterRules': [{
                    'Name': 'suffix',
                    'Value': suffix}]}}})
        session.resource('s3').BucketNotification(bname).put(
            NotificationConfiguration={'TopicConfigurations': topic_configs})
        # Now define the policy.
        p = self.load_policy({
            'name': 'attach-encrypt',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{
                'type': 'attach-encrypt',
                'role': role,
                'topic': 'default'}]
            }, session_factory=session_factory)
        self.addCleanup(
            LambdaManager(
                functools.partial(session_factory, region='us-east-1')).remove,
            s3crypt.get_function(None, role))
        self.addCleanup(session.client('logs').delete_log_group,
                        logGroupName='/aws/lambda/c7n-s3-encrypt')
        # Check that the policy sets stuff up properly.
        resources = p.run()
        self.assertEqual(len(resources), 1)
        #time.sleep(10)
        notifies = client.get_bucket_notification_configuration(
            Bucket=bname).get('TopicConfigurations', [])
        existing = [t for t in notifies if 'existing' in t['TopicArn']]
        self.assertEqual(len(existing), 2)
        # Check that the stuff behaves properly.
        client.put_object(
            Bucket=bname, Key='hello-world.txt',
            Body='hello world', ContentType='text/plain')
        #time.sleep(30)
        info = client.head_object(Bucket=bname, Key='hello-world.txt')
        self.assertTrue('ServerSideEncryption' in info)
    def test_attach_encrypt_via_explicit_existing_topic(self):
        """attach-encrypt can target an explicitly named pre-existing SNS topic."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [(
            'get_bucket_notification_configuration', 'Notification', None,
            None)])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_attach_encrypt_via_explicit_existing_topic')
        bname = "custodian-attach-encrypt-test"
        role = "arn:aws:iam::644160558196:role/custodian-mu"
        self.maxDiff = None
        session = session_factory(region='us-east-1')
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        # Create an sns topic
        topic_configs = []
        sns = session.client('sns')
        existing_topic_arn = sns.create_topic(
            Name='preexisting-{}'.format(bname))['TopicArn']
        # Allow S3 to publish to the topic.
        policy = {
            'Statement': [{
                'Action': 'SNS:Publish',
                'Effect': 'Allow',
                'Resource': existing_topic_arn,
                'Principal': {'Service': 's3.amazonaws.com'}}]}
        sns.set_topic_attributes(
            TopicArn=existing_topic_arn,
            AttributeName='Policy',
            AttributeValue=json.dumps(policy))
        self.addCleanup(session.client('sns').delete_topic,
                        TopicArn=existing_topic_arn)
        topic_configs.append({
            'TopicArn': existing_topic_arn,
            'Events': ['s3:ObjectCreated:*']})
        session.resource('s3').BucketNotification(bname).put(
            NotificationConfiguration={'TopicConfigurations': topic_configs})
        # Now define the policy.
        p = self.load_policy({
            'name': 'attach-encrypt',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{
                'type': 'attach-encrypt',
                'role': role,
                'topic': existing_topic_arn}]
            }, session_factory=session_factory)
        self.addCleanup(
            LambdaManager(
                functools.partial(session_factory, region='us-east-1')).remove,
            s3crypt.get_function(None, role))
        self.addCleanup(session.client('logs').delete_log_group,
                        logGroupName='/aws/lambda/c7n-s3-encrypt')
        # Check that the policy sets stuff up properly.
        resources = p.run()
        self.assertEqual(len(resources), 1)
        #time.sleep(10)
        notifies = client.get_bucket_notification_configuration(
            Bucket=bname).get('TopicConfigurations', [])
        existing = [t for t in notifies if 'existing' in t['TopicArn']]
        self.assertEqual(len(existing), 1)
        # Check that the stuff behaves properly.
        client.put_object(
            Bucket=bname, Key='hello-world.txt',
            Body='hello world', ContentType='text/plain')
        #time.sleep(30)
        info = client.head_object(Bucket=bname, Key='hello-world.txt')
        self.assertTrue('ServerSideEncryption' in info)
def test_encrypt_versioned_bucket(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_versioning', 'Versioning', None, None)])
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(
s3.EncryptExtantKeys, 'executor_factory', MainThreadExecutor)
session_factory = self.replay_flight_data('test_s3_encrypt_versioned')
bname = "custodian-encrypt-test"
session = session_factory()
client = session.client('s3')
client.create_bucket(Bucket=bname)
client.put_bucket_versioning(
Bucket=bname,
VersioningConfiguration={'Status': 'Enabled'})
self.addCleanup(destroyVersionedBucket, client, bname)
generateBucketContents(session.resource('s3'), bname)
p = self.load_policy({
'name': 'encrypt-keys',
'resource': 's3',
'filters': [{'Name': bname}],
'actions': ['encrypt-keys']}, session_factory=session_factory)
resources = p.run()
self.assertTrue(
len(client.list_object_versions(Bucket=bname)['Versions']) == 3)
self.assertTrue(
'ServerSideEncryption' in client.head_object(
Bucket=bname, Key='home.txt'))
@functional
def test_encrypt_versioned_bucket_with_existing_keys(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_versioning', 'Versioning', None, None)])
self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
self.patch(
s3.EncryptExtantKeys, 'executor_factory', MainThreadExecutor)
session_factory = self.replay_flight_data('test_s3_encrypt_versioned_bucket_with_existing_keys')
bname = "custodian-encrypt-test-versioning"
session = session_factory()
client = session.client('s3')
client.create_bucket(Bucket=bname)
generateBucketContents(session.resource('s3'), bname,
{
'data1.txt': 'one',
'data2.txt': 'two'
})
client.put_bucket_versioning(
Bucket=bname,
VersioningConfiguration={'Status': 'Enabled'})
self.addCleanup(destroyVersionedBucket, client, bname)
generateBucketContents(session.resource('s3'), bname, {'data1.txt': 'three'})
p = self.load_policy({
'name': 'encrypt-keys',
'resource': 's3',
'filters': [{'Name': bname}],
'actions': ['encrypt-keys']}, session_factory=session_factory)
resources = p.run()
self.assertTrue(
len(client.list_object_versions(Bucket=bname)['Versions']) == 2)
self.assertTrue(
'ServerSideEncryption' in client.head_object(
Bucket=bname, Key='data1.txt'))
self.assertTrue(
'ServerSideEncryption' in client.head_object(
Bucket=bname, Key='data2.txt'))
    def test_encrypt_key_empty_bucket(self):
        """encrypt-keys runs cleanly against a bucket with no objects."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        self.patch(
            s3.EncryptExtantKeys, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data('test_s3_encrypt_empty')
        bname = "custodian-encrypt-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': ['encrypt-keys']}, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
    def test_encrypt_keys(self):
        """encrypt-keys remediates unencrypted objects; report-only mode only counts them."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data('test_s3_encrypt')
        bname = "custodian-encrypt-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        generateBucketContents(session.resource('s3'), bname)
        # start with a report-only option since it doesn't modify the bucket
        report_policy = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{'type': 'encrypt-keys',
                         'report-only': True}]},
            session_factory=session_factory)
        report_resources = report_policy.run()
        self.assertEqual(report_resources[0]['KeyRemediated'], 3)
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': ['encrypt-keys']}, session_factory=session_factory)
        p.run()
        self.assertTrue(
            'ServerSideEncryption' in client.head_object(
                Bucket=bname, Key='home.txt'))
        # re-run the report policy after to ensure we have no items
        # needing remediation
        report_resources = report_policy.run()
        self.assertEqual(report_resources[0]['KeyRemediated'], 0)
def test_encrypt_keys_aes256_sufficient(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [])
session_factory = self.replay_flight_data(
'test_s3_encrypt_aes256_sufficient')
bname = "custodian-encrypt-sufficient-test"
session = session_factory()
client = session.client('s3')
kms = session.client('kms')
client.create_bucket(Bucket=bname)
self.addCleanup(destroyBucket, client, bname)
key_id = [
k for k in kms.list_aliases().get('Aliases', ())
if k['AliasName'] == 'alias/aws/s3'][0]['AliasArn']
client.put_object(
Bucket=bname, Key='testing-abc', ServerSideEncryption='aws:kms',
SSEKMSKeyId=key_id)
client.put_object(
Bucket=bname, Key='testing-123', ServerSideEncryption='AES256')
p = self.load_policy({
'name': 'encrypt-keys',
'resource': 's3',
'filters': [{'Name': bname}],
'actions': [{'type': 'encrypt-keys'}]},
session_factory=session_factory)
p.run()
result = client.head_object(Bucket=bname, Key='testing-123')
self.assertTrue(result['ServerSideEncryption'] == 'AES256')
result = client.head_object(Bucket=bname, Key='testing-abc')
self.assertTrue(result['ServerSideEncryption'] == 'aws:kms')
data = json.load(open(
os.path.join(p.ctx.output_path, 'action-encryptextantkeys')))
self.assertEqual(
[{'Count': 2, 'Remediated': 0, 'Bucket': bname}], data)
    def test_encrypt_keys_key_id_option(self):
        """encrypt-keys honors an explicit KMS key-id and supports re-keying."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [])
        session_factory = self.replay_flight_data(
            'test_s3_encrypt_key_id_option')
        bname = "custodian-encrypt-test"
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        generateBucketContents(session.resource('s3'), bname)
        key_one = '845ab6f1-744c-4edc-b702-efae6836818a'
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{'type': 'encrypt-keys',
                         'crypto': 'aws:kms',
                         'key-id': key_one}]},
            session_factory=session_factory)
        p.run()
        result = client.head_object(Bucket=bname, Key='home.txt')
        self.assertTrue('SSEKMSKeyId' in result)
        self.assertTrue(key_one in result['SSEKMSKeyId'])
        # Now test that we can re-key it to something else
        key_two = '5fd9f6d6-4294-4926-8719-1e85695e2ad6'
        p = self.load_policy({
            'name': 'encrypt-keys',
            'resource': 's3',
            'filters': [{'Name': bname}],
            'actions': [{'type': 'encrypt-keys',
                         'crypto': 'aws:kms',
                         'key-id': key_two}]},
            session_factory=session_factory)
        p.run()
        result = client.head_object(Bucket=bname, Key='home.txt')
        self.assertTrue('SSEKMSKeyId' in result)
        self.assertTrue(key_two in result['SSEKMSKeyId'])
    def test_global_grants_filter_option(self):
        """global-grants filter matches only on the specific granted permission.

        Bucket grants AllUsers WRITE; filtering for READ_ACP matches nothing,
        filtering for WRITE matches the bucket.
        """
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_acl', 'Acl', None, None)
        ])
        session_factory = self.replay_flight_data(
            'test_s3_global_grants_filter')
        bname = 'custodian-testing-grants'
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        public = 'http://acs.amazonaws.com/groups/global/AllUsers'
        client.put_bucket_acl(
            Bucket=bname,
            AccessControlPolicy={
                "Owner": {
                    "DisplayName": "k_vertigo",
                    "ID": "904fc4c4790937100e9eb293a15e6a0a1f265a064888055b43d030034f8881ee"
                },
                'Grants': [
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': public},
                     'Permission': 'WRITE'}
                ]})
        p = self.load_policy(
            {'name': 's3-global-check',
             'resource': 's3',
             'filters': [
                 {'Name': 'custodian-testing-grants'},
                 {'type': 'global-grants',
                  'permissions': ['READ_ACP']}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 0)
        p = self.load_policy(
            {'name': 's3-global-check',
             'resource': 's3',
             'filters': [
                 {'Name': 'custodian-testing-grants'},
                 {'type': 'global-grants',
                  'permissions': ['WRITE']}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
    def test_global_grants_filter_and_remove(self):
        """delete-global-grants strips the AllUsers grant flagged by global-grants."""
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_acl', 'Acl', None, None)
        ])
        session_factory = self.replay_flight_data('test_s3_grants')
        bname = 'custodian-testing-grants'
        session = session_factory()
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        public = 'http://acs.amazonaws.com/groups/global/AllUsers'
        client.put_bucket_acl(
            Bucket=bname,
            AccessControlPolicy={
                "Owner": {
                    "DisplayName": "k_vertigo",
                    "ID": "904fc4c4790937100e9eb293a15e6a0a1f265a064888055b43d030034f8881ee"
                },
                'Grants': [
                    {'Grantee': {
                        'Type': 'Group',
                        'URI': public},
                     'Permission': 'WRITE'}
                ]})
        p = self.load_policy(
            {'name': 's3-remove-global',
             'resource': 's3',
             'filters': [
                 {'Name': 'custodian-testing-grants'},
                 {'type': 'global-grants'}],
             'actions': [
                 {'type': 'delete-global-grants',
                  'grantees': [public]}]
             }, session_factory=session_factory)
        resources = p.run()
        # Grants must be empty after the action; bucket deleted inline
        # (no addCleanup in this test).
        grants = client.get_bucket_acl(Bucket=bname)
        client.delete_bucket(Bucket=bname)
        self.assertEqual(grants['Grants'], [])
        self.assertEqual(resources[0]['Name'], bname)
def test_s3_mark_for_op(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_tagging', 'Tags', [], 'TagSet')])
session_factory = self.replay_flight_data('test_s3_mark_for_op')
session = session_factory()
client = session.client('s3')
bname = 'custodian-mark-test'
p = self.load_policy({
'name': 's3-mark',
'resource': 's3',
'filters': [
{'Name': bname}],
'actions': [
{'type': 'mark-for-op', 'days': 3,
'op': 'delete'}]},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
tags = client.get_bucket_tagging(Bucket=bname)
tag_map = {t['Key']: t['Value'] for t in tags.get('TagSet', {})}
self.assertTrue('maid_status' in tag_map)
self.assertTrue('delete' in tag_map.get('maid_status'))
def test_s3_remove_tag(self):
self.patch(s3, 'S3_AUGMENT_TABLE', [
('get_bucket_tagging', 'Tags', [], 'TagSet')])
session_factory = self.replay_flight_data('test_s3_remove_tag')
session = session_factory()
client = session.client('s3')
bname = 'custodian-mark-test'
p = self.load_policy({
'name': 's3-unmark',
'resource': 's3',
'filters': [{"Name": bname}],
'actions': ['unmark']},
session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
tags = client.get_bucket_tagging(Bucket=bname)
tag_map = {t['Key']: t['Value'] for t in tags.get('TagSet', {})}
self.assertTrue('maid_status' not in tag_map)
    def test_hosts_website(self):
        """Website filter finds hosting-enabled buckets; remove-website-hosting clears it.

        Second run returns 0 resources because the replay reflects the
        hosting config having been removed.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [
            ('get_bucket_website', 'Website', None, None)])
        session_factory = self.replay_flight_data('test_s3_hosts_website')
        session = session_factory()
        client = session.client('s3')
        bname = 'custodian-static-website-test'
        client.create_bucket(Bucket=bname)
        client.put_bucket_website(
            Bucket=bname,
            WebsiteConfiguration={
                'ErrorDocument': {
                    'Key': 'error.html'
                },
                'IndexDocument': {
                    'Suffix': 'index.html'
                }
            })
        self.addCleanup(client.delete_bucket, Bucket=bname)
        p = self.load_policy({
            'name': 's3-website-hosting',
            'resource': 's3',
            'filters': [{'Website': 'not-null'}]},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        names = [b['Name'] for b in resources]
        self.assertTrue(bname in names)
        p = self.load_policy({
            'name': 's3-website-hosting',
            'resource': 's3',
            'filters': [{'Website': 'not-null'}],
            'actions': ['remove-website-hosting']},
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 0)
    def test_delete_bucket_notification(self):
        """delete-bucket-notification with 'matched' removes only the matched config.

        Two SNS topic configurations are installed; the filter matches the one
        with Id == config_id and only the other must survive.
        """
        self.patch(s3, 'S3_AUGMENT_TABLE', [(
            'get_bucket_notification_configuration', 'Notification', None,
            None)])
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        session_factory = self.replay_flight_data(
            'test_s3_delete_bucket_notification')
        bname = 'custodian-delete-bucket-notification-test'
        config_id = 'c7n-notify-1'
        self.maxDiff = None
        session = session_factory(region='us-east-1')
        client = session.client('s3')
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)
        p = self.load_policy({
            'name': 's3-delete-bucket-notification',
            'resource': 's3',
            'filters': [
                {'Name': bname},
                {'type': 'bucket-notification',
                 'kind': 'sns',
                 'key': 'Id',
                 'value': config_id,
                 'op': 'eq'}
            ],
            'actions': [{'type': 'delete-bucket-notification',
                         'statement_ids': 'matched'}]
            }, session_factory=session_factory)
        topic_arn = session.client('sns').create_topic(Name='bucket-notification-test')['TopicArn']
        self.addCleanup(session.client('sns').delete_topic, TopicArn=topic_arn)
        # Allow S3 to publish to the topic.
        topic_policy = {
            'Statement': [{
                'Action': 'SNS:Publish',
                'Effect': 'Allow',
                'Resource': topic_arn,
                'Principal': {'Service': 's3.amazonaws.com'}}]}
        session.client('sns').set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName='Policy',
            AttributeValue=json.dumps(topic_policy))
        client.put_bucket_notification_configuration(
            Bucket=bname,
            NotificationConfiguration={
                'TopicConfigurations': [
                    {'TopicArn': topic_arn, 'Events': ['s3:ObjectCreated:*'], 'Id': config_id},
                    {'TopicArn': topic_arn, 'Events': ['s3:ObjectRemoved:*'], 'Id': 'another1'}
                ]
            })
        resources = p.run()
        self.assertEqual(len(resources), 1)
        #time.sleep(10)
        topic_notifications = client.get_bucket_notification_configuration(
            Bucket=bname).get('TopicConfigurations', [])
        us = [t for t in topic_notifications if t.get('TopicArn') == topic_arn]
        self.assertEqual(len(us), 1)
def test_enable_bucket_encryption_kms(self):
    """set-bucket-encryption with crypto aws:kms installs a KMS
    default-encryption rule that references the requested key's ARN.
    """
    self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
    self.patch(s3, 'S3_AUGMENT_TABLE', [])
    session_factory = self.replay_flight_data(
        'test_s3_enable_bucket_encryption_kms')
    session = session_factory()
    client = session.client('s3')
    kms_client = session.client('kms')
    bname = 'custodian-enable-bucket-encryption-kms'
    client.create_bucket(Bucket=bname)
    self.addCleanup(destroyBucket, client, bname)

    # A fresh bucket has no encryption configuration, so the call raises.
    with self.assertRaises(Exception):
        client.get_bucket_encryption(Bucket=bname)

    key = kms_client.list_keys()['Keys'][0]
    key_arn = kms_client.describe_key(
        KeyId=key['KeyId'])['KeyMetadata']['Arn']

    p = self.load_policy({
        'name': 's3-enable-bucket-encryption',
        'resource': 's3',
        'filters': [{'Name': bname}],
        'actions': [{
            'type': 'set-bucket-encryption',
            'key': str(key['KeyId']),
            'crypto': 'aws:kms'
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording:
        time.sleep(5)

    # The bucket should now report a KMS rule pinned to the key's ARN.
    response = client.get_bucket_encryption(Bucket=bname)
    rules = response['ServerSideEncryptionConfiguration'][
        'Rules'][0]['ApplyServerSideEncryptionByDefault']
    self.assertEqual(rules['SSEAlgorithm'], 'aws:kms')
    self.assertEqual(rules['KMSMasterKeyID'], key_arn)
def test_enable_bucket_encryption_kms_alias(self):
    """set-bucket-encryption accepts a KMS alias for 'key' and records the
    resolved key ARN; a nonexistent alias still enables aws:kms default
    encryption, but with no specific master key id recorded.
    """
    self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
    self.patch(s3, 'S3_AUGMENT_TABLE', [])
    session_factory = self.replay_flight_data('test_s3_enable_bucket_encryption_kms_alias')
    session = session_factory()
    client = session.client('s3')
    kms_client = session.client('kms')
    bname = 'custodian-enable-bucket-encryption-kms-alias'
    client.create_bucket(Bucket=bname)
    self.addCleanup(destroyBucket, client, bname)
    # Resolve the alias up front so we can assert the recorded key ARN.
    kms_alias = 'alias/some-key'
    kms_alias_id = kms_client.describe_key(KeyId=kms_alias)['KeyMetadata']['Arn']
    p = self.load_policy({
        'name': 's3-enable-bucket-encryption-alias',
        'resource': 's3',
        'filters': [
            {'Name': bname}
        ],
        'actions': [{
            'type': 'set-bucket-encryption',
            'crypto': 'aws:kms',
            'key': kms_alias
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording: time.sleep(5)
    response = client.get_bucket_encryption(Bucket=bname)
    rules = response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']
    self.assertEqual(rules['SSEAlgorithm'], 'aws:kms')
    self.assertEqual(rules['KMSMasterKeyID'], kms_alias_id)
    # A bad alias still enables aws:kms encryption, but without a key id.
    p = self.load_policy({
        'name': 's3-enable-bucket-encryption-bad-alias',
        'resource': 's3',
        'filters': [
            {'Name': bname}
        ],
        'actions': [{
            'type': 'set-bucket-encryption',
            'crypto': 'aws:kms',
            'key': 'alias/some-nonexistant-alias'
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording: time.sleep(5)
    response = client.get_bucket_encryption(Bucket=bname)
    rules = response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']
    self.assertEqual(rules['SSEAlgorithm'], 'aws:kms')
    self.assertIsNone(rules.get('KMSMasterKeyID'))
def test_enable_bucket_encryption_aes256(self):
    """set-bucket-encryption defaults to AES256 when no crypto option is
    given, and behaves identically with an explicit crypto: AES256.
    """
    self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
    self.patch(s3, 'S3_AUGMENT_TABLE', [])
    session_factory = self.replay_flight_data(
        'test_s3_enable_bucket_encryption_aes256')
    session = session_factory()
    client = session.client('s3')
    bname = 'custodian-enable-bucket-encryption-aes256'
    client.create_bucket(Bucket=bname)
    self.addCleanup(destroyBucket, client, bname)

    # A fresh bucket has no encryption configuration, so the call raises.
    with self.assertRaises(Exception):
        client.get_bucket_encryption(Bucket=bname)

    p = self.load_policy({
        'name': 's3-enable-bucket-encryption',
        'resource': 's3',
        'filters': [{'Name': bname}],
        'actions': [{
            'type': 'set-bucket-encryption'
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording:
        time.sleep(5)

    response = client.get_bucket_encryption(Bucket=bname)
    rules = response['ServerSideEncryptionConfiguration'][
        'Rules'][0]['ApplyServerSideEncryptionByDefault']
    self.assertEqual(rules['SSEAlgorithm'], 'AES256')

    # Reset the bucket and repeat with an explicit algorithm choice.
    client.delete_bucket_encryption(Bucket=bname)
    if self.recording:
        time.sleep(5)
    with self.assertRaises(Exception):
        client.get_bucket_encryption(Bucket=bname)

    p = self.load_policy({
        'name': 's3-enable-bucket-encryption',
        'resource': 's3',
        'filters': [{'Name': bname}],
        'actions': [{
            'type': 'set-bucket-encryption',
            'crypto': 'AES256'
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording:
        time.sleep(5)

    response = client.get_bucket_encryption(Bucket=bname)
    rules = response['ServerSideEncryptionConfiguration'][
        'Rules'][0]['ApplyServerSideEncryptionByDefault']
    self.assertEqual(rules['SSEAlgorithm'], 'AES256')
def test_delete_bucket_encryption(self):
    """set-bucket-encryption with enabled: False removes the bucket's
    default encryption configuration entirely.
    """
    self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
    self.patch(s3, 'S3_AUGMENT_TABLE', [])
    session_factory = self.replay_flight_data(
        'test_s3_delete_bucket_encryption')
    session = session_factory()
    client = session.client('s3')
    bname = 'custodian-delete-bucket-encryption-aes256'
    client.create_bucket(Bucket=bname)
    self.addCleanup(destroyBucket, client, bname)

    # A fresh bucket has no encryption configuration, so the call raises.
    with self.assertRaises(Exception):
        client.get_bucket_encryption(Bucket=bname)

    # Install a default AES256 rule for the policy to remove.
    client.put_bucket_encryption(
        Bucket=bname,
        ServerSideEncryptionConfiguration={
            'Rules': [
                {
                    'ApplyServerSideEncryptionByDefault': {
                        'SSEAlgorithm': 'AES256'
                    }
                }
            ]
        })

    p = self.load_policy({
        'name': 's3-delete-bucket-encryption',
        'resource': 's3',
        'filters': [{'Name': bname}],
        'actions': [{
            'type': 'set-bucket-encryption',
            'enabled': False
        }]
    }, session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    if self.recording:
        time.sleep(5)

    # The configuration should be gone again after the action runs.
    with self.assertRaises(Exception):
        client.get_bucket_encryption(Bucket=bname)
class S3LifecycleTest(BaseTest):
    """Tests for the configure-lifecycle action: adding, updating, and
    deleting individual lifecycle rules without clobbering unrelated rules.
    """

    def test_lifecycle(self):
        self.patch(s3.S3, 'executor_factory', MainThreadExecutor)
        self.patch(
            s3, 'S3_AUGMENT_TABLE',
            [('get_bucket_lifecycle_configuration', 'Lifecycle', None, None)])
        session_factory = self.replay_flight_data('test_s3_lifecycle')
        session = session_factory()
        client = session.client('s3')
        bname = 'custodian-lifecycle-test'

        # Make a bucket
        client.create_bucket(Bucket=bname)
        self.addCleanup(destroyBucket, client, bname)

        buckets = set([b['Name'] for b in client.list_buckets()['Buckets']])
        self.assertIn(bname, buckets)

        def get_policy(**kwargs):
            # Build a configure-lifecycle policy containing a single rule;
            # kwargs override/extend the base rule (e.g. ID, Prefix, Status).
            rule = {
                'Status': 'Enabled',
                'Prefix': 'foo/',
                'Transitions': [{
                    'Days': 60,
                    'StorageClass': 'GLACIER',
                }],
            }
            rule.update(**kwargs)
            policy = {
                'name': 's3-lifecycle',
                'resource': 's3',
                'filters': [{'Name': bname}],
                'actions': [{
                    'type': 'configure-lifecycle',
                    'rules': [rule],
                }]
            }
            return policy

        def run_policy(policy):
            # Execute the policy and pause for AWS propagation when recording.
            p = self.load_policy(policy, session_factory=session_factory)
            resources = p.run()
            self.assertEqual(len(resources), 1)
            if self.recording:
                time.sleep(5)

        #
        # Add the first lifecycle
        #
        lifecycle_id1 = 'test-lifecycle'
        policy = get_policy(ID=lifecycle_id1)
        run_policy(policy)
        lifecycle = client.get_bucket_lifecycle_configuration(Bucket=bname)
        self.assertEqual(lifecycle['Rules'][0]['ID'], lifecycle_id1)

        #
        # Now add another lifecycle rule to ensure it doesn't clobber the first one
        #
        lifecycle_id2 = 'test-lifecycle-two'
        policy = get_policy(ID=lifecycle_id2, Prefix='bar/')
        run_policy(policy)

        # Verify the lifecycle
        lifecycle = client.get_bucket_lifecycle_configuration(Bucket=bname)
        self.assertEqual(len(lifecycle['Rules']), 2)
        self.assertSetEqual(set([x['ID'] for x in lifecycle['Rules']]),
                            set([lifecycle_id1, lifecycle_id2]))

        #
        # Next, overwrite one of the lifecycles and make sure it changed
        #
        policy = get_policy(ID=lifecycle_id2, Prefix='baz/')
        run_policy(policy)

        # Verify the lifecycle
        lifecycle = client.get_bucket_lifecycle_configuration(Bucket=bname)
        self.assertEqual(len(lifecycle['Rules']), 2)
        self.assertSetEqual(set([x['ID'] for x in lifecycle['Rules']]),
                            set([lifecycle_id1, lifecycle_id2]))
        for rule in lifecycle['Rules']:
            if rule['ID'] == lifecycle_id2:
                self.assertEqual(rule['Prefix'], 'baz/')

        #
        # Test deleting a lifecycle
        #
        policy = get_policy(ID=lifecycle_id1, Status='absent')
        run_policy(policy)

        lifecycle = client.get_bucket_lifecycle_configuration(Bucket=bname)
        self.assertEqual(len(lifecycle['Rules']), 1)
        self.assertEqual(lifecycle['Rules'][0]['ID'], lifecycle_id2)
|
WoLpH/CouchPotatoServer | refs/heads/master | libs/sqlalchemy/ext/mutable.py | 18 | # ext/mutable.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place
mutations of scalar values, established by the :class:`.types.MutableType`
class as well as the ``mutable=True`` type flag, with a system that allows
change events to be propagated from the value to the owning parent, thereby
removing the need for the ORM to maintain copies of values as well as the very
expensive requirement of scanning through all "mutable" values on each flush
call, looking for changes.
.. _mutable_scalars:
Establishing Mutability on Scalar Column Values
===============================================
A typical example of a "mutable" structure is a Python dictionary.
Following the example introduced in :ref:`types_toplevel`, we
begin with a custom type that marshals Python dictionaries into
JSON strings before being persisted::
from sqlalchemy.types import TypeDecorator, VARCHAR
import json
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
impl = VARCHAR
def process_bind_param(self, value, dialect):
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = json.loads(value)
return value
The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable`
extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself
tracks all parents which reference it. Here we will replace the usage
of plain Python dictionaries with a dict subclass that implements
the :class:`.Mutable` mixin::
import collections
from sqlalchemy.ext.mutable import Mutable
class MutationDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
"Convert plain dictionaries to MutationDict."
if not isinstance(value, MutationDict):
if isinstance(value, dict):
return MutationDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
"Detect dictionary set events and emit change events."
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
"Detect dictionary del events and emit change events."
dict.__delitem__(self, key)
self.changed()
The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
many variants on this approach, such as subclassing ``UserDict.UserDict``,
the newer ``collections.MutableMapping``, etc. The part that's important to this
example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the
datastructure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutationDict``, such
as the plain dictionaries returned by the ``json`` module, into the
appropriate type. Defining this method is optional; we could just as well have created our
``JSONEncodedDict`` such that it always returns an instance of ``MutationDict``,
and additionally ensured that all calling code uses ``MutationDict``
explicitly. When :meth:`.Mutable.coerce` is not overridden, any values
applied to a parent object which are not instances of the mutable type
will raise a ``ValueError``.
Our new ``MutationDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
to associate with types. This method grabs the given type object or
class and associates a listener that will detect all future mappings
of this type, applying event listening instrumentation to the mapped
attribute. Such as, with classical table metadata::
from sqlalchemy import Table, Column, Integer
my_data = Table('my_data', metadata,
Column('id', Integer, primary_key=True),
Column('data', MutationDict.as_mutable(JSONEncodedDict))
)
Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
(if the type object was not an instance already), which will intercept any
attributes which are mapped against this type. Below we establish a simple
mapping against the ``my_data`` table::
from sqlalchemy import mapper
class MyDataClass(object):
pass
# associates mutation listeners with MyDataClass.data
mapper(MyDataClass, my_data)
The ``MyDataClass.data`` member will now be notified of in place changes
to its value.
There's no difference in usage when using declarative::
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(MutationDict.as_mutable(JSONEncodedDict))
Any in-place changes to the ``MyDataClass.data`` member
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> m1 = MyDataClass(data={'value1':'foo'})
>>> sess.add(m1)
>>> sess.commit()
>>> m1.data['value1'] = 'bar'
>>> assert m1 in sess.dirty
True
The ``MutationDict`` can be associated with all future instances
of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This
is similar to :meth:`~.Mutable.as_mutable` except it will intercept
all occurrences of ``MutationDict`` in all mappings unconditionally, without
the need to declare it individually::
MutationDict.associate_with(JSONEncodedDict)
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(JSONEncodedDict)
Supporting Pickling
--------------------
The key to the :mod:`sqlalchemy.ext.mutable` extension relies upon the
placement of a ``weakref.WeakKeyDictionary`` upon the value object, which
stores a mapping of parent mapped objects keyed to the attribute name under
which they are associated with this value. ``WeakKeyDictionary`` objects are
not picklable, due to the fact that they contain weakrefs and function
callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
that excludes the :meth:`~.MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_parents', None)
return d
With our dictionary example, we need to return the contents of the dict itself
(and also restore them on __setstate__)::
class MutationDict(Mutable, dict):
# ....
def __getstate__(self):
return dict(self)
def __setstate__(self, state):
self.update(state)
In the case that our mutable value object is pickled as it is attached to one
or more parent objects that are also part of the pickle, the :class:`.Mutable`
mixin will re-establish the :attr:`.Mutable._parents` collection on each value
object as the owning parents themselves are unpickled.
.. _mutable_composites:
Establishing Mutability on Composites
=====================================
Composites are a special ORM feature which allow a single scalar attribute to
be assigned an object value which represents information "composed" from one
or more columns from the underlying mapped table. The usual example is that of
a geometric "point", and is introduced in :ref:`mapper_composite`.
.. versionchanged:: 0.7
The internals of :func:`.orm.composite` have been
greatly simplified and in-place mutation detection is no longer enabled by
default; instead, the user-defined value must detect changes on its own and
propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable`
extension provides the helper class :class:`.MutableComposite`, which is a
slight variant on the :class:`.Mutable` class.
As is the case with :class:`.Mutable`, the user-defined composite class
subclasses :class:`.MutableComposite` as a mixin, and detects and delivers
change events to its parents via the :meth:`.MutableComposite.changed` method.
In the case of a composite class, the detection is usually via the usage of
Python descriptors (i.e. ``@property``), or alternatively via the special
Python method ``__setattr__()``. Below we expand upon the ``Point`` class
introduced in :ref:`mapper_composite` to subclass :class:`.MutableComposite`
and to also route attribute set events via ``__setattr__`` to the
:meth:`.MutableComposite.changed` method::
from sqlalchemy.ext.mutable import MutableComposite
class Point(MutableComposite):
def __init__(self, x, y):
self.x = x
self.y = y
def __setattr__(self, key, value):
"Intercept set events"
# set the attribute
object.__setattr__(self, key, value)
# alert all parents to the change
self.changed()
def __composite_values__(self):
return self.x, self.y
def __eq__(self, other):
return isinstance(other, Point) and \\
other.x == self.x and \\
other.y == self.y
def __ne__(self, other):
return not self.__eq__(other)
The :class:`.MutableComposite` class uses a Python metaclass to automatically
establish listeners for any usage of :func:`.orm.composite` that specifies our
``Point`` type. Below, when ``Point`` is mapped to the ``Vertex`` class,
listeners are established which will route change events from ``Point``
objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes::
from sqlalchemy.orm import composite, mapper
from sqlalchemy import Table, Column
vertices = Table('vertices', metadata,
Column('id', Integer, primary_key=True),
Column('x1', Integer),
Column('y1', Integer),
Column('x2', Integer),
Column('y2', Integer),
)
class Vertex(object):
pass
mapper(Vertex, vertices, properties={
'start': composite(Point, vertices.c.x1, vertices.c.y1),
'end': composite(Point, vertices.c.x2, vertices.c.y2)
})
Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
>>> sess.add(v1)
>>> sess.commit()
>>> v1.end.x = 8
>>> assert v1 in sess.dirty
True
Coercing Mutable Composites
---------------------------
The :meth:`.MutableBase.coerce` method is also supported on composite types.
In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce`
method is only called for attribute set operations, not load operations.
Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent
to using a :func:`.validates` validation routine for all attributes which
make use of the custom composite type::
class Point(MutableComposite):
# other Point methods
# ...
def coerce(cls, key, value):
if isinstance(value, tuple):
value = Point(*value)
elif not isinstance(value, Point):
raise ValueError("tuple or Point expected")
return value
.. versionadded:: 0.7.10,0.8.0b2
Support for the :meth:`.MutableBase.coerce` method in conjunction with
objects of type :class:`.MutableComposite`.
Supporting Pickling
--------------------
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
:meth:`.MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
the minimal form of our ``Point`` class::
class Point(MutableComposite):
# ...
def __getstate__(self):
return self.x, self.y
def __setstate__(self, state):
self.x, self.y = state
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
:meth:`.MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import event, types
from sqlalchemy.orm import mapper, object_mapper, Mapper
from sqlalchemy.util import memoized_property
import weakref
class MutableBase(object):
    """Common base class to :class:`.Mutable` and :class:`.MutableComposite`."""

    @memoized_property
    def _parents(self):
        """Dictionary of parent object->attribute name on the parent.

        This attribute is a so-called "memoized" property.  It initializes
        itself with a new ``weakref.WeakKeyDictionary`` the first time
        it is accessed, returning the same object upon subsequent access.

        """
        return weakref.WeakKeyDictionary()

    @classmethod
    def coerce(cls, key, value):
        """Given a value, coerce it into the target type.

        Can be overridden by custom subclasses to coerce incoming
        data into a particular type.

        By default, raises ``ValueError``.

        This method is called in different scenarios depending on if
        the parent class is of type :class:`.Mutable` or of type
        :class:`.MutableComposite`.  In the case of the former, it is called
        for both attribute-set operations as well as during ORM loading
        operations.  For the latter, it is only called during attribute-set
        operations; the mechanics of the :func:`.composite` construct
        handle coercion during load operations.

        :param key: string name of the ORM-mapped attribute being set.
        :param value: the incoming value.
        :return: the method should return the coerced value, or raise
         ``ValueError`` if the coercion cannot be completed.

        """
        # None is always allowed through; anything else must be handled
        # by a subclass override.
        if value is None:
            return None
        raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value)))

    @classmethod
    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
        """Establish this type as a mutation listener for the given
        mapped descriptor.

        """
        key = attribute.key
        if parent_cls is not attribute.class_:
            return

        # rely on "propagate" here
        parent_cls = attribute.class_

        def load(state, *args):
            """Listen for objects loaded or refreshed.

            Wrap the target data member's value with
            ``Mutable``.

            """
            val = state.dict.get(key, None)
            if val is not None:
                if coerce:
                    val = cls.coerce(key, val)
                    state.dict[key] = val
                # register this object as a parent of the loaded value so
                # in-place mutations can flag the attribute as dirty
                val._parents[state.obj()] = key

        def set(target, value, oldvalue, initiator):
            """Listen for set/replace events on the target
            data member.

            Establish a weak reference to the parent object
            on the incoming value, remove it for the one
            outgoing.

            """
            if not isinstance(value, cls):
                # coerce plain values (e.g. dicts) into the mutable type
                value = cls.coerce(key, value)
            if value is not None:
                value._parents[target.obj()] = key
            if isinstance(oldvalue, cls):
                # the replaced value no longer needs to notify this parent
                oldvalue._parents.pop(target.obj(), None)
            return value

        def pickle(state, state_dict):
            # stash mutable values in the pickled state so their _parents
            # collections can be re-established on unpickle
            val = state.dict.get(key, None)
            if val is not None:
                if 'ext.mutable.values' not in state_dict:
                    state_dict['ext.mutable.values'] = []
                state_dict['ext.mutable.values'].append(val)

        def unpickle(state, state_dict):
            # restore the parent backrefs that were dropped during pickling
            if 'ext.mutable.values' in state_dict:
                for val in state_dict['ext.mutable.values']:
                    val._parents[state.obj()] = key

        event.listen(parent_cls, 'load', load, raw=True, propagate=True)
        event.listen(parent_cls, 'refresh', load, raw=True, propagate=True)
        event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True)
        event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True)
        event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True)
class Mutable(MutableBase):
    """Mixin that defines transparent propagation of change
    events to a parent object.

    See the example in :ref:`mutable_scalars` for usage information.

    """

    def changed(self):
        """Subclasses should call this method whenever change events occur."""
        for parent, key in self._parents.items():
            # mark the owning attribute as dirty on every parent object
            flag_modified(parent, key)

    @classmethod
    def associate_with_attribute(cls, attribute):
        """Establish this type as a mutation listener for the given
        mapped descriptor.

        """
        cls._listen_on_attribute(attribute, True, attribute.class_)

    @classmethod
    def associate_with(cls, sqltype):
        """Associate this wrapper with all future mapped columns
        of the given type.

        This is a convenience method that calls ``associate_with_attribute`` automatically.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected.   Only use
           :meth:`.associate_with` for types that are permanent to an application,
           not with ad-hoc types else this will cause unbounded growth
           in memory usage.

        """
        def listen_for_type(mapper, class_):
            # instrument any mapped column whose type is an *instance of*
            # the given type class
            for prop in mapper.iterate_properties:
                if hasattr(prop, 'columns'):
                    if isinstance(prop.columns[0].type, sqltype):
                        cls.associate_with_attribute(getattr(class_, prop.key))

        # note: 'mapper' here is the module-level mapper() function,
        # used as an event target meaning "all mappers"
        event.listen(mapper, 'mapper_configured', listen_for_type)

    @classmethod
    def as_mutable(cls, sqltype):
        """Associate a SQL type with this mutable Python type.

        This establishes listeners that will detect ORM mappings against
        the given type, adding mutation event trackers to those mappings.

        The type is returned, unconditionally as an instance, so that
        :meth:`.as_mutable` can be used inline::

            Table('mytable', metadata,
                Column('id', Integer, primary_key=True),
                Column('data', MyMutableType.as_mutable(PickleType))
            )

        Note that the returned type is always an instance, even if a class
        is given, and that only columns which are declared specifically with that
        type instance receive additional instrumentation.

        To associate a particular mutable type with all occurrences of a
        particular type, use the :meth:`.Mutable.associate_with` classmethod
        of the particular :meth:`.Mutable` subclass to establish a global
        association.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected.   Only use
           :meth:`.as_mutable` for types that are permanent to an application,
           not with ad-hoc types else this will cause unbounded growth
           in memory usage.

        """
        sqltype = types.to_instance(sqltype)

        def listen_for_type(mapper, class_):
            # instrument only columns declared with this exact type instance
            for prop in mapper.iterate_properties:
                if hasattr(prop, 'columns'):
                    if prop.columns[0].type is sqltype:
                        cls.associate_with_attribute(getattr(class_, prop.key))

        event.listen(mapper, 'mapper_configured', listen_for_type)

        return sqltype
class MutableComposite(MutableBase):
    """Mixin that defines transparent propagation of change
    events on a SQLAlchemy "composite" object to its
    owning parent or parents.

    See the example in :ref:`mutable_composites` for usage information.

    .. warning::

       The listeners established by the :class:`.MutableComposite`
       class are *global* to all mappers, and are *not* garbage collected.   Only use
       :class:`.MutableComposite` for types that are permanent to an application,
       not with ad-hoc types else this will cause unbounded growth
       in memory usage.

    """

    def changed(self):
        """Subclasses should call this method whenever change events occur."""

        for parent, key in self._parents.items():

            prop = object_mapper(parent).get_property(key)
            # write each composite value back to the corresponding mapped
            # column attribute, which flags the parent as dirty
            for value, attr_name in zip(
                    self.__composite_values__(),
                    prop._attribute_keys):
                setattr(parent, attr_name, value)
def _setup_composite_listener():
    """Install a global 'mapper_configured' hook that attaches mutation
    listeners to every composite whose class mixes in MutableComposite."""
    def _listen_for_type(mapper, class_):
        # Scan the mapper's properties for MutableComposite composites.
        for prop in mapper.iterate_properties:
            composite_cls = getattr(prop, 'composite_class', None)
            if composite_cls is None:
                continue
            if issubclass(composite_cls, MutableComposite):
                composite_cls._listen_on_attribute(
                    getattr(class_, prop.key), False, class_)

    # Avoid registering the hook twice if this module is set up again.
    already_registered = Mapper.dispatch.mapper_configured._contains(
        Mapper, _listen_for_type)
    if not already_registered:
        event.listen(Mapper, 'mapper_configured', _listen_for_type)
_setup_composite_listener()
|
berquist/cclib | refs/heads/master | test/method/testmoments.py | 3 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2018, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test the Moments method in cclib"""
import unittest
from unittest import mock
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from cclib.method import Moments
class TestIdealizedInputs(unittest.TestCase):
    """Exercise the Moments method against idealized point-charge systems."""

    # A linear dipole: charges -1 and +1 separated by 2 units along x.
    linear_dipole_attrs = {
        'atomcoords': np.array([[[-1, 0, 0], [ 1, 0, 0]]]),
        'atomcharges': {'mulliken': [-1, 1]},
        'atomnos': [1, 1]
    }

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_dipole_moment(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        x = Moments(mock).calculate()[1]
        assert_almost_equal(x / 4.80320425, [2, 0, 0])

    @unittest.skip("This does not pass for some reason.")
    @mock.patch('cclib.parser.ccData', spec=True)
    def test_nonzero_quadrupole_moment(self, mock):
        mock.atomcoords = np.array([[
            [-1, 0, 0],
            [0, 0, 0],
            [1, 0, 0]]])
        # The periods are for Python2.
        mock.atomcharges = {'mulliken': [1/2., -1., 1/2.]}
        mock.atomnos = np.ones(mock.atomcoords.shape[1])
        x = Moments(mock).calculate()
        self.assertEqual(np.count_nonzero(x[1]), 0)
        assert_almost_equal(x[2] / 4.80320423, [1, 0, 0, -0.5, 0, -0.5])

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_zero_moments(self, mock):
        mock.atomcoords = np.array([[
            [-2, 0, 0],
            [-1, 0, 0],
            [0, 0, 0],
            [1, 0, 0],
            [2, 0, 0]]])
        # The periods are for Python2.
        mock.atomcharges = {'mulliken': [-1/8., 1/2., -3/4., 1/2., -1/8.]}
        mock.atomnos = np.ones(mock.atomcoords.shape[1])
        x = Moments(mock).calculate()
        self.assertEqual(np.count_nonzero(x[1]), 0)
        self.assertEqual(np.count_nonzero(x[2]), 0)

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_invariant_to_origin_displacement(self, mock):
        # A neutral system's dipole must not depend on the chosen origin.
        mock.configure_mock(**self.linear_dipole_attrs)
        x = Moments(mock).calculate(origin=[0, 0, 0])[1]
        y = Moments(mock).calculate(origin=[1, 1, 1])[1]
        assert_equal(x, y)

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_variant_to_origin_displacement(self, mock):
        # A charged system's dipole does depend on the chosen origin.
        attrs = dict(self.linear_dipole_attrs, **{
            'atomcharges': {'mulliken': [-1, 2]}
        })
        mock.configure_mock(**attrs)
        x = Moments(mock).calculate(origin=[0, 0, 0])[1]
        y = Moments(mock).calculate(origin=[1, 1, 1])[1]
        self.assertFalse(np.array_equal(x, y))

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_origin_at_center_of_nuclear_charge(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        x = Moments(mock).calculate()[0]
        assert_equal(x, [0, 0, 0])

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_origin_at_center_of_mass(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        mock.atommasses = np.ones(mock.atomcoords.shape[1])
        x = Moments(mock).calculate(origin='mass')[0]
        assert_equal(x, [0, 0, 0])

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_user_provided_origin(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        x = Moments(mock).calculate(origin=[1, 1, 1])
        assert_almost_equal(x[0], [1, 1, 1])

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_user_provided_masses(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        x = Moments(mock).calculate(masses=[1, 3], origin='mass')
        assert_almost_equal(x[0], [0.5, 0, 0])

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_not_providing_masses(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        # Replace with the regex version when Python2 is dropped.
        # with self.assertRaisesRegex(ValueError, 'masses'):
        with self.assertRaises(ValueError):
            Moments(mock).calculate(origin='mass')

    @mock.patch('cclib.parser.ccData', spec=True)
    def test_results_storing(self, mock):
        mock.configure_mock(**self.linear_dipole_attrs)
        mock.atomcharges.update({'lowdin': [-0.5, 0.5]})
        m = Moments(mock)
        m.calculate(population='mulliken')
        m.calculate(population='lowdin')
        a, b = m.results['mulliken'][1], m.results['lowdin'][1]
        self.assertFalse(np.array_equal(a, b))
if __name__ == '__main__':
    # Run only this module's idealized-input tests, verbosely.
    suite = unittest.makeSuite(TestIdealizedInputs)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
dunkhong/grr | refs/heads/master | grr/server/grr_response_server/gui/api_plugins/report_plugins/server_report_plugins.py | 2 | #!/usr/bin/env python
"""UI server report handling classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import math
import re
from future.builtins import range
from future.utils import iteritems
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import events as rdf_events
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
RepresentationType = rdf_report_plugins.ApiReportData.RepresentationType
def _LoadAuditEvents(handlers, get_report_args, transformers=None):
  """Returns AuditEvents for given handlers, actions, and timerange.

  Args:
    handlers: Dict mapping API router method names to legacy audit actions.
    get_report_args: Report arguments providing start_time and duration.
    transformers: Optional iterable of callables taking (entry, event) that
      enrich each generated event in place.

  Returns:
    A list of AuditEvents sorted by timestamp, newest first.
  """
  if transformers is None:
    # Callers pass lists of callables; default to an empty list (the previous
    # default of {} was type-inconsistent with that usage).
    transformers = []
  entries = data_store.REL_DB.ReadAPIAuditEntries(
      min_timestamp=get_report_args.start_time,
      max_timestamp=get_report_args.start_time + get_report_args.duration,
      router_method_names=list(handlers.keys()))
  rows = [_EntryToEvent(entry, handlers, transformers) for entry in entries]
  rows.sort(key=lambda row: row.timestamp, reverse=True)
  return rows
def _EntryToEvent(entry, handlers, transformers):
  """Builds a legacy AuditEvent from an APIAuditEntry.

  The audit action is looked up from the entry's router method name; each
  transformer may then mutate the event in place.
  """
  action = handlers[entry.router_method_name]
  event = rdf_events.AuditEvent(
      timestamp=entry.timestamp, user=entry.username, action=action)
  for transform in transformers:
    transform(entry, event)
  return event
def _ExtractClientIdFromPath(entry, event):
"""Extracts a Client ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*(C\.[0-9a-fA-F]{16}).*", entry.http_request_path)
if match:
event.client = match.group(1)
# TODO: Remove AFF4 URNs from the API data format.
def _ExtractCronJobIdFromPath(entry, event):
"""Extracts a CronJob ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*cron-job/([^/]+).*", entry.http_request_path)
if match:
event.urn = "aff4:/cron/{}".format(match.group(1))
def _ExtractHuntIdFromPath(entry, event):
"""Extracts a Hunt ID from an APIAuditEntry's HTTP request path."""
match = re.match(r".*hunt/([^/]+).*", entry.http_request_path)
if match:
event.urn = "aff4:/hunts/{}".format(match.group(1))
class ClientApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
  """Given timerange's client approvals."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "Client Approvals"
  SUMMARY = "Client approval requests and grants for the given timerange."
  REQUIRES_TIME_RANGE = True

  # Audit-event fields rendered by the UI chart.
  USED_FIELDS = ["action", "client", "timestamp", "user"]
  # TODO: Rework API data format, to remove need for legacy
  # AuditEvent.Action.
  # Maps API router method names to the legacy audit actions they represent.
  HANDLERS = {
      "GrantClientApproval":
          rdf_events.AuditEvent.Action.CLIENT_APPROVAL_GRANT,
      "CreateClientApproval":
          rdf_events.AuditEvent.Action.CLIENT_APPROVAL_REQUEST,
  }

  def GetReportData(self, get_report_args, token=None):
    """Filter the client approvals in the given timerange."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.AUDIT_CHART,
        audit_chart=rdf_report_plugins.ApiAuditChartReportData(
            used_fields=self.USED_FIELDS))
    # Attach the client id parsed from each request path to its event.
    ret.audit_chart.rows = _LoadAuditEvents(
        self.HANDLERS, get_report_args, transformers=[_ExtractClientIdFromPath])
    return ret
class CronApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
  """Given timerange's cron job approvals."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "Cron Job Approvals"
  SUMMARY = "Cron job approval requests and grants for the given timerange."
  REQUIRES_TIME_RANGE = True

  # Audit-event fields rendered by the UI chart.
  USED_FIELDS = ["action", "timestamp", "user", "urn"]
  # Maps API router method names to the legacy audit actions they represent.
  HANDLERS = {
      "GrantCronJobApproval":
          rdf_events.AuditEvent.Action.CRON_APPROVAL_GRANT,
      "CreateCronJobApproval":
          rdf_events.AuditEvent.Action.CRON_APPROVAL_REQUEST,
  }

  def GetReportData(self, get_report_args, token):
    """Filter the cron job approvals in the given timerange."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.AUDIT_CHART,
        audit_chart=rdf_report_plugins.ApiAuditChartReportData(
            used_fields=self.USED_FIELDS))
    # Attach the cron-job URN parsed from each request path to its event.
    ret.audit_chart.rows = _LoadAuditEvents(
        self.HANDLERS,
        get_report_args,
        transformers=[_ExtractCronJobIdFromPath])
    return ret
# TODO: Migrate from AuditEvent to Hunts database table as source.
class HuntActionsReportPlugin(report_plugin_base.ReportPluginBase):
  """Hunt actions in the given timerange."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "Hunts"
  SUMMARY = "Hunt management actions for the given timerange."
  REQUIRES_TIME_RANGE = True

  # Audit-event fields rendered by the UI chart.
  USED_FIELDS = ["action", "timestamp", "user"]
  # NOTE(review): TYPES is not referenced anywhere in this class; confirm it
  # is used externally before removing it.
  TYPES = [
      rdf_events.AuditEvent.Action.HUNT_CREATED,
      rdf_events.AuditEvent.Action.HUNT_MODIFIED,
      rdf_events.AuditEvent.Action.HUNT_PAUSED,
      rdf_events.AuditEvent.Action.HUNT_STARTED,
      rdf_events.AuditEvent.Action.HUNT_STOPPED
  ]
  # Maps API router method names to the legacy audit actions they represent.
  HANDLERS = {
      "CreateHunt": rdf_events.AuditEvent.Action.HUNT_CREATED,
      "ModifyHunt": rdf_events.AuditEvent.Action.HUNT_MODIFIED,
  }

  def GetReportData(self, get_report_args, token):
    """Filter the hunt actions in the given timerange."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.AUDIT_CHART,
        audit_chart=rdf_report_plugins.ApiAuditChartReportData(
            used_fields=self.USED_FIELDS))
    ret.audit_chart.rows = _LoadAuditEvents(self.HANDLERS, get_report_args)
    return ret
class HuntApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
  """Given timerange's hunt approvals."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "Hunt Approvals"
  SUMMARY = "Hunt approval requests and grants for the given timerange."
  REQUIRES_TIME_RANGE = True

  # Audit-event fields rendered by the UI chart.
  USED_FIELDS = ["action", "timestamp", "user", "urn"]
  # Maps API router method names to the legacy audit actions they represent.
  HANDLERS = {
      "GrantHuntApproval": rdf_events.AuditEvent.Action.HUNT_APPROVAL_GRANT,
      "CreateHuntApproval": rdf_events.AuditEvent.Action.HUNT_APPROVAL_REQUEST,
  }

  def GetReportData(self, get_report_args, token):
    """Filter the hunt approvals in the given timerange."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.AUDIT_CHART,
        audit_chart=rdf_report_plugins.ApiAuditChartReportData(
            used_fields=self.USED_FIELDS))
    # Attach the hunt URN parsed from each request path to its event.
    ret.audit_chart.rows = _LoadAuditEvents(
        self.HANDLERS, get_report_args, transformers=[_ExtractHuntIdFromPath])
    return ret
class MostActiveUsersReportPlugin(report_plugin_base.ReportPluginBase):
  """Reports client activity by week."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "User Breakdown"
  SUMMARY = "Active user actions."
  REQUIRES_TIME_RANGE = True

  def _GetUserCounts(self, get_report_args, token=None):
    """Counts audit entries per username within the report's timerange."""
    counter = collections.Counter()
    entries = data_store.REL_DB.CountAPIAuditEntriesByUserAndDay(
        min_timestamp=get_report_args.start_time,
        max_timestamp=get_report_args.start_time + get_report_args.duration)
    # Entries are keyed by (username, day); fold the per-day counts together.
    for (username, _), count in iteritems(entries):
      counter[username] += count
    return counter

  def GetReportData(self, get_report_args, token):
    """Filter the last week of user actions."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.PIE_CHART)
    counts = self._GetUserCounts(get_report_args, token)
    # Hide automation accounts. Counter.__delitem__ is a no-op for absent
    # keys, so this is safe even when a system user has no entries.
    for username in access_control.SYSTEM_USERS:
      del counts[username]
    ret.pie_chart.data = [
        rdf_report_plugins.ApiReportDataPoint1D(x=count, label=user)
        for user, count in sorted(iteritems(counts))
    ]
    return ret
class BaseUserFlowReportPlugin(report_plugin_base.ReportPluginBase):
  """Count given timerange's flows by type."""

  def IncludeUser(self, username):
    """Filter hook; subclasses restrict which creators are counted."""
    return True

  def _GetFlows(self, get_report_args, token):
    """Returns {flow_class_name: Counter({creator: count})} for the range."""
    counts = collections.defaultdict(collections.Counter)
    flows = data_store.REL_DB.ReadAllFlowObjects(
        min_create_time=get_report_args.start_time,
        max_create_time=get_report_args.start_time + get_report_args.duration,
        include_child_flows=False)
    for flow in flows:
      if self.IncludeUser(flow.creator):
        counts[flow.flow_class_name][flow.creator] += 1
    return counts

  def GetReportData(self, get_report_args, token):
    """Builds a stack chart of flow launches, one bar per flow type."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.STACK_CHART,
        stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))
    counts = self._GetFlows(get_report_args, token)
    # Total launches per flow type, across all creators.
    total_counts = collections.Counter(
        {flow: sum(cts.values()) for flow, cts in iteritems(counts)})
    for i, (flow, total_count) in enumerate(total_counts.most_common()):
      # Show the three most frequent launchers in the series label.
      topusercounts = counts[flow].most_common(3)
      topusers = ", ".join(
          "{} ({})".format(user, count) for user, count in topusercounts)
      ret.stack_chart.data.append(
          rdf_report_plugins.ApiReportDataSeries2D(
              # \u2003 is an emspace, a long whitespace character.
              label="{}\u2003Run By: {}".format(flow, topusers),
              points=[
                  rdf_report_plugins.ApiReportDataPoint2D(x=i, y=total_count)
              ]))
    return ret
class UserFlowsReportPlugin(BaseUserFlowReportPlugin):
  """Count given timerange's user-created flows by type."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "User Flows"
  SUMMARY = ("Flows launched by GRR users over the given timerange grouped by "
             "type.")
  REQUIRES_TIME_RANGE = True

  def IncludeUser(self, username):
    """Only count flows started by humans (non-system accounts)."""
    return username not in access_control.SYSTEM_USERS
class SystemFlowsReportPlugin(BaseUserFlowReportPlugin):
  """Count given timerange's system-created flows by type."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "System Flows"
  SUMMARY = ("Flows launched by GRR crons and workers over the given timerange"
             " grouped by type.")
  REQUIRES_TIME_RANGE = True

  def IncludeUser(self, username):
    """Only count flows started by system accounts (crons, workers)."""
    return username in access_control.SYSTEM_USERS
class UserActivityReportPlugin(report_plugin_base.ReportPluginBase):
  """Display user activity by week."""

  TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
  TITLE = "User Activity"
  SUMMARY = "Number of flows ran by each user."
  REQUIRES_TIME_RANGE = True

  def _LoadUserActivity(self, start_time, end_time, token):
    """Yields (username, day, count) tuples within [start_time, end_time]."""
    counts = data_store.REL_DB.CountAPIAuditEntriesByUserAndDay(
        min_timestamp=start_time, max_timestamp=end_time)
    for (username, day), count in iteritems(counts):
      yield username, day, count

  def GetReportData(self, get_report_args, token):
    """Filter the last week of user actions."""
    ret = rdf_report_plugins.ApiReportData(
        representation_type=RepresentationType.STACK_CHART)
    week_duration = rdfvalue.Duration.From(7, rdfvalue.DAYS)
    # Round the requested duration up to a whole number of weeks.
    num_weeks = int(
        math.ceil(
            rdfvalue.Duration(get_report_args.duration).ToFractional(
                rdfvalue.SECONDS) /
            week_duration.ToFractional(rdfvalue.SECONDS)))
    weeks = range(0, num_weeks)
    start_time = get_report_args.start_time
    end_time = start_time + num_weeks * week_duration
    # Every user starts with a zero count in every week bucket so that the
    # stack chart has a point for each week.
    user_activity = collections.defaultdict(lambda: {week: 0 for week in weeks})
    entries = self._LoadUserActivity(
        start_time=get_report_args.start_time, end_time=end_time, token=token)
    for username, timestamp, count in entries:
      # Map the entry's day onto its 0-based week index.
      week = (timestamp - start_time).ToInt(
          rdfvalue.SECONDS) // week_duration.ToInt(rdfvalue.SECONDS)
      if week in user_activity[username]:
        user_activity[username][week] += count
    user_activity = sorted(iteritems(user_activity))
    # Drop automation accounts from the chart.
    user_activity = [(user, data)
                     for user, data in user_activity
                     if user not in access_control.SYSTEM_USERS]
    ret.stack_chart.data = [
        rdf_report_plugins.ApiReportDataSeries2D(
            label=user,
            points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
                    for x, y in sorted(data.items())))
        for user, data in user_activity
    ]
    return ret
|
don-github/edx-platform | refs/heads/master | cms/djangoapps/contentstore/features/transcripts.py | 54 | # disable missing docstring
# pylint: disable=missing-docstring
import os
from lettuce import world, step
from django.conf import settings
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
from splinter.request_handler.request_handler import RequestHandler
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT

# We should wait 300 ms for event handler invocation + 200ms for safety.
DELAY = 0.5

# Validation errors shown in the transcripts error bar, keyed by short name.
ERROR_MESSAGES = {
    'url_format': u'Incorrect url format.',
    'file_type': u'Link types should be unique.',
    'links_duplication': u'Links should be unique.',
}

# Status-bar captions keyed by transcript lookup outcome.
STATUSES = {
    'found': u'Timed Transcript Found',
    'not found on edx': u'No EdX Timed Transcript',
    'not found': u'No Timed Transcript',
    'replace': u'Timed Transcript Conflict',
    'uploaded_successfully': u'Timed Transcript Uploaded Successfully',
    'use existing': u'Confirm Timed Transcript',
}

# CSS selectors for the pieces of the transcripts editor UI.
SELECTORS = {
    'error_bar': '.transcripts-error-message',
    'url_inputs': '.videolist-settings-item input.input',
    'collapse_link': '.collapse-action.collapse-setting',
    'collapse_bar': '.videolist-extra-videos',
    'status_bar': '.transcripts-message-status',
}

# button type , button css selector, button message
TRANSCRIPTS_BUTTONS = {
    'import': ('.setting-import', 'Import YouTube Transcript'),
    'download_to_edit': ('.setting-download', 'Download Transcript for Editing'),
    'disabled_download_to_edit': ('.setting-download.is-disabled', 'Download Transcript for Editing'),
    'upload_new_timed_transcripts': ('.setting-upload', 'Upload New Transcript'),
    'replace': ('.setting-replace', 'Yes, replace the edX transcript with the YouTube transcript'),
    'choose': ('.setting-choose', 'Timed Transcript from {}'),
    'use_existing': ('.setting-use-existing', 'Use Current Transcript'),
}
@step('I clear fields$')
def clear_fields(_step):
    """Blank every URL input at once via jQuery, re-enabling them first."""
    # Clear the input fields and trigger an 'input' event
    script = """
        $('{selector}')
            .prop('disabled', false)
            .removeClass('is-disabled')
            .attr('aria-disabled', false)
            .val('')
            .trigger('input');
    """.format(selector=SELECTORS['url_inputs'])
    world.browser.execute_script(script)
    world.wait(DELAY)
    world.wait_for_ajax_complete()
@step('I clear field number (.+)$')
def clear_field(_step, index):
    """Blank a single URL input; `index` is 1-based in the scenario text."""
    index = int(index) - 1
    world.css_fill(SELECTORS['url_inputs'], '', index)
    # For some reason ChromeDriver doesn't trigger an 'input' event after filling
    # the field with an empty value. That's why we trigger it manually via jQuery.
    world.trigger_event(SELECTORS['url_inputs'], event='input', index=index)
    world.wait(DELAY)
    world.wait_for_ajax_complete()
@step('I expect (.+) inputs are disabled$')
def inputs_are_disabled(_step, indexes):
    """Assert that each comma-separated (1-based) URL input is disabled."""
    index_list = [int(i.strip()) - 1 for i in indexes.split(',')]
    for index in index_list:
        el = world.css_find(SELECTORS['url_inputs'])[index]
        assert el['disabled']
@step('I expect inputs are enabled$')
def inputs_are_enabled(_step):
    """Assert all URL inputs are enabled (the editor always renders three)."""
    for index in range(3):
        el = world.css_find(SELECTORS['url_inputs'])[index]
        assert not el['disabled']
@step('I do not see error message$')
def i_do_not_see_error_message(_step):
    """Assert the transcripts error bar is hidden."""
    assert not world.css_visible(SELECTORS['error_bar'])
@step('I see error message "([^"]*)"$')
def i_see_error_message(_step, error):
    """Assert the error bar shows the message keyed by `error`."""
    assert world.css_has_text(SELECTORS['error_bar'], ERROR_MESSAGES[error])
@step('I do not see status message$')
def i_do_not_see_status_message(_step):
    """Assert the transcripts status bar is hidden."""
    assert not world.css_visible(SELECTORS['status_bar'])
@step('I see status message "([^"]*)"$')
def i_see_status_message(_step, status):
    """Assert the status bar shows STATUSES[status] and no error is shown."""
    assert not world.css_visible(SELECTORS['error_bar'])
    assert world.css_has_text(SELECTORS['status_bar'], STATUSES[status])

    # Whenever an *enabled* download button is present, the transcript must
    # actually be downloadable.
    DOWNLOAD_BUTTON = TRANSCRIPTS_BUTTONS["download_to_edit"][0]
    if world.is_css_present(DOWNLOAD_BUTTON, wait_time=1) and not world.css_find(DOWNLOAD_BUTTON)[0].has_class('is-disabled'):
        assert _transcripts_are_downloaded()
@step('I (.*)see button "([^"]*)"$')
def i_see_button(_step, not_see, button_type):
    """Assert a transcript button is present with its caption, or absent.

    An optional "do not " capture in the step text flips the assertion.
    """
    button = button_type.strip()
    if not_see.strip():
        assert world.is_css_not_present(TRANSCRIPTS_BUTTONS[button][0])
    else:
        assert world.css_has_text(TRANSCRIPTS_BUTTONS[button][0], TRANSCRIPTS_BUTTONS[button][1])
@step('I (.*)see (.*)button "([^"]*)" number (\d+)$')
def i_see_button_with_custom_text(_step, not_see, button_type, custom_text, index):
    """Like i_see_button, but the caption template is filled with custom text
    and the assertion targets the (1-based) `index`-th matching button."""
    button = button_type.strip()
    custom_text = custom_text.strip()
    index = int(index.strip()) - 1
    if not_see.strip():
        assert world.is_css_not_present(TRANSCRIPTS_BUTTONS[button][0])
    else:
        assert world.css_has_text(TRANSCRIPTS_BUTTONS[button][0], TRANSCRIPTS_BUTTONS[button][1].format(custom_text), index)
@step('I click transcript button "([^"]*)"$')
def click_button_transcripts_variant(_step, button_type):
    """Click the named transcript button and wait for AJAX to settle."""
    button = button_type.strip()
    world.css_click(TRANSCRIPTS_BUTTONS[button][0])
    world.wait_for_ajax_complete()
@step('I click transcript button "([^"]*)" number (\d+)$')
def click_button_index(_step, button_type, index):
    """Click the (1-based) `index`-th instance of the named transcript button."""
    button = button_type.strip()
    index = int(index.strip()) - 1
    world.css_click(TRANSCRIPTS_BUTTONS[button][0], index)
    world.wait_for_ajax_complete()
@step('I remove "([^"]+)" transcripts id from store')
def remove_transcripts_from_store(_step, subs_id):
    """Remove from store, if transcripts content exists."""
    filename = 'subs_{0}.srt.sjson'.format(subs_id.strip())
    content_location = StaticContent.compute_location(
        world.scenario_dict['COURSE'].id,
        filename
    )
    try:
        content = contentstore().find(content_location)
        contentstore().delete(content.location)
        print('Transcript file was removed from store.')
    except NotFoundError:
        # Nothing stored under this id; fine for test setup.
        print('Transcript file was NOT found and not removed.')
@step('I enter a "([^"]+)" source to field number (\d+)$')
def i_enter_a_source(_step, link, index):
    """Type a video source URL into the (1-based) `index`-th URL field.

    Fields beyond the first live in a collapsible section, which is expanded
    first when needed.
    """
    index = int(index) - 1
    # Bug fix: this previously read `index is not 0`, an identity comparison
    # against an int literal that only worked via CPython's small-int caching
    # (and is a SyntaxWarning on Python 3.8+). Value comparison is intended.
    if index != 0 and not world.css_visible(SELECTORS['collapse_bar']):
        world.css_click(SELECTORS['collapse_link'])
        assert world.css_visible(SELECTORS['collapse_bar'])
    world.css_fill(SELECTORS['url_inputs'], link, index)
    world.wait(DELAY)
    world.wait_for_ajax_complete()
@step('I upload the transcripts file "([^"]*)"$')
def upload_file(_step, file_name):
    """Attach a transcript file from the common test-data uploads directory."""
    path = os.path.join(TEST_ROOT, 'uploads/', file_name.strip())
    # The file chooser form is hidden by default; reveal it so we can attach.
    world.browser.execute_script("$('form.file-chooser').show()")
    world.browser.attach_file('transcript-file', os.path.abspath(path))
    world.wait_for_ajax_complete()
@step('I see "([^"]*)" text in the captions')
def check_text_in_the_captions(_step, text):
    """Assert the given text appears in the rendered video captions."""
    world.wait_for_present('.video.is-captions-rendered')
    # Captions load asynchronously; wait until some subtitle text exists.
    world.wait_for(lambda _: world.css_text('.subtitles'), timeout=30)
    actual_text = world.css_text('.subtitles')
    assert text in actual_text
@step('I see value "([^"]*)" in the field "([^"]*)"$')
def check_transcripts_field(_step, values, field_name):
    """Assert an Advanced-tab field holds one of the '|'-separated values."""
    world.select_editor_tab('Advanced')
    tab = world.css_find('#settings-tab').first
    # Resolve the input id from its <label> text.
    field_id = '#' + tab.find_by_xpath('.//label[text()="%s"]' % field_name.strip())[0]['for']
    values_list = [i.strip() == world.css_value(field_id) for i in values.split('|')]
    assert any(values_list)
    world.select_editor_tab('Basic')
@step('I save changes$')
def save_changes(_step):
    """Save the currently edited component."""
    world.save_component()
@step('I open tab "([^"]*)"$')
def open_tab(_step, tab_name):
    """Switch the component editor to the named tab."""
    world.select_editor_tab(tab_name)
@step('I set value "([^"]*)" to the field "([^"]*)"$')
def set_value_transcripts_field(_step, value, field_name):
    """Fill a settings-tab text field (located by its label) via jQuery."""
    tab = world.css_find('#settings-tab').first
    XPATH = './/label[text()="{name}"]'.format(name=field_name)
    SELECTOR = '#' + tab.find_by_xpath(XPATH)[0]['for']
    element = world.css_find(SELECTOR).first
    if element['type'] == 'text':
        # Set the value and fire 'change' so the editor picks it up.
        SCRIPT = '$("{selector}").val("{value}").change()'.format(
            selector=SELECTOR,
            value=value
        )
        world.browser.execute_script(SCRIPT)
        assert world.css_has_value(SELECTOR, value)
    else:
        # Only plain text inputs are supported by this step.
        assert False, 'Incorrect element type.'
    world.wait_for_ajax_complete()
@step('I revert the transcript field "([^"]*)"$')
def revert_transcripts_field(_step, field_name):
    """Reset the named setting back to its default value."""
    world.revert_setting_entry(field_name)
def _transcripts_are_downloaded():
    """True when the download button's href answers with a success status."""
    world.wait_for_ajax_complete()
    request = RequestHandler()
    DOWNLOAD_BUTTON = world.css_find(TRANSCRIPTS_BUTTONS["download_to_edit"][0]).first
    url = DOWNLOAD_BUTTON['href']
    request.connect(url)
    return request.status_code.is_success()
|
tlhallock/line-search-dfo | refs/heads/master | python/algorithms/filter_linesearch.py | 1 | from math import inf as infinity
from numpy import int as integral
from numpy import bmat as blockmat
from numpy import concatenate
from numpy import dot
from numpy import empty
from numpy import zeros
from numpy.linalg import cond as condition_number
from numpy.linalg import lstsq
from numpy.linalg import norm as norm
from numpy.linalg import solve as linsolve
from scipy.optimize import minimize
import matplotlib.pyplot as plt
from utilities.nondom import NonDomSet
class Constants:
    """Tuning parameters of the filter line search.

    The bracketed comments give the admissible range for each parameter.
    """

    def __init__(self, theta_max):
        # Filter / sufficient-decrease parameters.
        self.theta_max = theta_max      # (theta(x0), infty)
        self.gamma_theta = .01          # (0,1)
        self.gamma_f = .75              # (0,1)
        self.delta = .01                # (0,infty)
        self.gamma_alpha = .5           # (0,1]
        self.s_theta = 2                # (1,infty)
        self.s_f = 3                    # [1,infty)
        self.eta_f = .025               # (0, .5)
        # Backtracking interval [tau_one, tau_two]; tau is its midpoint.
        self.tau_one = .25              # (0, tau_two]
        self.tau_two = .75              # [tau_two, 1)
        self.tau = (self.tau_one + self.tau_two) / 2
        # Miscellaneous switches.
        self.plot = True                # generate all plots?
        self.max_condition_number = 1000
class Result:
    """Aggregated statistics of one run of the filter line search."""

    def __init__(self):
        # We increment by one at the top of the first iteration, hence -1.
        self.number_of_iterations = -1
        self.restorations = 0
        self.ftype_iterations = 0
        self.filter_modified_count = 0
        self.pareto = NonDomSet()
        self.success = False
        self.f_min = infinity
        self.x_min = 0
        self.filterRejectedCount = 0
        self.criteria_satifisfied_but_trust_region_not = 0

    def newF(self, otherX, otherF):
        """Record (otherX, otherF) when it is at least as good as the best seen."""
        if otherF <= self.f_min:
            self.f_min = otherF
            self.x_min = otherX
def theta(statement, x):
    """Constraint-violation measure at x (norm of the active constraint values)."""
    return getThetaAndIneq(statement, x)[0]
def getThetaAndIneq(statement, x):
    """Return (theta, cIneq) at x.

    theta is the norm of the stacked equality + active-inequality constraint
    values; cIneq holds the raw inequality-constraint values (empty when the
    problem has no inequality constraints).
    """
    # The superseded hand-rolled implementation that used to live here as a
    # comment block has been removed; getConstraintInfo is the single source
    # of truth for constraint evaluation.
    c, _, ineq = getConstraintInfo(statement, x)
    return norm(c), ineq
def getConstraintInfo(statement, x):
    """Collect constraint values and Jacobian rows at x.

    Returns (c, A, cIneqAll): the stacked equality + *active* inequality
    constraint values, the matching Jacobian, and all raw inequality values.
    Returns (None, None, None) when the problem has no constraints at all.
    """
    if statement.hasEqualityConstraints():
        cEq = statement.equalityConstraints(x)
        aEq = statement.equalityConstraintsJacobian(x)
        if not statement.hasInequalityConstraints():
            # Equality-only problem: no raw inequality values to report.
            return cEq, aEq, empty(0)
    if statement.hasInequalityConstraints():
        cIneqAll = statement.inequalityConstraints(x)
        aIneqAll = statement.inequalityConstraintsJacobian(x)
        # An inequality counts as active when its value is within tol of the
        # boundary, or violated.
        active = cIneqAll > -statement.tol
        cIneqActive = cIneqAll[active]
        aIneqActive = aIneqAll[active]
        if statement.hasEqualityConstraints():
            c = concatenate([cEq, cIneqActive])
            A = blockmat([[aEq], [aIneqActive]])
            return c, A, cIneqAll
        else:
            return cIneqActive, aIneqActive, cIneqAll
    # Unconstrained problem.
    return None, None, None
# don't check constraints that are currently active going to false...
def addedActiveConstraint(newIneq, cIneq, tol):
    """True when a step would activate a currently inactive constraint.

    Only indices whose current value is below tol (currently inactive) are
    examined; a strictly positive new value there means the step crosses
    into that constraint. Comparison with zero rather than tol is kept from
    the original implementation.
    """
    inactive_now = (i for i, current in enumerate(cIneq) if current < tol)
    return any(newIneq[i] > 0 for i in inactive_now)
class AlgorithmState:
    """Mutable per-iteration state of the filter line search."""

    def __init__(self, statement):
        self.x = statement.x0
        self.grad = 0
        self.pareto = NonDomSet()
        self.f = infinity
        # NOTE(review): grad is assigned twice (0 above, None here); the
        # second assignment wins -- the first looks vestigial.
        self.grad = None
        self.hess = None
        self.A = None        # Jacobian of the active constraints
        self.c = None        # active constraint values
        self.cIneq = None    # raw inequality-constraint values
        self.d = empty(len(self.x))  # search-direction buffer
        self.x_new = None
        self.ftype = False   # True when the current step is an f-type step
        self.accept = False
        self.theta = None    # constraint violation at x

    def setCurrentIterate(self, statement):
        """Evaluate objective, derivatives, and constraints at the current x."""
        self.f = statement.objective(self.x)
        self.grad = statement.gradient(self.x)
        self.hess = statement.hessian(self.x)
        self.theta = theta(statement, self.x)
        self.c, self.A, self.cIneq = getConstraintInfo(statement, self.x)

    def createKKT(self):
        """Assemble the KKT matrix [[H, A^T], [A, 0]]; just H if unconstrained."""
        if self.A is None:
            return self.hess
        m = self.getM()
        return blockmat([[self.hess, self.A.T], [self.A, zeros((m, m))]])

    def createRhs(self):
        """Right-hand side [grad; c], matching createKKT's block layout."""
        if self.A is None:
            return self.grad
        return concatenate([self.grad, self.c])

    def getN(self):
        """Dimension of the decision variable."""
        return len(self.x)

    def getM(self):
        """Number of active constraint rows (0 when unconstrained)."""
        if self.A is None:
            return 0
        return self.A.shape[0]

    def show(self, statement):
        """Plot the iterate: accepted step (green) and -gradient (yellow)."""
        fileName = statement.createBasePlotAt(self.x)
        # NOTE(review): self.model is never assigned in this class --
        # presumably attached externally; confirm.
        self.model.addPointsToPlot()
        totalDist = norm(self.x_new - self.x)
        # Arrow-head size proportional to the step length.
        hw = .1 * totalDist
        hl = .1 * totalDist
        plt.arrow(x=self.x[0], y=self.x[1],
                  dx=(self.x_new[0] - self.x[0]), dy=(self.x_new[1] - self.x[1]),
                  head_width=hw, head_length=hl, fc='g', ec='g')
        # Negative gradient direction, scaled to the same length as the step.
        plt.arrow(x=self.x[0], y=self.x[1],
                  dx=-totalDist * self.grad[0] / norm(self.grad),
                  dy=-totalDist * self.grad[1] / norm(self.grad),
                  head_width=hw, head_length=hl, fc='y', ec='y')
        plt.savefig(fileName)
        plt.close()
def checkStoppingCriteria(statement, state):
    """Approximate first-order KKT test at the current iterate.

    Returns True when the iterate is (approximately) feasible and the
    gradient is stationary with suitably signed multipliers.
    """
    hasConstraints = statement.hasEqualityConstraints() or statement.hasInequalityConstraints()
    if not hasConstraints:
        # Unconstrained: stationarity alone decides.
        return norm(state.grad) < statement.tol
    if statement.hasConstraints() and (state.c > statement.tol).any():
        # Some active constraint value is still violated.
        return False
    # Least-squares multiplier estimate from A^T lambda = -grad.
    lmbda, _, _, _ = lstsq(state.A.T, -state.grad)
    # Stationarity of the Lagrangian: grad + A^T lambda ~ 0.
    if norm(state.grad + dot(state.A.T, lmbda)) > statement.tol:
        return False
    if statement.hasInequalityConstraints():
        # Inequality multipliers must be non-negative (up to tol); they
        # follow the equality multipliers in lmbda.
        numEqualityConstraints = statement.getNumEqualityConstraints()
        if any(lmbda[numEqualityConstraints:len(lmbda)] < -statement.tol):
            return False
    return True
def compute_alpha_min(statement, constants, state):
    """Smallest step length the backtracking line search is allowed to try.

    Follows the filter line-search rule: when d is a descent direction the
    bound combines the filter margins with the switching-condition constants;
    otherwise only the simple gamma_alpha * gamma_theta bound applies.
    """
    descent = dot(state.grad.T, state.d)
    if descent >= -statement.tol:
        # Not a (sufficient) descent direction: simple fallback bound.
        return constants.gamma_alpha * constants.gamma_theta
    return constants.gamma_alpha * min(
        constants.gamma_theta,
        -constants.gamma_f * state.theta / descent,
        (constants.delta * state.theta ** constants.s_theta)
        / ((-descent) ** constants.s_f))
def restore_feasibility(statement, x0):
    """Minimize the constraint violation theta starting from x0.

    Uses derivative-free Nelder-Mead since theta is nonsmooth at constraint
    boundaries; returns the (approximately) most feasible point found.
    """
    res = minimize(lambda x: theta(statement, x), x0, method='Nelder-Mead', options={'xtol': 1e-8, 'disp': False, 'maxfev': 1000})
    return res.x
def filter_line_search(program, constants):
    """Run the filter line-search method on `program`.

    Outer loop: evaluate the iterate, test KKT, solve the KKT system for a
    step d. Inner loop: backtrack on alpha until the filter accepts the
    trial point or alpha falls below alpha_min (then restore feasibility).
    Returns a Result with run statistics.
    """
    results = Result()
    state = AlgorithmState(program)
    while True:
        results.number_of_iterations += 1
        print(results.number_of_iterations)
        state.setCurrentIterate(program)
        n = state.getN()
        if checkStoppingCriteria(program, state):
            if not program.converged():
                # KKT holds on the model but the trust region has not
                # converged yet; keep iterating.
                results.criteria_satifisfied_but_trust_region_not += 1
                continue
            results.newF(state.x, state.f)
            results.success = True
            break
        kktmat = state.createKKT()
        state.cond = condition_number(kktmat)
        if state.cond > constants.max_condition_number:
            # KKT system too ill-conditioned to trust; restore feasibility.
            results.restorations += 1
            state.x = restore_feasibility(program, state.x)
            continue
        rhs = state.createRhs()
        vec = linsolve(kktmat, rhs.T)
        # First n components of the KKT solution give the (negated) step.
        state.d[:] = -vec[0:n]
        state.alpha_min = compute_alpha_min(program, constants, state)
        state.alpha = 1
        state.accept = False
        gDotd = dot(state.grad.T, state.d)
        while not state.accept:
            # Predicted linear decrease for the current alpha.
            m = state.alpha * gDotd
            # Hack, maybe: clip to trust region: this should be solved in the subproblem!!!
            state.d = program.clipToTrustRegion(state.d)
            if state.alpha < state.alpha_min:
                # Backtracked too far; fall back to a restoration phase.
                state.x = restore_feasibility(program, state.x)
                results.restorations += 1
                break
            state.x_new = state.x + state.alpha * state.d
            state.theta_new, newIneq = getThetaAndIneq(program, state.x_new)
            state.f_new = program.objective(state.x_new)
            if norm(state.d) * state.alpha < program.model.modelRadius / 4:
                # Step much smaller than the model radius: shrink and refit.
                program.model.multiplyRadius(program.radius_decrease)
                program._improve()
            # If we are about to add a constraint that was not active, then don't
            if addedActiveConstraint(newIneq, state.cIneq, program.tol):
                state.alpha = state.alpha * constants.tau
                continue
            if constants.plot:
                state.show(program)
            # Reject trial points dominated by the current filter.
            if results.pareto.is_dominated((state.theta_new, state.f_new)):
                state.alpha = state.alpha * constants.tau
                results.filterRejectedCount += 1
                continue
            # Switching condition: is this an f-type (objective-driven) step?
            state.ftype = m < 0 and ((-m)**constants.s_f * state.alpha**(1-constants.s_f) > constants.delta * state.theta ** constants.s_theta);
            if state.ftype:
                # Armijo-like sufficient decrease on f.
                if state.f_new <= state.f + constants.eta_f * m:
                    state.accept = True
            else:
                # Sufficient progress in either feasibility (8a) or f (8b).
                eight_a = state.theta_new <= (1-constants.gamma_theta) * state.theta
                eight_b = state.f_new <= state.f - constants.gamma_f * state.theta_new
                if eight_a or eight_b:
                    state.accept = True
            state.alpha = state.alpha * constants.tau
        if state.accept:
            if not program.acceptable(state.x_new):
                continue
            if state.ftype:
                results.ftype_iterations += 1
            # Augment the filter unless the trial point is near-feasible.
            if (1-constants.gamma_theta) * state.theta_new > program.tol:
                results.pareto.add(((1 - constants.gamma_theta) * state.theta_new, state.f_new - constants.gamma_f * state.theta_new))
                results.filter_modified_count += 1
            state.x = state.x_new
    return results
|
ofgulban/scikit-image | refs/heads/ncut-rag-options | skimage/feature/tests/test_match.py | 11 | import numpy as np
from numpy.testing import assert_equal, assert_raises
from skimage import data
from skimage import transform as tf
from skimage.color import rgb2gray
from skimage.feature import (BRIEF, match_descriptors,
corner_peaks, corner_harris)
def test_binary_descriptors_unequal_descriptor_sizes_error():
    """Sizes of descriptors of keypoints to be matched should be equal."""
    descs1 = np.array([[True, True, False, True],
                       [False, True, False, True]])
    descs2 = np.array([[True, False, False, True, False],
                       [False, True, True, True, False]])  # 5 bits vs 4
    assert_raises(ValueError, match_descriptors, descs1, descs2)
def test_binary_descriptors():
    """Hamming matching of two tiny binary descriptor sets pairs 0-0 and 1-1."""
    descs1 = np.array([[True, True, False, True, True],
                       [False, True, False, True, True]])
    descs2 = np.array([[True, False, False, True, False],
                       [False, False, True, True, True]])
    matches = match_descriptors(descs1, descs2)
    assert_equal(matches, [[0, 0], [1, 1]])
def test_binary_descriptors_rotation_crosscheck_false():
    """Verify matched keypoints and their corresponding masks results between
    image and its rotated version with the expected keypoint pairs with
    cross_check disabled."""
    img = data.astronaut()
    img = rgb2gray(img)
    # Rotate by 0.15 rad; without cross-check every descriptor in image 1
    # gets its nearest neighbour in image 2, duplicates allowed.
    tform = tf.SimilarityTransform(scale=1, rotation=0.15, translation=(0, 0))
    rotated_img = tf.warp(img, tform, clip=False)

    extractor = BRIEF(descriptor_size=512)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(img, keypoints1)
    descriptors1 = extractor.descriptors

    keypoints2 = corner_peaks(corner_harris(rotated_img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(rotated_img, keypoints2)
    descriptors2 = extractor.descriptors

    matches = match_descriptors(descriptors1, descriptors2, cross_check=False)

    # Regression values recorded for this fixture.
    exp_matches1 = np.array([ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11,
                             12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
                             24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
                             36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46])
    exp_matches2 = np.array([ 0, 31,  2,  3,  1,  4,  6,  4, 38,  5, 27,  7,
                             13, 10,  9, 27,  7, 11, 15,  8, 23, 14, 12, 16,
                             10, 25, 18, 19, 21, 20, 41, 24, 25, 26, 28, 27,
                             22, 23, 29, 30, 31, 32, 35, 33, 34, 30, 36])
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)
def test_binary_descriptors_rotation_crosscheck_true():
    """Verify matched keypoints and their corresponding masks results between
    image and its rotated version with the expected keypoint pairs with
    cross_check enabled."""
    img = data.astronaut()
    img = rgb2gray(img)
    # With cross-check enabled only mutual nearest neighbours survive, so
    # fewer (and unique) pairs are expected than in the cross_check=False test.
    tform = tf.SimilarityTransform(scale=1, rotation=0.15, translation=(0, 0))
    rotated_img = tf.warp(img, tform, clip=False)

    extractor = BRIEF(descriptor_size=512)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(img, keypoints1)
    descriptors1 = extractor.descriptors

    keypoints2 = corner_peaks(corner_harris(rotated_img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(rotated_img, keypoints2)
    descriptors2 = extractor.descriptors

    matches = match_descriptors(descriptors1, descriptors2, cross_check=True)

    # Regression values recorded for this fixture.
    exp_matches1 = np.array([ 0,  2,  3,  4,  5,  6,  9, 11, 12, 13, 14, 17,
                             18, 19, 21, 22, 23, 26, 27, 28, 29, 31, 32, 33,
                             34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46])
    exp_matches2 = np.array([ 0,  2,  3,  1,  4,  6,  5,  7, 13, 10,  9, 11,
                             15,  8, 14, 12, 16, 18, 19, 21, 20, 24, 25, 26,
                             28, 27, 22, 23, 29, 30, 31, 32, 35, 33, 34, 36])
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)
def test_max_distance():
    """Exercise the ``max_distance`` cutoff of ``match_descriptors``.

    One descriptor in the first set is all-ones, so its distance to any
    zero descriptor is sqrt(128); every other pairing has distance 0.
    """
    desc_a = np.zeros((10, 128))
    desc_b = np.zeros((15, 128))
    desc_a[0, :] = 1

    # Tight radius: the all-ones descriptor finds no partner.
    result = match_descriptors(desc_a, desc_b, metric='euclidean',
                               max_distance=0.1, cross_check=False)
    assert len(result) == 9

    # Radius just above sqrt(128): every descriptor matches.
    result = match_descriptors(desc_a, desc_b, metric='euclidean',
                               max_distance=np.sqrt(128.1),
                               cross_check=False)
    assert len(result) == 10

    # With cross-check, identical zero rows collapse to one mutual match.
    result = match_descriptors(desc_a, desc_b, metric='euclidean',
                               max_distance=0.1,
                               cross_check=True)
    assert_equal(result, [[1, 0]])

    result = match_descriptors(desc_a, desc_b, metric='euclidean',
                               max_distance=np.sqrt(128.1),
                               cross_check=True)
    assert_equal(result, [[1, 0]])
if __name__ == '__main__':
    # Allow running this test module stand-alone via numpy's test runner.
    from numpy import testing
    testing.run_module_suite()
|
amitsaha/learning | refs/heads/master | recipes/print_all_leaders.py | 1 | '''
Print all leaders from an array:
A leader is an element for which all the numbers to it's right are
either equal or less than it
'''
from __future__ import print_function
def find_leaders(arr):
    """Print every leader of *arr*, one per line, scanning right to left.

    A leader is an element that is greater than or equal to every element
    to its right; the last element is always a leader.  Leaders are
    printed in the order they are discovered (rightmost first).  An empty
    array prints nothing (previously this raised IndexError on arr[-1]).
    """
    if not arr:
        return
    # Running maximum of the suffix scanned so far; renamed from ``max``
    # to stop shadowing the builtin.
    suffix_max = arr[-1]
    for elem in arr[::-1]:
        if elem >= suffix_max:
            print(elem)
            suffix_max = elem
# Demo invocations: print the leaders of two sample arrays.
find_leaders([10, 2, 3, -1, -2, 5, 4, 3, 2 ,1])
find_leaders([10, 2, 3, 5, 3, 2 ,1])
|
Epse/EpPos | refs/heads/master | web/pos/helper.py | 1 | from django.contrib.auth.models import User
from .models import Order, Cash, Order_Item, Setting
def get_currency():
    """Return the shop's currency symbol from the ``Setting`` table.

    On first access the row is created with the euro sign as default.
    """
    setting, created = Setting.objects.get_or_create(key="currency")
    if created:
        setting.value = "€"
        setting.save()
    return setting.value
def get_company():
    """Return the configured company name from the ``Setting`` table.

    The row is created with the default name ``EpPos`` on first access.
    """
    record, was_created = Setting.objects.get_or_create(key="company")
    if was_created:
        record.value = "EpPos"
        record.save()
    return record.value
def get_can_negative_stock():
    """Return True when selling below zero stock is allowed.

    The flag lives in the ``Setting`` row keyed ``negative_stock``, which
    is created as "off" (disallowed) on first access.  Any stored value
    other than "off" or "no" enables negative stock.
    """
    setting, is_created = Setting.objects.get_or_create(key="negative_stock")
    if is_created:
        setting.value = "off"
        setting.save()
        return False
    # Fix: removed a leftover debug ``print(setting.value)`` that spammed
    # the server log on every stock check.
    return setting.value not in ("off", "no")
def setup_handling(request):
    """Boilerplate for POS views: return the singleton cash register, the
    requesting user's open order and the shop currency as a 3-tuple."""
    register, _ = Cash.objects.get_or_create(id=0)
    open_order = get_current_user_order(request.user.username)
    symbol = get_currency()
    return (register, open_order, symbol)
def get_current_user_order(username):
    """Return the most recently changed unfinished order for *username*,
    creating a fresh one when the user has no open order."""
    usr = User.objects.get_by_natural_key(username)
    # ``first()`` fetches the newest open order in a single query instead
    # of the previous ``count()`` + index pair (two round-trips).
    order = Order.objects.filter(user=usr, done=False)\
                         .order_by('-last_change').first()
    if order is not None:
        return order
    return Order.objects.create(user=usr)
def order_item_from_product(product, order):
    """Create a new ``Order_Item`` for *order* from *product*.

    The product's price and name are copied onto the line item —
    presumably so later product edits do not rewrite past orders
    (TODO confirm against callers).
    """
    return Order_Item.objects.create(
        product=product,
        order=order,
        price=product.price,
        name=product.name,
    )
def product_list_from_order(order):
    """Return the ``Product`` of every ``Order_Item`` belonging to *order*.

    Duplicates are preserved: one entry per line item, mirroring the
    original append loop (now an idiomatic list comprehension).
    """
    return [item.product for item in Order_Item.objects.filter(order=order)]
|
jlspyaozhongkai/Uter | refs/heads/master | third_party_backup/Python-2.7.9/Lib/stat.py | 319 | """Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# Indices for stat struct members in the tuple returned by os.stat()
ST_MODE = 0    # protection bits and file type
ST_INO = 1     # inode number
ST_DEV = 2     # device
ST_NLINK = 3   # number of hard links
ST_UID = 4     # owner user id
ST_GID = 5     # owner group id
ST_SIZE = 6    # size in bytes
ST_ATIME = 7   # last access time
ST_MTIME = 8   # last modification time
ST_CTIME = 9   # metadata change time (creation time on some platforms)
# Extract bits from the mode
def S_IMODE(mode):
    """Return the permission bits of *mode* (low 12 bits, including
    set-uid/set-gid/sticky)."""
    return mode & 07777
def S_IFMT(mode):
    """Return the file-type bits of *mode*, for comparison with S_IF*."""
    return mode & 0170000
# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)
S_IFDIR = 0040000    # directory
S_IFCHR = 0020000    # character device
S_IFBLK = 0060000    # block device
S_IFREG = 0100000    # regular file
S_IFIFO = 0010000    # FIFO / named pipe
S_IFLNK = 0120000    # symbolic link
S_IFSOCK = 0140000   # socket
# Functions to test for each file type
def S_ISDIR(mode):
    """True if *mode* comes from a directory."""
    return S_IFMT(mode) == S_IFDIR
def S_ISCHR(mode):
    """True if *mode* comes from a character device."""
    return S_IFMT(mode) == S_IFCHR
def S_ISBLK(mode):
    """True if *mode* comes from a block device."""
    return S_IFMT(mode) == S_IFBLK
def S_ISREG(mode):
    """True if *mode* comes from a regular file."""
    return S_IFMT(mode) == S_IFREG
def S_ISFIFO(mode):
    """True if *mode* comes from a FIFO (named pipe)."""
    return S_IFMT(mode) == S_IFIFO
def S_ISLNK(mode):
    """True if *mode* comes from a symbolic link."""
    return S_IFMT(mode) == S_IFLNK
def S_ISSOCK(mode):
    """True if *mode* comes from a socket."""
    return S_IFMT(mode) == S_IFSOCK
# Names for permission bits
S_ISUID = 04000    # set user id on execution
S_ISGID = 02000    # set group id on execution
S_ENFMT = S_ISGID  # record locking enforcement (same bit as S_ISGID)
S_ISVTX = 01000    # sticky bit
S_IREAD = 00400    # Unix V7 synonym for S_IRUSR
S_IWRITE = 00200   # Unix V7 synonym for S_IWUSR
S_IEXEC = 00100    # Unix V7 synonym for S_IXUSR
S_IRWXU = 00700    # owner: read/write/execute mask
S_IRUSR = 00400    # owner: read
S_IWUSR = 00200    # owner: write
S_IXUSR = 00100    # owner: execute
S_IRWXG = 00070    # group: read/write/execute mask
S_IRGRP = 00040    # group: read
S_IWGRP = 00020    # group: write
S_IXGRP = 00010    # group: execute
S_IRWXO = 00007    # others: read/write/execute mask
S_IROTH = 00004    # others: read
S_IWOTH = 00002    # others: write
S_IXOTH = 00001    # others: execute
# Names for file flags
UF_NODUMP = 0x00000001      # do not dump file
UF_IMMUTABLE = 0x00000002   # file may not be changed
UF_APPEND = 0x00000004      # file may only be appended to
UF_OPAQUE = 0x00000008      # directory is opaque when viewed through a union stack
UF_NOUNLINK = 0x00000010    # file may not be renamed or deleted
UF_COMPRESSED = 0x00000020 # OS X: file is hfs-compressed
UF_HIDDEN = 0x00008000 # OS X: file should not be displayed
SF_ARCHIVED = 0x00010000    # file may be archived
SF_IMMUTABLE = 0x00020000   # file may not be changed (superuser flag)
SF_APPEND = 0x00040000      # file may only be appended to (superuser flag)
SF_NOUNLINK = 0x00100000    # file may not be renamed or deleted (superuser flag)
SF_SNAPSHOT = 0x00200000    # file is a snapshot file
|
rledisez/shinken | refs/heads/master | test/test_nested_hostgroups.py | 17 | #!/usr/bin/env python
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class TestNestedHostgroups(ShinkenTest):
    """Verify that services applied to a hostgroup propagate to the hosts
    of its nested sub-groups as well as to its direct members."""
    def setUp(self):
        # Config defines a 'high_level' group containing a 'low_level' group.
        self.setup_with_file('etc/shinken_nested_hostgroups.cfg')
    # The service "NestedService" is applied to the high-level group.
    # That group has a sub-group (the low one); each group has ONE host,
    # so the service must exist on both hosts.
    def test_lookup_nested_hostgroups(self):
        host = self.sched.hosts.find_by_name("test_host_0")
        router = self.sched.hosts.find_by_name("test_router_0")
        # Both hosts must be members of the parent group.
        hg_high = self.sched.conf.hostgroups.find_by_name('high_level')
        self.assertIsNot(hg_high, None)
        self.assertIn(host, hg_high.members)
        self.assertIn(router, hg_high.members)
        # Only the directly-assigned host belongs to the sub-group.
        hg_low = self.sched.conf.hostgroups.find_by_name('low_level')
        self.assertIsNot(hg_low, None)
        self.assertIn(host, hg_low.members)
        # The group-level service must have been expanded onto both hosts.
        svc1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "NestedService")
        self.assertIsNot(svc1, None)
        svc2 = self.sched.services.find_srv_by_name_and_hostname("test_router_0", "NestedService")
        self.assertIsNot(svc2, None)
        # And now look for the service testHostToGroup apply on the group
        # high_level, and the host test_host_2 should be on it, so it must have
        # this service too
        host2 = self.sched.hosts.find_by_name("test_host_2")
        self.assertIn(host2, hg_high.members)
        svc3 = self.sched.services.find_srv_by_name_and_hostname("test_host_2", "testHostToGroup")
        self.assertIsNot(svc3, None)
        # And same with a host in the low_group, should have it too
        host3 = self.sched.hosts.find_by_name("test_host_3")
        self.assertIn(host3, hg_high.members)
        svc4 = self.sched.services.find_srv_by_name_and_hostname("test_host_3", "testHostToGroup")
        self.assertIsNot(svc4, None)
if __name__ == '__main__':
    # Run this test module stand-alone.
    unittest.main()
|
holmes/intellij-community | refs/heads/master | python/testData/quickFixes/AddCallSuperQuickFixTest/newStyle_after.py | 80 |
class A(object):
def __init__(self):
a = 1
class C(A):
def __init__(self):
super(C, self).__init__()
def foo(self):
pass |
CredoReference/edx-platform | refs/heads/integration-hawthorn-qa | lms/djangoapps/commerce/migrations/0004_auto_20160531_0950.py | 50 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds two fields to CommerceConfiguration: a cache TTL in seconds
    # (0 disables caching) and the path of the order receipt page.
    dependencies = [
        ('commerce', '0003_auto_20160329_0709'),
    ]
    operations = [
        migrations.AddField(
            model_name='commerceconfiguration',
            name='cache_ttl',
            field=models.PositiveIntegerField(default=0, help_text='Specified in seconds. Enable caching by setting this to a value greater than 0.', verbose_name='Cache Time To Live'),
        ),
        migrations.AddField(
            model_name='commerceconfiguration',
            name='receipt_page',
            field=models.CharField(default=b'/commerce/checkout/receipt/?orderNum=', help_text='Path to order receipt page.', max_length=255),
        ),
    ]
|
hep-gc/glint-horizon | refs/heads/master | horizon/forms/fields.py | 2 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import netaddr
from django.core.exceptions import ValidationError # noqa
from django.core import urlresolvers
from django.forms import fields
from django.forms import widgets
from django.utils.encoding import force_unicode
from django.utils.functional import Promise # noqa
from django.utils import html
from django.utils.translation import ugettext_lazy as _
ip_allowed_symbols_re = re.compile(r'^[a-fA-F0-9:/\.]+$')
IPv4 = 1
IPv6 = 2
class IPField(fields.Field):
    """Form field for entering IP/range values, with validation.
    Supports IPv4/IPv6 in the format:
    .. xxx.xxx.xxx.xxx
    .. xxx.xxx.xxx.xxx/zz
    .. ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
    .. ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/zz
    and all compressed forms. Also the short forms
    are supported:
    xxx/yy
    xxx.xxx/yy
    .. attribute:: version
    Specifies which IP version to validate,
    valid values are 1 (fields.IPv4), 2 (fields.IPv6) or
    both - 3 (fields.IPv4 | fields.IPv6).
    Defaults to IPv4 (1)
    .. attribute:: mask
    Boolean flag to validate subnet masks along with IP address.
    E.g: 10.0.0.1/32
    .. attribute:: mask_range_from
    Subnet range limitation, e.g. 16
    That means the input mask will be checked to be in the range
    16:max_value. Useful to limit the subnet ranges
    to A/B/C-class networks.
    """
    invalid_format_message = _("Incorrect format for IP address")
    invalid_version_message = _("Invalid version for IP address")
    invalid_mask_message = _("Invalid subnet mask")
    # Maximum prefix lengths for each protocol version.
    max_v4_mask = 32
    max_v6_mask = 128
    def __init__(self, *args, **kwargs):
        # Pop our custom options before handing the rest to fields.Field.
        self.mask = kwargs.pop("mask", None)
        self.min_mask = kwargs.pop("mask_range_from", 0)
        self.version = kwargs.pop('version', IPv4)
        super(IPField, self).__init__(*args, **kwargs)
    def validate(self, value):
        """Parse *value* with netaddr and enforce version/mask constraints.

        Side effect: stores the parsed object on ``self.ip`` for clean().
        """
        super(IPField, self).validate(value)
        if not value and not self.required:
            return
        try:
            if self.mask:
                self.ip = netaddr.IPNetwork(value)
            else:
                self.ip = netaddr.IPAddress(value)
        except Exception:
            # Broad on purpose: any netaddr parse failure becomes a
            # uniform form validation error.
            raise ValidationError(self.invalid_format_message)
        # self.version is a bitmask: IPv4 (1), IPv6 (2) or both (3).
        if not any([self.version & IPv4 > 0 and self.ip.version == 4,
                    self.version & IPv6 > 0 and self.ip.version == 6]):
            raise ValidationError(self.invalid_version_message)
        if self.mask:
            if self.ip.version == 4 and \
                    not self.min_mask <= self.ip.prefixlen <= self.max_v4_mask:
                raise ValidationError(self.invalid_mask_message)
            if self.ip.version == 6 and \
                    not self.min_mask <= self.ip.prefixlen <= self.max_v6_mask:
                raise ValidationError(self.invalid_mask_message)
    def clean(self, value):
        """Return the normalized string form of the validated address.

        ``self.ip`` may be unset when the value was empty and the field is
        not required, hence the getattr default.
        """
        super(IPField, self).clean(value)
        return str(getattr(self, "ip", ""))
class MultiIPField(IPField):
    """IPField variant that accepts a comma-separated list of addresses;
    each element must individually pass IPField validation."""
    def validate(self, value):
        self.addresses = []
        if not value:
            # Delegate so 'required' handling matches the single-value field.
            super(MultiIPField, self).validate(value)
            return
        for address in value.split(','):
            super(MultiIPField, self).validate(address)
            self.addresses.append(address)
    def clean(self, value):
        super(MultiIPField, self).clean(value)
        return str(','.join(getattr(self, "addresses", [])))
class SelectWidget(widgets.Select):
    """Customizable select widget, that allows to render
    data-xxx attributes from choices.
    .. attribute:: data_attrs
    Specifies object properties to serialize as
    data-xxx attribute. If passed ('id', ),
    this will be rendered as:
    <option data-id="123">option_value</option>
    where 123 is the value of choice_value.id
    .. attribute:: transform
    A callable used to render the display value
    from the option object.
    """
    def __init__(self, attrs=None, choices=(), data_attrs=(), transform=None):
        self.data_attrs = data_attrs
        self.transform = transform
        super(SelectWidget, self).__init__(attrs, choices)
    def render_option(self, selected_choices, option_value, option_label):
        """Render one <option>, adding data-* attributes and applying the
        optional display transform when the label is an object rather than
        a plain string."""
        option_value = force_unicode(option_value)
        other_html = (option_value in selected_choices) and \
            u' selected="selected"' or ''
        # Plain strings (and lazy translation Promises) carry no data
        # attributes; only object labels are serialized/transformed.
        if not isinstance(option_label, (basestring, Promise)):
            for data_attr in self.data_attrs:
                data_value = html.conditional_escape(
                    force_unicode(getattr(option_label,
                                          data_attr, "")))
                other_html += ' data-%s="%s"' % (data_attr, data_value)
            if self.transform:
                option_label = self.transform(option_label)
        return u'<option value="%s"%s>%s</option>' % (
            html.escape(option_value), other_html,
            html.conditional_escape(force_unicode(option_label)))
class DynamicSelectWidget(widgets.Select):
    """A ``Select`` widget that advertises, via a data attribute, the URL
    at which a new choice can be created, so client-side callbacks can
    offer an "add item" action next to the field."""
    _data_add_url_attr = "data-add-item-url"
    def render(self, *args, **kwargs):
        url = self.get_add_item_url()
        if url is not None:
            self.attrs[self._data_add_url_attr] = url
        return super(DynamicSelectWidget, self).render(*args, **kwargs)
    def get_add_item_url(self):
        """Resolve ``add_item_link`` to a URL.

        A callable is invoked directly; otherwise the value is treated as
        a named URL (reversed with optional args) and returned verbatim
        when reversing fails.
        """
        if callable(self.add_item_link):
            return self.add_item_link()
        try:
            if self.add_item_link_args:
                return urlresolvers.reverse(self.add_item_link,
                                            args=[self.add_item_link_args])
            return urlresolvers.reverse(self.add_item_link)
        except urlresolvers.NoReverseMatch:
            return self.add_item_link
class DynamicChoiceField(fields.ChoiceField):
    """A subclass of ``ChoiceField`` with additional properties that make
    dynamically updating its elements easier.
    Notably, the field declaration takes an extra argument, ``add_item_link``
    which may be a string or callable defining the URL that should be used
    for the "add" link associated with the field.
    """
    widget = DynamicSelectWidget
    def __init__(self,
                 add_item_link=None,
                 add_item_link_args=None,
                 *args,
                 **kwargs):
        super(DynamicChoiceField, self).__init__(*args, **kwargs)
        # Stash the link info on the widget; DynamicSelectWidget turns it
        # into a data-add-item-url attribute at render time.
        self.widget.add_item_link = add_item_link
        self.widget.add_item_link_args = add_item_link_args
class DynamicTypedChoiceField(DynamicChoiceField, fields.TypedChoiceField):
    """Simple mix of ``DynamicChoiceField`` and ``TypedChoiceField``."""
    # Pure mixin combination: "add item" link support plus value coercion;
    # no extra members are needed.
    pass
|
julian-seward1/servo | refs/heads/master | tests/wpt/harness/wptrunner/config.py | 196 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import ConfigParser
import os
import sys
from collections import OrderedDict
here = os.path.split(__file__)[0]
class ConfigDict(dict):
    """Dict of config values that can resolve path-valued entries relative
    to the directory of the config file they came from."""
    def __init__(self, base_path, *args, **kwargs):
        # Directory against which relative paths in this section resolve.
        self.base_path = base_path
        dict.__init__(self, *args, **kwargs)

    def get_path(self, key, default=None):
        """Return ``self[key]`` interpreted as a filesystem path.

        ``~`` is expanded and relative paths are resolved against
        ``base_path``; *default* is returned unresolved when *key* is
        absent.
        """
        if key not in self:
            return default
        # Bug fix: os.path.expanduser() returns the expanded path — the
        # original discarded its result, so "~/..." entries never worked.
        path = os.path.expanduser(self[key])
        return os.path.abspath(os.path.join(self.base_path, path))
def read(config_path):
    """Parse the ini file at *config_path* into an OrderedDict mapping
    section name -> ConfigDict, expanding %(pwd)s in values.

    Asserts that the parser actually read the file (missing/unreadable
    files are silently skipped by ConfigParser.read otherwise).
    """
    config_path = os.path.abspath(config_path)
    config_root = os.path.split(config_path)[0]
    parser = ConfigParser.SafeConfigParser()
    success = parser.read(config_path)
    assert config_path in success, success
    subns = {"pwd": os.path.abspath(os.path.curdir)}
    rv = OrderedDict()
    for section in parser.sections():
        # Each section resolves relative paths against the config's dir.
        rv[section] = ConfigDict(config_root)
        for key in parser.options(section):
            # raw=False with vars=subns interpolates %(pwd)s in values.
            rv[section][key] = parser.get(section, key, False, subns)
    return rv
def path(argv=None):
    """Locate the wptrunner config file.

    A ``--config PATH`` or ``--config=PATH`` entry in *argv* wins;
    otherwise a ``wptrunner.ini`` in the current directory is used, and
    failing that the default ini bundled next to this package.
    """
    if argv is None:
        argv = []
    config_path = None
    for index, token in enumerate(argv):
        if token == "--config":
            if index + 1 < len(argv):
                config_path = argv[index + 1]
        elif token.startswith("--config="):
            config_path = token.split("=", 1)[1]
        if config_path is not None:
            break
    if config_path is None:
        if os.path.exists("wptrunner.ini"):
            config_path = os.path.abspath("wptrunner.ini")
        else:
            config_path = os.path.join(here, "..", "wptrunner.default.ini")
    return os.path.abspath(config_path)
def load():
    """Read the config file selected by this process's command line."""
    return read(path(sys.argv))
|
tinfoil/phantomjs | refs/heads/master | src/breakpad/src/tools/gyp/test/generator-output/gyptest-rules.py | 151 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies --generator-output= behavior when using rules.
"""
import TestGyp
# Driver for whichever gyp generator is under test.
test = TestGyp.TestGyp()
# Freeze the source tree to prove generation only writes into the
# --generator-output directory.
test.writable(test.workpath('rules'), False)
test.run_gyp('rules.gyp',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='rules')
test.writable(test.workpath('rules'), True)
test.relocate('rules', 'relocate/rules')
test.relocate('gypfiles', 'relocate/gypfiles')
# Re-freeze the sources, but leave the build output directories writable
# so the build itself can proceed.
test.writable(test.workpath('relocate/rules'), False)
test.writable(test.workpath('relocate/rules/build'), True)
test.writable(test.workpath('relocate/rules/subdir1/build'), True)
test.writable(test.workpath('relocate/rules/subdir2/build'), True)
test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
expect = """\
Hello from program.c
Hello from function1.in1
Hello from function2.in1
Hello from define3.in0
Hello from define4.in0
"""
# xcode places built executables relative to the project directory,
# other generators relative to the generated files.
if test.format == 'xcode':
  chdir = 'relocate/rules/subdir1'
else:
  chdir = 'relocate/gypfiles'
test.run_built_executable('program', chdir=chdir, stdout=expect)
# Rule outputs must have landed in the writable rules-out directory.
test.must_match('relocate/rules/subdir2/rules-out/file1.out',
                "Hello from file1.in0\n")
test.must_match('relocate/rules/subdir2/rules-out/file2.out',
                "Hello from file2.in0\n")
test.must_match('relocate/rules/subdir2/rules-out/file3.out',
                "Hello from file3.in1\n")
test.must_match('relocate/rules/subdir2/rules-out/file4.out',
                "Hello from file4.in1\n")
test.pass_test()
|
jinankjain/zamboni | refs/heads/master | apps/zadmin/__init__.py | 22 | from django.shortcuts import render
from django.template import loader
from django.template.response import SimpleTemplateResponse
import jingo
def jinja_for_django(template_name, context=None, **kw):
    """
    If you want to use some built in logic (or a contrib app) but need to
    override the templates to work with Jinja, replace the object's
    render_to_response function with this one. That will render a Jinja
    template through Django's functions. An example can be found in the users
    app.
    """
    if context is None:
        context = {}
    # The Django context_instance is required; flatten all of its layered
    # dicts into the plain context that jingo/Jinja will receive.
    context_instance = kw.pop('context_instance')
    request = context_instance['request']
    for layer in context_instance.dicts:
        context.update(layer)
    return render(request, template_name, context, **kw)
# We monkeypatch SimpleTemplateResponse.rendered_content to use our jinja
# rendering pipeline (most of the time). The exception is the admin app, where
# we render their Django templates and pipe the result through jinja to render
# our page skeleton.
def rendered_content(self):
    """Replacement for SimpleTemplateResponse.rendered_content that routes
    rendering through jingo/Jinja instead of plain Django templates."""
    template = self.template_name
    context_instance = self.resolve_context(self.context_data)
    request = context_instance['request']
    # Gross, let's figure out if we're in the admin.
    if self._current_app == 'admin':
        # Render the admin's Django template first, then pipe the result
        # through Jinja so our page skeleton applies.
        source = loader.render_to_string(template, context_instance)
        template = jingo.env.from_string(source)
        # This interferes with our media() helper.
        if 'media' in self.context_data:
            del self.context_data['media']
    # ``render_to_string`` only accepts a Template instance or a template name,
    # not a list.
    if isinstance(template, (list, tuple)):
        template = loader.select_template(template)
    return jingo.render_to_string(request, template, self.context_data)
# Monkeypatch: make every SimpleTemplateResponse render through the
# jingo pipeline defined above.
SimpleTemplateResponse.rendered_content = property(rendered_content)
|
jordotech/satchmofork | refs/heads/master | scripts/clonesatchmo.py | 6 | #!/usr/bin/env python
"""
This is the installation script for Satchmo. It will create the base Satchmo configuration.
Before running this script, you must have python and pip installed.
It is also recommended that you install Python Imaging using your distribution's
package method.
The simplest way to install Satchmo would be:
pip install -r http://bitbucket.org/chris1610/satchmo/raw/tip/scripts/requirements.txt
pip install -e hg+http://bitbucket.org/chris1610/satchmo/#egg=satchmo
Then run:
python clonesatchmo.py
"""
import os
import shutil
import sys
from random import choice
import re
from optparse import OptionParser
import string
__VERSION__ = "0.3"
def parse_command_line(argv=None):
    """Parse the installer's command-line options.

    argv: optional explicit argument list; defaults to ``sys.argv[1:]``
        (the optparse default), added so callers and tests can inject
        arguments without touching sys.argv.  Returns (options, args).
    """
    usage = 'usage: %prog [options]'
    version = 'Version: %prog ' + '%s' % __VERSION__
    parser = OptionParser(usage=usage, version=version)
    parser.add_option('-s', '--site', action='store', type='string', default='store',
        dest='site_name', help="Top level directory name for the site. [default: %default]")
    parser.add_option('-l', '--localsite', action='store', type='string', default='localsite',
        dest='local_site_name', help="Name for the local application stub. [default: %default]")
    parser.add_option('--skel', action='store', type='string', default=None,
        dest='skeleton_dir', help="Path to the skeleton directory")
    opts, args = parser.parse_args(argv)
    return opts, args
def check_skeleton_dir(skel_dir):
    """Validate a user-supplied skeleton directory.

    Returns a ``(ok, message)`` tuple.  ``None`` is accepted as "detect
    the skeleton later"; any other value must be an existing directory
    containing a ``localsite`` subdirectory.
    """
    if skel_dir is None:
        return (True, "")
    if not os.path.isdir(skel_dir):
        return (False, "Skeleton directory not found. Use --skel=/path/to/satchmo/projects/skeleton")
    if not os.path.isdir(os.path.join(skel_dir, 'localsite')):
        return (False, "Skeleton directory does not contain localsite subdirectory. Use --skel=/path/to/satchmo/projects/skeleton")
    return (True, "")
def create_satchmo_site(site_name, skeleton_dir):
    """
    Copy the project skeleton into ./<site_name> and return (ok, message).

    If we are passed a skeleton_dir, use it.
    If we aren't, we assume the script is being run from the source tree so
    we try to find it, falling back to an installed satchmo_skeleton package.
    If this doesn't work, let the user know they need to specify it manually.
    """
    if skeleton_dir:
        src_dir = os.path.abspath(skeleton_dir)
    else:
        # Look for the skeleton relative to this script's location first.
        clone_dir = os.path.dirname(__file__)
        src_dir = os.path.abspath(os.path.join(clone_dir, '../satchmo/projects/skeleton'))
        result, msg = check_skeleton_dir(src_dir)
        if not result:
            try:
                import satchmo_skeleton
            except ImportError:
                return (False, msg)
            src_dir = os.path.dirname(satchmo_skeleton.__file__)
    # copytree requires that dest_dir not exist; the caller checks this
    # before invoking us.
    dest_dir = os.path.join('./', site_name)
    shutil.copytree(src_dir, dest_dir)
    return (True, "")
def customize_files(site_name, local_site_name):
    """
    We need to make a couple of change to the files copied from the skeleton directory.
    Set the SECRET_KEY to a random value
    Set the ROOT_URLCONF
    Set the DJANGO_PROJECT
    Set the DJANGO_SETTINGS_MODULE
    We also need to change the directory name to local_site_name
    """
    dest_dir = os.path.join('./',site_name)
    # Create a random SECRET_KEY hash, and put it in the main settings.
    # NOTE(review): ``choice`` comes from the ``random`` module — fine for
    # a generated dev settings file, but ``secrets`` would be the modern
    # choice for anything security-sensitive.
    main_settings_file = os.path.join(dest_dir, 'settings.py')
    settings_contents = open(main_settings_file, 'r').read()
    fp = open(main_settings_file, 'w')
    secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
    # Each regex uses a lookbehind to splice the value between the empty
    # quotes the skeleton ships with, e.g. SECRET_KEY = ''.
    settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
    # Configure the other variables that need to be modified
    root_urlconf = site_name + '.urls'
    settings_contents = re.sub(r"(?<=ROOT_URLCONF = ')'", root_urlconf + "'",settings_contents)
    django_settings = site_name + '.settings'
    settings_contents = re.sub(r"(?<=DJANGO_PROJECT = ')'", site_name + "'",settings_contents)
    settings_contents = re.sub(r"(?<=DJANGO_SETTINGS_MODULE = ')'", django_settings + "'",settings_contents)
    local_app = "%s.%s" % (site_name,local_site_name)
    settings_contents = settings_contents.replace("simple.localsite",local_app)
    fp.write(settings_contents)
    fp.close()
    # rename the local_app directory
    os.rename(os.path.join(dest_dir,'localsite'), os.path.join(dest_dir,local_site_name))
def setup_satchmo(site_name, local_site_name):
    """
    Do the final configs for satchmo: copy static files, sync the
    database and load the localization/sample-store data.  Returns a list
    of human-readable error strings (empty on full success).
    """
    # NOTE(review): site_name is interpolated straight into shell commands;
    # a name containing shell metacharacters would break (or worse).  Safe
    # for interactive use but subprocess with an argument list would be
    # more robust — confirm before changing.
    variables = {'site_name':site_name, 'python':sys.executable}
    errors = []
    copy_check = os.system('cd %(site_name)s && %(python)s manage.py satchmo_copy_static' % variables)
    if copy_check != 0:
        errors.append("Can not copy the static files.")
    sync_check = os.system('cd %(site_name)s && %(python)s manage.py syncdb' % variables)
    if sync_check != 0:
        errors.append("Can not syncdb.")
    else:
        # Data loading only makes sense once the tables exist.
        l10n_check = os.system('cd %(site_name)s && %(python)s manage.py satchmo_load_l10n' % variables)
        if l10n_check != 0:
            errors.append("Can not load l10n data.")
        load_check = os.system('cd %(site_name)s && %(python)s manage.py satchmo_load_store' % variables)
        if load_check != 0:
            errors.append("Can not load sample store data.")
        pricing_check = os.system('cd %(site_name)s && %(python)s manage.py satchmo_rebuild_pricing' % variables)
        if pricing_check != 0:
            errors.append("Can not rebuild pricing.")
    return errors
if __name__ == '__main__':
    # Python 2 script (print statements below): validate the environment,
    # copy the skeleton, customize it, then load the initial data.
    opts, args = parse_command_line()
    errors = []
    dest_dir = os.path.join('./',opts.site_name)
    skeleton_dir = opts.skeleton_dir
    # Manual ~ expansion for the --skel argument.
    if skeleton_dir and skeleton_dir.startswith('~/'):
        skeleton_dir = os.path.join(os.environ.get('HOME', ''), skeleton_dir[2:])
    result, msg = check_skeleton_dir(skeleton_dir)
    if not result:
        errors.append(msg)
    if os.path.isdir(dest_dir):
        errors.append("The destination directory already exists. This script can only be used to create new projects.")
    try:
        # Only checks that PIL is importable; the alias is never used.
        import PIL as Image
    except ImportError:
        errors.append("The Python Imaging Library is not installed. Install from your distribution binaries.")
    if not errors:
        print "Creating the Satchmo Application"
        result, msg = create_satchmo_site(opts.site_name, skeleton_dir)
        if not result:
            print msg
            sys.exit()
        print "Customizing the files"
        customize_files(opts.site_name, opts.local_site_name)
        print "Performing initial data synching"
        errors = setup_satchmo(opts.site_name, opts.local_site_name)
    if errors:
        for error in errors:
            print "Error: %s" % error
        sys.exit()
    else:
        print "Store installation complete."
        print "You may run the server by typing: \n cd %s \n python manage.py runserver" % opts.site_name
|
cloudera/hue | refs/heads/master | desktop/libs/notebook/src/notebook/models.py | 2 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from future import standard_library
standard_library.install_aliases()
from builtins import str, object
import datetime
import json
import logging
import math
import numbers
import sys
import uuid
from datetime import timedelta
from django.contrib.sessions.models import Session
from django.db.models import Count
from django.db.models.functions import Trunc
from django.utils.html import escape
from desktop.conf import has_connectors, TASK_SERVER
from desktop.lib.connectors.models import _get_installed_connectors
from desktop.lib.i18n import smart_unicode
from desktop.lib.paths import SAFE_CHARACTERS_URI
from desktop.models import Directory, Document2
from useradmin.models import User, install_sample_user
from notebook.conf import EXAMPLES, get_ordered_interpreters
from notebook.connectors.base import Notebook, get_api as _get_api, get_interpreter
if sys.version_info[0] > 2:
from urllib.parse import quote as urllib_quote
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
from urllib import quote as urllib_quote
LOG = logging.getLogger(__name__)
# Materialize and HTML escape results
def escape_rows(rows, nulls_only=False, encoding=None):
  """Materialize *rows* (any iterable of rows) into a list of lists that is
  safe to ship to the browser.

  NaN/Inf numbers are JSON-encoded, None becomes the string 'NULL', and
  every other value is decoded to unicode (replacing undecodable bytes)
  and, unless *nulls_only* is set, HTML-escaped.
  """
  data = []
  try:
    for row in rows:
      escaped_row = []
      for field in row:
        if isinstance(field, numbers.Number):
          if math.isnan(field) or math.isinf(field):
            # json.dumps renders NaN/Infinity tokens instead of crashing
            # downstream JSON serialization.
            escaped_field = json.dumps(field)
          else:
            escaped_field = field
        elif field is None:
          escaped_field = 'NULL'
        else:
          # Prevent error when getting back non utf8 like charset=iso-8859-1
          escaped_field = smart_unicode(field, errors='replace', encoding=encoding)
          if not nulls_only:
            # NOTE(review): the replacement character below is expected to
            # be a non-breaking space ('&nbsp;' upstream) — verify the
            # literal survived encoding.
            escaped_field = escape(escaped_field).replace(' ', ' ')
        escaped_row.append(escaped_field)
      data.append(escaped_row)
  except RuntimeError:
    pass # pep-0479: expected Py3.8 generator raised StopIteration
  return data
def make_notebook(
    name='Browse', description='', editor_type='hive', statement='', status='ready',
    files=None, functions=None, settings=None, is_saved=False, database='default', snippet_properties=None, batch_submit=False,
    on_success_url=None, skip_historify=False, is_task=False, last_executed=-1, is_notebook=False, pub_sub_url=None, result_properties=None,
    namespace=None, compute=None, is_presentation_mode=False):
  '''
  Build an unsaved Notebook document wrapping a single editor snippet.

  skip_historify: do not add the task to the query history. e.g. SQL Dashboard
  is_task / isManaged: true when being a managed by Hue operation (include_managed=True in document),
    e.g. exporting query result, dropping some tables
  result_properties: extra keys merged into the snippet's result dict.
    Fix: default changed from a shared mutable ``{}`` to ``None`` — it was
    only ever read, so behavior is unchanged, but the mutable-default
    pitfall is gone.
  '''
  from notebook.connectors.hiveserver2 import HS2Api
  if has_connectors():
    interpreter = get_interpreter(connector_type=editor_type)
    editor_connector = editor_type
    editor_type = interpreter['dialect']
  else:
    editor_connector = editor_type
  editor = Notebook()
  if snippet_properties is None:
    snippet_properties = {}
  # Session properties depend on the dialect; only Hive/Impala sessions
  # carry file/function/setting overrides.
  if editor_type == 'hive':
    sessions_properties = HS2Api.get_properties(editor_type)
    if files is not None:
      _update_property_value(sessions_properties, 'files', files)
    if functions is not None:
      _update_property_value(sessions_properties, 'functions', functions)
    if settings is not None:
      _update_property_value(sessions_properties, 'settings', settings)
  elif editor_type == 'impala':
    sessions_properties = HS2Api.get_properties(editor_type)
    if settings is not None:
      # NOTE(review): mirrors the original code, which updates 'files'
      # with ``files`` when ``settings`` is given — looks like an upstream
      # copy/paste slip; kept as-is to avoid a behavior change.
      _update_property_value(sessions_properties, 'files', files)
  elif editor_type == 'java':
    sessions_properties = [] # Java options
  else:
    sessions_properties = []
  data = {
    'name': name,
    'uuid': str(uuid.uuid4()),
    'description': description,
    'sessions': [
      {
        'type': editor_connector,
        'properties': sessions_properties,
        'id': None
      }
    ],
    'selectedSnippet': editor_connector, # TODO: might need update in notebook.ko.js
    'type': 'notebook' if is_notebook else 'query-%s' % editor_type,
    'showHistory': True,
    'isSaved': is_saved,
    'onSuccessUrl': urllib_quote(on_success_url.encode('utf-8'), safe=SAFE_CHARACTERS_URI) if on_success_url else None,
    'pubSubUrl': pub_sub_url,
    'skipHistorify': skip_historify,
    'isPresentationModeDefault': is_presentation_mode,
    'isManaged': is_task,
    'snippets': [
      {
        'status': status,
        'id': str(uuid.uuid4()),
        'statement_raw': statement,
        'statement': statement,
        'type': editor_connector,
        'wasBatchExecuted': batch_submit,
        'lastExecuted': last_executed,
        'properties': {
          'files': [] if files is None else files,
          'functions': [] if functions is None else functions,
          'settings': [] if settings is None else settings
        },
        'name': name,
        'database': database,
        'namespace': namespace if namespace else {},
        'compute': compute if compute else {},
        'result': {'handle': {}},
        'variables': []
      }
    ] if not is_notebook else []
  }
  if has_connectors(): # To improve
    data['dialect'] = interpreter['dialect']
    data['type'] = '%s-%s' % (editor_type, editor_connector) # e.g. 'flink-' + editor_connector
  if snippet_properties:
    data['snippets'][0]['properties'].update(snippet_properties)
  if result_properties:
    data['snippets'][0]['result'].update(result_properties)
  editor.data = json.dumps(data)
  return editor
def make_notebook2(name='Browse', description='', is_saved=False, snippets=None):
  """Creates a multi-snippet Notebook object.

  Args:
    name: Notebook display name (also reused as each snippet's name).
    description: Free-text description stored on the notebook.
    is_saved: Whether the notebook is flagged as saved.
    snippets: List of snippet dicts, each optionally carrying 'type',
      'statement', 'status', 'database' and 'properties'.

  Returns:
    A Notebook instance whose ``data`` attribute holds the serialized JSON.
  """
  from notebook.connectors.hiveserver2 import HS2Api

  editor = Notebook()

  _snippets = []

  for snippet in snippets:
    # Merge caller-supplied properties over the defaults so 'files',
    # 'functions' and 'settings' are always present.
    default_properties = {
        'files': [],
        'functions': [],
        'settings': []
    }

    default_properties.update(snippet['properties'])
    snippet['properties'] = default_properties

    _snippets.append(snippet)

  data = {
    'name': name,
    'uuid': str(uuid.uuid4()),
    'type': 'notebook',
    'description': description,
    'sessions': [
      {
         'type': _snippet['type'],
         # Bug fix: was ``snippet['type']`` — the leaked loop variable from
         # the for-loop above — so every session got the *last* snippet's
         # properties instead of its own.
         'properties': HS2Api.get_properties(_snippet['type']),
         'id': None
      } for _snippet in _snippets  # Non unique types currently
    ],
    'selectedSnippet': _snippets[0]['type'],
    'showHistory': False,
    'isSaved': is_saved,
    'snippets': [
      {
         'status': _snippet.get('status', 'ready'),
         'id': str(uuid.uuid4()),
         'statement_raw': _snippet.get('statement', ''),
         'statement': _snippet.get('statement', ''),
         'type': _snippet.get('type'),
         'properties': _snippet['properties'],
         'name': name,
         'database': _snippet.get('database'),
         'result': {'handle': {}},
         'variables': []
      } for _snippet in _snippets
    ]
  }

  editor.data = json.dumps(data)

  return editor
def _get_notebook_api(user, connector_id, interpreter=None):
  '''
  Helper utils until the API gets simplified.

  Builds a canned single-snippet notebook payload (only the snippet type is
  parameterized by ``connector_id``), then resolves and returns the connector
  API for that snippet via ``get_api`` using a MockRequest for ``user``.
  '''
  notebook_json = """
    {
      "selectedSnippet": "hive",
      "showHistory": false,
      "description": "Test Query",
      "name": "Test Query",
      "sessions": [
          {
              "type": "hive",
              "properties": [],
              "id": null
          }
      ],
      "type": "hive",
      "id": null,
      "snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"%(connector_id)s","status":"running",\
"statement":"select * from web_logs","properties":{"settings":[],"variables":[],"files":[],"functions":[]},\
"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table",\
"handle":{"log_context":null,"statements_count":1,\
"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,\
"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,\
"statement":"select * from web_logs","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},\
"lastExecuted": 1462554843817,"database":"default"}],
      "uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
  }
  """ % {
    'connector_id': connector_id,
  }
  # Only the first (and only) snippet matters for API resolution.
  snippet = json.loads(notebook_json)['snippets'][0]
  snippet['interpreter'] = interpreter

  request = MockRequest(user)

  return get_api(request, snippet)
class MockedDjangoRequest(object):
  """Minimal stand-in for a Django HttpRequest used in tests/tasks.

  Carries a user plus GET/POST/REQUEST dicts and an HTTP method; defaults to
  a JSON-format POST with empty parameters.
  """

  def __init__(self, user, get=None, post=None, method='POST'):
    self.user = user
    self.jt = None
    if get is None:
      get = {'format': 'json'}
    if post is None:
      post = {}
    self.GET = get
    self.POST = post
    self.REQUEST = {}
    self.method = method
def import_saved_beeswax_query(bquery, interpreter=None):
  """Converts a saved Beeswax/Hive query design into an editor Notebook.

  Keeps the design's statement, file resources, functions and settings, and
  marks the resulting notebook as saved.
  """
  design = bquery.get_design()

  return make_notebook(
      name=bquery.name,
      description=bquery.desc,
      # Prefer the explicit interpreter type; otherwise map the legacy
      # Beeswax type code to an editor dialect.
      editor_type=interpreter['type'] if interpreter else _convert_type(bquery.type, bquery.data),
      statement=design.hql_query,
      status='ready',
      files=design.file_resources,
      functions=design.functions,
      settings=design.settings,
      is_saved=True,
      database=design.database
  )
def import_saved_pig_script(pig_script):
  """Converts a saved Pig script into an editor Notebook.

  Maps the script's Hadoop properties, parameters and resources onto snippet
  properties, then strips the generic files/functions/settings keys that do
  not apply to Pig snippets.
  """
  attrs = pig_script.dict

  snippet_properties = {
      'hadoopProperties': [
          "%s=%s" % (prop.get('name'), prop.get('value'))
          for prop in (attrs.get('hadoopProperties') or [])
      ],
      'parameters': [
          "%s=%s" % (param.get('name'), param.get('value'))
          for param in (attrs.get('parameters') or [])
      ],
      'resources': [
          resource.get('value')
          for resource in (attrs.get('resources') or [])
      ],
  }

  notebook = make_notebook(
      name=attrs.get('name'),
      editor_type='pig',
      statement=attrs.get('script'),
      status='ready',
      snippet_properties=snippet_properties,
      is_saved=True
  )

  # Remove files, functions, settings from snippet properties
  data = notebook.get_data()
  for key in ('files', 'functions', 'settings'):
    data['snippets'][0]['properties'].pop(key)

  notebook.data = json.dumps(data)
  return notebook
def import_saved_mapreduce_job(wf):
  """Converts a saved Oozie MapReduce workflow design into an editor Notebook.

  Reads the MapReduce action node of ``wf`` and maps its files, archives,
  Hadoop properties and jar path onto snippet properties, then strips the
  generic functions/settings keys that do not apply to MapReduce snippets.
  """
  snippet_properties = {}
  node = wf.start.get_child('to')

  # Bug fix: 'files' was never initialized, so the append below raised
  # KeyError for any design whose files parsed successfully (the shell and
  # java importers both initialize it).
  snippet_properties['files'] = []
  try:
    files = json.loads(node.files)
    for filepath in files:
      snippet_properties['files'].append({'type': 'file', 'path': filepath})
  except ValueError as e:
    LOG.warning('Failed to parse files for mapreduce job design "%s".' % wf.name)

  snippet_properties['archives'] = []
  try:
    archives = json.loads(node.archives)
    for filepath in archives:
      snippet_properties['archives'].append(filepath)
  except ValueError as e:
    LOG.warning('Failed to parse archives for mapreduce job design "%s".' % wf.name)

  snippet_properties['hadoopProperties'] = []
  try:
    properties = json.loads(node.job_properties)
    if properties:
      for prop in properties:
        snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
  except ValueError as e:
    LOG.warning('Failed to parse job properties for mapreduce job design "%s".' % wf.name)

  snippet_properties['app_jar'] = node.jar_path

  notebook = make_notebook(
      name=wf.name,
      description=wf.description,
      editor_type='mapreduce',
      statement='',
      status='ready',
      snippet_properties=snippet_properties,
      is_saved=True
  )

  # Remove functions, settings from snippet properties
  data = notebook.get_data()
  data['snippets'][0]['properties'].pop('functions')
  data['snippets'][0]['properties'].pop('settings')

  notebook.data = json.dumps(data)
  return notebook
def import_saved_shell_job(wf):
  """Converts a saved Oozie shell workflow design into an editor Notebook.

  Reads the shell action node of ``wf`` and maps its command, parameters,
  Hadoop properties, files and archives onto snippet properties, then strips
  the generic functions/settings keys that do not apply to shell snippets.
  """
  snippet_properties = {}
  node = wf.start.get_child('to')

  snippet_properties['command_path'] = node.command

  snippet_properties['arguments'] = []
  snippet_properties['env_var'] = []
  try:
    params = json.loads(node.params)
    if params:
      for param in params:
        # 'argument' params become CLI arguments; everything else is
        # treated as an environment variable.
        if param['type'] == 'argument':
          snippet_properties['arguments'].append(param['value'])
        else:
          snippet_properties['env_var'].append(param['value'])
  except ValueError as e:
    LOG.warning('Failed to parse parameters for shell job design "%s".' % wf.name)

  snippet_properties['hadoopProperties'] = []
  try:
    properties = json.loads(node.job_properties)
    if properties:
      for prop in properties:
        snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
  except ValueError as e:
    LOG.warning('Failed to parse job properties for shell job design "%s".' % wf.name)

  snippet_properties['files'] = []
  try:
    files = json.loads(node.files)
    for filepath in files:
      snippet_properties['files'].append({'type': 'file', 'path': filepath})
  except ValueError as e:
    LOG.warning('Failed to parse files for shell job design "%s".' % wf.name)

  snippet_properties['archives'] = []
  try:
    archives = json.loads(node.archives)
    for archive in archives:
      snippet_properties['archives'].append(archive['name'])
  except ValueError as e:
    LOG.warning('Failed to parse archives for shell job design "%s".' % wf.name)

  snippet_properties['capture_output'] = node.capture_output

  notebook = make_notebook(
      name=wf.name,
      description=wf.description,
      editor_type='shell',
      statement='',
      status='ready',
      snippet_properties=snippet_properties,
      is_saved=True
  )

  # Remove functions, settings from snippet properties
  data = notebook.get_data()
  data['snippets'][0]['properties'].pop('functions')
  data['snippets'][0]['properties'].pop('settings')

  notebook.data = json.dumps(data)
  return notebook
def import_saved_java_job(wf):
  """Converts a saved Oozie Java workflow design into an editor Notebook.

  Reads the Java action node of ``wf`` and maps its jar path, main class,
  arguments, Java options, Hadoop properties, files and archives onto snippet
  properties, then strips the generic functions/settings keys that do not
  apply to Java snippets.
  """
  snippet_properties = {}
  node = wf.start.get_child('to')

  snippet_properties['app_jar'] = node.jar_path
  snippet_properties['class'] = node.main_class
  # Normalize None to empty string for the editor form fields.
  snippet_properties['args'] = node.args if node.args else ''
  snippet_properties['java_opts'] = node.java_opts if node.java_opts else ''

  snippet_properties['hadoopProperties'] = []
  try:
    properties = json.loads(node.job_properties)
    if properties:
      for prop in properties:
        snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
  except ValueError as e:
    LOG.warning('Failed to parse job properties for Java job design "%s".' % wf.name)

  snippet_properties['files'] = []
  try:
    files = json.loads(node.files)
    for filepath in files:
      snippet_properties['files'].append({'type': 'file', 'path': filepath})
  except ValueError as e:
    LOG.warning('Failed to parse files for Java job design "%s".' % wf.name)

  snippet_properties['archives'] = []
  try:
    archives = json.loads(node.archives)
    for archive in archives:
      snippet_properties['archives'].append(archive['name'])
  except ValueError as e:
    LOG.warning('Failed to parse archives for Java job design "%s".' % wf.name)

  snippet_properties['capture_output'] = node.capture_output

  notebook = make_notebook(
      name=wf.name,
      description=wf.description,
      editor_type='java',
      statement='',
      status='ready',
      snippet_properties=snippet_properties,
      is_saved=True
  )

  # Remove functions, settings from snippet properties
  data = notebook.get_data()
  data['snippets'][0]['properties'].pop('functions')
  data['snippets'][0]['properties'].pop('settings')

  notebook.data = json.dumps(data)
  return notebook
def _convert_type(btype, bdata):
  """Maps a legacy Beeswax query type code to an editor dialect name.

  RDBMS designs carry their server name inside the serialized query data;
  unknown types default to 'hive'.
  """
  from beeswax.models import HQL, IMPALA, RDBMS, SPARK

  if btype == IMPALA:
    return 'impala'
  if btype == RDBMS:
    return json.loads(bdata)['query']['server']
  if btype == SPARK:  # We should not import
    return 'spark'
  # HQL and anything unrecognized map to Hive.
  return 'hive'
def _update_property_value(properties, key, value):
"""
Update property dict in list of properties where prop has "key": key, set "value": value
"""
for prop in properties:
if prop['key'] == key:
prop.update({'value': value})
def _get_editor_type(editor_id):
  """Returns the dialect of the Document2 with id ``editor_id``."""
  document = Document2.objects.get(id=editor_id)
  # Document types look like 'query-hive' or 'query-<connector>-<dialect>';
  # the dialect is the last '-'-separated token.
  return document.type.rsplit('-', 1)[-1]
def _get_example_directory(user):
  """Returns the user's examples directory, creating it under home if missing."""
  home_dir = Directory.objects.get_home_directory(user)
  examples_dir, created = Directory.objects.get_or_create(
    parent_directory=home_dir,
    owner=user,
    name=Document2.EXAMPLES_DIR
  )
  return examples_dir
def _get_dialect_example(dialect):
  """Returns the first saved sample query for ``dialect``, or None.

  Looks in the sample user's examples directory, excluding history documents.
  """
  sample_user = install_sample_user()
  examples_dir = _get_example_directory(sample_user)

  return Document2.objects.filter(
    owner=sample_user,
    type='query-%s' % dialect,
    is_history=False,
    parent_directory=examples_dir
  ).first()
class ApiWrapper():
  """Dispatches notebook API calls to the connector API or the task server.

  When the task server is enabled and ``notebook.tasks`` defines a function
  of the requested name, that function is called instead of the connector
  API, with the original POST data and user id injected.
  """

  def __init__(self, request, snippet):
    self.request = request
    self.api = _get_api(request, snippet)

  def __getattr__(self, name):
    if TASK_SERVER.ENABLED.get():
      from notebook import tasks as ntasks
      if hasattr(ntasks, name):
        attr = getattr(ntasks, name)
        def _method(*args, **kwargs):
          # Inject the request's POST dict and user id so the task can
          # rebuild the calling context on the worker side.
          return attr(*args, **dict(kwargs, postdict=self.request.POST, user_id=self.request.user.id))
        return _method
      else:
        LOG.debug('Skipping Task Server call %s' % name)
    # Fall back to the underlying connector API implementation.
    return getattr(self.api, name)
def get_api(request, snippet):
  """Returns the task-server-aware API wrapper for the given snippet."""
  return ApiWrapper(request, snippet)
def upgrade_session_properties(request, notebook):
  """Upgrades each session's properties to the current format, in place.

  Returns the notebook with its serialized ``data`` refreshed.
  """
  # Upgrade session data if using old format
  data = notebook.get_data()

  for session in data.get('sessions', []):
    api = get_api(request, session)
    # Only sessions with a known type and an API that supports upgrading
    # are rewritten; others are left untouched.
    if 'type' in session and hasattr(api, 'upgrade_properties'):
      properties = session.get('properties', None)
      session['properties'] = api.upgrade_properties(session['type'], properties)

  notebook.data = json.dumps(data)
  return notebook
class Analytics(object):
  """Aggregated usage metrics computed from Document2/User/Session tables.

  ``admin_stats`` returns (label, value) tuples for a whole-instance report;
  ``user_stats`` and ``query_stats`` return lists of dicts carrying
  'name'/'value'/'description' keys for a single user or saved query.
  """

  @classmethod
  def admin_stats(cls):
    """Returns instance-wide activity counters over 1 day/1 week/30+90 days."""
    stats = []

    one_day = datetime.date.today() - timedelta(days=1)
    one_week = datetime.date.today() - timedelta(weeks=1)
    one_month = datetime.date.today() - timedelta(days=30)
    three_months = datetime.date.today() - timedelta(days=90)

    stats.append(('Last modified', '1 day'))
    stats.append(('Users', User.objects.filter(last_login__gte=one_day).count()))
    stats.append(('Sessions', Session.objects.filter(expire_date__gte=one_day).count()))
    stats.append(('Executed queries', Document2.objects.filter(
        last_modified__gte=one_day, is_history=True, type__startswith='query-').count()
      )
    )

    stats.append(('\nLast modified', '1 week'))
    stats.append(('Users', User.objects.filter(last_login__gte=one_week).count()))
    stats.append(('Sessions', Session.objects.filter(expire_date__gte=one_week).count()))
    stats.append(('Executed queries', Document2.objects.filter(
        last_modified__gte=one_week, is_history=True, type__startswith='query-').count()
      )
    )
    stats.append(('Saved queries', Document2.objects.filter(
        last_modified__gte=one_week, is_history=False, type__startswith='query-').count()
      )
    )

    stats.append(('\nAll', ''))
    stats.append(('Active users 30 days', User.objects.filter(last_login__gte=one_month).count()))
    stats.append(('Sessions 30 days', Session.objects.filter(expire_date__gte=one_month).count()))
    stats.append(('Executed queries 30 days', Document2.objects.filter(
        last_modified__gte=one_month, is_history=True, type__startswith='query-').count()
      )
    )
    stats.append(('Active users 90 days', User.objects.filter(last_login__gte=three_months).count()))

    stats.append(('\nDialect executions', ''))
    queries = Document2.objects.filter(type__startswith='query-', is_trashed=False, is_managed=False)
    last_month_qdialects = queries.filter(
        last_modified__gte=one_month
    ).values('type').annotate(c=Count('type')).values('type', 'c').order_by('-c')
    stats.append(('30 days', ', '.join(['%(type)s: %(c)s' % d for d in last_month_qdialects])))

    return stats

  @classmethod
  def user_stats(cls, user_id=None, user=None):
    """Returns per-user query activity stats; accepts a user id or a User."""
    stats = []

    one_month = datetime.date.today() - timedelta(days=30)

    user = User.objects.get(id=user_id) if user is None else user
    queries = Document2.objects.filter(owner__id=user_id, type__startswith='query-', is_trashed=False, is_managed=False)

    stats.append({
      'name': 'user',
      'value': '%s - %s' % (user_id, user.username), 'description': _('User info')
    })

    query_executions = queries.filter(is_history=True, type__startswith='query-')
    stats.append({
      'name': 'query_executions',
      # Bug fix: key was 'values' — inconsistent with every other stat entry
      # ('value'), so consumers reading 'value' saw nothing here.
      'value': query_executions.count(),
      'description': _('Query executions count')
    })

    stats.append({
      'name': 'saved_queries_count',
      'value': queries.filter(is_history=False, type__startswith='query-').count(),
      'description': _('Saved queries count')
    })

    stats.append({
      'name': 'query_executions_30_days_count',
      'value': query_executions.filter(last_modified__gte=one_month).count(),
      'description': _('Query executions 30 days total')
    })

    last_month_daily = queries.filter(
        last_modified__gte=one_month).annotate(
        day=Trunc('last_modified', 'day')
    ).values('day').annotate(c=Count('day')).values('day', 'c').order_by('day')
    stats.append({
      'name': 'query_executions_30_days_histogram',
      'value': last_month_daily,
      'description': _('Daily executions 30 days')
    })

    return stats

  @classmethod
  def query_stats(cls, query_id=None, query=None):
    """Returns execution stats for one saved query; accepts an id or a Document2."""
    stats = []

    one_month = datetime.date.today() - timedelta(days=30)

    query = Document2.objects.get(id=query_id) if query is None else query

    stats.append({
      'name': 'query',
      'value': '%s - %s' % (query_id, query.name),
      'description': _('Query info')
    })

    # History documents derived from this query are its executions.
    executions = query.dependents.filter(is_history=True, type__startswith='query-')
    stats.append({
      'name': 'execution_count',
      'value': executions.count(),
      'description': _('How many times executed')
    })
    stats.append({
      'name': 'execution_count_shared',
      'value': executions.exclude(owner=query.owner).count(),
      'description': _('Executions by others')
    })

    last_month_daily = executions.filter(
        last_modified__gte=one_month).annotate(
        day=Trunc('last_modified', 'day')
    ).values('day').annotate(c=Count('day')).values('day', 'c').order_by('day')
    stats.append({
      'name': 'executions_30_days_histogram',
      'value': last_month_daily,
      'description': _('Daily executions 30 days')
    })

    # Could count number of "forks" (but would need to start tracking parent of Saved As query cf. saveAsNotebook)

    return stats
class MockRequest():
  """Bare-bones request double carrying only user/fs/jt and empty params."""

  def __init__(self, user, fs=None, jt=None):
    self.user, self.fs, self.jt = user, fs, jt
    self.POST, self.GET = {}, {}
def install_custom_examples():
  """Installs the configured sample queries for each eligible interpreter.

  Only runs when ``EXAMPLES.AUTO_LOAD`` is enabled; returns a list of
  (successes, errors) tuples, one per interpreter, or None when disabled.
  """
  if EXAMPLES.AUTO_LOAD.get():
    from desktop.auth.backend import rewrite_user
    from beeswax.management.commands import beeswax_install_examples
    from useradmin.models import install_sample_user

    user = rewrite_user(
      install_sample_user()
    )

    if has_connectors():
      # Connector mode: one interpreter per installed editor connector.
      interpreters = [
        {
          'type': connector['id'],
          'dialect': connector['dialect']
        }
        for connector in _get_installed_connectors(category='editor')
      ]
    else:
      interpreters = [
        {
          'type': interpreter['dialect'],
          'dialect': interpreter['dialect']
        }
        for interpreter in get_ordered_interpreters(user)
        # Only for hive/impala currently, would also need to port to Notebook install examples.
        if interpreter['dialect'] in ('hive', 'impala')
      ]

    queries = EXAMPLES.QUERIES.get()
    tables = EXAMPLES.TABLES.get()  # No-op. Only for the saved query samples, not the tables currently.

    LOG.info('Installing custom examples queries: %(queries)s, tables: %(tables)s for dialects %(dialects)s '
        'belonging to user %(user)s' % {
          'queries': queries,
          'tables': tables,
          'dialects': [interpreter['dialect'] for interpreter in interpreters],
          'user': user
        }
    )

    result = []

    for interpreter in interpreters:
      successes, errors = beeswax_install_examples.Command().handle(
        dialect=interpreter['dialect'],
        user=user,
        interpreter=interpreter,
        queries=queries,
        tables=tables,
        request=None
      )
      LOG.info('Dialect %(dialect)s installed samples: %(successes)s, %(errors)s,' % {
        'dialect': interpreter['dialect'],
        'successes': successes,
        'errors': errors,
      })
      result.append((successes, errors))

    return result
|
blueboxgroup/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/contrib/test_fixed_ips.py | 12 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack.compute.contrib import fixed_ips as fixed_ips_v2
from nova.api.openstack.compute.plugins.v3 import fixed_ips as fixed_ips_v21
from nova import context
from nova import db
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.objects import test_network
# Three fake fixed-IP rows used by the db stubs below: two live addresses on
# network 1 plus one soft-deleted entry (10.0.0.2, deleted=True) exercising
# the "deleted IP" error paths.
fake_fixed_ips = [{'id': 1,
                   'address': '192.168.1.1',
                   'network_id': 1,
                   'virtual_interface_id': 1,
                   'instance_uuid': '1',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'instance': None,
                   'network': test_network.fake_network,
                   'created_at': None,
                   'updated_at': None,
                   'deleted_at': None,
                   'deleted': False},
                  {'id': 2,
                   'address': '192.168.1.2',
                   'network_id': 1,
                   'virtual_interface_id': 2,
                   'instance_uuid': '2',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'instance': None,
                   'network': test_network.fake_network,
                   'created_at': None,
                   'updated_at': None,
                   'deleted_at': None,
                   'deleted': False},
                  {'id': 3,
                   'address': '10.0.0.2',
                   'network_id': 1,
                   'virtual_interface_id': 3,
                   'instance_uuid': '3',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'instance': None,
                   'network': test_network.fake_network,
                   'created_at': None,
                   'updated_at': None,
                   'deleted_at': None,
                   'deleted': True},
                  ]
def fake_fixed_ip_get_by_address(context, address, columns_to_join=None):
    """Stub for db.fixed_ip_get_by_address backed by fake_fixed_ips.

    Raises FixedIpInvalid for the sentinel malformed address and
    FixedIpNotFoundForAddress when no live (non-deleted) row matches.
    """
    if address == 'inv.ali.d.ip':
        msg = "Invalid fixed IP Address %s in request" % address
        raise exception.FixedIpInvalid(msg)
    match = next(
        (ip for ip in fake_fixed_ips
         if ip['address'] == address and not ip['deleted']),
        None,
    )
    if match is None:
        raise exception.FixedIpNotFoundForAddress(address=address)
    return match
def fake_fixed_ip_update(context, address, values):
    """Stub for db.fixed_ip_update: merges ``values`` into the matching fake row."""
    fixed_ip = fake_fixed_ip_get_by_address(context, address)
    if fixed_ip is None:
        raise exception.FixedIpNotFoundForAddress(address=address)
    for key, value in values.items():
        fixed_ip[key] = value
class FakeModel(object):
    """Stubs out for model."""

    def __init__(self, values):
        self.values = values

    def __getattr__(self, name):
        return self.values[name]

    def __getitem__(self, key):
        # EAFP: fetch and translate a missing key into the stub's
        # NotImplementedError, matching the original membership check.
        try:
            return self.values[key]
        except KeyError:
            raise NotImplementedError()

    def __repr__(self):
        return '<FakeModel: %s>' % self.values
def fake_network_get_all(context):
    """Stub for db.network_get_all returning one fake 192.168.1.0/24 network."""
    return [FakeModel({'id': 1,
                       'cidr': "192.168.1.0/24"})]
class FixedIpTestV21(test.NoDBTestCase):
    """Tests for the v2.1 os-fixed-ips API: show, reserve and unreserve.

    The reserve/unreserve entry points and the success assertion are
    factored into hooks so FixedIpTestV2 can reuse every test against the
    legacy v2 extension.
    """
    fixed_ips = fixed_ips_v21
    url = '/v2/fake/os-fixed-ips'

    def setUp(self):
        super(FixedIpTestV21, self).setUp()
        # Route the db layer to the in-module fakes.
        self.stubs.Set(db, "fixed_ip_get_by_address",
                       fake_fixed_ip_get_by_address)
        self.stubs.Set(db, "fixed_ip_update", fake_fixed_ip_update)
        self.context = context.get_admin_context()
        self.controller = self.fixed_ips.FixedIPController()

    def _assert_equal(self, ret, exp):
        # v2.1 actions surface the HTTP status via wsgi_code.
        self.assertEqual(ret.wsgi_code, exp)

    def _get_reserve_action(self):
        return self.controller.reserve

    def _get_unreserve_action(self):
        return self.controller.unreserve

    def test_fixed_ips_get(self):
        req = fakes.HTTPRequest.blank('%s/192.168.1.1' % self.url)
        res_dict = self.controller.show(req, '192.168.1.1')
        response = {'fixed_ip': {'cidr': '192.168.1.0/24',
                                 'hostname': None,
                                 'host': None,
                                 'address': '192.168.1.1'}}
        self.assertEqual(response, res_dict)

    def test_fixed_ips_get_bad_ip_fail(self):
        # 10.0.0.1 does not exist in the fakes at all.
        req = fakes.HTTPRequest.blank('%s/10.0.0.1' % self.url)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req,
                          '10.0.0.1')

    def test_fixed_ips_get_invalid_ip_address(self):
        req = fakes.HTTPRequest.blank('%s/inv.ali.d.ip' % self.url)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.show, req,
                          'inv.ali.d.ip')

    def test_fixed_ips_get_deleted_ip_fail(self):
        # 10.0.0.2 exists but is soft-deleted in the fakes.
        req = fakes.HTTPRequest.blank('%s/10.0.0.2' % self.url)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req,
                          '10.0.0.2')

    def test_fixed_ip_reserve(self):
        fake_fixed_ips[0]['reserved'] = False
        body = {'reserve': None}
        req = fakes.HTTPRequest.blank('%s/192.168.1.1/action' % self.url)
        action = self._get_reserve_action()
        result = action(req, "192.168.1.1", body=body)

        self._assert_equal(result or action, 202)
        self.assertEqual(fake_fixed_ips[0]['reserved'], True)

    def test_fixed_ip_reserve_bad_ip(self):
        body = {'reserve': None}
        req = fakes.HTTPRequest.blank('%s/10.0.0.1/action' % self.url)
        action = self._get_reserve_action()

        self.assertRaises(webob.exc.HTTPNotFound, action, req,
                          '10.0.0.1', body=body)

    def test_fixed_ip_reserve_invalid_ip_address(self):
        body = {'reserve': None}
        req = fakes.HTTPRequest.blank('%s/inv.ali.d.ip/action' % self.url)
        action = self._get_reserve_action()

        self.assertRaises(webob.exc.HTTPBadRequest,
                          action, req, 'inv.ali.d.ip', body=body)

    def test_fixed_ip_reserve_deleted_ip(self):
        body = {'reserve': None}
        action = self._get_reserve_action()

        req = fakes.HTTPRequest.blank('%s/10.0.0.2/action' % self.url)
        self.assertRaises(webob.exc.HTTPNotFound, action, req,
                          '10.0.0.2', body=body)

    def test_fixed_ip_unreserve(self):
        fake_fixed_ips[0]['reserved'] = True
        body = {'unreserve': None}
        req = fakes.HTTPRequest.blank('%s/192.168.1.1/action' % self.url)
        action = self._get_unreserve_action()
        result = action(req, "192.168.1.1", body=body)

        self._assert_equal(result or action, 202)
        self.assertEqual(fake_fixed_ips[0]['reserved'], False)

    def test_fixed_ip_unreserve_bad_ip(self):
        body = {'unreserve': None}
        req = fakes.HTTPRequest.blank('%s/10.0.0.1/action' % self.url)
        action = self._get_unreserve_action()

        self.assertRaises(webob.exc.HTTPNotFound, action, req,
                          '10.0.0.1', body=body)

    def test_fixed_ip_unreserve_invalid_ip_address(self):
        body = {'unreserve': None}
        req = fakes.HTTPRequest.blank('%s/inv.ali.d.ip/action' % self.url)
        action = self._get_unreserve_action()
        self.assertRaises(webob.exc.HTTPBadRequest,
                          action, req, 'inv.ali.d.ip', body=body)

    def test_fixed_ip_unreserve_deleted_ip(self):
        body = {'unreserve': None}
        req = fakes.HTTPRequest.blank('%s/10.0.0.2/action' % self.url)
        action = self._get_unreserve_action()
        self.assertRaises(webob.exc.HTTPNotFound, action, req,
                          '10.0.0.2', body=body)
class FixedIpTestV2(FixedIpTestV21):
    """Runs the same suite against the legacy v2 extension.

    v2 routes both reserve and unreserve through the single ``action``
    method and returns a webob Response, so success is asserted via the
    status string instead of wsgi_code.
    """
    fixed_ips = fixed_ips_v2

    def _assert_equal(self, ret, exp):
        self.assertEqual(ret.status, '202 Accepted')

    def _get_reserve_action(self):
        return self.controller.action

    def _get_unreserve_action(self):
        return self.controller.action
|
chrisfilda/edx_platform | refs/heads/master | lms/djangoapps/django_comment_client/migrations/__init__.py | 12133432 | |
hobarrera/django | refs/heads/master | django/test/testcases.py | 11 | from __future__ import unicode_literals
import difflib
import errno
import json
import os
import posixpath
import socket
import sys
import threading
import unittest
import warnings
from collections import Counter
from contextlib import contextmanager
from copy import copy
from functools import wraps
from unittest.util import safe_repr
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils import six
from django.utils.decorators import classproperty
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import (
unquote, urljoin, urlparse, urlsplit, urlunsplit,
)
from django.utils.six.moves.urllib.request import url2pathname
from django.views.static import serve
__all__ = ('TestCase', 'TransactionTestCase',
'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
    """
    Puts value into a list if it's not already one.
    Returns an empty list if value is None.
    """
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]
def assert_and_parse_html(self, html, user_msg, msg):
    """Parses ``html`` into a comparable DOM, failing the test on invalid HTML."""
    try:
        dom = parse_html(html)
    except HTMLParseError as e:
        # Surface the parse error through the test case's failure machinery
        # so the caller's custom message is honored.
        standardMsg = '%s\n%s' % (msg, e.msg)
        self.fail(self._formatMessage(user_msg, standardMsg))
    return dom
class _AssertNumQueriesContext(CaptureQueriesContext):
    """Context manager asserting that exactly ``num`` queries ran on ``connection``."""

    def __init__(self, test_case, num, connection):
        self.test_case = test_case
        self.num = num
        super(_AssertNumQueriesContext, self).__init__(connection)

    def __exit__(self, exc_type, exc_value, traceback):
        super(_AssertNumQueriesContext, self).__exit__(exc_type, exc_value, traceback)
        # Don't mask an exception raised inside the ``with`` block with a
        # query-count failure.
        if exc_type is not None:
            return
        executed = len(self)
        self.test_case.assertEqual(
            executed, self.num,
            "%d queries executed, %d expected\nCaptured queries were:\n%s" % (
                executed, self.num,
                '\n'.join(
                    query['sql'] for query in self.captured_queries
                )
            )
        )
class _AssertTemplateUsedContext(object):
    """Context manager asserting that ``template_name`` was rendered inside the block.

    Listens to the ``template_rendered`` signal while active and records every
    rendered template (and its context); the check runs on exit.
    """

    def __init__(self, test_case, template_name):
        self.test_case = test_case
        self.template_name = template_name
        self.rendered_templates = []
        self.rendered_template_names = []
        self.context = ContextList()

    def on_template_render(self, sender, signal, template, context, **kwargs):
        self.rendered_templates.append(template)
        self.rendered_template_names.append(template.name)
        self.context.append(copy(context))

    def test(self):
        # Overridden by the "not used" subclass to invert the condition.
        return self.template_name in self.rendered_template_names

    def message(self):
        return '%s was not rendered.' % self.template_name

    def __enter__(self):
        template_rendered.connect(self.on_template_render)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        template_rendered.disconnect(self.on_template_render)
        # Don't mask an exception raised inside the ``with`` block.
        if exc_type is not None:
            return

        if not self.test():
            message = self.message()
            if len(self.rendered_templates) == 0:
                message += ' No template was rendered.'
            else:
                message += ' Following templates were rendered: %s' % (
                    ', '.join(self.rendered_template_names))
            self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
    """Inverse of _AssertTemplateUsedContext: fails if the template WAS rendered."""

    def test(self):
        return self.template_name not in self.rendered_template_names

    def message(self):
        return '%s was rendered.' % self.template_name
class _CursorFailure(object):
def __init__(self, cls_name, wrapped):
self.cls_name = cls_name
self.wrapped = wrapped
def __call__(self):
raise AssertionError(
"Database queries aren't allowed in SimpleTestCase. "
"Either use TestCase or TransactionTestCase to ensure proper test isolation or "
"set %s.allow_database_queries to True to silence this failure." % self.cls_name
)
class SimpleTestCase(unittest.TestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
_overridden_settings = None
_modified_settings = None
# Tests shouldn't be allowed to query the database since
# this base class doesn't enforce any isolation.
allow_database_queries = False
    @classmethod
    def setUpClass(cls):
        """Applies class-level settings overrides and blocks DB access if configured."""
        super(SimpleTestCase, cls).setUpClass()
        if cls._overridden_settings:
            cls._cls_overridden_context = override_settings(**cls._overridden_settings)
            cls._cls_overridden_context.enable()
        if cls._modified_settings:
            cls._cls_modified_context = modify_settings(cls._modified_settings)
            cls._cls_modified_context.enable()
        if not cls.allow_database_queries:
            # Replace every connection's cursor factory so any query attempt
            # raises a descriptive AssertionError naming this class.
            for alias in connections:
                connection = connections[alias]
                connection.cursor = _CursorFailure(cls.__name__, connection.cursor)
    @classmethod
    def tearDownClass(cls):
        """Restores cursors and disables settings overrides, mirroring setUpClass."""
        if not cls.allow_database_queries:
            # Unwrap the _CursorFailure installed by setUpClass.
            for alias in connections:
                connection = connections[alias]
                connection.cursor = connection.cursor.wrapped
        if hasattr(cls, '_cls_modified_context'):
            cls._cls_modified_context.disable()
            delattr(cls, '_cls_modified_context')
        if hasattr(cls, '_cls_overridden_context'):
            cls._cls_overridden_context.disable()
            delattr(cls, '_cls_overridden_context')
        super(SimpleTestCase, cls).tearDownClass()
    def __call__(self, result=None):
        """
        Wrapper around default __call__ method to perform common Django test
        set up. This means that user-defined Test Cases aren't required to
        include a call to super().setUp().
        """
        testMethod = getattr(self, self._testMethodName)
        # Skip the Django-specific setup/teardown entirely for skipped tests.
        skipped = (
            getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)
        )

        if not skipped:
            try:
                self._pre_setup()
            except Exception:
                # Report setup failures as errors rather than crashing the runner.
                result.addError(self, sys.exc_info())
                return
        super(SimpleTestCase, self).__call__(result)
        if not skipped:
            try:
                self._post_teardown()
            except Exception:
                result.addError(self, sys.exc_info())
                return
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * Creating a test client.
        * Clearing the mail test outbox.
        """
        self.client = self.client_class()
        mail.outbox = []
    def _post_teardown(self):
        """Perform any post-test things. No-op here; subclasses override."""
        pass
    def settings(self, **kwargs):
        """
        A context manager that temporarily sets a setting and reverts to the original value when exiting the context.
        """
        return override_settings(**kwargs)
    def modify_settings(self, **kwargs):
        """
        A context manager that temporarily applies changes a list setting and
        reverts back to the original value when exiting the context.
        """
        return modify_settings(**kwargs)
    def assertRedirects(self, response, expected_url, status_code=302,
                        target_status_code=200, host=None, msg_prefix='',
                        fetch_redirect_response=True):
        """Asserts that a response redirected to a specific URL, and that the
        redirect URL can be loaded.
        Note that assertRedirects won't work for external links since it uses
        TestClient to do a request (use fetch_redirect_response=False to check
        such links without fetching them).
        """
        if host is not None:
            # host never influenced the comparison; kept only to warn callers.
            warnings.warn(
                "The host argument is deprecated and no longer used by assertRedirects",
                RemovedInDjango20Warning, stacklevel=2
            )
        if msg_prefix:
            msg_prefix += ": "
        if hasattr(response, 'redirect_chain'):
            # The request was a followed redirect
            self.assertTrue(
                len(response.redirect_chain) > 0,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            self.assertEqual(
                response.redirect_chain[0][1], status_code,
                msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.redirect_chain[0][1], status_code)
            )
            # The final hop of the chain is the URL/status pair to verify.
            url, status_code = response.redirect_chain[-1]
            scheme, netloc, path, query, fragment = urlsplit(url)
            self.assertEqual(
                response.status_code, target_status_code,
                msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)"
                % (response.status_code, target_status_code)
            )
        else:
            # Not a followed redirect
            self.assertEqual(
                response.status_code, status_code,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            url = response.url
            scheme, netloc, path, query, fragment = urlsplit(url)
            # Prepend the request path to handle relative path redirects.
            if not path.startswith('/'):
                url = urljoin(response.request['PATH_INFO'], url)
                path = urljoin(response.request['PATH_INFO'], path)
            if fetch_redirect_response:
                redirect_response = response.client.get(path, QueryDict(query), secure=(scheme == 'https'))
                # Get the redirection page, using the same client that was used
                # to obtain the original response.
                self.assertEqual(
                    redirect_response.status_code, target_status_code,
                    msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)"
                    % (path, redirect_response.status_code, target_status_code)
                )
        if url != expected_url:
            # For temporary backwards compatibility, try to compare with a relative url
            e_scheme, e_netloc, e_path, e_query, e_fragment = urlsplit(expected_url)
            relative_url = urlunsplit(('', '', e_path, e_query, e_fragment))
            if url == relative_url:
                warnings.warn(
                    "assertRedirects had to strip the scheme and domain from the "
                    "expected URL, as it was always added automatically to URLs "
                    "before Django 1.9. Please update your expected URLs by "
                    "removing the scheme and domain.",
                    RemovedInDjango20Warning, stacklevel=2)
                expected_url = relative_url
        self.assertEqual(
            url, expected_url,
            msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url)
        )
    def _assert_contains(self, response, text, status_code, msg_prefix, html):
        """
        Shared helper for assertContains()/assertNotContains(): verify the
        status code, then count occurrences of ``text`` in the response body.
        Returns (text_repr, real_count, msg_prefix).
        """
        # If the response supports deferred rendering and hasn't been rendered
        # yet, then ensure that it does get rendered before proceeding further.
        if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
            response.render()
        if msg_prefix:
            msg_prefix += ": "
        self.assertEqual(
            response.status_code, status_code,
            msg_prefix + "Couldn't retrieve content: Response code was %d"
            " (expected %d)" % (response.status_code, status_code)
        )
        if response.streaming:
            # Streaming responses must be consumed to obtain their content.
            content = b''.join(response.streaming_content)
        else:
            content = response.content
        if not isinstance(text, bytes) or html:
            # Compare as text: decode the body with the response's charset.
            text = force_text(text, encoding=response.charset)
            content = content.decode(response.charset)
            text_repr = "'%s'" % text
        else:
            # Compare as raw bytes.
            text_repr = repr(text)
        if html:
            # Parse both sides so the comparison is semantic, not textual.
            content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:")
            text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:")
        real_count = content.count(text)
        return (text_repr, real_count, msg_prefix)
def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False):
"""
Asserts that a response indicates that some content was retrieved
successfully, (i.e., the HTTP status code was as expected), and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr)
    def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False):
        """
        Asserts that a response indicates that some content was retrieved
        successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` doesn't occur in the content of the response.
        """
        # Reuse the shared counting helper and require zero occurrences.
        text_repr, real_count, msg_prefix = self._assert_contains(
            response, text, status_code, msg_prefix, html)
        self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr)
    def assertFormError(self, response, form, field, errors, msg_prefix=''):
        """
        Asserts that a form used to render the response has a specific field
        error.

        ``field`` may be None to check the form's non-field errors instead.
        """
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + "Response did not use any contexts to render the response")
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_form = False
        for i, context in enumerate(contexts):
            if form not in context:
                continue
            found_form = True
            for err in errors:
                if field:
                    if field in context[form].errors:
                        # The field has errors; require this specific one.
                        field_errors = context[form].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on form '%s' in"
                            " context %d does not contain the error '%s'"
                            " (actual errors: %s)" %
                            (field, form, i, err, repr(field_errors))
                        )
                    elif field in context[form].fields:
                        # The field exists but carries no errors at all.
                        self.fail(
                            msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" %
                            (field, form, i)
                        )
                    else:
                        # The form doesn't even define such a field.
                        self.fail(
                            msg_prefix + "The form '%s' in context %d does not contain the field '%s'" %
                            (form, i, field)
                        )
                else:
                    # field is falsy: check the form's non-field errors.
                    non_field_errors = context[form].non_field_errors()
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The form '%s' in context %d does not"
                        " contain the non-field error '%s'"
                        " (actual errors: %s)" %
                        (form, i, err, non_field_errors)
                    )
        if not found_form:
            self.fail(msg_prefix + "The form '%s' was not used to render the response" % form)
    def assertFormsetError(self, response, formset, form_index, field, errors,
                           msg_prefix=''):
        """
        Asserts that a formset used to render the response has a specific error.
        For field errors, specify the ``form_index`` and the ``field``.
        For non-field errors, specify the ``form_index`` and the ``field`` as
        None.
        For non-form errors, specify ``form_index`` as None and the ``field``
        as None.
        """
        # Add punctuation to msg_prefix
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + 'Response did not use any contexts to '
                      'render the response')
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_formset = False
        for i, context in enumerate(contexts):
            if formset not in context:
                continue
            found_formset = True
            for err in errors:
                if field is not None:
                    # Field-level error on a specific member form.
                    if field in context[formset].forms[form_index].errors:
                        field_errors = context[formset].forms[form_index].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on formset '%s', "
                            "form %d in context %d does not contain the "
                            "error '%s' (actual errors: %s)" %
                            (field, formset, form_index, i, err, repr(field_errors))
                        )
                    elif field in context[formset].forms[form_index].fields:
                        # Field exists but has no errors.
                        self.fail(
                            msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors"
                            % (field, formset, form_index, i)
                        )
                    else:
                        # Field isn't defined on that member form.
                        self.fail(
                            msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'"
                            % (formset, form_index, i, field)
                        )
                elif form_index is not None:
                    # Non-field error on a specific member form.
                    non_field_errors = context[formset].forms[form_index].non_field_errors()
                    self.assertFalse(
                        len(non_field_errors) == 0,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain any non-field errors." % (formset, form_index, i)
                    )
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain the non-field error '%s' (actual errors: %s)"
                        % (formset, form_index, i, err, repr(non_field_errors))
                    )
                else:
                    # Non-form error on the formset itself.
                    non_form_errors = context[formset].non_form_errors()
                    self.assertFalse(
                        len(non_form_errors) == 0,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain any non-form errors." % (formset, i)
                    )
                    self.assertTrue(
                        err in non_form_errors,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain the non-form error '%s' (actual errors: %s)"
                        % (formset, i, err, repr(non_form_errors))
                    )
        if not found_formset:
            self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset)
    def _assert_template_used(self, response, template_name, msg_prefix):
        """
        Shared helper for assertTemplateUsed()/assertTemplateNotUsed().

        Returns (context_mgr_template, template_names, msg_prefix): exactly
        one of the first two is non-None, signalling context-manager mode
        versus direct-assertion mode respectively.
        """
        if response is None and template_name is None:
            raise TypeError('response and/or template_name argument must be provided')
        if msg_prefix:
            msg_prefix += ": "
        if template_name is not None and response is not None and not hasattr(response, 'templates'):
            raise ValueError(
                "assertTemplateUsed() and assertTemplateNotUsed() are only "
                "usable on responses fetched using the Django test Client."
            )
        # If the sole positional argument wasn't a test-client response, it
        # is actually a template name and the caller wants context-manager
        # usage (e.g. ``with self.assertTemplateUsed('index.html'):``).
        if not hasattr(response, 'templates') or (response is None and template_name):
            if response:
                template_name = response
                response = None
            # use this template with context manager
            return template_name, None, msg_prefix
        # Direct-assertion mode: collect the names of rendered templates.
        template_names = [t.name for t in response.templates if t.name is not None]
        return None, template_names, msg_prefix
def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
"""
Asserts that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix)
if context_mgr_template:
# Use assertTemplateUsed as context manager.
return _AssertTemplateUsedContext(self, context_mgr_template)
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(
template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s"
% (template_name, ', '.join(template_names))
)
if count is not None:
self.assertEqual(
template_names.count(template_name), count,
msg_prefix + "Template '%s' was expected to be rendered %d "
"time(s) but was actually rendered %d time(s)."
% (template_name, count, template_names.count(template_name))
)
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Asserts that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix
)
if context_mgr_template:
# Use assertTemplateNotUsed as context manager.
return _AssertTemplateNotUsedContext(self, context_mgr_template)
self.assertFalse(
template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name
)
    @contextmanager
    def _assert_raises_message_cm(self, expected_exception, expected_message):
        # Context manager backing assertRaisesMessage(): require that
        # expected_exception is raised inside the block, then check that its
        # string form contains expected_message.
        with self.assertRaises(expected_exception) as cm:
            yield cm
        # Only reached if the expected exception was actually raised.
        self.assertIn(expected_message, str(cm.exception))
    def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
        """
        Asserts that expected_message is found in the message of a raised
        exception.
        Args:
            expected_exception: Exception class expected to be raised.
            expected_message: expected error message string value.
            args: Function to be called and extra positional args.
            kwargs: Extra kwargs.
        """
        # callable_obj was a documented kwarg in Django 1.8 and older.
        callable_obj = kwargs.pop('callable_obj', None)
        if callable_obj:
            warnings.warn(
                'The callable_obj kwarg is deprecated. Pass the callable '
                'as a positional argument instead.', RemovedInDjango20Warning
            )
        elif len(args):
            # Modern form: first positional arg is the callable under test.
            callable_obj = args[0]
            args = args[1:]
        cm = self._assert_raises_message_cm(expected_exception, expected_message)
        # Assertion used in context manager fashion.
        if callable_obj is None:
            return cm
        # Assertion was passed a callable.
        with cm:
            callable_obj(*args, **kwargs)
    def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
                          field_kwargs=None, empty_value=''):
        """
        Asserts that a form field behaves correctly with various inputs.
        Args:
            fieldclass: the class of the field to be tested.
            valid: a dictionary mapping valid inputs to their expected
                    cleaned values.
            invalid: a dictionary mapping invalid inputs to one or more
                    raised error messages.
            field_args: the args passed to instantiate the field
            field_kwargs: the kwargs passed to instantiate the field
            empty_value: the expected clean output for inputs in empty_values
        """
        if field_args is None:
            field_args = []
        if field_kwargs is None:
            field_kwargs = {}
        # Exercise both a required and an optional instance of the field.
        required = fieldclass(*field_args, **field_kwargs)
        optional = fieldclass(*field_args, **dict(field_kwargs, required=False))
        # test valid inputs
        for input, output in valid.items():
            self.assertEqual(required.clean(input), output)
            self.assertEqual(optional.clean(input), output)
        # test invalid inputs
        for input, errors in invalid.items():
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
            with self.assertRaises(ValidationError) as context_manager:
                optional.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
        # test required inputs
        error_required = [force_text(required.error_messages['required'])]
        for e in required.empty_values:
            # Required field must reject empty values; optional field must
            # clean them to empty_value.
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(e)
            self.assertEqual(context_manager.exception.messages, error_required)
            self.assertEqual(optional.clean(e), empty_value)
        # test that max_length and min_length are always accepted
        if issubclass(fieldclass, CharField):
            field_kwargs.update({'min_length': 2, 'max_length': 20})
            self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
    def assertHTMLEqual(self, html1, html2, msg=None):
        """
        Asserts that two HTML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid HTML.
        """
        # Parsing normalizes whitespace and attribute order before comparing.
        dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
        dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
        if dom1 != dom2:
            standardMsg = '%s != %s' % (
                safe_repr(dom1, True), safe_repr(dom2, True))
            # Include a line-by-line diff of the normalized HTML.
            diff = ('\n' + '\n'.join(difflib.ndiff(
                six.text_type(dom1).splitlines(),
                six.text_type(dom2).splitlines(),
            )))
            standardMsg = self._truncateMessage(standardMsg, diff)
            self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Asserts that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
if dom1 == dom2:
standardMsg = '%s == %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:')
haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:')
real_count = haystack.count(needle)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle)
def assertJSONEqual(self, raw, expected_data, msg=None):
"""
Asserts that the JSON fragments raw and expected_data are equal.
Usual JSON non-significant whitespace rules apply as the heavyweight
is delegated to the json library.
"""
try:
data = json.loads(raw)
except ValueError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, six.string_types):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertEqual(data, expected_data, msg=msg)
def assertJSONNotEqual(self, raw, expected_data, msg=None):
"""
Asserts that the JSON fragments raw and expected_data are not equal.
Usual JSON non-significant whitespace rules apply as the heavyweight
is delegated to the json library.
"""
try:
data = json.loads(raw)
except ValueError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, six.string_types):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertNotEqual(data, expected_data, msg=msg)
    def assertXMLEqual(self, xml1, xml2, msg=None):
        """
        Asserts that two XML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid XML.
        """
        try:
            result = compare_xml(xml1, xml2)
        except Exception as e:
            # compare_xml raises if either argument fails to parse.
            standardMsg = 'First or second argument is not valid XML\n%s' % e
            self.fail(self._formatMessage(msg, standardMsg))
        else:
            if not result:
                standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
                # Include a line-by-line diff of the two snippets.
                diff = ('\n' + '\n'.join(
                    difflib.ndiff(
                        six.text_type(xml1).splitlines(),
                        six.text_type(xml2).splitlines(),
                    )
                ))
                standardMsg = self._truncateMessage(standardMsg, diff)
                self.fail(self._formatMessage(msg, standardMsg))
def assertXMLNotEqual(self, xml1, xml2, msg=None):
"""
Asserts that two XML snippets are not semantically equivalent.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The passed-in arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if result:
standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
self.fail(self._formatMessage(msg, standardMsg))
class TransactionTestCase(SimpleTestCase):
    """
    Test case that resets database state between tests by flushing tables
    (rather than rolling back a transaction), so the tests themselves may
    use transactions freely.
    """
    # Subclasses can ask for resetting of auto increment sequence before each
    # test case
    reset_sequences = False
    # Subclasses can enable only a subset of apps for faster tests
    available_apps = None
    # Subclasses can define fixtures which will be automatically installed.
    fixtures = None
    # If transactions aren't available, Django will serialize the database
    # contents into a fixture during setup and flush and reload them
    # during teardown (as flush does not restore data from migrations).
    # This can be slow; this flag allows enabling on a per-case basis.
    serialized_rollback = False
    # Since tests will be wrapped in a transaction, or serialized if they
    # are not available, we allow queries to be run.
    allow_database_queries = True
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * If the class has an 'available_apps' attribute, restricting the app
          registry to these applications, then firing post_migrate -- it must
          run with the correct set of applications for the test case.
        * If the class has a 'fixtures' attribute, installing these fixtures.
        """
        super(TransactionTestCase, self)._pre_setup()
        if self.available_apps is not None:
            apps.set_available_apps(self.available_apps)
            # Notify listeners as if INSTALLED_APPS had been overridden.
            setting_changed.send(
                sender=settings._wrapped.__class__,
                setting='INSTALLED_APPS',
                value=self.available_apps,
                enter=True,
            )
            for db_name in self._databases_names(include_mirrors=False):
                emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
        try:
            self._fixture_setup()
        except Exception:
            # Fixture loading failed: restore the full app registry before
            # propagating, so later tests aren't affected.
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(
                    sender=settings._wrapped.__class__,
                    setting='INSTALLED_APPS',
                    value=settings.INSTALLED_APPS,
                    enter=False,
                )
            raise
    @classmethod
    def _databases_names(cls, include_mirrors=True):
        # If the test case has a multi_db=True flag, act on all databases,
        # including mirrors or not. Otherwise, just on the default DB.
        if getattr(cls, 'multi_db', False):
            return [
                alias for alias in connections
                if include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']
            ]
        else:
            return [DEFAULT_DB_ALIAS]
    def _reset_sequences(self, db_name):
        # Reset auto-increment sequences on db_name, when the backend can.
        conn = connections[db_name]
        if conn.features.supports_sequence_reset:
            sql_list = conn.ops.sequence_reset_by_name_sql(
                no_style(), conn.introspection.sequence_list())
            if sql_list:
                with transaction.atomic(using=db_name):
                    cursor = conn.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
    def _fixture_setup(self):
        # Prepare every (non-mirror) database for the test run.
        for db_name in self._databases_names(include_mirrors=False):
            # Reset sequences
            if self.reset_sequences:
                self._reset_sequences(db_name)
            # If we need to provide replica initial data from migrated apps,
            # then do so.
            if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
                if self.available_apps is not None:
                    # Deserialization needs the full app registry available.
                    apps.unset_available_apps()
                connections[db_name].creation.deserialize_db_from_string(
                    connections[db_name]._test_serialized_contents
                )
                if self.available_apps is not None:
                    apps.set_available_apps(self.available_apps)
            if self.fixtures:
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                call_command('loaddata', *self.fixtures,
                             **{'verbosity': 0, 'database': db_name})
    def _should_reload_connections(self):
        # TransactionTestCase always reconnects after each test; TestCase
        # overrides this when transactions make it unnecessary.
        return True
    def _post_teardown(self):
        """Performs any post-test things. This includes:
        * Flushing the contents of the database, to leave a clean slate. If
          the class has an 'available_apps' attribute, post_migrate isn't fired.
        * Force-closing the connection, so the next test gets a clean cursor.
        """
        try:
            self._fixture_teardown()
            super(TransactionTestCase, self)._post_teardown()
            if self._should_reload_connections():
                # Some DB cursors include SQL statements as part of cursor
                # creation. If you have a test that does a rollback, the effect
                # of these statements is lost, which can affect the operation of
                # tests (e.g., losing a timezone setting causing objects to be
                # created with the wrong time). To make sure this doesn't
                # happen, get a clean connection at the start of every test.
                for conn in connections.all():
                    conn.close()
        finally:
            # Always restore the full app registry, even if teardown failed.
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(sender=settings._wrapped.__class__,
                                     setting='INSTALLED_APPS',
                                     value=settings.INSTALLED_APPS,
                                     enter=False)
    def _fixture_teardown(self):
        # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
        # when flushing only a subset of the apps
        for db_name in self._databases_names(include_mirrors=False):
            # Flush the database
            inhibit_post_migrate = (
                self.available_apps is not None or
                (   # Inhibit the post_migrate signal when using serialized
                    # rollback to avoid trying to recreate the serialized data.
                    self.serialized_rollback and
                    hasattr(connections[db_name], '_test_serialized_contents')
                )
            )
            call_command('flush', verbosity=0, interactive=False,
                         database=db_name, reset_sequences=False,
                         allow_cascade=self.available_apps is not None,
                         inhibit_post_migrate=inhibit_post_migrate)
    def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
        """
        Assert that queryset ``qs``, mapped through ``transform``, matches
        ``values`` (order-insensitively when ``ordered`` is False).
        """
        items = six.moves.map(transform, qs)
        if not ordered:
            # Compare as multisets when order is not significant.
            return self.assertEqual(Counter(items), Counter(values), msg=msg)
        values = list(values)
        # For example qs.iterator() could be passed as qs, but it does not
        # have 'ordered' attribute.
        if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
            raise ValueError("Trying to compare non-ordered queryset "
                             "against more than one ordered values")
        return self.assertEqual(list(items), values, msg=msg)
    def assertNumQueries(self, num, func=None, *args, **kwargs):
        """
        Assert that exactly ``num`` database queries are executed, either by
        ``func(*args, **kwargs)`` or inside the returned context manager.
        """
        using = kwargs.pop("using", DEFAULT_DB_ALIAS)
        conn = connections[using]
        context = _AssertNumQueriesContext(self, num, conn)
        if func is None:
            # No callable given: act as a context manager.
            return context
        with context:
            func(*args, **kwargs)
def connections_support_transactions():
    """Return True when every configured database connection supports transactions."""
    for conn in connections.all():
        if not conn.features.supports_transactions:
            return False
    return True
class TestCase(TransactionTestCase):
    """
    Similar to TransactionTestCase, but uses `transaction.atomic()` to achieve
    test isolation.
    In most situations, TestCase should be preferred to TransactionTestCase as
    it allows faster execution. However, there are some situations where using
    TransactionTestCase might be necessary (e.g. testing some transactional
    behavior).
    On database backends with no transaction support, TestCase behaves as
    TransactionTestCase.
    """
    @classmethod
    def _enter_atomics(cls):
        """Helper method to open atomic blocks for multiple databases"""
        atomics = {}
        for db_name in cls._databases_names():
            atomics[db_name] = transaction.atomic(using=db_name)
            atomics[db_name].__enter__()
        return atomics
    @classmethod
    def _rollback_atomics(cls, atomics):
        """Rollback atomic blocks opened through the previous method"""
        # Exit in reverse order of entry, marking each for rollback first.
        for db_name in reversed(cls._databases_names()):
            transaction.set_rollback(True, using=db_name)
            atomics[db_name].__exit__(None, None, None)
    @classmethod
    def setUpClass(cls):
        # Open a class-wide atomic block per database, load class fixtures
        # and class test data inside it; all of it is rolled back in
        # tearDownClass.
        super(TestCase, cls).setUpClass()
        if not connections_support_transactions():
            # Fall back to per-test TransactionTestCase behavior.
            return
        cls.cls_atomics = cls._enter_atomics()
        if cls.fixtures:
            for db_name in cls._databases_names(include_mirrors=False):
                try:
                    call_command('loaddata', *cls.fixtures, **{
                        'verbosity': 0,
                        'commit': False,
                        'database': db_name,
                    })
                except Exception:
                    # Don't leave the class-wide atomics dangling on failure.
                    cls._rollback_atomics(cls.cls_atomics)
                    raise
        try:
            cls.setUpTestData()
        except Exception:
            cls._rollback_atomics(cls.cls_atomics)
            raise
    @classmethod
    def tearDownClass(cls):
        if connections_support_transactions():
            # Roll back everything done in setUpClass/setUpTestData.
            cls._rollback_atomics(cls.cls_atomics)
            for conn in connections.all():
                conn.close()
        super(TestCase, cls).tearDownClass()
    @classmethod
    def setUpTestData(cls):
        """Load initial data for the TestCase"""
        pass
    def _should_reload_connections(self):
        # With transactions, the connection state survives the rollback, so
        # there's no need to reconnect after every test.
        if connections_support_transactions():
            return False
        return super(TestCase, self)._should_reload_connections()
    def _fixture_setup(self):
        if not connections_support_transactions():
            # If the backend does not support transactions, we should reload
            # class data before each test
            self.setUpTestData()
            return super(TestCase, self)._fixture_setup()
        # Sequences are never touched because everything is rolled back.
        assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
        self.atomics = self._enter_atomics()
    def _fixture_teardown(self):
        if not connections_support_transactions():
            return super(TestCase, self)._fixture_teardown()
        try:
            # Verify deferred constraints before rolling back, so violations
            # surface as test failures rather than being silently discarded.
            for db_name in reversed(self._databases_names()):
                if self._should_check_constraints(connections[db_name]):
                    connections[db_name].check_constraints()
        finally:
            self._rollback_atomics(self.atomics)
    def _should_check_constraints(self, connection):
        # Only worth checking when constraints were deferred and the
        # connection is still in a usable, non-rollback state.
        return (
            connection.features.can_defer_constraint_checks and
            not connection.needs_rollback and connection.is_usable()
        )
class CheckCondition(object):
    """
    Descriptor that re-evaluates a zero-argument condition callable every
    time the attribute is read, deferring skip checks (e.g. database
    feature detection) until they're actually needed.
    """
    def __init__(self, cond_func):
        # Stored callable; evaluated lazily on each attribute access.
        self.cond_func = cond_func
    def __get__(self, instance, cls=None):
        return self.cond_func()
def _deferredSkip(condition, reason):
    """
    Like unittest.skipIf, but ``condition`` is a callable evaluated lazily
    (via CheckCondition) instead of at decoration time.
    """
    def decorator(test_func):
        if isinstance(test_func, type) and issubclass(test_func, unittest.TestCase):
            # A TestCase class is decorated: mark the class itself.
            test_item = test_func
        else:
            # A test function/method is decorated: wrap it so the condition
            # is also checked at call time.
            @wraps(test_func)
            def skip_wrapper(*args, **kwargs):
                if condition():
                    raise unittest.SkipTest(reason)
                return test_func(*args, **kwargs)
            test_item = skip_wrapper
        # CheckCondition defers evaluation until unittest reads the flag.
        test_item.__unittest_skip__ = CheckCondition(condition)
        test_item.__unittest_skip_why__ = reason
        return test_item
    return decorator
def skipIfDBFeature(*features):
    """
    Skip a test if the database has at least one of the named features.
    """
    def has_any_feature():
        # Evaluated lazily, once the database connection is configured.
        return any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        has_any_feature,
        "Database has feature(s) %s" % ", ".join(features)
    )
def skipUnlessDBFeature(*features):
    """
    Skip a test unless the database has all the named features.
    """
    def lacks_some_feature():
        # Evaluated lazily, once the database connection is configured.
        return not all(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        lacks_some_feature,
        "Database doesn't support feature(s): %s" % ", ".join(features)
    )
def skipUnlessAnyDBFeature(*features):
    """
    Skip a test unless the database has at least one of the named features.
    """
    def lacks_all_features():
        # Evaluated lazily, once the database connection is configured.
        return not any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        lacks_all_features,
        "Database doesn't support any of the feature(s): %s" % ", ".join(features)
    )
class QuietWSGIRequestHandler(WSGIRequestHandler):
    """
    A WSGIRequestHandler that silences per-request log lines so they don't
    clutter the test run's output.
    """
    def log_message(*args):
        # Deliberately swallow every request log line.
        pass
class FSFilesHandler(WSGIHandler):
    """
    WSGI middleware that intercepts calls to a directory, as defined by one of
    the *_ROOT settings, and serves those files, publishing them under *_URL.
    """
    def __init__(self, application):
        # Wrapped WSGI application to fall back to for non-file requests.
        self.application = application
        # Parsed (scheme, netloc, path, ...) of the subclass's base URL.
        self.base_url = urlparse(self.get_base_url())
        super(FSFilesHandler, self).__init__()
    def _should_handle(self, path):
        """
        Checks if the path should be handled. Ignores the path if:
        * the host is provided as part of the base_url
        * the request's path isn't under the media path (or equal)
        """
        # base_url[2] is the path component; base_url[1] is the netloc.
        return path.startswith(self.base_url[2]) and not self.base_url[1]
    def file_path(self, url):
        """
        Returns the relative path to the file on disk for the given URL.
        """
        # Strip the base URL prefix, then convert to a filesystem path.
        relative_url = url[len(self.base_url[2]):]
        return url2pathname(relative_url)
    def get_response(self, request):
        from django.http import Http404
        if self._should_handle(request.path):
            try:
                return self.serve(request)
            except Http404:
                # Fall through to the wrapped application on missing files.
                pass
        return super(FSFilesHandler, self).get_response(request)
    def serve(self, request):
        os_rel_path = self.file_path(request.path)
        os_rel_path = posixpath.normpath(unquote(os_rel_path))
        # Emulate behavior of django.contrib.staticfiles.views.serve() when it
        # invokes staticfiles' finders functionality.
        # TODO: Modify if/when that internal API is refactored
        final_rel_path = os_rel_path.replace('\\', '/').lstrip('/')
        return serve(request, final_rel_path, document_root=self.get_base_dir())
    def __call__(self, environ, start_response):
        # Delegate to the wrapped application unless the path is ours.
        if not self._should_handle(get_path_info(environ)):
            return self.application(environ, start_response)
        return super(FSFilesHandler, self).__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
    """
    Serves files found under STATIC_ROOT at the STATIC_URL prefix during
    tests. Private helper meant solely for LiveServerThread.
    """
    def get_base_dir(self):
        # Filesystem location the files are read from.
        return settings.STATIC_ROOT
    def get_base_url(self):
        # URL prefix under which the files are published.
        return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
    """
    Serves files found under MEDIA_ROOT at the MEDIA_URL prefix during
    tests. Private helper meant solely for LiveServerThread.
    """
    def get_base_dir(self):
        # Filesystem location the files are read from.
        return settings.MEDIA_ROOT
    def get_base_url(self):
        # URL prefix under which the files are published.
        return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
    """
    Thread for running a live http server while the tests are running.
    """
    def __init__(self, host, possible_ports, static_handler, connections_override=None):
        self.host = host
        # Set once a free port has been bound in run().
        self.port = None
        self.possible_ports = possible_ports
        # Signalled when the server is serving or has failed to start;
        # callers wait on it and then inspect self.error.
        self.is_ready = threading.Event()
        self.error = None
        self.static_handler = static_handler
        self.connections_override = connections_override
        super(LiveServerThread, self).__init__()
    def run(self):
        """
        Sets up the live server and databases, and then loops over handling
        http requests.
        """
        if self.connections_override:
            # Override this thread's database connections with the ones
            # provided by the main thread.
            for alias, conn in self.connections_override.items():
                connections[alias] = conn
        try:
            # Create the handler for serving static and media files
            handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
            # Go through the list of possible ports, hoping that we can find
            # one that is free to use for the WSGI server.
            for index, port in enumerate(self.possible_ports):
                try:
                    self.httpd = self._create_server(port)
                except socket.error as e:
                    if (index + 1 < len(self.possible_ports) and
                            e.errno == errno.EADDRINUSE):
                        # This port is already in use, so we go on and try with
                        # the next one in the list.
                        continue
                    else:
                        # Either none of the given ports are free or the error
                        # is something else than "Address already in use". So
                        # we let that error bubble up to the main thread.
                        raise
                else:
                    # A free port was found.
                    self.port = port
                    break
            self.httpd.set_app(handler)
            self.is_ready.set()
            # Blocks until terminate() calls shutdown().
            self.httpd.serve_forever()
        except Exception as e:
            # Record the failure and unblock any waiter; the main thread
            # checks self.error after is_ready fires.
            self.error = e
            self.is_ready.set()
    def _create_server(self, port):
        # allow_reuse_address=False so binding a busy port raises EADDRINUSE.
        return WSGIServer((self.host, port), QuietWSGIRequestHandler, allow_reuse_address=False)
    def terminate(self):
        # httpd only exists if run() got far enough to bind a port.
        if hasattr(self, 'httpd'):
            # Stop the WSGI server
            self.httpd.shutdown()
            self.httpd.server_close()
class LiveServerTestCase(TransactionTestCase):
    """
    Does basically the same as TransactionTestCase but also launches a live
    http server in a separate thread so that the tests may use another testing
    framework, such as Selenium for example, instead of the built-in dummy
    client.
    Note that it inherits from TransactionTestCase instead of TestCase because
    the threads do not share the same transactions (unless if using in-memory
    sqlite) and each thread needs to commit all their transactions so that the
    other thread can see the changes.
    """
    # Subclasses may swap in a different handler for static assets.
    static_handler = _StaticFilesHandler

    @classproperty
    def live_server_url(cls):
        # Base URL of the running live server, e.g. 'http://localhost:8081'.
        return 'http://%s:%s' % (
            cls.server_thread.host, cls.server_thread.port)

    @classmethod
    def setUpClass(cls):
        super(LiveServerTestCase, cls).setUpClass()
        connections_override = {}
        for conn in connections.all():
            # If using in-memory sqlite databases, pass the connections to
            # the server thread.
            if conn.vendor == 'sqlite' and conn.is_in_memory_db(conn.settings_dict['NAME']):
                # Explicitly enable thread-shareability for this connection
                conn.allow_thread_sharing = True
                connections_override[conn.alias] = conn
        # Launch the live server's thread
        specified_address = os.environ.get(
            'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081-8179')
        # The specified ports may be of the form '8000-8010,8080,9200-9300'
        # i.e. a comma-separated list of ports or ranges of ports, so we break
        # it down into a detailed list of all possible ports.
        possible_ports = []
        try:
            host, port_ranges = specified_address.split(':')
            for port_range in port_ranges.split(','):
                # A port range can be of either form: '8000' or '8000-8010'.
                extremes = list(map(int, port_range.split('-')))
                assert len(extremes) in [1, 2]
                if len(extremes) == 1:
                    # Port range of the form '8000'
                    possible_ports.append(extremes[0])
                else:
                    # Port range of the form '8000-8010'
                    for port in range(extremes[0], extremes[1] + 1):
                        possible_ports.append(port)
        except Exception:
            msg = 'Invalid address ("%s") for live server.' % specified_address
            # Re-raise as ImproperlyConfigured while keeping the traceback.
            six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg), sys.exc_info()[2])
        cls.server_thread = cls._create_server_thread(host, possible_ports, connections_override)
        cls.server_thread.daemon = True
        cls.server_thread.start()
        # Wait for the live server to be ready
        cls.server_thread.is_ready.wait()
        if cls.server_thread.error:
            # Clean up behind ourselves, since tearDownClass won't get called in
            # case of errors.
            cls._tearDownClassInternal()
            raise cls.server_thread.error

    @classmethod
    def _create_server_thread(cls, host, possible_ports, connections_override):
        # Hook for subclasses that need a customized server thread.
        return LiveServerThread(
            host,
            possible_ports,
            cls.static_handler,
            connections_override=connections_override,
        )

    @classmethod
    def _tearDownClassInternal(cls):
        # There may not be a 'server_thread' attribute if setUpClass() for some
        # reasons has raised an exception.
        if hasattr(cls, 'server_thread'):
            # Terminate the live server's thread
            cls.server_thread.terminate()
            cls.server_thread.join()
        # Restore sqlite in-memory database connections' non-shareability
        for conn in connections.all():
            if conn.vendor == 'sqlite' and conn.is_in_memory_db(conn.settings_dict['NAME']):
                conn.allow_thread_sharing = False

    @classmethod
    def tearDownClass(cls):
        cls._tearDownClassInternal()
        super(LiveServerTestCase, cls).tearDownClass()
class SerializeMixin(object):
    """
    Mixin to enforce serialization of TestCases that share a common resource.
    Define a common 'lockfile' for each set of TestCases to serialize. This
    file must exist on the filesystem.
    Place it early in the MRO in order to isolate setUpClass / tearDownClass.
    """
    # Subclasses must point this at an existing file path shared by the group.
    lockfile = None

    @classmethod
    def setUpClass(cls):
        if cls.lockfile is None:
            raise ValueError(
                "{}.lockfile isn't set. Set it to a unique value "
                "in the base class.".format(cls.__name__))
        # Hold an exclusive lock for the lifetime of this test class; other
        # classes sharing the same lockfile block here until it is released.
        cls._lockfile = open(cls.lockfile)
        locks.lock(cls._lockfile, locks.LOCK_EX)
        super(SerializeMixin, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(SerializeMixin, cls).tearDownClass()
        # Closing the file releases the lock.
        cls._lockfile.close()
|
Sciprios/EvolutionaryPartyProblemSimulator | refs/heads/master | PartyProblemSimulator/BooleanEquation/AndNode.py | 2 | from PartyProblemSimulator.BooleanEquation.CombinationNode import CombinationNode
class AndNode(CombinationNode):
    """A combination node that AND's the results of its two children."""

    def evaluate(self, input_vector):
        """Evaluate both children against input_vector and AND the results.

        Short-circuits: the right child is only evaluated when the left
        child's result is truthy, and a falsy left result is returned as-is.
        """
        lhs_value = self._lhs_child.evaluate(input_vector)
        if lhs_value:
            return self._rhs_child.evaluate(input_vector)
        return lhs_value
2uller/LotF | refs/heads/master | App/Lib/site-packages/numpy/distutils/from_template.py | 43 | #!/usr/bin/python
"""
process_file(filename)
takes templated file .xxx.src and produces .xxx file where .xxx
is .pyf .f90 or .f using the following template rules:
'<..>' denotes a template.
All function and subroutine blocks in a source file with names that
contain '<..>' will be replicated according to the rules in '<..>'.
The number of comma-separeted words in '<..>' will determine the number of
replicates.
'<..>' may have two different forms, named and short. For example,
named:
<p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
'd', 's', 'z', and 'c' for each replicate of the block.
<_c> is already defined: <_c=s,d,c,z>
<_t> is already defined: <_t=real,double precision,complex,double complex>
short:
<s,d,c,z>, a short form of the named, useful when no <p> appears inside
a block.
In general, '<..>' contains a comma separated list of arbitrary
expressions. If these expression must contain a comma|leftarrow|rightarrow,
then prepend the comma|leftarrow|rightarrow with a backslash.
If an expression matches '\\<index>' then it will be replaced
by <index>-th expression.
Note that all '<..>' forms in a block must have the same number of
comma-separated entries.
Predefined named template rules:
<prefix=s,d,c,z>
<ftype=real,double precision,complex,double complex>
<ftypereal=real,double precision,\\0,\\1>
<ctype=float,double,complex_float,complex_double>
<ctypereal=float,double,\\0,\\1>
"""
__all__ = ['process_str','process_file']
import os
import sys
import re
routine_start_re = re.compile(r'(\n|\A)(( (\$|\*))|)\s*(subroutine|function)\b',re.I)
routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)',re.I)
function_start_re = re.compile(r'\n (\$|\*)\s*function\b',re.I)
def parse_structure(astr):
    """ Return a list of tuples for each function or subroutine each
    tuple is the start and end of a subroutine or function to be
    expanded.
    """
    spanlist = []
    ind = 0
    while 1:
        m = routine_start_re.search(astr,ind)
        if m is None:
            # No more routine headers: done.
            break
        start = m.start()
        if function_start_re.match(astr,start,m.end()):
            # Continuation-style function header: walk backwards over
            # preceding continuation lines so they are included in the span.
            while 1:
                i = astr.rfind('\n',ind,start)
                if i==-1:
                    break
                start = i
                if astr[i:i+7]!='\n $':
                    break
            start += 1
        m = routine_end_re.search(astr,m.end())
        # Without an explicit end marker the routine runs to the end of astr.
        ind = end = m and m.end()-1 or len(astr)
        spanlist.append((start,end))
    return spanlist
# <name> references, <name=...> definitions, and generic <...> list templates.
template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
list_re = re.compile(r"<\s*((.*?))\s*>")

def find_repl_patterns(astr):
    """Collect all named template definitions (<name=a,b,...>) found in astr.

    Returns a dict mapping each template name to its normalized,
    comma-joined replacement list; unnamed definitions receive a
    generated '__lN' key.
    """
    names = {}
    for raw_name, raw_repl in named_re.findall(astr):
        key = raw_name.strip() or unique_key(names)
        # Protect escaped commas so conv() does not split on them.
        names[key] = conv(raw_repl.replace('\,','@comma@'))
    return names
# Matches a back-reference entry of the form '\<digits>'.
item_re = re.compile(r"\A\\(?P<index>\d+)\Z")

def conv(astr):
    """Normalize a comma-separated template list.

    Strips whitespace from each entry and resolves back-references of the
    form '\\N' to the N-th entry of the (already processed) list.
    """
    items = [piece.strip() for piece in astr.split(',')]
    for idx, item in enumerate(items):
        match = item_re.match(item)
        if match:
            # Entry N may itself have been substituted on an earlier pass.
            items[idx] = items[int(match.group('index'))]
    return ','.join(items)
def unique_key(adict):
    """Return a key of the form '__lN' that is not already present in adict."""
    counter = 1
    candidate = '__l%s' % (counter)
    while candidate in adict:
        counter += 1
        candidate = '__l%s' % (counter)
    return candidate
# Matches a string that is exactly one template name (identifier).
template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')

def expand_sub(substr,names):
    """Replicate the routine block substr once per template replacement.

    *names* maps template names to comma-separated replacement lists;
    definitions found inside the block itself are honored as well.
    """
    # Protect escaped angle brackets from the template regexes.
    substr = substr.replace('\>','@rightarrow@')
    substr = substr.replace('\<','@leftarrow@')
    lnames = find_repl_patterns(substr)
    substr = named_re.sub(r"<\1>",substr) # get rid of definition templates

    def listrepl(mobj):
        # Convert a short-form list template <a,b,...> into a named one,
        # reusing an existing name when the same list was seen before.
        thelist = conv(mobj.group(1).replace('\,','@comma@'))
        if template_name_re.match(thelist):
            return "<%s>" % (thelist)
        name = None
        for key in lnames.keys(): # see if list is already in dictionary
            if lnames[key] == thelist:
                name = key
        if name is None: # this list is not in the dictionary yet
            name = unique_key(lnames)
            lnames[name] = thelist
        return "<%s>" % name

    substr = list_re.sub(listrepl, substr) # convert all lists to named templates
    # newnames are constructed as needed
    numsubs = None
    base_rule = None
    rules = {}
    for r in template_re.findall(substr):
        if r not in rules:
            thelist = lnames.get(r,names.get(r,None))
            if thelist is None:
                raise ValueError('No replicates found for <%s>' % (r))
            if r not in names and not thelist.startswith('_'):
                names[r] = thelist
            rule = [i.replace('@comma@',',') for i in thelist.split(',')]
            num = len(rule)
            if numsubs is None:
                # The first template seen fixes the expected replicate count.
                numsubs = num
                rules[r] = rule
                base_rule = r
            elif num == numsubs:
                rules[r] = rule
            else:
                print("Mismatch in number of replacements (base <%s=%s>)"\
                      " for <%s=%s>. Ignoring." % (base_rule,
                                                   ','.join(rules[base_rule]),
                                                   r,thelist))
    if not rules:
        return substr

    def namerepl(mobj):
        # Names without a rule fall back to themselves on every replicate.
        name = mobj.group(1)
        return rules.get(name,(k+1)*[name])[k]

    newstr = ''
    for k in range(numsubs):
        newstr += template_re.sub(namerepl, substr) + '\n\n'
    # Restore the protected angle brackets.
    newstr = newstr.replace('@rightarrow@','>')
    newstr = newstr.replace('@leftarrow@','<')
    return newstr
def process_str(allstr):
    """Expand every templated routine block found in the source string."""
    newstr = allstr
    writestr = '' #_head # using _head will break free-format files
    struct = parse_structure(newstr)
    oldend = 0
    names = {}
    # Start from the predefined template rules.
    names.update(_special_names)
    for sub in struct:
        # Text between routines is copied through unchanged, but may define
        # named templates used by later routines.
        writestr += newstr[oldend:sub[0]]
        names.update(find_repl_patterns(newstr[oldend:sub[0]]))
        writestr += expand_sub(newstr[sub[0]:sub[1]],names)
        oldend = sub[1]
    writestr += newstr[oldend:]
    return writestr
# Matches an "include '<name>.src'" statement at the start of a line.
include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]",re.I)

def resolve_includes(source):
    """Return the lines of *source*, recursively expanding .src includes."""
    d = os.path.dirname(source)
    fid = open(source)
    lines = []
    for line in fid.readlines():
        m = include_src_re.match(line)
        if m:
            fn = m.group('name')
            if not os.path.isabs(fn):
                # Includes are resolved relative to the including file.
                fn = os.path.join(d,fn)
            if os.path.isfile(fn):
                print ('Including file',fn)
                lines.extend(resolve_includes(fn))
            else:
                # Missing include files are passed through untouched.
                lines.append(line)
        else:
            lines.append(line)
    fid.close()
    return lines
def process_file(source):
    """Read *source* (expanding .src includes) and return the templated output."""
    return process_str(''.join(resolve_includes(source)))
# Template rules that are predefined for every processed file.
_special_names = find_repl_patterns('''
<_c=s,d,c,z>
<_t=real,double precision,complex,double complex>
<prefix=s,d,c,z>
<ftype=real,double precision,complex,double complex>
<ctype=float,double,complex_float,complex_double>
<ftypereal=real,double precision,\\0,\\1>
<ctypereal=float,double,\\0,\\1>
''')

if __name__ == "__main__":
    # Process the file named on the command line (the output file name drops
    # the .src suffix), or act as a stdin->stdout filter with no argument.
    try:
        file = sys.argv[1]
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
    else:
        fid = open(file,'r')
        (base, ext) = os.path.splitext(file)
        newname = base
        outfile = open(newname,'w')
    allstr = fid.read()
    writestr = process_str(allstr)
    outfile.write(writestr)
|
roadmapper/ansible | refs/heads/devel | test/units/modules/network/fortios/test_fortios_wireless_controller_hotspot20_h2qp_conn_capability.py | 21 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_wireless_controller_hotspot20_h2qp_conn_capability
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Patch the Connection class used by the module under test (every test)."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_wireless_controller_hotspot20_h2qp_conn_capability.Connection')
    return connection_class_mock

# NOTE(review): this passes the fixture *function* object (not a mock
# instance) to FortiOSHandler; the tests below patch FortiOSHandler's
# set/delete/schema methods, so the connection is never exercised.
fos_instance = FortiOSHandler(connection_mock)
def test_wireless_controller_hotspot20_h2qp_conn_capability_creation(mocker):
    """state=present with a successful POST reports changed and no error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    # The module must translate underscored option names to dashed API names.
    expected_data = {
        'esp-port': 'closed',
        'ftp-port': 'closed',
        'http-port': 'closed',
        'icmp-port': 'closed',
        'ikev2-port': 'closed',
        'ikev2-xx-port': 'closed',
        'name': 'default_name_9',
        'pptp-vpn-port': 'closed',
        'ssh-port': 'closed',
        'tls-port': 'closed',
        'voip-tcp-port': 'closed',
        'voip-udp-port': 'closed'
    }
    set_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_hotspot20_h2qp_conn_capability_creation_fails(mocker):
    """A failed POST (HTTP 500) is reported as an error with no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    # Even on failure the payload must have been translated and submitted.
    expected_data = {
        'esp-port': 'closed',
        'ftp-port': 'closed',
        'http-port': 'closed',
        'icmp-port': 'closed',
        'ikev2-port': 'closed',
        'ikev2-xx-port': 'closed',
        'name': 'default_name_9',
        'pptp-vpn-port': 'closed',
        'ssh-port': 'closed',
        'tls-port': 'closed',
        'voip-tcp-port': 'closed',
        'voip-udp-port': 'closed'
    }
    set_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_hotspot20_h2qp_conn_capability_removal(mocker):
    """state=absent issues a DELETE and reports changed on success."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    # Deletion goes through delete() keyed by the object's mkey.
    delete_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_hotspot20_h2qp_conn_capability_deletion_fails(mocker):
    """A failed DELETE (HTTP 500) is reported as an error with no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    delete_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_hotspot20_h2qp_conn_capability_idempotent(mocker):
    """An HTTP 404 from set() is treated as a no-op: not an error, not changed."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    expected_data = {
        'esp-port': 'closed',
        'ftp-port': 'closed',
        'http-port': 'closed',
        'icmp-port': 'closed',
        'ikev2-port': 'closed',
        'ikev2-xx-port': 'closed',
        'name': 'default_name_9',
        'pptp-vpn-port': 'closed',
        'ssh-port': 'closed',
        'tls-port': 'closed',
        'voip-tcp-port': 'closed',
        'voip-udp-port': 'closed'
    }
    set_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_wireless_controller_hotspot20_h2qp_conn_capability_filter_foreign_attributes(mocker):
    """Attributes not in the module schema are dropped before calling the API."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'wireless_controller_hotspot20_h2qp_conn_capability': {
            'random_attribute_not_valid': 'tag',
            'esp_port': 'closed',
            'ftp_port': 'closed',
            'http_port': 'closed',
            'icmp_port': 'closed',
            'ikev2_port': 'closed',
            'ikev2_xx_port': 'closed',
            'name': 'default_name_9',
            'pptp_vpn_port': 'closed',
            'ssh_port': 'closed',
            'tls_port': 'closed',
            'voip_tcp_port': 'closed',
            'voip_udp_port': 'closed'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_wireless_controller_hotspot20_h2qp_conn_capability.fortios_wireless_controller_hotspot20(input_data, fos_instance)
    # 'random_attribute_not_valid' must not appear in the submitted payload.
    expected_data = {
        'esp-port': 'closed',
        'ftp-port': 'closed',
        'http-port': 'closed',
        'icmp-port': 'closed',
        'ikev2-port': 'closed',
        'ikev2-xx-port': 'closed',
        'name': 'default_name_9',
        'pptp-vpn-port': 'closed',
        'ssh-port': 'closed',
        'tls-port': 'closed',
        'voip-tcp-port': 'closed',
        'voip-udp-port': 'closed'
    }
    set_method_mock.assert_called_with('wireless-controller.hotspot20', 'h2qp-conn-capability', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
edmorley/django | refs/heads/master | django/conf/locale/da/formats.py | 65 | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'          # e.g. '25.10.2006'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'  # e.g. '25.10.2006 14:30'
FIRST_DAY_OF_WEEK = 1                # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', # '25.10.2006'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M', # '25.10.2006 14:30'
]
# Danish convention: comma for decimals, period for thousands.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
zcm19900902/picasso-graphic | refs/heads/master | tools/gyp/tools/pretty_sln.py | 806 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
    """Print *project* after recursively printing its unbuilt dependencies.

    Each printed project is appended to *built* so it is emitted only once.
    """
    # if all dependencies are done, we can build it, otherwise we try to build the
    # dependency.
    # This is not infinite-recursion proof.
    for dep in deps[project]:
        if dep not in built:
            BuildProject(dep, built, projects, deps)
    print project
    built.append(project)
def ParseSolution(solution_file):
    """Parse a Visual Studio .sln file.

    Returns (projects, dependencies): *projects* maps project name to
    [cleaned path, clsid, original path]; *dependencies* maps project name
    to the sorted list of project names it depends on.
    """
    # All projects, their clsid and paths.
    projects = dict()
    # A list of dependencies associated with a project.
    dependencies = dict()
    # Regular expressions that matches the SLN format.
    # The first line of a project definition.
    begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                                '}"\) = "(.*)", "(.*)", "(.*)"$'))
    # The last line of a project definition.
    end_project = re.compile('^EndProject$')
    # The first line of a dependency list.
    begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
    # The last line of a dependency list.
    end_dep = re.compile('EndProjectSection$')
    # A line describing a dependency.
    dep_line = re.compile(' *({.*}) = ({.*})$')
    in_deps = False
    solution = open(solution_file)
    for line in solution:
        results = begin_project.search(line)
        if results:
            # Hack to remove icu because the diff is too different.
            if results.group(1).find('icu') != -1:
                continue
            # We remove "_gyp" from the names because it helps to diff them.
            current_project = results.group(1).replace('_gyp', '')
            projects[current_project] = [results.group(2).replace('_gyp', ''),
                                         results.group(3),
                                         results.group(2)]
            dependencies[current_project] = []
            continue
        results = end_project.search(line)
        if results:
            current_project = None
            continue
        results = begin_dep.search(line)
        if results:
            in_deps = True
            continue
        results = end_dep.search(line)
        if results:
            in_deps = False
            continue
        results = dep_line.search(line)
        if results and in_deps and current_project:
            dependencies[current_project].append(results.group(1))
            continue
    # Change all dependencies clsid to name instead.
    for project in dependencies:
        # For each dependencies in this project
        new_dep_array = []
        for dep in dependencies[project]:
            # Look for the project name matching this cldis
            for project_info in projects:
                if projects[project_info][1] == dep:
                    new_dep_array.append(project_info)
        dependencies[project] = sorted(new_dep_array)
    return (projects, dependencies)
def PrintDependencies(projects, deps):
    """Print each project (alphabetically) with its path and its dependencies."""
    print "---------------------------------------"
    print "Dependencies for all projects"
    print "---------------------------------------"
    print "-- --"
    for (project, dep_list) in sorted(deps.items()):
        print "Project : %s" % project
        print "Path : %s" % projects[project][0]
        if dep_list:
            for dep in dep_list:
                print " - %s" % dep
        print ""
    print "-- --"
def PrintBuildOrder(projects, deps):
    """Print the projects in an order that respects their dependencies."""
    print "---------------------------------------"
    print "Build order "
    print "---------------------------------------"
    print "-- --"
    built = []
    for (project, _) in sorted(deps.items()):
        if project not in built:
            BuildProject(project, built, projects, deps)
    print "-- --"
def PrintVCProj(projects):
    """Pretty-print every project's .vcproj file via the pretty_vcproj tool."""
    for project in projects:
        print "-------------------------------------"
        print "-------------------------------------"
        print project
        print project
        print project
        print "-------------------------------------"
        print "-------------------------------------"
        # Resolve the project path relative to the solution file's directory.
        project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
                                                    projects[project][2]))
        pretty = pretty_vcproj
        argv = [ '',
                 project_path,
                 '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
               ]
        # Pass any extra command-line arguments straight through.
        argv.extend(sys.argv[3:])
        pretty.main(argv)
def main():
    """Entry point: parse the .sln in argv[1] and print a diffable view of it."""
    # check if we have exactly 1 parameter.
    if len(sys.argv) < 2:
        print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
        return 1
    (projects, deps) = ParseSolution(sys.argv[1])
    PrintDependencies(projects, deps)
    PrintBuildOrder(projects, deps)
    if '--recursive' in sys.argv:
        # Also pretty-print every project file referenced by the solution.
        PrintVCProj(projects)
    return 0

if __name__ == '__main__':
    sys.exit(main())
|
UOMx/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/tests/xml/test_inheritance.py | 193 | """
Test that inherited fields work correctly when parsing XML
"""
from nose.tools import assert_equals, assert_in # pylint: disable=no-name-in-module
from xmodule.tests.xml import XModuleXmlImportTest
from xmodule.tests.xml.factories import CourseFactory, SequenceFactory, ProblemFactory, XmlImportFactory
class TestInheritedFieldParsing(XModuleXmlImportTest):
    """
    Test that inherited fields work correctly when parsing XML
    """
    def test_null_string(self):
        # Test that the string inherited fields are passed through 'deserialize_field',
        # which converts the string "null" to the python value None
        root = CourseFactory.build(days_early_for_beta="null")
        sequence = SequenceFactory.build(parent=root)
        ProblemFactory.build(parent=sequence)

        course = self.process_xml(root)
        assert_equals(None, course.days_early_for_beta)

        # The deserialized None must be inherited down the tree unchanged.
        sequence = course.get_children()[0]
        assert_equals(None, sequence.days_early_for_beta)
        problem = sequence.get_children()[0]
        assert_equals(None, problem.days_early_for_beta)

    def test_video_attr(self):
        """
        Test that video's definition_from_xml handles unknown attrs w/o choking
        """
        # Fixes LMS-11491
        root = CourseFactory.build()
        sequence = SequenceFactory.build(parent=root)
        video = XmlImportFactory(
            parent=sequence,
            tag='video',
            attribs={
                'parent_url': 'foo', 'garbage': 'asdlk',
                'download_video': 'true',
            }
        )
        video_block = self.process_xml(video)
        # Unknown attributes are preserved in xml_attributes rather than lost.
        assert_in('garbage', video_block.xml_attributes)
|
ubirch/aws-tools | refs/heads/master | virtual-env/lib/python2.7/site-packages/pip/_vendor/html5lib/serializer/__init__.py | 1731 | from __future__ import absolute_import, division, unicode_literals
from .. import treewalkers
from .htmlserializer import HTMLSerializer
def serialize(input, tree="etree", format="html", encoding=None,
              **serializer_opts):
    """Serialize the parsed tree *input* to markup.

    *tree* names the tree walker to use; only the "html" output format is
    supported, and any other value raises ValueError.
    """
    # XXX: Should we cache this?
    walker = treewalkers.getTreeWalker(tree)
    if format != "html":
        raise ValueError("type must be html")
    serializer = HTMLSerializer(**serializer_opts)
    return serializer.render(walker(input), encoding)
|
wibowo87/doorsale | refs/heads/master | doorsale/payments/migrations/0001_initial.py | 3 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def load_data(apps, schema_editor):
    """Load the payments app's 'initial_data' fixture (run as a migration step)."""
    call_command('loaddata', 'initial_data', verbosity=0, app_label='payments')
class Migration(migrations.Migration):
    # Initial schema for the payments app: card issuers, payment gateways
    # with their key/value settings, and transactions with their params.
    dependencies = [
        ('sales', '0001_initial'),
    ]
    operations = [
        # Reference table of recognised card issuers, keyed by the
        # card-number descriptor (prefix) string.
        migrations.CreateModel(
            name='CardIssuer',
            fields=[
                ('descriptor', models.CharField(max_length=100, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('is_active', models.BooleanField(default=False)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('updated_by', models.CharField(max_length=100)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('created_by', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'payments_card_issuer',
                'verbose_name_plural': 'Card Issuers',
            },
            bases=(models.Model,),
        ),
        # A configured payment gateway account (PayPal / Stripe / Amazon).
        migrations.CreateModel(
            name='Gateway',
            fields=[
                ('name', models.CharField(help_text='Payment processing gateway.', max_length=10, serialize=False, primary_key=True, choices=[('PP', 'PayPal'), ('ST', 'Stripe'), ('AP', 'Amazon Payments')])),
                ('account', models.CharField(help_text='Account name of gateway for reference.', max_length=100)),
                ('is_active', models.BooleanField(default=False, help_text='Gateway active for customer to buy through it.')),
                ('is_sandbox', models.BooleanField(default=False, help_text='Sandbox mode for testing & debugging.')),
                ('accept_credit_card', models.BooleanField(default=False, help_text='Process credit card payments.')),
                ('accept_account', models.BooleanField(default=False, help_text="Process payments with customer's existing accounts on gateway, like PayPal account.")),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_by', models.CharField(max_length=100)),
                ('created_by', models.CharField(max_length=100)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Free-form name/value settings attached to a Gateway
        # (unique per (gateway, name) -- see AlterUniqueTogether below).
        migrations.CreateModel(
            name='GatewayParam',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Gateway settings parameter name.', max_length=250)),
                ('value', models.CharField(help_text='Gateway settings parameter value.', max_length=500)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_by', models.CharField(max_length=100)),
                ('created_by', models.CharField(max_length=100)),
                ('gateway', models.ForeignKey(related_name='params', to='payments.Gateway')),
            ],
            options={
                'db_table': 'payments_gateway_param',
                'verbose_name_plural': 'Gateway Params',
            },
            bases=(models.Model,),
        ),
        # A single payment attempt against an order, through a gateway.
        migrations.CreateModel(
            name='Transaction',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('description', models.CharField(max_length=250)),
                ('error_message', models.CharField(max_length=1000)),
                ('status', models.CharField(max_length=100, choices=[('PE', 'Pending'), ('PR', 'Processing'), ('AP', 'Approved'), ('FA', 'Failed'), ('RE', 'Refunded')])),
                ('currency', models.CharField(max_length=3)),
                ('amount', models.DecimalField(max_digits=9, decimal_places=2)),
                ('refund_amount', models.DecimalField(null=True, max_digits=9, decimal_places=2, blank=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_by', models.CharField(max_length=100)),
                ('created_by', models.CharField(max_length=100)),
                ('gateway', models.ForeignKey(to='payments.Gateway')),
                ('order', models.ForeignKey(to='sales.Order')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Free-form name/value data attached to a Transaction
        # (e.g. gateway reference ids); unique per (transaction, name).
        migrations.CreateModel(
            name='TransactionParam',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Transaction parameter name.', max_length=100)),
                ('value', models.CharField(help_text='Transaction parameter value.', max_length=250)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('created_by', models.CharField(max_length=100)),
                ('transaction', models.ForeignKey(related_name='params', to='payments.Transaction')),
            ],
            options={
                'db_table': 'payments_transaction_param',
                'verbose_name_plural': 'Transaction Params',
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='transactionparam',
            unique_together=set([('transaction', 'name')]),
        ),
        migrations.AlterUniqueTogether(
            name='gatewayparam',
            unique_together=set([('gateway', 'name')]),
        ),
        # Load the initial fixture data after the schema is in place.
        migrations.RunPython(load_data),
    ]
|
rrrrrr8/vnpy | refs/heads/master | vnpy/api/ctp/pyscript/generate_md_functions.py | 25 | # encoding: UTF-8
__author__ = 'CHENXY'
from string import join
from ctp_struct import structDict
def processCallBack(line):
    """Parse one `virtual void OnXxx(...)` callback declaration from the
    CTP C++ header and generate all binding fragments for it (task body,
    process body, header declarations, Boost.Python wrapper)."""
    orignalLine = line
    line = line.replace('\tvirtual void ', '') # strip the leading "virtual void "
    line = line.replace('{};\n', '') # strip the trailing "{};"
    content = line.split('(')
    cbName = content[0] # callback function name
    cbArgs = content[1] # callback parameter text
    if cbArgs[-1] == ' ':
        cbArgs = cbArgs.replace(') ', '')
    else:
        cbArgs = cbArgs.replace(')', '')
    cbArgsList = cbArgs.split(', ') # split the parameters into a list
    cbArgsTypeList = []
    cbArgsValueList = []
    for arg in cbArgsList: # process each parameter
        content = arg.split(' ')
        if len(content) > 1:
            cbArgsTypeList.append(content[0]) # parameter types
            cbArgsValueList.append(content[1]) # parameter names
    createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine)
    createProcess(cbName, cbArgsTypeList, cbArgsValueList)
    # emit the "process..." declaration for the generated .h file
    process_line = 'void process' + cbName[2:] + '(Task task);\n'
    fheaderprocess.write(process_line)
    fheaderprocess.write('\n')
    # emit the "on..." virtual declaration for the generated .h file,
    # with a signature chosen by the callback category
    if 'OnRspError' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict error, int id, bool last) {};\n'
    elif 'OnRsp' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last) {};\n'
    elif 'OnRtn' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data) {};\n'
    elif 'OnErrRtn' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error) {};\n'
    else:
        on_line = ''
    fheaderon.write(on_line)
    fheaderon.write('\n')
    # emit the Boost.Python wrapper override
    createWrap(cbName)
#----------------------------------------------------------------------
def createWrap(cbName):
    """Generate the Boost.Python wrapper override for one callback.

    Writes a C++ virtual method into the wrapper file that forwards the
    callback to the Python-side "on..." override, printing (rather than
    propagating) any Python error raised by the override.
    """
    # Choose the method signature and the get_override invocation
    # according to the callback category.
    if 'OnRspError' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict error, int id, bool last)\n'
        override_line = '("on' + cbName[2:] + '")(error, id, last);\n'
    elif 'OnRsp' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last)\n'
        override_line = '("on' + cbName[2:] + '")(data, error, id, last);\n'
    elif 'OnRtn' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data)\n'
        override_line = '("on' + cbName[2:] + '")(data);\n'
    elif 'OnErrRtn' in cbName:
        on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error)\n'
        override_line = '("on' + cbName[2:] + '")(data, error);\n'
    else:
        on_line = ''
    # Bug fix: the original tested `on_line is not ''` -- an identity
    # comparison against a str literal, whose result is implementation
    # dependent (and a SyntaxWarning on modern CPython). Test emptiness.
    if on_line:
        fwrap.write(on_line)
        fwrap.write('{\n')
        fwrap.write('\ttry\n')
        fwrap.write('\t{\n')
        fwrap.write('\t\tthis->get_override' + override_line)
        fwrap.write('\t}\n')
        fwrap.write('\tcatch (error_already_set const &)\n')
        fwrap.write('\t{\n')
        fwrap.write('\t\tPyErr_Print();\n')
        fwrap.write('\t}\n')
        fwrap.write('};\n')
        fwrap.write('\n')
def createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine):
    # Generate the C++ callback body that packs the callback arguments
    # into a Task object and pushes it onto the API's task queue; also
    # emits the matching #define constant and the switch-case dispatch.
    funcline = orignalLine.replace('\tvirtual void ', 'void ' + apiName + '::')
    funcline = funcline.replace('{};', '')
    ftask.write(funcline)
    ftask.write('{\n')
    ftask.write("\tTask task = Task();\n")
    ftask.write("\ttask.task_name = " + cbName.upper() + ";\n")
    # emit the #define constant for this callback's task name
    global define_count
    fdefine.write("#define " + cbName.upper() + ' ' + str(define_count) + '\n')
    define_count = define_count + 1
    # emit the switch-case fragment that dispatches this task
    fswitch.write("case " + cbName.upper() + ':\n')
    fswitch.write("{\n")
    fswitch.write("\tthis->" + cbName.replace('On', 'process') + '(task);\n')
    fswitch.write("\tbreak;\n")
    fswitch.write("}\n")
    fswitch.write("\n")
    for i, type_ in enumerate(cbArgsTypeList):
        if type_ == 'int':
            ftask.write("\ttask.task_id = " + cbArgsValueList[i] + ";\n")
        elif type_ == 'bool':
            ftask.write("\ttask.task_last = " + cbArgsValueList[i] + ";\n")
        elif 'RspInfoField' in type_:
            # error-info pointer: copy it when non-null, otherwise store a
            # zeroed CThostFtdcRspInfoField so task_error is always set
            ftask.write("\n")
            ftask.write("\tif (pRspInfo)\n")
            ftask.write("\t{\n")
            ftask.write("\t\ttask.task_error = " + cbArgsValueList[i] + ";\n")
            ftask.write("\t}\n")
            ftask.write("\telse\n")
            ftask.write("\t{\n")
            ftask.write("\t\tCThostFtdcRspInfoField empty_error = CThostFtdcRspInfoField();\n")
            ftask.write("\t\tmemset(&empty_error, 0, sizeof(empty_error));\n")
            ftask.write("\t\ttask.task_error = empty_error;\n")
            ftask.write("\t}\n")
        else:
            # data pointer: cbArgsValueList[i][1:] drops the leading '*'
            # to test the raw pointer; store a zeroed struct when null
            ftask.write("\n")
            ftask.write("\tif (" + cbArgsValueList[i][1:] + ")\n")
            ftask.write("\t{\n")
            ftask.write("\t\ttask.task_data = " + cbArgsValueList[i] + ";\n")
            ftask.write("\t}\n")
            ftask.write("\telse\n")
            ftask.write("\t{\n")
            ftask.write("\t\t" + type_ + " empty_data = " + type_ + "();\n")
            ftask.write("\t\tmemset(&empty_data, 0, sizeof(empty_data));\n")
            ftask.write("\t\ttask.task_data = empty_data;\n")
            ftask.write("\t}\n")
    ftask.write("\tthis->task_queue.push(task);\n")
    ftask.write("};\n")
    ftask.write("\n")
def createProcess(cbName, cbArgsTypeList, cbArgsValueList):
    """Generate the C++ "process..." method that pops a Task off the queue,
    unpacks its structs into Python dicts, and invokes the Python-side
    "on..." callback."""
    fprocess.write("void " + apiName + '::' + cbName.replace('On', 'process') + '(Task task)' + "\n")
    fprocess.write("{\n")
    fprocess.write("\tPyLock lock;\n")
    onArgsList = []
    for i, type_ in enumerate(cbArgsTypeList):
        if 'RspInfoField' in type_:
            # unpack the error struct field-by-field into dict "error"
            fprocess.write("\t" + type_ + ' task_error = any_cast<' + type_ + '>(task.task_error);\n')
            fprocess.write("\t" + "dict error;\n")
            struct = structDict[type_]
            for key in struct.keys():
                fprocess.write("\t" + 'error["' + key + '"] = task_error.' + key + ';\n')
            fprocess.write("\n")
            onArgsList.append('error')
        elif type_ in structDict:
            # unpack the data struct field-by-field into dict "data"
            fprocess.write("\t" + type_ + ' task_data = any_cast<' + type_ + '>(task.task_data);\n')
            fprocess.write("\t" + "dict data;\n")
            struct = structDict[type_]
            for key in struct.keys():
                fprocess.write("\t" + 'data["' + key + '"] = task_data.' + key + ';\n')
            fprocess.write("\n")
            onArgsList.append('data')
        elif type_ == 'bool':
            onArgsList.append('task.task_last')
        elif type_ == 'int':
            onArgsList.append('task.task_id')
    # Use str.join instead of the Python-2-only string.join helper;
    # the generated output is byte-identical.
    onArgs = ', '.join(onArgsList)
    fprocess.write('\tthis->' + cbName.replace('On', 'on') + '(' + onArgs + ');\n')
    fprocess.write("};\n")
    fprocess.write("\n")
def processFunction(line):
    """Parse one `virtual int ReqXxx(...)` declaration from the C++ header
    and generate its request-method binding and header declaration."""
    # Strip the declaration down to "Name(type arg, ..." form.
    stripped = line.replace('\tvirtual int ', '').replace(') = 0;\n', '')
    pieces = stripped.split('(')
    name = pieces[0]          # request function name
    arg_text = pieces[1].replace(')', '')
    # Split "type value" pairs into parallel lists.
    arg_types = []
    arg_values = []
    for fragment in arg_text.split(', '):
        words = fragment.split(' ')
        if len(words) > 1:
            arg_types.append(words[0])
            arg_values.append(words[1])
    # Only functions whose first parameter is a known CTP struct get a
    # generated request body.
    if arg_types and arg_types[0] in structDict:
        createFunction(name, arg_types, arg_values)
    # emit the "req..." declaration for the generated .h file
    if 'Req' in name:
        fheaderfunction.write('int req' + name[3:] + '(dict req, int nRequestID);\n')
        fheaderfunction.write('\n')
def createFunction(fcName, fcArgsTypeList, fcArgsValueList):
    """Generate the C++ reqXXX method that converts a Python dict into the
    corresponding CTP request struct and forwards it to the native API."""
    type_ = fcArgsTypeList[0]
    struct = structDict[type_]
    ffunction.write('int MdApi::req' + fcName[3:] + '(dict req, int nRequestID)\n')
    ffunction.write('{\n')
    ffunction.write('\t' + type_ + ' myreq = ' + type_ + '();\n')
    ffunction.write('\tmemset(&myreq, 0, sizeof(myreq));\n')
    for key, value in struct.items():
        if value == 'string':
            line = '\tgetStr(req, "' + key + '", myreq.' + key + ');\n'
        elif value == 'char':
            line = '\tgetChar(req, "' + key + '", &myreq.' + key + ');\n'
        elif value == 'int':
            line = '\tgetInt(req, "' + key + '", &myreq.' + key + ');\n'
        elif value == 'double':
            line = '\tgetDouble(req, "' + key + '", &myreq.' + key + ');\n'
        else:
            # Bug fix: an unmapped field type used to re-write the stale
            # `line` from the previous iteration (or raise NameError on
            # the first field); skip such fields instead.
            continue
        ffunction.write(line)
    ffunction.write('\tint i = this->api->' + fcName + '(&myreq, nRequestID);\n')
    ffunction.write('\treturn i;\n')
    ffunction.write('};\n')
    ffunction.write('\n')
#########################################################
# Driver: scan the CTP market-data API header and emit the generated C++
# source fragments used to build the Python binding.
apiName = 'MdApi'
fcpp = open('ThostFtdcMdApi.h', 'r')  # input: CTP market-data C++ header
ftask = open('ctp_md_task.cpp', 'w')  # callback -> task-queue bodies
fprocess = open('ctp_md_process.cpp', 'w')  # task -> python dict bodies
ffunction = open('ctp_md_function.cpp', 'w')  # active request bodies
fdefine = open('ctp_md_define.cpp', 'w')  # #define task-name constants
fswitch = open('ctp_md_switch.cpp', 'w')  # task dispatch switch cases
fheaderprocess = open('ctp_md_header_process.h', 'w')  # process... declarations
fheaderon = open('ctp_md_header_on.h', 'w')  # on... virtual declarations
fheaderfunction = open('ctp_md_header_function.h', 'w')  # req... declarations
fwrap = open('ctp_md_wrap.cpp', 'w')  # Boost.Python wrapper overrides
define_count = 1  # next task-name constant value (incremented by createTask)
for line in fcpp:
    if "\tvirtual void On" in line:
        processCallBack(line)  # callback declaration
    elif "\tvirtual int" in line:
        processFunction(line)  # active request declaration
fcpp.close()
ftask.close()
fprocess.close()
ffunction.close()
fswitch.close()
fdefine.close()
fheaderprocess.close()
fheaderon.close()
fheaderfunction.close()
fwrap.close()
bohlian/erpnext | refs/heads/develop | erpnext/manufacturing/__init__.py | 12133432 | |
ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/nbformat/v2/tests/__init__.py | 12133432 | |
Lokke/eden | refs/heads/master | modules/ClimateDataPortal/DSL/Stringification.py | 53 |
from . import *
def Months__str__(month_filter):
    """Render a Months filter as its constructor expression."""
    names = [
        Months.sequence[number + 1]
        for number in month_filter.month_numbers
    ]
    return "Months(%s)" % ", ".join(names)
Months.__str__ = Months__str__
def From__str__(from_date):
    """Render a From date bound as its constructor expression."""
    parts = [from_date.year]
    # month and day are each optional and independently included
    for component in (from_date.month, from_date.day):
        if component is not None:
            parts.append(component)
    return "From(%s)" % ", ".join(map(str, parts))
From.__str__ = From__str__
def To__str__(to_date):
    """Render a To date bound as its constructor expression."""
    parts = [to_date.year]
    # month and day are each optional and independently included
    for component in (to_date.month, to_date.day):
        if component is not None:
            parts.append(component)
    return "To(%s)" % ", ".join(map(str, parts))
To.__str__ = To__str__
def Number__str__(number):
    """Render a Number as "<value> <units>"."""
    return " ".join((str(number.value), str(number.units)))
Number.__str__ = Number__str__
def AggregationNode__str__(aggregation):
    """Render an aggregation node as TypeName("dataset", spec1, spec2, ...)."""
    spec_text = ", ".join(str(item) for item in aggregation.specification)
    return '%s("%s", %s)' % (
        type(aggregation).__name__,
        aggregation.dataset_name,
        spec_text,
    )
AggregationNode.__str__ = AggregationNode__str__
def BinaryOperator__str__(binop):
    """Render a binary operation as "<left> <op> <right>"."""
    return " ".join((str(binop.left), binop.op, str(binop.right)))
BinaryOperator.__str__ = BinaryOperator__str__
|
Loudr/asana-hub | refs/heads/master | asana_hub/actions/__init__.py | 1 | """actions module contains all actions."""
import os
import glob
modules = glob.glob(os.path.dirname(__file__)+"/*.py")
__all__ = [ os.path.basename(f)[:-3] for f in modules]
|
bob123bob/Sick-Beard | refs/heads/development | cherrypy/_cptree.py | 45 | """CherryPy Application and Tree objects."""
import os
import cherrypy
from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools
from cherrypy.lib import httputil
class Application(object):
    """A CherryPy Application.
    Servers and gateways should not instantiate Request objects directly.
    Instead, they should ask an Application object for a request object.
    An instance of this class may also be used as a WSGI callable
    (WSGI application object) for itself.
    """
    __metaclass__ = cherrypy._AttributeDocstrings
    root = None
    root__doc = """
    The top-most container of page handlers for this app. Handlers should
    be arranged in a hierarchy of attributes, matching the expected URI
    hierarchy; the default dispatcher then searches this hierarchy for a
    matching handler. When using a dispatcher other than the default,
    this value may be None."""
    config = {}
    config__doc = """
    A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
    of {key: value} pairs."""
    namespaces = _cpconfig.NamespaceSet()
    toolboxes = {'tools': cherrypy.tools}
    log = None
    log__doc = """A LogManager instance. See _cplogging."""
    wsgiapp = None
    wsgiapp__doc = """A CPWSGIApp instance. See _cpwsgi."""
    request_class = _cprequest.Request
    response_class = _cprequest.Response
    relative_urls = False
    def __init__(self, root, script_name="", config=None):
        # Per-app logger keyed by this instance's id, so log settings can
        # be applied through the "log" config namespace registered below.
        self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root)
        self.root = root
        self.script_name = script_name
        self.wsgiapp = _cpwsgi.CPWSGIApp(self)
        # Copy the class-level namespace set so this instance's handlers
        # do not mutate the shared class default.
        self.namespaces = self.namespaces.copy()
        self.namespaces["log"] = lambda k, v: setattr(self.log, k, v)
        self.namespaces["wsgi"] = self.wsgiapp.namespace_handler
        self.config = self.__class__.config.copy()
        if config:
            self.merge(config)
    def __repr__(self):
        return "%s.%s(%r, %r)" % (self.__module__, self.__class__.__name__,
                                  self.root, self.script_name)
    script_name__doc = """
    The URI "mount point" for this app. A mount point is that portion of
    the URI which is constant for all URIs that are serviced by this
    application; it does not include scheme, host, or proxy ("virtual host")
    portions of the URI.
    For example, if script_name is "/my/cool/app", then the URL
    "http://www.example.com/my/cool/app/page1" might be handled by a
    "page1" method on the root object.
    The value of script_name MUST NOT end in a slash. If the script_name
    refers to the root of the URI, it MUST be an empty string (not "/").
    If script_name is explicitly set to None, then the script_name will be
    provided for each call from request.wsgi_environ['SCRIPT_NAME'].
    """
    def _get_script_name(self):
        if self._script_name is None:
            # None signals that the script name should be pulled from WSGI environ.
            return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip("/")
        return self._script_name
    def _set_script_name(self, value):
        # Normalize: strip any trailing slash (empty string maps root).
        if value:
            value = value.rstrip("/")
        self._script_name = value
    script_name = property(fget=_get_script_name, fset=_set_script_name,
                           doc=script_name__doc)
    def merge(self, config):
        """Merge the given config into self.config."""
        _cpconfig.merge(self.config, config)
        # Handle namespaces specified in config.
        self.namespaces(self.config.get("/", {}))
    def find_config(self, path, key, default=None):
        """Return the most-specific value for key along path, or default."""
        # Walk from the full path up toward "/" one segment at a time,
        # returning the first node whose config defines `key`.
        trail = path or "/"
        while trail:
            nodeconf = self.config.get(trail, {})
            if key in nodeconf:
                return nodeconf[key]
            lastslash = trail.rfind("/")
            if lastslash == -1:
                break
            elif lastslash == 0 and trail != "/":
                trail = "/"
            else:
                trail = trail[:lastslash]
        return default
    def get_serving(self, local, remote, scheme, sproto):
        """Create and return a Request and Response object."""
        req = self.request_class(local, remote, scheme, sproto)
        req.app = self
        # Expose every registered toolbox (e.g. "tools") to the request.
        for name, toolbox in self.toolboxes.items():
            req.namespaces[name] = toolbox
        resp = self.response_class()
        # Bind the pair to this thread's serving slot and notify plugins.
        cherrypy.serving.load(req, resp)
        cherrypy.engine.timeout_monitor.acquire()
        cherrypy.engine.publish('acquire_thread')
        return req, resp
    def release_serving(self):
        """Release the current serving (request and response)."""
        req = cherrypy.serving.request
        cherrypy.engine.timeout_monitor.release()
        try:
            req.close()
        except:
            # Log (severity 40 = ERROR) but never propagate close errors.
            cherrypy.log(traceback=True, severity=40)
        cherrypy.serving.clear()
    def __call__(self, environ, start_response):
        # WSGI entry point: delegate to the wrapped CPWSGIApp.
        return self.wsgiapp(environ, start_response)
class Tree(object):
    """A registry of CherryPy applications, mounted at diverse points.
    An instance of this class may also be used as a WSGI callable
    (WSGI application object), in which case it dispatches to all
    mounted apps.
    """
    apps = {}
    apps__doc = """
    A dict of the form {script name: application}, where "script name"
    is a string declaring the URI mount point (no trailing slash), and
    "application" is an instance of cherrypy.Application (or an arbitrary
    WSGI callable if you happen to be using a WSGI server)."""
    def __init__(self):
        # Per-instance registry (shadows the class-level default dict).
        self.apps = {}
    def mount(self, root, script_name="", config=None):
        """Mount a new app from a root object, script_name, and config.
        root: an instance of a "controller class" (a collection of page
        handler methods) which represents the root of the application.
        This may also be an Application instance, or None if using
        a dispatcher other than the default.
        script_name: a string containing the "mount point" of the application.
        This should start with a slash, and be the path portion of the
        URL at which to mount the given root. For example, if root.index()
        will handle requests to "http://www.example.com:8080/dept/app1/",
        then the script_name argument would be "/dept/app1".
        It MUST NOT end in a slash. If the script_name refers to the
        root of the URI, it MUST be an empty string (not "/").
        config: a file or dict containing application config.
        """
        if script_name is None:
            raise TypeError(
                "The 'script_name' argument may not be None. Application "
                "objects may, however, possess a script_name of None (in "
                "order to inpect the WSGI environ for SCRIPT_NAME upon each "
                "request). You cannot mount such Applications on this Tree; "
                "you must pass them to a WSGI server interface directly.")
        # Next line both 1) strips trailing slash and 2) maps "/" -> "".
        script_name = script_name.rstrip("/")
        if isinstance(root, Application):
            # Re-mounting an existing Application: the script names must agree.
            app = root
            if script_name != "" and script_name != app.script_name:
                raise ValueError("Cannot specify a different script name and "
                                 "pass an Application instance to cherrypy.mount")
            script_name = app.script_name
        else:
            app = Application(root, script_name)
        # If mounted at "", add favicon.ico
        if (script_name == "" and root is not None
            and not hasattr(root, "favicon_ico")):
            favicon = os.path.join(os.getcwd(), os.path.dirname(__file__),
                                   "favicon.ico")
            root.favicon_ico = tools.staticfile.handler(favicon)
        if config:
            app.merge(config)
        self.apps[script_name] = app
        return app
    def graft(self, wsgi_callable, script_name=""):
        """Mount a wsgi callable at the given script_name."""
        # Next line both 1) strips trailing slash and 2) maps "/" -> "".
        script_name = script_name.rstrip("/")
        self.apps[script_name] = wsgi_callable
    def script_name(self, path=None):
        """The script_name of the app at the given path, or None.
        If path is None, cherrypy.request is used.
        """
        if path is None:
            try:
                request = cherrypy.serving.request
                path = httputil.urljoin(request.script_name,
                                        request.path_info)
            except AttributeError:
                return None
        # Find the longest registered mount point that prefixes `path`.
        while True:
            if path in self.apps:
                return path
            if path == "":
                return None
            # Move one node up the tree and try again.
            path = path[:path.rfind("/")]
    def __call__(self, environ, start_response):
        # If you're calling this, then you're probably setting SCRIPT_NAME
        # to '' (some WSGI servers always set SCRIPT_NAME to '').
        # Try to look up the app using the full path.
        env1x = environ
        if environ.get(u'wsgi.version') == (u'u', 0):
            env1x = _cpwsgi.downgrade_wsgi_ux_to_1x(environ)
        path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''),
                                env1x.get('PATH_INFO', ''))
        sn = self.script_name(path or "/")
        if sn is None:
            # No app mounted at (or above) this path.
            start_response('404 Not Found', [])
            return []
        app = self.apps[sn]
        # Correct the SCRIPT_NAME and PATH_INFO environ entries.
        environ = environ.copy()
        if environ.get(u'wsgi.version') == (u'u', 0):
            # Python 2/WSGI u.0: all strings MUST be of type unicode
            enc = environ[u'wsgi.url_encoding']
            environ[u'SCRIPT_NAME'] = sn.decode(enc)
            environ[u'PATH_INFO'] = path[len(sn.rstrip("/")):].decode(enc)
        else:
            # Python 2/WSGI 1.x: all strings MUST be of type str
            environ['SCRIPT_NAME'] = sn
            environ['PATH_INFO'] = path[len(sn.rstrip("/")):]
        return app(environ, start_response)
|
htzy/bigfour | refs/heads/master | cms/djangoapps/xblock_config/migrations/__init__.py | 12133432 | |
pombreda/swarming | refs/heads/master | appengine/components/components/auth/proto/__init__.py | 12133432 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.