| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, nullable ⌀) |
|---|---|---|---|---|
CrazyGuo/bokeh | refs/heads/master | tests/travis/__init__.py | 12133432 | |
sushramesh/lwc | refs/heads/master | lib/python2.7/site-packages/django/core/servers/__init__.py | 12133432 | |
slohse/ansible | refs/heads/devel | test/units/module_utils/gcp/test_utils.py | 127 |
# -*- coding: utf-8 -*-
# (c) 2016, Tom Melendez <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from ansible.compat.tests import mock, unittest
from ansible.module_utils.gcp import check_min_pkg_version, GCPUtils, GCPInvalidURLError
def build_distribution(version):
obj = mock.MagicMock()
obj.version = '0.5.0'
return obj
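# NOTE (added comment): the mock above ignores ``version`` and always reports
# 0.5.0, so the assertions in test_check_minimum_pkg_version below exercise the
# version comparison inside check_min_pkg_version rather than any real package.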
class GCPUtilsTestCase(unittest.TestCase):
params_dict = {
'url_map_name': 'foo_url_map_name',
'description': 'foo_url_map description',
'host_rules': [
{
'description': 'host rules description',
'hosts': [
'www.example.com',
'www2.example.com'
],
'path_matcher': 'host_rules_path_matcher'
}
],
'path_matchers': [
{
'name': 'path_matcher_one',
'description': 'path matcher one',
'defaultService': 'bes-pathmatcher-one-default',
'pathRules': [
{
'service': 'my-one-bes',
'paths': [
'/',
'/aboutus'
]
}
]
},
{
'name': 'path_matcher_two',
'description': 'path matcher two',
'defaultService': 'bes-pathmatcher-two-default',
'pathRules': [
{
'service': 'my-two-bes',
'paths': [
'/webapp',
'/graphs'
]
}
]
}
]
}
@mock.patch("pkg_resources.get_distribution", side_effect=build_distribution)
def test_check_minimum_pkg_version(self, mockobj):
self.assertTrue(check_min_pkg_version('foobar', '0.4.0'))
self.assertTrue(check_min_pkg_version('foobar', '0.5.0'))
self.assertFalse(check_min_pkg_version('foobar', '0.6.0'))
def test_parse_gcp_url(self):
# region, resource, entity, method
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/regions/us-east1/instanceGroupManagers/my-mig/recreateInstances'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertEquals('us-east1', actual['region'])
self.assertEquals('instanceGroupManagers', actual['resource_name'])
self.assertEquals('my-mig', actual['entity_name'])
self.assertEquals('recreateInstances', actual['method_name'])
# zone, resource, entity, method
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/zones/us-east1-c/instanceGroupManagers/my-mig/recreateInstances'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertEquals('us-east1-c', actual['zone'])
self.assertEquals('instanceGroupManagers', actual['resource_name'])
self.assertEquals('my-mig', actual['entity_name'])
self.assertEquals('recreateInstances', actual['method_name'])
# global, resource
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertTrue('global' in actual)
self.assertTrue(actual['global'])
self.assertEquals('urlMaps', actual['resource_name'])
# global, resource, entity
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps/my-url-map'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('myproject', actual['project'])
self.assertTrue('global' in actual)
self.assertTrue(actual['global'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('compute', actual['service'])
# global URL, resource, entity, method_name
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/mybackendservice/getHealth'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertTrue('global' in actual)
self.assertTrue(actual['global'])
self.assertEquals('backendServices', actual['resource_name'])
self.assertEquals('mybackendservice', actual['entity_name'])
self.assertEquals('getHealth', actual['method_name'])
# no location in URL
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies/mytargetproxy/setUrlMap'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertFalse('global' in actual)
self.assertEquals('targetHttpProxies', actual['resource_name'])
self.assertEquals('mytargetproxy', actual['entity_name'])
self.assertEquals('setUrlMap', actual['method_name'])
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies/mytargetproxy'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertFalse('global' in actual)
self.assertEquals('targetHttpProxies', actual['resource_name'])
self.assertEquals('mytargetproxy', actual['entity_name'])
input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies'
actual = GCPUtils.parse_gcp_url(input_url)
self.assertEquals('compute', actual['service'])
self.assertEquals('v1', actual['api_version'])
self.assertEquals('myproject', actual['project'])
self.assertFalse('global' in actual)
self.assertEquals('targetHttpProxies', actual['resource_name'])
# test exceptions
no_projects_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject/global/backendServices/mybackendservice/getHealth'
no_resource_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject/global'
no_resource_no_loc_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject'
with self.assertRaises(GCPInvalidURLError) as cm:
GCPUtils.parse_gcp_url(no_projects_input_url)
self.assertTrue(cm.exception, GCPInvalidURLError)
with self.assertRaises(GCPInvalidURLError) as cm:
GCPUtils.parse_gcp_url(no_resource_input_url)
self.assertTrue(cm.exception, GCPInvalidURLError)
with self.assertRaises(GCPInvalidURLError) as cm:
GCPUtils.parse_gcp_url(no_resource_no_loc_input_url)
self.assertTrue(cm.exception, GCPInvalidURLError)
def test_params_to_gcp_dict(self):
expected = {
'description': 'foo_url_map description',
'hostRules': [
{
'description': 'host rules description',
'hosts': [
'www.example.com',
'www2.example.com'
],
'pathMatcher': 'host_rules_path_matcher'
}
],
'name': 'foo_url_map_name',
'pathMatchers': [
{
'defaultService': 'bes-pathmatcher-one-default',
'description': 'path matcher one',
'name': 'path_matcher_one',
'pathRules': [
{
'paths': [
'/',
'/aboutus'
],
'service': 'my-one-bes'
}
]
},
{
'defaultService': 'bes-pathmatcher-two-default',
'description': 'path matcher two',
'name': 'path_matcher_two',
'pathRules': [
{
'paths': [
'/webapp',
'/graphs'
],
'service': 'my-two-bes'
}
]
}
]
}
actual = GCPUtils.params_to_gcp_dict(self.params_dict, 'url_map_name')
self.assertEqual(expected, actual)
def test_get_gcp_resource_from_methodId(self):
input_data = 'compute.urlMaps.list'
actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
self.assertEqual('urlMaps', actual)
input_data = None
actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
self.assertFalse(actual)
input_data = 666
actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
self.assertFalse(actual)
def test_get_entity_name_from_resource_name(self):
input_data = 'urlMaps'
actual = GCPUtils.get_entity_name_from_resource_name(input_data)
self.assertEqual('urlMap', actual)
input_data = 'targetHttpProxies'
actual = GCPUtils.get_entity_name_from_resource_name(input_data)
self.assertEqual('targetHttpProxy', actual)
input_data = 'globalForwardingRules'
actual = GCPUtils.get_entity_name_from_resource_name(input_data)
self.assertEqual('forwardingRule', actual)
input_data = ''
actual = GCPUtils.get_entity_name_from_resource_name(input_data)
self.assertEqual(None, actual)
input_data = 666
actual = GCPUtils.get_entity_name_from_resource_name(input_data)
self.assertEqual(None, actual)
def test_are_params_equal(self):
params1 = {'one': 1}
params2 = {'one': 1}
actual = GCPUtils.are_params_equal(params1, params2)
self.assertTrue(actual)
params1 = {'one': 1}
params2 = {'two': 2}
actual = GCPUtils.are_params_equal(params1, params2)
self.assertFalse(actual)
params1 = {'three': 3, 'two': 2, 'one': 1}
params2 = {'one': 1, 'two': 2, 'three': 3}
actual = GCPUtils.are_params_equal(params1, params2)
self.assertTrue(actual)
params1 = {
"creationTimestamp": "2017-04-21T11:19:20.718-07:00",
"defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/default-backend-service",
"description": "",
"fingerprint": "ickr_pwlZPU=",
"hostRules": [
{
"description": "",
"hosts": [
"*."
],
"pathMatcher": "path-matcher-one"
}
],
"id": "8566395781175047111",
"kind": "compute#urlMap",
"name": "newtesturlmap-foo",
"pathMatchers": [
{
"defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/bes-pathmatcher-one-default",
"description": "path matcher one",
"name": "path-matcher-one",
"pathRules": [
{
"paths": [
"/data",
"/aboutus"
],
"service": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/my-one-bes"
}
]
}
],
"selfLink": "https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps/newtesturlmap-foo"
}
params2 = {
"defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/default-backend-service",
"hostRules": [
{
"description": "",
"hosts": [
"*."
],
"pathMatcher": "path-matcher-one"
}
],
"name": "newtesturlmap-foo",
"pathMatchers": [
{
"defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/bes-pathmatcher-one-default",
"description": "path matcher one",
"name": "path-matcher-one",
"pathRules": [
{
"paths": [
"/data",
"/aboutus"
],
"service": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/my-one-bes"
}
]
}
],
}
# params1 has exclude fields, params2 doesn't. Should be equal
actual = GCPUtils.are_params_equal(params1, params2)
self.assertTrue(actual)
def test_filter_gcp_fields(self):
input_data = {
u'kind': u'compute#httpsHealthCheck',
u'description': u'',
u'timeoutSec': 5,
u'checkIntervalSec': 5,
u'port': 443,
u'healthyThreshold': 2,
u'host': u'',
u'requestPath': u'/',
u'unhealthyThreshold': 2,
u'creationTimestamp': u'2017-05-16T15:09:36.546-07:00',
u'id': u'8727093129334146639',
u'selfLink': u'https://www.googleapis.com/compute/v1/projects/myproject/global/httpsHealthChecks/myhealthcheck',
u'name': u'myhealthcheck'}
expected = {
'name': 'myhealthcheck',
'checkIntervalSec': 5,
'port': 443,
'unhealthyThreshold': 2,
'healthyThreshold': 2,
'host': '',
'timeoutSec': 5,
'requestPath': '/'}
actual = GCPUtils.filter_gcp_fields(input_data)
self.assertEquals(expected, actual)
|
prometheus42/MangaLoader | refs/heads/master | src/plugins/MangaFoxPlugin.py | 1 |
#!/usr/bin/python3
import logging
import urllib
from bs4 import BeautifulSoup
import src.PluginBase as PluginBase
from src.data import Manga, Chapter, Image
from src.helper import memoized
logger = logging.getLogger('MangaLoader.MangaFoxPlugin')
BASE_URL = 'http://mangafox.me/'
MANGA_LIST_URL = BASE_URL + 'manga/'
# -------------------------------------------------------------------------------------------------
# MangaFoxPlugin class
# -------------------------------------------------------------------------------------------------
class MangaFoxPlugin(PluginBase.PluginBase):
def __init__(self):
pass
@memoized
def load_manga_list(self):
loaded_manga_list = PluginBase.load_url(MANGA_LIST_URL)
return self._parse_manga_list(loaded_manga_list)
@staticmethod
def _parse_manga_list(data):
doc = BeautifulSoup(data, 'html.parser')
list_of_mangas = []
for div in doc.find_all('div', class_='manga_list'):
for li in div.find_all('li'):
for a in li.find_all('a'):
if a.string and a['class'] != 'top':
manga = Manga(a.string)
manga.url = a['href']
is_open = 'manga_open' in a['class']
manga.is_open = is_open
list_of_mangas.append(manga)
return list_of_mangas
@memoized
def load_chapter_list(self, manga):
response = PluginBase.load_url(manga.url)
chapter_list = self._parse_chapter_list(manga, response)
for chapter in chapter_list:
manga.add_chapter(chapter)
return chapter_list
@staticmethod
def _parse_chapter_list(manga, data):
doc = BeautifulSoup(data, 'html.parser')
list_of_chapters = []
for div in doc.find_all('div', id='chapters'):
for ul in div.find_all('ul', class_='chlist'):
for li in ul.find_all('li'):
inner_div = li.find('div')
a = inner_div.find('a', class_='tips')
span = inner_div.find('span', class_='title nowrap')
words = a.string.split()
number_string = words[len(words)-1]
if number_string.isdigit(): # ignore 'half' chapters
chapter = Chapter(manga, int(number_string))
chapter.url = a['href']
chapter.text = a.get_text()
if span is not None:
chapter.title = span.string
list_of_chapters.append(chapter)
return list_of_chapters
@memoized
def load_images_for_chapter(self, chapter):
response = PluginBase.load_url(chapter.url)
image_list = self._parse_image_list(chapter, response)
for image in image_list:
chapter.add_image(image)
return image_list
def _parse_image_list(self, chapter, data):
result = []
options = []
doc = BeautifulSoup(data, 'html.parser')
div = doc.find('div', class_='r m')
select = div.find('select', class_='m')
for option in select.find_all():
value = option['value']
if value is not None and value.isdigit():
options.append(int(value))
base_url = chapter.url.rsplit('/',1)[0] + '/'
for option in options:
if option > 0:
image = Image(chapter, option)
image.url = self._parse_image_page(urllib.parse.urljoin(base_url, '{}.html'.format(option)))
result.append(image)
return result
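# NOTE (added comment): _parse_image_list above enumerates the chapter's page
# numbers from the page's <select> element, then resolves each page to its
# image URL via _parse_image_page(), using the chapter URL's directory as base.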
@memoized
def load_image_url(self, image):
list_of_images = self.load_images_for_chapter(image.chapter)
for i in list_of_images:
if i.imageNo == image.imageNo:
image.url = i.url
return True
return False
@staticmethod
def _parse_image_page(page_url):
data = PluginBase.load_url(page_url)
doc = BeautifulSoup(data, 'html.parser')
outer_div = doc.find('div', id='viewer')
inner_div = outer_div.find('div', class_='read_img')
img = inner_div.find('img', id='image')
return img['src']
def postprocess_image(self, filename):
logger.debug('Cropping image file to delete ads.')
# image = PIL.Image.open(filename)
# w, h = image.size
# image.crop((0, 0, w, h-30)).save(filename)
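# NOTE (added comment): if the cropping above is re-enabled, the module would
# also need ``import PIL.Image`` (Pillow) at the top; as written,
# postprocess_image only logs and does not modify the file.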
# -------------------------------------------------------------------------------------------------
# <module>
# -------------------------------------------------------------------------------------------------
if __name__ == '__main__':
plugin = MangaFoxPlugin()
print('testing MangaFoxPlugin._parse_manga_list()')
response = open('../../testdata/MangaFox/manga_list.htm', encoding='UTF-8')
manga_list = plugin._parse_manga_list(response)
assert(len(manga_list) == 16340)
print('test successful')
print('######################################################################')
print('testing MangaFoxPlugin._parse_chapter_list()')
response = open('../../testdata/MangaFox/chapter_list.htm', encoding='UTF-8')
chapter_list = plugin._parse_chapter_list('', response)
assert(len(chapter_list) == 821)
print('test successful')
print('######################################################################')
print('testing MangaFoxPlugin._parse_image_list()')
response = open('../../testdata/MangaFox/image.htm', encoding='UTF-8')
image_list = plugin._parse_image_list(response)
assert(len(image_list) == 17)
print('test successful')
print('######################################################################')
print('testing MangaFoxPlugin._parse_image_url()')
response = open('../../testdata/MangaFox/image.htm', encoding='UTF-8')
url = plugin._parse_image_url(response)
assert(url == 'image-Dateien/t001.jpg')
print('test successful')
|
marxin/youtube-dl | refs/heads/master | youtube_dl/extractor/fourtube.py | 39 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
)
from ..utils import (
parse_duration,
parse_iso8601,
str_to_int,
)
class FourTubeIE(InfoExtractor):
IE_NAME = '4tube'
_VALID_URL = r'https?://(?:www\.)?4tube\.com/videos/(?P<id>\d+)'
_TEST = {
'url': 'http://www.4tube.com/videos/209733/hot-babe-holly-michaels-gets-her-ass-stuffed-by-black',
'md5': '6516c8ac63b03de06bc8eac14362db4f',
'info_dict': {
'id': '209733',
'ext': 'mp4',
'title': 'Hot Babe Holly Michaels gets her ass stuffed by black',
'uploader': 'WCP Club',
'uploader_id': 'wcp-club',
'upload_date': '20131031',
'timestamp': 1383263892,
'duration': 583,
'view_count': int,
'like_count': int,
'categories': list,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_meta('name', webpage)
timestamp = parse_iso8601(self._html_search_meta(
'uploadDate', webpage))
thumbnail = self._html_search_meta('thumbnailUrl', webpage)
uploader_id = self._html_search_regex(
r'<a class="img-avatar" href="[^"]+/channels/([^/"]+)" title="Go to [^"]+ page">',
webpage, 'uploader id')
uploader = self._html_search_regex(
r'<a class="img-avatar" href="[^"]+/channels/[^/"]+" title="Go to ([^"]+) page">',
webpage, 'uploader')
categories_html = self._search_regex(
r'(?s)><i class="icon icon-tag"></i>\s*Categories / Tags\s*.*?<ul class="list">(.*?)</ul>',
webpage, 'categories', fatal=False)
categories = None
if categories_html:
categories = [
c.strip() for c in re.findall(
r'(?s)<li><a.*?>(.*?)</a>', categories_html)]
view_count = str_to_int(self._search_regex(
r'<meta itemprop="interactionCount" content="UserPlays:([0-9,]+)">',
webpage, 'view count', fatal=False))
like_count = str_to_int(self._search_regex(
r'<meta itemprop="interactionCount" content="UserLikes:([0-9,]+)">',
webpage, 'like count', fatal=False))
duration = parse_duration(self._html_search_meta('duration', webpage))
params_js = self._search_regex(
r'\$\.ajax\(url,\ opts\);\s*\}\s*\}\)\(([0-9,\[\] ]+)\)',
webpage, 'initialization parameters'
)
params = self._parse_json('[%s]' % params_js, video_id)
media_id = params[0]
sources = ['%s' % p for p in params[2]]
token_url = 'http://tkn.4tube.com/{0}/desktop/{1}'.format(
media_id, '+'.join(sources))
headers = {
b'Content-Type': b'application/x-www-form-urlencoded',
b'Origin': b'http://www.4tube.com',
}
token_req = compat_urllib_request.Request(token_url, b'{}', headers)
tokens = self._download_json(token_req, video_id)
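# NOTE (added comment): the JSON fetched from token_url is expected to map each
# requested resolution string to an object whose 'token' field is the playable
# URL; the formats list below is built directly from that mapping.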
formats = [{
'url': tokens[format]['token'],
'format_id': format + 'p',
'resolution': format + 'p',
'quality': int(format),
} for format in sources]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'categories': categories,
'thumbnail': thumbnail,
'uploader': uploader,
'uploader_id': uploader_id,
'timestamp': timestamp,
'like_count': like_count,
'view_count': view_count,
'duration': duration,
'age_limit': 18,
}
|
AloneRoad/Inforlearn | refs/heads/1.0-rc3 | django/core/files/base.py | 16 |
import os
from django.utils.encoding import smart_str, smart_unicode
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class File(object):
DEFAULT_CHUNK_SIZE = 64 * 2**10
def __init__(self, file):
self.file = file
self._name = file.name
self._mode = file.mode
self._closed = False
def __str__(self):
return smart_str(self.name or '')
def __unicode__(self):
return smart_unicode(self.name or u'')
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self or "None")
def __nonzero__(self):
return not not self.name
def __len__(self):
return self.size
def _get_name(self):
return self._name
name = property(_get_name)
def _get_mode(self):
return self._mode
mode = property(_get_mode)
def _get_closed(self):
return self._closed
closed = property(_get_closed)
def _get_size(self):
if not hasattr(self, '_size'):
if hasattr(self.file, 'size'):
self._size = self.file.size
elif os.path.exists(self.file.name):
self._size = os.path.getsize(self.file.name)
else:
raise AttributeError("Unable to determine the file's size.")
return self._size
def _set_size(self, size):
self._size = size
size = property(_get_size, _set_size)
def chunks(self, chunk_size=None):
"""
Read the file and yield chunks of ``chunk_size`` bytes (defaults to
``UploadedFile.DEFAULT_CHUNK_SIZE``).
"""
if not chunk_size:
chunk_size = self.__class__.DEFAULT_CHUNK_SIZE
if hasattr(self, 'seek'):
self.seek(0)
# Assume the pointer is at zero...
counter = self.size
while counter > 0:
yield self.read(chunk_size)
counter -= chunk_size
def multiple_chunks(self, chunk_size=None):
"""
Returns ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
return self.size > chunk_size
def xreadlines(self):
return iter(self)
def readlines(self):
return list(self.xreadlines())
def __iter__(self):
# Iterate over this file-like object by newlines
buffer_ = None
for chunk in self.chunks():
chunk_buffer = StringIO(chunk)
for line in chunk_buffer:
if buffer_:
line = buffer_ + line
buffer_ = None
# If this is the end of a line, yield
# otherwise, wait for the next round
if line[-1] in ('\n', '\r'):
yield line
else:
buffer_ = line
if buffer_ is not None:
yield buffer_
def open(self, mode=None):
if not self.closed:
self.seek(0)
elif os.path.exists(self.file.name):
self.file = open(self.file.name, mode or self.file.mode)
else:
raise ValueError("The file cannot be reopened.")
def seek(self, position):
self.file.seek(position)
def tell(self):
return self.file.tell()
def read(self, num_bytes=None):
if num_bytes is None:
return self.file.read()
return self.file.read(num_bytes)
def write(self, content):
if not self.mode.startswith('w'):
raise IOError("File was not opened with write access.")
self.file.write(content)
def flush(self):
if not self.mode.startswith('w'):
raise IOError("File was not opened with write access.")
self.file.flush()
def close(self):
self.file.close()
self._closed = True
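# NOTE (illustrative sketch, not part of the original module): chunks() is the
# intended way to stream large files without loading them fully into memory,
# e.g. (hypothetical names):
#   f = File(open('/tmp/example.bin', 'rb'))
#   for chunk in f.chunks(chunk_size=64 * 1024):
#       handle(chunk)   # handle() is a placeholder for the caller's logic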
class ContentFile(File):
"""
A File-like object that takes just raw content, rather than an actual file.
"""
def __init__(self, content):
self.file = StringIO(content or '')
self.size = len(content or '')
self.file.seek(0)
self._closed = False
def __str__(self):
return 'Raw content'
def __nonzero__(self):
return True
def open(self, mode=None):
if self._closed:
self._closed = False
self.seek(0)
|
santidediego/LearningDjango | refs/heads/master | lib/python3.5/site-packages/pip/_vendor/html5lib/filters/sanitizer.py | 1734 |
from __future__ import absolute_import, division, unicode_literals
from . import _base
from ..sanitizer import HTMLSanitizerMixin
class Filter(_base.Filter, HTMLSanitizerMixin):
def __iter__(self):
for token in _base.Filter.__iter__(self):
token = self.sanitize_token(token)
if token:
yield token
|
mattilyra/scikit-learn | refs/heads/master | sklearn/tests/test_multioutput.py | 39 |
import numpy as np
import scipy.sparse as sp
from sklearn.utils import shuffle
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.exceptions import NotFittedError
from sklearn import datasets
from sklearn.base import clone
from sklearn.ensemble import GradientBoostingRegressor, RandomForestClassifier
from sklearn.linear_model import Lasso
from sklearn.svm import LinearSVC
from sklearn.multiclass import OneVsRestClassifier
from sklearn.multioutput import MultiOutputRegressor, MultiOutputClassifier
def test_multi_target_regression():
X, y = datasets.make_regression(n_targets=3)
X_train, y_train = X[:50], y[:50]
X_test, y_test = X[50:], y[50:]
references = np.zeros_like(y_test)
for n in range(3):
rgr = GradientBoostingRegressor(random_state=0)
rgr.fit(X_train, y_train[:, n])
references[:,n] = rgr.predict(X_test)
rgr = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
rgr.fit(X_train, y_train)
y_pred = rgr.predict(X_test)
assert_almost_equal(references, y_pred)
def test_multi_target_regression_one_target():
# Test that multi target regression raises an exception for a single target
X, y = datasets.make_regression(n_targets=1)
X_train, y_train = X[:50], y[:50]
X_test, y_test = X[50:], y[50:]
rgr = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
assert_raises(ValueError, rgr.fit, X_train, y_train)
def test_multi_target_sparse_regression():
X, y = datasets.make_regression(n_targets=3)
X_train, y_train = X[:50], y[:50]
X_test, y_test = X[50:], y[50:]
for sparse in [sp.csr_matrix, sp.csc_matrix, sp.coo_matrix, sp.dok_matrix,
sp.lil_matrix]:
rgr = MultiOutputRegressor(Lasso(random_state=0))
rgr_sparse = MultiOutputRegressor(Lasso(random_state=0))
rgr.fit(X_train, y_train)
rgr_sparse.fit(sparse(X_train), y_train)
assert_almost_equal(rgr.predict(X_test), rgr_sparse.predict(sparse(X_test)))
def test_multi_target_sample_weights_api():
X = [[1,2,3], [4,5,6]]
y = [[3.141, 2.718], [2.718, 3.141]]
w = [0.8, 0.6]
rgr = MultiOutputRegressor(Lasso())
assert_raises_regex(ValueError, "does not support sample weights",
rgr.fit, X, y, w)
# no exception should be raised if the base estimator supports weights
rgr = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
rgr.fit(X, y, w)
def test_multi_target_sample_weights():
# weighted regressor
Xw = [[1,2,3], [4,5,6]]
yw = [[3.141, 2.718], [2.718, 3.141]]
w = [2., 1.]
rgr_w = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
rgr_w.fit(Xw, yw, w)
# unweighted, but with repeated samples
X = [[1,2,3], [1,2,3], [4,5,6]]
y = [[3.141, 2.718], [3.141, 2.718], [2.718, 3.141]]
rgr = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
rgr.fit(X, y)
X_test = [[1.5,2.5,3.5], [3.5,4.5,5.5]]
assert_almost_equal(rgr.predict(X_test), rgr_w.predict(X_test))
# Import the data
iris = datasets.load_iris()
# create multiple targets by randomly shuffling and concatenating y.
X = iris.data
y1 = iris.target
y2 = shuffle(y1, random_state=1)
y3 = shuffle(y1, random_state=2)
y = np.column_stack((y1, y2, y3))
n_samples, n_features = X.shape
n_outputs = y.shape[1]
n_classes = len(np.unique(y1))
def test_multi_output_classification():
# test if multi_target initializes correctly with base estimator and fit
# assert predictions work as expected for predict, predict_proba and score
forest = RandomForestClassifier(n_estimators=10, random_state=1)
multi_target_forest = MultiOutputClassifier(forest)
# train the multi_target_forest and also get the predictions.
multi_target_forest.fit(X, y)
predictions = multi_target_forest.predict(X)
assert_equal((n_samples, n_outputs), predictions.shape)
predict_proba = multi_target_forest.predict_proba(X)
assert_equal((n_samples, n_classes, n_outputs), predict_proba.shape)
assert_array_equal(np.argmax(predict_proba, axis=1), predictions)
# train the forest with each column and assert that predictions are equal
for i in range(3):
forest_ = clone(forest) # create a clone with the same state
forest_.fit(X, y[:, i])
assert_equal(list(forest_.predict(X)), list(predictions[:, i]))
assert_array_equal(list(forest_.predict_proba(X)),
list(predict_proba[:, :, i]))
def test_multiclass_multioutput_estimator():
# test to check meta of meta estimators
svc = LinearSVC(random_state=0)
multi_class_svc = OneVsRestClassifier(svc)
multi_target_svc = MultiOutputClassifier(multi_class_svc)
multi_target_svc.fit(X, y)
predictions = multi_target_svc.predict(X)
assert_equal((n_samples, n_outputs), predictions.shape)
# train the forest with each column and assert that predictions are equal
for i in range(3):
multi_class_svc_ = clone(multi_class_svc) # create a clone
multi_class_svc_.fit(X, y[:, i])
assert_equal(list(multi_class_svc_.predict(X)),
list(predictions[:, i]))
def test_multi_output_classification_sample_weights():
# weighted classifier
Xw = [[1, 2, 3], [4, 5, 6]]
yw = [[3, 2], [2, 3]]
w = np.asarray([2., 1.])
forest = RandomForestClassifier(n_estimators=10, random_state=1)
clf_w = MultiOutputClassifier(forest)
clf_w.fit(Xw, yw, w)
# unweighted, but with repeated samples
X = [[1, 2, 3], [1, 2, 3], [4, 5, 6]]
y = [[3, 2], [3, 2], [2, 3]]
forest = RandomForestClassifier(n_estimators=10, random_state=1)
clf = MultiOutputClassifier(forest)
clf.fit(X, y)
X_test = [[1.5, 2.5, 3.5], [3.5, 4.5, 5.5]]
assert_almost_equal(clf.predict(X_test), clf_w.predict(X_test))
def test_multi_output_exceptions():
# NotFittedError when fit is not done but score, predict and
# and predict_proba are called
moc = MultiOutputClassifier(LinearSVC(random_state=0))
assert_raises(NotFittedError, moc.predict, y)
assert_raises(NotFittedError, moc.predict_proba, y)
assert_raises(NotFittedError, moc.score, X, y)
# ValueError when number of outputs is different
# for fit and score
y_new = np.column_stack((y1, y2))
moc.fit(X, y)
assert_raises(ValueError, moc.score, X, y_new)
|
bright-sparks/wpull | refs/heads/master | wpull/document/__init__.py | 2 |
'''Document handling.'''
|
eeneku/baller | refs/heads/master | src/systems/movement_system.py | 1 |
# -*- coding: utf-8 -*-
from engine import system
from components import Transform
from components import Movement
class MovementSystem(system.System):
""" Movement system. Moves the entity around. """
def __init__(self, entity_manager=None, *args, **kwargs):
super(MovementSystem, self).__init__(*args, **kwargs)
self.entity_manager = entity_manager
def update(self, dt):
move_components, trans_components = self.entity_manager.get_all_components_of_types([Movement, Transform])
for move, trans in zip(move_components, trans_components):
if move.moving:
trans.x += move.x * dt
trans.y += move.y * dt
|
40223145c2g18/c2g18 | refs/heads/master | w2/static/Brython2.0.0-20140209-164925/Lib/_collections.py | 115 |
# "High performance data structures"
# copied from pypy repo
#
# Copied and completed from the sandbox of CPython
# (nondist/sandbox/collections/pydeque.py rev 1.1, Raymond Hettinger)
#
# edited for Brython line 558 : catch ImportError instead of AttributeError
import operator
#try:
# from thread import get_ident as _thread_ident
#except ImportError:
def _thread_ident():
return -1
n = 30
LFTLNK = n
RGTLNK = n+1
BLOCKSIZ = n+2
# The deque's size limit is d.maxlen. The limit can be zero or positive, or
# None. After an item is added to a deque, we check to see if the size has
# grown past the limit. If it has, we get the size back down to the limit by
# popping an item off of the opposite end. The methods that can trigger this
# are append(), appendleft(), extend(), and extendleft().
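# NOTE (illustrative, not part of the original module): with maxlen=3,
#   d = deque([1, 2, 3], maxlen=3); d.append(4)   ->  deque([2, 3, 4])
#   d.appendleft(0)                               ->  deque([0, 2, 3])
# i.e. growing past the limit silently evicts from the opposite end.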
#class deque(object):
class deque:
def __new__(cls, iterable=(), *args, **kw):
#fixme
#self = super(deque, cls).__new__(cls, *args, **kw)
self=object.__new__(cls, *args, **kw)
self.clear()
return self
def __init__(self, iterable=(), maxlen=None):
object.__init__(self)
self.clear()
if maxlen is not None:
if maxlen < 0:
raise ValueError("maxlen must be non-negative")
self._maxlen = maxlen
add = self.append
for elem in iterable:
add(elem)
@property
def maxlen(self):
return self._maxlen
def clear(self):
self.right = self.left = [None] * BLOCKSIZ
self.rightndx = n//2 # points to last written element
self.leftndx = n//2+1
self.length = 0
self.state = 0
def append(self, x):
self.state += 1
self.rightndx += 1
if self.rightndx == n:
newblock = [None] * BLOCKSIZ
self.right[RGTLNK] = newblock
newblock[LFTLNK] = self.right
self.right = newblock
self.rightndx = 0
self.length += 1
self.right[self.rightndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.popleft()
def appendleft(self, x):
self.state += 1
self.leftndx -= 1
if self.leftndx == -1:
newblock = [None] * BLOCKSIZ
self.left[LFTLNK] = newblock
newblock[RGTLNK] = self.left
self.left = newblock
self.leftndx = n-1
self.length += 1
self.left[self.leftndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.pop()
def extend(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.append(elem)
def extendleft(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.appendleft(elem)
def pop(self):
if self.left is self.right and self.leftndx > self.rightndx:
#raise IndexError, "pop from an empty deque" # does not work in brython
raise IndexError("pop from an empty deque")
x = self.right[self.rightndx]
self.right[self.rightndx] = None
self.length -= 1
self.rightndx -= 1
self.state += 1
if self.rightndx == -1:
prevblock = self.right[LFTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[RGTLNK] = None
self.right[LFTLNK] = None
self.right = prevblock
self.rightndx = n-1
return x
def popleft(self):
if self.left is self.right and self.leftndx > self.rightndx:
#raise IndexError, "pop from an empty deque"
raise IndexError("pop from an empty deque")
x = self.left[self.leftndx]
self.left[self.leftndx] = None
self.length -= 1
self.leftndx += 1
self.state += 1
if self.leftndx == n:
prevblock = self.left[RGTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[LFTLNK] = None
self.left[RGTLNK] = None
self.left = prevblock
self.leftndx = 0
return x
def count(self, value):
c = 0
for item in self:
if item == value:
c += 1
return c
def remove(self, value):
# Need to be defensive for mutating comparisons
for i in range(len(self)):
if self[i] == value:
del self[i]
return
raise ValueError("deque.remove(x): x not in deque")
def rotate(self, n=1):
length = len(self)
if length == 0:
return
halflen = (length+1) >> 1
if n > halflen or n < -halflen:
n %= length
if n > halflen:
n -= length
elif n < -halflen:
n += length
while n > 0:
self.appendleft(self.pop())
n -= 1
while n < 0:
self.append(self.popleft())
n += 1
def reverse(self):
"reverse *IN PLACE*"
leftblock = self.left
rightblock = self.right
leftindex = self.leftndx
rightindex = self.rightndx
for i in range(self.length // 2):
# Validate that pointers haven't met in the middle
assert leftblock != rightblock or leftindex < rightindex
# Swap
(rightblock[rightindex], leftblock[leftindex]) = (
leftblock[leftindex], rightblock[rightindex])
# Advance left block/index pair
leftindex += 1
if leftindex == n:
leftblock = leftblock[RGTLNK]
assert leftblock is not None
leftindex = 0
# Step backwards with the right block/index pair
rightindex -= 1
if rightindex == -1:
rightblock = rightblock[LFTLNK]
assert rightblock is not None
rightindex = n - 1
def __repr__(self):
threadlocalattr = '__repr' + str(_thread_ident())
if threadlocalattr in self.__dict__:
return 'deque([...])'
else:
self.__dict__[threadlocalattr] = True
try:
if self.maxlen is not None:
return 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
else:
return 'deque(%r)' % (list(self),)
finally:
del self.__dict__[threadlocalattr]
def __iter__(self):
return deque_iterator(self, self._iter_impl)
def _iter_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in block[l:r]:
yield elem
if self.state != original_state:
giveup()
block = block[RGTLNK]
def __reversed__(self):
return deque_iterator(self, self._reversed_impl)
def _reversed_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in reversed(block[l:r]):
yield elem
if self.state != original_state:
giveup()
block = block[LFTLNK]
def __len__(self):
#sum = 0
#block = self.left
#while block:
# sum += n
# block = block[RGTLNK]
#return sum + self.rightndx - self.leftndx + 1 - n
return self.length
def __getref(self, index):
if index >= 0:
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
span = r-l
if index < span:
return block, l+index
index -= span
block = block[RGTLNK]
else:
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
negative_span = l-r
if index >= negative_span:
return block, r+index
index -= negative_span
block = block[LFTLNK]
raise IndexError("deque index out of range")
def __getitem__(self, index):
block, index = self.__getref(index)
return block[index]
def __setitem__(self, index, value):
block, index = self.__getref(index)
block[index] = value
def __delitem__(self, index):
length = len(self)
if index >= 0:
if index >= length:
raise IndexError("deque index out of range")
self.rotate(-index)
self.popleft()
self.rotate(index)
else:
#index = ~index #todo until bitwise operators are in brython
index= index^(2**31)
if index >= length:
raise IndexError("deque index out of range")
self.rotate(index)
self.pop()
self.rotate(-index)
def __reduce_ex__(self, proto):
return type(self), (list(self), self.maxlen)
def __hash__(self):
#raise TypeError, "deque objects are unhashable"
raise TypeError("deque objects are unhashable")
def __copy__(self):
return self.__class__(self, self.maxlen)
# XXX make comparison more efficient
def __eq__(self, other):
if isinstance(other, deque):
return list(self) == list(other)
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, deque):
return list(self) != list(other)
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, deque):
return list(self) < list(other)
else:
return NotImplemented
def __le__(self, other):
if isinstance(other, deque):
return list(self) <= list(other)
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, deque):
return list(self) > list(other)
else:
return NotImplemented
def __ge__(self, other):
if isinstance(other, deque):
return list(self) >= list(other)
else:
return NotImplemented
def __iadd__(self, other):
self.extend(other)
return self
class deque_iterator(object):
def __init__(self, deq, itergen):
self.counter = len(deq)
def giveup():
self.counter = 0
#raise RuntimeError, "deque mutated during iteration"
raise RuntimeError("deque mutated during iteration")
self._gen = itergen(deq.state, giveup)
def next(self):
res = self._gen.next()
self.counter -= 1
return res
def __iter__(self):
return self
class defaultdict(dict):
def __init__(self, *args, **kwds):
if len(args) > 0:
default_factory = args[0]
args = args[1:]
if not callable(default_factory) and default_factory is not None:
raise TypeError("first argument must be callable")
else:
default_factory = None
dict.__init__(self, args, kwds)
self.default_factory = default_factory
self.update(args, kwds)
#super(defaultdict, self).__init__(*args, **kwds)
#fixme.. had to add this function to get defaultdict working with brython correctly
def __getitem__(self, key):
if self.__contains__(key):
return dict.__getitem__(self,key)
return self.__missing__(key)
def __missing__(self, key):
# from defaultdict docs
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __repr__(self, recurse=set()):
if id(self) in recurse:
return "defaultdict(...)"
try:
recurse.add(id(self))
return "defaultdict(%s, %s)" % (repr(self.default_factory), super(defaultdict, self).__repr__())
finally:
recurse.remove(id(self))
def copy(self):
return type(self)(self.default_factory, self)
def __copy__(self):
return self.copy()
def __reduce__(self):
#
#__reduce__ must return a 5-tuple as follows:
#
# - factory function
# - tuple of args for the factory function
# - additional state (here None)
# - sequence iterator (here None)
# - dictionary iterator (yielding successive (key, value) pairs)
# This API is used by pickle.py and copy.py.
#
return (type(self), (self.default_factory,), None, None, self.iteritems())
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
def namedtuple(typename, field_names, verbose=False, rename=False):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', 'x y')
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
>>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Parse and validate the field names. Validation serves two purposes,
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
field_names = tuple(map(str, field_names))
if rename:
names = list(field_names)
seen = set()
for i, name in enumerate(names):
if (not min(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
or not name or name[0].isdigit() or name.startswith('_')
or name in seen):
names[i] = '_%d' % i
seen.add(name)
field_names = tuple(names)
for name in (typename,) + field_names:
if not min(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
seen_names = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: %r' % name)
if name in seen_names:
raise ValueError('Encountered duplicate field name: %r' % name)
seen_names.add(name)
# Create and fill-in the class template
numfields = len(field_names)
argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
reprtxt = ', '.join('%s=%%r' % name for name in field_names)
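# NOTE (added comment): the class body below is built as a source-code template
# and exec'd further down in a scratch namespace; %(typename)s, %(argtxt)s,
# %(field_names)r, etc. are substituted from the locals() of this function.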
template = '''class %(typename)s(tuple):
'%(typename)s(%(argtxt)s)' \n
__slots__ = () \n
_fields = %(field_names)r \n
def __new__(_cls, %(argtxt)s):
return _tuple.__new__(_cls, (%(argtxt)s)) \n
@classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new %(typename)s object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != %(numfields)d:
raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
return result \n
def __repr__(self):
return '%(typename)s(%(reprtxt)s)' %% self \n
def _asdict(self):
'Return a new dict which maps field names to their values'
return dict(zip(self._fields, self)) \n
def _replace(_self, **kwds):
'Return a new %(typename)s object replacing specified fields with new values'
result = _self._make(map(kwds.pop, %(field_names)r, _self))
if kwds:
raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
return result \n
def __getnewargs__(self):
return tuple(self) \n\n''' % locals()
for i, name in enumerate(field_names):
template += ' %s = _property(_itemgetter(%d))\n' % (name, i)
if verbose:
print(template)
# Execute the template string in a temporary namespace
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
_property=property, _tuple=tuple)
try:
exec(template,namespace)
except SyntaxError as e:
raise SyntaxError(e.message + ':\n' + template)
result = namespace[typename]
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return result
if __name__ == '__main__':
Point = namedtuple('Point', ['x', 'y'])
p = Point(11, y=22)
print(p[0]+p[1])
x,y=p
print(x,y)
print(p.x+p.y)
print(p)
|
iulian787/spack | refs/heads/develop | var/spack/repos/builtin/packages/qnnpack/package.py | 5 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Qnnpack(CMakePackage):
"""QNNPACK (Quantized Neural Networks PACKage) is a mobile-optimized
library for low-precision high-performance neural network inference.
QNNPACK provides implementation of common neural network operators on
quantized 8-bit tensors."""
homepage = "https://github.com/pytorch/QNNPACK"
git = "https://github.com/pytorch/QNNPACK.git"
version('master', branch='master')
depends_on('cmake@3.5:', type='build')
resource(
name='cpuinfo',
git='https://github.com/Maratyszcza/cpuinfo.git',
destination='deps',
placement='cpuinfo'
)
resource(
name='fp16',
git='https://github.com/Maratyszcza/FP16.git',
destination='deps',
placement='fp16'
)
resource(
name='fxdiv',
git='https://github.com/Maratyszcza/FXdiv.git',
destination='deps',
placement='fxdiv'
)
resource(
name='googlebenchmark',
url='https://github.com/google/benchmark/archive/v1.4.1.zip',
sha256='61ae07eb5d4a0b02753419eb17a82b7d322786bb36ab62bd3df331a4d47c00a7',
destination='deps',
placement='googlebenchmark',
)
resource(
name='googletest',
url='https://github.com/google/googletest/archive/release-1.8.0.zip',
sha256='f3ed3b58511efd272eb074a3a6d6fb79d7c2e6a0e374323d1e6bcbcc1ef141bf',
destination='deps',
placement='googletest',
)
resource(
name='psimd',
git='https://github.com/Maratyszcza/psimd.git',
destination='deps',
placement='psimd'
)
resource(
name='pthreadpool',
git='https://github.com/Maratyszcza/pthreadpool.git',
destination='deps',
placement='pthreadpool'
)
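# NOTE (added comment): each resource above is staged under deps/<placement>;
# cmake_args() below points the build at those staged copies via the matching
# *_SOURCE_DIR CMake cache variables.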
def cmake_args(self):
return [
'-DCPUINFO_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/cpuinfo')),
'-DFP16_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/fp16')),
'-DFXDIV_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/fxdiv')),
'-DPSIMD_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/psimd')),
'-DPTHREADPOOL_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/pthreadpool')),
'-DGOOGLEBENCHMARK_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/googlebenchmark')),
'-DGOOGLETEST_SOURCE_DIR={0}'.format(
join_path(self.stage.source_path, 'deps/googletest')),
]
|
uwafsl/ardupilot | refs/heads/master | mk/PX4/Tools/genmsg/test/test_genmsg_base.py | 216 |
def test_log():
from genmsg.base import log
log("hello", "there")
def test_plog():
class Foo(object):
pass
from genmsg.base import plog
plog("hello", Foo())
def test_exceptions():
from genmsg import InvalidMsgSpec
try:
raise InvalidMsgSpec('hello')
except InvalidMsgSpec:
pass
|
kelseyoo14/Wander | refs/heads/master | venv_2_7/lib/python2.7/site-packages/pandas/io/tests/test_sql.py | 9 |
"""SQL io tests
The SQL tests are broken down in different classes:
- `PandasSQLTest`: base class with common methods for all test classes
- Tests for the public API (only tests with sqlite3)
- `_TestSQLApi` base class
- `TestSQLApi`: test the public API with sqlalchemy engine
- `TestSQLiteFallbackApi`: test the public API with a sqlite DBAPI connection
- Tests for the different SQL flavors (flavor specific type conversions)
- Tests for the sqlalchemy mode: `_TestSQLAlchemy` is the base class with
common methods, `_TestSQLAlchemyConn` tests the API with a SQLAlchemy
Connection object. The different tested flavors (sqlite3, MySQL, PostgreSQL)
derive from the base class
- Tests for the fallback mode (`TestSQLiteFallback` and `TestMySQLLegacy`)
"""
from __future__ import print_function
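# NOTE (illustrative sketch, not part of the original test module): the public
# API exercised here boils down to round-tripping a DataFrame through a DBAPI
# connection, roughly:
#   conn = sqlite3.connect(':memory:')
#   pd.DataFrame({'a': [1, 2]}).to_sql('test_frame1', conn, index=False)
#   pd.read_sql_query('SELECT * FROM test_frame1', conn)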
import unittest
import sqlite3
import csv
import os
import sys
import nose
import warnings
import numpy as np
import pandas as pd
from datetime import datetime, date, time
from pandas import DataFrame, Series, Index, MultiIndex, isnull, concat
from pandas import date_range, to_datetime, to_timedelta, Timestamp
import pandas.compat as compat
from pandas.compat import StringIO, range, lrange, string_types
from pandas.core import common as com
from pandas.core.datetools import format as date_format
import pandas.io.sql as sql
from pandas.io.sql import read_sql_table, read_sql_query
import pandas.util.testing as tm
try:
import sqlalchemy
import sqlalchemy.schema
import sqlalchemy.sql.sqltypes as sqltypes
from sqlalchemy.ext import declarative
from sqlalchemy.orm import session as sa_session
SQLALCHEMY_INSTALLED = True
except ImportError:
SQLALCHEMY_INSTALLED = False
SQL_STRINGS = {
'create_iris': {
'sqlite': """CREATE TABLE iris (
"SepalLength" REAL,
"SepalWidth" REAL,
"PetalLength" REAL,
"PetalWidth" REAL,
"Name" TEXT
)""",
'mysql': """CREATE TABLE iris (
`SepalLength` DOUBLE,
`SepalWidth` DOUBLE,
`PetalLength` DOUBLE,
`PetalWidth` DOUBLE,
`Name` VARCHAR(200)
)""",
'postgresql': """CREATE TABLE iris (
"SepalLength" DOUBLE PRECISION,
"SepalWidth" DOUBLE PRECISION,
"PetalLength" DOUBLE PRECISION,
"PetalWidth" DOUBLE PRECISION,
"Name" VARCHAR(200)
)"""
},
'insert_iris': {
'sqlite': """INSERT INTO iris VALUES(?, ?, ?, ?, ?)""",
'mysql': """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");""",
'postgresql': """INSERT INTO iris VALUES(%s, %s, %s, %s, %s);"""
},
'create_test_types': {
'sqlite': """CREATE TABLE types_test_data (
"TextCol" TEXT,
"DateCol" TEXT,
"IntDateCol" INTEGER,
"FloatCol" REAL,
"IntCol" INTEGER,
"BoolCol" INTEGER,
"IntColWithNull" INTEGER,
"BoolColWithNull" INTEGER
)""",
'mysql': """CREATE TABLE types_test_data (
`TextCol` TEXT,
`DateCol` DATETIME,
`IntDateCol` INTEGER,
`FloatCol` DOUBLE,
`IntCol` INTEGER,
`BoolCol` BOOLEAN,
`IntColWithNull` INTEGER,
`BoolColWithNull` BOOLEAN
)""",
'postgresql': """CREATE TABLE types_test_data (
"TextCol" TEXT,
"DateCol" TIMESTAMP,
"DateColWithTz" TIMESTAMP WITH TIME ZONE,
"IntDateCol" INTEGER,
"FloatCol" DOUBLE PRECISION,
"IntCol" INTEGER,
"BoolCol" BOOLEAN,
"IntColWithNull" INTEGER,
"BoolColWithNull" BOOLEAN
)"""
},
'insert_test_types': {
'sqlite': {
'query': """
INSERT INTO types_test_data
VALUES(?, ?, ?, ?, ?, ?, ?, ?)
""",
'fields': (
'TextCol', 'DateCol', 'IntDateCol', 'FloatCol',
'IntCol', 'BoolCol', 'IntColWithNull', 'BoolColWithNull'
)
},
'mysql': {
'query': """
INSERT INTO types_test_data
VALUES("%s", %s, %s, %s, %s, %s, %s, %s)
""",
'fields': (
'TextCol', 'DateCol', 'IntDateCol', 'FloatCol',
'IntCol', 'BoolCol', 'IntColWithNull', 'BoolColWithNull'
)
},
'postgresql': {
'query': """
INSERT INTO types_test_data
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)
""",
'fields': (
'TextCol', 'DateCol', 'DateColWithTz', 'IntDateCol', 'FloatCol',
'IntCol', 'BoolCol', 'IntColWithNull', 'BoolColWithNull'
)
},
},
'read_parameters': {
'sqlite': "SELECT * FROM iris WHERE Name=? AND SepalLength=?",
'mysql': 'SELECT * FROM iris WHERE `Name`="%s" AND `SepalLength`=%s',
'postgresql': 'SELECT * FROM iris WHERE "Name"=%s AND "SepalLength"=%s'
},
'read_named_parameters': {
'sqlite': """
SELECT * FROM iris WHERE Name=:name AND SepalLength=:length
""",
'mysql': """
SELECT * FROM iris WHERE
`Name`="%(name)s" AND `SepalLength`=%(length)s
""",
'postgresql': """
SELECT * FROM iris WHERE
"Name"=%(name)s AND "SepalLength"=%(length)s
"""
},
'create_view': {
'sqlite': """
CREATE VIEW iris_view AS
SELECT * FROM iris
"""
}
}
class MixInBase(object):
def tearDown(self):
for tbl in self._get_all_tables():
self.drop_table(tbl)
self._close_conn()
class MySQLMixIn(MixInBase):
def drop_table(self, table_name):
cur = self.conn.cursor()
cur.execute("DROP TABLE IF EXISTS %s" % sql._get_valid_mysql_name(table_name))
self.conn.commit()
def _get_all_tables(self):
cur = self.conn.cursor()
cur.execute('SHOW TABLES')
return [table[0] for table in cur.fetchall()]
def _close_conn(self):
from pymysql.err import Error
try:
self.conn.close()
except Error:
pass
class SQLiteMixIn(MixInBase):
def drop_table(self, table_name):
self.conn.execute("DROP TABLE IF EXISTS %s" % sql._get_valid_sqlite_name(table_name))
self.conn.commit()
def _get_all_tables(self):
c = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
return [table[0] for table in c.fetchall()]
def _close_conn(self):
self.conn.close()
class SQLAlchemyMixIn(MixInBase):
def drop_table(self, table_name):
sql.SQLDatabase(self.conn).drop_table(table_name)
def _get_all_tables(self):
meta = sqlalchemy.schema.MetaData(bind=self.conn)
meta.reflect()
table_list = meta.tables.keys()
return table_list
def _close_conn(self):
pass
class PandasSQLTest(unittest.TestCase):
"""
Base class with common private methods for SQLAlchemy and fallback cases.
"""
def _get_exec(self):
if hasattr(self.conn, 'execute'):
return self.conn
else:
return self.conn.cursor()
def _load_iris_data(self):
import io
iris_csv_file = os.path.join(tm.get_data_path(), 'iris.csv')
self.drop_table('iris')
self._get_exec().execute(SQL_STRINGS['create_iris'][self.flavor])
with io.open(iris_csv_file, mode='r', newline=None) as iris_csv:
r = csv.reader(iris_csv)
next(r) # skip header row
ins = SQL_STRINGS['insert_iris'][self.flavor]
for row in r:
self._get_exec().execute(ins, row)
def _load_iris_view(self):
self.drop_table('iris_view')
self._get_exec().execute(SQL_STRINGS['create_view'][self.flavor])
def _check_iris_loaded_frame(self, iris_frame):
pytype = iris_frame.dtypes[0].type
row = iris_frame.iloc[0]
self.assertTrue(
issubclass(pytype, np.floating), 'Loaded frame has incorrect type')
tm.equalContents(row.values, [5.1, 3.5, 1.4, 0.2, 'Iris-setosa'])
def _load_test1_data(self):
columns = ['index', 'A', 'B', 'C', 'D']
data = [(
'2000-01-03 00:00:00', 0.980268513777, 3.68573087906, -0.364216805298, -1.15973806169),
('2000-01-04 00:00:00', 1.04791624281, -
0.0412318367011, -0.16181208307, 0.212549316967),
('2000-01-05 00:00:00', 0.498580885705,
0.731167677815, -0.537677223318, 1.34627041952),
('2000-01-06 00:00:00', 1.12020151869, 1.56762092543, 0.00364077397681, 0.67525259227)]
self.test_frame1 = DataFrame(data, columns=columns)
def _load_test2_data(self):
df = DataFrame(dict(A=[4, 1, 3, 6],
B=['asd', 'gsq', 'ylt', 'jkl'],
C=[1.1, 3.1, 6.9, 5.3],
D=[False, True, True, False],
E=['1990-11-22', '1991-10-26', '1993-11-26', '1995-12-12']))
df['E'] = to_datetime(df['E'])
self.test_frame2 = df
def _load_test3_data(self):
columns = ['index', 'A', 'B']
data = [(
'2000-01-03 00:00:00', 2 ** 31 - 1, -1.987670),
('2000-01-04 00:00:00', -29, -0.0412318367011),
('2000-01-05 00:00:00', 20000, 0.731167677815),
('2000-01-06 00:00:00', -290867, 1.56762092543)]
self.test_frame3 = DataFrame(data, columns=columns)
def _load_raw_sql(self):
self.drop_table('types_test_data')
self._get_exec().execute(SQL_STRINGS['create_test_types'][self.flavor])
ins = SQL_STRINGS['insert_test_types'][self.flavor]
data = [
{
'TextCol': 'first',
'DateCol': '2000-01-03 00:00:00',
'DateColWithTz': '2000-01-01 00:00:00-08:00',
'IntDateCol': 535852800,
'FloatCol': 10.10,
'IntCol': 1,
'BoolCol': False,
'IntColWithNull': 1,
'BoolColWithNull': False,
},
{
'TextCol': 'first',
'DateCol': '2000-01-04 00:00:00',
'DateColWithTz': '2000-06-01 00:00:00-07:00',
'IntDateCol': 1356998400,
'FloatCol': 10.10,
'IntCol': 1,
'BoolCol': False,
'IntColWithNull': None,
'BoolColWithNull': None,
},
]
for d in data:
self._get_exec().execute(
ins['query'],
[d[field] for field in ins['fields']]
)
def _count_rows(self, table_name):
result = self._get_exec().execute(
"SELECT count(*) AS count_1 FROM %s" % table_name).fetchone()
return result[0]
def _read_sql_iris(self):
iris_frame = self.pandasSQL.read_query("SELECT * FROM iris")
self._check_iris_loaded_frame(iris_frame)
def _read_sql_iris_parameter(self):
query = SQL_STRINGS['read_parameters'][self.flavor]
params = ['Iris-setosa', 5.1]
iris_frame = self.pandasSQL.read_query(query, params=params)
self._check_iris_loaded_frame(iris_frame)
def _read_sql_iris_named_parameter(self):
query = SQL_STRINGS['read_named_parameters'][self.flavor]
params = {'name': 'Iris-setosa', 'length': 5.1}
iris_frame = self.pandasSQL.read_query(query, params=params)
self._check_iris_loaded_frame(iris_frame)
def _to_sql(self):
self.drop_table('test_frame1')
self.pandasSQL.to_sql(self.test_frame1, 'test_frame1')
self.assertTrue(self.pandasSQL.has_table(
'test_frame1'), 'Table not written to DB')
# Nuke table
self.drop_table('test_frame1')
def _to_sql_empty(self):
self.drop_table('test_frame1')
self.pandasSQL.to_sql(self.test_frame1.iloc[:0], 'test_frame1')
def _to_sql_fail(self):
self.drop_table('test_frame1')
self.pandasSQL.to_sql(
self.test_frame1, 'test_frame1', if_exists='fail')
self.assertTrue(self.pandasSQL.has_table(
'test_frame1'), 'Table not written to DB')
self.assertRaises(ValueError, self.pandasSQL.to_sql,
self.test_frame1, 'test_frame1', if_exists='fail')
self.drop_table('test_frame1')
def _to_sql_replace(self):
self.drop_table('test_frame1')
self.pandasSQL.to_sql(
self.test_frame1, 'test_frame1', if_exists='fail')
# Add to table again
self.pandasSQL.to_sql(
self.test_frame1, 'test_frame1', if_exists='replace')
self.assertTrue(self.pandasSQL.has_table(
'test_frame1'), 'Table not written to DB')
num_entries = len(self.test_frame1)
num_rows = self._count_rows('test_frame1')
self.assertEqual(
num_rows, num_entries, "not the same number of rows as entries")
self.drop_table('test_frame1')
def _to_sql_append(self):
# Nuke table just in case
self.drop_table('test_frame1')
self.pandasSQL.to_sql(
self.test_frame1, 'test_frame1', if_exists='fail')
# Add to table again
self.pandasSQL.to_sql(
self.test_frame1, 'test_frame1', if_exists='append')
self.assertTrue(self.pandasSQL.has_table(
'test_frame1'), 'Table not written to DB')
num_entries = 2 * len(self.test_frame1)
num_rows = self._count_rows('test_frame1')
self.assertEqual(
num_rows, num_entries, "not the same number of rows as entries")
self.drop_table('test_frame1')
def _roundtrip(self):
self.drop_table('test_frame_roundtrip')
self.pandasSQL.to_sql(self.test_frame1, 'test_frame_roundtrip')
result = self.pandasSQL.read_query('SELECT * FROM test_frame_roundtrip')
result.set_index('level_0', inplace=True)
# result.index.astype(int)
result.index.name = None
tm.assert_frame_equal(result, self.test_frame1)
def _execute_sql(self):
# drop_sql = "DROP TABLE IF EXISTS test" # should already be done
iris_results = self.pandasSQL.execute("SELECT * FROM iris")
row = iris_results.fetchone()
tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, 'Iris-setosa'])
def _to_sql_save_index(self):
df = DataFrame.from_records([(1,2.1,'line1'), (2,1.5,'line2')],
columns=['A','B','C'], index=['A'])
self.pandasSQL.to_sql(df, 'test_to_sql_saves_index')
ix_cols = self._get_index_columns('test_to_sql_saves_index')
self.assertEqual(ix_cols, [['A',],])
def _transaction_test(self):
self.pandasSQL.execute("CREATE TABLE test_trans (A INT, B TEXT)")
ins_sql = "INSERT INTO test_trans (A,B) VALUES (1, 'blah')"
# Make sure when transaction is rolled back, no rows get inserted
try:
with self.pandasSQL.run_transaction() as trans:
trans.execute(ins_sql)
raise Exception('error')
        except Exception:
# ignore raised exception
pass
res = self.pandasSQL.read_query('SELECT * FROM test_trans')
self.assertEqual(len(res), 0)
# Make sure when transaction is committed, rows do get inserted
with self.pandasSQL.run_transaction() as trans:
trans.execute(ins_sql)
res2 = self.pandasSQL.read_query('SELECT * FROM test_trans')
self.assertEqual(len(res2), 1)
#------------------------------------------------------------------------------
#--- Testing the public API
class _TestSQLApi(PandasSQLTest):
"""
Base class to test the public API.
From this two classes are derived to run these tests for both the
sqlalchemy mode (`TestSQLApi`) and the fallback mode (`TestSQLiteFallbackApi`).
These tests are run with sqlite3. Specific tests for the different
sql flavours are included in `_TestSQLAlchemy`.
Notes:
    flavor can always be passed even in SQLAlchemy mode; it should be
    correctly ignored.
we don't use drop_table because that isn't part of the public api
"""
flavor = 'sqlite'
mode = None
def setUp(self):
self.conn = self.connect()
self._load_iris_data()
self._load_iris_view()
self._load_test1_data()
self._load_test2_data()
self._load_test3_data()
self._load_raw_sql()
def test_read_sql_iris(self):
iris_frame = sql.read_sql_query(
"SELECT * FROM iris", self.conn)
self._check_iris_loaded_frame(iris_frame)
def test_read_sql_view(self):
iris_frame = sql.read_sql_query(
"SELECT * FROM iris_view", self.conn)
self._check_iris_loaded_frame(iris_frame)
def test_legacy_read_frame(self):
with tm.assert_produces_warning(FutureWarning):
iris_frame = sql.read_frame(
"SELECT * FROM iris", self.conn)
self._check_iris_loaded_frame(iris_frame)
def test_to_sql(self):
sql.to_sql(self.test_frame1, 'test_frame1', self.conn, flavor='sqlite')
self.assertTrue(
sql.has_table('test_frame1', self.conn, flavor='sqlite'), 'Table not written to DB')
def test_to_sql_fail(self):
sql.to_sql(self.test_frame1, 'test_frame2',
self.conn, flavor='sqlite', if_exists='fail')
self.assertTrue(
sql.has_table('test_frame2', self.conn, flavor='sqlite'), 'Table not written to DB')
self.assertRaises(ValueError, sql.to_sql, self.test_frame1,
'test_frame2', self.conn, flavor='sqlite', if_exists='fail')
def test_to_sql_replace(self):
sql.to_sql(self.test_frame1, 'test_frame3',
self.conn, flavor='sqlite', if_exists='fail')
# Add to table again
sql.to_sql(self.test_frame1, 'test_frame3',
self.conn, flavor='sqlite', if_exists='replace')
self.assertTrue(
sql.has_table('test_frame3', self.conn, flavor='sqlite'),
'Table not written to DB')
num_entries = len(self.test_frame1)
num_rows = self._count_rows('test_frame3')
self.assertEqual(
num_rows, num_entries, "not the same number of rows as entries")
def test_to_sql_append(self):
sql.to_sql(self.test_frame1, 'test_frame4',
self.conn, flavor='sqlite', if_exists='fail')
# Add to table again
sql.to_sql(self.test_frame1, 'test_frame4',
self.conn, flavor='sqlite', if_exists='append')
self.assertTrue(
sql.has_table('test_frame4', self.conn, flavor='sqlite'),
'Table not written to DB')
num_entries = 2 * len(self.test_frame1)
num_rows = self._count_rows('test_frame4')
self.assertEqual(
num_rows, num_entries, "not the same number of rows as entries")
def test_to_sql_type_mapping(self):
sql.to_sql(self.test_frame3, 'test_frame5',
self.conn, flavor='sqlite', index=False)
result = sql.read_sql("SELECT * FROM test_frame5", self.conn)
tm.assert_frame_equal(self.test_frame3, result)
def test_to_sql_series(self):
s = Series(np.arange(5, dtype='int64'), name='series')
sql.to_sql(s, "test_series", self.conn, flavor='sqlite', index=False)
s2 = sql.read_sql_query("SELECT * FROM test_series", self.conn)
tm.assert_frame_equal(s.to_frame(), s2)
def test_to_sql_panel(self):
panel = tm.makePanel()
self.assertRaises(NotImplementedError, sql.to_sql, panel,
'test_panel', self.conn, flavor='sqlite')
def test_legacy_write_frame(self):
# Assume that functionality is already tested above so just do
# quick check that it basically works
with tm.assert_produces_warning(FutureWarning):
sql.write_frame(self.test_frame1, 'test_frame_legacy', self.conn,
flavor='sqlite')
self.assertTrue(
sql.has_table('test_frame_legacy', self.conn, flavor='sqlite'),
'Table not written to DB')
def test_roundtrip(self):
sql.to_sql(self.test_frame1, 'test_frame_roundtrip',
con=self.conn, flavor='sqlite')
result = sql.read_sql_query(
'SELECT * FROM test_frame_roundtrip',
con=self.conn)
# HACK!
result.index = self.test_frame1.index
result.set_index('level_0', inplace=True)
result.index.astype(int)
result.index.name = None
tm.assert_frame_equal(result, self.test_frame1)
def test_roundtrip_chunksize(self):
sql.to_sql(self.test_frame1, 'test_frame_roundtrip', con=self.conn,
index=False, flavor='sqlite', chunksize=2)
result = sql.read_sql_query(
'SELECT * FROM test_frame_roundtrip',
con=self.conn)
tm.assert_frame_equal(result, self.test_frame1)
def test_execute_sql(self):
# drop_sql = "DROP TABLE IF EXISTS test" # should already be done
iris_results = sql.execute("SELECT * FROM iris", con=self.conn)
row = iris_results.fetchone()
tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, 'Iris-setosa'])
def test_date_parsing(self):
        # Test date parsing in read_sql_query
# No Parsing
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn)
self.assertFalse(
issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn,
parse_dates=['DateCol'])
self.assertTrue(
issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn,
parse_dates={'DateCol': '%Y-%m-%d %H:%M:%S'})
self.assertTrue(
issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn,
parse_dates=['IntDateCol'])
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn,
parse_dates={'IntDateCol': 's'})
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
def test_date_and_index(self):
# Test case where same column appears in parse_date and index_col
df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn,
index_col='DateCol',
parse_dates=['DateCol', 'IntDateCol'])
self.assertTrue(issubclass(df.index.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
def test_timedelta(self):
# see #6921
df = to_timedelta(Series(['00:00:01', '00:00:03'], name='foo')).to_frame()
with tm.assert_produces_warning(UserWarning):
df.to_sql('test_timedelta', self.conn)
result = sql.read_sql_query('SELECT * FROM test_timedelta', self.conn)
tm.assert_series_equal(result['foo'], df['foo'].astype('int64'))
def test_complex(self):
df = DataFrame({'a':[1+1j, 2j]})
# Complex data type should raise error
self.assertRaises(ValueError, df.to_sql, 'test_complex', self.conn)
def test_to_sql_index_label(self):
temp_frame = DataFrame({'col1': range(4)})
# no index name, defaults to 'index'
sql.to_sql(temp_frame, 'test_index_label', self.conn)
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[0], 'index')
# specifying index_label
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace', index_label='other_label')
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[0], 'other_label',
"Specified index_label not written to database")
# using the index name
temp_frame.index.name = 'index_name'
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace')
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[0], 'index_name',
"Index name not written to database")
# has index name, but specifying index_label
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace', index_label='other_label')
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[0], 'other_label',
"Specified index_label not written to database")
def test_to_sql_index_label_multiindex(self):
temp_frame = DataFrame({'col1': range(4)},
index=MultiIndex.from_product([('A0', 'A1'), ('B0', 'B1')]))
# no index name, defaults to 'level_0' and 'level_1'
sql.to_sql(temp_frame, 'test_index_label', self.conn)
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[0], 'level_0')
self.assertEqual(frame.columns[1], 'level_1')
# specifying index_label
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace', index_label=['A', 'B'])
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[:2].tolist(), ['A', 'B'],
"Specified index_labels not written to database")
# using the index name
temp_frame.index.names = ['A', 'B']
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace')
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[:2].tolist(), ['A', 'B'],
"Index names not written to database")
# has index name, but specifying index_label
sql.to_sql(temp_frame, 'test_index_label', self.conn,
if_exists='replace', index_label=['C', 'D'])
frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn)
self.assertEqual(frame.columns[:2].tolist(), ['C', 'D'],
"Specified index_labels not written to database")
# wrong length of index_label
self.assertRaises(ValueError, sql.to_sql, temp_frame,
'test_index_label', self.conn, if_exists='replace',
index_label='C')
def test_multiindex_roundtrip(self):
df = DataFrame.from_records([(1,2.1,'line1'), (2,1.5,'line2')],
columns=['A','B','C'], index=['A','B'])
df.to_sql('test_multiindex_roundtrip', self.conn)
result = sql.read_sql_query('SELECT * FROM test_multiindex_roundtrip',
self.conn, index_col=['A','B'])
tm.assert_frame_equal(df, result, check_index_type=True)
def test_integer_col_names(self):
df = DataFrame([[1, 2], [3, 4]], columns=[0, 1])
sql.to_sql(df, "test_frame_integer_col_names", self.conn,
if_exists='replace')
def test_get_schema(self):
create_sql = sql.get_schema(self.test_frame1, 'test', 'sqlite',
con=self.conn)
self.assertTrue('CREATE' in create_sql)
def test_get_schema_dtypes(self):
float_frame = DataFrame({'a':[1.1,1.2], 'b':[2.1,2.2]})
dtype = sqlalchemy.Integer if self.mode == 'sqlalchemy' else 'INTEGER'
create_sql = sql.get_schema(float_frame, 'test', 'sqlite',
con=self.conn, dtype={'b':dtype})
self.assertTrue('CREATE' in create_sql)
self.assertTrue('INTEGER' in create_sql)
def test_get_schema_keys(self):
frame = DataFrame({'Col1':[1.1,1.2], 'Col2':[2.1,2.2]})
create_sql = sql.get_schema(frame, 'test', 'sqlite',
con=self.conn, keys='Col1')
constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("Col1")'
self.assertTrue(constraint_sentence in create_sql)
# multiple columns as key (GH10385)
create_sql = sql.get_schema(self.test_frame1, 'test', 'sqlite',
con=self.conn, keys=['A', 'B'])
constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("A", "B")'
self.assertTrue(constraint_sentence in create_sql)
def test_chunksize_read(self):
df = DataFrame(np.random.randn(22, 5), columns=list('abcde'))
df.to_sql('test_chunksize', self.conn, index=False)
# reading the query in one time
res1 = sql.read_sql_query("select * from test_chunksize", self.conn)
# reading the query in chunks with read_sql_query
res2 = DataFrame()
i = 0
sizes = [5, 5, 5, 5, 2]
for chunk in sql.read_sql_query("select * from test_chunksize",
self.conn, chunksize=5):
res2 = concat([res2, chunk], ignore_index=True)
self.assertEqual(len(chunk), sizes[i])
i += 1
tm.assert_frame_equal(res1, res2)
# reading the query in chunks with read_sql_query
if self.mode == 'sqlalchemy':
res3 = DataFrame()
i = 0
sizes = [5, 5, 5, 5, 2]
for chunk in sql.read_sql_table("test_chunksize", self.conn,
chunksize=5):
res3 = concat([res3, chunk], ignore_index=True)
self.assertEqual(len(chunk), sizes[i])
i += 1
tm.assert_frame_equal(res1, res3)
def test_categorical(self):
# GH8624
# test that categorical gets written correctly as dense column
df = DataFrame(
{'person_id': [1, 2, 3],
'person_name': ['John P. Doe', 'Jane Dove', 'John P. Doe']})
df2 = df.copy()
df2['person_name'] = df2['person_name'].astype('category')
df2.to_sql('test_categorical', self.conn, index=False)
res = sql.read_sql_query('SELECT * FROM test_categorical', self.conn)
tm.assert_frame_equal(res, df)
def test_unicode_column_name(self):
# GH 11431
df = DataFrame([[1,2],[3,4]], columns = [u'\xe9',u'b'])
df.to_sql('test_unicode', self.conn, index=False)
class TestSQLApi(SQLAlchemyMixIn, _TestSQLApi):
"""
Test the public API as it would be used directly
Tests for `read_sql_table` are included here, as this is specific for the
sqlalchemy mode.
"""
flavor = 'sqlite'
mode = 'sqlalchemy'
def connect(self):
if SQLALCHEMY_INSTALLED:
return sqlalchemy.create_engine('sqlite:///:memory:')
else:
raise nose.SkipTest('SQLAlchemy not installed')
def test_read_table_columns(self):
# test columns argument in read_table
sql.to_sql(self.test_frame1, 'test_frame', self.conn)
cols = ['A', 'B']
result = sql.read_sql_table('test_frame', self.conn, columns=cols)
self.assertEqual(result.columns.tolist(), cols,
"Columns not correctly selected")
def test_read_table_index_col(self):
# test columns argument in read_table
sql.to_sql(self.test_frame1, 'test_frame', self.conn)
result = sql.read_sql_table('test_frame', self.conn, index_col="index")
self.assertEqual(result.index.names, ["index"],
"index_col not correctly set")
result = sql.read_sql_table('test_frame', self.conn, index_col=["A", "B"])
self.assertEqual(result.index.names, ["A", "B"],
"index_col not correctly set")
result = sql.read_sql_table('test_frame', self.conn, index_col=["A", "B"],
columns=["C", "D"])
self.assertEqual(result.index.names, ["A", "B"],
"index_col not correctly set")
self.assertEqual(result.columns.tolist(), ["C", "D"],
"columns not set correctly whith index_col")
def test_read_sql_delegate(self):
iris_frame1 = sql.read_sql_query(
"SELECT * FROM iris", self.conn)
iris_frame2 = sql.read_sql(
"SELECT * FROM iris", self.conn)
tm.assert_frame_equal(iris_frame1, iris_frame2)
iris_frame1 = sql.read_sql_table('iris', self.conn)
iris_frame2 = sql.read_sql('iris', self.conn)
tm.assert_frame_equal(iris_frame1, iris_frame2)
def test_not_reflect_all_tables(self):
# create invalid table
qry = """CREATE TABLE invalid (x INTEGER, y UNKNOWN);"""
self.conn.execute(qry)
qry = """CREATE TABLE other_table (x INTEGER, y INTEGER);"""
self.conn.execute(qry)
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
# Trigger a warning.
sql.read_sql_table('other_table', self.conn)
sql.read_sql_query('SELECT * FROM other_table', self.conn)
# Verify some things
self.assertEqual(len(w), 0, "Warning triggered for other table")
def test_warning_case_insensitive_table_name(self):
# see GH7815.
        # We can't test that this warning is triggered, as the database
# configuration would have to be altered. But here we test that
# the warning is certainly NOT triggered in a normal case.
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
# This should not trigger a Warning
self.test_frame1.to_sql('CaseSensitive', self.conn)
# Verify some things
self.assertEqual(len(w), 0, "Warning triggered for writing a table")
def _get_index_columns(self, tbl_name):
from sqlalchemy.engine import reflection
insp = reflection.Inspector.from_engine(self.conn)
        ixs = insp.get_indexes(tbl_name)
ixs = [i['column_names'] for i in ixs]
return ixs
def test_sqlalchemy_type_mapping(self):
# Test Timestamp objects (no datetime64 because of timezone) (GH9085)
df = DataFrame({'time': to_datetime(['201412120154', '201412110254'],
utc=True)})
db = sql.SQLDatabase(self.conn)
table = sql.SQLTable("test_type", db, frame=df)
self.assertTrue(isinstance(table.table.c['time'].type, sqltypes.DateTime))
def test_to_sql_read_sql_with_database_uri(self):
# Test read_sql and .to_sql method with a database URI (GH10654)
test_frame1 = self.test_frame1
#db_uri = 'sqlite:///:memory:' # raises sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) near "iris": syntax error [SQL: 'iris']
with tm.ensure_clean() as name:
db_uri = 'sqlite:///' + name
table = 'iris'
test_frame1.to_sql(table, db_uri, if_exists='replace', index=False)
test_frame2 = sql.read_sql(table, db_uri)
test_frame3 = sql.read_sql_table(table, db_uri)
query = 'SELECT * FROM iris'
test_frame4 = sql.read_sql_query(query, db_uri)
tm.assert_frame_equal(test_frame1, test_frame2)
tm.assert_frame_equal(test_frame1, test_frame3)
tm.assert_frame_equal(test_frame1, test_frame4)
def _make_iris_table_metadata(self):
sa = sqlalchemy
metadata = sa.MetaData()
iris = sa.Table('iris', metadata,
sa.Column('SepalLength', sa.REAL),
sa.Column('SepalWidth', sa.REAL),
sa.Column('PetalLength', sa.REAL),
sa.Column('PetalWidth', sa.REAL),
sa.Column('Name', sa.TEXT)
)
return iris
def test_query_by_text_obj(self):
# WIP : GH10846
name_text = sqlalchemy.text('select * from iris where name=:name')
iris_df = sql.read_sql(name_text, self.conn, params={'name': 'Iris-versicolor'})
all_names = set(iris_df['Name'])
self.assertEqual(all_names, set(['Iris-versicolor']))
def test_query_by_select_obj(self):
# WIP : GH10846
iris = self._make_iris_table_metadata()
name_select = sqlalchemy.select([iris]).where(iris.c.Name == sqlalchemy.bindparam('name'))
iris_df = sql.read_sql(name_select, self.conn, params={'name': 'Iris-setosa'})
all_names = set(iris_df['Name'])
self.assertEqual(all_names, set(['Iris-setosa']))
class _EngineToConnMixin(object):
"""
    A mixin that causes setUp to wrap the engine in a connection, so the
    tests run against a Connection rather than an Engine.
"""
def setUp(self):
super(_EngineToConnMixin, self).setUp()
engine = self.conn
conn = engine.connect()
self.__tx = conn.begin()
self.pandasSQL = sql.SQLDatabase(conn)
self.__engine = engine
self.conn = conn
def tearDown(self):
self.__tx.rollback()
self.conn.close()
self.conn = self.__engine
self.pandasSQL = sql.SQLDatabase(self.__engine)
super(_EngineToConnMixin, self).tearDown()
class TestSQLApiConn(_EngineToConnMixin, TestSQLApi):
pass
class TestSQLiteFallbackApi(SQLiteMixIn, _TestSQLApi):
"""
Test the public sqlite connection fallback API
"""
flavor = 'sqlite'
mode = 'fallback'
def connect(self, database=":memory:"):
return sqlite3.connect(database)
def test_sql_open_close(self):
        # Test that database IO still works if the connection is closed
# between the writing and reading (as in many real situations).
with tm.ensure_clean() as name:
conn = self.connect(name)
sql.to_sql(self.test_frame3, "test_frame3_legacy", conn,
flavor="sqlite", index=False)
conn.close()
conn = self.connect(name)
result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;",
conn)
conn.close()
tm.assert_frame_equal(self.test_frame3, result)
def test_read_sql_delegate(self):
iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn)
iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn)
tm.assert_frame_equal(iris_frame1, iris_frame2)
self.assertRaises(sql.DatabaseError, sql.read_sql, 'iris', self.conn)
def test_safe_names_warning(self):
# GH 6798
df = DataFrame([[1, 2], [3, 4]], columns=['a', 'b ']) # has a space
# warns on create table with spaces in names
with tm.assert_produces_warning():
sql.to_sql(df, "test_frame3_legacy", self.conn,
flavor="sqlite", index=False)
def test_get_schema2(self):
        # without providing a connection object (available for backwards compatibility)
create_sql = sql.get_schema(self.test_frame1, 'test', 'sqlite')
self.assertTrue('CREATE' in create_sql)
def test_tquery(self):
with tm.assert_produces_warning(FutureWarning):
iris_results = sql.tquery("SELECT * FROM iris", con=self.conn)
row = iris_results[0]
tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, 'Iris-setosa'])
def test_uquery(self):
with tm.assert_produces_warning(FutureWarning):
rows = sql.uquery("SELECT * FROM iris LIMIT 1", con=self.conn)
self.assertEqual(rows, -1)
def _get_sqlite_column_type(self, schema, column):
for col in schema.split('\n'):
if col.split()[0].strip('""') == column:
return col.split()[1]
raise ValueError('Column %s not found' % (column))
def test_sqlite_type_mapping(self):
# Test Timestamp objects (no datetime64 because of timezone) (GH9085)
df = DataFrame({'time': to_datetime(['201412120154', '201412110254'],
utc=True)})
db = sql.SQLiteDatabase(self.conn, self.flavor)
table = sql.SQLiteTable("test_type", db, frame=df)
schema = table.sql_schema()
self.assertEqual(self._get_sqlite_column_type(schema, 'time'),
"TIMESTAMP")
#------------------------------------------------------------------------------
#--- Database flavor specific tests
class _TestSQLAlchemy(SQLAlchemyMixIn, PandasSQLTest):
"""
Base class for testing the sqlalchemy backend.
Subclasses for specific database types are created below. Tests that
deviate for each flavor are overwritten there.
"""
flavor = None
@classmethod
def setUpClass(cls):
cls.setup_import()
cls.setup_driver()
# test connection
try:
conn = cls.connect()
conn.connect()
except sqlalchemy.exc.OperationalError:
msg = "{0} - can't connect to {1} server".format(cls, cls.flavor)
raise nose.SkipTest(msg)
def setUp(self):
self.setup_connect()
self._load_iris_data()
self._load_raw_sql()
self._load_test1_data()
@classmethod
def setup_import(cls):
# Skip this test if SQLAlchemy not available
if not SQLALCHEMY_INSTALLED:
raise nose.SkipTest('SQLAlchemy not installed')
@classmethod
def setup_driver(cls):
raise NotImplementedError()
@classmethod
def connect(cls):
raise NotImplementedError()
def setup_connect(self):
try:
self.conn = self.connect()
self.pandasSQL = sql.SQLDatabase(self.conn)
# to test if connection can be made:
self.conn.connect()
except sqlalchemy.exc.OperationalError:
raise nose.SkipTest("Can't connect to {0} server".format(self.flavor))
def test_aread_sql(self):
self._read_sql_iris()
def test_read_sql_parameter(self):
self._read_sql_iris_parameter()
def test_read_sql_named_parameter(self):
self._read_sql_iris_named_parameter()
def test_to_sql(self):
self._to_sql()
def test_to_sql_empty(self):
self._to_sql_empty()
def test_to_sql_fail(self):
self._to_sql_fail()
def test_to_sql_replace(self):
self._to_sql_replace()
def test_to_sql_append(self):
self._to_sql_append()
def test_create_table(self):
temp_conn = self.connect()
temp_frame = DataFrame(
{'one': [1., 2., 3., 4.], 'two': [4., 3., 2., 1.]})
pandasSQL = sql.SQLDatabase(temp_conn)
pandasSQL.to_sql(temp_frame, 'temp_frame')
self.assertTrue(
temp_conn.has_table('temp_frame'), 'Table not written to DB')
def test_drop_table(self):
temp_conn = self.connect()
temp_frame = DataFrame(
{'one': [1., 2., 3., 4.], 'two': [4., 3., 2., 1.]})
pandasSQL = sql.SQLDatabase(temp_conn)
pandasSQL.to_sql(temp_frame, 'temp_frame')
self.assertTrue(
temp_conn.has_table('temp_frame'), 'Table not written to DB')
pandasSQL.drop_table('temp_frame')
self.assertFalse(
temp_conn.has_table('temp_frame'), 'Table not deleted from DB')
def test_roundtrip(self):
self._roundtrip()
def test_execute_sql(self):
self._execute_sql()
def test_read_table(self):
iris_frame = sql.read_sql_table("iris", con=self.conn)
self._check_iris_loaded_frame(iris_frame)
def test_read_table_columns(self):
iris_frame = sql.read_sql_table(
"iris", con=self.conn, columns=['SepalLength', 'SepalLength'])
tm.equalContents(
iris_frame.columns.values, ['SepalLength', 'SepalLength'])
def test_read_table_absent(self):
self.assertRaises(
ValueError, sql.read_sql_table, "this_doesnt_exist", con=self.conn)
def test_default_type_conversion(self):
df = sql.read_sql_table("types_test_data", self.conn)
self.assertTrue(issubclass(df.FloatCol.dtype.type, np.floating),
"FloatCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
"IntCol loaded with incorrect type")
self.assertTrue(issubclass(df.BoolCol.dtype.type, np.bool_),
"BoolCol loaded with incorrect type")
# Int column with NA values stays as float
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Bool column with NA values becomes object
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.object),
"BoolColWithNull loaded with incorrect type")
def test_bigint(self):
# int64 should be converted to BigInteger, GH7433
df = DataFrame(data={'i64':[2**62]})
df.to_sql('test_bigint', self.conn, index=False)
result = sql.read_sql_table('test_bigint', self.conn)
tm.assert_frame_equal(df, result)
def test_default_date_load(self):
df = sql.read_sql_table("types_test_data", self.conn)
# IMPORTANT - sqlite has no native date type, so shouldn't parse, but
# MySQL SHOULD be converted.
self.assertTrue(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
def test_datetime_with_timezone(self):
# edge case that converts postgresql datetime with time zone types
# to datetime64[ns,psycopg2.tz.FixedOffsetTimezone..], which is ok
# but should be more natural, so coerce to datetime64[ns] for now
def check(col):
# check that a column is either datetime64[ns]
# or datetime64[ns, UTC]
if com.is_datetime64_dtype(col.dtype):
# "2000-01-01 00:00:00-08:00" should convert to "2000-01-01 08:00:00"
self.assertEqual(col[0], Timestamp('2000-01-01 08:00:00'))
# "2000-06-01 00:00:00-07:00" should convert to "2000-06-01 07:00:00"
self.assertEqual(col[1], Timestamp('2000-06-01 07:00:00'))
elif com.is_datetime64tz_dtype(col.dtype):
self.assertTrue(str(col.dt.tz) == 'UTC')
# "2000-01-01 00:00:00-08:00" should convert to "2000-01-01 08:00:00"
self.assertEqual(col[0], Timestamp('2000-01-01 08:00:00', tz='UTC'))
# "2000-06-01 00:00:00-07:00" should convert to "2000-06-01 07:00:00"
self.assertEqual(col[1], Timestamp('2000-06-01 07:00:00', tz='UTC'))
else:
raise AssertionError("DateCol loaded with incorrect type -> {0}".format(col.dtype))
# GH11216
df = pd.read_sql_query("select * from types_test_data", self.conn)
if not hasattr(df,'DateColWithTz'):
raise nose.SkipTest("no column with datetime with time zone")
# this is parsed on Travis (linux), but not on macosx for some reason
        # even with the same versions of psycopg2 & sqlalchemy, possibly a PostgreSQL server
# version difference
col = df.DateColWithTz
self.assertTrue(com.is_object_dtype(col.dtype) or com.is_datetime64_dtype(col.dtype) \
or com.is_datetime64tz_dtype(col.dtype),
"DateCol loaded with incorrect type -> {0}".format(col.dtype))
df = pd.read_sql_query("select * from types_test_data", self.conn, parse_dates=['DateColWithTz'])
if not hasattr(df,'DateColWithTz'):
raise nose.SkipTest("no column with datetime with time zone")
check(df.DateColWithTz)
df = pd.concat(list(pd.read_sql_query("select * from types_test_data",
self.conn,chunksize=1)),ignore_index=True)
col = df.DateColWithTz
self.assertTrue(com.is_datetime64tz_dtype(col.dtype),
"DateCol loaded with incorrect type -> {0}".format(col.dtype))
self.assertTrue(str(col.dt.tz) == 'UTC')
expected = sql.read_sql_table("types_test_data", self.conn)
tm.assert_series_equal(df.DateColWithTz, expected.DateColWithTz.astype('datetime64[ns, UTC]'))
# xref #7139
# this might or might not be converted depending on the postgres driver
df = sql.read_sql_table("types_test_data", self.conn)
check(df.DateColWithTz)
def test_date_parsing(self):
# No Parsing
df = sql.read_sql_table("types_test_data", self.conn)
df = sql.read_sql_table("types_test_data", self.conn,
parse_dates=['DateCol'])
self.assertTrue(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
df = sql.read_sql_table("types_test_data", self.conn,
parse_dates={'DateCol': '%Y-%m-%d %H:%M:%S'})
self.assertTrue(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
df = sql.read_sql_table("types_test_data", self.conn, parse_dates={
'DateCol': {'format': '%Y-%m-%d %H:%M:%S'}})
self.assertTrue(issubclass(df.DateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
df = sql.read_sql_table(
"types_test_data", self.conn, parse_dates=['IntDateCol'])
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
df = sql.read_sql_table(
"types_test_data", self.conn, parse_dates={'IntDateCol': 's'})
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
df = sql.read_sql_table(
"types_test_data", self.conn, parse_dates={'IntDateCol': {'unit': 's'}})
self.assertTrue(issubclass(df.IntDateCol.dtype.type, np.datetime64),
"IntDateCol loaded with incorrect type")
def test_datetime(self):
df = DataFrame({'A': date_range('2013-01-01 09:00:00', periods=3),
'B': np.arange(3.0)})
df.to_sql('test_datetime', self.conn)
# with read_table -> type information from schema used
result = sql.read_sql_table('test_datetime', self.conn)
result = result.drop('index', axis=1)
tm.assert_frame_equal(result, df)
        # with read_sql -> no type information; sqlite has no native datetime
        # type, so values come back as strings
result = sql.read_sql_query('SELECT * FROM test_datetime', self.conn)
result = result.drop('index', axis=1)
if self.flavor == 'sqlite':
self.assertTrue(isinstance(result.loc[0, 'A'], string_types))
result['A'] = to_datetime(result['A'])
tm.assert_frame_equal(result, df)
else:
tm.assert_frame_equal(result, df)
def test_datetime_NaT(self):
df = DataFrame({'A': date_range('2013-01-01 09:00:00', periods=3),
'B': np.arange(3.0)})
df.loc[1, 'A'] = np.nan
df.to_sql('test_datetime', self.conn, index=False)
# with read_table -> type information from schema used
result = sql.read_sql_table('test_datetime', self.conn)
tm.assert_frame_equal(result, df)
        # with read_sql -> no type information; sqlite has no native datetime
        # type, so values come back as strings
result = sql.read_sql_query('SELECT * FROM test_datetime', self.conn)
if self.flavor == 'sqlite':
self.assertTrue(isinstance(result.loc[0, 'A'], string_types))
result['A'] = to_datetime(result['A'], errors='coerce')
tm.assert_frame_equal(result, df)
else:
tm.assert_frame_equal(result, df)
def test_datetime_date(self):
# test support for datetime.date
df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"])
df.to_sql('test_date', self.conn, index=False)
res = read_sql_table('test_date', self.conn)
# comes back as datetime64
tm.assert_series_equal(res['a'], to_datetime(df['a']))
def test_datetime_time(self):
# test support for datetime.time
df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"])
df.to_sql('test_time', self.conn, index=False)
res = read_sql_table('test_time', self.conn)
tm.assert_frame_equal(res, df)
def test_mixed_dtype_insert(self):
# see GH6509
s1 = Series(2**25 + 1,dtype=np.int32)
s2 = Series(0.0,dtype=np.float32)
df = DataFrame({'s1': s1, 's2': s2})
# write and read again
df.to_sql("test_read_write", self.conn, index=False)
df2 = sql.read_sql_table("test_read_write", self.conn)
tm.assert_frame_equal(df, df2, check_dtype=False, check_exact=True)
def test_nan_numeric(self):
# NaNs in numeric float column
df = DataFrame({'A':[0, 1, 2], 'B':[0.2, np.nan, 5.6]})
df.to_sql('test_nan', self.conn, index=False)
# with read_table
result = sql.read_sql_table('test_nan', self.conn)
tm.assert_frame_equal(result, df)
# with read_sql
result = sql.read_sql_query('SELECT * FROM test_nan', self.conn)
tm.assert_frame_equal(result, df)
def test_nan_fullcolumn(self):
# full NaN column (numeric float column)
df = DataFrame({'A':[0, 1, 2], 'B':[np.nan, np.nan, np.nan]})
df.to_sql('test_nan', self.conn, index=False)
# with read_table
result = sql.read_sql_table('test_nan', self.conn)
tm.assert_frame_equal(result, df)
        # with read_sql -> no type info from table -> stays None
df['B'] = df['B'].astype('object')
df['B'] = None
result = sql.read_sql_query('SELECT * FROM test_nan', self.conn)
tm.assert_frame_equal(result, df)
def test_nan_string(self):
# NaNs in string column
df = DataFrame({'A':[0, 1, 2], 'B':['a', 'b', np.nan]})
df.to_sql('test_nan', self.conn, index=False)
# NaNs are coming back as None
df.loc[2, 'B'] = None
# with read_table
result = sql.read_sql_table('test_nan', self.conn)
tm.assert_frame_equal(result, df)
# with read_sql
result = sql.read_sql_query('SELECT * FROM test_nan', self.conn)
tm.assert_frame_equal(result, df)
def _get_index_columns(self, tbl_name):
from sqlalchemy.engine import reflection
insp = reflection.Inspector.from_engine(self.conn)
ixs = insp.get_indexes(tbl_name)
ixs = [i['column_names'] for i in ixs]
return ixs
def test_to_sql_save_index(self):
self._to_sql_save_index()
def test_transactions(self):
self._transaction_test()
def test_get_schema_create_table(self):
# Use a dataframe without a bool column, since MySQL converts bool to
# TINYINT (which read_sql_table returns as an int and causes a dtype
# mismatch)
self._load_test3_data()
tbl = 'test_get_schema_create_table'
create_sql = sql.get_schema(self.test_frame3, tbl, con=self.conn)
blank_test_df = self.test_frame3.iloc[:0]
self.drop_table(tbl)
self.conn.execute(create_sql)
returned_df = sql.read_sql_table(tbl, self.conn)
tm.assert_frame_equal(returned_df, blank_test_df, check_index_type=False)
self.drop_table(tbl)
def test_dtype(self):
cols = ['A', 'B']
data = [(0.8, True),
(0.9, None)]
df = DataFrame(data, columns=cols)
df.to_sql('dtype_test', self.conn)
df.to_sql('dtype_test2', self.conn, dtype={'B': sqlalchemy.TEXT})
meta = sqlalchemy.schema.MetaData(bind=self.conn)
meta.reflect()
sqltype = meta.tables['dtype_test2'].columns['B'].type
self.assertTrue(isinstance(sqltype, sqlalchemy.TEXT))
self.assertRaises(ValueError, df.to_sql,
'error', self.conn, dtype={'B': str})
# GH9083
df.to_sql('dtype_test3', self.conn, dtype={'B': sqlalchemy.String(10)})
meta.reflect()
sqltype = meta.tables['dtype_test3'].columns['B'].type
self.assertTrue(isinstance(sqltype, sqlalchemy.String))
self.assertEqual(sqltype.length, 10)
def test_notnull_dtype(self):
cols = {'Bool': Series([True,None]),
'Date': Series([datetime(2012, 5, 1), None]),
'Int' : Series([1, None], dtype='object'),
'Float': Series([1.1, None])
}
df = DataFrame(cols)
tbl = 'notnull_dtype_test'
df.to_sql(tbl, self.conn)
returned_df = sql.read_sql_table(tbl, self.conn)
meta = sqlalchemy.schema.MetaData(bind=self.conn)
meta.reflect()
if self.flavor == 'mysql':
my_type = sqltypes.Integer
else:
my_type = sqltypes.Boolean
col_dict = meta.tables[tbl].columns
self.assertTrue(isinstance(col_dict['Bool'].type, my_type))
self.assertTrue(isinstance(col_dict['Date'].type, sqltypes.DateTime))
self.assertTrue(isinstance(col_dict['Int'].type, sqltypes.Integer))
self.assertTrue(isinstance(col_dict['Float'].type, sqltypes.Float))
def test_double_precision(self):
V = 1.23456789101112131415
df = DataFrame({'f32':Series([V,], dtype='float32'),
'f64':Series([V,], dtype='float64'),
'f64_as_f32':Series([V,], dtype='float64'),
'i32':Series([5,], dtype='int32'),
'i64':Series([5,], dtype='int64'),
})
df.to_sql('test_dtypes', self.conn, index=False, if_exists='replace',
dtype={'f64_as_f32':sqlalchemy.Float(precision=23)})
res = sql.read_sql_table('test_dtypes', self.conn)
# check precision of float64
self.assertEqual(np.round(df['f64'].iloc[0],14),
np.round(res['f64'].iloc[0],14))
# check sql types
meta = sqlalchemy.schema.MetaData(bind=self.conn)
meta.reflect()
col_dict = meta.tables['test_dtypes'].columns
self.assertEqual(str(col_dict['f32'].type),
str(col_dict['f64_as_f32'].type))
self.assertTrue(isinstance(col_dict['f32'].type, sqltypes.Float))
self.assertTrue(isinstance(col_dict['f64'].type, sqltypes.Float))
self.assertTrue(isinstance(col_dict['i32'].type, sqltypes.Integer))
self.assertTrue(isinstance(col_dict['i64'].type, sqltypes.BigInteger))
def test_connectable_issue_example(self):
# This tests the example raised in issue
# https://github.com/pydata/pandas/issues/10104
def foo(connection):
query = 'SELECT test_foo_data FROM test_foo_data'
return sql.read_sql_query(query, con=connection)
def bar(connection, data):
data.to_sql(name='test_foo_data', con=connection, if_exists='append')
def main(connectable):
with connectable.connect() as conn:
with conn.begin():
foo_data = conn.run_callable(foo)
conn.run_callable(bar, foo_data)
DataFrame({'test_foo_data': [0, 1, 2]}).to_sql('test_foo_data', self.conn)
main(self.conn)
def test_temporary_table(self):
test_data = u'Hello, World!'
expected = DataFrame({'spam': [test_data]})
Base = declarative.declarative_base()
class Temporary(Base):
__tablename__ = 'temp_test'
__table_args__ = {'prefixes': ['TEMPORARY']}
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
spam = sqlalchemy.Column(sqlalchemy.Unicode(30), nullable=False)
Session = sa_session.sessionmaker(bind=self.conn)
session = Session()
with session.transaction:
conn = session.connection()
Temporary.__table__.create(conn)
session.add(Temporary(spam=test_data))
session.flush()
df = sql.read_sql_query(
sql=sqlalchemy.select([Temporary.spam]),
con=conn,
)
tm.assert_frame_equal(df, expected)
class _TestSQLAlchemyConn(_EngineToConnMixin, _TestSQLAlchemy):
def test_transactions(self):
raise nose.SkipTest("Nested transactions rollbacks don't work with Pandas")
class _TestSQLiteAlchemy(object):
"""
Test the sqlalchemy backend against an in-memory sqlite database.
"""
flavor = 'sqlite'
@classmethod
def connect(cls):
return sqlalchemy.create_engine('sqlite:///:memory:')
@classmethod
def setup_driver(cls):
# sqlite3 is built-in
cls.driver = None
def test_default_type_conversion(self):
df = sql.read_sql_table("types_test_data", self.conn)
self.assertTrue(issubclass(df.FloatCol.dtype.type, np.floating),
"FloatCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
"IntCol loaded with incorrect type")
# sqlite has no boolean type, so integer type is returned
self.assertTrue(issubclass(df.BoolCol.dtype.type, np.integer),
"BoolCol loaded with incorrect type")
# Int column with NA values stays as float
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Non-native Bool column with NA values stays as float
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.floating),
"BoolColWithNull loaded with incorrect type")
def test_default_date_load(self):
df = sql.read_sql_table("types_test_data", self.conn)
        # IMPORTANT - sqlite has no native date type, so this should not be parsed
self.assertFalse(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")
def test_bigint_warning(self):
        # test that no warning for BIGINT (used to support int64) is raised (GH7433)
df = DataFrame({'a':[1,2]}, dtype='int64')
df.to_sql('test_bigintwarning', self.conn, index=False)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
sql.read_sql_table('test_bigintwarning', self.conn)
self.assertEqual(len(w), 0, "Warning triggered for other table")
class _TestMySQLAlchemy(object):
"""
    Test the sqlalchemy backend against a MySQL database.
"""
flavor = 'mysql'
@classmethod
def connect(cls):
url = 'mysql+{driver}://root@localhost/pandas_nosetest'
return sqlalchemy.create_engine(url.format(driver=cls.driver))
@classmethod
def setup_driver(cls):
try:
import pymysql
cls.driver = 'pymysql'
except ImportError:
raise nose.SkipTest('pymysql not installed')
def test_default_type_conversion(self):
df = sql.read_sql_table("types_test_data", self.conn)
self.assertTrue(issubclass(df.FloatCol.dtype.type, np.floating),
"FloatCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
"IntCol loaded with incorrect type")
# MySQL has no real BOOL type (it's an alias for TINYINT)
self.assertTrue(issubclass(df.BoolCol.dtype.type, np.integer),
"BoolCol loaded with incorrect type")
# Int column with NA values stays as float
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Bool column with NA = int column with NA values => becomes float
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.floating),
"BoolColWithNull loaded with incorrect type")
def test_read_procedure(self):
# see GH7324. Although it is more an api test, it is added to the
# mysql tests as sqlite does not have stored procedures
df = DataFrame({'a': [1, 2, 3], 'b':[0.1, 0.2, 0.3]})
df.to_sql('test_procedure', self.conn, index=False)
proc = """DROP PROCEDURE IF EXISTS get_testdb;
CREATE PROCEDURE get_testdb ()
BEGIN
SELECT * FROM test_procedure;
END"""
connection = self.conn.connect()
trans = connection.begin()
try:
r1 = connection.execute(proc)
trans.commit()
except:
trans.rollback()
raise
res1 = sql.read_sql_query("CALL get_testdb();", self.conn)
tm.assert_frame_equal(df, res1)
# test delegation to read_sql_query
res2 = sql.read_sql("CALL get_testdb();", self.conn)
tm.assert_frame_equal(df, res2)
class _TestPostgreSQLAlchemy(object):
"""
    Test the sqlalchemy backend against a PostgreSQL database.
"""
flavor = 'postgresql'
@classmethod
def connect(cls):
url = 'postgresql+{driver}://postgres@localhost/pandas_nosetest'
return sqlalchemy.create_engine(url.format(driver=cls.driver))
@classmethod
def setup_driver(cls):
try:
import psycopg2
cls.driver = 'psycopg2'
except ImportError:
raise nose.SkipTest('psycopg2 not installed')
def test_schema_support(self):
        # only test this for postgresql (schemas are not supported in mysql/sqlite)
df = DataFrame({'col1':[1, 2], 'col2':[0.1, 0.2], 'col3':['a', 'n']})
# create a schema
self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;")
self.conn.execute("CREATE SCHEMA other;")
        # write dataframe to different schemas
df.to_sql('test_schema_public', self.conn, index=False)
df.to_sql('test_schema_public_explicit', self.conn, index=False,
schema='public')
df.to_sql('test_schema_other', self.conn, index=False, schema='other')
# read dataframes back in
res1 = sql.read_sql_table('test_schema_public', self.conn)
tm.assert_frame_equal(df, res1)
res2 = sql.read_sql_table('test_schema_public_explicit', self.conn)
tm.assert_frame_equal(df, res2)
res3 = sql.read_sql_table('test_schema_public_explicit', self.conn,
schema='public')
tm.assert_frame_equal(df, res3)
res4 = sql.read_sql_table('test_schema_other', self.conn,
schema='other')
tm.assert_frame_equal(df, res4)
self.assertRaises(ValueError, sql.read_sql_table, 'test_schema_other',
self.conn, schema='public')
## different if_exists options
# create a schema
self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;")
self.conn.execute("CREATE SCHEMA other;")
# write dataframe with different if_exists options
df.to_sql('test_schema_other', self.conn, schema='other', index=False)
df.to_sql('test_schema_other', self.conn, schema='other', index=False,
if_exists='replace')
df.to_sql('test_schema_other', self.conn, schema='other', index=False,
if_exists='append')
res = sql.read_sql_table('test_schema_other', self.conn, schema='other')
tm.assert_frame_equal(concat([df, df], ignore_index=True), res)
## specifying schema in user-provided meta
# The schema won't be applied on another Connection
# because of transactional schemas
if isinstance(self.conn, sqlalchemy.engine.Engine):
engine2 = self.connect()
meta = sqlalchemy.MetaData(engine2, schema='other')
pdsql = sql.SQLDatabase(engine2, meta=meta)
pdsql.to_sql(df, 'test_schema_other2', index=False)
pdsql.to_sql(df, 'test_schema_other2', index=False, if_exists='replace')
pdsql.to_sql(df, 'test_schema_other2', index=False, if_exists='append')
res1 = sql.read_sql_table('test_schema_other2', self.conn, schema='other')
res2 = pdsql.read_table('test_schema_other2')
tm.assert_frame_equal(res1, res2)
class TestMySQLAlchemy(_TestMySQLAlchemy, _TestSQLAlchemy):
pass
class TestMySQLAlchemyConn(_TestMySQLAlchemy, _TestSQLAlchemyConn):
pass
class TestPostgreSQLAlchemy(_TestPostgreSQLAlchemy, _TestSQLAlchemy):
pass
class TestPostgreSQLAlchemyConn(_TestPostgreSQLAlchemy, _TestSQLAlchemyConn):
pass
class TestSQLiteAlchemy(_TestSQLiteAlchemy, _TestSQLAlchemy):
pass
class TestSQLiteAlchemyConn(_TestSQLiteAlchemy, _TestSQLAlchemyConn):
pass
#------------------------------------------------------------------------------
#--- Test Sqlite / MySQL fallback
class TestSQLiteFallback(SQLiteMixIn, PandasSQLTest):
"""
Test the fallback mode against an in-memory sqlite database.
"""
flavor = 'sqlite'
@classmethod
def connect(cls):
return sqlite3.connect(':memory:')
def setUp(self):
self.conn = self.connect()
self.pandasSQL = sql.SQLiteDatabase(self.conn, 'sqlite')
self._load_iris_data()
self._load_test1_data()
def test_invalid_flavor(self):
self.assertRaises(
NotImplementedError, sql.SQLiteDatabase, self.conn, 'oracle')
def test_read_sql(self):
self._read_sql_iris()
def test_read_sql_parameter(self):
self._read_sql_iris_parameter()
def test_read_sql_named_parameter(self):
self._read_sql_iris_named_parameter()
def test_to_sql(self):
self._to_sql()
def test_to_sql_empty(self):
self._to_sql_empty()
def test_to_sql_fail(self):
self._to_sql_fail()
def test_to_sql_replace(self):
self._to_sql_replace()
def test_to_sql_append(self):
self._to_sql_append()
def test_create_and_drop_table(self):
temp_frame = DataFrame(
{'one': [1., 2., 3., 4.], 'two': [4., 3., 2., 1.]})
self.pandasSQL.to_sql(temp_frame, 'drop_test_frame')
self.assertTrue(self.pandasSQL.has_table('drop_test_frame'),
'Table not written to DB')
self.pandasSQL.drop_table('drop_test_frame')
self.assertFalse(self.pandasSQL.has_table('drop_test_frame'),
'Table not deleted from DB')
def test_roundtrip(self):
self._roundtrip()
def test_execute_sql(self):
self._execute_sql()
def test_datetime_date(self):
# test support for datetime.date
df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"])
df.to_sql('test_date', self.conn, index=False, flavor=self.flavor)
res = read_sql_query('SELECT * FROM test_date', self.conn)
if self.flavor == 'sqlite':
# comes back as strings
tm.assert_frame_equal(res, df.astype(str))
elif self.flavor == 'mysql':
tm.assert_frame_equal(res, df)
def test_datetime_time(self):
# test support for datetime.time
df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"])
        # test that it raises an error and does not fail silently (GH8341)
if self.flavor == 'sqlite':
self.assertRaises(sqlite3.InterfaceError, sql.to_sql, df,
'test_time', self.conn)
def _get_index_columns(self, tbl_name):
ixs = sql.read_sql_query(
"SELECT * FROM sqlite_master WHERE type = 'index' " +
"AND tbl_name = '%s'" % tbl_name, self.conn)
ix_cols = []
for ix_name in ixs.name:
ix_info = sql.read_sql_query(
"PRAGMA index_info(%s)" % ix_name, self.conn)
ix_cols.append(ix_info.name.tolist())
return ix_cols
def test_to_sql_save_index(self):
self._to_sql_save_index()
def test_transactions(self):
self._transaction_test()
def _get_sqlite_column_type(self, table, column):
recs = self.conn.execute('PRAGMA table_info(%s)' % table)
for cid, name, ctype, not_null, default, pk in recs:
if name == column:
return ctype
raise ValueError('Table %s, column %s not found' % (table, column))
def test_dtype(self):
if self.flavor == 'mysql':
raise nose.SkipTest('Not applicable to MySQL legacy')
cols = ['A', 'B']
data = [(0.8, True),
(0.9, None)]
df = DataFrame(data, columns=cols)
df.to_sql('dtype_test', self.conn)
df.to_sql('dtype_test2', self.conn, dtype={'B': 'STRING'})
# sqlite stores Boolean values as INTEGER
self.assertEqual(self._get_sqlite_column_type('dtype_test', 'B'), 'INTEGER')
self.assertEqual(self._get_sqlite_column_type('dtype_test2', 'B'), 'STRING')
self.assertRaises(ValueError, df.to_sql,
'error', self.conn, dtype={'B': bool})
def test_notnull_dtype(self):
if self.flavor == 'mysql':
raise nose.SkipTest('Not applicable to MySQL legacy')
cols = {'Bool': Series([True,None]),
'Date': Series([datetime(2012, 5, 1), None]),
'Int' : Series([1, None], dtype='object'),
'Float': Series([1.1, None])
}
df = DataFrame(cols)
tbl = 'notnull_dtype_test'
df.to_sql(tbl, self.conn)
self.assertEqual(self._get_sqlite_column_type(tbl, 'Bool'), 'INTEGER')
self.assertEqual(self._get_sqlite_column_type(tbl, 'Date'), 'TIMESTAMP')
self.assertEqual(self._get_sqlite_column_type(tbl, 'Int'), 'INTEGER')
self.assertEqual(self._get_sqlite_column_type(tbl, 'Float'), 'REAL')
def test_illegal_names(self):
# For sqlite, these should work fine
df = DataFrame([[1, 2], [3, 4]], columns=['a', 'b'])
# Raise error on blank
self.assertRaises(ValueError, df.to_sql, "", self.conn,
flavor=self.flavor)
for ndx, weird_name in enumerate(['test_weird_name]','test_weird_name[',
'test_weird_name`','test_weird_name"', 'test_weird_name\'',
'_b.test_weird_name_01-30', '"_b.test_weird_name_01-30"',
'99beginswithnumber', '12345', u'\xe9']):
df.to_sql(weird_name, self.conn, flavor=self.flavor)
sql.table_exists(weird_name, self.conn)
df2 = DataFrame([[1, 2], [3, 4]], columns=['a', weird_name])
c_tbl = 'test_weird_col_name%d'%ndx
df2.to_sql(c_tbl, self.conn, flavor=self.flavor)
sql.table_exists(c_tbl, self.conn)
class TestMySQLLegacy(MySQLMixIn, TestSQLiteFallback):
"""
Test the legacy mode against a MySQL database.
"""
flavor = 'mysql'
@classmethod
def setUpClass(cls):
cls.setup_driver()
# test connection
try:
cls.connect()
except cls.driver.err.OperationalError:
raise nose.SkipTest("{0} - can't connect to MySQL server".format(cls))
@classmethod
def setup_driver(cls):
try:
import pymysql
cls.driver = pymysql
except ImportError:
raise nose.SkipTest('pymysql not installed')
@classmethod
def connect(cls):
return cls.driver.connect(host='127.0.0.1', user='root', passwd='', db='pandas_nosetest')
def _count_rows(self, table_name):
cur = self._get_exec()
cur.execute(
"SELECT count(*) AS count_1 FROM %s" % table_name)
rows = cur.fetchall()
return rows[0][0]
def setUp(self):
try:
self.conn = self.connect()
except self.driver.err.OperationalError:
raise nose.SkipTest("Can't connect to MySQL server")
self.pandasSQL = sql.SQLiteDatabase(self.conn, 'mysql')
self._load_iris_data()
self._load_test1_data()
def test_a_deprecation(self):
with tm.assert_produces_warning(FutureWarning):
sql.to_sql(self.test_frame1, 'test_frame1', self.conn,
flavor='mysql')
self.assertTrue(
sql.has_table('test_frame1', self.conn, flavor='mysql'),
'Table not written to DB')
def _get_index_columns(self, tbl_name):
ixs = sql.read_sql_query(
"SHOW INDEX IN %s" % tbl_name, self.conn)
ix_cols = {}
for ix_name, ix_col in zip(ixs.Key_name, ixs.Column_name):
if ix_name not in ix_cols:
ix_cols[ix_name] = []
ix_cols[ix_name].append(ix_col)
return list(ix_cols.values())
def test_to_sql_save_index(self):
self._to_sql_save_index()
def test_illegal_names(self):
df = DataFrame([[1, 2], [3, 4]], columns=['a', 'b'])
# These tables and columns should be ok
for ndx, ok_name in enumerate(['99beginswithnumber','12345']):
df.to_sql(ok_name, self.conn, flavor=self.flavor, index=False,
if_exists='replace')
df2 = DataFrame([[1, 2], [3, 4]], columns=['a', ok_name])
df2.to_sql('test_ok_col_name', self.conn, flavor=self.flavor, index=False,
if_exists='replace')
# For MySQL, these should raise ValueError
for ndx, illegal_name in enumerate(['test_illegal_name]','test_illegal_name[',
'test_illegal_name`','test_illegal_name"', 'test_illegal_name\'', '']):
self.assertRaises(ValueError, df.to_sql, illegal_name, self.conn,
flavor=self.flavor, index=False)
df2 = DataFrame([[1, 2], [3, 4]], columns=['a', illegal_name])
self.assertRaises(ValueError, df2.to_sql, 'test_illegal_col_name%d'%ndx,
self.conn, flavor=self.flavor, index=False)
#------------------------------------------------------------------------------
#--- Old tests from 0.13.1 (before refactor using sqlalchemy)
_formatters = {
datetime: lambda dt: "'%s'" % date_format(dt),
str: lambda x: "'%s'" % x,
np.str_: lambda x: "'%s'" % x,
compat.text_type: lambda x: "'%s'" % x,
compat.binary_type: lambda x: "'%s'" % x,
float: lambda x: "%.8f" % x,
int: lambda x: "%s" % x,
type(None): lambda x: "NULL",
np.float64: lambda x: "%.10f" % x,
bool: lambda x: "'%s'" % x,
}
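# Illustrative example (values made up) of how format_query uses _formatters:
#   format_query("INSERT INTO test VALUES (%s, %s, %s)", 1.5, 'foo', None)
#   -> "INSERT INTO test VALUES (1.50000000, 'foo', NULL)"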
def format_query(sql, *args):
"""
"""
processed_args = []
for arg in args:
if isinstance(arg, float) and isnull(arg):
arg = None
formatter = _formatters[type(arg)]
processed_args.append(formatter(arg))
return sql % tuple(processed_args)
def _skip_if_no_pymysql():
try:
import pymysql
except ImportError:
raise nose.SkipTest('pymysql not installed, skipping')
class TestXSQLite(SQLiteMixIn, tm.TestCase):
def setUp(self):
self.conn = sqlite3.connect(':memory:')
def test_basic(self):
frame = tm.makeTimeDataFrame()
self._check_roundtrip(frame)
def test_write_row_by_row(self):
frame = tm.makeTimeDataFrame()
frame.ix[0, 0] = np.nan
create_sql = sql.get_schema(frame, 'test', 'sqlite')
cur = self.conn.cursor()
cur.execute(create_sql)
cur = self.conn.cursor()
ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
for idx, row in frame.iterrows():
fmt_sql = format_query(ins, *row)
sql.tquery(fmt_sql, cur=cur)
self.conn.commit()
result = sql.read_frame("select * from test", con=self.conn)
result.index = frame.index
tm.assert_frame_equal(result, frame)
def test_execute(self):
frame = tm.makeTimeDataFrame()
create_sql = sql.get_schema(frame, 'test', 'sqlite')
cur = self.conn.cursor()
cur.execute(create_sql)
ins = "INSERT INTO test VALUES (?, ?, ?, ?)"
row = frame.ix[0]
sql.execute(ins, self.conn, params=tuple(row))
self.conn.commit()
result = sql.read_frame("select * from test", self.conn)
result.index = frame.index[:1]
tm.assert_frame_equal(result, frame[:1])
def test_schema(self):
frame = tm.makeTimeDataFrame()
create_sql = sql.get_schema(frame, 'test', 'sqlite')
lines = create_sql.splitlines()
for l in lines:
tokens = l.split(' ')
if len(tokens) == 2 and tokens[0] == 'A':
self.assertTrue(tokens[1] == 'DATETIME')
frame = tm.makeTimeDataFrame()
create_sql = sql.get_schema(frame, 'test', 'sqlite', keys=['A', 'B'],)
lines = create_sql.splitlines()
self.assertTrue('PRIMARY KEY ("A", "B")' in create_sql)
cur = self.conn.cursor()
cur.execute(create_sql)
def test_execute_fail(self):
create_sql = """
CREATE TABLE test
(
a TEXT,
b TEXT,
c REAL,
PRIMARY KEY (a, b)
);
"""
cur = self.conn.cursor()
cur.execute(create_sql)
sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn)
try:
sys.stdout = StringIO()
self.assertRaises(Exception, sql.execute,
'INSERT INTO test VALUES("foo", "bar", 7)',
self.conn)
finally:
sys.stdout = sys.__stdout__
def test_execute_closed_connection(self):
create_sql = """
CREATE TABLE test
(
a TEXT,
b TEXT,
c REAL,
PRIMARY KEY (a, b)
);
"""
cur = self.conn.cursor()
cur.execute(create_sql)
sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
self.conn.close()
try:
sys.stdout = StringIO()
self.assertRaises(Exception, sql.tquery, "select * from test",
con=self.conn)
finally:
sys.stdout = sys.__stdout__
# Initialize connection again (needed for tearDown)
self.setUp()
def test_na_roundtrip(self):
pass
def _check_roundtrip(self, frame):
sql.write_frame(frame, name='test_table', con=self.conn)
result = sql.read_frame("select * from test_table", self.conn)
# HACK! Change this once indexes are handled properly.
result.index = frame.index
expected = frame
tm.assert_frame_equal(result, expected)
frame['txt'] = ['a'] * len(frame)
frame2 = frame.copy()
frame2['Idx'] = Index(lrange(len(frame2))) + 10
sql.write_frame(frame2, name='test_table2', con=self.conn)
result = sql.read_frame("select * from test_table2", self.conn,
index_col='Idx')
expected = frame.copy()
expected.index = Index(lrange(len(frame2))) + 10
expected.index.name = 'Idx'
tm.assert_frame_equal(expected, result)
def test_tquery(self):
frame = tm.makeTimeDataFrame()
sql.write_frame(frame, name='test_table', con=self.conn)
result = sql.tquery("select A from test_table", self.conn)
        expected = Series(frame.A.values, frame.index)  # result should not carry a name
result = Series(result, frame.index)
tm.assert_series_equal(result, expected)
try:
sys.stdout = StringIO()
self.assertRaises(sql.DatabaseError, sql.tquery,
'select * from blah', con=self.conn)
self.assertRaises(sql.DatabaseError, sql.tquery,
'select * from blah', con=self.conn, retry=True)
finally:
sys.stdout = sys.__stdout__
def test_uquery(self):
frame = tm.makeTimeDataFrame()
sql.write_frame(frame, name='test_table', con=self.conn)
stmt = 'INSERT INTO test_table VALUES(2.314, -123.1, 1.234, 2.3)'
self.assertEqual(sql.uquery(stmt, con=self.conn), 1)
try:
sys.stdout = StringIO()
self.assertRaises(sql.DatabaseError, sql.tquery,
'insert into blah values (1)', con=self.conn)
self.assertRaises(sql.DatabaseError, sql.tquery,
'insert into blah values (1)', con=self.conn,
retry=True)
finally:
sys.stdout = sys.__stdout__
    def test_keyword_as_column_names(self):
        '''
        A SQL keyword ('From') should be usable as a column name.
        '''
        df = DataFrame({'From': np.ones(5)})
        sql.write_frame(df, con=self.conn, name='testkeywords')
def test_onecolumn_of_integer(self):
# GH 3628
# a column_of_integers dataframe should transfer well to sql
        mono_df = DataFrame([1, 2], columns=['c0'])
        sql.write_frame(mono_df, con=self.conn, name='mono_df')
        # computing the sum via sql
        con_x = self.conn
        the_sum = sum(my_c0[0] for my_c0 in con_x.execute("select * from mono_df"))
        # it should not fail, and gives 3 (Issue #3628)
        self.assertEqual(the_sum, 3)
        result = sql.read_frame("select * from mono_df", con_x)
        tm.assert_frame_equal(result, mono_df)
def test_if_exists(self):
df_if_exists_1 = DataFrame({'col1': [1, 2], 'col2': ['A', 'B']})
df_if_exists_2 = DataFrame({'col1': [3, 4, 5], 'col2': ['C', 'D', 'E']})
table_name = 'table_if_exists'
sql_select = "SELECT * FROM %s" % table_name
def clean_up(test_table_to_drop):
"""
Drops tables created from individual tests
so no dependencies arise from sequential tests
"""
self.drop_table(test_table_to_drop)
# test if invalid value for if_exists raises appropriate error
self.assertRaises(ValueError,
sql.write_frame,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='sqlite',
if_exists='notvalidvalue')
clean_up(table_name)
# test if_exists='fail'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail')
self.assertRaises(ValueError,
sql.write_frame,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='sqlite',
if_exists='fail')
# test if_exists='replace'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
# test if_exists='append'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='append')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
class TestXMySQL(MySQLMixIn, tm.TestCase):
@classmethod
def setUpClass(cls):
_skip_if_no_pymysql()
# test connection
import pymysql
try:
# Try Travis defaults.
# No real user should allow root access with a blank password.
pymysql.connect(host='localhost', user='root', passwd='',
db='pandas_nosetest')
except:
pass
else:
return
try:
pymysql.connect(read_default_group='pandas')
except pymysql.ProgrammingError as e:
raise nose.SkipTest(
"Create a group of connection parameters under the heading "
"[pandas] in your system's mysql default file, "
"typically located at ~/.my.cnf or /etc/.my.cnf. ")
except pymysql.Error as e:
raise nose.SkipTest(
"Cannot connect to database. "
"Create a group of connection parameters under the heading "
"[pandas] in your system's mysql default file, "
"typically located at ~/.my.cnf or /etc/.my.cnf. ")
def setUp(self):
_skip_if_no_pymysql()
import pymysql
try:
# Try Travis defaults.
# No real user should allow root access with a blank password.
self.conn = pymysql.connect(host='localhost', user='root', passwd='',
db='pandas_nosetest')
except:
pass
else:
return
try:
self.conn = pymysql.connect(read_default_group='pandas')
except pymysql.ProgrammingError as e:
raise nose.SkipTest(
"Create a group of connection parameters under the heading "
"[pandas] in your system's mysql default file, "
"typically located at ~/.my.cnf or /etc/.my.cnf. ")
except pymysql.Error as e:
raise nose.SkipTest(
"Cannot connect to database. "
"Create a group of connection parameters under the heading "
"[pandas] in your system's mysql default file, "
"typically located at ~/.my.cnf or /etc/.my.cnf. ")
def test_basic(self):
_skip_if_no_pymysql()
frame = tm.makeTimeDataFrame()
self._check_roundtrip(frame)
def test_write_row_by_row(self):
_skip_if_no_pymysql()
frame = tm.makeTimeDataFrame()
frame.ix[0, 0] = np.nan
drop_sql = "DROP TABLE IF EXISTS test"
create_sql = sql.get_schema(frame, 'test', 'mysql')
cur = self.conn.cursor()
cur.execute(drop_sql)
cur.execute(create_sql)
ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
for idx, row in frame.iterrows():
fmt_sql = format_query(ins, *row)
sql.tquery(fmt_sql, cur=cur)
self.conn.commit()
result = sql.read_frame("select * from test", con=self.conn)
result.index = frame.index
tm.assert_frame_equal(result, frame)
def test_execute(self):
_skip_if_no_pymysql()
frame = tm.makeTimeDataFrame()
drop_sql = "DROP TABLE IF EXISTS test"
create_sql = sql.get_schema(frame, 'test', 'mysql')
cur = self.conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Unknown table.*")
cur.execute(drop_sql)
cur.execute(create_sql)
ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
row = frame.ix[0].values.tolist()
sql.execute(ins, self.conn, params=tuple(row))
self.conn.commit()
result = sql.read_frame("select * from test", self.conn)
result.index = frame.index[:1]
tm.assert_frame_equal(result, frame[:1])
def test_schema(self):
_skip_if_no_pymysql()
frame = tm.makeTimeDataFrame()
create_sql = sql.get_schema(frame, 'test', 'mysql')
lines = create_sql.splitlines()
for l in lines:
tokens = l.split(' ')
if len(tokens) == 2 and tokens[0] == 'A':
self.assertTrue(tokens[1] == 'DATETIME')
frame = tm.makeTimeDataFrame()
drop_sql = "DROP TABLE IF EXISTS test"
create_sql = sql.get_schema(frame, 'test', 'mysql', keys=['A', 'B'],)
lines = create_sql.splitlines()
self.assertTrue('PRIMARY KEY (`A`, `B`)' in create_sql)
cur = self.conn.cursor()
cur.execute(drop_sql)
cur.execute(create_sql)
def test_execute_fail(self):
_skip_if_no_pymysql()
drop_sql = "DROP TABLE IF EXISTS test"
create_sql = """
CREATE TABLE test
(
a TEXT,
b TEXT,
c REAL,
PRIMARY KEY (a(5), b(5))
);
"""
cur = self.conn.cursor()
cur.execute(drop_sql)
cur.execute(create_sql)
sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn)
try:
sys.stdout = StringIO()
self.assertRaises(Exception, sql.execute,
'INSERT INTO test VALUES("foo", "bar", 7)',
self.conn)
finally:
sys.stdout = sys.__stdout__
def test_execute_closed_connection(self):
_skip_if_no_pymysql()
drop_sql = "DROP TABLE IF EXISTS test"
create_sql = """
CREATE TABLE test
(
a TEXT,
b TEXT,
c REAL,
PRIMARY KEY (a(5), b(5))
);
"""
cur = self.conn.cursor()
cur.execute(drop_sql)
cur.execute(create_sql)
sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
self.conn.close()
try:
sys.stdout = StringIO()
self.assertRaises(Exception, sql.tquery, "select * from test",
con=self.conn)
finally:
sys.stdout = sys.__stdout__
# Initialize connection again (needed for tearDown)
self.setUp()
def test_na_roundtrip(self):
_skip_if_no_pymysql()
pass
def _check_roundtrip(self, frame):
_skip_if_no_pymysql()
drop_sql = "DROP TABLE IF EXISTS test_table"
cur = self.conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Unknown table.*")
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table', con=self.conn, flavor='mysql')
result = sql.read_frame("select * from test_table", self.conn)
# HACK! Change this once indexes are handled properly.
result.index = frame.index
result.index.name = frame.index.name
expected = frame
tm.assert_frame_equal(result, expected)
frame['txt'] = ['a'] * len(frame)
frame2 = frame.copy()
index = Index(lrange(len(frame2))) + 10
frame2['Idx'] = index
drop_sql = "DROP TABLE IF EXISTS test_table2"
cur = self.conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Unknown table.*")
cur.execute(drop_sql)
sql.write_frame(frame2, name='test_table2', con=self.conn, flavor='mysql')
result = sql.read_frame("select * from test_table2", self.conn,
index_col='Idx')
expected = frame.copy()
# HACK! Change this once indexes are handled properly.
expected.index = index
expected.index.names = result.index.names
tm.assert_frame_equal(expected, result)
def test_tquery(self):
try:
import pymysql
except ImportError:
raise nose.SkipTest("no pymysql")
frame = tm.makeTimeDataFrame()
drop_sql = "DROP TABLE IF EXISTS test_table"
cur = self.conn.cursor()
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table', con=self.conn, flavor='mysql')
result = sql.tquery("select A from test_table", self.conn)
        expected = Series(frame.A.values, frame.index)  # result should not carry a name
result = Series(result, frame.index)
tm.assert_series_equal(result, expected)
try:
sys.stdout = StringIO()
self.assertRaises(sql.DatabaseError, sql.tquery,
'select * from blah', con=self.conn)
self.assertRaises(sql.DatabaseError, sql.tquery,
'select * from blah', con=self.conn, retry=True)
finally:
sys.stdout = sys.__stdout__
def test_uquery(self):
try:
import pymysql
except ImportError:
raise nose.SkipTest("no pymysql")
frame = tm.makeTimeDataFrame()
drop_sql = "DROP TABLE IF EXISTS test_table"
cur = self.conn.cursor()
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table', con=self.conn, flavor='mysql')
stmt = 'INSERT INTO test_table VALUES(2.314, -123.1, 1.234, 2.3)'
self.assertEqual(sql.uquery(stmt, con=self.conn), 1)
try:
sys.stdout = StringIO()
self.assertRaises(sql.DatabaseError, sql.tquery,
'insert into blah values (1)', con=self.conn)
self.assertRaises(sql.DatabaseError, sql.tquery,
'insert into blah values (1)', con=self.conn,
retry=True)
finally:
sys.stdout = sys.__stdout__
    def test_keyword_as_column_names(self):
        '''
        A SQL keyword ('From') should be usable as a column name.
        '''
        _skip_if_no_pymysql()
        df = DataFrame({'From': np.ones(5)})
        sql.write_frame(df, con=self.conn, name='testkeywords',
                        if_exists='replace', flavor='mysql')
def test_if_exists(self):
_skip_if_no_pymysql()
df_if_exists_1 = DataFrame({'col1': [1, 2], 'col2': ['A', 'B']})
df_if_exists_2 = DataFrame({'col1': [3, 4, 5], 'col2': ['C', 'D', 'E']})
table_name = 'table_if_exists'
sql_select = "SELECT * FROM %s" % table_name
def clean_up(test_table_to_drop):
"""
Drops tables created from individual tests
so no dependencies arise from sequential tests
"""
self.drop_table(test_table_to_drop)
# test if invalid value for if_exists raises appropriate error
self.assertRaises(ValueError,
sql.write_frame,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='mysql',
if_exists='notvalidvalue')
clean_up(table_name)
# test if_exists='fail'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail')
self.assertRaises(ValueError,
sql.write_frame,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='mysql',
if_exists='fail')
# test if_exists='replace'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
# test if_exists='append'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='append')
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
|
aviarypl/mozilla-l10n-addons-server
|
refs/heads/master
|
src/olympia/translations/__init__.py
|
18
|
from django.conf import settings
from django.utils.translation import trans_real
from jinja2.filters import do_dictsort
LOCALES = [(trans_real.to_locale(k).replace('_', '-'), v) for k, v in
do_dictsort(settings.LANGUAGES)]
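# Illustrative (assumed) shape of one entry: the settings.LANGUAGES key
# 'en-us' becomes the locale code 'en-US', paired with whatever display
# name settings provides, e.g. ('en-US', 'English (US)').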
|
codrut3/tensorflow
|
refs/heads/master
|
tensorflow/python/framework/tensor_util_test.py
|
14
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for tensor_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class TensorUtilTest(test.TestCase):
def testFloat(self):
value = 10.0
t = tensor_util.make_tensor_proto(value)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape {}
float_val: %.1f
""" % value, t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array(value, dtype=np.float32), a)
def testFloatN(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0])
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTyped(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerce(self):
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerceNdarray(self):
arr = np.asarray([10, 20, 30], dtype="int")
t = tensor_util.make_tensor_proto(arr, dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatSizes(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[1, 3])
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float32), a)
def testFloatSizes2(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[3, 1])
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } dim { size: 1 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } dim { size: 1 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0], [20.0], [30.0]], dtype=np.float32), a)
def testFloatSizesLessValues(self):
t = tensor_util.make_tensor_proto(10.0, shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
float_val: 10.0
""", t)
# No conversion to Ndarray for this one: not enough values.
def testFloatNpArrayFloat64(self):
t = tensor_util.make_tensor_proto(
np.array([[10.0, 20.0, 30.0]], dtype=np.float64))
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_DOUBLE
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "@$\000\000\000\000\000\000@4\000\000\000\000\000\000@>\000\000\000\000\000\000"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_DOUBLE
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000$@\000\000\000\000\000\0004@\000\000\000\000\000\000>@"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float64, a.dtype)
self.assertAllClose(
np.array([[10.0, 20.0, 30.0]], dtype=np.float64),
tensor_util.MakeNdarray(t))
def testFloatTypesWithImplicitRepeat(self):
for dtype, nptype in [(dtypes.float32, np.float32),
(dtypes.float64, np.float64)]:
t = tensor_util.make_tensor_proto([10.0], shape=[3, 4], dtype=dtype)
a = tensor_util.MakeNdarray(t)
self.assertAllClose(
np.array(
[[10.0, 10.0, 10.0, 10.0],
[10.0, 10.0, 10.0, 10.0],
[10.0, 10.0, 10.0, 10.0]],
dtype=nptype),
a)
def testHalf(self):
t = tensor_util.make_tensor_proto(np.array([10.0, 20.0], dtype=np.float16))
self.assertProtoEquals("""
dtype: DT_HALF
tensor_shape {
dim {
size: 2
}
}
half_val: 18688
half_val: 19712
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.float16, a.dtype)
self.assertAllClose(np.array([10.0, 20.0], dtype=np.float16), a)
def testInt(self):
t = tensor_util.make_tensor_proto(10)
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape {}
int_val: 10
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int32, a.dtype)
self.assertAllClose(np.array(10, dtype=np.int32), a)
def testLargeInt(self):
value = np.iinfo(np.int64).max
t = tensor_util.make_tensor_proto(value)
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: %d
""" % value, t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array(value, dtype=np.int64), a)
def testLargeNegativeInt(self):
# We don't use the min np.int64 value here
# because it breaks np.abs().
#
# np.iinfo(np.int64).min = -9223372036854775808
# np.iinfo(np.int64).max = 9223372036854775807
# np.abs(-9223372036854775808) = -9223372036854775808
value = np.iinfo(np.int64).min + 1
t = tensor_util.make_tensor_proto(value)
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: %d
""" % value, t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array(value, dtype=np.int64), a)
def testIntNDefaultType(self):
t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape { dim { size: 2 } dim { size: 2 } }
tensor_content: "\000\000\000\\n\000\000\000\024\000\000\000\036\000\000\000("
""", t)
else:
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape { dim { size: 2 } dim { size: 2 } }
tensor_content: "\\n\000\000\000\024\000\000\000\036\000\000\000(\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int32, a.dtype)
self.assertAllClose(np.array([[10, 20], [30, 40]], dtype=np.int32), a)
def testIntTypes(self):
for dtype, nptype in [(dtypes.int32, np.int32),
(dtypes.uint8, np.uint8),
(dtypes.uint16, np.uint16),
(dtypes.int16, np.int16),
(dtypes.int8, np.int8)]:
# Test with array.
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtype)
self.assertEquals(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEquals(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
# Test with ndarray.
t = tensor_util.make_tensor_proto(np.array([10, 20, 30], dtype=nptype))
self.assertEquals(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEquals(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
def testIntTypesWithImplicitRepeat(self):
for dtype, nptype in [(dtypes.int64, np.int64),
(dtypes.int32, np.int32),
(dtypes.uint8, np.uint8),
(dtypes.uint16, np.uint16),
(dtypes.int16, np.int16),
(dtypes.int8, np.int8)]:
self.assertAllEqual(
np.array(
[[10, 10, 10, 10],
[10, 10, 10, 10],
[10, 10, 10, 10]],
dtype=nptype),
tensor_util.MakeNdarray(
tensor_util.make_tensor_proto(
[10],
shape=[3, 4],
dtype=dtype)))
def testIntMixedWithDimension(self):
# Github issue: 11974
dtype = dtypes.int32
nptype = np.int32
t = tensor_util.make_tensor_proto(
[10, tensor_shape.Dimension(20), 30], dtype=dtype)
self.assertEquals(dtype, t.dtype)
a = tensor_util.MakeNdarray(t)
self.assertEquals(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
def testLong(self):
t = tensor_util.make_tensor_proto(10, dtype=dtypes.int64)
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: 10
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array(10, dtype=np.int64), a)
def testLongN(self):
t = tensor_util.make_tensor_proto(
[10, 20, 30], shape=[1, 3], dtype=dtypes.int64)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000\000\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array([[10, 20, 30]], dtype=np.int64), a)
def testLongNpArray(self):
t = tensor_util.make_tensor_proto(np.array([10, 20, 30]))
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000\000\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape { dim { size: 3 } }
tensor_content: "\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=np.int64), a)
def testQuantizedTypes(self):
# Test with array.
data = [(21,), (22,), (23,)]
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint32)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_QINT32
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000\000\025\000\000\000\026\000\000\000\027"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_QINT32
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\000\000\026\000\000\000\027\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(dtypes.qint32.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.quint8)
self.assertProtoEquals("""
dtype: DT_QUINT8
tensor_shape { dim { size: 3 } }
tensor_content: "\025\026\027"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(dtypes.quint8.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint8)
self.assertProtoEquals("""
dtype: DT_QINT8
tensor_shape { dim { size: 3 } }
tensor_content: "\025\026\027"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(dtypes.qint8.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.quint16)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_QUINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\000\025\000\026\000\027"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_QUINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\026\000\027\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(dtypes.quint16.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint16)
if sys.byteorder == "big":
self.assertProtoEquals("""
dtype: DT_QINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\000\025\000\026\000\027"
""", t)
else:
self.assertProtoEquals("""
dtype: DT_QINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\026\000\027\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(dtypes.qint16.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
def testString(self):
t = tensor_util.make_tensor_proto("foo")
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape {}
string_val: "foo"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertEquals([b"foo"], a)
def testStringWithImplicitRepeat(self):
t = tensor_util.make_tensor_proto("f", shape=[3, 4])
a = tensor_util.MakeNdarray(t)
self.assertAllEqual(np.array([[b"f"] * 4] * 3, dtype=np.object), a)
def testStringN(self):
t = tensor_util.make_tensor_proto([b"foo", b"bar", b"baz"], shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testStringNpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[b"a", b"ab"], [b"abc", b"abcd"]]))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 2 } dim { size: 2 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array([[b"a", b"ab"], [b"abc", b"abcd"]]), a)
def testArrayMethod(self):
class Wrapper(object):
def __array__(self):
return np.array([b"foo", b"bar", b"baz"])
t = tensor_util.make_tensor_proto(Wrapper(), shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testArrayInterface(self):
class Wrapper(object):
@property
def __array_interface__(self):
return np.array([b"foo", b"bar", b"baz"]).__array_interface__
t = tensor_util.make_tensor_proto(Wrapper(), shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testStringTuple(self):
t = tensor_util.make_tensor_proto((b"a", b"ab", b"abc", b"abcd"))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 4 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array((b"a", b"ab", b"abc", b"abcd")), a)
def testStringNestedTuple(self):
t = tensor_util.make_tensor_proto(((b"a", b"ab"), (b"abc", b"abcd")))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 2 } dim { size: 2 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.object, a.dtype)
self.assertAllEqual(np.array(((b"a", b"ab"), (b"abc", b"abcd"))), a)
def testComplex64(self):
t = tensor_util.make_tensor_proto((1 + 2j), dtype=dtypes.complex64)
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape {}
scomplex_val: 1
scomplex_val: 2
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex64, a.dtype)
self.assertAllEqual(np.array(1 + 2j), a)
def testComplex128(self):
t = tensor_util.make_tensor_proto((1 + 2j), dtype=dtypes.complex128)
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape {}
dcomplex_val: 1
dcomplex_val: 2
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex128, a.dtype)
self.assertAllEqual(np.array(1 + 2j), a)
def testComplexWithImplicitRepeat(self):
for dtype, np_dtype in [(dtypes.complex64, np.complex64),
(dtypes.complex128, np.complex128)]:
t = tensor_util.make_tensor_proto((1 + 1j), shape=[3, 4], dtype=dtype)
a = tensor_util.MakeNdarray(t)
self.assertAllClose(
np.array(
[[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)],
[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)],
[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)]],
dtype=np_dtype),
a)
def testComplex64N(self):
t = tensor_util.make_tensor_proto(
[(1 + 2j), (3 + 4j), (5 + 6j)], shape=[1, 3], dtype=dtypes.complex64)
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape { dim { size: 1 } dim { size: 3 } }
scomplex_val: 1
scomplex_val: 2
scomplex_val: 3
scomplex_val: 4
scomplex_val: 5
scomplex_val: 6
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex64, a.dtype)
self.assertAllEqual(np.array([[(1 + 2j), (3 + 4j), (5 + 6j)]]), a)
def testComplex128N(self):
t = tensor_util.make_tensor_proto(
[(1 + 2j), (3 + 4j), (5 + 6j)], shape=[1, 3], dtype=dtypes.complex128)
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape { dim { size: 1 } dim { size: 3 } }
dcomplex_val: 1
dcomplex_val: 2
dcomplex_val: 3
dcomplex_val: 4
dcomplex_val: 5
dcomplex_val: 6
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex128, a.dtype)
self.assertAllEqual(np.array([[(1 + 2j), (3 + 4j), (5 + 6j)]]), a)
def testComplex64NpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]),
dtype=dtypes.complex64)
# scomplex_val are real_0, imag_0, real_1, imag_1, ...
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape { dim { size: 2 } dim { size: 2 } }
scomplex_val: 1
scomplex_val: 2
scomplex_val: 3
scomplex_val: 4
scomplex_val: 5
scomplex_val: 6
scomplex_val: 7
scomplex_val: 8
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex64, a.dtype)
self.assertAllEqual(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]), a)
def testComplex128NpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]),
dtype=dtypes.complex128)
# scomplex_val are real_0, imag_0, real_1, imag_1, ...
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape { dim { size: 2 } dim { size: 2 } }
dcomplex_val: 1
dcomplex_val: 2
dcomplex_val: 3
dcomplex_val: 4
dcomplex_val: 5
dcomplex_val: 6
dcomplex_val: 7
dcomplex_val: 8
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEquals(np.complex128, a.dtype)
self.assertAllEqual(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]), a)
def testUnsupportedDTypes(self):
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(np.array([1]), 0)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(3, dtype=dtypes.qint8)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto([3], dtype=dtypes.qint8)
# Validate the helpful error message when trying to convert an
# unconvertible list as strings.
with self.assertRaisesRegexp(TypeError, "Failed to convert object"):
tensor_util.make_tensor_proto([tensor_shape.Dimension(1)])
def testTensorShapeVerification(self):
array = np.array([[1], [2]])
correct_shape = (2, 1)
incorrect_shape = (1, 2)
tensor_util.make_tensor_proto(array, shape=correct_shape, verify_shape=True)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(
array, shape=incorrect_shape, verify_shape=True)
def testShapeTooLarge(self):
with self.assertRaises(ValueError):
tensor_util.make_tensor_proto(np.array([1, 2]), shape=[1])
def testLowRankSupported(self):
t = tensor_util.make_tensor_proto(np.array(7))
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: 7
""", t)
def testShapeEquals(self):
t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
self.assertTrue(tensor_util.ShapeEquals(t, [2, 2]))
self.assertTrue(tensor_util.ShapeEquals(t, (2, 2)))
self.assertTrue(
tensor_util.ShapeEquals(t, tensor_shape.as_shape([2, 2]).as_proto()))
self.assertFalse(tensor_util.ShapeEquals(t, [5, 3]))
self.assertFalse(tensor_util.ShapeEquals(t, [1, 4]))
self.assertFalse(tensor_util.ShapeEquals(t, [4]))
def testMockArray(self):
class MockArray(object):
def __init__(self, array):
self.array = array
def __array__(self, dtype=None):
return np.asarray(self.array, dtype)
with self.test_session() as sess:
ma = MockArray(np.array([10, 20, 30]))
t = ops.convert_to_tensor(ma)
a = sess.run(t)
self.assertEquals(np.int64, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=np.int64), a)
class ConstantValueTest(test.TestCase):
def testConstant(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = constant_op.constant(np_val)
self.assertAllClose(np_val, tensor_util.constant_value(tf_val))
np_val = np.random.rand(3, 0, 7).astype(np.float32)
tf_val = constant_op.constant(np_val)
self.assertAllClose(np_val, tensor_util.constant_value(tf_val))
def testUnknown(self):
tf_val = gen_state_ops._variable(
shape=[3, 4, 7],
dtype=dtypes.float32,
name="tf_val",
container="",
shared_name="")
self.assertIs(None, tensor_util.constant_value(tf_val))
def testShape(self):
np_val = np.array([1, 2, 3], dtype=np.int32)
tf_val = array_ops.shape(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(np_val, c_val)
self.assertEqual(np.int32, c_val.dtype)
def testFill(self):
np_val = np.array([-1, -1, -1], dtype=np.float32)
tf_val = array_ops.fill([3], constant_op.constant(-1.0))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(np_val, c_val)
self.assertEqual(np.float32, c_val.dtype)
def testSize(self):
tf_val = array_ops.size(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(6, c_val)
def testSizeOfScalar(self):
tf_val = array_ops.size(constant_op.constant(0.0))
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(1, c_val)
self.assertEqual(np.ndarray, type(c_val))
def testRank(self):
tf_val = array_ops.rank(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(np.ndarray, type(c_val))
self.assertEqual((), c_val.shape)
self.assertEqual(3, c_val)
# Repeat test using array_ops.rank_internal to avoid the optimization that
# happens in the rank function.
tf_val = array_ops.rank_internal(
constant_op.constant(
0.0, shape=[1, 2, 3]), optimize=False)
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(np.ndarray, type(c_val))
self.assertEqual((), c_val.shape)
self.assertEqual(3, c_val)
self.assertEqual([3], c_val)
def testCast(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = math_ops.cast(constant_op.constant(np_val), dtypes.float64)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val.astype(np.float64), c_val)
np_val = np.random.rand(3, 0, 7).astype(np.float32)
tf_val = math_ops.cast(constant_op.constant(np_val), dtypes.float64)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val.astype(np.float64), c_val)
def testConcat(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = array_ops.concat(
[np_val[0:1, :, :], np_val[1:2, :, :], np_val[2:3, :, :]], 0)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val, c_val)
tf_val = array_ops.concat(
[np_val[0, :, :], np_val[1, :, :], np_val[2, :, :]],
array_ops.placeholder(dtypes.int32))
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
tf_val = array_ops.concat([
np_val[0, :, :], array_ops.placeholder(dtypes.float32), np_val[2, :, :]
], 1)
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Axis0(self):
inputs = [np.random.rand(4, 7) for _ in range(3)]
np_val = np.array(inputs)
tf_val = array_ops.stack(inputs)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val, c_val)
tf_val = array_ops.stack(
[inputs[0], array_ops.placeholder(dtypes.float32), inputs[2]])
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Axis1(self):
inputs = [np.random.rand(4, 7) for _ in range(3)]
tf_val = array_ops.stack(inputs, axis=1)
c_val = tensor_util.constant_value(tf_val)
self.assertIsNone(c_val)
tf_val = array_ops.stack(
[inputs[0], array_ops.placeholder(dtypes.float32), inputs[2]], axis=1)
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Partial_Axis0(self):
input_ = np.random.rand(4, 7)
tf_val = array_ops.stack([input_, array_ops.placeholder(dtypes.float32)])
c_val = tensor_util.constant_value(tf_val, partial=True)
self.assertAllClose(input_, c_val[0])
self.assertIsNone(c_val[1])
def testPack_Partial_Axis1(self):
input_ = np.random.rand(4, 7)
tf_val = array_ops.stack([input_, array_ops.placeholder(dtypes.float32)],
axis=1)
c_val = tensor_util.constant_value(tf_val, partial=True)
self.assertIsNone(c_val)
def testEqual(self):
# Scalar inputs.
tf_val = math_ops.equal(constant_op.constant(1), constant_op.constant(1))
self.assertEqual(tensor_util.constant_value(tf_val), True)
tf_val = math_ops.equal(constant_op.constant(1), constant_op.constant(0))
self.assertEqual(tensor_util.constant_value(tf_val), False)
# Shaped inputs with broadcast semantics.
tf_val = math_ops.equal(constant_op.constant([[0, 1]]),
constant_op.constant([[0], [1]]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(c_val, [[True, False], [False, True]])
def testNotEqual(self):
# Scalar inputs.
tf_val = math_ops.not_equal(constant_op.constant(1),
constant_op.constant(1))
self.assertEqual(tensor_util.constant_value(tf_val), False)
tf_val = math_ops.not_equal(constant_op.constant(1),
constant_op.constant(0))
self.assertEqual(tensor_util.constant_value(tf_val), True)
# Shaped inputs with broadcast semantics.
tf_val = math_ops.not_equal(constant_op.constant([[0, 1]]),
constant_op.constant([[0], [1]]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(c_val, [[False, True], [True, False]])
class ConstantValueAsShapeTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testConstant(self):
np_val = np.random.rand(3).astype(np.int32)
tf_val = constant_op.constant(np_val)
self.assertEqual(
tensor_shape.TensorShape(np_val),
tensor_util.constant_value_as_shape(tf_val))
tf_val = constant_op.constant([], dtype=dtypes.int32)
self.assertEqual(
tensor_shape.TensorShape([]),
tensor_util.constant_value_as_shape(tf_val))
@test_util.run_in_graph_and_eager_modes()
def testShape(self):
tf_val = array_ops.shape(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual(tensor_shape.TensorShape([1, 2, 3]), c_val)
@test_util.run_in_graph_and_eager_modes()
def testMinusOneBecomesNone(self):
tf_val = constant_op.constant([-1, 1, -1], shape=[3])
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([None, 1, None], c_val.as_list())
def testPack(self):
tf_val = array_ops.stack(
[constant_op.constant(16), 37, array_ops.placeholder(dtypes.int32)])
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([16, 37, None], c_val.as_list())
def testConcat(self):
tf_val = array_ops.concat(
[[16, 37], array_ops.placeholder(
dtypes.int32, shape=(2,))], 0)
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([16, 37, None, None], c_val.as_list())
tf_val = array_ops.concat(
[[16, 37], array_ops.placeholder(
dtypes.int32, shape=(1,)), [48]], 0)
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([16, 37, None, 48], c_val.as_list())
def testSlice(self):
tf_val = array_ops.placeholder(dtypes.int32, shape=(4,))[0:2]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([None, None], c_val.as_list())
# begin:end
tf_val = constant_op.constant([10, 20, 30])[1:3]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([20, 30], c_val.as_list())
# begin:end:stride
tf_val = array_ops.strided_slice(
constant_op.constant([10, 20, 30]), [1], [3], strides=[2])
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([20], c_val.as_list())
# [1, 2, 16, 37, None, 48]
tf_val_orig = array_ops.concat(
[[1, 2, 16, 37], array_ops.placeholder(
dtypes.int32, shape=(1,)), [48]], 0)
# begin: no end
tf_val = tf_val_orig[2:]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([16, 37, None, 48], c_val.as_list())
# begin::negative slice
tf_val = tf_val_orig[2::-1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([16, 2, 1], c_val.as_list())
# :end:negative slice
tf_val = tf_val_orig[:1:-2]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([48, 37], c_val.as_list())
# begin:end:negative slice
tf_val = tf_val_orig[3:1:-1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([37, 16], c_val.as_list())
# begin:negative end:slice
tf_val = tf_val_orig[1:-3:1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([2, 16], c_val.as_list())
# negative begin::slice
tf_val = tf_val_orig[-3::1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([37, None, 48], c_val.as_list())
# negative begin::negative slice
tf_val = tf_val_orig[-3::-1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([37, 16, 2, 1], c_val.as_list())
# negative begin:negative end:negative slice
tf_val = tf_val_orig[-3:-5:-1]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([37, 16], c_val.as_list())
# Do not support shape inference for additional arguments
tf_val = constant_op.constant([10, 20, 30])[...]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual([None, None, None], c_val.as_list())
# Do not support shape inference for tensor slices.
tf_val = constant_op.constant([10, 20, 30])[
array_ops.placeholder(dtypes.int32, shape=()):]
c_val = tensor_util.constant_value_as_shape(tf_val)
self.assertEqual(tensor_shape.unknown_shape(), c_val)
# Do not support shape inference for higher rank
with self.assertRaises(ValueError):
tf_val = constant_op.constant([[10], [20], [30]])[:, 0:]
c_val = tensor_util.constant_value_as_shape(tf_val)
if __name__ == "__main__":
test.main()
|
elainexmas/boto
|
refs/heads/develop
|
tests/integration/kms/test_kms.py
|
99
|
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.kms.exceptions import NotFoundException
from tests.compat import unittest
class TestKMS(unittest.TestCase):
def setUp(self):
self.kms = boto.connect_kms()
def test_list_keys(self):
response = self.kms.list_keys()
self.assertIn('Keys', response)
def test_handle_not_found_exception(self):
with self.assertRaises(NotFoundException):
# Describe some key that does not exists
self.kms.describe_key(
key_id='nonexistant_key',
)
|
jayrav13/presidency
|
refs/heads/master
|
presidency/executive_orders/year.py
|
1
|
from lxml import html
import requests
import json
import time
import sys
"""
Year
A class to return all years for which Executive Orders are archived.
"""
class Year():
"""
all
Return a list of all years.
"""
@staticmethod
def all():
page = requests.get('http://www.presidency.ucsb.edu/executive_orders.php')
tree = html.document_fromstring(page.text)
years = tree.xpath('//select[@id="year"]')[0].xpath('option')
return [int(x.text_content()) for x in years]
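# Illustrative usage (hypothetical; requires network access to
# presidency.ucsb.edu):
#   years = Year.all()  # -> list of ints, one per archived year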
|
ondrokrc/gramps
|
refs/heads/master
|
gramps/plugins/export/export.gpr.py
|
1
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
MODULE_VERSION="5.0"
#------------------------------------------------------------------------
#
# Comma _Separated Values Spreadsheet (CSV)
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_csv'
plg.name = _("Comma Separated Values Spreadsheet (CSV)")
plg.name_accell = _("Comma _Separated Values Spreadsheet (CSV)")
plg.description = _("CSV is a common spreadsheet format.")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportcsv.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'CSVWriterOptionBox'
plg.export_options_title = _('CSV spreadsheet options')
plg.extension = "csv"
#------------------------------------------------------------------------
#
# Web Family Tree export
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_webfamtree'
plg.name = _('Web Family Tree')
plg.name_accell = _('_Web Family Tree')
plg.description = _("Web Family Tree format")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportftree.py'
plg.ptype = EXPORT
plg.export_function = 'writeData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('Web Family Tree export options')
plg.extension = "wft"
#------------------------------------------------------------------------
#
# GEDCOM
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_ged'
plg.name = _('GEDCOM')
plg.name_accell = _('GE_DCOM')
plg.description = _('GEDCOM is used to transfer data between genealogy programs. '
'Most genealogy software will accept a GEDCOM file as input.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportgedcom.py'
plg.ptype = EXPORT
plg.export_function = 'export_data'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('GEDCOM export options')
plg.extension = "ged"
#------------------------------------------------------------------------
#
# Geneweb
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_geneweb'
plg.name = _('GeneWeb')
plg.name_accell = _('_GeneWeb')
plg.description = _('GeneWeb is a web based genealogy program.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportgeneweb.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('GeneWeb export options')
plg.extension = "gw"
#------------------------------------------------------------------------
#
# GRAMPS package (portable XML)
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_gpkg'
plg.name = _('Gramps XML Package (family tree and media)')
plg.name_accell = _('Gra_mps XML Package (family tree and media)')
plg.description = _('Gramps package is an archived XML family tree together '
'with the media object files.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportpkg.py'
plg.ptype = EXPORT
plg.export_function = 'writeData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('Gramps package export options')
plg.extension = "gpkg"
#------------------------------------------------------------------------
#
# GRAMPS XML database
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_gramps'
plg.name = _('Gramps XML (family tree)')
plg.name_accell = _('Gramps _XML (family tree)')
plg.description = _('Gramps XML export is a complete archived XML backup of a'
' Gramps family tree without the media object files.'
' Suitable for backup purposes.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportxml.py'
plg.ptype = EXPORT
plg.export_function = 'export_data'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('Gramps XML export options')
plg.extension = "gramps"
#------------------------------------------------------------------------
#
# vCalendar
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_vcal'
plg.name = _('vCalendar')
plg.name_accell = _('vC_alendar')
plg.description = _('vCalendar is used in many calendaring and PIM applications.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportvcalendar.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('vCalendar export options')
plg.extension = "vcs"
#------------------------------------------------------------------------
#
# vCard
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_vcard'
plg.name = _('vCard')
plg.name_accell = _('_vCard')
plg.description = _('vCard is used in many addressbook and pim applications.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportvcard.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('vCard export options')
plg.extension = "vcf"
|
hj3938/panda3d
|
refs/heads/master
|
direct/src/interval/IndirectInterval.py
|
8
|
"""IndirectInterval module: contains the IndirectInterval class"""
__all__ = ['IndirectInterval']
from panda3d.core import *
from panda3d.direct import *
from direct.directnotify.DirectNotifyGlobal import *
import Interval
import LerpBlendHelpers
class IndirectInterval(Interval.Interval):
"""
This class can be used to play samples of another interval, so
that only a subset of the interval is played, or the time is
compressed, or some such nonsense.
It keeps a reference to the interval itself and repeatedly calls
setT() on it, rather than actually starting the interval or
copying its members like Sequence() or Parallel(). This means two
IndirectIntervals that operate on the same nested interval may
have some interaction that you should be aware of.
"""
# Interval counter
indirectIntervalNum = 1
notify = directNotify.newCategory('IndirectInterval')
# Class methods
def __init__(self, interval,
startT = 0, endT = None, playRate = 1,
duration = None, blendType = 'noBlend', name = None):
self.interval = interval
self.startAtStart = (startT == 0)
self.endAtEnd = (endT == None or endT == interval.getDuration())
if endT == None:
endT = interval.getDuration()
if duration == None:
duration = abs(endT - startT) / playRate
if (name == None):
name = ('IndirectInterval-%d' %
IndirectInterval.indirectIntervalNum)
IndirectInterval.indirectIntervalNum += 1
self.startT = startT
self.endT = endT
self.deltaT = endT - startT
self.blendType = LerpBlendHelpers.getBlend(blendType)
Interval.Interval.__init__(self, name, duration)
def __calcT(self, t):
return self.startT + self.deltaT * self.blendType(t / self.duration)
def privInitialize(self, t):
state = self.interval.getState()
if state == CInterval.SInitial or state == CInterval.SFinal:
self.interval.privInitialize(self.__calcT(t))
else:
self.interval.privStep(self.__calcT(t))
self.currT = t
self.state = CInterval.SStarted
self.interval.privPostEvent()
def privInstant(self):
state = self.interval.getState()
if (state == CInterval.SInitial or state == CInterval.SFinal) and \
self.endAtEnd:
self.interval.privInstant()
self.currT = self.getDuration()
self.interval.privPostEvent()
self.intervalDone()
else:
if state == CInterval.SInitial or state == CInterval.SFinal:
self.interval.privInitialize(self.startT)
else:
self.interval.privStep(self.startT)
self.privFinalize()
def privStep(self, t):
self.interval.privStep(self.__calcT(t))
self.currT = t
self.state = CInterval.SStarted
self.interval.privPostEvent()
def privFinalize(self):
if self.endAtEnd:
self.interval.privFinalize()
else:
self.interval.privStep(self.endT)
self.interval.privInterrupt()
self.currT = self.getDuration()
self.state = CInterval.SFinal
self.interval.privPostEvent()
self.intervalDone()
def privReverseInitialize(self, t):
state = self.interval.getState()
if state == CInterval.SInitial or state == CInterval.SFinal:
self.interval.privReverseInitialize(self.__calcT(t))
else:
self.interval.privStep(self.__calcT(t))
self.currT = t
self.state = CInterval.SStarted
self.interval.privPostEvent()
def privReverseInstant(self):
state = self.interval.getState()
if (state == CInterval.SInitial or state == CInterval.SFinal) and \
self.startAtStart:
self.interval.privReverseInstant()
self.currT = 0
self.interval.privPostEvent()
else:
if state == CInterval.SInitial or state == CInterval.SFinal:
self.interval.privReverseInitialize(self.endT)
else:
self.interval.privStep(self.endT)
self.privReverseFinalize()
def privReverseFinalize(self):
if self.startAtStart:
self.interval.privReverseFinalize()
else:
self.interval.privStep(self.endT)
self.interval.privInterrupt()
self.currT = 0
self.state = CInterval.SInitial
self.interval.privPostEvent()
def privInterrupt(self):
self.interval.privInterrupt()
self.interval.privPostEvent()
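# Illustrative usage sketch (not part of the original module): IndirectInterval
# replays a slice of another interval by repeatedly calling setT() on it.
# Assuming a Panda3D scene with a NodePath called ``smiley`` and an existing
# LerpPosInterval, only seconds 2..8 of the nested interval are played back,
# at double speed:
#
#   from direct.interval.LerpInterval import LerpPosInterval
#   from direct.interval.IndirectInterval import IndirectInterval
#
#   move = LerpPosInterval(smiley, duration=10.0, pos=(0, 0, 10))
#   clip = IndirectInterval(move, startT=2.0, endT=8.0, playRate=2.0,
#                           blendType='easeInOut')
#   clip.start()   # takes 3 seconds of real time: (8 - 2) / 2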
|
mcrowson/django
|
refs/heads/master
|
tests/auth_tests/test_remote_user.py
|
275
|
from datetime import datetime
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.models import User
from django.test import TestCase, modify_settings, override_settings
from django.utils import timezone
@override_settings(ROOT_URLCONF='auth_tests.urls')
class RemoteUserTest(TestCase):
middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
backend = 'django.contrib.auth.backends.RemoteUserBackend'
header = 'REMOTE_USER'
# Usernames to be passed in REMOTE_USER for the test_known_user test case.
known_user = 'knownuser'
known_user2 = 'knownuser2'
def setUp(self):
self.patched_settings = modify_settings(
AUTHENTICATION_BACKENDS={'append': self.backend},
MIDDLEWARE_CLASSES={'append': self.middleware},
)
self.patched_settings.enable()
def tearDown(self):
self.patched_settings.disable()
def test_no_remote_user(self):
"""
        Tests requests where no remote user is specified and ensures that no
        users get created.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', **{self.header: None})
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', **{self.header: ''})
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
def test_unknown_user(self):
"""
Tests the case where the username passed in the header does not exist
as a User.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
self.assertEqual(response.context['user'].username, 'newuser')
self.assertEqual(User.objects.count(), num_users + 1)
User.objects.get(username='newuser')
# Another request with same user should not create any new users.
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
self.assertEqual(User.objects.count(), num_users + 1)
def test_known_user(self):
"""
Tests the case where the username passed in the header is a valid User.
"""
User.objects.create(username='knownuser')
User.objects.create(username='knownuser2')
num_users = User.objects.count()
response = self.client.get('/remote_user/',
**{self.header: self.known_user})
self.assertEqual(response.context['user'].username, 'knownuser')
self.assertEqual(User.objects.count(), num_users)
# Test that a different user passed in the headers causes the new user
# to be logged in.
response = self.client.get('/remote_user/',
**{self.header: self.known_user2})
self.assertEqual(response.context['user'].username, 'knownuser2')
self.assertEqual(User.objects.count(), num_users)
def test_last_login(self):
"""
Tests that a user's last_login is set the first time they make a
request but not updated in subsequent requests with the same session.
"""
user = User.objects.create(username='knownuser')
# Set last_login to something so we can determine if it changes.
default_login = datetime(2000, 1, 1)
if settings.USE_TZ:
default_login = default_login.replace(tzinfo=timezone.utc)
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/',
**{self.header: self.known_user})
self.assertNotEqual(default_login, response.context['user'].last_login)
user = User.objects.get(username='knownuser')
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/',
**{self.header: self.known_user})
self.assertEqual(default_login, response.context['user'].last_login)
def test_header_disappears(self):
"""
Tests that a logged in user is logged out automatically when
the REMOTE_USER header disappears during the same browser session.
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/',
**{self.header: self.known_user})
self.assertEqual(response.context['user'].username, 'knownuser')
# During the session, the REMOTE_USER header disappears. Should trigger logout.
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].is_anonymous(), True)
# verify the remoteuser middleware will not remove a user
# authenticated via another backend
User.objects.create_user(username='modeluser', password='foo')
self.client.login(username='modeluser', password='foo')
authenticate(username='modeluser', password='foo')
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].username, 'modeluser')
def test_user_switch_forces_new_login(self):
"""
        Tests that if the username in the header changes between requests,
        the original user is logged out.
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/',
**{self.header: self.known_user})
self.assertEqual(response.context['user'].username, 'knownuser')
# During the session, the REMOTE_USER changes to a different user.
response = self.client.get('/remote_user/',
**{self.header: "newnewuser"})
# Ensure that the current user is not the prior remote_user
# In backends that create a new user, username is "newnewuser"
# In backends that do not create new users, it is '' (anonymous user)
self.assertNotEqual(response.context['user'].username, 'knownuser')
class RemoteUserNoCreateBackend(RemoteUserBackend):
"""Backend that doesn't create unknown users."""
create_unknown_user = False
class RemoteUserNoCreateTest(RemoteUserTest):
"""
Contains the same tests as RemoteUserTest, but using a custom auth backend
class that doesn't create unknown users.
"""
backend = 'auth_tests.test_remote_user.RemoteUserNoCreateBackend'
def test_unknown_user(self):
num_users = User.objects.count()
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
class CustomRemoteUserBackend(RemoteUserBackend):
"""
Backend that overrides RemoteUserBackend methods.
"""
def clean_username(self, username):
"""
Grabs username before the @ character.
"""
return username.split('@')[0]
def configure_user(self, user):
"""
Sets user's email address.
"""
user.email = 'user@example.com'
user.save()
return user
class RemoteUserCustomTest(RemoteUserTest):
"""
Tests a custom RemoteUserBackend subclass that overrides the clean_username
and configure_user methods.
"""
backend = 'auth_tests.test_remote_user.CustomRemoteUserBackend'
# REMOTE_USER strings with email addresses for the custom backend to
# clean.
known_user = 'knownuser@example.com'
known_user2 = 'knownuser2@example.com'
def test_known_user(self):
"""
The strings passed in REMOTE_USER should be cleaned and the known users
should not have been configured with an email address.
"""
super(RemoteUserCustomTest, self).test_known_user()
self.assertEqual(User.objects.get(username='knownuser').email, '')
self.assertEqual(User.objects.get(username='knownuser2').email, '')
def test_unknown_user(self):
"""
The unknown user created should be configured with an email address.
"""
super(RemoteUserCustomTest, self).test_unknown_user()
newuser = User.objects.get(username='newuser')
self.assertEqual(newuser.email, 'user@example.com')
class CustomHeaderMiddleware(RemoteUserMiddleware):
"""
    Middleware that uses a custom HTTP auth user header.
"""
header = 'HTTP_AUTHUSER'
class CustomHeaderRemoteUserTest(RemoteUserTest):
"""
Tests a custom RemoteUserMiddleware subclass with custom HTTP auth user
header.
"""
middleware = (
'auth_tests.test_remote_user.CustomHeaderMiddleware'
)
header = 'HTTP_AUTHUSER'
class PersistentRemoteUserTest(RemoteUserTest):
"""
PersistentRemoteUserMiddleware keeps the user logged in even if the
subsequent calls do not contain the header value.
"""
middleware = 'django.contrib.auth.middleware.PersistentRemoteUserMiddleware'
require_header = False
def test_header_disappears(self):
"""
A logged in user is kept logged in even if the REMOTE_USER header
disappears during the same browser session.
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/', **{self.header: self.known_user})
self.assertEqual(response.context['user'].username, 'knownuser')
# Should stay logged in if the REMOTE_USER header disappears.
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].is_anonymous(), False)
self.assertEqual(response.context['user'].username, 'knownuser')
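# Illustrative sketch (not part of Django's test suite): outside of tests, the
# behaviour exercised above is enabled by adding the middleware and backend to
# a project's settings (RemoteUserMiddleware must come after SessionMiddleware
# and AuthenticationMiddleware), e.g.:
#
#   MIDDLEWARE_CLASSES += ['django.contrib.auth.middleware.RemoteUserMiddleware']
#   AUTHENTICATION_BACKENDS += ['django.contrib.auth.backends.RemoteUserBackend']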
|
kailIII/geraldo
|
refs/heads/master
|
site/newsite/django_1_0/django/contrib/localflavor/it/it_province.py
|
30
|
# -*- coding: utf-8 -*-
PROVINCE_CHOICES = (
('AG', 'Agrigento'),
('AL', 'Alessandria'),
('AN', 'Ancona'),
('AO', 'Aosta'),
('AR', 'Arezzo'),
('AP', 'Ascoli Piceno'),
('AT', 'Asti'),
('AV', 'Avellino'),
('BA', 'Bari'),
# ('BT', 'Barletta-Andria-Trani'), # active starting from 2009
('BL', 'Belluno'),
('BN', 'Benevento'),
('BG', 'Bergamo'),
('BI', 'Biella'),
('BO', 'Bologna'),
('BZ', 'Bolzano/Bozen'),
('BS', 'Brescia'),
('BR', 'Brindisi'),
('CA', 'Cagliari'),
('CL', 'Caltanissetta'),
('CB', 'Campobasso'),
('CI', 'Carbonia-Iglesias'),
('CE', 'Caserta'),
('CT', 'Catania'),
('CZ', 'Catanzaro'),
('CH', 'Chieti'),
('CO', 'Como'),
('CS', 'Cosenza'),
('CR', 'Cremona'),
('KR', 'Crotone'),
('CN', 'Cuneo'),
('EN', 'Enna'),
# ('FM', 'Fermo'), # active starting from 2009
('FE', 'Ferrara'),
('FI', 'Firenze'),
('FG', 'Foggia'),
('FC', 'Forlì-Cesena'),
('FR', 'Frosinone'),
('GE', 'Genova'),
('GO', 'Gorizia'),
('GR', 'Grosseto'),
('IM', 'Imperia'),
('IS', 'Isernia'),
('SP', 'La Spezia'),
('AQ', u'L’Aquila'),
('LT', 'Latina'),
('LE', 'Lecce'),
('LC', 'Lecco'),
('LI', 'Livorno'),
('LO', 'Lodi'),
('LU', 'Lucca'),
('MC', 'Macerata'),
('MN', 'Mantova'),
('MS', 'Massa-Carrara'),
('MT', 'Matera'),
('VS', 'Medio Campidano'),
('ME', 'Messina'),
('MI', 'Milano'),
('MO', 'Modena'),
# ('MB', 'Monza e Brianza'), # active starting from 2009
('NA', 'Napoli'),
('NO', 'Novara'),
('NU', 'Nuoro'),
('OG', 'Ogliastra'),
('OT', 'Olbia-Tempio'),
('OR', 'Oristano'),
('PD', 'Padova'),
('PA', 'Palermo'),
('PR', 'Parma'),
('PV', 'Pavia'),
('PG', 'Perugia'),
('PU', 'Pesaro e Urbino'),
('PE', 'Pescara'),
('PC', 'Piacenza'),
('PI', 'Pisa'),
('PT', 'Pistoia'),
('PN', 'Pordenone'),
('PZ', 'Potenza'),
('PO', 'Prato'),
('RG', 'Ragusa'),
('RA', 'Ravenna'),
('RC', 'Reggio Calabria'),
('RE', 'Reggio Emilia'),
('RI', 'Rieti'),
('RN', 'Rimini'),
('RM', 'Roma'),
('RO', 'Rovigo'),
('SA', 'Salerno'),
('SS', 'Sassari'),
('SV', 'Savona'),
('SI', 'Siena'),
('SR', 'Siracusa'),
('SO', 'Sondrio'),
('TA', 'Taranto'),
('TE', 'Teramo'),
('TR', 'Terni'),
('TO', 'Torino'),
('TP', 'Trapani'),
('TN', 'Trento'),
('TV', 'Treviso'),
('TS', 'Trieste'),
('UD', 'Udine'),
('VA', 'Varese'),
('VE', 'Venezia'),
('VB', 'Verbano Cusio Ossola'),
('VC', 'Vercelli'),
('VR', 'Verona'),
('VV', 'Vibo Valentia'),
('VI', 'Vicenza'),
('VT', 'Viterbo'),
)
|
xiandiancloud/edx-platform
|
refs/heads/master
|
common/djangoapps/course_modes/migrations/0005_auto__add_field_coursemode_expiration_datetime.py
|
59
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CourseMode.expiration_datetime'
db.add_column('course_modes_coursemode', 'expiration_datetime',
self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'CourseMode.expiration_datetime'
db.delete_column('course_modes_coursemode', 'expiration_datetime')
models = {
'course_modes.coursemode': {
'Meta': {'unique_together': "(('course_id', 'mode_slug', 'currency'),)", 'object_name': 'CourseMode'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['course_modes']
|
tomekwojcik/BTHEventSource
|
refs/heads/master
|
backend/btheventsource.py
|
2
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 by Tomasz Wójcik <labs@tomekwojcik.pl>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Tornado handler for BTHEventSource"""
import tornado.web
try:
import json
except ImportError:
import simplejson as json
class BTHEventStreamHandler(tornado.web.RequestHandler):
def initialize(self):
accept = self.request.headers.get('Accept', None)
requested_with = self.request.headers.get('X-Requested-With', None)
self.is_xhr_polling = False
sse_content_type = 'text/event-stream'
opera_argument = self.get_argument('opera', None)
if opera_argument != None:
sse_content_type = 'application/x-dom-event-stream' # Web standards my ass.
else:
if accept != 'text/event-stream' or requested_with == 'XMLHttpRequest':
self.is_xhr_polling = True
if self.is_xhr_polling == False:
self.set_header('Content-Type', sse_content_type)
self.set_header('Cache-Control', 'no-cache')
self.last_event_id = self.request.headers.get('Last-Event-Id', None)
else:
self.last_event_id = self.get_argument('last_event_id', None)
def emit(self, data, event=None, id=None):
if self.is_xhr_polling == False:
_data = json.dumps(data)
_response = u''
if id != None:
_response += u'id: ' + unicode(id).strip() + u'\n'
if event != None:
_response += u'event: ' + unicode(event).strip() + u'\n'
_response += u'data: ' + _data.strip() + u'\n\n'
self.write(_response)
self.flush()
else:
_response = { 'data': data }
if event != None:
_response['event'] = event
if id != None:
_response['id'] = id
self.write(_response)
self.finish()
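# Illustrative usage sketch (not part of the original backend): a concrete
# handler subclasses BTHEventStreamHandler and pushes events with emit().
# The handler name, URL and payload below are hypothetical.
#
#   class TickHandler(BTHEventStreamHandler):
#       @tornado.web.asynchronous
#       def get(self):
#           self.emit({'message': 'hello'}, event='tick', id=1)
#
#   application = tornado.web.Application([(r'/events', TickHandler)])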
|
gram526/VTK
|
refs/heads/master
|
ThirdParty/Twisted/twisted/protocols/sip.py
|
31
|
# -*- test-case-name: twisted.test.test_sip -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Session Initialization Protocol.
Documented in RFC 2543.
[Superceded by 3261]
This module contains a deprecated implementation of HTTP Digest authentication.
See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home.
"""
# system imports
import socket, time, sys, random, warnings
from hashlib import md5
from zope.interface import implements, Interface
# twisted imports
from twisted.python import log, util
from twisted.python.deprecate import deprecated
from twisted.python.versions import Version
from twisted.internet import protocol, defer, reactor
from twisted import cred
import twisted.cred.error
from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword
# sibling imports
from twisted.protocols import basic
PORT = 5060
# SIP headers have short forms
shortHeaders = {"call-id": "i",
"contact": "m",
"content-encoding": "e",
"content-length": "l",
"content-type": "c",
"from": "f",
"subject": "s",
"to": "t",
"via": "v",
}
longHeaders = {}
for k, v in shortHeaders.items():
longHeaders[v] = k
del k, v
statusCodes = {
100: "Trying",
180: "Ringing",
181: "Call Is Being Forwarded",
182: "Queued",
183: "Session Progress",
200: "OK",
300: "Multiple Choices",
301: "Moved Permanently",
302: "Moved Temporarily",
303: "See Other",
305: "Use Proxy",
380: "Alternative Service",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required",
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict", # Not in RFC3261
410: "Gone",
411: "Length Required", # Not in RFC3261
413: "Request Entity Too Large",
414: "Request-URI Too Large",
415: "Unsupported Media Type",
416: "Unsupported URI Scheme",
420: "Bad Extension",
421: "Extension Required",
423: "Interval Too Brief",
480: "Temporarily Unavailable",
481: "Call/Transaction Does Not Exist",
482: "Loop Detected",
483: "Too Many Hops",
484: "Address Incomplete",
485: "Ambiguous",
486: "Busy Here",
487: "Request Terminated",
488: "Not Acceptable Here",
491: "Request Pending",
493: "Undecipherable",
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway", # no donut
503: "Service Unavailable",
504: "Server Time-out",
505: "SIP Version not supported",
513: "Message Too Large",
600: "Busy Everywhere",
603: "Decline",
604: "Does not exist anywhere",
606: "Not Acceptable",
}
specialCases = {
'cseq': 'CSeq',
'call-id': 'Call-ID',
'www-authenticate': 'WWW-Authenticate',
}
def dashCapitalize(s):
    ''' Capitalize a string, making sure to treat - as a word separator '''
return '-'.join([ x.capitalize() for x in s.split('-')])
def unq(s):
if s[0] == s[-1] == '"':
return s[1:-1]
return s
def DigestCalcHA1(
pszAlg,
pszUserName,
pszRealm,
pszPassword,
pszNonce,
pszCNonce,
):
m = md5()
m.update(pszUserName)
m.update(":")
m.update(pszRealm)
m.update(":")
m.update(pszPassword)
HA1 = m.digest()
if pszAlg == "md5-sess":
m = md5()
m.update(HA1)
m.update(":")
m.update(pszNonce)
m.update(":")
m.update(pszCNonce)
HA1 = m.digest()
return HA1.encode('hex')
DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1)
def DigestCalcResponse(
HA1,
pszNonce,
pszNonceCount,
pszCNonce,
pszQop,
pszMethod,
pszDigestUri,
pszHEntity,
):
m = md5()
m.update(pszMethod)
m.update(":")
m.update(pszDigestUri)
if pszQop == "auth-int":
m.update(":")
m.update(pszHEntity)
HA2 = m.digest().encode('hex')
m = md5()
m.update(HA1)
m.update(":")
m.update(pszNonce)
m.update(":")
if pszNonceCount and pszCNonce: # pszQop:
m.update(pszNonceCount)
m.update(":")
m.update(pszCNonce)
m.update(":")
m.update(pszQop)
m.update(":")
m.update(HA2)
hash = m.digest().encode('hex')
return hash
DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse)
_absent = object()
class Via(object):
"""
A L{Via} is a SIP Via header, representing a segment of the path taken by
the request.
See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42.
@ivar transport: Network protocol used for this leg. (Probably either "TCP"
or "UDP".)
@type transport: C{str}
@ivar branch: Unique identifier for this request.
@type branch: C{str}
@ivar host: Hostname or IP for this leg.
@type host: C{str}
@ivar port: Port used for this leg.
    @type port: C{int}, or None.
@ivar rportRequested: Whether to request RFC 3581 client processing or not.
@type rportRequested: C{bool}
@ivar rportValue: Servers wishing to honor requests for RFC 3581 processing
should set this parameter to the source port the request was received
from.
@type rportValue: C{int}, or None.
@ivar ttl: Time-to-live for requests on multicast paths.
@type ttl: C{int}, or None.
@ivar maddr: The destination multicast address, if any.
@type maddr: C{str}, or None.
@ivar hidden: Obsolete in SIP 2.0.
@type hidden: C{bool}
@ivar otherParams: Any other parameters in the header.
@type otherParams: C{dict}
"""
def __init__(self, host, port=PORT, transport="UDP", ttl=None,
hidden=False, received=None, rport=_absent, branch=None,
maddr=None, **kw):
"""
Set parameters of this Via header. All arguments correspond to
attributes of the same name.
To maintain compatibility with old SIP
code, the 'rport' argument is used to determine the values of
C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set
to True. (The deprecated method for doing this is to pass True.) If an
integer, C{rportValue} is set to the given value.
Any arguments not explicitly named here are collected into the
C{otherParams} dict.
"""
self.transport = transport
self.host = host
self.port = port
self.ttl = ttl
self.hidden = hidden
self.received = received
if rport is True:
warnings.warn(
"rport=True is deprecated since Twisted 9.0.",
DeprecationWarning,
stacklevel=2)
self.rportValue = None
self.rportRequested = True
elif rport is None:
self.rportValue = None
self.rportRequested = True
elif rport is _absent:
self.rportValue = None
self.rportRequested = False
else:
self.rportValue = rport
self.rportRequested = False
self.branch = branch
self.maddr = maddr
self.otherParams = kw
def _getrport(self):
"""
Returns the rport value expected by the old SIP code.
"""
if self.rportRequested == True:
return True
elif self.rportValue is not None:
return self.rportValue
else:
return None
def _setrport(self, newRPort):
"""
L{Base._fixupNAT} sets C{rport} directly, so this method sets
C{rportValue} based on that.
@param newRPort: The new rport value.
@type newRPort: C{int}
"""
self.rportValue = newRPort
self.rportRequested = False
rport = property(_getrport, _setrport)
def toString(self):
"""
Serialize this header for use in a request or response.
"""
s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port)
if self.hidden:
s += ";hidden"
for n in "ttl", "branch", "maddr", "received":
value = getattr(self, n)
if value is not None:
s += ";%s=%s" % (n, value)
if self.rportRequested:
s += ";rport"
elif self.rportValue is not None:
s += ";rport=%s" % (self.rport,)
etc = self.otherParams.items()
etc.sort()
for k, v in etc:
if v is None:
s += ";" + k
else:
s += ";%s=%s" % (k, v)
return s
def parseViaHeader(value):
"""
Parse a Via header.
@return: The parsed version of this header.
@rtype: L{Via}
"""
parts = value.split(";")
sent, params = parts[0], parts[1:]
protocolinfo, by = sent.split(" ", 1)
by = by.strip()
result = {}
pname, pversion, transport = protocolinfo.split("/")
if pname != "SIP" or pversion != "2.0":
raise ValueError, "wrong protocol or version: %r" % value
result["transport"] = transport
if ":" in by:
host, port = by.split(":")
result["port"] = int(port)
result["host"] = host
else:
result["host"] = by
for p in params:
        # it's the comment-stripping dance!
p = p.strip().split(" ", 1)
if len(p) == 1:
p, comment = p[0], ""
else:
p, comment = p
if p == "hidden":
result["hidden"] = True
continue
parts = p.split("=", 1)
if len(parts) == 1:
name, value = parts[0], None
else:
name, value = parts
if name in ("rport", "ttl"):
value = int(value)
result[name] = value
return Via(**result)
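# Illustrative sketch (not part of the original module): parseViaHeader() and
# Via.toString() round-trip a typical header value, including a bare RFC 3581
# ``rport`` request.
#
#   via = parseViaHeader("SIP/2.0/UDP client.example.com:5060;branch=z9hG4bKabc;rport")
#   assert via.host == "client.example.com" and via.port == 5060
#   assert via.rportRequested and via.rportValue is None
#   print via.toString()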
class URL:
"""A SIP URL."""
def __init__(self, host, username=None, password=None, port=None,
transport=None, usertype=None, method=None,
ttl=None, maddr=None, tag=None, other=None, headers=None):
self.username = username
self.host = host
self.password = password
self.port = port
self.transport = transport
self.usertype = usertype
self.method = method
self.tag = tag
self.ttl = ttl
self.maddr = maddr
if other == None:
self.other = []
else:
self.other = other
if headers == None:
self.headers = {}
else:
self.headers = headers
def toString(self):
l = []; w = l.append
w("sip:")
if self.username != None:
w(self.username)
if self.password != None:
w(":%s" % self.password)
w("@")
w(self.host)
if self.port != None:
w(":%d" % self.port)
if self.usertype != None:
w(";user=%s" % self.usertype)
for n in ("transport", "ttl", "maddr", "method", "tag"):
v = getattr(self, n)
if v != None:
w(";%s=%s" % (n, v))
for v in self.other:
w(";%s" % v)
if self.headers:
w("?")
w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()]))
return "".join(l)
def __str__(self):
return self.toString()
def __repr__(self):
return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport)
def parseURL(url, host=None, port=None):
"""Return string into URL object.
URIs are of of form 'sip:user@example.com'.
"""
d = {}
if not url.startswith("sip:"):
raise ValueError("unsupported scheme: " + url[:4])
parts = url[4:].split(";")
userdomain, params = parts[0], parts[1:]
udparts = userdomain.split("@", 1)
if len(udparts) == 2:
userpass, hostport = udparts
upparts = userpass.split(":", 1)
if len(upparts) == 1:
d["username"] = upparts[0]
else:
d["username"] = upparts[0]
d["password"] = upparts[1]
else:
hostport = udparts[0]
hpparts = hostport.split(":", 1)
if len(hpparts) == 1:
d["host"] = hpparts[0]
else:
d["host"] = hpparts[0]
d["port"] = int(hpparts[1])
if host != None:
d["host"] = host
if port != None:
d["port"] = port
for p in params:
if p == params[-1] and "?" in p:
d["headers"] = h = {}
p, headers = p.split("?", 1)
for header in headers.split("&"):
k, v = header.split("=")
h[k] = v
nv = p.split("=", 1)
if len(nv) == 1:
d.setdefault("other", []).append(p)
continue
name, value = nv
if name == "user":
d["usertype"] = value
elif name in ("transport", "ttl", "maddr", "method", "tag"):
if name == "ttl":
value = int(value)
d[name] = value
else:
d.setdefault("other", []).append(p)
return URL(**d)
def cleanRequestURL(url):
"""Clean a URL from a Request line."""
url.transport = None
url.maddr = None
url.ttl = None
url.headers = {}
def parseAddress(address, host=None, port=None, clean=0):
"""Return (name, uri, params) for From/To/Contact header.
@param clean: remove unnecessary info, usually for From and To headers.
"""
address = address.strip()
# simple 'sip:foo' case
if address.startswith("sip:"):
return "", parseURL(address, host=host, port=port), {}
params = {}
name, url = address.split("<", 1)
name = name.strip()
if name.startswith('"'):
name = name[1:]
if name.endswith('"'):
name = name[:-1]
url, paramstring = url.split(">", 1)
url = parseURL(url, host=host, port=port)
paramstring = paramstring.strip()
if paramstring:
for l in paramstring.split(";"):
if not l:
continue
k, v = l.split("=")
params[k] = v
if clean:
# rfc 2543 6.21
url.ttl = None
url.headers = {}
url.transport = None
url.maddr = None
return name, url, params
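# Illustrative sketch (not part of the original module): parseAddress() splits
# a typical To/From header value into display name, URL and parameters.
#
#   name, url, params = parseAddress('"Alice" <sip:alice@example.com>;tag=abc123')
#   assert name == "Alice"
#   assert url.username == "alice" and url.host == "example.com"
#   assert params == {"tag": "abc123"}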
class SIPError(Exception):
def __init__(self, code, phrase=None):
if phrase is None:
phrase = statusCodes[code]
Exception.__init__(self, "SIP error (%d): %s" % (code, phrase))
self.code = code
self.phrase = phrase
class RegistrationError(SIPError):
"""Registration was not possible."""
class Message:
"""A SIP message."""
length = None
def __init__(self):
self.headers = util.OrderedDict() # map name to list of values
self.body = ""
self.finished = 0
def addHeader(self, name, value):
name = name.lower()
name = longHeaders.get(name, name)
if name == "content-length":
self.length = int(value)
self.headers.setdefault(name,[]).append(value)
def bodyDataReceived(self, data):
self.body += data
def creationFinished(self):
if (self.length != None) and (self.length != len(self.body)):
raise ValueError, "wrong body length"
self.finished = 1
def toString(self):
s = "%s\r\n" % self._getHeaderLine()
for n, vs in self.headers.items():
for v in vs:
s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v)
s += "\r\n"
s += self.body
return s
def _getHeaderLine(self):
raise NotImplementedError
class Request(Message):
"""A Request for a URI"""
def __init__(self, method, uri, version="SIP/2.0"):
Message.__init__(self)
self.method = method
if isinstance(uri, URL):
self.uri = uri
else:
self.uri = parseURL(uri)
cleanRequestURL(self.uri)
def __repr__(self):
return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString())
def _getHeaderLine(self):
return "%s %s SIP/2.0" % (self.method, self.uri.toString())
class Response(Message):
"""A Response to a URI Request"""
def __init__(self, code, phrase=None, version="SIP/2.0"):
Message.__init__(self)
self.code = code
if phrase == None:
phrase = statusCodes[code]
self.phrase = phrase
def __repr__(self):
return "<SIP Response %d:%s>" % (id(self), self.code)
def _getHeaderLine(self):
return "SIP/2.0 %s %s" % (self.code, self.phrase)
class MessagesParser(basic.LineReceiver):
"""A SIP messages parser.
Expects dataReceived, dataDone repeatedly,
in that order. Shouldn't be connected to actual transport.
"""
version = "SIP/2.0"
acceptResponses = 1
acceptRequests = 1
state = "firstline" # or "headers", "body" or "invalid"
debug = 0
def __init__(self, messageReceivedCallback):
self.messageReceived = messageReceivedCallback
self.reset()
def reset(self, remainingData=""):
self.state = "firstline"
self.length = None # body length
self.bodyReceived = 0 # how much of the body we received
self.message = None
self.header = None
self.setLineMode(remainingData)
def invalidMessage(self):
self.state = "invalid"
self.setRawMode()
def dataDone(self):
# clear out any buffered data that may be hanging around
self.clearLineBuffer()
if self.state == "firstline":
return
if self.state != "body":
self.reset()
return
if self.length == None:
# no content-length header, so end of data signals message done
self.messageDone()
        elif self.bodyReceived < self.length:
# aborted in the middle
self.reset()
else:
# we have enough data and message wasn't finished? something is wrong
raise RuntimeError, "this should never happen"
def dataReceived(self, data):
try:
basic.LineReceiver.dataReceived(self, data)
except:
log.err()
self.invalidMessage()
def handleFirstLine(self, line):
"""Expected to create self.message."""
raise NotImplementedError
def lineLengthExceeded(self, line):
self.invalidMessage()
def lineReceived(self, line):
if self.state == "firstline":
while line.startswith("\n") or line.startswith("\r"):
line = line[1:]
if not line:
return
try:
a, b, c = line.split(" ", 2)
except ValueError:
self.invalidMessage()
return
if a == "SIP/2.0" and self.acceptResponses:
# response
try:
code = int(b)
except ValueError:
self.invalidMessage()
return
self.message = Response(code, c)
elif c == "SIP/2.0" and self.acceptRequests:
self.message = Request(a, b)
else:
self.invalidMessage()
return
self.state = "headers"
return
else:
assert self.state == "headers"
if line:
# multiline header
if line.startswith(" ") or line.startswith("\t"):
name, value = self.header
self.header = name, (value + line.lstrip())
else:
# new header
if self.header:
self.message.addHeader(*self.header)
self.header = None
try:
name, value = line.split(":", 1)
except ValueError:
self.invalidMessage()
return
self.header = name, value.lstrip()
# XXX we assume content-length won't be multiline
if name.lower() == "content-length":
try:
self.length = int(value.lstrip())
except ValueError:
self.invalidMessage()
return
else:
# CRLF, we now have message body until self.length bytes,
# or if no length was given, until there is no more data
# from the connection sending us data.
self.state = "body"
if self.header:
self.message.addHeader(*self.header)
self.header = None
if self.length == 0:
self.messageDone()
return
self.setRawMode()
def messageDone(self, remainingData=""):
assert self.state == "body"
self.message.creationFinished()
self.messageReceived(self.message)
self.reset(remainingData)
def rawDataReceived(self, data):
assert self.state in ("body", "invalid")
if self.state == "invalid":
return
if self.length == None:
self.message.bodyDataReceived(data)
else:
dataLen = len(data)
expectedLen = self.length - self.bodyReceived
if dataLen > expectedLen:
self.message.bodyDataReceived(data[:expectedLen])
self.messageDone(data[expectedLen:])
return
else:
self.bodyReceived += dataLen
self.message.bodyDataReceived(data)
if self.bodyReceived == self.length:
self.messageDone()
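# Illustrative sketch (not part of the original module): MessagesParser is fed
# raw datagrams and hands completed Message objects to its callback.
#
#   received = []
#   parser = MessagesParser(received.append)
#   parser.dataReceived("OPTIONS sip:bob@example.com SIP/2.0\r\n"
#                       "Via: SIP/2.0/UDP client.example.com\r\n"
#                       "Content-Length: 0\r\n"
#                       "\r\n")
#   parser.dataDone()
#   assert received[0].method == "OPTIONS"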
class Base(protocol.DatagramProtocol):
"""Base class for SIP clients and servers."""
PORT = PORT
debug = False
def __init__(self):
self.messages = []
self.parser = MessagesParser(self.addMessage)
def addMessage(self, msg):
self.messages.append(msg)
def datagramReceived(self, data, addr):
self.parser.dataReceived(data)
self.parser.dataDone()
for m in self.messages:
self._fixupNAT(m, addr)
if self.debug:
log.msg("Received %r from %r" % (m.toString(), addr))
if isinstance(m, Request):
self.handle_request(m, addr)
else:
self.handle_response(m, addr)
self.messages[:] = []
def _fixupNAT(self, message, (srcHost, srcPort)):
# RFC 2543 6.40.2,
senderVia = parseViaHeader(message.headers["via"][0])
if senderVia.host != srcHost:
senderVia.received = srcHost
if senderVia.port != srcPort:
senderVia.rport = srcPort
message.headers["via"][0] = senderVia.toString()
elif senderVia.rport == True:
senderVia.received = srcHost
senderVia.rport = srcPort
message.headers["via"][0] = senderVia.toString()
def deliverResponse(self, responseMessage):
"""Deliver response.
Destination is based on topmost Via header."""
destVia = parseViaHeader(responseMessage.headers["via"][0])
# XXX we don't do multicast yet
host = destVia.received or destVia.host
port = destVia.rport or destVia.port or self.PORT
destAddr = URL(host=host, port=port)
self.sendMessage(destAddr, responseMessage)
def responseFromRequest(self, code, request):
"""Create a response to a request message."""
response = Response(code)
for name in ("via", "to", "from", "call-id", "cseq"):
response.headers[name] = request.headers.get(name, [])[:]
return response
def sendMessage(self, destURL, message):
"""Send a message.
@param destURL: C{URL}. This should be a *physical* URL, not a logical one.
@param message: The message to send.
"""
if destURL.transport not in ("udp", None):
raise RuntimeError, "only UDP currently supported"
if self.debug:
log.msg("Sending %r to %r" % (message.toString(), destURL))
self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT))
def handle_request(self, message, addr):
"""Override to define behavior for requests received
@type message: C{Message}
@type addr: C{tuple}
"""
raise NotImplementedError
def handle_response(self, message, addr):
"""Override to define behavior for responses received.
@type message: C{Message}
@type addr: C{tuple}
"""
raise NotImplementedError
class IContact(Interface):
"""A user of a registrar or proxy"""
class Registration:
def __init__(self, secondsToExpiry, contactURL):
self.secondsToExpiry = secondsToExpiry
self.contactURL = contactURL
class IRegistry(Interface):
"""Allows registration of logical->physical URL mapping."""
def registerAddress(domainURL, logicalURL, physicalURL):
"""Register the physical address of a logical URL.
@return: Deferred of C{Registration} or failure with RegistrationError.
"""
def unregisterAddress(domainURL, logicalURL, physicalURL):
"""Unregister the physical address of a logical URL.
@return: Deferred of C{Registration} or failure with RegistrationError.
"""
def getRegistrationInfo(logicalURL):
"""Get registration info for logical URL.
@return: Deferred of C{Registration} object or failure of LookupError.
"""
class ILocator(Interface):
"""Allow looking up physical address for logical URL."""
def getAddress(logicalURL):
"""Return physical URL of server for logical URL of user.
@param logicalURL: a logical C{URL}.
@return: Deferred which becomes URL or fails with LookupError.
"""
class Proxy(Base):
"""SIP proxy."""
PORT = PORT
locator = None # object implementing ILocator
def __init__(self, host=None, port=PORT):
"""Create new instance.
@param host: our hostname/IP as set in Via headers.
@param port: our port as set in Via headers.
"""
self.host = host or socket.getfqdn()
self.port = port
Base.__init__(self)
def getVia(self):
"""Return value of Via header for this proxy."""
return Via(host=self.host, port=self.port)
def handle_request(self, message, addr):
# send immediate 100/trying message before processing
#self.deliverResponse(self.responseFromRequest(100, message))
f = getattr(self, "handle_%s_request" % message.method, None)
if f is None:
f = self.handle_request_default
try:
d = f(message, addr)
except SIPError, e:
self.deliverResponse(self.responseFromRequest(e.code, message))
except:
log.err()
self.deliverResponse(self.responseFromRequest(500, message))
else:
if d is not None:
d.addErrback(lambda e:
self.deliverResponse(self.responseFromRequest(e.code, message))
)
def handle_request_default(self, message, (srcHost, srcPort)):
"""Default request handler.
Default behaviour for OPTIONS and unknown methods for proxies
is to forward message on to the client.
Since at the moment we are stateless proxy, thats basically
everything.
"""
def _mungContactHeader(uri, message):
message.headers['contact'][0] = uri.toString()
return self.sendMessage(uri, message)
viaHeader = self.getVia()
if viaHeader.toString() in message.headers["via"]:
# must be a loop, so drop message
log.msg("Dropping looped message.")
return
message.headers["via"].insert(0, viaHeader.toString())
name, uri, tags = parseAddress(message.headers["to"][0], clean=1)
# this is broken and needs refactoring to use cred
d = self.locator.getAddress(uri)
d.addCallback(self.sendMessage, message)
d.addErrback(self._cantForwardRequest, message)
def _cantForwardRequest(self, error, message):
error.trap(LookupError)
del message.headers["via"][0] # this'll be us
self.deliverResponse(self.responseFromRequest(404, message))
def deliverResponse(self, responseMessage):
"""Deliver response.
Destination is based on topmost Via header."""
destVia = parseViaHeader(responseMessage.headers["via"][0])
# XXX we don't do multicast yet
host = destVia.received or destVia.host
port = destVia.rport or destVia.port or self.PORT
destAddr = URL(host=host, port=port)
self.sendMessage(destAddr, responseMessage)
def responseFromRequest(self, code, request):
"""Create a response to a request message."""
response = Response(code)
for name in ("via", "to", "from", "call-id", "cseq"):
response.headers[name] = request.headers.get(name, [])[:]
return response
def handle_response(self, message, addr):
"""Default response handler."""
v = parseViaHeader(message.headers["via"][0])
if (v.host, v.port) != (self.host, self.port):
# we got a message not intended for us?
# XXX note this check breaks if we have multiple external IPs
# yay for suck protocols
log.msg("Dropping incorrectly addressed message")
return
del message.headers["via"][0]
if not message.headers["via"]:
# this message is addressed to us
self.gotResponse(message, addr)
return
self.deliverResponse(message)
def gotResponse(self, message, addr):
"""Called with responses that are addressed at this server."""
pass
class IAuthorizer(Interface):
def getChallenge(peer):
"""Generate a challenge the client may respond to.
@type peer: C{tuple}
@param peer: The client's address
@rtype: C{str}
@return: The challenge string
"""
def decode(response):
"""Create a credentials object from the given response.
@type response: C{str}
"""
class BasicAuthorizer:
"""Authorizer for insecure Basic (base64-encoded plaintext) authentication.
This form of authentication is broken and insecure. Do not use it.
"""
implements(IAuthorizer)
def __init__(self):
"""
This method exists solely to issue a deprecation warning.
"""
warnings.warn(
"twisted.protocols.sip.BasicAuthorizer was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
def getChallenge(self, peer):
return None
def decode(self, response):
# At least one SIP client improperly pads its Base64 encoded messages
for i in range(3):
try:
creds = (response + ('=' * i)).decode('base64')
except:
pass
else:
break
else:
# Totally bogus
raise SIPError(400)
p = creds.split(':', 1)
if len(p) == 2:
return UsernamePassword(*p)
raise SIPError(400)
class DigestedCredentials(UsernameHashedPassword):
"""Yet Another Simple Digest-MD5 authentication scheme"""
def __init__(self, username, fields, challenges):
warnings.warn(
"twisted.protocols.sip.DigestedCredentials was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
self.username = username
self.fields = fields
self.challenges = challenges
def checkPassword(self, password):
method = 'REGISTER'
response = self.fields.get('response')
uri = self.fields.get('uri')
nonce = self.fields.get('nonce')
cnonce = self.fields.get('cnonce')
nc = self.fields.get('nc')
algo = self.fields.get('algorithm', 'MD5')
qop = self.fields.get('qop-options', 'auth')
opaque = self.fields.get('opaque')
if opaque not in self.challenges:
return False
del self.challenges[opaque]
user, domain = self.username.split('@', 1)
if uri is None:
uri = 'sip:' + domain
expected = DigestCalcResponse(
DigestCalcHA1(algo, user, domain, password, nonce, cnonce),
nonce, nc, cnonce, qop, method, uri, None,
)
return expected == response
class DigestAuthorizer:
CHALLENGE_LIFETIME = 15
implements(IAuthorizer)
def __init__(self):
warnings.warn(
"twisted.protocols.sip.DigestAuthorizer was deprecated "
"in Twisted 9.0.0",
category=DeprecationWarning,
stacklevel=2)
self.outstanding = {}
def generateNonce(self):
c = tuple([random.randrange(sys.maxint) for _ in range(3)])
c = '%d%d%d' % c
return c
def generateOpaque(self):
return str(random.randrange(sys.maxint))
def getChallenge(self, peer):
c = self.generateNonce()
o = self.generateOpaque()
self.outstanding[o] = c
return ','.join((
'nonce="%s"' % c,
'opaque="%s"' % o,
'qop-options="auth"',
'algorithm="MD5"',
))
def decode(self, response):
response = ' '.join(response.splitlines())
parts = response.split(',')
auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]])
try:
username = auth['username']
except KeyError:
raise SIPError(401)
try:
return DigestedCredentials(username, auth, self.outstanding)
except:
raise SIPError(400)
class RegisterProxy(Proxy):
"""A proxy that allows registration for a specific domain.
Unregistered users won't be handled.
"""
portal = None
registry = None # should implement IRegistry
authorizers = {}
def __init__(self, *args, **kw):
Proxy.__init__(self, *args, **kw)
self.liveChallenges = {}
if "digest" not in self.authorizers:
self.authorizers["digest"] = DigestAuthorizer()
def handle_ACK_request(self, message, (host, port)):
# XXX
# ACKs are a client's way of indicating they got the last message
# Responding to them is not a good idea.
# However, we should keep track of terminal messages and re-transmit
# if no ACK is received.
pass
def handle_REGISTER_request(self, message, (host, port)):
"""Handle a registration request.
Currently registration is not proxied.
"""
if self.portal is None:
# There is no portal. Let anyone in.
self.register(message, host, port)
else:
# There is a portal. Check for credentials.
if not message.headers.has_key("authorization"):
return self.unauthorized(message, host, port)
else:
return self.login(message, host, port)
def unauthorized(self, message, host, port):
m = self.responseFromRequest(401, message)
for (scheme, auth) in self.authorizers.iteritems():
chal = auth.getChallenge((host, port))
if chal is None:
value = '%s realm="%s"' % (scheme.title(), self.host)
else:
value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host)
m.headers.setdefault('www-authenticate', []).append(value)
self.deliverResponse(m)
def login(self, message, host, port):
parts = message.headers['authorization'][0].split(None, 1)
a = self.authorizers.get(parts[0].lower())
if a:
try:
c = a.decode(parts[1])
except SIPError:
raise
except:
log.err()
self.deliverResponse(self.responseFromRequest(500, message))
else:
c.username += '@' + self.host
self.portal.login(c, None, IContact
).addCallback(self._cbLogin, message, host, port
).addErrback(self._ebLogin, message, host, port
).addErrback(log.err
)
else:
self.deliverResponse(self.responseFromRequest(501, message))
def _cbLogin(self, (i, a, l), message, host, port):
# It's stateless, matey. What a joke.
self.register(message, host, port)
def _ebLogin(self, failure, message, host, port):
failure.trap(cred.error.UnauthorizedLogin)
self.unauthorized(message, host, port)
def register(self, message, host, port):
"""Allow all users to register"""
name, toURL, params = parseAddress(message.headers["to"][0], clean=1)
contact = None
if message.headers.has_key("contact"):
contact = message.headers["contact"][0]
if message.headers.get("expires", [None])[0] == "0":
self.unregister(message, toURL, contact)
else:
# XXX Check expires on appropriate URL, and pass it to registry
# instead of having registry hardcode it.
if contact is not None:
name, contactURL, params = parseAddress(contact, host=host, port=port)
d = self.registry.registerAddress(message.uri, toURL, contactURL)
else:
d = self.registry.getRegistrationInfo(toURL)
d.addCallbacks(self._cbRegister, self._ebRegister,
callbackArgs=(message,),
errbackArgs=(message,)
)
def _cbRegister(self, registration, message):
response = self.responseFromRequest(200, message)
if registration.contactURL != None:
response.addHeader("contact", registration.contactURL.toString())
response.addHeader("expires", "%d" % registration.secondsToExpiry)
response.addHeader("content-length", "0")
self.deliverResponse(response)
def _ebRegister(self, error, message):
error.trap(RegistrationError, LookupError)
# XXX return error message, and alter tests to deal with
# this, currently tests assume no message sent on failure
def unregister(self, message, toURL, contact):
try:
expires = int(message.headers["expires"][0])
except ValueError:
self.deliverResponse(self.responseFromRequest(400, message))
else:
if expires == 0:
if contact == "*":
contactURL = "*"
else:
name, contactURL, params = parseAddress(contact)
d = self.registry.unregisterAddress(message.uri, toURL, contactURL)
d.addCallback(self._cbUnregister, message
).addErrback(self._ebUnregister, message
)
def _cbUnregister(self, registration, message):
msg = self.responseFromRequest(200, message)
msg.headers.setdefault('contact', []).append(registration.contactURL.toString())
msg.addHeader("expires", "0")
self.deliverResponse(msg)
def _ebUnregister(self, registration, message):
pass
class InMemoryRegistry:
"""A simplistic registry for a specific domain."""
implements(IRegistry, ILocator)
def __init__(self, domain):
self.domain = domain # the domain we handle registration for
self.users = {} # map username to (IDelayedCall for expiry, address URI)
def getAddress(self, userURI):
if userURI.host != self.domain:
return defer.fail(LookupError("unknown domain"))
if userURI.username in self.users:
dc, url = self.users[userURI.username]
return defer.succeed(url)
else:
return defer.fail(LookupError("no such user"))
def getRegistrationInfo(self, userURI):
if userURI.host != self.domain:
return defer.fail(LookupError("unknown domain"))
if self.users.has_key(userURI.username):
dc, url = self.users[userURI.username]
return defer.succeed(Registration(int(dc.getTime() - time.time()), url))
else:
return defer.fail(LookupError("no such user"))
def _expireRegistration(self, username):
try:
dc, url = self.users[username]
except KeyError:
return defer.fail(LookupError("no such user"))
else:
dc.cancel()
del self.users[username]
return defer.succeed(Registration(0, url))
def registerAddress(self, domainURL, logicalURL, physicalURL):
if domainURL.host != self.domain:
log.msg("Registration for domain we don't handle.")
return defer.fail(RegistrationError(404))
if logicalURL.host != self.domain:
log.msg("Registration for domain we don't handle.")
return defer.fail(RegistrationError(404))
if logicalURL.username in self.users:
dc, old = self.users[logicalURL.username]
dc.reset(3600)
else:
dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username)
log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString()))
self.users[logicalURL.username] = (dc, physicalURL)
return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL))
def unregisterAddress(self, domainURL, logicalURL, physicalURL):
return self._expireRegistration(logicalURL.username)
|
RobertWWong/WebDev
|
refs/heads/master
|
djangoApp/ENV/lib/python3.5/site-packages/django/conf/locale/sk/formats.py
|
504
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. F Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%y-%m-%d', # '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
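# Minimal illustration (not part of the locale file): the *_INPUT_FORMATS
# entries are plain strptime patterns, e.g.
#
#   from datetime import datetime
#   datetime.strptime('25.10.2006', DATE_INPUT_FORMATS[0])  # -> datetime(2006, 10, 25, 0, 0)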
|
blueboxgroup/nova
|
refs/heads/master
|
nova/db/api.py
|
3
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
Functions in this module are imported into the nova.db namespace. Call these
functions from nova.db namespace, not the nova.db.api namespace.
All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.
"""
from oslo_config import cfg
from oslo_db import concurrency
from oslo_log import log as logging
from nova.cells import rpcapi as cells_rpcapi
from nova.i18n import _LE
db_opts = [
cfg.BoolOpt('enable_new_services',
default=True,
help='Services to be added to the available pool on create'),
cfg.StrOpt('instance_name_template',
default='instance-%08x',
help='Template string to be used to generate instance names'),
cfg.StrOpt('snapshot_name_template',
default='snapshot-%s',
help='Template string to be used to generate snapshot names'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
_BACKEND_MAPPING = {'sqlalchemy': 'nova.db.sqlalchemy.api'}
IMPL = concurrency.TpoolDbapiWrapper(CONF, backend_mapping=_BACKEND_MAPPING)
LOG = logging.getLogger(__name__)
# The maximum value a signed INT type may have
MAX_INT = 0x7FFFFFFF
###################
def constraint(**conditions):
"""Return a constraint object suitable for use with some updates."""
return IMPL.constraint(**conditions)
def equal_any(*values):
"""Return an equality condition object suitable for use in a constraint.
Equal_any conditions require that a model object's attribute equal any
one of the given values.
"""
return IMPL.equal_any(*values)
def not_equal(*values):
"""Return an inequality condition object suitable for use in a constraint.
Not_equal conditions require that a model object's attribute differs from
all of the given values.
"""
return IMPL.not_equal(*values)
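# Illustrative sketch (not part of this module): a constraint built from the
# condition helpers above; the keyword name used here is hypothetical and
# depends on the model being updated.
#
#   cons = constraint(task_state=equal_any(None, 'deleting'))
#   # ``cons`` can then be passed to update/destroy calls that accept a
#   # ``constraint`` keyword argument.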
###################
def service_destroy(context, service_id):
"""Destroy the service or raise if it does not exist."""
return IMPL.service_destroy(context, service_id)
def service_get(context, service_id, use_slave=False):
"""Get a service or raise if it does not exist."""
return IMPL.service_get(context, service_id,
use_slave=use_slave)
def service_get_by_host_and_topic(context, host, topic):
"""Get a service by hostname and topic it listens to."""
return IMPL.service_get_by_host_and_topic(context, host, topic)
def service_get_by_host_and_binary(context, host, binary):
"""Get a service by hostname and binary."""
return IMPL.service_get_by_host_and_binary(context, host, binary)
def service_get_all(context, disabled=None):
"""Get all services."""
return IMPL.service_get_all(context, disabled)
def service_get_all_by_topic(context, topic):
"""Get all services for a given topic."""
return IMPL.service_get_all_by_topic(context, topic)
def service_get_all_by_binary(context, binary):
"""Get all services for a given binary."""
return IMPL.service_get_all_by_binary(context, binary)
def service_get_all_by_host(context, host):
"""Get all services for a given host."""
return IMPL.service_get_all_by_host(context, host)
def service_get_by_compute_host(context, host, use_slave=False):
"""Get the service entry for a given compute host.
Returns the service entry joined with the compute_node entry.
"""
return IMPL.service_get_by_compute_host(context, host,
use_slave=use_slave)
def service_create(context, values):
"""Create a service from the values dictionary."""
return IMPL.service_create(context, values)
def service_update(context, service_id, values):
"""Set the given properties on a service and update it.
Raises NotFound if service does not exist.
"""
return IMPL.service_update(context, service_id, values)
###################
def compute_node_get(context, compute_id):
"""Get a compute node by its id.
:param context: The security context
:param compute_id: ID of the compute node
:returns: Dictionary-like object containing properties of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_get(context, compute_id)
def compute_nodes_get_by_service_id(context, service_id):
"""Get a list of compute nodes by their associated service id.
:param context: The security context
:param service_id: ID of the associated service
:returns: List of dictionary-like objects, each containing properties of
the compute node, including its corresponding service and
statistics
Raises ServiceNotFound if service with the given ID doesn't exist.
"""
return IMPL.compute_nodes_get_by_service_id(context, service_id)
def compute_node_get_by_host_and_nodename(context, host, nodename):
"""Get a compute node by its associated host and nodename.
:param context: The security context (admin)
:param host: Name of the host
:param nodename: Name of the node
:returns: Dictionary-like object containing properties of the compute node,
including its statistics
Raises ComputeHostNotFound if host with the given name doesn't exist.
"""
return IMPL.compute_node_get_by_host_and_nodename(context, host, nodename)
def compute_node_get_all(context):
"""Get all computeNodes.
:param context: The security context
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all(context)
def compute_node_get_all_by_host(context, host, use_slave=False):
"""Get compute nodes by host name
:param context: The security context (admin)
:param host: Name of the host
:returns: List of dictionaries each containing compute node properties
"""
return IMPL.compute_node_get_all_by_host(context, host, use_slave)
def compute_node_search_by_hypervisor(context, hypervisor_match):
"""Get compute nodes by hypervisor hostname.
:param context: The security context
:param hypervisor_match: The hypervisor hostname
:returns: List of dictionary-like objects each containing compute node
properties
"""
return IMPL.compute_node_search_by_hypervisor(context, hypervisor_match)
def compute_node_create(context, values):
"""Create a compute node from the values dictionary.
:param context: The security context
:param values: Dictionary containing compute node properties
:returns: Dictionary-like object containing the properties of the created
node, including its corresponding service and statistics
"""
return IMPL.compute_node_create(context, values)
def compute_node_update(context, compute_id, values):
"""Set the given properties on a compute node and update it.
:param context: The security context
:param compute_id: ID of the compute node
:param values: Dictionary containing compute node properties to be updated
:returns: Dictionary-like object containing the properties of the updated
compute node, including its corresponding service and statistics
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_update(context, compute_id, values)
def compute_node_delete(context, compute_id):
"""Delete a compute node from the database.
:param context: The security context
:param compute_id: ID of the compute node
Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
"""
return IMPL.compute_node_delete(context, compute_id)
def compute_node_statistics(context):
"""Get aggregate statistics over all compute nodes.
:param context: The security context
:returns: Dictionary containing compute node characteristics summed up
over all the compute nodes, e.g. 'vcpus', 'free_ram_mb' etc.
"""
return IMPL.compute_node_statistics(context)
###################
def certificate_create(context, values):
"""Create a certificate from the values dictionary."""
return IMPL.certificate_create(context, values)
def certificate_get_all_by_project(context, project_id):
"""Get all certificates for a project."""
return IMPL.certificate_get_all_by_project(context, project_id)
def certificate_get_all_by_user(context, user_id):
"""Get all certificates for a user."""
return IMPL.certificate_get_all_by_user(context, user_id)
def certificate_get_all_by_user_and_project(context, user_id, project_id):
"""Get all certificates for a user and project."""
return IMPL.certificate_get_all_by_user_and_project(context,
user_id,
project_id)
###################
def floating_ip_get(context, id):
return IMPL.floating_ip_get(context, id)
def floating_ip_get_pools(context):
"""Returns a list of floating ip pools."""
return IMPL.floating_ip_get_pools(context)
def floating_ip_allocate_address(context, project_id, pool,
auto_assigned=False):
"""Allocate free floating ip from specified pool and return the address.
Raises if one is not available.
"""
return IMPL.floating_ip_allocate_address(context, project_id, pool,
auto_assigned)
def floating_ip_bulk_create(context, ips, want_result=True):
"""Create a lot of floating ips from the values dictionary.
:param want_result: If set to True, return floating ips inserted
"""
return IMPL.floating_ip_bulk_create(context, ips, want_result=want_result)
def floating_ip_bulk_destroy(context, ips):
"""Destroy a lot of floating ips from the values dictionary."""
return IMPL.floating_ip_bulk_destroy(context, ips)
def floating_ip_create(context, values):
"""Create a floating ip from the values dictionary."""
return IMPL.floating_ip_create(context, values)
def floating_ip_deallocate(context, address):
"""Deallocate a floating ip by address."""
return IMPL.floating_ip_deallocate(context, address)
def floating_ip_destroy(context, address):
"""Destroy the floating_ip or raise if it does not exist."""
return IMPL.floating_ip_destroy(context, address)
def floating_ip_disassociate(context, address):
"""Disassociate a floating ip from a fixed ip by address.
:returns: the fixed ip record joined to network record or None
              if the floating ip was not associated to a fixed ip.
"""
return IMPL.floating_ip_disassociate(context, address)
def floating_ip_fixed_ip_associate(context, floating_address,
fixed_address, host):
"""Associate a floating ip to a fixed_ip by address.
:returns: the fixed ip record joined to network record or None
              if the floating ip was already associated with the fixed ip.
"""
return IMPL.floating_ip_fixed_ip_associate(context,
floating_address,
fixed_address,
host)
def floating_ip_get_all(context):
"""Get all floating ips."""
return IMPL.floating_ip_get_all(context)
def floating_ip_get_all_by_host(context, host):
"""Get all floating ips by host."""
return IMPL.floating_ip_get_all_by_host(context, host)
def floating_ip_get_all_by_project(context, project_id):
"""Get all floating ips by project."""
return IMPL.floating_ip_get_all_by_project(context, project_id)
def floating_ip_get_by_address(context, address):
"""Get a floating ip by address or raise if it doesn't exist."""
return IMPL.floating_ip_get_by_address(context, address)
def floating_ip_get_by_fixed_address(context, fixed_address):
"""Get a floating ips by fixed address."""
return IMPL.floating_ip_get_by_fixed_address(context, fixed_address)
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id):
"""Get a floating ips by fixed address."""
return IMPL.floating_ip_get_by_fixed_ip_id(context, fixed_ip_id)
def floating_ip_update(context, address, values):
"""Update a floating ip by address or raise if it doesn't exist."""
return IMPL.floating_ip_update(context, address, values)
def dnsdomain_get_all(context):
"""Get a list of all dnsdomains in our database."""
return IMPL.dnsdomain_get_all(context)
def dnsdomain_register_for_zone(context, fqdomain, zone):
"""Associated a DNS domain with an availability zone."""
return IMPL.dnsdomain_register_for_zone(context, fqdomain, zone)
def dnsdomain_register_for_project(context, fqdomain, project):
"""Associated a DNS domain with a project id."""
return IMPL.dnsdomain_register_for_project(context, fqdomain, project)
def dnsdomain_unregister(context, fqdomain):
"""Purge associations for the specified DNS zone."""
return IMPL.dnsdomain_unregister(context, fqdomain)
def dnsdomain_get(context, fqdomain):
"""Get the db record for the specified domain."""
return IMPL.dnsdomain_get(context, fqdomain)
####################
def migration_update(context, id, values):
"""Update a migration instance."""
return IMPL.migration_update(context, id, values)
def migration_create(context, values):
"""Create a migration record."""
return IMPL.migration_create(context, values)
def migration_get(context, migration_id):
"""Finds a migration by the id."""
return IMPL.migration_get(context, migration_id)
def migration_get_by_instance_and_status(context, instance_uuid, status):
"""Finds a migration by the instance uuid its migrating."""
return IMPL.migration_get_by_instance_and_status(context, instance_uuid,
status)
def migration_get_unconfirmed_by_dest_compute(context, confirm_window,
dest_compute, use_slave=False):
"""Finds all unconfirmed migrations within the confirmation window for
a specific destination compute host.
"""
return IMPL.migration_get_unconfirmed_by_dest_compute(context,
confirm_window, dest_compute, use_slave=use_slave)
def migration_get_in_progress_by_host_and_node(context, host, node):
"""Finds all migrations for the given host + node that are not yet
confirmed or reverted.
"""
return IMPL.migration_get_in_progress_by_host_and_node(context, host, node)
def migration_get_all_by_filters(context, filters):
"""Finds all migrations in progress."""
return IMPL.migration_get_all_by_filters(context, filters)
####################
def fixed_ip_associate(context, address, instance_uuid, network_id=None,
reserved=False):
"""Associate fixed ip to instance.
Raises if fixed ip is not available.
"""
return IMPL.fixed_ip_associate(context, address, instance_uuid, network_id,
reserved)
def fixed_ip_associate_pool(context, network_id, instance_uuid=None,
host=None):
"""Find free ip in network and associate it to instance or host.
Raises if one is not available.
"""
return IMPL.fixed_ip_associate_pool(context, network_id,
instance_uuid, host)
def fixed_ip_create(context, values):
"""Create a fixed ip from the values dictionary."""
return IMPL.fixed_ip_create(context, values)
def fixed_ip_bulk_create(context, ips):
"""Create a lot of fixed ips from the values dictionary."""
return IMPL.fixed_ip_bulk_create(context, ips)
def fixed_ip_disassociate(context, address):
"""Disassociate a fixed ip from an instance by address."""
return IMPL.fixed_ip_disassociate(context, address)
def fixed_ip_disassociate_all_by_timeout(context, host, time):
"""Disassociate old fixed ips from host."""
return IMPL.fixed_ip_disassociate_all_by_timeout(context, host, time)
def fixed_ip_get(context, id, get_network=False):
"""Get fixed ip by id or raise if it does not exist.
If get_network is true, also return the associated network.
"""
return IMPL.fixed_ip_get(context, id, get_network)
def fixed_ip_get_all(context):
"""Get all defined fixed ips."""
return IMPL.fixed_ip_get_all(context)
def fixed_ip_get_by_address(context, address, columns_to_join=None):
"""Get a fixed ip by address or raise if it does not exist."""
return IMPL.fixed_ip_get_by_address(context, address,
columns_to_join=columns_to_join)
def fixed_ip_get_by_floating_address(context, floating_address):
"""Get a fixed ip by a floating address."""
return IMPL.fixed_ip_get_by_floating_address(context, floating_address)
def fixed_ip_get_by_instance(context, instance_uuid):
"""Get fixed ips by instance or raise if none exist."""
return IMPL.fixed_ip_get_by_instance(context, instance_uuid)
def fixed_ip_get_by_host(context, host):
"""Get fixed ips by compute host."""
return IMPL.fixed_ip_get_by_host(context, host)
def fixed_ip_get_by_network_host(context, network_uuid, host):
"""Get fixed ip for a host in a network."""
return IMPL.fixed_ip_get_by_network_host(context, network_uuid, host)
def fixed_ips_by_virtual_interface(context, vif_id):
"""Get fixed ips by virtual interface or raise if none exist."""
return IMPL.fixed_ips_by_virtual_interface(context, vif_id)
def fixed_ip_update(context, address, values):
"""Create a fixed ip from the values dictionary."""
return IMPL.fixed_ip_update(context, address, values)
####################
def virtual_interface_create(context, values):
"""Create a virtual interface record in the database."""
return IMPL.virtual_interface_create(context, values)
def virtual_interface_get(context, vif_id):
"""Gets a virtual interface from the table."""
return IMPL.virtual_interface_get(context, vif_id)
def virtual_interface_get_by_address(context, address):
"""Gets a virtual interface from the table filtering on address."""
return IMPL.virtual_interface_get_by_address(context, address)
def virtual_interface_get_by_uuid(context, vif_uuid):
"""Gets a virtual interface from the table filtering on vif uuid."""
return IMPL.virtual_interface_get_by_uuid(context, vif_uuid)
def virtual_interface_get_by_instance(context, instance_id, use_slave=False):
"""Gets all virtual_interfaces for instance."""
return IMPL.virtual_interface_get_by_instance(context, instance_id,
use_slave=use_slave)
def virtual_interface_get_by_instance_and_network(context, instance_id,
network_id):
"""Gets all virtual interfaces for instance."""
return IMPL.virtual_interface_get_by_instance_and_network(context,
instance_id,
network_id)
def virtual_interface_delete_by_instance(context, instance_id):
"""Delete virtual interface records associated with instance."""
return IMPL.virtual_interface_delete_by_instance(context, instance_id)
def virtual_interface_get_all(context):
"""Gets all virtual interfaces from the table."""
return IMPL.virtual_interface_get_all(context)
####################
def instance_create(context, values):
"""Create an instance from the values dictionary."""
return IMPL.instance_create(context, values)
def instance_destroy(context, instance_uuid, constraint=None,
update_cells=True):
"""Destroy the instance or raise if it does not exist."""
rv = IMPL.instance_destroy(context, instance_uuid, constraint)
if update_cells:
try:
cells_rpcapi.CellsAPI().instance_destroy_at_top(context, rv)
except Exception:
LOG.exception(_LE("Failed to notify cells of instance destroy"))
return rv
def instance_get_by_uuid(context, uuid, columns_to_join=None, use_slave=False):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get_by_uuid(context, uuid,
columns_to_join, use_slave=use_slave)
def instance_get(context, instance_id, columns_to_join=None):
"""Get an instance or raise if it does not exist."""
return IMPL.instance_get(context, instance_id,
columns_to_join=columns_to_join)
def instance_get_all(context, columns_to_join=None):
"""Get all instances."""
return IMPL.instance_get_all(context, columns_to_join=columns_to_join)
def instance_get_all_by_filters(context, filters, sort_key='created_at',
sort_dir='desc', limit=None, marker=None,
columns_to_join=None, use_slave=False):
"""Get all instances that match all filters."""
# Note: This function exists for backwards compatibility since calls to
# the instance layer coming in over RPC may specify the single sort
# key/direction values; in this case, this function is invoked instead
# of the 'instance_get_all_by_filters_sort' function.
return IMPL.instance_get_all_by_filters(context, filters, sort_key,
sort_dir, limit=limit,
marker=marker,
columns_to_join=columns_to_join,
use_slave=use_slave)
def instance_get_all_by_filters_sort(context, filters, limit=None,
marker=None, columns_to_join=None,
use_slave=False, sort_keys=None,
sort_dirs=None):
"""Get all instances that match all filters sorted by multiple keys.
sort_keys and sort_dirs must be a list of strings.
"""
return IMPL.instance_get_all_by_filters_sort(
context, filters, limit=limit, marker=marker,
columns_to_join=columns_to_join, use_slave=use_slave,
sort_keys=sort_keys, sort_dirs=sort_dirs)
def instance_get_active_by_window_joined(context, begin, end=None,
project_id=None, host=None,
use_slave=False,
columns_to_join=None):
"""Get instances and joins active during a certain time window.
Specifying a project_id will filter for a certain project.
Specifying a host will filter for instances on a given compute host.
"""
return IMPL.instance_get_active_by_window_joined(context, begin, end,
project_id, host,
use_slave=use_slave,
columns_to_join=columns_to_join)
def instance_get_all_by_host(context, host,
columns_to_join=None, use_slave=False):
"""Get all instances belonging to a host."""
return IMPL.instance_get_all_by_host(context, host,
columns_to_join,
use_slave=use_slave)
def instance_get_all_by_host_and_node(context, host, node,
columns_to_join=None):
"""Get all instances belonging to a node."""
return IMPL.instance_get_all_by_host_and_node(
context, host, node, columns_to_join=columns_to_join)
def instance_get_all_by_host_and_not_type(context, host, type_id=None):
"""Get all instances belonging to a host with a different type_id."""
return IMPL.instance_get_all_by_host_and_not_type(context, host, type_id)
def instance_floating_address_get_all(context, instance_uuid):
"""Get all floating ip addresses of an instance."""
return IMPL.instance_floating_address_get_all(context, instance_uuid)
# NOTE(hanlind): This method can be removed as conductor RPC API moves to v2.0.
def instance_get_all_hung_in_rebooting(context, reboot_window):
"""Get all instances stuck in a rebooting state."""
return IMPL.instance_get_all_hung_in_rebooting(context, reboot_window)
def instance_update(context, instance_uuid, values, update_cells=True):
"""Set the given properties on an instance and update it.
Raises NotFound if instance does not exist.
"""
rv = IMPL.instance_update(context, instance_uuid, values)
if update_cells:
try:
cells_rpcapi.CellsAPI().instance_update_at_top(context, rv)
except Exception:
LOG.exception(_LE("Failed to notify cells of instance update"))
return rv
# FIXME(comstud): 'update_cells' is temporary as we transition to using
# objects. When everything is using Instance.save(), we can remove the
# argument and the RPC to nova-cells.
def instance_update_and_get_original(context, instance_uuid, values,
update_cells=True,
columns_to_join=None):
"""Set the given properties on an instance and update it. Return
a shallow copy of the original instance reference, as well as the
updated one.
:param context: = request context object
:param instance_uuid: = instance id or uuid
:param values: = dict containing column values
:returns: a tuple of the form (old_instance_ref, new_instance_ref)
Raises NotFound if instance does not exist.
"""
rv = IMPL.instance_update_and_get_original(context, instance_uuid, values,
columns_to_join=columns_to_join)
if update_cells:
try:
cells_rpcapi.CellsAPI().instance_update_at_top(context, rv[1])
except Exception:
LOG.exception(_LE("Failed to notify cells of instance update"))
return rv
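# Illustrative sketch only: instance_update_and_get_original() returns an
# (old_ref, new_ref) tuple, which callers typically unpack to compare the
# pre-update and post-update state.  The 'task_state' column used here is
# just an example value.
def _example_clear_task_state(context, instance_uuid):
    old_ref, new_ref = instance_update_and_get_original(
        context, instance_uuid, {'task_state': None})
    return old_ref['task_state'], new_ref['task_state']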
def instance_add_security_group(context, instance_id, security_group_id):
"""Associate the given security group with the given instance."""
return IMPL.instance_add_security_group(context, instance_id,
security_group_id)
def instance_remove_security_group(context, instance_id, security_group_id):
"""Disassociate the given security group from the given instance."""
return IMPL.instance_remove_security_group(context, instance_id,
security_group_id)
####################
def instance_group_create(context, values, policies=None, members=None):
"""Create a new group.
Each group will receive a unique uuid. This will be used for access to the
group.
"""
return IMPL.instance_group_create(context, values, policies, members)
def instance_group_get(context, group_uuid):
"""Get a specific group by id."""
return IMPL.instance_group_get(context, group_uuid)
def instance_group_get_by_instance(context, instance_uuid):
"""Get the group an instance is a member of."""
return IMPL.instance_group_get_by_instance(context, instance_uuid)
def instance_group_update(context, group_uuid, values):
"""Update the attributes of an group."""
return IMPL.instance_group_update(context, group_uuid, values)
def instance_group_delete(context, group_uuid):
"""Delete an group."""
return IMPL.instance_group_delete(context, group_uuid)
def instance_group_get_all(context):
"""Get all groups."""
return IMPL.instance_group_get_all(context)
def instance_group_get_all_by_project_id(context, project_id):
"""Get all groups for a specific project_id."""
return IMPL.instance_group_get_all_by_project_id(context, project_id)
def instance_group_members_add(context, group_uuid, members,
set_delete=False):
"""Add members to the group."""
return IMPL.instance_group_members_add(context, group_uuid, members,
set_delete=set_delete)
def instance_group_member_delete(context, group_uuid, instance_id):
"""Delete a specific member from the group."""
return IMPL.instance_group_member_delete(context, group_uuid, instance_id)
def instance_group_members_get(context, group_uuid):
"""Get the members from the group."""
return IMPL.instance_group_members_get(context, group_uuid)
def instance_group_policies_add(context, group_uuid, policies,
set_delete=False):
"""Add policies to the group."""
return IMPL.instance_group_policies_add(context, group_uuid, policies,
set_delete=set_delete)
def instance_group_policy_delete(context, group_uuid, policy):
"""Delete a specific policy from the group."""
return IMPL.instance_group_policy_delete(context, group_uuid, policy)
def instance_group_policies_get(context, group_uuid):
"""Get the policies from the group."""
return IMPL.instance_group_policies_get(context, group_uuid)
###################
def instance_info_cache_get(context, instance_uuid):
"""Gets an instance info cache from the table.
:param instance_uuid: = uuid of the info cache's instance
"""
return IMPL.instance_info_cache_get(context, instance_uuid)
def instance_info_cache_update(context, instance_uuid, values):
"""Update an instance info cache record in the table.
:param instance_uuid: = uuid of info cache's instance
:param values: = dict containing column values to update
"""
return IMPL.instance_info_cache_update(context, instance_uuid, values)
def instance_info_cache_delete(context, instance_uuid):
"""Deletes an existing instance_info_cache record
:param instance_uuid: = uuid of the instance tied to the cache record
"""
return IMPL.instance_info_cache_delete(context, instance_uuid)
###################
def instance_extra_get_by_instance_uuid(context, instance_uuid, columns=None):
"""Get the instance extra record
:param instance_uuid: = uuid of the instance tied to the topology record
:param columns: A list of the columns to load, or None for 'all of them'
"""
return IMPL.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=columns)
def instance_extra_update_by_uuid(context, instance_uuid, updates):
"""Update the instance extra record by instance uuid
:param instance_uuid: = uuid of the instance tied to the record
:param updates: A dict of updates to apply
"""
return IMPL.instance_extra_update_by_uuid(context, instance_uuid,
updates)
###################
def key_pair_create(context, values):
"""Create a key_pair from the values dictionary."""
return IMPL.key_pair_create(context, values)
def key_pair_destroy(context, user_id, name):
"""Destroy the key_pair or raise if it does not exist."""
return IMPL.key_pair_destroy(context, user_id, name)
def key_pair_get(context, user_id, name):
"""Get a key_pair or raise if it does not exist."""
return IMPL.key_pair_get(context, user_id, name)
def key_pair_get_all_by_user(context, user_id):
"""Get all key_pairs by user."""
return IMPL.key_pair_get_all_by_user(context, user_id)
def key_pair_count_by_user(context, user_id):
"""Count number of key pairs for the given user ID."""
return IMPL.key_pair_count_by_user(context, user_id)
####################
def network_associate(context, project_id, network_id=None, force=False):
"""Associate a free network to a project."""
return IMPL.network_associate(context, project_id, network_id, force)
def network_count_reserved_ips(context, network_id):
"""Return the number of reserved ips in the network."""
return IMPL.network_count_reserved_ips(context, network_id)
def network_create_safe(context, values):
"""Create a network from the values dict.
The network is only returned if the create succeeds. If the create violates
constraints because the network already exists, no exception is raised.
"""
return IMPL.network_create_safe(context, values)
def network_delete_safe(context, network_id):
"""Delete network with key network_id.
This method assumes that the network is not associated with any project
"""
return IMPL.network_delete_safe(context, network_id)
def network_disassociate(context, network_id, disassociate_host=True,
disassociate_project=True):
"""Disassociate the network from project or host
Raises if it does not exist.
"""
return IMPL.network_disassociate(context, network_id, disassociate_host,
disassociate_project)
def network_get(context, network_id, project_only="allow_none"):
"""Get a network or raise if it does not exist."""
return IMPL.network_get(context, network_id, project_only=project_only)
def network_get_all(context, project_only="allow_none"):
"""Return all defined networks."""
return IMPL.network_get_all(context, project_only)
def network_get_all_by_uuids(context, network_uuids,
project_only="allow_none"):
"""Return networks by ids."""
return IMPL.network_get_all_by_uuids(context, network_uuids,
project_only=project_only)
def network_in_use_on_host(context, network_id, host=None):
"""Indicates if a network is currently in use on host."""
return IMPL.network_in_use_on_host(context, network_id, host)
def network_get_associated_fixed_ips(context, network_id, host=None):
"""Get all network's ips that have been associated."""
return IMPL.network_get_associated_fixed_ips(context, network_id, host)
def network_get_by_uuid(context, uuid):
"""Get a network by uuid or raise if it does not exist."""
return IMPL.network_get_by_uuid(context, uuid)
def network_get_by_cidr(context, cidr):
"""Get a network by cidr or raise if it does not exist."""
return IMPL.network_get_by_cidr(context, cidr)
def network_get_all_by_host(context, host):
"""All networks for which the given host is the network host."""
return IMPL.network_get_all_by_host(context, host)
def network_set_host(context, network_id, host_id):
"""Safely set the host for network."""
return IMPL.network_set_host(context, network_id, host_id)
def network_update(context, network_id, values):
"""Set the given properties on a network and update it.
Raises NotFound if network does not exist.
"""
return IMPL.network_update(context, network_id, values)
###############
def quota_create(context, project_id, resource, limit, user_id=None):
"""Create a quota for the given project and resource."""
return IMPL.quota_create(context, project_id, resource, limit,
user_id=user_id)
def quota_get(context, project_id, resource, user_id=None):
"""Retrieve a quota or raise if it does not exist."""
return IMPL.quota_get(context, project_id, resource, user_id=user_id)
def quota_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all quotas associated with a given project and user."""
return IMPL.quota_get_all_by_project_and_user(context, project_id, user_id)
def quota_get_all_by_project(context, project_id):
"""Retrieve all quotas associated with a given project."""
return IMPL.quota_get_all_by_project(context, project_id)
def quota_get_all(context, project_id):
"""Retrieve all user quotas associated with a given project."""
return IMPL.quota_get_all(context, project_id)
def quota_update(context, project_id, resource, limit, user_id=None):
"""Update a quota or raise if it does not exist."""
return IMPL.quota_update(context, project_id, resource, limit,
user_id=user_id)
###################
def quota_class_create(context, class_name, resource, limit):
"""Create a quota class for the given name and resource."""
return IMPL.quota_class_create(context, class_name, resource, limit)
def quota_class_get(context, class_name, resource):
"""Retrieve a quota class or raise if it does not exist."""
return IMPL.quota_class_get(context, class_name, resource)
def quota_class_get_default(context):
"""Retrieve all default quotas."""
return IMPL.quota_class_get_default(context)
def quota_class_get_all_by_name(context, class_name):
"""Retrieve all quotas associated with a given quota class."""
return IMPL.quota_class_get_all_by_name(context, class_name)
def quota_class_update(context, class_name, resource, limit):
"""Update a quota class or raise if it does not exist."""
return IMPL.quota_class_update(context, class_name, resource, limit)
###################
def quota_usage_get(context, project_id, resource, user_id=None):
"""Retrieve a quota usage or raise if it does not exist."""
return IMPL.quota_usage_get(context, project_id, resource, user_id=user_id)
def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project_and_user(context,
project_id, user_id)
def quota_usage_get_all_by_project(context, project_id):
"""Retrieve all usage associated with a given resource."""
return IMPL.quota_usage_get_all_by_project(context, project_id)
def quota_usage_update(context, project_id, user_id, resource, **kwargs):
"""Update a quota usage or raise if it does not exist."""
return IMPL.quota_usage_update(context, project_id, user_id, resource,
**kwargs)
###################
def quota_reserve(context, resources, quotas, user_quotas, deltas, expire,
until_refresh, max_age, project_id=None, user_id=None):
"""Check quotas and create appropriate reservations."""
return IMPL.quota_reserve(context, resources, quotas, user_quotas, deltas,
expire, until_refresh, max_age,
project_id=project_id, user_id=user_id)
def reservation_commit(context, reservations, project_id=None, user_id=None):
"""Commit quota reservations."""
return IMPL.reservation_commit(context, reservations,
project_id=project_id,
user_id=user_id)
def reservation_rollback(context, reservations, project_id=None, user_id=None):
"""Roll back quota reservations."""
return IMPL.reservation_rollback(context, reservations,
project_id=project_id,
user_id=user_id)
def quota_destroy_all_by_project_and_user(context, project_id, user_id):
"""Destroy all quotas associated with a given project and user."""
return IMPL.quota_destroy_all_by_project_and_user(context,
project_id, user_id)
def quota_destroy_all_by_project(context, project_id):
"""Destroy all quotas associated with a given project."""
return IMPL.quota_destroy_all_by_project(context, project_id)
def reservation_expire(context):
"""Roll back any expired reservations."""
return IMPL.reservation_expire(context)
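# Illustrative sketch only: the usual lifecycle for the reservation helpers
# above is reserve -> commit, with a rollback on failure.  The arguments are
# passed straight through here; real callers normally go via nova.quota.QUOTAS
# rather than using these functions directly.
def _example_reservation_lifecycle(context, resources, quotas, user_quotas,
                                   deltas, expire, project_id, user_id):
    reservations = quota_reserve(context, resources, quotas, user_quotas,
                                 deltas, expire, until_refresh=None,
                                 max_age=0, project_id=project_id,
                                 user_id=user_id)
    try:
        # ... perform the operation the quota was reserved for ...
        reservation_commit(context, reservations, project_id=project_id,
                           user_id=user_id)
    except Exception:
        reservation_rollback(context, reservations, project_id=project_id,
                             user_id=user_id)
        raise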
###################
def ec2_volume_create(context, volume_id, forced_id=None):
return IMPL.ec2_volume_create(context, volume_id, forced_id)
def ec2_volume_get_by_id(context, volume_id):
return IMPL.ec2_volume_get_by_id(context, volume_id)
def ec2_volume_get_by_uuid(context, volume_uuid):
return IMPL.ec2_volume_get_by_uuid(context, volume_uuid)
def ec2_snapshot_create(context, snapshot_id, forced_id=None):
return IMPL.ec2_snapshot_create(context, snapshot_id, forced_id)
def ec2_snapshot_get_by_ec2_id(context, ec2_id):
return IMPL.ec2_snapshot_get_by_ec2_id(context, ec2_id)
def ec2_snapshot_get_by_uuid(context, snapshot_uuid):
return IMPL.ec2_snapshot_get_by_uuid(context, snapshot_uuid)
####################
def block_device_mapping_create(context, values, legacy=True):
"""Create an entry of block device mapping."""
return IMPL.block_device_mapping_create(context, values, legacy)
def block_device_mapping_update(context, bdm_id, values, legacy=True):
"""Update an entry of block device mapping."""
return IMPL.block_device_mapping_update(context, bdm_id, values, legacy)
def block_device_mapping_update_or_create(context, values, legacy=True):
"""Update an entry of block device mapping.
    If it does not exist, create a new entry.
"""
return IMPL.block_device_mapping_update_or_create(context, values, legacy)
def block_device_mapping_get_all_by_instance(context, instance_uuid,
use_slave=False):
"""Get all block device mapping belonging to an instance."""
return IMPL.block_device_mapping_get_all_by_instance(context,
instance_uuid,
use_slave)
def block_device_mapping_get_by_volume_id(context, volume_id,
columns_to_join=None):
"""Get block device mapping for a given volume."""
return IMPL.block_device_mapping_get_by_volume_id(context, volume_id,
columns_to_join)
def block_device_mapping_destroy(context, bdm_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy(context, bdm_id)
def block_device_mapping_destroy_by_instance_and_device(context, instance_uuid,
device_name):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_device(
context, instance_uuid, device_name)
def block_device_mapping_destroy_by_instance_and_volume(context, instance_uuid,
volume_id):
"""Destroy the block device mapping."""
return IMPL.block_device_mapping_destroy_by_instance_and_volume(
context, instance_uuid, volume_id)
####################
def security_group_get_all(context):
"""Get all security groups."""
return IMPL.security_group_get_all(context)
def security_group_get(context, security_group_id, columns_to_join=None):
"""Get security group by its id."""
return IMPL.security_group_get(context, security_group_id,
columns_to_join)
def security_group_get_by_name(context, project_id, group_name,
columns_to_join=None):
"""Returns a security group with the specified name from a project."""
return IMPL.security_group_get_by_name(context, project_id, group_name,
                                           columns_to_join=columns_to_join)
def security_group_get_by_project(context, project_id):
"""Get all security groups belonging to a project."""
return IMPL.security_group_get_by_project(context, project_id)
def security_group_get_by_instance(context, instance_uuid):
"""Get security groups to which the instance is assigned."""
return IMPL.security_group_get_by_instance(context, instance_uuid)
def security_group_in_use(context, group_id):
"""Indicates if a security group is currently in use."""
return IMPL.security_group_in_use(context, group_id)
def security_group_create(context, values):
"""Create a new security group."""
return IMPL.security_group_create(context, values)
def security_group_update(context, security_group_id, values,
columns_to_join=None):
"""Update a security group."""
return IMPL.security_group_update(context, security_group_id, values,
columns_to_join=columns_to_join)
def security_group_ensure_default(context):
"""Ensure default security group exists for a project_id.
Returns a tuple with the first element being a bool indicating
if the default security group previously existed. Second
element is the dict used to create the default security group.
"""
return IMPL.security_group_ensure_default(context)
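# Illustrative note: per the docstring above, callers typically unpack the
# result as
#
#     existed, default_group = security_group_ensure_default(context)
#
# and only treat the group as newly created when 'existed' is False.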
def security_group_destroy(context, security_group_id):
"""Deletes a security group."""
return IMPL.security_group_destroy(context, security_group_id)
####################
def security_group_rule_create(context, values):
"""Create a new security group."""
return IMPL.security_group_rule_create(context, values)
def security_group_rule_get_by_security_group(context, security_group_id,
columns_to_join=None):
"""Get all rules for a given security group."""
return IMPL.security_group_rule_get_by_security_group(
context, security_group_id, columns_to_join=columns_to_join)
def security_group_rule_get_by_security_group_grantee(context,
security_group_id):
"""Get all rules that grant access to the given security group."""
return IMPL.security_group_rule_get_by_security_group_grantee(context,
security_group_id)
def security_group_rule_destroy(context, security_group_rule_id):
"""Deletes a security group rule."""
return IMPL.security_group_rule_destroy(context, security_group_rule_id)
def security_group_rule_get(context, security_group_rule_id):
"""Gets a security group rule."""
return IMPL.security_group_rule_get(context, security_group_rule_id)
def security_group_rule_count_by_group(context, security_group_id):
"""Count rules in a given security group."""
return IMPL.security_group_rule_count_by_group(context, security_group_id)
###################
def security_group_default_rule_get(context, security_group_rule_default_id):
return IMPL.security_group_default_rule_get(context,
security_group_rule_default_id)
def security_group_default_rule_destroy(context,
security_group_rule_default_id):
return IMPL.security_group_default_rule_destroy(
context, security_group_rule_default_id)
def security_group_default_rule_create(context, values):
return IMPL.security_group_default_rule_create(context, values)
def security_group_default_rule_list(context):
return IMPL.security_group_default_rule_list(context)
###################
def provider_fw_rule_create(context, rule):
"""Add a firewall rule at the provider level (all hosts & instances)."""
return IMPL.provider_fw_rule_create(context, rule)
def provider_fw_rule_get_all(context):
"""Get all provider-level firewall rules."""
return IMPL.provider_fw_rule_get_all(context)
def provider_fw_rule_destroy(context, rule_id):
"""Delete a provider firewall rule from the database."""
return IMPL.provider_fw_rule_destroy(context, rule_id)
###################
def project_get_networks(context, project_id, associate=True):
"""Return the network associated with the project.
If associate is true, it will attempt to associate a new
network if one is not found, otherwise it returns None.
"""
return IMPL.project_get_networks(context, project_id, associate)
###################
def console_pool_create(context, values):
"""Create console pool."""
return IMPL.console_pool_create(context, values)
def console_pool_get_by_host_type(context, compute_host, proxy_host,
console_type):
"""Fetch a console pool for a given proxy host, compute host, and type."""
return IMPL.console_pool_get_by_host_type(context,
compute_host,
proxy_host,
console_type)
def console_pool_get_all_by_host_type(context, host, console_type):
"""Fetch all pools for given proxy host and type."""
return IMPL.console_pool_get_all_by_host_type(context,
host,
console_type)
def console_create(context, values):
"""Create a console."""
return IMPL.console_create(context, values)
def console_delete(context, console_id):
"""Delete a console."""
return IMPL.console_delete(context, console_id)
def console_get_by_pool_instance(context, pool_id, instance_uuid):
"""Get console entry for a given instance and pool."""
return IMPL.console_get_by_pool_instance(context, pool_id, instance_uuid)
def console_get_all_by_instance(context, instance_uuid, columns_to_join=None):
"""Get consoles for a given instance."""
return IMPL.console_get_all_by_instance(context, instance_uuid,
columns_to_join)
def console_get(context, console_id, instance_uuid=None):
"""Get a specific console (possibly on a given instance)."""
return IMPL.console_get(context, console_id, instance_uuid)
##################
def flavor_create(context, values, projects=None):
"""Create a new instance type."""
return IMPL.flavor_create(context, values, projects=projects)
def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
sort_dir='asc', limit=None, marker=None):
"""Get all instance flavors."""
return IMPL.flavor_get_all(
context, inactive=inactive, filters=filters, sort_key=sort_key,
sort_dir=sort_dir, limit=limit, marker=marker)
def flavor_get(context, id):
"""Get instance type by id."""
return IMPL.flavor_get(context, id)
def flavor_get_by_name(context, name):
"""Get instance type by name."""
return IMPL.flavor_get_by_name(context, name)
def flavor_get_by_flavor_id(context, id, read_deleted=None):
"""Get instance type by flavor id."""
return IMPL.flavor_get_by_flavor_id(context, id, read_deleted)
def flavor_destroy(context, name):
"""Delete an instance type."""
return IMPL.flavor_destroy(context, name)
def flavor_access_get_by_flavor_id(context, flavor_id):
"""Get flavor access by flavor id."""
return IMPL.flavor_access_get_by_flavor_id(context, flavor_id)
def flavor_access_add(context, flavor_id, project_id):
"""Add flavor access for project."""
return IMPL.flavor_access_add(context, flavor_id, project_id)
def flavor_access_remove(context, flavor_id, project_id):
"""Remove flavor access for project."""
return IMPL.flavor_access_remove(context, flavor_id, project_id)
def flavor_extra_specs_get(context, flavor_id):
"""Get all extra specs for an instance type."""
return IMPL.flavor_extra_specs_get(context, flavor_id)
def flavor_extra_specs_delete(context, flavor_id, key):
"""Delete the given extra specs item."""
IMPL.flavor_extra_specs_delete(context, flavor_id, key)
def flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs):
"""Create or update instance type extra specs.
This adds or modifies the key/value pairs specified in the
extra specs dict argument
"""
IMPL.flavor_extra_specs_update_or_create(context, flavor_id,
extra_specs)
####################
def pci_device_get_by_addr(context, node_id, dev_addr):
"""Get PCI device by address."""
return IMPL.pci_device_get_by_addr(context, node_id, dev_addr)
def pci_device_get_by_id(context, id):
"""Get PCI device by id."""
return IMPL.pci_device_get_by_id(context, id)
def pci_device_get_all_by_node(context, node_id):
"""Get all PCI devices for one host."""
return IMPL.pci_device_get_all_by_node(context, node_id)
def pci_device_get_all_by_instance_uuid(context, instance_uuid):
"""Get PCI devices allocated to instance."""
return IMPL.pci_device_get_all_by_instance_uuid(context, instance_uuid)
def pci_device_destroy(context, node_id, address):
"""Delete a PCI device record."""
return IMPL.pci_device_destroy(context, node_id, address)
def pci_device_update(context, node_id, address, value):
"""Update a pci device."""
return IMPL.pci_device_update(context, node_id, address, value)
###################
def cell_create(context, values):
"""Create a new child Cell entry."""
return IMPL.cell_create(context, values)
def cell_update(context, cell_name, values):
"""Update a child Cell entry."""
return IMPL.cell_update(context, cell_name, values)
def cell_delete(context, cell_name):
"""Delete a child Cell."""
return IMPL.cell_delete(context, cell_name)
def cell_get(context, cell_name):
"""Get a specific child Cell."""
return IMPL.cell_get(context, cell_name)
def cell_get_all(context):
"""Get all child Cells."""
return IMPL.cell_get_all(context)
####################
def instance_metadata_get(context, instance_uuid):
"""Get all metadata for an instance."""
return IMPL.instance_metadata_get(context, instance_uuid)
def instance_metadata_delete(context, instance_uuid, key):
"""Delete the given metadata item."""
IMPL.instance_metadata_delete(context, instance_uuid, key)
def instance_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
return IMPL.instance_metadata_update(context, instance_uuid,
metadata, delete)
####################
def instance_system_metadata_get(context, instance_uuid):
"""Get all system metadata for an instance."""
return IMPL.instance_system_metadata_get(context, instance_uuid)
def instance_system_metadata_update(context, instance_uuid, metadata, delete):
"""Update metadata if it exists, otherwise create it."""
IMPL.instance_system_metadata_update(
context, instance_uuid, metadata, delete)
####################
def agent_build_create(context, values):
"""Create a new agent build entry."""
return IMPL.agent_build_create(context, values)
def agent_build_get_by_triple(context, hypervisor, os, architecture):
"""Get agent build by hypervisor/OS/architecture triple."""
return IMPL.agent_build_get_by_triple(context, hypervisor, os,
architecture)
def agent_build_get_all(context, hypervisor=None):
"""Get all agent builds."""
return IMPL.agent_build_get_all(context, hypervisor)
def agent_build_destroy(context, agent_update_id):
"""Destroy agent build entry."""
IMPL.agent_build_destroy(context, agent_update_id)
def agent_build_update(context, agent_build_id, values):
"""Update agent build entry."""
IMPL.agent_build_update(context, agent_build_id, values)
####################
def bw_usage_get(context, uuid, start_period, mac, use_slave=False):
"""Return bw usage for instance and mac in a given audit period."""
return IMPL.bw_usage_get(context, uuid, start_period, mac,
use_slave=use_slave)
def bw_usage_get_by_uuids(context, uuids, start_period, use_slave=False):
"""Return bw usages for instance(s) in a given audit period."""
return IMPL.bw_usage_get_by_uuids(context, uuids, start_period,
use_slave=use_slave)
def bw_usage_update(context, uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed=None,
update_cells=True):
"""Update cached bandwidth usage for an instance's network based on mac
address. Creates new record if needed.
"""
rv = IMPL.bw_usage_update(context, uuid, mac, start_period, bw_in,
bw_out, last_ctr_in, last_ctr_out, last_refreshed=last_refreshed)
if update_cells:
try:
cells_rpcapi.CellsAPI().bw_usage_update_at_top(context,
uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed)
except Exception:
LOG.exception(_LE("Failed to notify cells of bw_usage update"))
return rv
###################
def vol_get_usage_by_time(context, begin):
"""Return volumes usage that have been updated after a specified time."""
return IMPL.vol_get_usage_by_time(context, begin)
def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes,
instance_id, project_id, user_id, availability_zone,
update_totals=False):
"""Update cached volume usage for a volume
Creates new record if needed.
"""
return IMPL.vol_usage_update(context, id, rd_req, rd_bytes, wr_req,
wr_bytes, instance_id, project_id, user_id,
availability_zone,
update_totals=update_totals)
###################
def s3_image_get(context, image_id):
"""Find local s3 image represented by the provided id."""
return IMPL.s3_image_get(context, image_id)
def s3_image_get_by_uuid(context, image_uuid):
"""Find local s3 image represented by the provided uuid."""
return IMPL.s3_image_get_by_uuid(context, image_uuid)
def s3_image_create(context, image_uuid):
"""Create local s3 image represented by provided uuid."""
return IMPL.s3_image_create(context, image_uuid)
####################
def aggregate_create(context, values, metadata=None):
"""Create a new aggregate with metadata."""
return IMPL.aggregate_create(context, values, metadata)
def aggregate_get(context, aggregate_id):
"""Get a specific aggregate by id."""
return IMPL.aggregate_get(context, aggregate_id)
def aggregate_get_by_host(context, host, key=None):
"""Get a list of aggregates that host belongs to."""
return IMPL.aggregate_get_by_host(context, host, key)
def aggregate_metadata_get_by_host(context, host, key=None):
"""Get metadata for all aggregates that host belongs to.
    Returns a dictionary where each value is a set; this covers the case
    where two aggregates have different values for the same key.
Optional key filter
"""
return IMPL.aggregate_metadata_get_by_host(context, host, key)
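# Illustrative example of the return shape described above (values are made
# up): if the host belongs to two aggregates whose metadata disagree on
# 'availability_zone', the result would look like
#
#     {'availability_zone': set(['az-1', 'az-2'])}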
def aggregate_metadata_get_by_metadata_key(context, aggregate_id, key):
"""Get metadata for an aggregate by metadata key."""
return IMPL.aggregate_metadata_get_by_metadata_key(context, aggregate_id,
key)
def aggregate_get_by_metadata_key(context, key):
return IMPL.aggregate_get_by_metadata_key(context, key)
def aggregate_update(context, aggregate_id, values):
"""Update the attributes of an aggregates.
If values contains a metadata key, it updates the aggregate metadata too.
"""
return IMPL.aggregate_update(context, aggregate_id, values)
def aggregate_delete(context, aggregate_id):
"""Delete an aggregate."""
return IMPL.aggregate_delete(context, aggregate_id)
def aggregate_get_all(context):
"""Get all aggregates."""
return IMPL.aggregate_get_all(context)
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
"""Add/update metadata. If set_delete=True, it adds only."""
IMPL.aggregate_metadata_add(context, aggregate_id, metadata, set_delete)
def aggregate_metadata_get(context, aggregate_id):
"""Get metadata for the specified aggregate."""
return IMPL.aggregate_metadata_get(context, aggregate_id)
def aggregate_metadata_delete(context, aggregate_id, key):
"""Delete the given metadata key."""
IMPL.aggregate_metadata_delete(context, aggregate_id, key)
def aggregate_host_add(context, aggregate_id, host):
"""Add host to the aggregate."""
IMPL.aggregate_host_add(context, aggregate_id, host)
def aggregate_host_get_all(context, aggregate_id):
"""Get hosts for the specified aggregate."""
return IMPL.aggregate_host_get_all(context, aggregate_id)
def aggregate_host_delete(context, aggregate_id, host):
"""Delete the given host from the aggregate."""
IMPL.aggregate_host_delete(context, aggregate_id, host)
####################
def instance_fault_create(context, values):
"""Create a new Instance Fault."""
return IMPL.instance_fault_create(context, values)
def instance_fault_get_by_instance_uuids(context, instance_uuids):
"""Get all instance faults for the provided instance_uuids."""
return IMPL.instance_fault_get_by_instance_uuids(context, instance_uuids)
####################
def action_start(context, values):
"""Start an action for an instance."""
return IMPL.action_start(context, values)
def action_finish(context, values):
"""Finish an action for an instance."""
return IMPL.action_finish(context, values)
def actions_get(context, uuid):
"""Get all instance actions for the provided instance."""
return IMPL.actions_get(context, uuid)
def action_get_by_request_id(context, uuid, request_id):
"""Get the action by request_id and given instance."""
return IMPL.action_get_by_request_id(context, uuid, request_id)
def action_event_start(context, values):
"""Start an event on an instance action."""
return IMPL.action_event_start(context, values)
def action_event_finish(context, values):
"""Finish an event on an instance action."""
return IMPL.action_event_finish(context, values)
def action_events_get(context, action_id):
"""Get the events by action id."""
return IMPL.action_events_get(context, action_id)
def action_event_get_by_id(context, action_id, event_id):
return IMPL.action_event_get_by_id(context, action_id, event_id)
####################
def get_instance_uuid_by_ec2_id(context, ec2_id):
"""Get uuid through ec2 id from instance_id_mappings table."""
return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)
def ec2_instance_create(context, instance_uuid, id=None):
"""Create the ec2 id to instance uuid mapping on demand."""
return IMPL.ec2_instance_create(context, instance_uuid, id)
def ec2_instance_get_by_uuid(context, instance_uuid):
return IMPL.ec2_instance_get_by_uuid(context, instance_uuid)
def ec2_instance_get_by_id(context, instance_id):
return IMPL.ec2_instance_get_by_id(context, instance_id)
####################
def task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message=None):
"""Mark a task as complete for a given host/time period."""
return IMPL.task_log_end_task(context, task_name,
period_beginning,
period_ending,
host,
errors,
message)
def task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items=None,
message=None):
"""Mark a task as started for a given host/time period."""
return IMPL.task_log_begin_task(context, task_name,
period_beginning,
period_ending,
host,
task_items,
message)
def task_log_get_all(context, task_name, period_beginning,
period_ending, host=None, state=None):
return IMPL.task_log_get_all(context, task_name, period_beginning,
period_ending, host, state)
def task_log_get(context, task_name, period_beginning,
period_ending, host, state=None):
return IMPL.task_log_get(context, task_name, period_beginning,
period_ending, host, state)
####################
def archive_deleted_rows(context, max_rows=None):
"""Move up to max_rows rows from production tables to corresponding shadow
tables.
:returns: number of rows archived.
"""
return IMPL.archive_deleted_rows(context, max_rows=max_rows)
def archive_deleted_rows_for_table(context, tablename, max_rows=None):
"""Move up to max_rows rows from tablename to corresponding shadow
table.
:returns: number of rows archived.
"""
return IMPL.archive_deleted_rows_for_table(context, tablename,
max_rows=max_rows)
def migrate_flavor_data(context, max_count, flavor_cache):
"""Migrate instance flavor data from system_metadata to instance_extra.
:param max_count: The maximum number of instances to consider in this
run.
:param flavor_cache: A dict to persist flavor information in across
calls (just pass an empty dict here)
:returns: number of instances needing migration, number of instances
migrated (both will always be less than max_count)
"""
return IMPL.migrate_flavor_data(context, max_count, flavor_cache)
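# Illustrative sketch only: the flavor-data migration is normally driven in
# batches until nothing is left to migrate.  The batch size of 50 is an
# arbitrary example value, and real drivers (e.g. nova-manage) also guard
# against making no progress.
def _example_migrate_all_flavor_data(context):
    flavor_cache = {}
    while True:
        needing, migrated = migrate_flavor_data(context, 50, flavor_cache)
        if needing == 0 or migrated == 0:
            return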
####################
def instance_tag_add(context, instance_uuid, tag):
"""Add tag to the instance."""
return IMPL.instance_tag_add(context, instance_uuid, tag)
def instance_tag_set(context, instance_uuid, tags):
"""Replace all of the instance tags with specified list of tags."""
return IMPL.instance_tag_set(context, instance_uuid, tags)
def instance_tag_get_by_instance_uuid(context, instance_uuid):
"""Get all tags for a given instance."""
return IMPL.instance_tag_get_by_instance_uuid(context, instance_uuid)
def instance_tag_delete(context, instance_uuid, tag):
"""Delete specified tag from the instance."""
return IMPL.instance_tag_delete(context, instance_uuid, tag)
def instance_tag_delete_all(context, instance_uuid):
"""Delete all tags from the instance."""
return IMPL.instance_tag_delete_all(context, instance_uuid)
|
jefftc/changlab
|
refs/heads/master
|
web2py/gluon/contrib/login_methods/cas_auth.py
|
5
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of web2py Web Framework (Copyrighted, 2007-2009).
Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>.
License: GPL v2
Tinkered by Szabolcs Gyuris < szimszo n @ o regpreshaz dot eu>
"""
from gluon import current, redirect
class CasAuth(object):
"""
Login will be done via Web2py's CAS application, instead of web2py's
login form.
Include in your model (eg db.py)::
from gluon.contrib.login_methods.cas_auth import CasAuth
auth.define_tables(username=True)
auth.settings.login_form=CasAuth(
urlbase = "https://[your CAS provider]/app/default/user/cas",
actions=['login','validate','logout'])
    where urlbase is the actual CAS server url without the login, logout...
Enjoy.
###UPDATE###
if you want to connect to a CAS version 2 JASIG Server use this:
auth.settings.login_form=CasAuth(
urlbase = "https://[Your CAS server]/cas",
actions = ['login','serviceValidate','logout'],
casversion = 2,
casusername = "cas:user")
where casusername is the xml node returned by CAS server which contains
user's username.
"""
def __init__(self, g=None, # g for backward compatibility ###
urlbase="https://web2py.com/cas/cas",
actions=['login', 'validate', 'logout'],
maps=dict(username=lambda v: v.get('username', v['user']),
email=lambda v: v.get('email', None),
user_id=lambda v: v['user']),
casversion=1,
casusername='cas:user'
):
self.urlbase = urlbase
self.cas_login_url = "%s/%s" % (self.urlbase, actions[0])
self.cas_check_url = "%s/%s" % (self.urlbase, actions[1])
self.cas_logout_url = "%s/%s" % (self.urlbase, actions[2])
self.maps = maps
self.casversion = casversion
self.casusername = casusername
http_host = current.request.env.http_x_forwarded_host
if not http_host:
http_host = current.request.env.http_host
if current.request.env.wsgi_url_scheme in ['https', 'HTTPS']:
scheme = 'https'
else:
scheme = 'http'
self.cas_my_url = '%s://%s%s' % (
scheme, http_host, current.request.env.path_info)
def login_url(self, next="/"):
current.session.token = self._CAS_login()
return next
def logout_url(self, next="/"):
current.session.token = None
current.session.auth = None
self._CAS_logout()
return next
def get_user(self):
user = current.session.token
if user:
d = {'source': 'web2py cas'}
for key in self.maps:
d[key] = self.maps[key](user)
return d
return None
def _CAS_login(self):
"""
exposed as CAS.login(request)
returns a token on success, None on failed authentication
"""
import urllib
self.ticket = current.request.vars.ticket
if not current.request.vars.ticket:
redirect("%s?service=%s" % (self.cas_login_url,
self.cas_my_url))
else:
url = "%s?service=%s&ticket=%s" % (self.cas_check_url,
self.cas_my_url,
self.ticket)
data = urllib.urlopen(url).read()
if data.startswith('yes') or data.startswith('no'):
data = data.split('\n')
if data[0] == 'yes':
if ':' in data[1]: # for Compatibility with Custom CAS
items = data[1].split(':')
a = items[0]
b = len(items) > 1 and items[1] or a
c = len(items) > 2 and items[2] or b
else:
a = b = c = data[1]
return dict(user=a, email=b, username=c)
return None
import xml.dom.minidom as dom
import xml.parsers.expat as expat
try:
dxml = dom.parseString(data)
envelop = dxml.getElementsByTagName(
"cas:authenticationSuccess")
if len(envelop) > 0:
res = dict()
for x in envelop[0].childNodes:
if x.nodeName.startswith('cas:') and len(x.childNodes):
key = x.nodeName[4:].encode('utf8')
value = x.childNodes[0].nodeValue.encode('utf8')
if key not in res:
res[key] = value
else:
if not isinstance(res[key], list):
res[key] = [res[key]]
res[key].append(value)
return res
except expat.ExpatError:
pass
return None # fallback
def _CAS_logout(self):
"""
exposed as CAS.logout()
redirects to the CAS logout page
"""
import urllib
redirect("%s?service=%s" % (self.cas_logout_url, self.cas_my_url))
|
hisilicon/git-repo
|
refs/heads/master
|
project.py
|
5
|
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import contextlib
import errno
import filecmp
import os
import random
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
from color import Coloring
from git_command import GitCommand, git_require
from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
from error import GitError, HookError, UploadError
from error import ManifestInvalidRevisionError
from error import NoManifestException
from trace import IsTrace, Trace
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
from pyversion import is_python3
if not is_python3():
# pylint:disable=W0622
input = raw_input
# pylint:enable=W0622
def _lwrite(path, content):
lock = '%s.lock' % path
fd = open(lock, 'w')
try:
fd.write(content)
finally:
fd.close()
try:
os.rename(lock, path)
except OSError:
os.remove(lock)
raise
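# Note on _lwrite() above: it is a write-then-rename pattern. The content is
# written to '<path>.lock' first and only replaces <path> via os.rename() once
# the write succeeded, so a reader of <path> should never observe a
# half-written file.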
def _error(fmt, *args):
msg = fmt % args
print('error: %s' % msg, file=sys.stderr)
def not_rev(r):
return '^' + r
def sq(r):
return "'" + r.replace("'", "'\''") + "'"
_project_hook_list = None
def _ProjectHooks():
"""List the hooks present in the 'hooks' directory.
These hooks are project hooks and are copied to the '.git/hooks' directory
of all subprojects.
This function caches the list of hooks (based on the contents of the
'repo/hooks' directory) on the first call.
Returns:
A list of absolute paths to all of the files in the hooks directory.
"""
global _project_hook_list
if _project_hook_list is None:
d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
d = os.path.join(d, 'hooks')
_project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
return _project_hook_list
class DownloadedChange(object):
_commit_cache = None
def __init__(self, project, base, change_id, ps_id, commit):
self.project = project
self.base = base
self.change_id = change_id
self.ps_id = ps_id
self.commit = commit
@property
def commits(self):
if self._commit_cache is None:
self._commit_cache = self.project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
'--reverse',
'--date-order',
not_rev(self.base),
self.commit,
'--')
return self._commit_cache
class ReviewableBranch(object):
_commit_cache = None
def __init__(self, project, branch, base):
self.project = project
self.branch = branch
self.base = base
@property
def name(self):
return self.branch.name
@property
def commits(self):
if self._commit_cache is None:
self._commit_cache = self.project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
'--reverse',
'--date-order',
not_rev(self.base),
R_HEADS + self.name,
'--')
return self._commit_cache
@property
def unabbrev_commits(self):
r = dict()
for commit in self.project.bare_git.rev_list(
not_rev(self.base),
R_HEADS + self.name,
'--'):
r[commit[0:8]] = commit
return r
@property
def date(self):
return self.project.bare_git.log(
'--pretty=format:%cd',
'-n', '1',
R_HEADS + self.name,
'--')
def UploadForReview(self, people, auto_topic=False, draft=False, dest_branch=None):
self.project.UploadForReview(self.name,
people,
auto_topic=auto_topic,
draft=draft,
dest_branch=dest_branch)
def GetPublishedRefs(self):
refs = {}
output = self.project.bare_git.ls_remote(
self.branch.remote.SshReviewUrl(self.project.UserEmail),
'refs/changes/*')
for line in output.split('\n'):
try:
(sha, ref) = line.split()
refs[sha] = ref
except ValueError:
pass
return refs
class StatusColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'status')
self.project = self.printer('header', attr='bold')
self.branch = self.printer('header', attr='bold')
self.nobranch = self.printer('nobranch', fg='red')
self.important = self.printer('important', fg='red')
self.added = self.printer('added', fg='green')
self.changed = self.printer('changed', fg='red')
self.untracked = self.printer('untracked', fg='red')
class DiffColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'diff')
self.project = self.printer('header', attr='bold')
class _Annotation(object):
def __init__(self, name, value, keep):
self.name = name
self.value = value
self.keep = keep
class _CopyFile(object):
def __init__(self, src, dest, abssrc, absdest):
self.src = src
self.dest = dest
self.abs_src = abssrc
self.abs_dest = absdest
def _Copy(self):
src = self.abs_src
dest = self.abs_dest
# copy file if it does not exist or is out of date
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
try:
# remove existing file first, since it might be read-only
if os.path.exists(dest):
os.remove(dest)
else:
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copy(src, dest)
# make the file read-only
mode = os.stat(dest)[stat.ST_MODE]
mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
os.chmod(dest, mode)
except IOError:
_error('Cannot copy file %s to %s', src, dest)
class _LinkFile(object):
def __init__(self, src, dest, abssrc, absdest):
self.src = src
self.dest = dest
self.abs_src = abssrc
self.abs_dest = absdest
def _Link(self):
src = self.abs_src
dest = self.abs_dest
# link file if it does not exist or is out of date
if not os.path.islink(dest) or os.readlink(dest) != src:
try:
# remove existing file first, since it might be read-only
if os.path.exists(dest):
os.remove(dest)
else:
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
os.symlink(src, dest)
except IOError:
_error('Cannot link file %s to %s', src, dest)
class RemoteSpec(object):
def __init__(self,
name,
url=None,
review=None,
revision=None):
self.name = name
self.url = url
self.review = review
self.revision = revision
class RepoHook(object):
"""A RepoHook contains information about a script to run as a hook.
Hooks are used to run a python script before running an upload (for instance,
to run presubmit checks). Eventually, we may have hooks for other actions.
This shouldn't be confused with files in the 'repo/hooks' directory. Those
files are copied into each '.git/hooks' folder for each project. Repo-level
hooks are associated instead with repo actions.
Hooks are always python. When a hook is run, we will load the hook into the
interpreter and execute its main() function.
"""
def __init__(self,
hook_type,
hooks_project,
topdir,
abort_if_user_denies=False):
"""RepoHook constructor.
Params:
hook_type: A string representing the type of hook. This is also used
to figure out the name of the file containing the hook. For
example: 'pre-upload'.
hooks_project: The project containing the repo hooks. If you have a
manifest, this is manifest.repo_hooks_project. OK if this is None,
which will make the hook a no-op.
topdir: Repo's top directory (the one containing the .repo directory).
Scripts will run with CWD as this directory. If you have a manifest,
this is manifest.topdir
abort_if_user_denies: If True, we'll throw a HookError() if the user
doesn't allow us to run the hook.
"""
self._hook_type = hook_type
self._hooks_project = hooks_project
self._topdir = topdir
self._abort_if_user_denies = abort_if_user_denies
# Store the full path to the script for convenience.
if self._hooks_project:
self._script_fullpath = os.path.join(self._hooks_project.worktree,
self._hook_type + '.py')
else:
self._script_fullpath = None
def _GetHash(self):
"""Return a hash of the contents of the hooks directory.
We'll just use git to do this. This hash has the property that if anything
changes in the directory we will return a different hash.
SECURITY CONSIDERATION:
This hash only represents the contents of files in the hook directory, not
any other files imported or called by hooks. Changes to imported files
can change the script behavior without affecting the hash.
Returns:
A string representing the hash. This will always be ASCII so that it can
be printed to the user easily.
"""
assert self._hooks_project, "Must have hooks to calculate their hash."
# We will use the work_git object rather than just calling GetRevisionId().
# That gives us a hash of the latest checked in version of the files that
# the user will actually be executing. Specifically, GetRevisionId()
# doesn't appear to change even if a user checks out a different version
# of the hooks repo (via git checkout) or if a user commits their own revs.
#
# NOTE: Local (non-committed) changes will not be factored into this hash.
# I think this is OK, since we're really only worried about warning the user
# about upstream changes.
return self._hooks_project.work_git.rev_parse('HEAD')
def _GetMustVerb(self):
"""Return 'must' if the hook is required; 'should' if not."""
if self._abort_if_user_denies:
return 'must'
else:
return 'should'
def _CheckForHookApproval(self):
"""Check to see whether this hook has been approved.
We'll look at the hash of all of the hooks. If this matches the hash that
the user last approved, we're done. If it doesn't, we'll ask the user
about approval.
Note that we ask permission for each individual hook even though we use
the hash of all hooks when detecting changes. We'd like the user to be
able to approve / deny each hook individually. We only use the hash of all
hooks because there is no other easy way to detect changes to local imports.
Returns:
True if this hook is approved to run; False otherwise.
Raises:
HookError: Raised if the user doesn't approve and abort_if_user_denies
was passed to the constructor.
"""
hooks_config = self._hooks_project.config
git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
# Get the last hash that the user approved for this hook; may be None.
old_hash = hooks_config.GetString(git_approval_key)
# Get the current hash so we can tell if scripts changed since approval.
new_hash = self._GetHash()
if old_hash is not None:
# User previously approved hook and asked not to be prompted again.
if new_hash == old_hash:
# Approval matched. We're done.
return True
else:
# Give the user a reason why we're prompting, since they last told
# us to "never ask again".
prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
self._hook_type)
else:
prompt = ''
# Only prompt the user if we're on a tty; otherwise we'll assume "no".
if sys.stdout.isatty():
prompt += ('Repo %s run the script:\n'
' %s\n'
'\n'
'Do you want to allow this script to run '
'(yes/yes-never-ask-again/NO)? ') % (
self._GetMustVerb(), self._script_fullpath)
response = input(prompt).lower()
print()
# User is doing a one-time approval.
if response in ('y', 'yes'):
return True
elif response == 'yes-never-ask-again':
hooks_config.SetString(git_approval_key, new_hash)
return True
# For anything else, we'll assume no approval.
if self._abort_if_user_denies:
raise HookError('You must allow the %s hook or use --no-verify.' %
self._hook_type)
return False
def _ExecuteHook(self, **kwargs):
"""Actually execute the given hook.
This will run the hook's 'main' function in our python interpreter.
Args:
kwargs: Keyword arguments to pass to the hook. These are often specific
to the hook type. For instance, pre-upload hooks will contain
a project_list.
"""
# Keep sys.path and CWD stashed away so that we can always restore them
# upon function exit.
orig_path = os.getcwd()
orig_syspath = sys.path
try:
# Always run hooks with CWD as topdir.
os.chdir(self._topdir)
# Put the hook dir as the first item of sys.path so hooks can do
# relative imports. We want to replace the repo dir as [0] so
# hooks can't import repo files.
sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
# Exec, storing global context in the context dict. We catch exceptions
# and convert to a HookError w/ just the failing traceback.
context = {}
try:
exec(compile(open(self._script_fullpath).read(),
self._script_fullpath, 'exec'), context)
except Exception:
raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
traceback.format_exc(), self._hook_type))
# Running the script should have defined a main() function.
if 'main' not in context:
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
# Add 'hook_should_take_kwargs' to the arguments to be passed to main.
# We don't actually want hooks to define their main with this argument--
# it's there to remind them that their hook should always take **kwargs.
# For instance, a pre-upload hook should be defined like:
# def main(project_list, **kwargs):
#
# This allows us to later expand the API without breaking old hooks.
kwargs = kwargs.copy()
kwargs['hook_should_take_kwargs'] = True
# Call the main function in the hook. If the hook should cause the
# build to fail, it will raise an Exception. We'll catch that and convert it
# to a HookError w/ just the failing traceback.
try:
context['main'](**kwargs)
except Exception:
raise HookError('%s\nFailed to run main() for %s hook; see traceback '
'above.' % (
traceback.format_exc(), self._hook_type))
finally:
# Restore sys.path and CWD.
sys.path = orig_syspath
os.chdir(orig_path)
def Run(self, user_allows_all_hooks, **kwargs):
"""Run the hook.
If the hook doesn't exist (because there is no hooks project or because
this particular hook is not enabled), this is a no-op.
Args:
user_allows_all_hooks: If True, we will never prompt about running the
hook--we'll just assume it's OK to run it.
kwargs: Keyword arguments to pass to the hook. These are often specific
to the hook type. For instance, pre-upload hooks will contain
a project_list.
Raises:
HookError: If there was a problem finding the hook or the user declined
to run a required hook (from _CheckForHookApproval).
"""
# No-op if there is no hooks project or if hook is disabled.
if ((not self._hooks_project) or
(self._hook_type not in self._hooks_project.enabled_repo_hooks)):
return
# Bail with a nice error if we can't find the hook.
if not os.path.isfile(self._script_fullpath):
raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath)
# Make sure the user is OK with running the hook.
if (not user_allows_all_hooks) and (not self._CheckForHookApproval()):
return
# Run the hook with the same version of python we're using.
self._ExecuteHook(**kwargs)
class Project(object):
def __init__(self,
manifest,
name,
remote,
gitdir,
objdir,
worktree,
relpath,
revisionExpr,
revisionId,
rebase=True,
groups=None,
sync_c=False,
sync_s=False,
clone_depth=None,
upstream=None,
parent=None,
is_derived=False,
dest_branch=None):
"""Init a Project object.
Args:
manifest: The XmlManifest object.
name: The `name` attribute of manifest.xml's project element.
remote: RemoteSpec object specifying its remote's properties.
gitdir: Absolute path of git directory.
objdir: Absolute path of directory to store git objects.
worktree: Absolute path of git working tree.
relpath: Relative path of git working tree to repo's top directory.
revisionExpr: The `revision` attribute of manifest.xml's project element.
revisionId: git commit id for checking out.
rebase: The `rebase` attribute of manifest.xml's project element.
groups: The `groups` attribute of manifest.xml's project element.
sync_c: The `sync-c` attribute of manifest.xml's project element.
sync_s: The `sync-s` attribute of manifest.xml's project element.
upstream: The `upstream` attribute of manifest.xml's project element.
parent: The parent Project object.
is_derived: False if the project was explicitly defined in the manifest;
True if the project is a discovered submodule.
dest_branch: The branch to which to push changes for review by default.
"""
self.manifest = manifest
self.name = name
self.remote = remote
self.gitdir = gitdir.replace('\\', '/')
self.objdir = objdir.replace('\\', '/')
if worktree:
self.worktree = worktree.replace('\\', '/')
else:
self.worktree = None
self.relpath = relpath
self.revisionExpr = revisionExpr
if revisionId is None \
and revisionExpr \
and IsId(revisionExpr):
self.revisionId = revisionExpr
else:
self.revisionId = revisionId
self.rebase = rebase
self.groups = groups
self.sync_c = sync_c
self.sync_s = sync_s
self.clone_depth = clone_depth
self.upstream = upstream
self.parent = parent
self.is_derived = is_derived
self.subprojects = []
self.snapshots = {}
self.copyfiles = []
self.linkfiles = []
self.annotations = []
self.config = GitConfig.ForRepository(
gitdir=self.gitdir,
defaults=self.manifest.globalConfig)
if self.worktree:
self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
else:
self.work_git = None
self.bare_git = self._GitGetByExec(self, bare=True, gitdir=gitdir)
self.bare_ref = GitRefs(gitdir)
self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=objdir)
self.dest_branch = dest_branch
# This will be filled in if a project is later identified to be the
# project containing repo hooks.
self.enabled_repo_hooks = []
@property
def Derived(self):
return self.is_derived
@property
def Exists(self):
return os.path.isdir(self.gitdir)
@property
def CurrentBranch(self):
"""Obtain the name of the currently checked out branch.
The branch name omits the 'refs/heads/' prefix.
None is returned if the project is on a detached HEAD.
"""
b = self.work_git.GetHead()
if b.startswith(R_HEADS):
return b[len(R_HEADS):]
return None
def IsRebaseInProgress(self):
w = self.worktree
g = os.path.join(w, '.git')
return os.path.exists(os.path.join(g, 'rebase-apply')) \
or os.path.exists(os.path.join(g, 'rebase-merge')) \
or os.path.exists(os.path.join(w, '.dotest'))
def IsDirty(self, consider_untracked=True):
"""Is the working directory modified in some way?
"""
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD):
return True
if self.work_git.DiffZ('diff-files'):
return True
if consider_untracked and self.work_git.LsOthers():
return True
return False
_userident_name = None
_userident_email = None
@property
def UserName(self):
"""Obtain the user's personal name.
"""
if self._userident_name is None:
self._LoadUserIdentity()
return self._userident_name
@property
def UserEmail(self):
"""Obtain the user's email address. This is very likely
to be their Gerrit login.
"""
if self._userident_email is None:
self._LoadUserIdentity()
return self._userident_email
def _LoadUserIdentity(self):
u = self.bare_git.var('GIT_COMMITTER_IDENT')
m = re.compile("^(.*) <([^>]*)> ").match(u)
if m:
self._userident_name = m.group(1)
self._userident_email = m.group(2)
else:
self._userident_name = ''
self._userident_email = ''
def GetRemote(self, name):
"""Get the configuration for a single remote.
"""
return self.config.GetRemote(name)
def GetBranch(self, name):
"""Get the configuration for a single branch.
"""
return self.config.GetBranch(name)
def GetBranches(self):
"""Get all existing local branches.
"""
current = self.CurrentBranch
all_refs = self._allrefs
heads = {}
for name, ref_id in all_refs.items():
if name.startswith(R_HEADS):
name = name[len(R_HEADS):]
b = self.GetBranch(name)
b.current = name == current
b.published = None
b.revision = ref_id
heads[name] = b
for name, ref_id in all_refs.items():
if name.startswith(R_PUB):
name = name[len(R_PUB):]
b = heads.get(name)
if b:
b.published = ref_id
return heads
def MatchesGroups(self, manifest_groups):
"""Returns true if the manifest groups specified at init should cause
this project to be synced.
Prefixing a manifest group with "-" inverts the meaning of a group.
All projects are implicitly labelled with "all".
Labels are resolved in order. In the example case of
project_groups: "all,group1,group2"
manifest_groups: "-group1,group2"
the project will be matched.
The special manifest group "default" will match any project that
does not have the special project group "notdefault".
"""
expanded_manifest_groups = manifest_groups or ['default']
expanded_project_groups = ['all'] + (self.groups or [])
if 'notdefault' not in expanded_project_groups:
expanded_project_groups += ['default']
matched = False
for group in expanded_manifest_groups:
if group.startswith('-') and group[1:] in expanded_project_groups:
matched = False
elif group in expanded_project_groups:
matched = True
return matched
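# Worked example from the docstring above: the project groups include 'all',
# 'group1', 'group2' and (implicitly) 'default', and the manifest groups
# "-group1,group2" are applied in order, so matched flips to False on
# "-group1" and back to True on "group2"; hence that project is synced.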
## Status Display ##
def UncommitedFiles(self, get_all=True):
"""Returns a list of strings, uncommitted files in the git tree.
Args:
get_all: a boolean; if True, get information about all the different
kinds of uncommitted files. If False, return as soon as any kind of
uncommitted file is detected.
"""
details = []
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.IsRebaseInProgress():
details.append("rebase in progress")
if not get_all:
return details
changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys()
if changes:
details.extend(changes)
if not get_all:
return details
changes = self.work_git.DiffZ('diff-files').keys()
if changes:
details.extend(changes)
if not get_all:
return details
changes = self.work_git.LsOthers()
if changes:
details.extend(changes)
return details
def HasChanges(self):
"""Returns true if there are uncommitted changes.
"""
if self.UncommitedFiles(get_all=False):
return True
else:
return False
def PrintWorkTreeStatus(self, output_redir=None):
"""Prints the status of the repository to stdout.
Args:
output: If specified, redirect the output to this object.
"""
if not os.path.isdir(self.worktree):
if output_redir is None:
output_redir = sys.stdout
print(file=output_redir)
print('project %s/' % self.relpath, file=output_redir)
print(' missing (run "repo sync")', file=output_redir)
return
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
rb = self.IsRebaseInProgress()
di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
df = self.work_git.DiffZ('diff-files')
do = self.work_git.LsOthers()
if not rb and not di and not df and not do and not self.CurrentBranch:
return 'CLEAN'
out = StatusColoring(self.config)
if output_redir is not None:
out.redirect(output_redir)
out.project('project %-40s', self.relpath + '/ ')
branch = self.CurrentBranch
if branch is None:
out.nobranch('(*** NO BRANCH ***)')
else:
out.branch('branch %s', branch)
out.nl()
if rb:
out.important('prior sync failed; rebase still in progress')
out.nl()
paths = list()
paths.extend(di.keys())
paths.extend(df.keys())
paths.extend(do)
for p in sorted(set(paths)):
try:
i = di[p]
except KeyError:
i = None
try:
f = df[p]
except KeyError:
f = None
if i:
i_status = i.status.upper()
else:
i_status = '-'
if f:
f_status = f.status.lower()
else:
f_status = '-'
if i and i.src_path:
line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
i.src_path, p, i.level)
else:
line = ' %s%s\t%s' % (i_status, f_status, p)
if i and not f:
out.added('%s', line)
elif (i and f) or (not i and f):
out.changed('%s', line)
elif not i and not f:
out.untracked('%s', line)
else:
out.write('%s', line)
out.nl()
return 'DIRTY'
def PrintWorkTreeDiff(self, absolute_paths=False):
"""Prints the status of the repository to stdout.
"""
out = DiffColoring(self.config)
cmd = ['diff']
if out.is_on:
cmd.append('--color')
cmd.append(HEAD)
if absolute_paths:
cmd.append('--src-prefix=a/%s/' % self.relpath)
cmd.append('--dst-prefix=b/%s/' % self.relpath)
cmd.append('--')
p = GitCommand(self,
cmd,
capture_stdout=True,
capture_stderr=True)
has_diff = False
for line in p.process.stdout:
if not has_diff:
out.nl()
out.project('project %s/' % self.relpath)
out.nl()
has_diff = True
print(line[:-1])
p.Wait()
## Publish / Upload ##
def WasPublished(self, branch, all_refs=None):
"""Was the branch published (uploaded) for code review?
If so, returns the SHA-1 hash of the last published
state for the branch.
"""
key = R_PUB + branch
if all_refs is None:
try:
return self.bare_git.rev_parse(key)
except GitError:
return None
else:
try:
return all_refs[key]
except KeyError:
return None
def CleanPublishedCache(self, all_refs=None):
"""Prunes any stale published refs.
"""
if all_refs is None:
all_refs = self._allrefs
heads = set()
canrm = {}
for name, ref_id in all_refs.items():
if name.startswith(R_HEADS):
heads.add(name)
elif name.startswith(R_PUB):
canrm[name] = ref_id
for name, ref_id in canrm.items():
n = name[len(R_PUB):]
if R_HEADS + n not in heads:
self.bare_git.DeleteRef(name, ref_id)
def GetUploadableBranches(self, selected_branch=None):
"""List any branches which can be uploaded for review.
"""
heads = {}
pubed = {}
for name, ref_id in self._allrefs.items():
if name.startswith(R_HEADS):
heads[name[len(R_HEADS):]] = ref_id
elif name.startswith(R_PUB):
pubed[name[len(R_PUB):]] = ref_id
ready = []
for branch, ref_id in heads.items():
if branch in pubed and pubed[branch] == ref_id:
continue
if selected_branch and branch != selected_branch:
continue
rb = self.GetUploadableBranch(branch)
if rb:
ready.append(rb)
return ready
def GetUploadableBranch(self, branch_name):
"""Get a single uploadable branch, or None.
"""
branch = self.GetBranch(branch_name)
base = branch.LocalMerge
if branch.LocalMerge:
rb = ReviewableBranch(self, branch, base)
if rb.commits:
return rb
return None
def UploadForReview(self, branch=None,
people=([], []),
auto_topic=False,
draft=False,
dest_branch=None):
"""Uploads the named branch for code review.
"""
if branch is None:
branch = self.CurrentBranch
if branch is None:
raise GitError('not currently on a branch')
branch = self.GetBranch(branch)
if not branch.LocalMerge:
raise GitError('branch %s does not track a remote' % branch.name)
if not branch.remote.review:
raise GitError('remote %s has no review url' % branch.remote.name)
if dest_branch is None:
dest_branch = self.dest_branch
if dest_branch is None:
dest_branch = branch.merge
if not dest_branch.startswith(R_HEADS):
dest_branch = R_HEADS + dest_branch
if not branch.remote.projectname:
branch.remote.projectname = self.name
branch.remote.Save()
url = branch.remote.ReviewUrl(self.UserEmail)
if url is None:
raise UploadError('review not configured')
cmd = ['push']
if url.startswith('ssh://'):
rp = ['gerrit receive-pack']
for e in people[0]:
rp.append('--reviewer=%s' % sq(e))
for e in people[1]:
rp.append('--cc=%s' % sq(e))
cmd.append('--receive-pack=%s' % " ".join(rp))
cmd.append(url)
if dest_branch.startswith(R_HEADS):
dest_branch = dest_branch[len(R_HEADS):]
upload_type = 'for'
if draft:
upload_type = 'drafts'
ref_spec = '%s:refs/%s/%s' % (R_HEADS + branch.name, upload_type,
dest_branch)
if auto_topic:
ref_spec = ref_spec + '/' + branch.name
if not url.startswith('ssh://'):
rp = ['r=%s' % p for p in people[0]] + \
['cc=%s' % p for p in people[1]]
if rp:
ref_spec = ref_spec + '%' + ','.join(rp)
cmd.append(ref_spec)
if GitCommand(self, cmd, bare=True).Wait() != 0:
raise UploadError('Upload failed')
msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
self.bare_git.UpdateRef(R_PUB + branch.name,
R_HEADS + branch.name,
message=msg)
## Sync ##
def _ExtractArchive(self, tarpath, path=None):
"""Extract the given tar on its current location
Args:
- tarpath: The path to the actual tar file
"""
try:
with tarfile.open(tarpath, 'r') as tar:
tar.extractall(path=path)
return True
except (IOError, tarfile.TarError) as e:
print("error: Cannot extract archive %s: "
"%s" % (tarpath, str(e)), file=sys.stderr)
return False
def Sync_NetworkHalf(self,
quiet=False,
is_new=None,
current_branch_only=False,
clone_bundle=True,
no_tags=False,
archive=False):
"""Perform only the network IO portion of the sync process.
Local working directory/branch state is not affected.
"""
if archive and not isinstance(self, MetaProject):
if self.remote.url.startswith(('http://', 'https://')):
print("error: %s: Cannot fetch archives from http/https "
"remotes." % self.name, file=sys.stderr)
return False
name = self.relpath.replace('\\', '/')
name = name.replace('/', '_')
tarpath = '%s.tar' % name
topdir = self.manifest.topdir
try:
self._FetchArchive(tarpath, cwd=topdir)
except GitError as e:
print('error: %s' % str(e), file=sys.stderr)
return False
# From now on, we only need absolute tarpath
tarpath = os.path.join(topdir, tarpath)
if not self._ExtractArchive(tarpath, path=topdir):
return False
try:
os.remove(tarpath)
except OSError as e:
print("warn: Cannot remove archive %s: "
"%s" % (tarpath, str(e)), file=sys.stderr)
self._CopyAndLinkFiles()
return True
if is_new is None:
is_new = not self.Exists
if is_new:
self._InitGitDir()
else:
self._UpdateHooks()
self._InitRemote()
if is_new:
alt = os.path.join(self.gitdir, 'objects/info/alternates')
try:
fd = open(alt, 'rb')
try:
alt_dir = fd.readline().rstrip()
finally:
fd.close()
except IOError:
alt_dir = None
else:
alt_dir = None
if clone_bundle \
and alt_dir is None \
and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
is_new = False
if not current_branch_only:
if self.sync_c:
current_branch_only = True
elif not self.manifest._loaded:
# Manifest cannot check defaults until it syncs.
current_branch_only = False
elif self.manifest.default.sync_c:
current_branch_only = True
has_sha1 = ID_RE.match(self.revisionExpr) and self._CheckForSha1()
if (not has_sha1  # Need to fetch since we don't already have this revision
and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
current_branch_only=current_branch_only,
no_tags=no_tags)):
return False
if self.worktree:
self._InitMRef()
else:
self._InitMirrorHead()
try:
os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
except OSError:
pass
return True
def PostRepoUpgrade(self):
self._InitHooks()
def _CopyAndLinkFiles(self):
for copyfile in self.copyfiles:
copyfile._Copy()
for linkfile in self.linkfiles:
linkfile._Link()
def GetCommitRevisionId(self):
"""Get revisionId of a commit.
Use this method instead of GetRevisionId to get the id of the commit rather
than the id of the current git object (for example, a tag).
"""
if not self.revisionExpr.startswith(R_TAGS):
return self.GetRevisionId(self._allrefs)
try:
return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
except GitError:
raise ManifestInvalidRevisionError(
'revision %s in %s not found' % (self.revisionExpr,
self.name))
def GetRevisionId(self, all_refs=None):
if self.revisionId:
return self.revisionId
rem = self.GetRemote(self.remote.name)
rev = rem.ToLocal(self.revisionExpr)
if all_refs is not None and rev in all_refs:
return all_refs[rev]
try:
return self.bare_git.rev_parse('--verify', '%s^0' % rev)
except GitError:
raise ManifestInvalidRevisionError(
'revision %s in %s not found' % (self.revisionExpr,
self.name))
def Sync_LocalHalf(self, syncbuf):
"""Perform only the local IO portion of the sync process.
Network access is not required.
"""
self._InitWorkTree()
all_refs = self.bare_ref.all
self.CleanPublishedCache(all_refs)
revid = self.GetRevisionId(all_refs)
def _doff():
self._FastForward(revid)
self._CopyAndLinkFiles()
head = self.work_git.GetHead()
if head.startswith(R_HEADS):
branch = head[len(R_HEADS):]
try:
head = all_refs[head]
except KeyError:
head = None
else:
branch = None
if branch is None or syncbuf.detach_head:
# Currently on a detached HEAD. The user is assumed to
# not have any local modifications worth worrying about.
#
if self.IsRebaseInProgress():
syncbuf.fail(self, _PriorSyncFailedError())
return
if head == revid:
# No changes; don't do anything further.
# Except if the head needs to be detached
#
if not syncbuf.detach_head:
return
else:
lost = self._revlist(not_rev(revid), HEAD)
if lost:
syncbuf.info(self, "discarding %d commits", len(lost))
try:
self._Checkout(revid, quiet=True)
except GitError as e:
syncbuf.fail(self, e)
return
self._CopyAndLinkFiles()
return
if head == revid:
# No changes; don't do anything further.
#
return
branch = self.GetBranch(branch)
if not branch.LocalMerge:
# The current branch has no tracking configuration.
# Jump off it to a detached HEAD.
#
syncbuf.info(self,
"leaving %s; does not track upstream",
branch.name)
try:
self._Checkout(revid, quiet=True)
except GitError as e:
syncbuf.fail(self, e)
return
self._CopyAndLinkFiles()
return
upstream_gain = self._revlist(not_rev(HEAD), revid)
pub = self.WasPublished(branch.name, all_refs)
if pub:
not_merged = self._revlist(not_rev(revid), pub)
if not_merged:
if upstream_gain:
# The user has published this branch and some of those
# commits are not yet merged upstream. We do not want
# to rewrite the published commits so we punt.
#
syncbuf.fail(self,
"branch %s is published (but not merged) and is now %d commits behind"
% (branch.name, len(upstream_gain)))
return
elif pub == head:
# All published commits are merged, and thus we are a
# strict subset. We can fast-forward safely.
#
syncbuf.later1(self, _doff)
return
# Examine the local commits not in the remote. Find the
# last one attributed to this user, if any.
#
local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
last_mine = None
cnt_mine = 0
for commit in local_changes:
commit_id, committer_email = commit.decode('utf-8').split(' ', 1)
if committer_email == self.UserEmail:
last_mine = commit_id
cnt_mine += 1
if not upstream_gain and cnt_mine == len(local_changes):
return
if self.IsDirty(consider_untracked=False):
syncbuf.fail(self, _DirtyError())
return
# If the upstream switched on us, warn the user.
#
if branch.merge != self.revisionExpr:
if branch.merge and self.revisionExpr:
syncbuf.info(self,
'manifest switched %s...%s',
branch.merge,
self.revisionExpr)
elif branch.merge:
syncbuf.info(self,
'manifest no longer tracks %s',
branch.merge)
if cnt_mine < len(local_changes):
# Upstream rebased. Not everything in HEAD
# was created by this user.
#
syncbuf.info(self,
"discarding %d commits removed from upstream",
len(local_changes) - cnt_mine)
branch.remote = self.GetRemote(self.remote.name)
if not ID_RE.match(self.revisionExpr):
# in case of manifest sync the revisionExpr might be a SHA1
branch.merge = self.revisionExpr
branch.Save()
if cnt_mine > 0 and self.rebase:
def _dorebase():
self._Rebase(upstream='%s^1' % last_mine, onto=revid)
self._CopyAndLinkFiles()
syncbuf.later2(self, _dorebase)
elif local_changes:
try:
self._ResetHard(revid)
self._CopyAndLinkFiles()
except GitError as e:
syncbuf.fail(self, e)
return
else:
syncbuf.later1(self, _doff)
def AddCopyFile(self, src, dest, absdest):
# dest should already be an absolute path, but src is project relative
# make src an absolute path
abssrc = os.path.join(self.worktree, src)
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
def AddLinkFile(self, src, dest, absdest):
# dest should already be an absolute path, but src is project relative
# make src an absolute path
abssrc = os.path.join(self.worktree, src)
self.linkfiles.append(_LinkFile(src, dest, abssrc, absdest))
def AddAnnotation(self, name, value, keep):
self.annotations.append(_Annotation(name, value, keep))
def DownloadPatchSet(self, change_id, patch_id):
"""Download a single patch set of a single change to FETCH_HEAD.
"""
remote = self.GetRemote(self.remote.name)
cmd = ['fetch', remote.name]
cmd.append('refs/changes/%2.2d/%d/%d' \
% (change_id % 100, change_id, patch_id))
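# For illustration: change_id=12345 with patch_id=2 yields
# 'refs/changes/45/12345/2', the layout Gerrit publishes patch sets under
# (last two digits of the change number, then the change, then the patch set).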
if GitCommand(self, cmd, bare=True).Wait() != 0:
return None
return DownloadedChange(self,
self.GetRevisionId(),
change_id,
patch_id,
self.bare_git.rev_parse('FETCH_HEAD'))
## Branch Management ##
def StartBranch(self, name):
"""Create a new branch off the manifest's revision.
"""
head = self.work_git.GetHead()
if head == (R_HEADS + name):
return True
all_refs = self.bare_ref.all
if R_HEADS + name in all_refs:
return GitCommand(self,
['checkout', name, '--'],
capture_stdout=True,
capture_stderr=True).Wait() == 0
branch = self.GetBranch(name)
branch.remote = self.GetRemote(self.remote.name)
branch.merge = self.revisionExpr
revid = self.GetRevisionId(all_refs)
if head.startswith(R_HEADS):
try:
head = all_refs[head]
except KeyError:
head = None
if revid and head and revid == head:
ref = os.path.join(self.gitdir, R_HEADS + name)
try:
os.makedirs(os.path.dirname(ref))
except OSError:
pass
_lwrite(ref, '%s\n' % revid)
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'ref: %s%s\n' % (R_HEADS, name))
branch.Save()
return True
if GitCommand(self,
['checkout', '-b', branch.name, revid],
capture_stdout=True,
capture_stderr=True).Wait() == 0:
branch.Save()
return True
return False
def CheckoutBranch(self, name):
"""Checkout a local topic branch.
Args:
name: The name of the branch to checkout.
Returns:
True if the checkout succeeded; False if it didn't; None if the branch
didn't exist.
"""
rev = R_HEADS + name
head = self.work_git.GetHead()
if head == rev:
# Already on the branch
#
return True
all_refs = self.bare_ref.all
try:
revid = all_refs[rev]
except KeyError:
# Branch does not exist in this project
#
return None
if head.startswith(R_HEADS):
try:
head = all_refs[head]
except KeyError:
head = None
if head == revid:
# Same revision; just update HEAD to point to the new
# target branch, but otherwise take no other action.
#
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'ref: %s%s\n' % (R_HEADS, name))
return True
return GitCommand(self,
['checkout', name, '--'],
capture_stdout=True,
capture_stderr=True).Wait() == 0
def AbandonBranch(self, name):
"""Destroy a local topic branch.
Args:
name: The name of the branch to abandon.
Returns:
True if the abandon succeeded; False if it didn't; None if the branch
didn't exist.
"""
rev = R_HEADS + name
all_refs = self.bare_ref.all
if rev not in all_refs:
# Doesn't exist
return None
head = self.work_git.GetHead()
if head == rev:
# We can't destroy the branch while we are sitting
# on it. Switch to a detached HEAD.
#
head = all_refs[head]
revid = self.GetRevisionId(all_refs)
if head == revid:
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'%s\n' % revid)
else:
self._Checkout(revid, quiet=True)
return GitCommand(self,
['branch', '-D', name],
capture_stdout=True,
capture_stderr=True).Wait() == 0
def PruneHeads(self):
"""Prune any topic branches already merged into upstream.
"""
cb = self.CurrentBranch
kill = []
left = self._allrefs
for name in left.keys():
if name.startswith(R_HEADS):
name = name[len(R_HEADS):]
if cb is None or name != cb:
kill.append(name)
rev = self.GetRevisionId(left)
if cb is not None \
and not self._revlist(HEAD + '...' + rev) \
and not self.IsDirty(consider_untracked=False):
self.work_git.DetachHead(HEAD)
kill.append(cb)
if kill:
old = self.bare_git.GetHead()
if old is None:
old = 'refs/heads/please_never_use_this_as_a_branch_name'
try:
self.bare_git.DetachHead(rev)
b = ['branch', '-d']
b.extend(kill)
b = GitCommand(self, b, bare=True,
capture_stdout=True,
capture_stderr=True)
b.Wait()
finally:
self.bare_git.SetHead(old)
left = self._allrefs
for branch in kill:
if (R_HEADS + branch) not in left:
self.CleanPublishedCache()
break
if cb and cb not in kill:
kill.append(cb)
kill.sort()
kept = []
for branch in kill:
if R_HEADS + branch in left:
branch = self.GetBranch(branch)
base = branch.LocalMerge
if not base:
base = rev
kept.append(ReviewableBranch(self, branch, base))
return kept
## Submodule Management ##
def GetRegisteredSubprojects(self):
result = []
def rec(subprojects):
if not subprojects:
return
result.extend(subprojects)
for p in subprojects:
rec(p.subprojects)
rec(self.subprojects)
return result
def _GetSubmodules(self):
# Unfortunately we cannot call `git submodule status --recursive` here
# because the working tree might not exist yet, and it cannot be used
# without a working tree in its current implementation.
def get_submodules(gitdir, rev):
# Parse .gitmodules for submodule sub_paths and sub_urls
sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
if not sub_paths:
return []
# Run `git ls-tree` to read the SHAs of the submodule objects, which happen
# to be the revisions of the submodule repositories.
sub_revs = git_ls_tree(gitdir, rev, sub_paths)
submodules = []
for sub_path, sub_url in zip(sub_paths, sub_urls):
try:
sub_rev = sub_revs[sub_path]
except KeyError:
# Ignore non-existent submodules
continue
submodules.append((sub_rev, sub_path, sub_url))
return submodules
re_path = re.compile(r'^submodule\.([^.]+)\.path=(.*)$')
re_url = re.compile(r'^submodule\.([^.]+)\.url=(.*)$')
def parse_gitmodules(gitdir, rev):
cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
try:
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
bare=True, gitdir=gitdir)
except GitError:
return [], []
if p.Wait() != 0:
return [], []
gitmodules_lines = []
fd, temp_gitmodules_path = tempfile.mkstemp()
try:
os.write(fd, p.stdout)
os.close(fd)
cmd = ['config', '--file', temp_gitmodules_path, '--list']
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
bare=True, gitdir=gitdir)
if p.Wait() != 0:
return [], []
gitmodules_lines = p.stdout.split('\n')
except GitError:
return [], []
finally:
os.remove(temp_gitmodules_path)
names = set()
paths = {}
urls = {}
for line in gitmodules_lines:
if not line:
continue
m = re_path.match(line)
if m:
names.add(m.group(1))
paths[m.group(1)] = m.group(2)
continue
m = re_url.match(line)
if m:
names.add(m.group(1))
urls[m.group(1)] = m.group(2)
continue
names = sorted(names)
return ([paths.get(name, '') for name in names],
[urls.get(name, '') for name in names])
def git_ls_tree(gitdir, rev, paths):
cmd = ['ls-tree', rev, '--']
cmd.extend(paths)
try:
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
bare=True, gitdir=gitdir)
except GitError:
return []
if p.Wait() != 0:
return []
objects = {}
for line in p.stdout.split('\n'):
if not line.strip():
continue
object_rev, object_path = line.split()[2:4]
objects[object_path] = object_rev
return objects
try:
rev = self.GetRevisionId()
except GitError:
return []
return get_submodules(self.gitdir, rev)
def GetDerivedSubprojects(self):
result = []
if not self.Exists:
# If the git repo does not exist yet, querying its submodules will
# mess up its state, so return here.
return result
for rev, path, url in self._GetSubmodules():
name = self.manifest.GetSubprojectName(self, path)
relpath, worktree, gitdir, objdir = \
self.manifest.GetSubprojectPaths(self, name, path)
project = self.manifest.paths.get(relpath)
if project:
result.extend(project.GetDerivedSubprojects())
continue
remote = RemoteSpec(self.remote.name,
url=url,
review=self.remote.review,
revision=self.remote.revision)
subproject = Project(manifest=self.manifest,
name=name,
remote=remote,
gitdir=gitdir,
objdir=objdir,
worktree=worktree,
relpath=relpath,
revisionExpr=self.revisionExpr,
revisionId=rev,
rebase=self.rebase,
groups=self.groups,
sync_c=self.sync_c,
sync_s=self.sync_s,
parent=self,
is_derived=True)
result.append(subproject)
result.extend(subproject.GetDerivedSubprojects())
return result
## Direct Git Commands ##
def _CheckForSha1(self):
try:
# If the revision (sha or tag) is not present then the following function
# throws an error.
self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr)
return True
except GitError:
# There is no such persistent revision. We have to fetch it.
return False
def _FetchArchive(self, tarpath, cwd=None):
cmd = ['archive', '-v', '-o', tarpath]
cmd.append('--remote=%s' % self.remote.url)
cmd.append('--prefix=%s/' % self.relpath)
cmd.append(self.revisionExpr)
command = GitCommand(self, cmd, cwd=cwd,
capture_stdout=True,
capture_stderr=True)
if command.Wait() != 0:
raise GitError('git archive %s: %s' % (self.name, command.stderr))
def _RemoteFetch(self, name=None,
current_branch_only=False,
initial=False,
quiet=False,
alt_dir=None,
no_tags=False):
is_sha1 = False
tag_name = None
depth = None
# The depth should not be used when fetching to a mirror because
# it will result in a shallow repository that cannot be cloned or
# fetched from.
if not self.manifest.IsMirror:
if self.clone_depth:
depth = self.clone_depth
else:
depth = self.manifest.manifestProject.config.GetString('repo.depth')
# The repo project should never be synced with partial depth
if self.relpath == '.repo/repo':
depth = None
if depth:
current_branch_only = True
if ID_RE.match(self.revisionExpr) is not None:
is_sha1 = True
if current_branch_only:
if self.revisionExpr.startswith(R_TAGS):
# this is a tag and its sha1 value should never change
tag_name = self.revisionExpr[len(R_TAGS):]
if is_sha1 or tag_name is not None:
if self._CheckForSha1():
return True
if is_sha1 and not depth:
# When syncing a specific commit and --depth is not set:
# * if upstream is explicitly specified and is not a sha1, fetch only
# upstream as users expect only upstream to be fetched.
# Note: The commit might not be in upstream in which case the sync
# will fail.
# * otherwise, fetch all branches to make sure we end up with the
# specific commit.
current_branch_only = self.upstream and not ID_RE.match(self.upstream)
if not name:
name = self.remote.name
ssh_proxy = False
remote = self.GetRemote(name)
if remote.PreConnectFetch():
ssh_proxy = True
if initial:
if alt_dir and 'objects' == os.path.basename(alt_dir):
ref_dir = os.path.dirname(alt_dir)
packed_refs = os.path.join(self.gitdir, 'packed-refs')
remote = self.GetRemote(name)
all_refs = self.bare_ref.all
ids = set(all_refs.values())
tmp = set()
for r, ref_id in GitRefs(ref_dir).all.items():
if r not in all_refs:
if r.startswith(R_TAGS) or remote.WritesTo(r):
all_refs[r] = ref_id
ids.add(ref_id)
continue
if ref_id in ids:
continue
r = 'refs/_alt/%s' % ref_id
all_refs[r] = ref_id
ids.add(ref_id)
tmp.add(r)
tmp_packed = ''
old_packed = ''
for r in sorted(all_refs):
line = '%s %s\n' % (all_refs[r], r)
tmp_packed += line
if r not in tmp:
old_packed += line
_lwrite(packed_refs, tmp_packed)
else:
alt_dir = None
cmd = ['fetch']
if depth:
cmd.append('--depth=%s' % depth)
if quiet:
cmd.append('--quiet')
if not self.worktree:
cmd.append('--update-head-ok')
cmd.append(name)
# If using depth then we should not get all the tags since they may
# be outside of the depth.
if no_tags or depth:
cmd.append('--no-tags')
else:
cmd.append('--tags')
spec = []
if not current_branch_only:
# Fetch whole repo
spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
elif tag_name is not None:
spec.append('tag')
spec.append(tag_name)
branch = self.revisionExpr
if is_sha1 and depth:
# Shallow checkout of a specific commit, fetch from that commit and not
# the heads only as the commit might be deeper in the history.
spec.append(branch)
else:
if is_sha1:
branch = self.upstream
if branch is not None and branch.strip():
if not branch.startswith('refs/'):
branch = R_HEADS + branch
spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
cmd.extend(spec)
shallowfetch = self.config.GetString('repo.shallowfetch')
if shallowfetch and shallowfetch != ' '.join(spec):
GitCommand(self, ['fetch', '--unshallow', name] + shallowfetch.split(),
bare=True, ssh_proxy=ssh_proxy).Wait()
if depth:
self.config.SetString('repo.shallowfetch', ' '.join(spec))
else:
self.config.SetString('repo.shallowfetch', None)
ok = False
for _i in range(2):
gitcmd = GitCommand(self, cmd, bare=True, capture_stderr=True,
ssh_proxy=ssh_proxy)
ret = gitcmd.Wait()
print(gitcmd.stderr, file=sys.stderr, end='')
if ret == 0:
ok = True
break
# If needed, run 'git remote prune' the first time through the loop
elif (not _i and
"error:" in gitcmd.stderr and
"git remote prune" in gitcmd.stderr):
prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
capture_stderr=True, ssh_proxy=ssh_proxy)
ret = prunecmd.Wait()
print(prunecmd.stderr, file=sys.stderr, end='')
if ret:
break
continue
elif current_branch_only and is_sha1 and ret == 128:
# Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
# mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
# abort the optimization attempt and do a full sync.
break
time.sleep(random.randint(30, 45))
if initial:
if alt_dir:
if old_packed != '':
_lwrite(packed_refs, old_packed)
else:
os.remove(packed_refs)
self.bare_git.pack_refs('--all', '--prune')
if is_sha1 and current_branch_only and self.upstream:
# We just synced the given upstream branch; verify we
# got what we wanted, else trigger a second run of all
# refs.
if not self._CheckForSha1():
return self._RemoteFetch(name=name, current_branch_only=False,
initial=False, quiet=quiet, alt_dir=alt_dir)
return ok
def _ApplyCloneBundle(self, initial=False, quiet=False):
if initial and (self.manifest.manifestProject.config.GetString('repo.depth') or self.clone_depth):
return False
remote = self.GetRemote(self.remote.name)
bundle_url = remote.url + '/clone.bundle'
bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
if GetSchemeFromUrl(bundle_url) not in (
'http', 'https', 'persistent-http', 'persistent-https'):
return False
bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')
exist_dst = os.path.exists(bundle_dst)
exist_tmp = os.path.exists(bundle_tmp)
if not initial and not exist_dst and not exist_tmp:
return False
if not exist_dst:
exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet)
if not exist_dst:
return False
cmd = ['fetch']
if quiet:
cmd.append('--quiet')
if not self.worktree:
cmd.append('--update-head-ok')
cmd.append(bundle_dst)
for f in remote.fetch:
cmd.append(str(f))
cmd.append('refs/tags/*:refs/tags/*')
ok = GitCommand(self, cmd, bare=True).Wait() == 0
if os.path.exists(bundle_dst):
os.remove(bundle_dst)
if os.path.exists(bundle_tmp):
os.remove(bundle_tmp)
return ok
def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet):
if os.path.exists(dstPath):
os.remove(dstPath)
cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location']
if quiet:
cmd += ['--silent']
if os.path.exists(tmpPath):
size = os.stat(tmpPath).st_size
if size >= 1024:
cmd += ['--continue-at', '%d' % (size,)]
else:
os.remove(tmpPath)
if 'http_proxy' in os.environ and 'darwin' == sys.platform:
cmd += ['--proxy', os.environ['http_proxy']]
with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile:
if cookiefile:
cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
if srcUrl.startswith('persistent-'):
srcUrl = srcUrl[len('persistent-'):]
cmd += [srcUrl]
if IsTrace():
Trace('%s', ' '.join(cmd))
try:
proc = subprocess.Popen(cmd)
except OSError:
return False
curlret = proc.wait()
if curlret == 22:
# From curl man page:
# 22: HTTP page not retrieved. The requested url was not found or
# returned another error with the HTTP error code being 400 or above.
# This return code only appears if -f, --fail is used.
if not quiet:
print("Server does not provide clone.bundle; ignoring.",
file=sys.stderr)
return False
if os.path.exists(tmpPath):
if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
os.rename(tmpPath, dstPath)
return True
else:
os.remove(tmpPath)
return False
else:
return False
def _IsValidBundle(self, path, quiet):
try:
with open(path) as f:
if f.read(16) == '# v2 git bundle\n':
return True
else:
if not quiet:
print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
return False
except OSError:
return False
@contextlib.contextmanager
def _GetBundleCookieFile(self, url, quiet):
if url.startswith('persistent-'):
try:
p = subprocess.Popen(
['git-remote-persistent-https', '-print_config', url],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
try:
prefix = 'http.cookiefile='
cookiefile = None
for line in p.stdout:
line = line.strip()
if line.startswith(prefix):
cookiefile = line[len(prefix):]
break
# Leave subprocess open, as cookie file may be transient.
if cookiefile:
yield cookiefile
return
finally:
p.stdin.close()
if p.wait():
err_msg = p.stderr.read()
if ' -print_config' in err_msg:
pass # Persistent proxy doesn't support -print_config.
elif not quiet:
print(err_msg, file=sys.stderr)
except OSError as e:
if e.errno == errno.ENOENT:
pass # No persistent proxy.
raise
yield GitConfig.ForUser().GetString('http.cookiefile')
def _Checkout(self, rev, quiet=False):
cmd = ['checkout']
if quiet:
cmd.append('-q')
cmd.append(rev)
cmd.append('--')
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError('%s checkout %s ' % (self.name, rev))
def _CherryPick(self, rev):
cmd = ['cherry-pick']
cmd.append(rev)
cmd.append('--')
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError('%s cherry-pick %s ' % (self.name, rev))
def _Revert(self, rev):
cmd = ['revert']
cmd.append('--no-edit')
cmd.append(rev)
cmd.append('--')
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError('%s revert %s ' % (self.name, rev))
def _ResetHard(self, rev, quiet=True):
cmd = ['reset', '--hard']
if quiet:
cmd.append('-q')
cmd.append(rev)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s reset --hard %s ' % (self.name, rev))
def _Rebase(self, upstream, onto=None):
cmd = ['rebase']
if onto is not None:
cmd.extend(['--onto', onto])
cmd.append(upstream)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s rebase %s ' % (self.name, upstream))
def _FastForward(self, head, ffonly=False):
cmd = ['merge', head]
if ffonly:
cmd.append("--ff-only")
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s merge %s ' % (self.name, head))
def _InitGitDir(self, mirror_git=None):
if not os.path.exists(self.gitdir):
# Initialize the bare repository, which contains all of the objects.
if not os.path.exists(self.objdir):
os.makedirs(self.objdir)
self.bare_objdir.init()
# If we have a separate directory to hold refs, initialize it as well.
if self.objdir != self.gitdir:
os.makedirs(self.gitdir)
self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False,
copy_all=True)
mp = self.manifest.manifestProject
ref_dir = mp.config.GetString('repo.reference') or ''
if ref_dir or mirror_git:
if not mirror_git:
mirror_git = os.path.join(ref_dir, self.name + '.git')
repo_git = os.path.join(ref_dir, '.repo', 'projects',
self.relpath + '.git')
if os.path.exists(mirror_git):
ref_dir = mirror_git
elif os.path.exists(repo_git):
ref_dir = repo_git
else:
ref_dir = None
if ref_dir:
_lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
os.path.join(ref_dir, 'objects') + '\n')
self._UpdateHooks()
m = self.manifest.manifestProject.config
for key in ['user.name', 'user.email']:
if m.Has(key, include_defaults=False):
self.config.SetString(key, m.GetString(key))
if self.manifest.IsMirror:
self.config.SetString('core.bare', 'true')
else:
self.config.SetString('core.bare', None)
def _UpdateHooks(self):
if os.path.exists(self.gitdir):
self._InitHooks()
def _InitHooks(self):
hooks = os.path.realpath(self._gitdir_path('hooks'))
if not os.path.exists(hooks):
os.makedirs(hooks)
for stock_hook in _ProjectHooks():
name = os.path.basename(stock_hook)
if name in ('commit-msg',) and not self.remote.review \
and self is not self.manifest.manifestProject:
# Don't install a Gerrit Code Review hook if this
# project does not appear to use it for reviews.
#
# The manifest project is an exception: it is managed
# through Gerrit, so it still gets the hook.
continue
dst = os.path.join(hooks, name)
if os.path.islink(dst):
continue
if os.path.exists(dst):
if filecmp.cmp(stock_hook, dst, shallow=False):
os.remove(dst)
else:
_error("%s: Not replacing %s hook", self.relpath, name)
continue
try:
os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
except OSError as e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitRemote(self):
if self.remote.url:
remote = self.GetRemote(self.remote.name)
remote.url = self.remote.url
remote.review = self.remote.review
remote.projectname = self.name
if self.worktree:
remote.ResetFetch(mirror=False)
else:
remote.ResetFetch(mirror=True)
remote.Save()
def _InitMRef(self):
if self.manifest.branch:
self._InitAnyMRef(R_M + self.manifest.branch)
def _InitMirrorHead(self):
self._InitAnyMRef(HEAD)
def _InitAnyMRef(self, ref):
cur = self.bare_ref.symref(ref)
if self.revisionId:
if cur != '' or self.bare_ref.get(ref) != self.revisionId:
msg = 'manifest set to %s' % self.revisionId
dst = self.revisionId + '^0'
self.bare_git.UpdateRef(ref, dst, message=msg, detach=True)
else:
remote = self.GetRemote(self.remote.name)
dst = remote.ToLocal(self.revisionExpr)
if cur != dst:
msg = 'manifest set to %s' % self.revisionExpr
self.bare_git.symbolic_ref('-m', msg, ref, dst)
def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all):
"""Update |dotgit| to reference |gitdir|, using symlinks where possible.
Args:
gitdir: The bare git repository. Must already be initialized.
dotgit: The repository you would like to initialize.
share_refs: If true, |dotgit| will store its refs under |gitdir|.
Only one work tree can store refs under a given |gitdir|.
copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
This saves you the effort of initializing |dotgit| yourself.
"""
# These objects can be shared between several working trees.
symlink_files = ['description', 'info']
symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
if share_refs:
# These objects can only be used by a single working tree.
symlink_files += ['config', 'packed-refs', 'shallow']
symlink_dirs += ['logs', 'refs']
to_symlink = symlink_files + symlink_dirs
to_copy = []
if copy_all:
to_copy = os.listdir(gitdir)
for name in set(to_copy).union(to_symlink):
try:
src = os.path.realpath(os.path.join(gitdir, name))
dst = os.path.realpath(os.path.join(dotgit, name))
if os.path.lexists(dst) and not os.path.islink(dst):
raise GitError('cannot overwrite a local work tree')
# If the source dir doesn't exist, create an empty dir.
if name in symlink_dirs and not os.path.lexists(src):
os.makedirs(src)
# If the source file doesn't exist, ensure the destination
# file doesn't either.
if name in symlink_files and not os.path.lexists(src):
try:
os.remove(dst)
except OSError:
pass
if name in to_symlink:
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
elif copy_all and not os.path.islink(dst):
if os.path.isdir(src):
shutil.copytree(src, dst)
elif os.path.isfile(src):
shutil.copy(src, dst)
except OSError as e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitWorkTree(self):
dotgit = os.path.join(self.worktree, '.git')
if not os.path.exists(dotgit):
os.makedirs(dotgit)
self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
copy_all=False)
_lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
cmd = ['read-tree', '--reset', '-u']
cmd.append('-v')
cmd.append(HEAD)
if GitCommand(self, cmd).Wait() != 0:
raise GitError("cannot initialize work tree")
self._CopyAndLinkFiles()
def _gitdir_path(self, path):
return os.path.realpath(os.path.join(self.gitdir, path))
def _revlist(self, *args, **kw):
a = []
a.extend(args)
a.append('--')
return self.work_git.rev_list(*a, **kw)
@property
def _allrefs(self):
return self.bare_ref.all
def _getLogs(self, rev1, rev2, oneline=False, color=True):
"""Get logs between two revisions of this project."""
comp = '..'
if rev1:
revs = [rev1]
if rev2:
revs.extend([comp, rev2])
cmd = ['log', ''.join(revs)]
out = DiffColoring(self.config)
if out.is_on and color:
cmd.append('--color')
if oneline:
cmd.append('--oneline')
try:
log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
if log.Wait() == 0:
return log.stdout
except GitError:
# The worktree may not exist if, for example, the project's groups
# changed. In that case, try the gitdir instead.
if not os.path.exists(self.worktree):
return self.bare_git.log(*cmd[1:])
else:
raise
return None
def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True):
"""Get the list of logs from this revision to given revisionId"""
logs = {}
selfId = self.GetRevisionId(self._allrefs)
toId = toProject.GetRevisionId(toProject._allrefs)
logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color)
logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color)
return logs
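# Usage note (illustrative): if this project is at revision A and
# toProject is at revision B, logs['added'] is `git log A..B` (commits
# only reachable from B) and logs['removed'] is `git log B..A`.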
class _GitGetByExec(object):
def __init__(self, project, bare, gitdir):
self._project = project
self._bare = bare
self._gitdir = gitdir
def LsOthers(self):
p = GitCommand(self._project,
['ls-files',
'-z',
'--others',
'--exclude-standard'],
bare=False,
gitdir=self._gitdir,
capture_stdout=True,
capture_stderr=True)
if p.Wait() == 0:
out = p.stdout
if out:
return out[:-1].split('\0') # pylint: disable=W1401
# Backslash is not anomalous
return []
def DiffZ(self, name, *args):
cmd = [name]
cmd.append('-z')
cmd.extend(args)
p = GitCommand(self._project,
cmd,
gitdir=self._gitdir,
bare=False,
capture_stdout=True,
capture_stderr=True)
try:
out = p.process.stdout.read()
r = {}
if out:
out = iter(out[:-1].split('\0')) # pylint: disable=W1401
while out:
try:
info = next(out)
path = next(out)
except StopIteration:
break
class _Info(object):
def __init__(self, path, omode, nmode, oid, nid, state):
self.path = path
self.src_path = None
self.old_mode = omode
self.new_mode = nmode
self.old_id = oid
self.new_id = nid
if len(state) == 1:
self.status = state
self.level = None
else:
self.status = state[:1]
self.level = state[1:]
while self.level.startswith('0'):
self.level = self.level[1:]
info = info[1:].split(' ')
info = _Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
info.path = next(out)
r[info.path] = info
return r
finally:
p.Wait()
def GetHead(self):
if self._bare:
path = os.path.join(self._project.gitdir, HEAD)
else:
path = os.path.join(self._project.worktree, '.git', HEAD)
try:
fd = open(path, 'rb')
except IOError as e:
raise NoManifestException(path, str(e))
try:
line = fd.read()
finally:
fd.close()
try:
line = line.decode()
except AttributeError:
pass
if line.startswith('ref: '):
return line[5:-1]
return line[:-1]
def SetHead(self, ref, message=None):
cmdv = []
if message is not None:
cmdv.extend(['-m', message])
cmdv.append(HEAD)
cmdv.append(ref)
self.symbolic_ref(*cmdv)
def DetachHead(self, new, message=None):
cmdv = ['--no-deref']
if message is not None:
cmdv.extend(['-m', message])
cmdv.append(HEAD)
cmdv.append(new)
self.update_ref(*cmdv)
def UpdateRef(self, name, new, old=None,
message=None,
detach=False):
cmdv = []
if message is not None:
cmdv.extend(['-m', message])
if detach:
cmdv.append('--no-deref')
cmdv.append(name)
cmdv.append(new)
if old is not None:
cmdv.append(old)
self.update_ref(*cmdv)
def DeleteRef(self, name, old=None):
if not old:
old = self.rev_parse(name)
self.update_ref('-d', name, old)
self._project.bare_ref.deleted(name)
def rev_list(self, *args, **kw):
if 'format' in kw:
cmdv = ['log', '--pretty=format:%s' % kw['format']]
else:
cmdv = ['rev-list']
cmdv.extend(args)
p = GitCommand(self._project,
cmdv,
bare=self._bare,
gitdir=self._gitdir,
capture_stdout=True,
capture_stderr=True)
r = []
for line in p.process.stdout:
if line[-1] == '\n':
line = line[:-1]
r.append(line)
if p.Wait() != 0:
raise GitError('%s rev-list %s: %s' % (
self._project.name,
str(args),
p.stderr))
return r
def __getattr__(self, name):
"""Allow arbitrary git commands using pythonic syntax.
This allows you to do things like:
git_obj.rev_parse('HEAD')
Since we don't have a 'rev_parse' method defined, the __getattr__ will
run. We'll replace the '_' with a '-' and try to run a git command.
Any other positional arguments will be passed to the git command, and the
following keyword arguments are supported:
config: An optional dict of git config options to be passed with '-c'.
Args:
name: The name of the git command to call. Any '_' characters will
be replaced with '-'.
Returns:
A callable object that will try to call git with the named command.
"""
name = name.replace('_', '-')
def runner(*args, **kwargs):
cmdv = []
config = kwargs.pop('config', None)
for k in kwargs:
raise TypeError('%s() got an unexpected keyword argument %r'
% (name, k))
if config is not None:
if not git_require((1, 7, 2)):
raise ValueError('cannot set config on command line for %s()'
% name)
for k, v in config.items():
cmdv.append('-c')
cmdv.append('%s=%s' % (k, v))
cmdv.append(name)
cmdv.extend(args)
p = GitCommand(self._project,
cmdv,
bare=self._bare,
gitdir=self._gitdir,
capture_stdout=True,
capture_stderr=True)
if p.Wait() != 0:
raise GitError('%s %s: %s' % (
self._project.name,
name,
p.stderr))
r = p.stdout
try:
r = r.decode('utf-8')
except AttributeError:
pass
if r.endswith('\n') and r.index('\n') == len(r) - 1:
return r[:-1]
return r
return runner
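# Illustrative calls (the names below are only examples, not an
# exhaustive API):
#   self.bare_git.rev_parse('HEAD')
#   self.bare_git.cat_file('-t', rev, config={'core.abbrev': '12'})
# Each call shells out to `git <command>`, returns the captured stdout,
# and raises GitError on a non-zero exit status.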
class _PriorSyncFailedError(Exception):
def __str__(self):
return 'prior sync failed; rebase still in progress'
class _DirtyError(Exception):
def __str__(self):
return 'contains uncommitted changes'
class _InfoMessage(object):
def __init__(self, project, text):
self.project = project
self.text = text
def Print(self, syncbuf):
syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
syncbuf.out.nl()
class _Failure(object):
def __init__(self, project, why):
self.project = project
self.why = why
def Print(self, syncbuf):
syncbuf.out.fail('error: %s/: %s',
self.project.relpath,
str(self.why))
syncbuf.out.nl()
class _Later(object):
def __init__(self, project, action):
self.project = project
self.action = action
def Run(self, syncbuf):
out = syncbuf.out
out.project('project %s/', self.project.relpath)
out.nl()
try:
self.action()
out.nl()
return True
except GitError:
out.nl()
return False
class _SyncColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'reposync')
self.project = self.printer('header', attr='bold')
self.info = self.printer('info')
self.fail = self.printer('fail', fg='red')
class SyncBuffer(object):
def __init__(self, config, detach_head=False):
self._messages = []
self._failures = []
self._later_queue1 = []
self._later_queue2 = []
self.out = _SyncColoring(config)
self.out.redirect(sys.stderr)
self.detach_head = detach_head
self.clean = True
def info(self, project, fmt, *args):
self._messages.append(_InfoMessage(project, fmt % args))
def fail(self, project, err=None):
self._failures.append(_Failure(project, err))
self.clean = False
def later1(self, project, what):
self._later_queue1.append(_Later(project, what))
def later2(self, project, what):
self._later_queue2.append(_Later(project, what))
def Finish(self):
self._PrintMessages()
self._RunLater()
self._PrintMessages()
return self.clean
def _RunLater(self):
for q in ['_later_queue1', '_later_queue2']:
if not self._RunQueue(q):
return
def _RunQueue(self, queue):
for m in getattr(self, queue):
if not m.Run(self):
self.clean = False
return False
setattr(self, queue, [])
return True
def _PrintMessages(self):
for m in self._messages:
m.Print(self)
for m in self._failures:
m.Print(self)
self._messages = []
self._failures = []
class MetaProject(Project):
"""A special project housed under .repo.
"""
def __init__(self, manifest, name, gitdir, worktree):
Project.__init__(self,
manifest=manifest,
name=name,
gitdir=gitdir,
objdir=gitdir,
worktree=worktree,
remote=RemoteSpec('origin'),
relpath='.repo/%s' % name,
revisionExpr='refs/heads/master',
revisionId=None,
groups=None)
def PreSync(self):
if self.Exists:
cb = self.CurrentBranch
if cb:
base = self.GetBranch(cb).merge
if base:
self.revisionExpr = base
self.revisionId = None
def MetaBranchSwitch(self):
""" Prepare MetaProject for manifest branch switch
"""
# detach and delete manifest branch, allowing a new
# branch to take over
syncbuf = SyncBuffer(self.config, detach_head=True)
self.Sync_LocalHalf(syncbuf)
syncbuf.Finish()
return GitCommand(self,
['update-ref', '-d', 'refs/heads/default'],
capture_stdout=True,
capture_stderr=True).Wait() == 0
@property
def LastFetch(self):
try:
fh = os.path.join(self.gitdir, 'FETCH_HEAD')
return os.path.getmtime(fh)
except OSError:
return 0
@property
def HasChanges(self):
"""Has the remote received new commits not yet checked out?
"""
if not self.remote or not self.revisionExpr:
return False
all_refs = self.bare_ref.all
revid = self.GetRevisionId(all_refs)
head = self.work_git.GetHead()
if head.startswith(R_HEADS):
try:
head = all_refs[head]
except KeyError:
head = None
if revid == head:
return False
elif self._revlist(not_rev(HEAD), revid):
return True
return False
|
eric100lin/Qt-4.8.6
|
refs/heads/master
|
src/3rdparty/webkit/Source/ThirdParty/gyp/test/actions/src/subdir2/make-file.py
|
973
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = "Hello from make-file.py\n"
open(sys.argv[1], 'wb').write(contents)
|
weepingdog/byteofpython
|
refs/heads/master
|
src/using_list.py
|
2
|
#!/usr/bin/env python
# Filename: using_list.py
# This is my shopping list
shoplist=['apple','mango','carrot','banana']
print 'I have',len(shoplist),'items to purchase.'
print 'These items are:', # Notice the comma at end of the line
for item in shoplist:
print item,
print '\nI also have to buy rice.'
shoplist.append('rice')
print 'My shopping list is now',shoplist
print 'I will sort my list now'
shoplist.sort()
print 'Sorted shopping list is',shoplist
print 'The first item I will buy is',shoplist[0]
olditem=shoplist[0]
del shoplist[0]
print 'I bought the',olditem
print 'My shopping list is now',shoplist
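# Further experiments (not part of the original lesson): the 'in'
# operator tests membership and len() gives the current size.
#   print 'rice' in shoplist   # True, we appended it earlier
#   print len(shoplist)        # 4 items remain after the purchase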
|
zzzombat/lucid-python-django
|
refs/heads/master
|
tests/regressiontests/forms/tests/models.py
|
50
|
# -*- coding: utf-8 -*-
import datetime
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import Form, ModelForm, FileField, ModelChoiceField
from django.test import TestCase
from regressiontests.forms.models import ChoiceModel, ChoiceOptionModel, ChoiceFieldModel, FileModel, Group, BoundaryModel, Defaults
class ChoiceFieldForm(ModelForm):
class Meta:
model = ChoiceFieldModel
class FileForm(Form):
file1 = FileField()
class TestTicket12510(TestCase):
''' It is not necessary to generate choices for ModelChoiceField (regression test for #12510). '''
def setUp(self):
self.groups = [Group.objects.create(name=name) for name in 'abc']
def test_choices_not_fetched_when_not_rendering(self):
def test():
field = ModelChoiceField(Group.objects.order_by('-name'))
self.assertEqual('a', field.clean(self.groups[0].pk).name)
# only one query is required to pull the model from DB
self.assertNumQueries(1, test)
class ModelFormCallableModelDefault(TestCase):
def test_no_empty_option(self):
"If a model's ForeignKey has blank=False and a default, no empty option is created (Refs #10792)."
option = ChoiceOptionModel.objects.create(name='default')
choices = list(ChoiceFieldForm().fields['choice'].choices)
self.assertEqual(len(choices), 1)
self.assertEqual(choices[0], (option.pk, unicode(option)))
def test_callable_initial_value(self):
"The initial value for a callable default returning a queryset is the pk (refs #13769)"
obj1 = ChoiceOptionModel.objects.create(id=1, name='default')
obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
self.assertEqual(ChoiceFieldForm().as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice" value="1" id="initial-id_choice" /></p>
<p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice_int" value="1" id="initial-id_choice_int" /></p>
<p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice" value="1" id="initial-id_multi_choice_0" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>
<p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice_int" value="1" id="initial-id_multi_choice_int_0" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>""")
def test_initial_instance_value(self):
"Initial instances for model fields may also be instances (refs #7287)"
obj1 = ChoiceOptionModel.objects.create(id=1, name='default')
obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
self.assertEqual(ChoiceFieldForm(initial={
'choice': obj2,
'choice_int': obj2,
'multi_choice': [obj2,obj3],
'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
}).as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice" value="2" id="initial-id_choice" /></p>
<p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice_int" value="2" id="initial-id_choice_int" /></p>
<p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice" value="2" id="initial-id_multi_choice_0" />
<input type="hidden" name="initial-multi_choice" value="3" id="initial-id_multi_choice_1" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>
<p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice_int" value="2" id="initial-id_multi_choice_int_0" />
<input type="hidden" name="initial-multi_choice_int" value="3" id="initial-id_multi_choice_int_1" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>""")
class FormsModelTestCase(TestCase):
def test_unicode_filename(self):
# FileModel with unicode filename and data #########################
f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह')}, auto_id=False)
self.assertTrue(f.is_valid())
self.assertTrue('file1' in f.cleaned_data)
m = FileModel.objects.create(file=f.cleaned_data['file1'])
self.assertEqual(m.file.name, u'tests/\u6211\u96bb\u6c23\u588a\u8239\u88dd\u6eff\u6652\u9c54.txt')
m.delete()
def test_boundary_conditions(self):
# Boundary conditions on a PositiveIntegerField #########################
class BoundaryForm(ModelForm):
class Meta:
model = BoundaryModel
f = BoundaryForm({'positive_integer': 100})
self.assertTrue(f.is_valid())
f = BoundaryForm({'positive_integer': 0})
self.assertTrue(f.is_valid())
f = BoundaryForm({'positive_integer': -100})
self.assertFalse(f.is_valid())
def test_formfield_initial(self):
# Formfield initial values ########
# If the model has default values for some fields, they are used as the formfield
# initial values.
class DefaultsForm(ModelForm):
class Meta:
model = Defaults
self.assertEqual(DefaultsForm().fields['name'].initial, u'class default value')
self.assertEqual(DefaultsForm().fields['def_date'].initial, datetime.date(1980, 1, 1))
self.assertEqual(DefaultsForm().fields['value'].initial, 42)
r1 = DefaultsForm()['callable_default'].as_widget()
r2 = DefaultsForm()['callable_default'].as_widget()
self.assertNotEqual(r1, r2)
# In a ModelForm that is passed an instance, the initial values come from the
# instance's values, not the model's defaults.
foo_instance = Defaults(name=u'instance value', def_date=datetime.date(1969, 4, 4), value=12)
instance_form = DefaultsForm(instance=foo_instance)
self.assertEqual(instance_form.initial['name'], u'instance value')
self.assertEqual(instance_form.initial['def_date'], datetime.date(1969, 4, 4))
self.assertEqual(instance_form.initial['value'], 12)
from django.forms import CharField
class ExcludingForm(ModelForm):
name = CharField(max_length=255)
class Meta:
model = Defaults
exclude = ['name', 'callable_default']
f = ExcludingForm({'name': u'Hello', 'value': 99, 'def_date': datetime.date(1999, 3, 2)})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['name'], u'Hello')
obj = f.save()
self.assertEqual(obj.name, u'class default value')
self.assertEqual(obj.value, 99)
self.assertEqual(obj.def_date, datetime.date(1999, 3, 2))
|
danbradham/mtoatools
|
refs/heads/master
|
mtoatools/packages/yaml/serializer.py
|
561
|
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
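# Typical composition (an assumption based on how PyYAML wires these
# mixins together, e.g. in yaml/dumper.py; not defined in this module):
#
#   class Dumper(Emitter, Serializer, Representer, Resolver):
#       def __init__(self, stream, **kwds):
#           Emitter.__init__(self, stream)
#           Serializer.__init__(self)
#           Representer.__init__(self)
#           Resolver.__init__(self)
#
# Serializer only drives the event stream; emit(), descend_resolver()
# and resolve() are expected to come from the Emitter and Resolver mixins.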
|
cnsoft/kbengine-cocos2dx
|
refs/heads/cocos2dx-cnsoft
|
kbe/res/scripts/common/Lib/test/test_asynchat.py
|
89
|
# test asynchat
from test import support
# If this fails, the test will be skipped.
thread = support.import_module('_thread')
import asyncore, asynchat, socket, time
import unittest
import sys
try:
import threading
except ImportError:
threading = None
HOST = support.HOST
SERVER_QUIT = b'QUIT\n'
if threading:
class echo_server(threading.Thread):
# parameter to determine the number of bytes passed back to the
# client each send
chunk_size = 1
def __init__(self, event):
threading.Thread.__init__(self)
self.event = event
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = support.bind_port(self.sock)
# This will be set if the client wants us to wait before echoing data
# back.
self.start_resend_event = None
def run(self):
self.sock.listen(1)
self.event.set()
conn, client = self.sock.accept()
self.buffer = b""
# collect data until quit message is seen
while SERVER_QUIT not in self.buffer:
data = conn.recv(1)
if not data:
break
self.buffer = self.buffer + data
# remove the SERVER_QUIT message
self.buffer = self.buffer.replace(SERVER_QUIT, b'')
if self.start_resend_event:
self.start_resend_event.wait()
# re-send entire set of collected data
try:
# this may fail on some tests, such as test_close_when_done, since
# the client closes the channel when it's done sending
while self.buffer:
n = conn.send(self.buffer[:self.chunk_size])
time.sleep(0.001)
self.buffer = self.buffer[n:]
except:
pass
conn.close()
self.sock.close()
class echo_client(asynchat.async_chat):
def __init__(self, terminator, server_port):
asynchat.async_chat.__init__(self)
self.contents = []
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((HOST, server_port))
self.set_terminator(terminator)
self.buffer = b""
def handle_connect(self):
pass
if sys.platform == 'darwin':
# select.poll returns a select.POLLHUP at the end of the tests
# on darwin, so just ignore it
def handle_expt(self):
pass
def collect_incoming_data(self, data):
self.buffer += data
def found_terminator(self):
self.contents.append(self.buffer)
self.buffer = b""
def start_echo_server():
event = threading.Event()
s = echo_server(event)
s.start()
event.wait()
event.clear()
time.sleep(0.01) # Give server time to start accepting.
return s, event
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestAsynchat(unittest.TestCase):
usepoll = False
def setUp (self):
self._threads = support.threading_setup()
def tearDown (self):
support.threading_cleanup(*self._threads)
def line_terminator_check(self, term, server_chunk):
event = threading.Event()
s = echo_server(event)
s.chunk_size = server_chunk
s.start()
event.wait()
event.clear()
time.sleep(0.01) # Give server time to start accepting.
c = echo_client(term, s.port)
c.push(b"hello ")
c.push(b"world" + term)
c.push(b"I'm not dead yet!" + term)
c.push(SERVER_QUIT)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"])
# the line terminator tests below check receiving variously-sized
# chunks back from the server in order to exercise all branches of
# async_chat.handle_read
def test_line_terminator1(self):
# test one-character terminator
for l in (1,2,3):
self.line_terminator_check(b'\n', l)
def test_line_terminator2(self):
# test two-character terminator
for l in (1,2,3):
self.line_terminator_check(b'\r\n', l)
def test_line_terminator3(self):
# test three-character terminator
for l in (1,2,3):
self.line_terminator_check(b'qqq', l)
def numeric_terminator_check(self, termlen):
# Try reading a fixed number of bytes
s, event = start_echo_server()
c = echo_client(termlen, s.port)
data = b"hello world, I'm not dead yet!\n"
c.push(data)
c.push(SERVER_QUIT)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents, [data[:termlen]])
def test_numeric_terminator1(self):
# check that ints & longs both work (since type is
# explicitly checked in async_chat.handle_read)
self.numeric_terminator_check(1)
def test_numeric_terminator2(self):
self.numeric_terminator_check(6)
def test_none_terminator(self):
# Try reading a fixed number of bytes
s, event = start_echo_server()
c = echo_client(None, s.port)
data = b"hello world, I'm not dead yet!\n"
c.push(data)
c.push(SERVER_QUIT)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents, [])
self.assertEqual(c.buffer, data)
def test_simple_producer(self):
s, event = start_echo_server()
c = echo_client(b'\n', s.port)
data = b"hello world\nI'm not dead yet!\n"
p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8)
c.push_with_producer(p)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"])
def test_string_producer(self):
s, event = start_echo_server()
c = echo_client(b'\n', s.port)
data = b"hello world\nI'm not dead yet!\n"
c.push_with_producer(data+SERVER_QUIT)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents, [b"hello world", b"I'm not dead yet!"])
def test_empty_line(self):
# checks that empty lines are handled correctly
s, event = start_echo_server()
c = echo_client(b'\n', s.port)
c.push(b"hello world\n\nI'm not dead yet!\n")
c.push(SERVER_QUIT)
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
s.join()
self.assertEqual(c.contents,
[b"hello world", b"", b"I'm not dead yet!"])
def test_close_when_done(self):
s, event = start_echo_server()
s.start_resend_event = threading.Event()
c = echo_client(b'\n', s.port)
c.push(b"hello world\nI'm not dead yet!\n")
c.push(SERVER_QUIT)
c.close_when_done()
asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
# Only allow the server to start echoing data back to the client after
# the client has closed its connection. This prevents a race condition
# where the server echoes all of its data before we can check that it
# got any down below.
s.start_resend_event.set()
s.join()
self.assertEqual(c.contents, [])
# the server might have been able to send a byte or two back, but this
# at least checks that it received something and didn't just fail
# (which could still result in the client not having received anything)
self.assertGreater(len(s.buffer), 0)
class TestAsynchat_WithPoll(TestAsynchat):
usepoll = True
class TestHelperFunctions(unittest.TestCase):
def test_find_prefix_at_end(self):
self.assertEqual(asynchat.find_prefix_at_end("qwerty\r", "\r\n"), 1)
self.assertEqual(asynchat.find_prefix_at_end("qwertydkjf", "\r\n"), 0)
class TestFifo(unittest.TestCase):
def test_basic(self):
f = asynchat.fifo()
f.push(7)
f.push(b'a')
self.assertEqual(len(f), 2)
self.assertEqual(f.first(), 7)
self.assertEqual(f.pop(), (1, 7))
self.assertEqual(len(f), 1)
self.assertEqual(f.first(), b'a')
self.assertEqual(f.is_empty(), False)
self.assertEqual(f.pop(), (1, b'a'))
self.assertEqual(len(f), 0)
self.assertEqual(f.is_empty(), True)
self.assertEqual(f.pop(), (0, None))
def test_given_list(self):
f = asynchat.fifo([b'x', 17, 3])
self.assertEqual(len(f), 3)
self.assertEqual(f.pop(), (1, b'x'))
self.assertEqual(f.pop(), (1, 17))
self.assertEqual(f.pop(), (1, 3))
self.assertEqual(f.pop(), (0, None))
def test_main(verbose=None):
support.run_unittest(TestAsynchat, TestAsynchat_WithPoll,
TestHelperFunctions, TestFifo)
if __name__ == "__main__":
test_main(verbose=True)
|
openstack/keystone
|
refs/heads/master
|
keystone/common/sql/expand_repo/versions/077_placeholder.py
|
30
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This is a placeholder for Ussuri backports. Do not use this number for new
# Victoria work. New Victoria work starts after all the placeholders.
def upgrade(migrate_engine):
pass
|
SatoshiNXSimudrone/sl4a-damon-clone
|
refs/heads/master
|
python/gdata/src/gdata/Crypto/test.py
|
225
|
#
# Test script for the Python Cryptography Toolkit.
#
__revision__ = "$Id: test.py,v 1.7 2002/07/11 14:31:19 akuchling Exp $"
import os, sys
# Add the build directory to the front of sys.path
from distutils.util import get_platform
s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
s = os.path.join(os.getcwd(), s)
sys.path.insert(0, s)
s = os.path.join(os.getcwd(), 'test')
sys.path.insert(0, s)
from Crypto.Util import test
args = sys.argv[1:]
quiet = "--quiet" in args
if quiet: args.remove('--quiet')
if not quiet:
print '\nStream Ciphers:'
print '==============='
if args: test.TestStreamModules(args, verbose= not quiet)
else: test.TestStreamModules(verbose= not quiet)
if not quiet:
print '\nBlock Ciphers:'
print '=============='
if args: test.TestBlockModules(args, verbose= not quiet)
else: test.TestBlockModules(verbose= not quiet)
|
Balaji2198/coala
|
refs/heads/master
|
tests/test_bears/ErrorTestBear.py
|
26
|
from coalib.bearlib.abstractions.Linter import linter
@linter(executable='I_do_not_exist',
output_format='regex',
output_regex=r'.+:(?P<line>\d+):(?P<message>.*)')
class ErrorTestBear:
"""
Causes error when run due to missing executable.
"""
@staticmethod
def create_arguments(filename, file, config_file):
return ()
|
tchellomello/home-assistant
|
refs/heads/dev
|
homeassistant/components/bmw_connected_drive/device_tracker.py
|
14
|
"""Device tracker for BMW Connected Drive vehicles."""
import logging
from homeassistant.util import slugify
from . import DOMAIN as BMW_DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_scanner(hass, config, see, discovery_info=None):
"""Set up the BMW tracker."""
accounts = hass.data[BMW_DOMAIN]
_LOGGER.debug("Found BMW accounts: %s", ", ".join([a.name for a in accounts]))
for account in accounts:
for vehicle in account.account.vehicles:
tracker = BMWDeviceTracker(see, vehicle)
account.add_update_listener(tracker.update)
tracker.update()
return True
class BMWDeviceTracker:
"""BMW Connected Drive device tracker."""
def __init__(self, see, vehicle):
"""Initialize the Tracker."""
self._see = see
self.vehicle = vehicle
def update(self) -> None:
"""Update the device info.
Only update the state in Home Assistant if tracking in
the car is enabled.
"""
dev_id = slugify(self.vehicle.name)
if not self.vehicle.state.is_vehicle_tracking_enabled:
_LOGGER.debug("Tracking is disabled for vehicle %s", dev_id)
return
_LOGGER.debug("Updating %s", dev_id)
attrs = {"vin": self.vehicle.vin}
self._see(
dev_id=dev_id,
host_name=self.vehicle.name,
gps=self.vehicle.state.gps_position,
attributes=attrs,
icon="mdi:car",
)
|
kvalle/diy-lang
|
refs/heads/master
|
tests/test_2_evaluating_simple_expressions.py
|
3
|
# -*- coding: utf-8 -*-
from nose.tools import assert_equals, assert_raises
from diylang.types import DiyLangError
from diylang.types import Environment
from diylang.evaluator import evaluate
from diylang.parser import parse
"""
We will start by implementing evaluation of simple expressions.
"""
def test_evaluating_boolean():
"""TEST 2.1: Booleans should evaluate to themselves."""
assert_equals(True, evaluate(True, Environment()))
assert_equals(False, evaluate(False, Environment()))
def test_evaluating_integer():
"""TEST 2.2: ...and so should integers."""
assert_equals(42, evaluate(42, Environment()))
def test_evaluating_quote():
"""TEST 2.3: When a call is done to the `quote` form, the argument should
be returned without being evaluated.
(quote foo) -> foo
"""
assert_equals("foo", evaluate(["quote", "foo"], Environment()))
assert_equals([1, 2, False],
evaluate(["quote", [1, 2, False]], Environment()))
assert_equals([], evaluate(["quote", []], Environment()))
def test_evaluating_atom_function():
"""TEST 2.4: The `atom` form is used to determine whether an expression is
an atom.
Atoms are expressions that are not lists, i.e. integers, booleans or
symbols. Remember that the argument to `atom` must be evaluated before the
check is done.
"""
assert_equals(True, evaluate(["atom", True], Environment()))
assert_equals(True, evaluate(["atom", False], Environment()))
assert_equals(True, evaluate(["atom", 42], Environment()))
assert_equals(True, evaluate(["atom", ["quote", "foo"]], Environment()))
assert_equals(False, evaluate(["atom", ["quote", [1, 2]]], Environment()))
def test_evaluating_eq_function():
"""TEST 2.5: The `eq` form is used to check whether two expressions are
the same atom."""
assert_equals(True, evaluate(["eq", 1, 1], Environment()))
assert_equals(False, evaluate(["eq", 1, 2], Environment()))
# From this point, the ASTs might sometimes be too long or cumbersome to
# write down explicitly, and we'll use `parse` to make them for us.
# Remember, if you need to have a look at exactly what is passed to
# `evaluate`, just add a print statement in the test (or in `evaluate`).
assert_equals(True, evaluate(parse("(eq 'foo 'foo)"), Environment()))
assert_equals(False, evaluate(parse("(eq 'foo 'bar)"), Environment()))
# Lists are never equal, because lists are not atoms
assert_equals(
False, evaluate(parse("(eq '(1 2 3) '(1 2 3))"), Environment()))
def test_basic_math_operators():
"""TEST 2.6: To be able to do anything useful, we need some basic math
operators.
Since we only operate with integers, `/` must represent integer division.
`mod` is the modulo operator.
"""
assert_equals(4, evaluate(["+", 2, 2], Environment()))
assert_equals(1, evaluate(["-", 2, 1], Environment()))
assert_equals(3, evaluate(["/", 6, 2], Environment()))
assert_equals(3, evaluate(["/", 7, 2], Environment()))
assert_equals(6, evaluate(["*", 2, 3], Environment()))
assert_equals(1, evaluate(["mod", 7, 2], Environment()))
assert_equals(True, evaluate([">", 7, 2], Environment()))
assert_equals(False, evaluate([">", 2, 7], Environment()))
assert_equals(False, evaluate([">", 7, 7], Environment()))
def test_math_operators_only_work_on_numbers():
"""TEST 2.7: The math functions should only allow numbers as arguments."""
with assert_raises(DiyLangError):
evaluate(parse("(+ 1 'foo)"), Environment())
with assert_raises(DiyLangError):
evaluate(parse("(- 1 'foo)"), Environment())
with assert_raises(DiyLangError):
evaluate(parse("(/ 1 'foo)"), Environment())
with assert_raises(DiyLangError):
evaluate(parse("(mod 1 'foo)"), Environment())
|
thurt/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/lib/ntsecuritycon.py
|
21
|
# Hacked from winnt.h
DELETE = (65536)
READ_CONTROL = (131072)
WRITE_DAC = (262144)
WRITE_OWNER = (524288)
SYNCHRONIZE = (1048576)
STANDARD_RIGHTS_REQUIRED = (983040)
STANDARD_RIGHTS_READ = (READ_CONTROL)
STANDARD_RIGHTS_WRITE = (READ_CONTROL)
STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
STANDARD_RIGHTS_ALL = (2031616)
SPECIFIC_RIGHTS_ALL = (65535)
ACCESS_SYSTEM_SECURITY = (16777216)
MAXIMUM_ALLOWED = (33554432)
GENERIC_READ = (-2147483648)
GENERIC_WRITE = (1073741824)
GENERIC_EXECUTE = (536870912)
GENERIC_ALL = (268435456)
# file security permissions
FILE_READ_DATA= ( 1 )
FILE_LIST_DIRECTORY= ( 1 )
FILE_WRITE_DATA= ( 2 )
FILE_ADD_FILE= ( 2 )
FILE_APPEND_DATA= ( 4 )
FILE_ADD_SUBDIRECTORY= ( 4 )
FILE_CREATE_PIPE_INSTANCE= ( 4 )
FILE_READ_EA= ( 8 )
FILE_WRITE_EA= ( 16 )
FILE_EXECUTE= ( 32 )
FILE_TRAVERSE= ( 32 )
FILE_DELETE_CHILD= ( 64 )
FILE_READ_ATTRIBUTES= ( 128 )
FILE_WRITE_ATTRIBUTES= ( 256 )
FILE_ALL_ACCESS= (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 1023)
FILE_GENERIC_READ= (STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE)
FILE_GENERIC_WRITE= (STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA | SYNCHRONIZE)
FILE_GENERIC_EXECUTE= (STANDARD_RIGHTS_EXECUTE | FILE_READ_ATTRIBUTES | FILE_EXECUTE | SYNCHRONIZE)
SECURITY_NULL_SID_AUTHORITY = (0,0,0,0,0,0)
SECURITY_WORLD_SID_AUTHORITY = (0,0,0,0,0,1)
SECURITY_LOCAL_SID_AUTHORITY = (0,0,0,0,0,2)
SECURITY_CREATOR_SID_AUTHORITY = (0,0,0,0,0,3)
SECURITY_NON_UNIQUE_AUTHORITY = (0,0,0,0,0,4)
SECURITY_RESOURCE_MANAGER_AUTHORITY = (0,0,0,0,0,9)
SECURITY_NULL_RID = 0
SECURITY_WORLD_RID = 0
SECURITY_LOCAL_RID = 0X00000000
SECURITY_CREATOR_OWNER_RID = 0
SECURITY_CREATOR_GROUP_RID = 1
SECURITY_CREATOR_OWNER_SERVER_RID = 2
SECURITY_CREATOR_GROUP_SERVER_RID = 3
SECURITY_CREATOR_OWNER_RIGHTS_RID = 4
# NT well-known SIDs
SECURITY_NT_AUTHORITY = (0,0,0,0,0,5)
SECURITY_DIALUP_RID = 1
SECURITY_NETWORK_RID = 2
SECURITY_BATCH_RID = 3
SECURITY_INTERACTIVE_RID = 4
SECURITY_SERVICE_RID = 6
SECURITY_ANONYMOUS_LOGON_RID = 7
SECURITY_PROXY_RID = 8
SECURITY_SERVER_LOGON_RID = 9
SECURITY_LOGON_IDS_RID = 5
SECURITY_LOGON_IDS_RID_COUNT = 3
SECURITY_LOCAL_SYSTEM_RID = 18
SECURITY_NT_NON_UNIQUE = 21
SECURITY_BUILTIN_DOMAIN_RID = 32
# well-known domain relative sub-authority values (RIDs)...
DOMAIN_USER_RID_ADMIN = 500
DOMAIN_USER_RID_GUEST = 501
DOMAIN_USER_RID_KRBTGT = 502
DOMAIN_USER_RID_MAX = 999
# well-known groups ...
DOMAIN_GROUP_RID_ADMINS = 512
DOMAIN_GROUP_RID_USERS = 513
DOMAIN_GROUP_RID_GUESTS = 514
DOMAIN_GROUP_RID_COMPUTERS = 515
DOMAIN_GROUP_RID_CONTROLLERS = 516
DOMAIN_GROUP_RID_CERT_ADMINS = 517
DOMAIN_GROUP_RID_SCHEMA_ADMINS = 518
DOMAIN_GROUP_RID_ENTERPRISE_ADMINS = 519
DOMAIN_GROUP_RID_POLICY_ADMINS = 520
DOMAIN_GROUP_RID_READONLY_CONTROLLERS = 521
# well-known aliases ...
DOMAIN_ALIAS_RID_ADMINS = 544
DOMAIN_ALIAS_RID_USERS = 545
DOMAIN_ALIAS_RID_GUESTS = 546
DOMAIN_ALIAS_RID_POWER_USERS = 547
DOMAIN_ALIAS_RID_ACCOUNT_OPS = 548
DOMAIN_ALIAS_RID_SYSTEM_OPS = 549
DOMAIN_ALIAS_RID_PRINT_OPS = 550
DOMAIN_ALIAS_RID_BACKUP_OPS = 551
DOMAIN_ALIAS_RID_REPLICATOR = 552
DOMAIN_ALIAS_RID_RAS_SERVERS = 553
DOMAIN_ALIAS_RID_PREW2KCOMPACCESS = 554
DOMAIN_ALIAS_RID_REMOTE_DESKTOP_USERS = 555
DOMAIN_ALIAS_RID_NETWORK_CONFIGURATION_OPS = 556
DOMAIN_ALIAS_RID_INCOMING_FOREST_TRUST_BUILDERS = 557
DOMAIN_ALIAS_RID_MONITORING_USERS = 558
DOMAIN_ALIAS_RID_LOGGING_USERS = 559
DOMAIN_ALIAS_RID_AUTHORIZATIONACCESS = 560
DOMAIN_ALIAS_RID_TS_LICENSE_SERVERS = 561
DOMAIN_ALIAS_RID_DCOM_USERS = 562
DOMAIN_ALIAS_RID_IUSERS = 568
DOMAIN_ALIAS_RID_CRYPTO_OPERATORS = 569
DOMAIN_ALIAS_RID_CACHEABLE_PRINCIPALS_GROUP = 571
DOMAIN_ALIAS_RID_NON_CACHEABLE_PRINCIPALS_GROUP = 572
DOMAIN_ALIAS_RID_EVENT_LOG_READERS_GROUP = 573
SECURITY_MANDATORY_LABEL_AUTHORITY = (0,0,0,0,0,16)
SECURITY_MANDATORY_UNTRUSTED_RID = 0x00000000
SECURITY_MANDATORY_LOW_RID = 0x00001000
SECURITY_MANDATORY_MEDIUM_RID = 0x00002000
SECURITY_MANDATORY_HIGH_RID = 0x00003000
SECURITY_MANDATORY_SYSTEM_RID = 0x00004000
SECURITY_MANDATORY_PROTECTED_PROCESS_RID = 0x00005000
SECURITY_MANDATORY_MAXIMUM_USER_RID = SECURITY_MANDATORY_SYSTEM_RID
SYSTEM_LUID = (999, 0)
ANONYMOUS_LOGON_LUID = (998, 0)
LOCALSERVICE_LUID = (997, 0)
NETWORKSERVICE_LUID = (996, 0)
IUSER_LUID = (995, 0)
# Group attributes
SE_GROUP_MANDATORY = 1
SE_GROUP_ENABLED_BY_DEFAULT = 2
SE_GROUP_ENABLED = 4
SE_GROUP_OWNER = 8
SE_GROUP_USE_FOR_DENY_ONLY = 16
SE_GROUP_INTEGRITY = 32
SE_GROUP_INTEGRITY_ENABLED = 64
SE_GROUP_RESOURCE = 536870912
SE_GROUP_LOGON_ID = -1073741824
# User attributes
# (None yet defined.)
# ACE types
ACCESS_MIN_MS_ACE_TYPE = (0)
ACCESS_ALLOWED_ACE_TYPE = (0)
ACCESS_DENIED_ACE_TYPE = (1)
SYSTEM_AUDIT_ACE_TYPE = (2)
SYSTEM_ALARM_ACE_TYPE = (3)
ACCESS_MAX_MS_V2_ACE_TYPE = (3)
ACCESS_ALLOWED_COMPOUND_ACE_TYPE = (4)
ACCESS_MAX_MS_V3_ACE_TYPE = (4)
ACCESS_MIN_MS_OBJECT_ACE_TYPE = (5)
ACCESS_ALLOWED_OBJECT_ACE_TYPE = (5)
ACCESS_DENIED_OBJECT_ACE_TYPE = (6)
SYSTEM_AUDIT_OBJECT_ACE_TYPE = (7)
SYSTEM_ALARM_OBJECT_ACE_TYPE = (8)
ACCESS_MAX_MS_OBJECT_ACE_TYPE = (8)
ACCESS_MAX_MS_V4_ACE_TYPE = (8)
ACCESS_MAX_MS_ACE_TYPE = (8)
ACCESS_ALLOWED_CALLBACK_ACE_TYPE = 9
ACCESS_DENIED_CALLBACK_ACE_TYPE = 10
ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE = 11
ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE = 12
SYSTEM_AUDIT_CALLBACK_ACE_TYPE = 13
SYSTEM_ALARM_CALLBACK_ACE_TYPE = 14
SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE = 15
SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE = 16
SYSTEM_MANDATORY_LABEL_ACE_TYPE = 17
ACCESS_MAX_MS_V5_ACE_TYPE = 17
# The following are the inherit flags that go into the AceFlags field
# of an Ace header.
OBJECT_INHERIT_ACE = 1
CONTAINER_INHERIT_ACE = 2
NO_PROPAGATE_INHERIT_ACE = 4
INHERIT_ONLY_ACE = 8
VALID_INHERIT_FLAGS = 15
SUCCESSFUL_ACCESS_ACE_FLAG = 64
FAILED_ACCESS_ACE_FLAG = 128
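# Illustrative combination (not from winnt.h): an ACE meant to apply to
# both child files and child folders carries
# OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE (i.e. 3) in its AceFlags.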
SE_OWNER_DEFAULTED = 1
SE_GROUP_DEFAULTED = 2
SE_DACL_PRESENT = 4
SE_DACL_DEFAULTED = 8
SE_SACL_PRESENT = 16
SE_SACL_DEFAULTED = 32
SE_SELF_RELATIVE = 32768
SE_PRIVILEGE_ENABLED_BY_DEFAULT = 1
SE_PRIVILEGE_ENABLED = 2
SE_PRIVILEGE_USED_FOR_ACCESS = -2147483648
PRIVILEGE_SET_ALL_NECESSARY = 1
# NT Defined Privileges
SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege"
SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege"
SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege"
SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege"
SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege"
SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege"
SE_TCB_NAME = "SeTcbPrivilege"
SE_SECURITY_NAME = "SeSecurityPrivilege"
SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege"
SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege"
SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege"
SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege"
SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege"
SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege"
SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege"
SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege"
SE_BACKUP_NAME = "SeBackupPrivilege"
SE_RESTORE_NAME = "SeRestorePrivilege"
SE_SHUTDOWN_NAME = "SeShutdownPrivilege"
SE_DEBUG_NAME = "SeDebugPrivilege"
SE_AUDIT_NAME = "SeAuditPrivilege"
SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege"
SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege"
SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege"
# Enum SECURITY_IMPERSONATION_LEVEL:
SecurityAnonymous = 0
SecurityIdentification = 1
SecurityImpersonation = 2
SecurityDelegation = 3
SECURITY_MAX_IMPERSONATION_LEVEL = SecurityDelegation
DEFAULT_IMPERSONATION_LEVEL = SecurityImpersonation
TOKEN_ASSIGN_PRIMARY = 1
TOKEN_DUPLICATE = 2
TOKEN_IMPERSONATE = 4
TOKEN_QUERY = 8
TOKEN_QUERY_SOURCE = 16
TOKEN_ADJUST_PRIVILEGES = 32
TOKEN_ADJUST_GROUPS = 64
TOKEN_ADJUST_DEFAULT = 128
TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED |\
TOKEN_ASSIGN_PRIMARY |\
TOKEN_DUPLICATE |\
TOKEN_IMPERSONATE |\
TOKEN_QUERY |\
TOKEN_QUERY_SOURCE |\
TOKEN_ADJUST_PRIVILEGES |\
TOKEN_ADJUST_GROUPS |\
TOKEN_ADJUST_DEFAULT)
TOKEN_READ = (STANDARD_RIGHTS_READ |\
TOKEN_QUERY)
TOKEN_WRITE = (STANDARD_RIGHTS_WRITE |\
TOKEN_ADJUST_PRIVILEGES |\
TOKEN_ADJUST_GROUPS |\
TOKEN_ADJUST_DEFAULT)
TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE)
SidTypeUser = 1
SidTypeGroup = 2
SidTypeDomain =3
SidTypeAlias = 4
SidTypeWellKnownGroup = 5
SidTypeDeletedAccount = 6
SidTypeInvalid = 7
SidTypeUnknown = 8
SidTypeComputer = 9
SidTypeLabel = 10
# Token types
TokenPrimary = 1
TokenImpersonation = 2
# TOKEN_INFORMATION_CLASS, used with Get/SetTokenInformation
TokenUser = 1
TokenGroups = 2
TokenPrivileges = 3
TokenOwner = 4
TokenPrimaryGroup = 5
TokenDefaultDacl = 6
TokenSource = 7
TokenType = 8
TokenImpersonationLevel = 9
TokenStatistics = 10
TokenRestrictedSids = 11
TokenSessionId = 12
TokenGroupsAndPrivileges = 13
TokenSessionReference = 14
TokenSandBoxInert = 15
TokenAuditPolicy = 16
TokenOrigin = 17
TokenElevationType = 18
TokenLinkedToken = 19
TokenElevation = 20
TokenHasRestrictions = 21
TokenAccessInformation = 22
TokenVirtualizationAllowed = 23
TokenVirtualizationEnabled = 24
TokenIntegrityLevel = 25
TokenUIAccess = 26
TokenMandatoryPolicy = 27
TokenLogonSid = 28
# DirectoryService related constants.
# Generated by h2py from NtDsAPI.h
DS_BEHAVIOR_WIN2000 = 0
DS_BEHAVIOR_WIN2003_WITH_MIXED_DOMAINS = 1
DS_BEHAVIOR_WIN2003 = 2
DS_SYNCED_EVENT_NAME = "NTDSInitialSyncsCompleted"
ACTRL_DS_OPEN = 0x00000000
ACTRL_DS_CREATE_CHILD = 0x00000001
ACTRL_DS_DELETE_CHILD = 0x00000002
ACTRL_DS_LIST = 0x00000004
ACTRL_DS_SELF = 0x00000008
ACTRL_DS_READ_PROP = 0x00000010
ACTRL_DS_WRITE_PROP = 0x00000020
ACTRL_DS_DELETE_TREE = 0x00000040
ACTRL_DS_LIST_OBJECT = 0x00000080
ACTRL_DS_CONTROL_ACCESS = 0x00000100
NTDSAPI_BIND_ALLOW_DELEGATION = (0x00000001)
DS_REPSYNC_ASYNCHRONOUS_OPERATION = 0x00000001
DS_REPSYNC_WRITEABLE = 0x00000002
DS_REPSYNC_PERIODIC = 0x00000004
DS_REPSYNC_INTERSITE_MESSAGING = 0x00000008
DS_REPSYNC_ALL_SOURCES = 0x00000010
DS_REPSYNC_FULL = 0x00000020
DS_REPSYNC_URGENT = 0x00000040
DS_REPSYNC_NO_DISCARD = 0x00000080
DS_REPSYNC_FORCE = 0x00000100
DS_REPSYNC_ADD_REFERENCE = 0x00000200
DS_REPSYNC_NEVER_COMPLETED = 0x00000400
DS_REPSYNC_TWO_WAY = 0x00000800
DS_REPSYNC_NEVER_NOTIFY = 0x00001000
DS_REPSYNC_INITIAL = 0x00002000
DS_REPSYNC_USE_COMPRESSION = 0x00004000
DS_REPSYNC_ABANDONED = 0x00008000
DS_REPSYNC_INITIAL_IN_PROGRESS = 0x00010000
DS_REPSYNC_PARTIAL_ATTRIBUTE_SET = 0x00020000
DS_REPSYNC_REQUEUE = 0x00040000
DS_REPSYNC_NOTIFICATION = 0x00080000
DS_REPSYNC_ASYNCHRONOUS_REPLICA = 0x00100000
DS_REPSYNC_CRITICAL = 0x00200000
DS_REPSYNC_FULL_IN_PROGRESS = 0x00400000
DS_REPSYNC_PREEMPTED = 0x00800000
DS_REPADD_ASYNCHRONOUS_OPERATION = 0x00000001
DS_REPADD_WRITEABLE = 0x00000002
DS_REPADD_INITIAL = 0x00000004
DS_REPADD_PERIODIC = 0x00000008
DS_REPADD_INTERSITE_MESSAGING = 0x00000010
DS_REPADD_ASYNCHRONOUS_REPLICA = 0x00000020
DS_REPADD_DISABLE_NOTIFICATION = 0x00000040
DS_REPADD_DISABLE_PERIODIC = 0x00000080
DS_REPADD_USE_COMPRESSION = 0x00000100
DS_REPADD_NEVER_NOTIFY = 0x00000200
DS_REPADD_TWO_WAY = 0x00000400
DS_REPADD_CRITICAL = 0x00000800
DS_REPDEL_ASYNCHRONOUS_OPERATION = 0x00000001
DS_REPDEL_WRITEABLE = 0x00000002
DS_REPDEL_INTERSITE_MESSAGING = 0x00000004
DS_REPDEL_IGNORE_ERRORS = 0x00000008
DS_REPDEL_LOCAL_ONLY = 0x00000010
DS_REPDEL_NO_SOURCE = 0x00000020
DS_REPDEL_REF_OK = 0x00000040
DS_REPMOD_ASYNCHRONOUS_OPERATION = 0x00000001
DS_REPMOD_WRITEABLE = 0x00000002
DS_REPMOD_UPDATE_FLAGS = 0x00000001
DS_REPMOD_UPDATE_ADDRESS = 0x00000002
DS_REPMOD_UPDATE_SCHEDULE = 0x00000004
DS_REPMOD_UPDATE_RESULT = 0x00000008
DS_REPMOD_UPDATE_TRANSPORT = 0x00000010
DS_REPUPD_ASYNCHRONOUS_OPERATION = 0x00000001
DS_REPUPD_WRITEABLE = 0x00000002
DS_REPUPD_ADD_REFERENCE = 0x00000004
DS_REPUPD_DELETE_REFERENCE = 0x00000008
DS_INSTANCETYPE_IS_NC_HEAD = 0x00000001
DS_INSTANCETYPE_NC_IS_WRITEABLE = 0x00000004
DS_INSTANCETYPE_NC_COMING = 0x00000010
DS_INSTANCETYPE_NC_GOING = 0x00000020
NTDSDSA_OPT_IS_GC = ( 1 << 0 )
NTDSDSA_OPT_DISABLE_INBOUND_REPL = ( 1 << 1 )
NTDSDSA_OPT_DISABLE_OUTBOUND_REPL = ( 1 << 2 )
NTDSDSA_OPT_DISABLE_NTDSCONN_XLATE = ( 1 << 3 )
NTDSCONN_OPT_IS_GENERATED = ( 1 << 0 )
NTDSCONN_OPT_TWOWAY_SYNC = ( 1 << 1 )
NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT = (1 << 2 )
NTDSCONN_OPT_USE_NOTIFY = (1 << 3)
NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION = (1 << 4)
NTDSCONN_OPT_USER_OWNED_SCHEDULE = (1 << 5)
NTDSCONN_KCC_NO_REASON = ( 0 )
NTDSCONN_KCC_GC_TOPOLOGY = ( 1 << 0 )
NTDSCONN_KCC_RING_TOPOLOGY = ( 1 << 1 )
NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY = ( 1 << 2 )
NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY = ( 1 << 3 )
NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY = ( 1 << 4 )
NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY = (1 << 5)
NTDSCONN_KCC_INTERSITE_TOPOLOGY = (1 << 6)
NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY = (1 << 7)
NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY = (1 << 8)
NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY = (1 << 9)
FRSCONN_PRIORITY_MASK = 0x70000000
FRSCONN_MAX_PRIORITY = 0x8
NTDSCONN_OPT_IGNORE_SCHEDULE_MASK = (-2147483648)
NTDSSETTINGS_OPT_IS_AUTO_TOPOLOGY_DISABLED = ( 1 << 0 )
NTDSSETTINGS_OPT_IS_TOPL_CLEANUP_DISABLED = ( 1 << 1 )
NTDSSETTINGS_OPT_IS_TOPL_MIN_HOPS_DISABLED = ( 1 << 2 )
NTDSSETTINGS_OPT_IS_TOPL_DETECT_STALE_DISABLED = ( 1 << 3 )
NTDSSETTINGS_OPT_IS_INTER_SITE_AUTO_TOPOLOGY_DISABLED = ( 1 << 4 )
NTDSSETTINGS_OPT_IS_GROUP_CACHING_ENABLED = ( 1 << 5 )
NTDSSETTINGS_OPT_FORCE_KCC_WHISTLER_BEHAVIOR = ( 1 << 6 )
NTDSSETTINGS_OPT_FORCE_KCC_W2K_ELECTION = ( 1 << 7 )
NTDSSETTINGS_OPT_IS_RAND_BH_SELECTION_DISABLED = ( 1 << 8 )
NTDSSETTINGS_OPT_IS_SCHEDULE_HASHING_ENABLED = ( 1 << 9 )
NTDSSETTINGS_OPT_IS_REDUNDANT_SERVER_TOPOLOGY_ENABLED = ( 1 << 10 )
NTDSSETTINGS_DEFAULT_SERVER_REDUNDANCY = 2
NTDSTRANSPORT_OPT_IGNORE_SCHEDULES = ( 1 << 0 )
NTDSTRANSPORT_OPT_BRIDGES_REQUIRED = (1 << 1 )
NTDSSITECONN_OPT_USE_NOTIFY = ( 1 << 0 )
NTDSSITECONN_OPT_TWOWAY_SYNC = ( 1 << 1 )
NTDSSITECONN_OPT_DISABLE_COMPRESSION = ( 1 << 2 )
NTDSSITELINK_OPT_USE_NOTIFY = ( 1 << 0 )
NTDSSITELINK_OPT_TWOWAY_SYNC = ( 1 << 1 )
NTDSSITELINK_OPT_DISABLE_COMPRESSION = ( 1 << 2 )
GUID_USERS_CONTAINER_A = "a9d1ca15768811d1aded00c04fd8d5cd"
GUID_COMPUTRS_CONTAINER_A = "aa312825768811d1aded00c04fd8d5cd"
GUID_SYSTEMS_CONTAINER_A = "ab1d30f3768811d1aded00c04fd8d5cd"
GUID_DOMAIN_CONTROLLERS_CONTAINER_A = "a361b2ffffd211d1aa4b00c04fd7d83a"
GUID_INFRASTRUCTURE_CONTAINER_A = "2fbac1870ade11d297c400c04fd8d5cd"
GUID_DELETED_OBJECTS_CONTAINER_A = "18e2ea80684f11d2b9aa00c04f79f805"
GUID_LOSTANDFOUND_CONTAINER_A = "ab8153b7768811d1aded00c04fd8d5cd"
GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_A = "22b70c67d56e4efb91e9300fca3dc1aa"
GUID_PROGRAM_DATA_CONTAINER_A = "09460c08ae1e4a4ea0f64aee7daa1e5a"
GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_A = "f4be92a4c777485e878e9421d53087db"
GUID_NTDS_QUOTAS_CONTAINER_A = "6227f0af1fc2410d8e3bb10615bb5b0f"
GUID_USERS_CONTAINER_BYTE = "\xa9\xd1\xca\x15\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
GUID_COMPUTRS_CONTAINER_BYTE = "\xaa\x31\x28\x25\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
GUID_SYSTEMS_CONTAINER_BYTE = "\xab\x1d\x30\xf3\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
GUID_DOMAIN_CONTROLLERS_CONTAINER_BYTE = "\xa3\x61\xb2\xff\xff\xd2\x11\xd1\xaa\x4b\x00\xc0\x4f\xd7\xd8\x3a"
GUID_INFRASTRUCTURE_CONTAINER_BYTE = "\x2f\xba\xc1\x87\x0a\xde\x11\xd2\x97\xc4\x00\xc0\x4f\xd8\xd5\xcd"
GUID_DELETED_OBJECTS_CONTAINER_BYTE = "\x18\xe2\xea\x80\x68\x4f\x11\xd2\xb9\xaa\x00\xc0\x4f\x79\xf8\x05"
GUID_LOSTANDFOUND_CONTAINER_BYTE = "\xab\x81\x53\xb7\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE = "\x22\xb7\x0c\x67\xd5\x6e\x4e\xfb\x91\xe9\x30\x0f\xca\x3d\xc1\xaa"
GUID_PROGRAM_DATA_CONTAINER_BYTE = "\x09\x46\x0c\x08\xae\x1e\x4a\x4e\xa0\xf6\x4a\xee\x7d\xaa\x1e\x5a"
GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_BYTE = "\xf4\xbe\x92\xa4\xc7\x77\x48\x5e\x87\x8e\x94\x21\xd5\x30\x87\xdb"
GUID_NTDS_QUOTAS_CONTAINER_BYTE = "\x62\x27\xf0\xaf\x1f\xc2\x41\x0d\x8e\x3b\xb1\x06\x15\xbb\x5b\x0f"
DS_REPSYNCALL_NO_OPTIONS = 0x00000000
DS_REPSYNCALL_ABORT_IF_SERVER_UNAVAILABLE = 0x00000001
DS_REPSYNCALL_SYNC_ADJACENT_SERVERS_ONLY = 0x00000002
DS_REPSYNCALL_ID_SERVERS_BY_DN = 0x00000004
DS_REPSYNCALL_DO_NOT_SYNC = 0x00000008
DS_REPSYNCALL_SKIP_INITIAL_CHECK = 0x00000010
DS_REPSYNCALL_PUSH_CHANGES_OUTWARD = 0x00000020
DS_REPSYNCALL_CROSS_SITE_BOUNDARIES = 0x00000040
DS_LIST_DSA_OBJECT_FOR_SERVER = 0
DS_LIST_DNS_HOST_NAME_FOR_SERVER = 1
DS_LIST_ACCOUNT_OBJECT_FOR_SERVER = 2
DS_ROLE_SCHEMA_OWNER = 0
DS_ROLE_DOMAIN_OWNER = 1
DS_ROLE_PDC_OWNER = 2
DS_ROLE_RID_OWNER = 3
DS_ROLE_INFRASTRUCTURE_OWNER = 4
DS_SCHEMA_GUID_NOT_FOUND = 0
DS_SCHEMA_GUID_ATTR = 1
DS_SCHEMA_GUID_ATTR_SET = 2
DS_SCHEMA_GUID_CLASS = 3
DS_SCHEMA_GUID_CONTROL_RIGHT = 4
DS_KCC_FLAG_ASYNC_OP = (1 << 0)
DS_KCC_FLAG_DAMPED = (1 << 1)
DS_EXIST_ADVISORY_MODE = (0x1)
DS_REPL_INFO_FLAG_IMPROVE_LINKED_ATTRS = (0x00000001)
DS_REPL_NBR_WRITEABLE = (0x00000010)
DS_REPL_NBR_SYNC_ON_STARTUP = (0x00000020)
DS_REPL_NBR_DO_SCHEDULED_SYNCS = (0x00000040)
DS_REPL_NBR_USE_ASYNC_INTERSITE_TRANSPORT = (0x00000080)
DS_REPL_NBR_TWO_WAY_SYNC = (0x00000200)
DS_REPL_NBR_RETURN_OBJECT_PARENTS = (0x00000800)
DS_REPL_NBR_FULL_SYNC_IN_PROGRESS = (0x00010000)
DS_REPL_NBR_FULL_SYNC_NEXT_PACKET = (0x00020000)
DS_REPL_NBR_NEVER_SYNCED = (0x00200000)
DS_REPL_NBR_PREEMPTED = (0x01000000)
DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS = (0x04000000)
DS_REPL_NBR_DISABLE_SCHEDULED_SYNC = (0x08000000)
DS_REPL_NBR_COMPRESS_CHANGES = (0x10000000)
DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS = (0x20000000)
DS_REPL_NBR_PARTIAL_ATTRIBUTE_SET = (0x40000000)
DS_REPL_NBR_MODIFIABLE_MASK = \
( \
DS_REPL_NBR_SYNC_ON_STARTUP | \
DS_REPL_NBR_DO_SCHEDULED_SYNCS | \
DS_REPL_NBR_TWO_WAY_SYNC | \
DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS | \
DS_REPL_NBR_DISABLE_SCHEDULED_SYNC | \
DS_REPL_NBR_COMPRESS_CHANGES | \
DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS \
)
# from enum DS_NAME_FORMAT
DS_UNKNOWN_NAME = 0
DS_FQDN_1779_NAME = 1
DS_NT4_ACCOUNT_NAME = 2
DS_DISPLAY_NAME = 3
DS_UNIQUE_ID_NAME = 6
DS_CANONICAL_NAME = 7
DS_USER_PRINCIPAL_NAME = 8
DS_CANONICAL_NAME_EX = 9
DS_SERVICE_PRINCIPAL_NAME = 10
DS_SID_OR_SID_HISTORY_NAME = 11
DS_DNS_DOMAIN_NAME = 12
DS_DOMAIN_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME
DS_ENTERPRISE_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME
# from enum DS_NAME_FLAGS
DS_NAME_NO_FLAGS = 0x0
DS_NAME_FLAG_SYNTACTICAL_ONLY = 0x1
DS_NAME_FLAG_EVAL_AT_DC = 0x2
DS_NAME_FLAG_GCVERIFY = 0x4
DS_NAME_FLAG_TRUST_REFERRAL = 0x8
# from enum DS_NAME_ERROR
DS_NAME_NO_ERROR = 0
DS_NAME_ERROR_RESOLVING = 1
DS_NAME_ERROR_NOT_FOUND = 2
DS_NAME_ERROR_NOT_UNIQUE = 3
DS_NAME_ERROR_NO_MAPPING = 4
DS_NAME_ERROR_DOMAIN_ONLY = 5
DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 6
DS_NAME_ERROR_TRUST_REFERRAL = 7
# from enum DS_SPN_NAME_TYPE
DS_SPN_DNS_HOST = 0
DS_SPN_DN_HOST = 1
DS_SPN_NB_HOST = 2
DS_SPN_DOMAIN = 3
DS_SPN_NB_DOMAIN = 4
DS_SPN_SERVICE = 5
# from enum DS_SPN_WRITE_OP
DS_SPN_ADD_SPN_OP = 0
DS_SPN_REPLACE_SPN_OP = 1
DS_SPN_DELETE_SPN_OP = 2
# Generated by h2py from DsGetDC.h
DS_FORCE_REDISCOVERY = 0x00000001
DS_DIRECTORY_SERVICE_REQUIRED = 0x00000010
DS_DIRECTORY_SERVICE_PREFERRED = 0x00000020
DS_GC_SERVER_REQUIRED = 0x00000040
DS_PDC_REQUIRED = 0x00000080
DS_BACKGROUND_ONLY = 0x00000100
DS_IP_REQUIRED = 0x00000200
DS_KDC_REQUIRED = 0x00000400
DS_TIMESERV_REQUIRED = 0x00000800
DS_WRITABLE_REQUIRED = 0x00001000
DS_GOOD_TIMESERV_PREFERRED = 0x00002000
DS_AVOID_SELF = 0x00004000
DS_ONLY_LDAP_NEEDED = 0x00008000
DS_IS_FLAT_NAME = 0x00010000
DS_IS_DNS_NAME = 0x00020000
DS_RETURN_DNS_NAME = 0x40000000
DS_RETURN_FLAT_NAME = (-2147483648)
DSGETDC_VALID_FLAGS = ( \
DS_FORCE_REDISCOVERY | \
DS_DIRECTORY_SERVICE_REQUIRED | \
DS_DIRECTORY_SERVICE_PREFERRED | \
DS_GC_SERVER_REQUIRED | \
DS_PDC_REQUIRED | \
DS_BACKGROUND_ONLY | \
DS_IP_REQUIRED | \
DS_KDC_REQUIRED | \
DS_TIMESERV_REQUIRED | \
DS_WRITABLE_REQUIRED | \
DS_GOOD_TIMESERV_PREFERRED | \
DS_AVOID_SELF | \
DS_ONLY_LDAP_NEEDED | \
DS_IS_FLAT_NAME | \
DS_IS_DNS_NAME | \
DS_RETURN_FLAT_NAME | \
DS_RETURN_DNS_NAME )
DS_INET_ADDRESS = 1
DS_NETBIOS_ADDRESS = 2
DS_PDC_FLAG = 0x00000001
DS_GC_FLAG = 0x00000004
DS_LDAP_FLAG = 0x00000008
DS_DS_FLAG = 0x00000010
DS_KDC_FLAG = 0x00000020
DS_TIMESERV_FLAG = 0x00000040
DS_CLOSEST_FLAG = 0x00000080
DS_WRITABLE_FLAG = 0x00000100
DS_GOOD_TIMESERV_FLAG = 0x00000200
DS_NDNC_FLAG = 0x00000400
DS_PING_FLAGS = 0x0000FFFF
DS_DNS_CONTROLLER_FLAG = 0x20000000
DS_DNS_DOMAIN_FLAG = 0x40000000
DS_DNS_FOREST_FLAG = (-2147483648)
DS_DOMAIN_IN_FOREST = 0x0001
DS_DOMAIN_DIRECT_OUTBOUND = 0x0002
DS_DOMAIN_TREE_ROOT = 0x0004
DS_DOMAIN_PRIMARY = 0x0008
DS_DOMAIN_NATIVE_MODE = 0x0010
DS_DOMAIN_DIRECT_INBOUND = 0x0020
DS_DOMAIN_VALID_FLAGS = ( \
DS_DOMAIN_IN_FOREST | \
DS_DOMAIN_DIRECT_OUTBOUND | \
DS_DOMAIN_TREE_ROOT | \
DS_DOMAIN_PRIMARY | \
DS_DOMAIN_NATIVE_MODE | \
DS_DOMAIN_DIRECT_INBOUND )
DS_GFTI_UPDATE_TDO = 0x1
DS_GFTI_VALID_FLAGS = 0x1
DS_ONLY_DO_SITE_NAME = 0x01
DS_NOTIFY_AFTER_SITE_RECORDS = 0x02
DS_OPEN_VALID_OPTION_FLAGS = ( DS_ONLY_DO_SITE_NAME | DS_NOTIFY_AFTER_SITE_RECORDS )
DS_OPEN_VALID_FLAGS = ( \
DS_FORCE_REDISCOVERY | \
DS_ONLY_LDAP_NEEDED | \
DS_KDC_REQUIRED | \
DS_PDC_REQUIRED | \
DS_GC_SERVER_REQUIRED | \
DS_WRITABLE_REQUIRED )
## from aclui.h
# SI_OBJECT_INFO.dwFlags
SI_EDIT_PERMS = 0x00000000L
SI_EDIT_OWNER = 0x00000001L
SI_EDIT_AUDITS = 0x00000002L
SI_CONTAINER = 0x00000004L
SI_READONLY = 0x00000008L
SI_ADVANCED = 0x00000010L
SI_RESET = 0x00000020L
SI_OWNER_READONLY = 0x00000040L
SI_EDIT_PROPERTIES = 0x00000080L
SI_OWNER_RECURSE = 0x00000100L
SI_NO_ACL_PROTECT = 0x00000200L
SI_NO_TREE_APPLY = 0x00000400L
SI_PAGE_TITLE = 0x00000800L
SI_SERVER_IS_DC = 0x00001000L
SI_RESET_DACL_TREE = 0x00004000L
SI_RESET_SACL_TREE = 0x00008000L
SI_OBJECT_GUID = 0x00010000L
SI_EDIT_EFFECTIVE = 0x00020000L
SI_RESET_DACL = 0x00040000L
SI_RESET_SACL = 0x00080000L
SI_RESET_OWNER = 0x00100000L
SI_NO_ADDITIONAL_PERMISSION = 0x00200000L
SI_MAY_WRITE = 0x10000000L
SI_EDIT_ALL = (SI_EDIT_PERMS | SI_EDIT_OWNER | SI_EDIT_AUDITS)
SI_AUDITS_ELEVATION_REQUIRED = 0x02000000L
SI_VIEW_ONLY = 0x00400000L
SI_OWNER_ELEVATION_REQUIRED = 0x04000000L
SI_PERMS_ELEVATION_REQUIRED = 0x01000000L
# SI_ACCESS.dwFlags
SI_ACCESS_SPECIFIC = 0x00010000L
SI_ACCESS_GENERAL = 0x00020000L
SI_ACCESS_CONTAINER = 0x00040000L
SI_ACCESS_PROPERTY = 0x00080000L
# SI_PAGE_TYPE enum
SI_PAGE_PERM = 0
SI_PAGE_ADVPERM = 1
SI_PAGE_AUDIT = 2
SI_PAGE_OWNER = 3
SI_PAGE_EFFECTIVE = 4
CFSTR_ACLUI_SID_INFO_LIST = u"CFSTR_ACLUI_SID_INFO_LIST"
PSPCB_SI_INITDIALOG = 1025 ## WM_USER+1
|
turbokongen/home-assistant
|
refs/heads/dev
|
homeassistant/components/emulated_hue/const.py
|
21
|
"""Constants for emulated_hue."""
HUE_SERIAL_NUMBER = "001788FFFE23BFC2"
HUE_UUID = "2f402f80-da50-11e1-9b23-001788255acc"
|
shubhdev/openedx
|
refs/heads/master
|
pavelib/paver_tests/test_paver_quality.py
|
26
|
"""
Tests for paver quality tasks
"""
import os
from path import path # pylint: disable=no-name-in-module
import tempfile
import unittest
from mock import patch, MagicMock, mock_open
from ddt import ddt, file_data
import pavelib.quality
import paver.easy
import paver.tasks
from paver.easy import BuildFailure
@ddt
class TestPaverQualityViolations(unittest.TestCase):
"""
For testing the paver violations-counting tasks
"""
def setUp(self):
super(TestPaverQualityViolations, self).setUp()
self.f = tempfile.NamedTemporaryFile(delete=False)
self.f.close()
self.addCleanup(os.remove, self.f.name)
def test_pylint_parser_other_string(self):
with open(self.f.name, 'w') as f:
f.write("hello")
num = pavelib.quality._count_pylint_violations(f.name) # pylint: disable=protected-access
self.assertEqual(num, 0)
def test_pylint_parser_pep8(self):
# Pep8 violations should be ignored.
with open(self.f.name, 'w') as f:
f.write("foo/hello/test.py:304:15: E203 whitespace before ':'")
num = pavelib.quality._count_pylint_violations(f.name) # pylint: disable=protected-access
self.assertEqual(num, 0)
@file_data('pylint_test_list.json')
def test_pylint_parser_count_violations(self, value):
"""
Tests:
- Different types of violations
- One violation covering multiple lines
"""
with open(self.f.name, 'w') as f:
f.write(value)
num = pavelib.quality._count_pylint_violations(f.name) # pylint: disable=protected-access
self.assertEqual(num, 1)
def test_pep8_parser(self):
with open(self.f.name, 'w') as f:
f.write("hello\nhithere")
num, _violations = pavelib.quality._pep8_violations(f.name) # pylint: disable=protected-access
self.assertEqual(num, 2)
class TestPaverJsHintViolationsCounts(unittest.TestCase):
"""
For testing run_jshint
"""
def setUp(self):
super(TestPaverJsHintViolationsCounts, self).setUp()
# Mock the paver @needs decorator
self._mock_paver_needs = patch.object(pavelib.quality.run_quality, 'needs').start()
self._mock_paver_needs.return_value = 0
# Temporary file infrastructure
self.f = tempfile.NamedTemporaryFile(delete=False)
self.f.close()
# Cleanup various mocks and tempfiles
self.addCleanup(self._mock_paver_needs.stop)
self.addCleanup(os.remove, self.f.name)
def test_get_violations_count(self):
with open(self.f.name, 'w') as f:
f.write("3000 violations found")
actual_count = pavelib.quality._get_count_from_last_line(self.f.name) # pylint: disable=protected-access
self.assertEqual(actual_count, 3000)
def test_get_violations_no_number_found(self):
with open(self.f.name, 'w') as f:
f.write("Not expected string regex")
actual_count = pavelib.quality._get_count_from_last_line(self.f.name) # pylint: disable=protected-access
self.assertEqual(actual_count, None)
def test_get_violations_count_truncated_report(self):
"""
A truncated report (i.e. last line is just a violation)
"""
with open(self.f.name, 'w') as f:
f.write("foo/bar/js/fizzbuzz.js: line 45, col 59, Missing semicolon.")
actual_count = pavelib.quality._get_count_from_last_line(self.f.name) # pylint: disable=protected-access
self.assertEqual(actual_count, None)
class TestPrepareReportDir(unittest.TestCase):
"""
Tests the report directory preparation
"""
def setUp(self):
super(TestPrepareReportDir, self).setUp()
self.test_dir = tempfile.mkdtemp()
self.test_file = tempfile.NamedTemporaryFile(delete=False, dir=self.test_dir)
self.addCleanup(os.removedirs, self.test_dir)
def test_report_dir_with_files(self):
self.assertTrue(os.path.exists(self.test_file.name))
pavelib.quality._prepare_report_dir(path(self.test_dir)) # pylint: disable=protected-access
self.assertFalse(os.path.exists(self.test_file.name))
def test_report_dir_without_files(self):
os.remove(self.test_file.name)
pavelib.quality._prepare_report_dir(path(self.test_dir)) # pylint: disable=protected-access
self.assertEqual(os.listdir(path(self.test_dir)), [])
class TestPaverRunQuality(unittest.TestCase):
"""
For testing the paver run_quality task
"""
def setUp(self):
super(TestPaverRunQuality, self).setUp()
        # test_no_diff_quality_failures seems to alter the way that paver
        # executes these lines in subsequent tests.
# https://github.com/paver/paver/blob/master/paver/tasks.py#L175-L180
#
# The other tests don't appear to have the same impact. This was
# causing a test order dependency. This line resets that state
# of environment._task_in_progress so that the paver commands in the
# tests will be considered top level tasks by paver, and we can predict
        # which path it will choose in the above code block.
#
# TODO: Figure out why one test is altering the state to begin with.
paver.tasks.environment = paver.tasks.Environment()
# mock the @needs decorator to skip it
self._mock_paver_needs = patch.object(pavelib.quality.run_quality, 'needs').start()
self._mock_paver_needs.return_value = 0
patcher = patch('pavelib.quality.sh')
self._mock_paver_sh = patcher.start()
self.addCleanup(patcher.stop)
self.addCleanup(self._mock_paver_needs.stop)
@patch('__builtin__.open', mock_open())
def test_failure_on_diffquality_pep8(self):
"""
If pep8 finds errors, pylint should still be run
"""
# Mock _get_pep8_violations to return a violation
_mock_pep8_violations = MagicMock(
return_value=(1, ['lms/envs/common.py:32:2: E225 missing whitespace around operator'])
)
with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
with self.assertRaises(SystemExit):
pavelib.quality.run_quality("")
self.assertRaises(BuildFailure)
# Test that both pep8 and pylint were called by counting the calls to _get_pep8_violations
# (for pep8) and sh (for diff-quality pylint)
self.assertEqual(_mock_pep8_violations.call_count, 1)
self.assertEqual(self._mock_paver_sh.call_count, 1)
@patch('__builtin__.open', mock_open())
def test_failure_on_diffquality_pylint(self):
"""
If diff-quality fails on pylint, the paver task should also fail
"""
# Underlying sh call must fail when it is running the pylint diff-quality task
self._mock_paver_sh.side_effect = CustomShMock().fail_on_pylint
_mock_pep8_violations = MagicMock(return_value=(0, []))
with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
with self.assertRaises(SystemExit):
pavelib.quality.run_quality("")
self.assertRaises(BuildFailure)
# Test that both pep8 and pylint were called by counting the calls
# Assert that _get_pep8_violations (which calls "pep8") is called once
self.assertEqual(_mock_pep8_violations.call_count, 1)
# And assert that sh was called once (for the call to "pylint")
self.assertEqual(self._mock_paver_sh.call_count, 1)
@patch('__builtin__.open', mock_open())
def test_other_exception(self):
"""
If diff-quality fails for an unknown reason on the first run (pep8), then
pylint should not be run
"""
self._mock_paver_sh.side_effect = [Exception('unrecognized failure!'), 0]
with self.assertRaises(SystemExit):
pavelib.quality.run_quality("")
self.assertRaises(Exception)
# Test that pylint is NOT called by counting calls
self.assertEqual(self._mock_paver_sh.call_count, 1)
@patch('__builtin__.open', mock_open())
def test_no_diff_quality_failures(self):
# Assert nothing is raised
_mock_pep8_violations = MagicMock(return_value=(0, []))
with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
pavelib.quality.run_quality("")
# Assert that _get_pep8_violations (which calls "pep8") is called once
self.assertEqual(_mock_pep8_violations.call_count, 1)
# And assert that sh was called once (for the call to "pylint")
self.assertEqual(self._mock_paver_sh.call_count, 1)
class CustomShMock(object):
"""
Diff-quality makes a number of sh calls. None of those calls should be made during tests; however, some
of them need to have certain responses.
"""
def fail_on_pylint(self, arg):
"""
        For our tests, we need the call for diff-quality running pylint reports to fail, since that is what
        is going to fail when we pass in a percentage ("p") requirement.
"""
if "pylint" in arg:
# Essentially mock diff-quality exiting with 1
paver.easy.sh("exit 1")
else:
return
|
KanchanChauhan/erpnext
|
refs/heads/develop
|
erpnext/patches/v6_6/remove_fiscal_year_from_leave_allocation.py
|
71
|
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Leave Allocation")
if frappe.db.has_column("Leave Allocation", "fiscal_year"):
for leave_allocation in frappe.db.sql("select name, fiscal_year from `tabLeave Allocation`", as_dict=True):
dates = frappe.db.get_value("Fiscal Year", leave_allocation["fiscal_year"],
["year_start_date", "year_end_date"])
if dates:
year_start_date, year_end_date = dates
frappe.db.sql("""update `tabLeave Allocation`
set from_date=%s, to_date=%s where name=%s""",
(year_start_date, year_end_date, leave_allocation["name"]))
|
vad/django-cms
|
refs/heads/develop
|
cms/south_migrations/0054_new_publisher_data.py
|
63
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
for page in orm['cms.Page'].objects.filter(publisher_is_draft=True):
titles = page.title_set.all()
languages = []
pub_page = page.publisher_public
if pub_page:
pub_titles = pub_page.title_set.all()
else:
pub_titles = []
for title in titles:
title.published = page.published
title.publisher_is_draft = page.publisher_is_draft
for pub_title in pub_titles:
if pub_title.language == title.language:
title.publisher_public = pub_title
pub_title.publisher_public = title
title.publisher_state = page.publisher_state
languages.append(title.language)
title.save()
pub_languages = []
for title in pub_titles:
title.published = pub_page.published
title.publisher_is_draft = pub_page.publisher_is_draft
title.publisher_state = pub_page.publisher_state
title.save()
pub_languages.append(title.language)
if page.published:
page.published_languages = ",".join(languages)
page.languages = ",".join(languages)
page.save()
if pub_page and pub_languages:
pub_page.languages = ",".join(pub_languages)
pub_page.save()
def backwards(self, orm):
for page in orm['cms.Page'].objects.filter(publisher_is_draft=True):
titles = page.title_set.all()
pub_page = page.publisher_public
if pub_page:
pub_titles = pub_page.title_set.all()
else:
pub_titles = []
for title in titles:
page.published = title.published
page.publisher_state = title.publisher_state
page.save()
break
for title in pub_titles:
pub_page.published = title.published
pub_page.publisher_state = title.publisher_state
pub_page.save()
break
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('publisher_is_draft', 'application_namespace'),)", 'object_name': 'Page'},
'application_namespace': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_home': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'published_languages': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_pages'", 'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'INHERIT'", 'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [u'auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': u"orm['auth.User']"}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': u"orm['auth.User']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.placeholderreference': {
'Meta': {'object_name': 'PlaceholderReference', 'db_table': "u'cmsplugin_placeholderreference'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'placeholder_ref': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'})
},
'cms.staticplaceholder': {
'Meta': {'object_name': 'StaticPlaceholder'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'creation_method': ('django.db.models.fields.CharField', [], {'default': "'code'", 'max_length': '20', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'draft': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_draft'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'public': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_public'", 'null': 'True', 'to': "orm['cms.Placeholder']"})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Title']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_usersettings'", 'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
symmetrical = True
|
marineam/coreos-dev-util
|
refs/heads/master
|
host/lib/write_firmware.py
|
3
|
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import binascii
import glob
import os
import re
import struct
import time
from tools import CmdError
def RoundUp(value, boundary):
"""Align a value to the next power of 2 boundary.
Args:
value: The value to align.
boundary: The boundary value, e.g. 4096. Must be a power of 2.
Returns:
The rounded-up value.
"""
return (value + boundary - 1) & ~(boundary - 1)
class WriteFirmware:
"""Write firmware to a Tegra 2 board using USB A-A cable.
  This class handles re-flashing a board with new firmware using the Tegra's
built-in boot ROM feature. This works by putting the chip into a special mode
where it ignores any available firmware and instead reads it from a connected
host machine over USB.
In our case we use that feature to send U-Boot along with a suitable payload
and instructions to flash it to SPI flash. The payload is itself normally a
full Chrome OS image consisting of U-Boot, some keys and verification
information, images and a map of the flash memory.
Private attributes:
_servo_port: Port number to use to talk to servo with dut-control.
Special values are:
None: servo is not available.
0: any servo will do.
"""
def __init__(self, tools, fdt, output, bundle):
"""Set up a new WriteFirmware object.
Args:
tools: A tools library for us to use.
fdt: An fdt which gives us some info that we need.
output: An output object to use for printing progress and messages.
bundle: A BundleFirmware object which created the image.
"""
self._tools = tools
self._fdt = fdt
self._out = output
self._bundle = bundle
self.text_base = self._fdt.GetInt('/chromeos-config', 'textbase', -1)
# For speed, use the 'update' algorithm and don't verify
self.update = True
self.verify = False
# Use default servo port
self._servo_port = 0
def SelectServo(self, servo):
"""Select the servo to use for writing firmware.
Args:
servo: String containing description of servo to use:
'none' : Don't use servo, generate an error on any attempt.
'any' : Use any available servo.
'<port>': Use servo with that port number.
"""
if servo == 'none':
self._servo_port = None
elif servo == 'any':
self._servo_port = 0
else:
self._servo_port = int(servo)
self._out.Notice('Servo port %s' % str(self._servo_port))
def _GetFlashScript(self, payload_size, update, verify, boot_type, checksum,
bus='0'):
"""Get the U-Boot boot command needed to flash U-Boot.
We leave a marker in the string for the load address of the image,
since this depends on the size of this script. This can be replaced by
the caller provided that the marker length is unchanged.
Args:
payload_size: Size of payload in bytes.
      update: Use faster update algorithm rather than full device erase
verify: Verify the write by doing a readback and CRC
boot_type: The source for bootdevice (nand, sdmmc, or spi)
checksum: The checksum of the payload (an integer)
bus: The bus number
Returns:
A tuple containing:
The script, as a string ready to use as a U-Boot boot command, with an
embedded marker for the load address.
The marker string, which the caller should replace with the correct
load address as 8 hex digits, without changing its length.
"""
replace_me = 'zsHEXYla'
page_size = 4096
if boot_type == 'sdmmc':
page_size = 512
if boot_type != 'spi':
update = False
cmds = [
'setenv address 0x%s' % replace_me,
'setenv firmware_size %#x' % payload_size,
'setenv length %#x' % RoundUp(payload_size, page_size),
'setenv blocks %#x' % (RoundUp(payload_size, page_size) / page_size),
'setenv _crc "crc32 -v ${address} ${firmware_size} %#08x"' %
checksum,
'setenv _clear "echo Clearing RAM; mw.b ${address} 0 ${length}"',
]
if boot_type == 'nand':
cmds.extend([
'setenv _init "echo Init NAND; nand info"',
'setenv _erase "echo Erase NAND; nand erase 0 ${length}"',
'setenv _write "echo Write NAND; nand write ${address} 0 ${length}"',
'setenv _read "echo Read NAND; nand read ${address} 0 ${length}"',
])
elif boot_type == 'sdmmc':
cmds.extend([
'setenv _init "echo Init EMMC; mmc rescan 0"',
'setenv _erase "echo Erase EMMC; "',
'setenv _write "echo Write EMMC; mmc write 0 ${address} 0 ' \
'${blocks} boot1"',
'setenv _read "echo Read EMMC; mmc read 0 ${address} 0 ' \
'${blocks} boot1"',
])
else:
cmds.extend([
'setenv _init "echo Init SPI; sf probe %s"' % bus,
'setenv _erase "echo Erase SPI; sf erase 0 ${length}"',
'setenv _write "echo Write SPI; sf write ${address} 0 ${length}"',
'setenv _read "echo Read SPI; sf read ${address} 0 ${length}"',
'setenv _update "echo Update SPI; sf update ${address} 0 ${length}"',
])
cmds.extend([
'echo Firmware loaded to ${address}, size ${firmware_size}, '
'length ${length}',
'if run _crc; then',
'run _init',
])
if update:
cmds += ['time run _update']
else:
cmds += ['run _erase', 'run _write']
if verify:
cmds += [
'run _clear',
'run _read',
'run _crc',
]
else:
cmds += ['echo Skipping verify']
cmds.extend([
'else',
'echo',
'echo "** Checksum error on load: please check download tool **"',
'fi',
])
script = '; '.join(cmds)
return script, replace_me
def PrepareFlasher(self, uboot, payload, update, verify, boot_type, bus):
"""Get a flasher ready for sending to the board.
The flasher is an executable image consisting of:
- U-Boot (u-boot.bin);
- a special FDT to tell it what to do in the form of a run command;
- (we could add some empty space here, in case U-Boot is not built to
be relocatable);
- the payload (which is a full flash image, or signed U-Boot + fdt).
Args:
uboot: Full path to u-boot.bin.
payload: Full path to payload.
      update: Use faster update algorithm rather than full device erase
      verify: Verify the write by doing a readback and CRC
      boot_type: the source for the boot device (nand, sdmmc, or spi)
Returns:
Filename of the flasher binary created.
"""
fdt = self._fdt.Copy(os.path.join(self._tools.outdir, 'flasher.dtb'))
payload_data = self._tools.ReadFile(payload)
# Make sure that the checksum is not negative
checksum = binascii.crc32(payload_data) & 0xffffffff
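    # Note (added for clarity): on Python 2, binascii.crc32() can return a
    # negative signed 32-bit value; masking with 0xffffffff maps it to the
    # equivalent unsigned value so it formats cleanly as hex in the script.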
script, replace_me = self._GetFlashScript(len(payload_data), update,
verify, boot_type, checksum, bus)
data = self._tools.ReadFile(uboot)
fdt.PutString('/config', 'bootcmd', script)
fdt_data = self._tools.ReadFile(fdt.fname)
# Work out where to place the payload in memory. This is a chicken-and-egg
# problem (although in case you haven't heard, it was the chicken that
# came first), so we resolve it by replacing the string after
# fdt.PutString has done its job.
#
# Correction: Technically, the egg came first. Whatever genetic mutation
# created the new species would have been present in the egg, but not the
# parent (since if it was in the parent, it would have been present in the
# parent when it was an egg).
#
# Question: ok so who laid the egg then?
payload_offset = len(data) + len(fdt_data)
# NAND driver expects 4-byte alignment. Just go whole hog and do 4K.
alignment = 0x1000
payload_offset = (payload_offset + alignment - 1) & ~(alignment - 1)
load_address = self.text_base + payload_offset,
new_str = '%08x' % load_address
    if len(replace_me) != len(new_str):
raise ValueError("Internal error: replacement string '%s' length does "
"not match new string '%s'" % (replace_me, new_str))
matches = len(re.findall(replace_me, fdt_data))
if matches != 1:
raise ValueError("Internal error: replacement string '%s' already "
"exists in the fdt (%d matches)" % (replace_me, matches))
fdt_data = re.sub(replace_me, new_str, fdt_data)
# Now put it together.
data += fdt_data
data += "\0" * (payload_offset - len(data))
data += payload_data
flasher = os.path.join(self._tools.outdir, 'flasher-for-image.bin')
self._tools.WriteFile(flasher, data)
# Tell the user about a few things.
self._tools.OutputSize('U-Boot', uboot)
self._tools.OutputSize('Payload', payload)
self._out.Notice('Payload checksum %08x' % checksum)
self._tools.OutputSize('Flasher', flasher)
return flasher
def NvidiaFlashImage(self, flash_dest, uboot, bct, payload, bootstub):
"""Flash the image to SPI flash.
This creates a special Flasher binary, with the image to be flashed as
a payload. This is then sent to the board using the tegrarcm utility.
Args:
flash_dest: Destination for flasher, or None to not create a flasher
Valid options are spi, sdmmc
uboot: Full path to u-boot.bin.
bct: Full path to BCT file (binary chip timings file for Nvidia SOCs).
payload: Full path to payload.
bootstub: Full path to bootstub, which is the payload without the
signing information (i.e. bootstub is u-boot.bin + the FDT)
Returns:
True if ok, False if failed.
"""
# Use a Regex to pull Boot type from BCT file.
match = re.compile('DevType\[0\] = NvBootDevType_(?P<boot>([a-zA-Z])+);')
bct_dumped = self._tools.Run('bct_dump', [bct]).splitlines()
# TODO(sjg): The boot type is currently selected by the bct, rather than
# flash_dest selecting which bct to use. This is a bit backwards. For now
# we go with the bct's idea.
boot_type = filter(match.match, bct_dumped)
boot_type = match.match(boot_type[0]).group('boot').lower()
if flash_dest:
image = self.PrepareFlasher(uboot, payload, self.update, self.verify,
boot_type, 0)
elif bootstub:
image = bootstub
else:
image = payload
# If we don't know the textbase, extract it from the payload.
if self.text_base == -1:
data = self._tools.ReadFile(payload)
# Skip the BCT which is the first 64KB
self.text_base = self._bundle.DecodeTextBase(data[0x10000:])
self._out.Notice('TEXT_BASE is %#x' % self.text_base)
self._out.Progress('Uploading flasher image')
args = [
'--bct', bct,
'--bootloader', image,
'--loadaddr', "%#x" % self.text_base
]
# TODO(sjg): Check for existence of board - but chroot has no lsusb!
last_err = None
for _ in range(10):
try:
# TODO(sjg): Use Chromite library so we can monitor output
self._tools.Run('tegrarcm', args, sudo=True)
self._out.Notice('Flasher downloaded - please see serial output '
'for progress.')
return True
except CmdError as err:
if not self._out.stdout_is_tty:
return False
# Only show the error output once unless it changes.
err = str(err)
if not 'could not open USB device' in err:
raise CmdError('tegrarcm failed: %s' % err)
if err != last_err:
self._out.Notice(err)
last_err = err
self._out.Progress('Please connect USB A-A cable and do a '
'recovery-reset', True)
time.sleep(1)
return False
def _WaitForUSBDevice(self, name, vendor_id, product_id, timeout=10):
"""Wait until we see a device on the USB bus.
Args:
name: Board type name
vendor_id: USB vendor ID to look for
product_id: USB product ID to look for
timeout: Timeout to wait in seconds
    Returns:
True if the device was found, False if we timed out.
"""
self._out.Progress('Waiting for board to appear on USB bus')
start_time = time.time()
while time.time() - start_time < timeout:
try:
args = ['-d', '%04x:%04x' % (vendor_id, product_id)]
self._tools.Run('lsusb', args, sudo=True)
self._out.Progress('Found %s board' % name)
return True
except CmdError:
pass
return False
def _DutControl(self, args):
"""Run dut-control with supplied arguments.
The correct servo will be used based on self._servo_port.
Args:
args: List of arguments to dut-control.
    Returns:
a string, stdout generated by running the command
Raises:
IOError if no servo access is permitted.
"""
if self._servo_port is None:
raise IOError('No servo access available, please use --servo')
if self._servo_port:
args.extend(['-p', '%s' % self._servo_port])
return self._tools.Run('dut-control', args)
def _ExtractPayloadParts(self, payload):
"""Extract the BL1, BL2 and U-Boot parts from a payload.
An exynos image consists of 3 parts: BL1, BL2 and U-Boot/FDT.
This pulls out the various parts, puts them into files and returns
these files.
Args:
payload: Full path to payload.
Returns:
(bl1, bl2, image) where:
bl1 is the filename of the extracted BL1
bl2 is the filename of the extracted BL2
image is the filename of the extracted U-Boot image
"""
# Pull out the parts from the payload
bl1 = os.path.join(self._tools.outdir, 'bl1.bin')
bl2 = os.path.join(self._tools.outdir, 'bl2.bin')
image = os.path.join(self._tools.outdir, 'u-boot-from-image.bin')
data = self._tools.ReadFile(payload)
# The BL1 is always 8KB - extract that part into a new file
# TODO(sjg@chromium.org): Perhaps pick these up from the fdt?
bl1_size = 0x2000
self._tools.WriteFile(bl1, data[:bl1_size])
# Try to detect the BL2 size. We look for 0xea000014 which is the
# 'B reset' instruction at the start of U-Boot.
first_instr = struct.pack('<L', 0xea000014)
uboot_offset = data.find(first_instr, bl1_size + 0x3800)
if uboot_offset == -1:
raise ValueError('Could not locate start of U-Boot')
bl2_size = uboot_offset - bl1_size - 0x800 # 2KB gap after BL2
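    # Worked example (hypothetical image, not from the original source): with
    # U-Boot found at offset 0x6000, bl2_size is 0x6000 - 0x2000 - 0x800
    # = 0x3800 (14KB), one of the allowed SPL sizes checked below.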
# Sanity check: At present we only allow 14KB and 30KB for SPL
allowed = [14, 30]
if (bl2_size >> 10) not in allowed:
raise ValueError('BL2 size is %dK - only %s supported' %
(bl2_size >> 10, ', '.join(
[str(size) for size in allowed])))
self._out.Notice('BL2 size is %dKB' % (bl2_size >> 10))
# The BL2 (U-Boot SPL) follows BL1. After that there is a 2KB gap
bl2_end = uboot_offset - 0x800
self._tools.WriteFile(bl2, data[0x2000:bl2_end])
# U-Boot itself starts after the gap
self._tools.WriteFile(image, data[uboot_offset:])
return bl1, bl2, image
def ExynosFlashImage(self, flash_dest, flash_uboot, bl1, bl2, payload,
kernel):
"""Flash the image to SPI flash.
This creates a special Flasher binary, with the image to be flashed as
a payload. This is then sent to the board using the tegrarcm utility.
Args:
flash_dest: Destination for flasher, or None to not create a flasher
Valid options are spi, sdmmc.
flash_uboot: Full path to u-boot.bin to use for flasher.
bl1: Full path to file containing BL1 (pre-boot).
bl2: Full path to file containing BL2 (SPL).
payload: Full path to payload.
kernel: Kernel to send after the payload, or None.
Returns:
True if ok, False if failed.
"""
if flash_dest:
image = self.PrepareFlasher(flash_uboot, payload, self.update,
self.verify, flash_dest, '1:0')
else:
bl1, bl2, image = self._ExtractPayloadParts(payload)
vendor_id = 0x04e8
product_id = 0x1234
# Preserve dut_hub_sel state.
preserved_dut_hub_sel = self._DutControl(['dut_hub_sel',]
).strip().split(':')[-1]
required_dut_hub_sel = 'dut_sees_servo'
args = ['warm_reset:on', 'fw_up:on', 'pwr_button:press', 'sleep:.1',
'warm_reset:off']
if preserved_dut_hub_sel != required_dut_hub_sel:
# Need to set it to get the port properly powered up.
args += ['dut_hub_sel:%s' % required_dut_hub_sel]
# TODO(sjg) If the board is bricked a reset does not seem to bring it
# back to life.
# BUG=chromium-os:28229
args = ['cold_reset:on', 'sleep:.2', 'cold_reset:off'] + args
    self._out.Progress('Resetting board via servo')
self._DutControl(args)
# If we have a kernel to write, create a new image with that added.
if kernel:
dl_image = os.path.join(self._tools.outdir, 'image-plus-kernel.bin')
data = self._tools.ReadFile(image)
# Pad the original payload out to the original length
data += '\0' * (os.stat(payload).st_size - len(data))
data += self._tools.ReadFile(kernel)
self._tools.WriteFile(dl_image, data)
else:
dl_image = image
self._out.Progress('Uploading image')
download_list = [
# The numbers are the download addresses (in SRAM) for each piece
# TODO(sjg@chromium.org): Perhaps pick these up from the fdt?
['bl1', 0x02021400, bl1],
['bl2', 0x02023400, bl2],
['u-boot', 0x43e00000, dl_image]
]
try:
for upto in range(len(download_list)):
item = download_list[upto]
if not self._WaitForUSBDevice('exynos', vendor_id, product_id, 4):
if upto == 0:
raise CmdError('Could not find Exynos board on USB port')
raise CmdError("Stage '%s' did not complete" % item[0])
self._out.Notice(item[2])
self._out.Progress("Uploading stage '%s'" % item[0])
if upto == 0:
# The IROM needs roughly 200ms here to be ready for USB download
time.sleep(.5)
args = ['-a', '%#x' % item[1], '-f', item[2]]
self._tools.Run('smdk-usbdl', args, sudo=True)
if upto == 1:
          # Once SPL starts up we can release the power button
args = ['fw_up:off', 'pwr_button:release']
self._DutControl(args)
finally:
      # Make sure that the power button is released and dut_hub_sel state is
      # restored, whatever happens
args = ['fw_up:off', 'pwr_button:release']
if preserved_dut_hub_sel != required_dut_hub_sel:
args += ['dut_hub_sel:%s' % preserved_dut_hub_sel]
self._DutControl(args)
self._out.Notice('Image downloaded - please see serial output '
'for progress.')
return True
def _GetDiskInfo(self, disk, item):
"""Returns information about a SCSI disk device.
Args:
disk: a block device name in sys/block, like '/sys/block/sdf'.
item: the item of disk information that is required.
Returns:
The information obtained, as a string, or '[Unknown]' if not found
"""
dev_path = os.path.join(disk, 'device')
# Search upwards and through symlinks looking for the item.
while os.path.isdir(dev_path) and dev_path != '/sys':
fname = os.path.join(dev_path, item)
if os.path.exists(fname):
with open(fname, 'r') as fd:
return fd.readline().rstrip()
# Move up a level and follow any symlink.
new_path = os.path.join(dev_path, '..')
if os.path.islink(new_path):
new_path = os.path.abspath(os.readlink(os.path.dirname(dev_path)))
dev_path = new_path
return '[Unknown]'
def _GetDiskCapacity(self, device):
"""Returns the disk capacity in GB, or 0 if not known.
Args:
device: Device to check, like '/dev/sdf'.
Returns:
Capacity of device in GB, or 0 if not known.
"""
args = ['-l', device]
stdout = self._tools.Run('fdisk', args, sudo=True)
if stdout:
      # Search for the line with capacity information.
re_capacity = re.compile('Disk .*: (\d+) \w+,')
lines = filter(re_capacity.match, stdout.splitlines())
if len(lines):
m = re_capacity.match(lines[0])
# We get something like 7859 MB, so turn into bytes, then GB
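        # Worked example (hypothetical card): 7859 MB -> 7859 * 1024 * 1024
        # = 8,240,758,784 bytes -> roughly 8.24 GB once divided by 1e9.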
return int(m.group(1)) * 1024 * 1024 / 1e9
return 0
def _ListUsbDisks(self):
"""Return a list of available removable USB disks.
Returns:
List of USB devices, each element is itself a list containing:
device ('/dev/sdx')
manufacturer name
product name
capacity in GB (an integer)
full description (all of the above concatenated).
"""
disk_list = []
for disk in glob.glob('/sys/block/sd*'):
with open(disk + '/removable', 'r') as fd:
if int(fd.readline()) == 1:
device = '/dev/%s' % disk.split('/')[-1]
manuf = self._GetDiskInfo(disk, 'manufacturer')
product = self._GetDiskInfo(disk, 'product')
capacity = self._GetDiskCapacity(device)
if capacity:
desc = '%s: %s %s %d GB' % (device, manuf, product, capacity)
disk_list.append([device, manuf, product, capacity, desc])
return disk_list
def WriteToSd(self, flash_dest, disk, uboot, payload):
if flash_dest:
raw_image = self.PrepareFlasher(uboot, payload, self.update, self.verify,
flash_dest, '1:0')
bl1, bl2, _ = self._ExtractPayloadParts(payload)
spl_load_size = os.stat(raw_image).st_size
bl2 = self._bundle.ConfigureExynosBl2(self._fdt, spl_load_size, bl2,
'flasher')
data = self._tools.ReadFile(bl1) + self._tools.ReadFile(bl2)
# Pad BL2 out to the required size.
# We require that it be 24KB, but data will only contain 8KB + 14KB.
# Add the extra padding to bring it to 24KB.
data += '\0' * (0x6000 - len(data))
data += self._tools.ReadFile(raw_image)
image = os.path.join(self._tools.outdir, 'flasher-with-bl.bin')
self._tools.WriteFile(image, data)
self._out.Progress('Writing flasher to %s' % disk)
else:
image = payload
self._out.Progress('Writing image to %s' % disk)
args = ['if=%s' % image, 'of=%s' % disk, 'bs=512', 'seek=1']
self._tools.Run('dd', args, sudo=True)
def SendToSdCard(self, dest, flash_dest, uboot, payload):
"""Write a flasher to an SD card.
Args:
dest: Destination in one of these forms:
':<full description of device>'
':.' selects the only available device, fails if more than one option
        ':<device>' selects the device
Examples:
':/dev/sdd: Generic Flash Card Reader/Writer 8 GB'
':.'
':/dev/sdd'
flash_dest: Destination for flasher, or None to not create a flasher:
Valid options are spi, sdmmc.
uboot: Full path to u-boot.bin.
payload: Full path to payload.
"""
disk = None
disks = self._ListUsbDisks()
if dest[:1] == ':':
name = dest[1:]
# A '.' just means to use the only available disk.
if name == '.' and len(disks) == 1:
disk = disks[0][0]
for disk_info in disks:
# Use the full name or the device name.
if disk_info[4] == name or disk_info[1] == name:
disk = disk_info[0]
if disk:
self.WriteToSd(flash_dest, disk, uboot, payload)
else:
self._out.Error("Please specify destination -w 'sd:<disk_description>':")
self._out.Error(' - description can be . for the only disk, SCSI '
'device letter')
self._out.Error(' or the full description listed here')
msg = 'Found %d available disks.' % len(disks)
if not disks:
msg += ' Please insert an SD card and try again.'
self._out.UserOutput(msg)
# List available disks as a convenience.
for disk in disks:
self._out.UserOutput(' %s' % disk[4])
def Em100FlashImage(self, image_fname):
"""Send an image to an attached EM100 device.
This is a Dediprog EM100 SPI flash emulation device. We set up servo2
to do the SPI emulation, then write the image, then boot the board.
All going well, this is enough to get U-Boot running.
Args:
image_fname: Filename of image to send
"""
args = ['spi2_vref:off', 'spi2_buf_en:off', 'spi2_buf_on_flex_en:off']
args.append('spi_hold:on')
self._DutControl(args)
# TODO(sjg@chromium.org): This is for link. We could make this
# configurable from the fdt.
args = ['-c', 'W25Q64CV', '-d', self._tools.Filename(image_fname), '-r']
self._out.Progress('Writing image to em100')
self._tools.Run('em100', args, sudo=True)
self._out.Progress('Resetting board')
args = ['cold_reset:on', 'sleep:.2', 'cold_reset:off', 'sleep:.5']
args.extend(['pwr_button:press', 'sleep:.2', 'pwr_button:release'])
self._DutControl(args)
def DoWriteFirmware(output, tools, fdt, flasher, file_list, image_fname,
bundle, update=True, verify=False, dest=None,
flash_dest=None, kernel=None, bootstub=None, servo='any',
method='tegra'):
"""A simple function to write firmware to a device.
This creates a WriteFirmware object and uses it to write the firmware image
to the given destination device.
Args:
output: cros_output object to use.
tools: Tools object to use.
fdt: Fdt object to use as our device tree.
flasher: U-Boot binary to use as the flasher.
file_list: Dictionary containing files that we might need.
image_fname: Filename of image to write.
bundle: The bundle object which created the image.
    update: Use faster update algorithm rather than full device erase.
verify: Verify the write by doing a readback and CRC.
dest: Destination device to write firmware to (usb, sd).
flash_dest: Destination device for flasher to program payload into.
kernel: Kernel file to write after U-Boot
bootstub: string, file name of the boot stub, if present
servo: Describes the servo unit to use: none=none; any=any; otherwise
port number of servo to use.
"""
write = WriteFirmware(tools, fdt, output, bundle)
write.SelectServo(servo)
write.update = update
write.verify = verify
if dest == 'usb':
method = fdt.GetString('/chromeos-config', 'flash-method', method)
if method == 'tegra':
tools.CheckTool('tegrarcm')
if flash_dest:
write.text_base = bundle.CalcTextBase('flasher ', fdt, flasher)
elif bootstub:
write.text_base = bundle.CalcTextBase('bootstub ', fdt, bootstub)
ok = write.NvidiaFlashImage(flash_dest, flasher, file_list['bct'],
image_fname, bootstub)
elif method == 'exynos':
tools.CheckTool('lsusb', 'usbutils')
tools.CheckTool('smdk-usbdl', 'smdk-dltool')
ok = write.ExynosFlashImage(flash_dest, flasher,
file_list['exynos-bl1'], file_list['exynos-bl2'], image_fname,
kernel)
else:
raise CmdError("Unknown flash method '%s'" % method)
if ok:
output.Progress('Image uploaded - please wait for flashing to '
'complete')
else:
raise CmdError('Image upload failed - please check board connection')
elif dest == 'em100':
# crosbug.com/31625
tools.CheckTool('em100')
write.Em100FlashImage(image_fname)
elif dest.startswith('sd'):
write.SendToSdCard(dest[2:], flash_dest, flasher, image_fname)
else:
raise CmdError("Unknown destination device '%s'" % dest)
|
jbassen/edx-platform
|
refs/heads/master
|
common/djangoapps/util/tests/test_db.py
|
109
|
"""Tests for util.db module."""
import ddt
import threading
import time
import unittest
from django.contrib.auth.models import User
from django.db import connection, IntegrityError
from django.db.transaction import commit_on_success, TransactionManagementError
from django.test import TestCase, TransactionTestCase
from util.db import commit_on_success_with_read_committed, generate_int_id
@ddt.ddt
class TransactionIsolationLevelsTestCase(TransactionTestCase):
"""
Tests the effects of changing transaction isolation level to READ COMMITTED instead of REPEATABLE READ.
Note: This TestCase only works with MySQL.
To run it on devstack:
1. Add TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' to envs/devstack.py
2. Run "./manage.py lms --settings=devstack test util.tests.test_db"
"""
@ddt.data(
(commit_on_success, IntegrityError, None, True),
(commit_on_success_with_read_committed, type(None), False, True),
)
@ddt.unpack
def test_concurrent_requests(self, transaction_decorator, exception_class, created_in_1, created_in_2):
"""
Test that when isolation level is set to READ COMMITTED get_or_create()
for the same row in concurrent requests does not raise an IntegrityError.
"""
if connection.vendor != 'mysql':
raise unittest.SkipTest('Only works on MySQL.')
class RequestThread(threading.Thread):
""" A thread which runs a dummy view."""
def __init__(self, delay, **kwargs):
super(RequestThread, self).__init__(**kwargs)
self.delay = delay
self.status = {}
@transaction_decorator
def run(self):
"""A dummy view."""
try:
try:
User.objects.get(username='student', email='student@edx.org')
except User.DoesNotExist:
pass
else:
raise AssertionError('Did not raise User.DoesNotExist.')
if self.delay > 0:
time.sleep(self.delay)
__, created = User.objects.get_or_create(username='student', email='student@edx.org')
except Exception as exception: # pylint: disable=broad-except
self.status['exception'] = exception
else:
self.status['created'] = created
thread1 = RequestThread(delay=1)
thread2 = RequestThread(delay=0)
thread1.start()
thread2.start()
thread2.join()
thread1.join()
self.assertIsInstance(thread1.status.get('exception'), exception_class)
self.assertEqual(thread1.status.get('created'), created_in_1)
self.assertIsNone(thread2.status.get('exception'))
self.assertEqual(thread2.status.get('created'), created_in_2)
def test_transaction_nesting(self):
"""Test that the decorator raises an error if there are already more than 1 levels of nested transactions."""
if connection.vendor != 'mysql':
raise unittest.SkipTest('Only works on MySQL.')
def do_nothing():
"""Just return."""
return
commit_on_success_with_read_committed(do_nothing)()
with commit_on_success():
commit_on_success_with_read_committed(do_nothing)()
with self.assertRaises(TransactionManagementError):
with commit_on_success():
with commit_on_success():
commit_on_success_with_read_committed(do_nothing)()
@ddt.ddt
class GenerateIntIdTestCase(TestCase):
"""Tests for `generate_int_id`"""
@ddt.data(10)
def test_no_used_ids(self, times):
"""
Verify that we get a random integer within the specified range
when there are no used ids.
"""
minimum = 1
maximum = times
for i in range(times):
self.assertIn(generate_int_id(minimum, maximum), range(minimum, maximum + 1))
@ddt.data(10)
def test_used_ids(self, times):
"""
Verify that we get a random integer within the specified range
but not in a list of used ids.
"""
minimum = 1
maximum = times
used_ids = {2, 4, 6, 8}
for i in range(times):
int_id = generate_int_id(minimum, maximum, used_ids)
self.assertIn(int_id, list(set(range(minimum, maximum + 1)) - used_ids))
|
harshmaur/elasticsearch-dsl-py
|
refs/heads/master
|
test_elasticsearch_dsl/test_integration/test_fields.py
|
16
|
from elasticsearch_dsl.search import Search
def test_search_can_be_limited_to_fields(data_client):
s = Search(using=data_client).index('git').doc_type('repos').fields('organization')
response = s.execute()
assert response.hits.total == 1
assert response.hits[0] == {'organization': ['elasticsearch']}
|
wkschwartz/django
|
refs/heads/stable/3.2.x
|
tests/user_commands/management/commands/subparser_dest.py
|
28
|
from django.core.management.base import BaseCommand
from django.utils.version import PY37
class Command(BaseCommand):
def add_arguments(self, parser):
kwargs = {'required': True} if PY37 else {}
subparsers = parser.add_subparsers(dest='subcommand', **kwargs)
parser_foo = subparsers.add_parser('foo')
parser_foo.add_argument('--bar')
def handle(self, *args, **options):
self.stdout.write(','.join(options))
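# Usage sketch (added for illustration; the command name "subparser_dest" is taken
# from the file path above, and the exact default option keys depend on the Django
# version):
#   ./manage.py subparser_dest foo --bar=12
# handle() then prints the option keys it received, e.g.
#   subcommand,bar,verbosity,settings,pythonpath,traceback,no_color,...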
|
ajgallegog/gem5_arm
|
refs/heads/master
|
ext/ply/test/lex_token4.py
|
174
|
# lex_token4.py
#
# Bad token name
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = [
"PLUS",
"MINUS",
"-",
"NUMBER",
]
t_PLUS = r'\+'
t_MINUS = r'-'
t_NUMBER = r'\d+'
def t_error(t):
pass
lex.lex()
|
EddyTheB/Warbler
|
refs/heads/master
|
webCam/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
MMaus/mutils
|
refs/heads/master
|
mmnotebooks/bslip.py
|
1
|
from libshai import integro
from pylab import (norm, pi, hstack, vstack, array, sign, sin, cos, arctan2,
sqrt, zeros,
figure, subplot, plot, legend, xlabel, ylabel)
from numpy import float64
from copy import deepcopy
import mutils.io as mio
import fastode # local!
class SimulationError(Exception):
pass
class BSLIP(mio.saveable):
""" Class of the bipedal walking SLIP """
def __init__(self, params=None, IC=None):
"""
The BSLIP is a bipedal walking SLIP model.
params (mutils.misc.Struct): parameter of the model
IC (array): initial conditions. [x, y, z, vx, vy, vz]
*NOTE* the system starts in single stance and *must* have
positive vertical velocity ("vy > 0")
"""
super(BSLIP, self).__init__()
self.params = deepcopy(params)
self.state = deepcopy(IC)
self.odd_step = True # leg 1 or leg two on ground?
self.dt = .01
self.odess = fastode.FastODE('bslipss')
self.odeds = fastode.FastODE('bslipds')
self.buf = zeros((2000, self.odess.WIDTH), dtype=float64)
self.t = 0
self.t_td = 0
self.t_to = 0
self.singleStance = True
self.failed = False
self.skip_forces = False
self.ErrMsg = ""
# storage for ode solutions
self.feet1_seq = []
self.feet2_seq = []
self.t_ss_seq = []
self.t_ds_seq = []
self.y_ss_seq = []
self.y_ds_seq = []
self.forces_ss_seq = []
self.forces_ds_seq = []
self.DEBUG = False
if self.params is not None:
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
def init_ode(self):
""" re-initialize the ODE solver """
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def restore(self, filename):
"""
update the restore procedure: re-initialize the ODE solver!
:args:
filename (str): the filename where the model information is stored
"""
super(BSLIP, self).restore(filename)
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def legfunc1(self, t, y, pars):
"""
Force (scalar) function of leg 1: Here, spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot1' (3x float) foot1 position
'lp1' (4x float) parameters of leg 1
:returns:
f (float): the axial leg force ["f = k * (l - l0)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
#DEBUG:
#print 'pf1: ', pars['foot1']
l1 = norm(array(y[:3]) - array(pars['foot1']))
return -pars['lp1'][0] * (l1 - pars['lp1'][1])
def legfunc2(self, t, y, pars):
"""
leg function of leg 2: a spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot2' (3x float) foot2 position
'lp2' (4x float) parameters of leg 2
:returns:
f (float): the axial leg force ["f = k * (l - l0)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
l2 = norm(array(y[:3]) - array(pars['foot2']))
return -pars['lp2'][0] * (l2 - pars['lp2'][1])
def evt_vy0(self, t, states, traj, p):
"""
triggers the vy=0 event
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
:returns:
(bool) vy=0 detected? (both directions)
"""
return sign(states[0][4]) * sign(states[1][4]) != 1
def update_params_ss(self):
"""
Updates the model parameters in the single stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_ds(self):
"""
Updates the model parameters in the double stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_td(self):
"""
Updates the model parameters at touchdown events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_to(self):
"""
Updates the model parameters at takeoff events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def takeoff_event(self, t, states, traj, pars, legfun):
"""
triggers the take off of a leg
Hint: use a lambda function to adapt the call signature
This function is force-triggered. The parameter format (pars) must
be the same as for legfun (which is called from here!)
*NOTE* you can overwrite this method for derived models. However,
this is not required if the takeoff condition is "zero force".
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
pars (<any>): the leg functions parameters
legfun (function of (t, y, pars) ): the leg force function.
:returns:
(bool) takeoff detected? (force has falling zero crossing)
"""
F0 = legfun(t[0], states[0], pars)
F1 = legfun(t[1], states[1], pars)
return F0 > 0 and F1 <= 0
def touchdown_event(self, t, states, traj, pars):
"""
triggers the touchdown of the leading leg.
Hint: use a lambda function to adapt the call signature
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
pars (4x float): the leg functions parameters. Format:
[l0, alpha, beta, floorlevel]
:returns:
(bool) touchdown detected? (foot height crosses the floor level from above)
"""
def zfoot(state, pars):
foot = state[1] - pars[0] * sin(pars[1])
return foot - pars[3]
return zfoot(states[0], pars) > 0 and zfoot(states[1], pars) <= 0
def touchdown_event_refine(self, t, state, pars):
"""
The touchdown event function for refinement of touchdown detection.
The zero-crossing of the output is defined as instant of the event.
Hint: use a lambda function to adapt the call signature
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (4x float): the leg functions parameters. Format:
[l0, alpha, beta, floorlevel]
:returns:
f (float): height of the leg tip above the floor level; its zero crossing marks the touchdown instant
"""
foot = state.squeeze()[1] - pars[0] * sin(pars[1])
return foot - pars[3] # foot - ground level
def dy_Stance(self, t, y, pars, return_force = False):
"""
This is the ode function that is passed to the solver. Internally, it calls:
legfunc1 - force of leg 1 (overwrite for new models)
legfunc2 - force of leg 2 (overwrite for new models)
:args:
t (float): simulation time
y (6x float): CoM state
pars (dict): parameters, will be passed to legfunc1 and legfunc2.
must also include 'foot1' (3x float), 'foot2' (3x float), 'm' (float)
and 'g' (3x float) indicating the feet positions, mass and direction of
gravity, respectively.
return_force (bool, default: False): return [F_leg1, F_leg2] (6x
float) instead of dy/dt.
"""
f1 = max(self.legfunc1(t, y, pars), 0) # only push
l1 = norm(array(y[:3]) - array(pars['foot1']))
f1_vec = (array(y[:3]) - array(pars['foot1'])) / l1 * f1
f2 = max(self.legfunc2(t, y, pars), 0) # only push
l2 = norm(array(y[:3]) - array(pars['foot2']))
f2_vec = (array(y[:3]) - array(pars['foot2'])) / l2 * f2
if return_force:
return hstack([f1_vec, f2_vec])
return hstack([y[3:], (f1_vec + f2_vec) / pars['m'] + pars['g']])
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg. Overwrite this for different leg parameters!
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
xf = y[0] + l0 * cos(alpha) * cos(beta)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta)
return array([xf, yf, zf])
def checkSim(self):
"""
Raises an error if the model failed.
Overwrite in derived classes to avoid raised errors.
"""
if self.failed:
raise SimulationError("simulation failed!")
def do_step(self):
"""
Performs a step from the current state, using the current parameters.
The simulation results are also stored in self.[y|t]_[s|d]s_seq,
the states and times of single and double support phases.
*requires*:
self.
- params (dict): model and leg function parameters
- odd_step (bool): whether or not to trigger contact of leg2 (leg1 otherwise)
- state (6x float): the initial state
:args:
(None)
:returns:
t_ss, y_ss, t_ds, y_ds: time and simulation results for single stance and double stance
phases
:raises:
TypeError - invalid IC or parameter
SimulationError - if the simulation fails.
"""
# test initial conditions.
# test whether there is a current state and current parameters
if self.params is None:
raise TypeError("parameters not set")
if self.state is None:
raise TypeError("state (initial condition) not set")
if self.failed:
raise SimulationError("Simulation failed previously.")
#demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
#demo_p = { 'foot1' : [0, 0, 0],
# 'foot2' : [-1.5, 0, 0],
# 'm' : 80,
# 'g' : [0, -9.81, 0],
# 'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
# 'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
# 'delta_beta' : .05
# }
p = self.params # shortcut
leadingleg = 1. if self.odd_step else 2.
pars = [p['lp1'][0],
p['lp2'][0],
p['lp1'][2],
p['lp2'][2],
p['lp1'][1],
p['lp2'][1],
p['lp1'][3],
p['lp2'][3],
p['m'],
p['g'][1],
p['foot1'][0],
p['foot1'][1],
p['foot1'][2],
p['foot2'][0],
p['foot2'][1],
p['foot2'][2],
leadingleg]
# maximal time for simulation of single stance or double stance (each)
max_T = 1.
# run single stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odess.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.y_ss_seq.append(self.buf[:N+1, 1:].copy())
self.t_ss_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
print "N=", N
raise SimulationError("Maximal simulation time (single stance) reached!")
self.t = self.buf[N,0]
# touchdown detected:
# update foot parameters
# (1) foot2 = foot1
# (2) foot1 = [NEW]
# (3) leading_leg = ~leading_leg
# update leg positions; change trailing leg
y = self.state # shortcut
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
pars[13] = pars[10]
pars[15] = pars[12]
if pars[16] == 1.:
# stance leg is leg 1 -> update leg 2 params
pars[10] = y[0] + cos(pars[3]) * cos(pars[7] + a_v_com) * pars[5]
pars[12] = y[2] - cos(pars[3]) * sin(pars[7] + a_v_com) * pars[5]
#pars[13] = res[N, 1] + cos(pars[3])*cos(pars[7])*pars[5]
#pars[15] = res[N, 3] + cos(pars[3])*sin(pars[7])*pars[5]
pars[16] = 2.
else:
pars[10] = y[0] + cos(pars[2]) * cos(pars[6] + a_v_com) * pars[4]
pars[12] = y[2] - cos(pars[2]) * sin(pars[6] + a_v_com) * pars[4]
#pars[10] = res[N, 1] + cos(pars[2])*cos(pars[6])*pars[4]
#pars[12] = res[N, 3] + cos(pars[2])*sin(pars[6])*pars[4]
pars[16] = 1.
self.params['foot1'] = pars[10:13][:]
self.params['foot2'] = pars[13:16][:]
# run double stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odeds.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
self.y_ds_seq.append(self.buf[:N+1, 1:].copy())
self.t_ds_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
raise SimulationError("Maximal simulation time (double stance) reached!")
self.t = self.buf[N,0]
#self.y_ds_seq.append(y2)
#self.t_ds_seq.append(t2)
self.odd_step = not self.odd_step
return self.t_ss_seq[-1], self.y_ss_seq[-1], self.t_ds_seq[-1], self.y_ds_seq[-1]
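# NOTE: the return statement above ends do_step(); everything below it is an
# alternative event-based stepping implementation (using self.ode and the
# touchdown/takeoff event functions) that was superseded by the fastode path
# above and is never executed in this version of the model.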
if self.odd_step:
td_pars = self.params['lp2'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp2'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot2' # which foot position to update?
to_evt_fun = self.legfunc1 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc1 # function for refinement of DS
self.odd_step = False # next step is "even": leg "2" in single stance on ground
else:
td_pars = self.params['lp1'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp1'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot1' # which foot position to update?
to_evt_fun = self.legfunc2 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc2 # function for refinement of DS
self.odd_step = True # next step is "odd": leg "1" in single stance on ground
# stage 1a: simulate until vy=0
self.singleStance = True
self.ode.event = self.evt_vy0
if self.state[4] <= 0:
self.failed = True
self.ErrMsg = ("initial vertical velocity < 0: single " +
"stance apex cannot be reached!")
t0 = self.t
tE = t0 + max_T
t_a, y_a = self.ode(self.state, t0, tE, dt=self.dt)
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
if self.DEBUG:
print "finished stage 1 (raw)"
if t_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. simulation time exceeded - " +
"this often indicates simulation failure")
else:
tt1, yy1 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 1 (fine)"
self.state = yy1
# compute forces
if not self.skip_forces:
forces_ss = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t_a, y_a)]
#self.forces_ss_seq.append()
t = [] # dummy, if next step is not executed
y = array([[]])
if not self.failed:
self.update_params_ss()
# stage 1b: simulate until touchdown of leading leg
# touchdown event of leading leg
self.ode.event = lambda t,states,traj,p: self.touchdown_event(t, states, traj, td_pars)
t0 = tt1
tE = t0 + max_T
t, y = self.ode(self.state, t0, tE, dt=self.dt)
if self.DEBUG:
print "finished stage 2 (raw)"
if t[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded in single stance - no "
+ "touchdown occurred")
else:
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
tt, yy = self.ode.refine(lambda tf, yf: self.touchdown_event_refine(tf, yf, td_pars))
if self.DEBUG:
print "finished stage 2 (fine)"
self.state = yy
forces_ss.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t[1:], y[1:, :])])
if not self.skip_forces:
self.forces_ss_seq.append(vstack(forces_ss))
if not self.failed:
# allow application of control law
self.t_td = tt
self.singleStance = False
self.update_params_td()
# accumulate results from stage 1a and stage 1b
if not self.failed:
t = hstack([t_a, t[1:]])
y = vstack([y_a, y[1:, :]])
# stage 2: double support
# compute leg 2 touchdown position
t2_a = []
y2_a = array([[]])
if not self.failed:
xf, yf, zf = self.get_touchdown(tt, yy, td_pars_2)
self.params[newfoot] = [xf, yf, zf]
# stage 2a: simulate until vy=0
self.ode.event = self.evt_vy0
t0 = tt
tE = t0 + max_T
t2_a, y2_a = self.ode(self.state, t0, tE, dt=self.dt)
if t2_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded - no nadir event " +
"detected in double stance")
if self.DEBUG:
print "finished stage 3 (raw)"
else:
tt2, yy2 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 3 (fine)"
self.state = yy2
if not self.skip_forces:
forces_ds = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_a, y2_a)]
if not self.failed:
# allow application of control law
self.update_params_ds()
# stage 2b: double stance - simulate until takeoff of trailing leg
# define and solve double stance ode
#ode = integro.odeDP5(self.dy_Stance, pars=self.params)
# event is takeoff of leg 1
t2_b = []
y2_b = array([[]])
if not self.failed:
self.ode.event = lambda t,states,traj,p: self.takeoff_event(t,
states, traj, p, legfun=to_evt_fun)
t0 = tt2
tE = t0 + max_T
t2_b, y2_b = self.ode(self.state, t0, tE, dt=self.dt)
if t2_b[-1] >= tE:
self.failed = True
self.ErrMsg = ("sim. time exeeded - takeoff of trailing leg " +
"not detected")
if self.DEBUG:
print "finished stage 4 (raw)"
else:
# refinement: force reaches zero
tt, yy = self.ode.refine(lambda tf, yf: to_evt_ds_refine(tf, yf, self.params))
if self.DEBUG:
print "finished stage 4 (fine)"
self.state = yy
if not self.skip_forces:
forces_ds.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_b[1:], y2_b[1:, :])])
self.forces_ds_seq.append(vstack(forces_ds))
# allow application of control law
self.t_to = tt
self.singleStance = True
self.update_params_to()
# accumulate results from stage 2a and stage 2b
if not self.failed:
t2 = hstack([t2_a, t2_b[1:]])
y2 = vstack([y2_a, y2_b[1:, :]])
#store simulation results
if not self.failed:
self.y_ss_seq.append(y)
self.y_ds_seq.append(y2)
self.t_ss_seq.append(t)
self.t_ds_seq.append(t2)
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
if not self.failed:
if len(t2) > 0:
self.t = t2[-1]
if self.failed:
raise SimulationError(self.ErrMsg)
return t, y, t2, y2
class BSLIP_newTD(BSLIP):
""" derived from BSLIP. The get_touchdown function is overwritten
such that the leg placement is w.r.t. walking direction.
*NOTE* This is also a show-case how to use inheritance for modelling here.
"""
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg w.r.t. CoM velocity
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
#for debugging
#print "v_com_angle:", a_v_com * 180. / pi
xf = y[0] + l0 * cos(alpha) * cos(beta + a_v_com)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta + a_v_com)
#for debugging
#print "foot: %2.3f,%2.3f,%2.3f," % ( xf,yf, zf)
return array([xf, yf, zf])
def ICeuklid_to_ICcircle(IC):
"""
converts from IC in Euclidean space to IC in circle parameters (rotation invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required
:args:
IC (6x float): the initial conditions in Euclidean space
:returns:
IC (5x float): the initial conditions in circular coordinates
"""
x,y,z,vx,vy,vz = IC
v = sqrt(vx**2 + vy**2 + vz**2)
l = sqrt(x**2 + y**2 + z**2)
#phiv = arctan2(vz, vx)
#phiv = arctan2(-vz, vx)
phiv = -arctan2(-vz, vx)
#phix = arctan2(-z, -x)
phix = arctan2(z, -x)
# warnings.warn('TODO: fix phi_x (add)')
# print "phix:", phix * 180 / pi
return [y, vy, v, l, phiv + phix]
def ICcircle_to_ICeuklid(IC):
"""
converts from IC in circle parameters to IC in Euclidean space (rotation invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required, assumed to be [0,0,0]
Further, it is assumed that the axis foot-CoM points in x-axis
:args:
IC (5x float): the initial conditions in circular coordinates
:returns:
IC (6x float): the initial conditions in Euclidean space
"""
y, vy, v, l, phiv = IC
z = 0
xsq = l**2 - y**2
if xsq < 0:
raise RuntimeError('Error in initial conditions: y > l!')
x = -sqrt(xsq)
vhsq = v**2 - vy**2
if vhsq < 0:
raise RuntimeError('Error in initial conditions: |vy| > |v|!')
v_horiz = sqrt(vhsq)
vx = v_horiz * cos(phiv)
#vz = v_horiz * sin(phiv)
vz = v_horiz * sin(phiv)
return [x, y, z, vx, vy, vz]
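# Round-trip sketch (illustration only, not part of the original module): with
# leg 1 assumed at the origin, converting to circle coordinates and back recovers
# the state up to a rotation about the vertical axis, e.g. using the demo IC
# defined at the bottom of this file:
#   ic = [-0.153942, 0.929608, 0, 1.16798, 0.593798, -0.045518]
#   ic2 = ICcircle_to_ICeuklid(ICeuklid_to_ICcircle(ic))
#   # ic2 keeps the same height y, vertical speed vy, |v| and CoM-foot distance as ic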
def circ2normal_param(fixParams, P):
"""
converts the pair (fixParams, P) into a full parameter dict for
a BSLIP model.
:args:
fixParams (dict): set of parameters for BSLIP, plus "delta_beta" key
P [4x float]: step parameters k1, k2, alpha, beta (last two: for both legs)
"""
k1, k2, alpha, beta = P
par = deepcopy(fixParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # place foot2 well behind the model (x = -2*l0)
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][2] = par['lp2'][2] = alpha
par['lp1'][3] = beta
par['lp2'][3] = -beta + par['delta_beta']
return par
def pred_to_p(baseParams, P):
"""
converts the pair (baseParams, P) into a full parameter dict for
a BSLIP model.
:args:
baseParams (dict): base set of parameters for BSLIP
P [8x float]: step parameters k1, k2, alpha1, alpha2, beta1, beta2,
l01, l02
"""
k1, k2, a1, a2, b1, b2, l01, l02 = P
par = deepcopy(baseParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # place foot2 well behind the model (x = -2*l0)
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][1] = l01
par['lp2'][1] = l02
par['lp1'][2] = a1
par['lp2'][2] = a2
par['lp1'][3] = b1
par['lp2'][3] = b2
return par
def new_stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (4x float)
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
model.skip_forces = True # speed up simulation a little bit (the attribute set in __init__ is 'skip_forces')
def stridefun(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: [k1, k2, alpha, beta]
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = circ2normal_param(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun
def stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (8x float): k1, k2, a1, a2, b1, b2, l01,
l02
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
model.skip_forces = True # speed up simulation a little bit (the attribute set in __init__ is 'skip_forces')
def stridefun2(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: (k1, k2, a1, a2, b1, b2, l01, l02)
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = pred_to_p(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun2
def vis_sim(mdl):
"""
quick hack that visualizes the simulation results from a model
:args:
mdl (BSLIP): model that has run some steps
"""
# visualize
fig = figure(figsize=(18,8))
fig.clf()
subplot(1,2,1)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 1], 'b-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,1], 'g-', linewidth=3, label=label2)
plot(f1[0], f1[1], 'kd', label=label3)
plot(f2[0], f2[1], 'cd', label=label4)
rep += 1
legend(loc='best')
xlabel('horizontal position [m]')
ylabel('vertical position [m]')
subplot(1,2,2)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 2], 'r-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,2], 'm-', linewidth=3, label=label2)
plot(f1[0], f1[2], 'kd', label=label3)
plot(f2[0], f2[2], 'cd', label=label4)
rep += 1
legend(loc='best')
#axis('equal')
xlabel('horizontal position [m]')
ylabel('lateral position [m]')
return fig
# define some example values
demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
demo_p = { 'foot1' : [0, 0, 0],
'foot2' : [-1.5, 0, 0],
'm' : 80,
'g' : [0, -9.81, 0],
'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
'delta_beta' : .05
}
demo_IC = array([-0.153942, 0.929608, 0, 1.16798, 0.593798, -0.045518])
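# Usage sketch (illustration only; assumes the compiled fastode solvers 'bslipss'
# and 'bslipds' plus the libshai/mutils dependencies are importable):
#   mdl = BSLIP_newTD(params=demo_p, IC=demo_IC)
#   mdl.init_ode()
#   for _ in range(4):      # four steps = two strides
#       mdl.do_step()
#   fig = vis_sim(mdl)      # side view and top view of the CoM trajectory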
|
antoinecarme/pyaf
|
refs/heads/master
|
tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_ConstantTrend_NoCycle_ARX.py
|
1
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['ConstantTrend'] , ['NoCycle'] , ['ARX'] );
|
chirilo/mozillians
|
refs/heads/master
|
vendor-local/lib/python/celery/signals.py
|
12
|
# -*- coding: utf-8 -*-
"""
celery.signals
~~~~~~~~~~~~~~
See :ref:`signals`.
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .utils.dispatch import Signal
task_sent = Signal(providing_args=["task_id", "task",
"args", "kwargs",
"eta", "taskset"])
task_prerun = Signal(providing_args=["task_id", "task",
"args", "kwargs"])
task_postrun = Signal(providing_args=["task_id", "task",
"args", "kwargs", "retval"])
task_failure = Signal(providing_args=["task_id", "exception",
"args", "kwargs", "traceback",
"einfo"])
celeryd_init = Signal(providing_args=["instance"])
worker_init = Signal(providing_args=[])
worker_process_init = Signal(providing_args=[])
worker_ready = Signal(providing_args=[])
worker_shutdown = Signal(providing_args=[])
setup_logging = Signal(providing_args=["loglevel", "logfile",
"format", "colorize"])
after_setup_logger = Signal(providing_args=["logger", "loglevel", "logfile",
"format", "colorize"])
after_setup_task_logger = Signal(providing_args=["logger", "loglevel",
"logfile", "format",
"colorize"])
beat_init = Signal(providing_args=[])
beat_embedded_init = Signal(providing_args=[])
eventlet_pool_started = Signal(providing_args=[])
eventlet_pool_preshutdown = Signal(providing_args=[])
eventlet_pool_postshutdown = Signal(providing_args=[])
eventlet_pool_apply = Signal(providing_args=["target", "args", "kwargs"])
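# Usage sketch (illustration only, not part of this vendored module): the Signal
# objects above expose the Django-style dispatch API, so a handler attaches with
# connect(), e.g.
#   from celery.signals import task_failure
#   def log_failure(task_id=None, exception=None, **kwargs):
#       print("task %s failed: %r" % (task_id, exception))
#   task_failure.connect(log_failure)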
|
code4futuredotorg/reeborg_tw
|
refs/heads/master
|
src/libraries/Brython3.2.3/Lib/atexit.py
|
743
|
"""allow programmer to define multiple exit functions to be executedupon normal program termination.
Two public functions, register and unregister, are defined.
"""
class __loader__(object):
pass
def _clear(*args,**kw):
"""_clear() -> None
Clear the list of previously registered exit functions."""
pass
def _run_exitfuncs(*args,**kw):
"""_run_exitfuncs() -> None
Run all registered exit functions."""
pass
def register(*args,**kw):
"""register(func, *args, **kwargs) -> func
Register a function to be executed upon normal program termination
func - function to be called at exit
args - optional arguments to pass to func
kwargs - optional keyword arguments to pass to func
func is returned to facilitate usage as a decorator."""
pass
def unregister(*args,**kw):
"""unregister(func) -> None
Unregister an exit function which was previously registered using
atexit.register
func - function to be unregistered"""
pass
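# Usage sketch (the stubs above are Brython placeholders; under CPython the
# documented behaviour is):
#   import atexit
#   @atexit.register
#   def goodbye():
#       print("process is exiting")
#   # goodbye() runs automatically at normal interpreter shutdown;
#   # atexit.unregister(goodbye) cancels it again.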
|
zvolsky/edga
|
refs/heads/master
|
controllers/fixdata.py
|
1
|
# coding: utf8
def browser():
import httpagentparser
agent = request.env.http_user_agent
return BEAUTIFY(httpagentparser.simple_detect(agent))
def browser2():
import httpagentparser
agent = request.env.http_user_agent
return BEAUTIFY(httpagentparser.detect(agent))
@auth.requires_membership('admin')
def listy_unique():
root = [0]
smazano = 0
predchozi = []
for lista in db().select(db.lista.ALL):
if [lista.vyrobce, lista.typ, lista.cena, lista.tovarni, lista.sirka]==predchozi:
root[1] = max(root[1], lista.nakupni)
del db.lista[lista.id]
smazano += 1
else:
if root[0]:
db.lista[root[0]] = {'nakupni': root[1]}
predchozi = [lista.vyrobce, lista.typ, lista.cena, lista.tovarni, lista.sirka]
root = [lista.id, lista.nakupni]
db.lista_bv.insert(barva=lista.nazev or lista.barva, cislo=lista.cislo, lista_id=root[0])
return 'smazano : %s' % smazano
|
reddymeghraj/showroom
|
refs/heads/master
|
erpnext/accounts/doctype/sales_invoice_item/sales_invoice_item.py
|
120
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from erpnext.controllers.print_settings import print_settings_for_item_table
class SalesInvoiceItem(Document):
def __setup__(self):
print_settings_for_item_table(self)
|
georgefrank/ansible-modules-extras
|
refs/heads/devel
|
database/misc/riak.py
|
67
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, James Martin <jmartin@basho.com>, Drew Kerrigan <dkerrigan@basho.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: riak
short_description: This module handles some common Riak operations
description:
- This module can be used to join nodes to a cluster, check
the status of the cluster.
version_added: "1.2"
author:
- "James Martin (@jsmartin)"
- "Drew Kerrigan (@drewkerrigan)"
options:
command:
description:
- The command you would like to perform against the cluster.
required: false
default: null
aliases: []
choices: ['ping', 'kv_test', 'join', 'plan', 'commit']
config_dir:
description:
- The path to the riak configuration directory
required: false
default: /etc/riak
aliases: []
http_conn:
description:
- The ip address and port that is listening for Riak HTTP queries
required: false
default: 127.0.0.1:8098
aliases: []
target_node:
description:
- The target node for certain operations (join, ping)
required: false
default: riak@127.0.0.1
aliases: []
wait_for_handoffs:
description:
- Number of seconds to wait for handoffs to complete.
required: false
default: null
aliases: []
type: 'int'
wait_for_ring:
description:
- Number of seconds to wait for all nodes to agree on the ring.
required: false
default: null
aliases: []
type: 'int'
wait_for_service:
description:
- Waits for a riak service to come online before continuing.
required: false
default: None
aliases: []
choices: ['kv']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
'''
EXAMPLES = '''
# Joins a Riak node to another node
- riak: command=join target_node=riak@10.1.1.1
# Wait for handoffs to finish. Use with async and poll.
- riak: wait_for_handoffs=yes
# Wait for riak_kv service to startup
- riak: wait_for_service=kv
'''
import time
import socket
import sys
try:
import json
except ImportError:
import simplejson as json
def ring_check(module, riak_admin_bin):
cmd = '%s ringready' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0 and 'TRUE All nodes agree on the ring' in out:
return True
else:
return False
def main():
module = AnsibleModule(
argument_spec=dict(
command=dict(required=False, default=None, choices=[
'ping', 'kv_test', 'join', 'plan', 'commit']),
config_dir=dict(default='/etc/riak'),
http_conn=dict(required=False, default='127.0.0.1:8098'),
target_node=dict(default='riak@127.0.0.1', required=False),
wait_for_handoffs=dict(default=False, type='int'),
wait_for_ring=dict(default=False, type='int'),
wait_for_service=dict(
required=False, default=None, choices=['kv']),
validate_certs = dict(default='yes', type='bool'))
)
command = module.params.get('command')
config_dir = module.params.get('config_dir')
http_conn = module.params.get('http_conn')
target_node = module.params.get('target_node')
wait_for_handoffs = module.params.get('wait_for_handoffs')
wait_for_ring = module.params.get('wait_for_ring')
wait_for_service = module.params.get('wait_for_service')
validate_certs = module.params.get('validate_certs')
#make sure riak commands are on the path
riak_bin = module.get_bin_path('riak')
riak_admin_bin = module.get_bin_path('riak-admin')
timeout = time.time() + 120
while True:
if time.time() > timeout:
module.fail_json(msg='Timeout, could not fetch Riak stats.')
(response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5)
if info['status'] == 200:
stats_raw = response.read()
break
time.sleep(5)
# here we attempt to load those stats,
try:
stats = json.loads(stats_raw)
except:
module.fail_json(msg='Could not parse Riak stats.')
node_name = stats['nodename']
nodes = stats['ring_members']
ring_size = stats['ring_creation_size']
rc, out, err = module.run_command([riak_bin, 'version'] )
version = out.strip()
result = dict(node_name=node_name,
nodes=nodes,
ring_size=ring_size,
version=version)
if command == 'ping':
cmd = '%s ping %s' % ( riak_bin, target_node )
rc, out, err = module.run_command(cmd)
if rc == 0:
result['ping'] = out
else:
module.fail_json(msg=out)
elif command == 'kv_test':
cmd = '%s test' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['kv_test'] = out
else:
module.fail_json(msg=out)
elif command == 'join':
if nodes.count(node_name) == 1 and len(nodes) > 1:
result['join'] = 'Node is already in cluster or staged to be in cluster.'
else:
cmd = '%s cluster join %s' % (riak_admin_bin, target_node)
rc, out, err = module.run_command(cmd)
if rc == 0:
result['join'] = out
result['changed'] = True
else:
module.fail_json(msg=out)
elif command == 'plan':
cmd = '%s cluster plan' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['plan'] = out
if 'Staged Changes' in out:
result['changed'] = True
else:
module.fail_json(msg=out)
elif command == 'commit':
cmd = '%s cluster commit' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if rc == 0:
result['commit'] = out
result['changed'] = True
else:
module.fail_json(msg=out)
# this could take a while, recommend to run in async mode
if wait_for_handoffs:
timeout = time.time() + wait_for_handoffs
while True:
cmd = '%s transfers' % riak_admin_bin
rc, out, err = module.run_command(cmd)
if 'No transfers active' in out:
result['handoffs'] = 'No transfers active.'
break
time.sleep(10)
if time.time() > timeout:
module.fail_json(msg='Timeout waiting for handoffs.')
if wait_for_service:
cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ]
rc, out, err = module.run_command(cmd)
result['service'] = out
if wait_for_ring:
timeout = time.time() + wait_for_ring
while True:
if ring_check(module, riak_admin_bin):
break
time.sleep(10)
if time.time() > timeout:
module.fail_json(msg='Timeout waiting for nodes to agree on ring.')
result['ring_ready'] = ring_check(module, riak_admin_bin)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
wengyanqing/incubator-hawq
|
refs/heads/master
|
tools/bin/pythonSrc/unittest2-0.5.1/unittest2/test/test_unittest2_with.py
|
111
|
from __future__ import with_statement
import unittest2
from unittest2.test.support import OldTestResult, catch_warnings
import warnings
# needed to enable the deprecation warnings
warnings.simplefilter('default')
class TestWith(unittest2.TestCase):
"""Tests that use the with statement live in this
module so that all other tests can be run with Python 2.4.
"""
def testAssertRaisesExcValue(self):
class ExceptionMock(Exception):
pass
def Stub(foo):
raise ExceptionMock(foo)
v = "particular value"
ctx = self.assertRaises(ExceptionMock)
with ctx:
Stub(v)
e = ctx.exception
self.assertIsInstance(e, ExceptionMock)
self.assertEqual(e.args[0], v)
def test_assertRaises(self):
def _raise(e):
raise e
self.assertRaises(KeyError, _raise, KeyError)
self.assertRaises(KeyError, _raise, KeyError("key"))
try:
self.assertRaises(KeyError, lambda: None)
except self.failureException, e:
self.assertIn("KeyError not raised", e.args)
else:
self.fail("assertRaises() didn't fail")
try:
self.assertRaises(KeyError, _raise, ValueError)
except ValueError:
pass
else:
self.fail("assertRaises() didn't let exception pass through")
with self.assertRaises(KeyError) as cm:
try:
raise KeyError
except Exception, e:
raise
self.assertIs(cm.exception, e)
with self.assertRaises(KeyError):
raise KeyError("key")
try:
with self.assertRaises(KeyError):
pass
except self.failureException, e:
self.assertIn("KeyError not raised", e.args)
else:
self.fail("assertRaises() didn't fail")
try:
with self.assertRaises(KeyError):
raise ValueError
except ValueError:
pass
else:
self.fail("assertRaises() didn't let exception pass through")
def test_assert_dict_unicode_error(self):
with catch_warnings(record=True):
# This causes a UnicodeWarning due to its craziness
one = ''.join(chr(i) for i in range(255))
# this used to cause a UnicodeDecodeError constructing the failure msg
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'foo': one}, {'foo': u'\uFFFD'})
def test_formatMessage_unicode_error(self):
with catch_warnings(record=True):
# This causes a UnicodeWarning due to its craziness
one = ''.join(chr(i) for i in range(255))
# this used to cause a UnicodeDecodeError constructing msg
self._formatMessage(one, u'\uFFFD')
def assertOldResultWarning(self, test, failures):
with catch_warnings(record=True) as log:
result = OldTestResult()
test.run(result)
self.assertEqual(len(result.failures), failures)
warning, = log
self.assertIs(warning.category, DeprecationWarning)
def test_old_testresult(self):
class Test(unittest2.TestCase):
def testSkip(self):
self.skipTest('foobar')
@unittest2.expectedFailure
def testExpectedFail(self):
raise TypeError
@unittest2.expectedFailure
def testUnexpectedSuccess(self):
pass
for test_name, should_pass in (('testSkip', True),
('testExpectedFail', True),
('testUnexpectedSuccess', False)):
test = Test(test_name)
self.assertOldResultWarning(test, int(not should_pass))
def test_old_testresult_setup(self):
class Test(unittest2.TestCase):
def setUp(self):
self.skipTest('no reason')
def testFoo(self):
pass
self.assertOldResultWarning(Test('testFoo'), 0)
def test_old_testresult_class(self):
class Test(unittest2.TestCase):
def testFoo(self):
pass
Test = unittest2.skip('no reason')(Test)
self.assertOldResultWarning(Test('testFoo'), 0)
def testPendingDeprecationMethodNames(self):
"""Test fail* methods pending deprecation, they will warn in 3.2.
Do not use these methods. They will go away in 3.3.
"""
with catch_warnings(record=True):
self.failIfEqual(3, 5)
self.failUnlessEqual(3, 3)
self.failUnlessAlmostEqual(2.0, 2.0)
self.failIfAlmostEqual(3.0, 5.0)
self.failUnless(True)
self.failUnlessRaises(TypeError, lambda _: 3.14 + u'spam')
self.failIf(False)
if __name__ == '__main__':
unittest2.main()
|
dmonopoly/livefeedback
|
refs/heads/master
|
classroom/wsgi.py
|
1
|
"""
WSGI config for classroom project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "classroom.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
jskulski/Flask-FeatureFlags
|
refs/heads/master
|
tests/fixtures.py
|
3
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template_string
import flask_featureflags as feature_flags
FEATURE_NAME = u"śőmé féátúŕé thíńg"
FEATURE_IS_ON = u'OK'
FEATURE_IS_OFF = u"flag is off"
FLAG_CONFIG = feature_flags.FEATURE_FLAGS_CONFIG
RAISE_ERROR = feature_flags.RAISE_ERROR_ON_MISSING_FEATURES
def NullFlagHandler(feature):
""" This handler always returns False """
return False
def AlwaysOffFlagHandler(feature):
""" This handler always returns False and halts any further checking. """
raise feature_flags.StopCheckingFeatureFlags
def AlwaysOnFlagHandler(feature):
""" This handler always returns True """
return True
# This is a toy app that demos the features we're trying to test.
app = Flask(__name__)
feature_setup = feature_flags.FeatureFlag(app)
@app.route(u"/null")
def redirect_destination():
return FEATURE_IS_ON
@app.route(u"/decorator")
@feature_flags.is_active_feature(FEATURE_NAME)
def feature_decorator():
return FEATURE_IS_ON
@app.route(u"/redirect_to")
@feature_flags.is_active_feature(FEATURE_NAME, redirect_to='/null')
def redirect_to_with_decorator():
return FEATURE_IS_ON
@app.route(u"/redirect")
@feature_flags.is_active_feature(FEATURE_NAME, redirect='redirect_destination')
def redirect_with_decorator():
return FEATURE_IS_ON
@app.route(u"/view")
def view_based_feature_flag():
if feature_flags.is_active(FEATURE_NAME):
return FEATURE_IS_ON
else:
return FEATURE_IS_OFF
@app.route(u"/template")
def template_based_feature_flag():
template_string = u"""
{% if 'śőmé féátúŕé thíńg' is active_feature %}
OK
{% else %}
flag is off
{% endif %}"""
return render_template_string(template_string)
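# Test sketch (illustration only; assumes flags are read from
# app.config[FLAG_CONFIG] as a dict of flag name -> bool):
#   app.config[FLAG_CONFIG] = {FEATURE_NAME: True}
#   with app.test_client() as client:
#       assert client.get(u"/view").data.decode("utf-8") == FEATURE_IS_ON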
|
omerucel/basitapi-account
|
refs/heads/master
|
setup.py
|
1
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
import basitapi_account
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='basitapi-account',
version=basitapi_account.__version__,
description=read('DESCRIPTION'),
long_description=read('README.md'),
license='The MIT LICENCE',
platforms=['OS Independent'],
keywords='basitapi, django, api, account, access-token',
author='Ömer ÜCEL',
author_email='omerucel@gmail.com',
url='https://github.com/omerucel/basitapi-account',
packages=find_packages(exclude=['tests', 'cover']),
include_package_data=True,
install_requires=[
'Django',
]
)
|
stvreumi/electronic-blackboard
|
refs/heads/master
|
broadcast_api.py
|
1
|
from mysql import mysql
from mysql import DB_Exception
from datetime import date
from datetime import datetime
from datetime import timedelta
from dataAccessObjects import *
import os.path
import json
def getDisplayContent(sche_target_id,return_msg):
"""
Get the display content of the display target.
Args:
sche_target_id: The id of display target
return_msg: Output parameter
"""
targetIdPrefix = sche_target_id[:4]
if targetIdPrefix == "imge":
with ImageDao() as imageDao:
file_info = imageDao.getIdSysName(Id=sche_target_id)
return_msg["display_type"] = "image"
elif targetIdPrefix == "text":
with TextDao() as textDao:
file_info = textDao.getIdSysName(Id=sche_target_id)
return_msg["display_type"] = "text"
else :
raise Exception("target id type error {}".format(targetIdPrefix))
try:
type_id = file_info['typeId']
system_file_name = file_info['systemName']
return_msg["like_count"] = file_info['likeCount']
except:
raise Exception("no file record")
with DataTypeDao() as dataTypeDao:
type_dir = dataTypeDao.getTypeDir(typeId=type_id)
type_name = dataTypeDao.getTypeName(typeId=type_id)
if type_dir == None or type_name == None:
raise Exception("No such type id {}".format(type_id))
targetFile = os.path.join("static", type_dir, system_file_name)
return_msg["type_name"] = type_name
if return_msg["display_type"] == "image":
return_msg["img"] = targetFile
elif return_msg["display_type"] == "text":
if not os.path.isfile(targetFile) :
raise Exception("Text file doesn't exists")
else :
with open(targetFile,"r") as fp:
file_content = json.load(fp)
if file_content.get('text_type','') == 'news':
return_msg['display_type'] = 'news'
return_msg.update(file_content)
def load_schedule():
"""
Get the next display target
Returns:
return_msg: Display target filled with its attributes
"""
try:
return_msg = {}
return_msg["result"] = "fail"
#find next schedule
with ScheduleDao() as scheduleDao:
next_schedule = scheduleDao.getNextSchedule()
if next_schedule is None:
return_msg["error"] = "no schedule"
return return_msg
return_msg["display_time"] = int(next_schedule['display_time'])
sche_target_id = next_schedule['sche_target_id']
getDisplayContent(sche_target_id,return_msg)
if return_msg["display_type"] == "image":
with ImageDao() as imageDao:
imageDao.addDisplayCount(sche_target_id)
elif return_msg["display_type"] in ["text","news"]:
with TextDao() as textDao:
textDao.addDisplayCount(sche_target_id)
return_msg["result"] = "success"
return return_msg
except DB_Exception as e:
return_msg["error"] = e.args[1]
return return_msg
except Exception as e:
return_msg["error"] = str(e)
return return_msg
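# Usage sketch (illustration; display_content() and log_error() are hypothetical
# callers, not part of this project):
#   msg = load_schedule()
#   if msg["result"] == "success":
#       display_content(msg)   # e.g. render msg["img"] or the text payload
#   else:
#       log_error(msg["error"])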
|
hectord/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/forms/localflavor/is_.py
|
89
|
from django.contrib.localflavor.is_.forms import (ISIdNumberField,
ISPhoneNumberField, ISPostalCodeSelect)
from utils import LocalFlavorTestCase
class ISLocalFlavorTests(LocalFlavorTestCase):
def test_ISPostalCodeSelect(self):
f = ISPostalCodeSelect()
out = u'''<select name="foo">
<option value="101">101 Reykjav\xedk</option>
<option value="103">103 Reykjav\xedk</option>
<option value="104">104 Reykjav\xedk</option>
<option value="105">105 Reykjav\xedk</option>
<option value="107">107 Reykjav\xedk</option>
<option value="108">108 Reykjav\xedk</option>
<option value="109">109 Reykjav\xedk</option>
<option value="110">110 Reykjav\xedk</option>
<option value="111">111 Reykjav\xedk</option>
<option value="112">112 Reykjav\xedk</option>
<option value="113">113 Reykjav\xedk</option>
<option value="116">116 Kjalarnes</option>
<option value="121">121 Reykjav\xedk</option>
<option value="123">123 Reykjav\xedk</option>
<option value="124">124 Reykjav\xedk</option>
<option value="125">125 Reykjav\xedk</option>
<option value="127">127 Reykjav\xedk</option>
<option value="128">128 Reykjav\xedk</option>
<option value="129">129 Reykjav\xedk</option>
<option value="130">130 Reykjav\xedk</option>
<option value="132">132 Reykjav\xedk</option>
<option value="150">150 Reykjav\xedk</option>
<option value="155">155 Reykjav\xedk</option>
<option value="170">170 Seltjarnarnes</option>
<option value="172">172 Seltjarnarnes</option>
<option value="190">190 Vogar</option>
<option value="200">200 K\xf3pavogur</option>
<option value="201">201 K\xf3pavogur</option>
<option value="202">202 K\xf3pavogur</option>
<option value="203">203 K\xf3pavogur</option>
<option value="210">210 Gar\xf0ab\xe6r</option>
<option value="212">212 Gar\xf0ab\xe6r</option>
<option value="220">220 Hafnarfj\xf6r\xf0ur</option>
<option value="221">221 Hafnarfj\xf6r\xf0ur</option>
<option value="222">222 Hafnarfj\xf6r\xf0ur</option>
<option value="225">225 \xc1lftanes</option>
<option value="230">230 Reykjanesb\xe6r</option>
<option value="232">232 Reykjanesb\xe6r</option>
<option value="233">233 Reykjanesb\xe6r</option>
<option value="235">235 Keflav\xedkurflugv\xf6llur</option>
<option value="240">240 Grindav\xedk</option>
<option value="245">245 Sandger\xf0i</option>
<option value="250">250 Gar\xf0ur</option>
<option value="260">260 Reykjanesb\xe6r</option>
<option value="270">270 Mosfellsb\xe6r</option>
<option value="300">300 Akranes</option>
<option value="301">301 Akranes</option>
<option value="302">302 Akranes</option>
<option value="310">310 Borgarnes</option>
<option value="311">311 Borgarnes</option>
<option value="320">320 Reykholt \xed Borgarfir\xf0i</option>
<option value="340">340 Stykkish\xf3lmur</option>
<option value="345">345 Flatey \xe1 Brei\xf0afir\xf0i</option>
<option value="350">350 Grundarfj\xf6r\xf0ur</option>
<option value="355">355 \xd3lafsv\xedk</option>
<option value="356">356 Sn\xe6fellsb\xe6r</option>
<option value="360">360 Hellissandur</option>
<option value="370">370 B\xfa\xf0ardalur</option>
<option value="371">371 B\xfa\xf0ardalur</option>
<option value="380">380 Reykh\xf3lahreppur</option>
<option value="400">400 \xcdsafj\xf6r\xf0ur</option>
<option value="401">401 \xcdsafj\xf6r\xf0ur</option>
<option value="410">410 Hn\xedfsdalur</option>
<option value="415">415 Bolungarv\xedk</option>
<option value="420">420 S\xfa\xf0av\xedk</option>
<option value="425">425 Flateyri</option>
<option value="430">430 Su\xf0ureyri</option>
<option value="450">450 Patreksfj\xf6r\xf0ur</option>
<option value="451">451 Patreksfj\xf6r\xf0ur</option>
<option value="460">460 T\xe1lknafj\xf6r\xf0ur</option>
<option value="465">465 B\xedldudalur</option>
<option value="470">470 \xdeingeyri</option>
<option value="471">471 \xdeingeyri</option>
<option value="500">500 Sta\xf0ur</option>
<option value="510">510 H\xf3lmav\xedk</option>
<option value="512">512 H\xf3lmav\xedk</option>
<option value="520">520 Drangsnes</option>
<option value="522">522 Kj\xf6rvogur</option>
<option value="523">523 B\xe6r</option>
<option value="524">524 Nor\xf0urfj\xf6r\xf0ur</option>
<option value="530">530 Hvammstangi</option>
<option value="531">531 Hvammstangi</option>
<option value="540">540 Bl\xf6ndu\xf3s</option>
<option value="541">541 Bl\xf6ndu\xf3s</option>
<option value="545">545 Skagastr\xf6nd</option>
<option value="550">550 Sau\xf0\xe1rkr\xf3kur</option>
<option value="551">551 Sau\xf0\xe1rkr\xf3kur</option>
<option value="560">560 Varmahl\xed\xf0</option>
<option value="565">565 Hofs\xf3s</option>
<option value="566">566 Hofs\xf3s</option>
<option value="570">570 Flj\xf3t</option>
<option value="580">580 Siglufj\xf6r\xf0ur</option>
<option value="600">600 Akureyri</option>
<option value="601">601 Akureyri</option>
<option value="602">602 Akureyri</option>
<option value="603">603 Akureyri</option>
<option value="610">610 Greniv\xedk</option>
<option value="611">611 Gr\xedmsey</option>
<option value="620">620 Dalv\xedk</option>
<option value="621">621 Dalv\xedk</option>
<option value="625">625 \xd3lafsfj\xf6r\xf0ur</option>
<option value="630">630 Hr\xedsey</option>
<option value="640">640 H\xfasav\xedk</option>
<option value="641">641 H\xfasav\xedk</option>
<option value="645">645 Fossh\xf3ll</option>
<option value="650">650 Laugar</option>
<option value="660">660 M\xfdvatn</option>
<option value="670">670 K\xf3pasker</option>
<option value="671">671 K\xf3pasker</option>
<option value="675">675 Raufarh\xf6fn</option>
<option value="680">680 \xde\xf3rsh\xf6fn</option>
<option value="681">681 \xde\xf3rsh\xf6fn</option>
<option value="685">685 Bakkafj\xf6r\xf0ur</option>
<option value="690">690 Vopnafj\xf6r\xf0ur</option>
<option value="700">700 Egilssta\xf0ir</option>
<option value="701">701 Egilssta\xf0ir</option>
<option value="710">710 Sey\xf0isfj\xf6r\xf0ur</option>
<option value="715">715 Mj\xf3ifj\xf6r\xf0ur</option>
<option value="720">720 Borgarfj\xf6r\xf0ur eystri</option>
<option value="730">730 Rey\xf0arfj\xf6r\xf0ur</option>
<option value="735">735 Eskifj\xf6r\xf0ur</option>
<option value="740">740 Neskaupsta\xf0ur</option>
<option value="750">750 F\xe1skr\xfa\xf0sfj\xf6r\xf0ur</option>
<option value="755">755 St\xf6\xf0varfj\xf6r\xf0ur</option>
<option value="760">760 Brei\xf0dalsv\xedk</option>
<option value="765">765 Dj\xfapivogur</option>
<option value="780">780 H\xf6fn \xed Hornafir\xf0i</option>
<option value="781">781 H\xf6fn \xed Hornafir\xf0i</option>
<option value="785">785 \xd6r\xe6fi</option>
<option value="800">800 Selfoss</option>
<option value="801">801 Selfoss</option>
<option value="802">802 Selfoss</option>
<option value="810">810 Hverager\xf0i</option>
<option value="815">815 \xdeorl\xe1ksh\xf6fn</option>
<option value="820">820 Eyrarbakki</option>
<option value="825">825 Stokkseyri</option>
<option value="840">840 Laugarvatn</option>
<option value="845">845 Fl\xfa\xf0ir</option>
<option value="850">850 Hella</option>
<option value="851">851 Hella</option>
<option value="860">860 Hvolsv\xf6llur</option>
<option value="861">861 Hvolsv\xf6llur</option>
<option value="870">870 V\xedk</option>
<option value="871">871 V\xedk</option>
<option value="880">880 Kirkjub\xe6jarklaustur</option>
<option value="900">900 Vestmannaeyjar</option>
<option value="902">902 Vestmannaeyjar</option>
</select>'''
self.assertEqual(f.render('foo', 'bar'), out)
def test_ISIdNumberField(self):
error_atleast = [u'Ensure this value has at least 10 characters (it has 9).']
error_invalid = [u'Enter a valid Icelandic identification number. The format is XXXXXX-XXXX.']
error_atmost = [u'Ensure this value has at most 11 characters (it has 12).']
error_notvalid = [u'The Icelandic identification number is not valid.']
valid = {
'2308803449': '230880-3449',
'230880-3449': '230880-3449',
'230880 3449': '230880-3449',
'2308803440': '230880-3440',
}
invalid = {
'230880343': error_atleast + error_invalid,
'230880343234': error_atmost + error_invalid,
'abcdefghijk': error_invalid,
'2308803439': error_notvalid,
}
self.assertFieldOutput(ISIdNumberField, valid, invalid)
def test_ISPhoneNumberField(self):
error_invalid = [u'Enter a valid value.']
error_atleast = [u'Ensure this value has at least 7 characters (it has 6).']
error_atmost = [u'Ensure this value has at most 8 characters (it has 9).']
valid = {
'1234567': '1234567',
'123 4567': '1234567',
'123-4567': '1234567',
}
invalid = {
'123-456': error_invalid,
'123456': error_atleast + error_invalid,
'123456555': error_atmost + error_invalid,
'abcdefg': error_invalid,
' 1234567 ': error_atmost + error_invalid,
' 12367 ': error_invalid
}
self.assertFieldOutput(ISPhoneNumberField, valid, invalid)
|
clld/lexibank
|
refs/heads/master
|
lexibank/views.py
|
9480
|
#
|
cpennington/edx-platform
|
refs/heads/master
|
lms/djangoapps/ccx/api/v0/serializers.py
|
5
|
""" CCX API v0 Serializers. """
import six
from ccx_keys.locator import CCXLocator
from rest_framework import serializers
from lms.djangoapps.ccx.models import CustomCourseForEdX
class CCXCourseSerializer(serializers.ModelSerializer):
"""
Serializer for CCX courses
"""
ccx_course_id = serializers.SerializerMethodField()
master_course_id = serializers.CharField(source='course_id')
display_name = serializers.CharField()
coach_email = serializers.EmailField(source='coach.email')
start = serializers.CharField(allow_blank=True)
due = serializers.CharField(allow_blank=True)
max_students_allowed = serializers.IntegerField(source='max_student_enrollments_allowed')
course_modules = serializers.SerializerMethodField()
class Meta(object):
model = CustomCourseForEdX
fields = (
"ccx_course_id",
"master_course_id",
"display_name",
"coach_email",
"start",
"due",
"max_students_allowed",
"course_modules",
)
read_only_fields = (
"ccx_course_id",
"master_course_id",
"start",
"due",
)
@staticmethod
def get_ccx_course_id(obj):
"""
Getter for the CCX Course ID
"""
return six.text_type(CCXLocator.from_course_locator(obj.course.id, obj.id))
@staticmethod
def get_course_modules(obj):
"""
Getter for the Course Modules. The list is stored in a compressed field.
"""
return obj.structure or []
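# --- Editor's sketch (not part of the original module) ---
# A hedged illustration of how this serializer might be used; `ccx` is assumed
# to be a saved CustomCourseForEdX instance with an associated coach and course.
#
#     data = CCXCourseSerializer(ccx).data
#     data["ccx_course_id"]      # CCXLocator string derived from the parent course
#     data["course_modules"]     # [] until a structure has been saved on the CCX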
|
ruchikd/Algorithms
|
refs/heads/master
|
Python/PalindromePairs/palindromPairsPart2.py
|
1
|
def palindromePairs(words):
if words is None:
return None
bigList = []
for word in words:
for w in words:
if w == word:
continue
posPalindrome = w + word
if posPalindrome == posPalindrome[::-1]:
smallList = []
smallList.append(words.index(w))
smallList.append(words.index(word))
if smallList not in bigList:
bigList.append(smallList)
posPalindrome = word + w
if posPalindrome == posPalindrome[::-1]:
smallList = []
smallList.append(words.index(word))
smallList.append(words.index(w))
if smallList not in bigList:
bigList.append(smallList)
return bigList
def main():
words = ["abcd","dcba","lls","s","sssll"]
print palindromePairs(words)
if __name__ == '__main__':
main()
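# Editor's note (hedged, not part of the original solution): for the sample
# input above, the index pairs whose concatenation is a palindrome are, in some
# order, [0, 1] ("abcd"+"dcba"), [1, 0] ("dcba"+"abcd"), [3, 2] ("s"+"lls") and
# [2, 4] ("lls"+"sssll"). The nested loops plus words.index() make this roughly
# O(n^2 * k); a dictionary of reversed words would avoid the repeated scans.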
|
maxtorete/frappe
|
refs/heads/develop
|
frappe/patches/v7_1/disabled_print_settings_for_custom_print_format.py
|
17
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype('Print Format')
frappe.db.sql("""
update
`tabPrint Format`
set
align_labels_right = 0, line_breaks = 0, show_section_headings = 0
where
custom_format = 1
""")
|
sassoftware/saspy
|
refs/heads/main
|
saspy/tests/test_sastabulate.py
|
2
|
import unittest
from contextlib import redirect_stdout
from io import StringIO
from re import match
import pandas as pd
import saspy
from saspy.sastabulate import Tabulate, Class, Var, Statistic, Grouping
class TestSASTabulate(unittest.TestCase):
def setUp(self):
        # Establish a session using the default configuration; pass
        # cfgname=saspy.SAScfg.SAS_config_names[0] to pin the first configured entry.
        self.sas = saspy.SASsession()
self.assertIsInstance(self.sas, saspy.SASsession, msg="sas = saspy.SASsession(...) failed")
# load a sas-help dataset
self.cars = self.sas.sasdata('cars', libref='sashelp', results='text')
def tearDown(self):
if self.sas:
self.sas._endsas()
def test_tabulate(self):
# check for tabulate being available on data set
self.assertIsInstance(self.cars.tabulate, Tabulate, msg="tabulate should be available on data sets")
def test_classes(self):
# extract a class with options
by_drivetrain = self.cars.tabulate.as_class('drivetrain', label="Drive", all="Total")
self.assertIsInstance(by_drivetrain, Class, msg=".as_class() method failed")
self.assertEqual(by_drivetrain.label, "Drive", msg=".as_class() 'label' keyword not applied")
self.assertEqual(by_drivetrain.all, "Total", msg=".as_class() 'all' keyword not applied")
# test apply option functionally using .with_()
with_adjusted_label = by_drivetrain.with_(label="Train")
self.assertEqual(with_adjusted_label.label, "Train", msg=".with_() method did not apply keyword")
# should not mutate original; intended for composition
self.assertEqual(by_drivetrain.label, "Drive", msg=".with_() should clone, not mutate")
# test basic serialization
self.assertEqual(str(by_drivetrain), "(drivetrain='Drive' ALL='Total')",
msg="error with serialization of tabulation class with arguments")
# test get multiple classes as tuple
by_origin, by_type = self.cars.tabulate.classes('origin', 'type')
self.assertIsInstance(by_origin, Class, msg=".classes() method failed")
self.assertIsInstance(by_type, Class, msg=".classes() method failed")
def test_vars(self):
# extract a variable with options
horsepower = self.cars.tabulate.as_var('horsepower', label="Horse")
self.assertIsInstance(horsepower, Var, msg=".as_var() method failed")
self.assertEqual(horsepower.label, "Horse", msg=".as_var() 'label' keyword not applied")
# test apply option functionally using .with_()
with_adjusted_label = horsepower.with_(label="Power")
self.assertEqual(with_adjusted_label.label, "Power", msg=".with_() method did not apply keyword")
# should not mutate original; intended for composition
self.assertEqual(horsepower.label, "Horse", msg=".with_() should clone, not mutate")
# test basic serialization
self.assertEqual(str(horsepower), "horsepower='Horse'",
msg="error with serialization of tabulation var with arguments")
# test get multiple vars as tuple
enginesize, cylinders = self.cars.tabulate.vars('enginesize', 'cylinders')
self.assertIsInstance(enginesize, Var, msg=".vars() method failed")
self.assertIsInstance(cylinders, Var, msg=".vars() method failed")
def test_stats(self):
# create a statistic with options
stdev = self.cars.tabulate.stat('std', label="StDev", format='5.2')
self.assertIsInstance(stdev, Statistic, msg=".stat() method failed")
self.assertEqual(stdev.label, "StDev", msg=".stat() 'label' keyword not applied")
self.assertEqual(stdev.format, "5.2", msg=".stat() 'format' keyword not applied")
# test apply option functionally using .with_()
with_adjusted_format = stdev.with_(format="6.2")
self.assertEqual(with_adjusted_format.format, "6.2", msg=".with_() method did not apply keyword")
# should not mutate original; intended for composition
self.assertEqual(stdev.format, "5.2", msg=".with_() should clone, not mutate")
# test basic serialization
self.assertEqual(str(stdev), "std='StDev'*f=5.2",
msg="error with serialization of tabulation statistic with arguments")
# test get multiple stats as tuple
mean, n = self.cars.tabulate.stats('mean', 'n')
self.assertIsInstance(mean, Statistic, msg=".stats() method failed")
self.assertIsInstance(n, Statistic, msg=".stats() method failed")
def test_hierarchy(self):
by_origin, by_type = self.cars.tabulate.classes('origin', 'type')
enginesize, cylinders = self.cars.tabulate.vars('enginesize', 'cylinders')
mean, n = self.cars.tabulate.stats('mean', 'n')
# test valid same-level concatenations
concat_classes = by_origin | by_type
self.assertIsInstance(concat_classes, Grouping, msg="concatenation of classes failed")
concat_vars = enginesize | cylinders
self.assertIsInstance(concat_vars, Grouping, msg="concatenation of vars failed")
concat_stats = mean | n
self.assertIsInstance(concat_stats, Grouping, msg="concatenation of stats failed")
# test valid nestings; applies right side as child of left side
nest_classes = by_origin * by_type
self.assertIsInstance(nest_classes.child, Class, msg="nesting of classes failed")
nest_class_var = by_origin * enginesize
self.assertIsInstance(nest_class_var.child, Var, msg="nesting of var under class failed")
nest_var_stat = enginesize * mean
self.assertIsInstance(nest_var_stat.child, Statistic, msg="nesting of statistic under var failed")
# nesting of concatenations should work
nest_concats = (by_origin | by_type) * (mean | n)
self.assertIsInstance(nest_concats, Grouping, msg="nesting of concatenated elements failed")
self.assertIsInstance(nest_concats.child, Grouping, msg="nesting of concatenated elements failed")
# test invalid nestings for appropriate rejection
self.assertRaises(SyntaxError, lambda: enginesize * by_origin) # class under var
self.assertRaises(SyntaxError, lambda: mean * enginesize) # var under stat
self.assertRaises(SyntaxError, lambda: n * mean) # stat under stat
self.assertRaises(SyntaxError, lambda: mean * by_origin) # class under stat
def test_composition_serialization(self):
by_origin, by_type, by_drivetrain = self.cars.tabulate.classes('origin', 'type', 'drivetrain')
enginesize, cylinders = self.cars.tabulate.vars('enginesize', 'cylinders')
mean, n = self.cars.tabulate.stats('mean', 'n')
        # compose a larger fragment using all options, check its serialization
my_tabulation = (
(by_origin | by_type) * by_drivetrain.with_(all="Total") * enginesize
* (mean.with_(label="Average") | n)
)
self.assertEqual(
str(my_tabulation),
"((origin type) * (drivetrain ALL='Total') * enginesize * (mean='Average' n))",
msg="serialized table composition did not match expectation"
)
def test_procedure(self):
by_origin, by_type, by_drivetrain = self.cars.tabulate.classes('origin', 'type', 'drivetrain')
enginesize, cylinders = self.cars.tabulate.vars('enginesize', 'cylinders')
mean, n = self.cars.tabulate.stats('mean', 'n')
# check the full generated syntax of a command
def get_generated_code(method: str) -> dict:
captured = StringIO()
with redirect_stdout(captured):
self.sas.teach_me_SAS(True)
method()
self.sas.teach_me_SAS(False)
lines = captured.getvalue().split('\n')
# break submitted code into statements for assertions
            match_keyword = r'^\s*(\w+?)\s'
return dict(
(match(match_keyword, l).group(1), l) for l in lines if match(match_keyword, l)
)
invocation = lambda: \
self.cars.tabulate.table(
where="cylinders > 0",
left= by_drivetrain.with_(all="Total") * by_type,
top= by_origin * (enginesize | cylinders) * (mean | n),
)
statements = get_generated_code(invocation)
self.assertIn("proc tabulate data=sashelp.cars", statements['proc'])
# gathered all classes used?
expected_classes = {"drivetrain", "origin", "type"}
classes_sent = statements['class'].replace(';','').split(' ')
self.assertTrue(expected_classes.issubset(set(classes_sent)), msg="classes were not gathered")
# gathered all vars used?
expected_vars = {"cylinders", "enginesize"}
vars_sent = statements['var'].replace(';','').split(' ')
self.assertTrue(expected_vars.issubset(set(vars_sent)), msg="vars were not gathered")
# passed the additional valid "where" option?
self.assertIn('where cylinders > 0', statements['where'], msg="additional options (where) failed")
# check table statement
self.assertIn(
"table (drivetrain ALL='Total') * type, origin * ((enginesize cylinders) * (mean n))",
statements['table'],
msg="generated table syntax did not match expectation"
)
def test_to_dataframe(self):
by_origin, by_type, by_drivetrain = self.cars.tabulate.classes('origin', 'type', 'drivetrain')
enginesize, cylinders = self.cars.tabulate.vars('enginesize', 'cylinders')
mean, n = self.cars.tabulate.stats('mean', 'n')
# generate a MultiIndex DataFrame instead of printing results
frame = self.cars.tabulate.to_dataframe(
left= by_drivetrain.with_(all="Total") * by_type *
by_origin * (enginesize | cylinders) * (mean | n),
)
# verify that the frame was generated correctly
self.assertIsInstance(frame, pd.DataFrame, msg=".to_dataframe() method failed")
self.assertEqual(set(frame.index.names), {'Type', 'Origin', 'DriveTrain'})
self.assertEqual(set(frame.columns), {'Cylinders_N', 'Cylinders_Mean', 'EngineSize_Mean', 'EngineSize_N'})
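# Editor's note (hedged sketch): the composition operators exercised above map
# onto PROC TABULATE syntax -- '|' concatenates elements at the same level and
# '*' nests the right-hand side under the left -- so an expression such as
#     by_origin * (enginesize | cylinders) * (mean | n)
# serializes to
#     origin * ((enginesize cylinders) * (mean n))
# and becomes one dimension of the generated `table` statement.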
|
sdh11/gnuradio
|
refs/heads/master
|
gr-digital/python/digital/generic_mod_demod.py
|
6
|
#
# Copyright 2005,2006,2007,2009,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Generic modulation and demodulation.
"""
# See gnuradio-examples/python/digital for examples
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from gnuradio import gr, blocks, filter, analog
from .modulation_utils import extract_kwargs_from_options_for_class
from .utils import mod_codes
from . import digital_swig as digital
import math
# default values (used in __init__ and add_options)
_def_samples_per_symbol = 2
_def_excess_bw = 0.35
_def_verbose = False
_def_log = False
# Frequency correction
_def_freq_bw = 2*math.pi/100.0
# Symbol timing recovery
_def_timing_bw = 2*math.pi/100.0
_def_timing_max_dev = 1.5
# Fine frequency / Phase correction
_def_phase_bw = 2*math.pi/100.0
# Number of points in constellation
_def_constellation_points = 16
# Whether differential coding is used.
_def_differential = False
def add_common_options(parser):
"""
Sets options common to both modulator and demodulator.
"""
parser.add_option("-p", "--constellation-points", type="int", default=_def_constellation_points,
help="set the number of constellation points (must be a power of 2 for psk, power of 4 for QAM) [default=%default]")
parser.add_option("", "--non-differential", action="store_false",
dest="differential",
help="do not use differential encoding [default=False]")
parser.add_option("", "--differential", action="store_true",
dest="differential", default=True,
help="use differential encoding [default=%default]")
parser.add_option("", "--mod-code", type="choice", choices=mod_codes.codes,
default=mod_codes.NO_CODE,
help="Select modulation code from: %s [default=%%default]"
% (', '.join(mod_codes.codes),))
parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
help="set RRC excess bandwidth factor [default=%default]")
# /////////////////////////////////////////////////////////////////////////////
# Generic modulator
# /////////////////////////////////////////////////////////////////////////////
class generic_mod(gr.hier_block2):
"""
Hierarchical block for RRC-filtered differential generic modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
Args:
constellation: determines the modulation type (gnuradio.digital.digital_constellation)
samples_per_symbol: samples per baud >= 2 (float)
differential: whether to use differential encoding (boolean)
        pre_diff_code: whether to apply a pre-differential mapping (boolean)
excess_bw: Root-raised cosine filter excess bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
def __init__(self, constellation,
differential=_def_differential,
samples_per_symbol=_def_samples_per_symbol,
pre_diff_code=True,
excess_bw=_def_excess_bw,
verbose=_def_verbose,
log=_def_log):
gr.hier_block2.__init__(self, "generic_mod",
gr.io_signature(1, 1, gr.sizeof_char), # Input signature
gr.io_signature(1, 1, gr.sizeof_gr_complex)) # Output signature
self._constellation = constellation
self._samples_per_symbol = samples_per_symbol
self._excess_bw = excess_bw
self._differential = differential
        # Only apply a pre-differential coding if the constellation also supports it.
self.pre_diff_code = pre_diff_code and self._constellation.apply_pre_diff_code()
if self._samples_per_symbol < 2:
raise TypeError("sps must be >= 2, is %f" % self._samples_per_symbol)
arity = pow(2,self.bits_per_symbol())
# turn bytes into k-bit vectors
self.bytes2chunks = \
blocks.packed_to_unpacked_bb(self.bits_per_symbol(), gr.GR_MSB_FIRST)
if self.pre_diff_code:
self.symbol_mapper = digital.map_bb(self._constellation.pre_diff_code())
if differential:
self.diffenc = digital.diff_encoder_bb(arity)
self.chunks2symbols = digital.chunks_to_symbols_bc(self._constellation.points())
# pulse shaping filter
nfilts = 32
ntaps = nfilts * 11 * int(self._samples_per_symbol) # make nfilts filters of ntaps each
self.rrc_taps = filter.firdes.root_raised_cosine(
nfilts, # gain
nfilts, # sampling rate based on 32 filters in resampler
1.0, # symbol rate
self._excess_bw, # excess bandwidth (roll-off factor)
ntaps)
self.rrc_filter = filter.pfb_arb_resampler_ccf(self._samples_per_symbol,
self.rrc_taps)
# Connect
self._blocks = [self, self.bytes2chunks]
if self.pre_diff_code:
self._blocks.append(self.symbol_mapper)
if differential:
self._blocks.append(self.diffenc)
self._blocks += [self.chunks2symbols, self.rrc_filter, self]
self.connect(*self._blocks)
if verbose:
self._print_verbage()
if log:
self._setup_logging()
def samples_per_symbol(self):
return self._samples_per_symbol
def bits_per_symbol(self): # static method that's also callable on an instance
return self._constellation.bits_per_symbol()
@staticmethod
def add_options(parser):
"""
Adds generic modulation options to the standard parser
"""
add_common_options(parser)
def extract_kwargs_from_options(cls, options):
"""
Given command line options, create dictionary suitable for passing to __init__
"""
return extract_kwargs_from_options_for_class(cls, options)
extract_kwargs_from_options=classmethod(extract_kwargs_from_options)
def _print_verbage(self):
print("\nModulator:")
print("bits per symbol: %d" % self.bits_per_symbol())
print("RRC roll-off factor: %.2f" % self._excess_bw)
def _setup_logging(self):
print("Modulation logging turned on.")
self.connect(self.bytes2chunks,
blocks.file_sink(gr.sizeof_char, "tx_bytes2chunks.8b"))
if self.pre_diff_code:
self.connect(self.symbol_mapper,
blocks.file_sink(gr.sizeof_char, "tx_symbol_mapper.8b"))
if self._differential:
self.connect(self.diffenc,
blocks.file_sink(gr.sizeof_char, "tx_diffenc.8b"))
self.connect(self.chunks2symbols,
blocks.file_sink(gr.sizeof_gr_complex, "tx_chunks2symbols.32fc"))
self.connect(self.rrc_filter,
blocks.file_sink(gr.sizeof_gr_complex, "tx_rrc_filter.32fc"))
# /////////////////////////////////////////////////////////////////////////////
# Generic demodulator
#
# Differentially coherent detection of differentially encoded generically
# modulated signal.
# /////////////////////////////////////////////////////////////////////////////
class generic_demod(gr.hier_block2):
"""
Hierarchical block for RRC-filtered differential generic demodulation.
The input is the complex modulated signal at baseband.
The output is a stream of bits packed 1 bit per byte (LSB)
Args:
constellation: determines the modulation type (gnuradio.digital.digital_constellation)
samples_per_symbol: samples per baud >= 2 (float)
differential: whether to use differential encoding (boolean)
        pre_diff_code: whether to apply a pre-differential mapping (boolean)
excess_bw: Root-raised cosine filter excess bandwidth (float)
freq_bw: loop filter lock-in bandwidth (float)
timing_bw: timing recovery loop lock-in bandwidth (float)
phase_bw: phase recovery loop bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
def __init__(self, constellation,
differential=_def_differential,
samples_per_symbol=_def_samples_per_symbol,
pre_diff_code=True,
excess_bw=_def_excess_bw,
freq_bw=_def_freq_bw,
timing_bw=_def_timing_bw,
phase_bw=_def_phase_bw,
verbose=_def_verbose,
log=_def_log):
gr.hier_block2.__init__(self, "generic_demod",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_char)) # Output signature
self._constellation = constellation
self._samples_per_symbol = samples_per_symbol
self._excess_bw = excess_bw
self._phase_bw = phase_bw
self._freq_bw = freq_bw
self._timing_bw = timing_bw
self._timing_max_dev= _def_timing_max_dev
self._differential = differential
if self._samples_per_symbol < 2:
raise TypeError("sps must be >= 2, is %d" % self._samples_per_symbol)
        # Only apply a pre-differential coding if the constellation also supports it.
self.pre_diff_code = pre_diff_code and self._constellation.apply_pre_diff_code()
arity = pow(2,self.bits_per_symbol())
nfilts = 32
ntaps = 11 * int(self._samples_per_symbol*nfilts)
# Automatic gain control
self.agc = analog.agc2_cc(0.6e-1, 1e-3, 1, 1)
# Frequency correction
fll_ntaps = 55
self.freq_recov = digital.fll_band_edge_cc(self._samples_per_symbol, self._excess_bw,
fll_ntaps, self._freq_bw)
# symbol timing recovery with RRC data filter
taps = filter.firdes.root_raised_cosine(nfilts, nfilts*self._samples_per_symbol,
1.0, self._excess_bw, ntaps)
self.time_recov = digital.pfb_clock_sync_ccf(self._samples_per_symbol,
self._timing_bw, taps,
nfilts, nfilts//2, self._timing_max_dev)
fmin = -0.25
fmax = 0.25
self.receiver = digital.constellation_receiver_cb(
self._constellation.base(), self._phase_bw,
fmin, fmax)
# Do differential decoding based on phase change of symbols
if differential:
self.diffdec = digital.diff_decoder_bb(arity)
if self.pre_diff_code:
self.symbol_mapper = digital.map_bb(
mod_codes.invert_code(self._constellation.pre_diff_code()))
# unpack the k bit vector into a stream of bits
self.unpack = blocks.unpack_k_bits_bb(self.bits_per_symbol())
if verbose:
self._print_verbage()
if log:
self._setup_logging()
# Connect and Initialize base class
self._blocks = [self, self.agc, self.freq_recov,
self.time_recov, self.receiver]
if differential:
self._blocks.append(self.diffdec)
if self.pre_diff_code:
self._blocks.append(self.symbol_mapper)
self._blocks += [self.unpack, self]
self.connect(*self._blocks)
def samples_per_symbol(self):
return self._samples_per_symbol
def bits_per_symbol(self):
return self._constellation.bits_per_symbol()
def _print_verbage(self):
print("\nDemodulator:")
print("bits per symbol: %d" % self.bits_per_symbol())
print("RRC roll-off factor: %.2f" % self._excess_bw)
print("FLL bandwidth: %.2e" % self._freq_bw)
print("Timing bandwidth: %.2e" % self._timing_bw)
print("Phase bandwidth: %.2e" % self._phase_bw)
def _setup_logging(self):
print("Modulation logging turned on.")
self.connect(self.agc,
blocks.file_sink(gr.sizeof_gr_complex, "rx_agc.32fc"))
self.connect((self.freq_recov, 0),
blocks.file_sink(gr.sizeof_gr_complex, "rx_freq_recov.32fc"))
self.connect((self.freq_recov, 1),
blocks.file_sink(gr.sizeof_float, "rx_freq_recov_freq.32f"))
self.connect((self.freq_recov, 2),
blocks.file_sink(gr.sizeof_float, "rx_freq_recov_phase.32f"))
self.connect((self.freq_recov, 3),
blocks.file_sink(gr.sizeof_float, "rx_freq_recov_error.32f"))
self.connect((self.time_recov, 0),
blocks.file_sink(gr.sizeof_gr_complex, "rx_time_recov.32fc"))
self.connect((self.time_recov, 1),
blocks.file_sink(gr.sizeof_float, "rx_time_recov_error.32f"))
self.connect((self.time_recov, 2),
blocks.file_sink(gr.sizeof_float, "rx_time_recov_rate.32f"))
self.connect((self.time_recov, 3),
blocks.file_sink(gr.sizeof_float, "rx_time_recov_phase.32f"))
self.connect((self.receiver, 0),
blocks.file_sink(gr.sizeof_char, "rx_receiver.8b"))
self.connect((self.receiver, 1),
blocks.file_sink(gr.sizeof_float, "rx_receiver_error.32f"))
self.connect((self.receiver, 2),
blocks.file_sink(gr.sizeof_float, "rx_receiver_phase.32f"))
self.connect((self.receiver, 3),
blocks.file_sink(gr.sizeof_float, "rx_receiver_freq.32f"))
if self._differential:
self.connect(self.diffdec,
blocks.file_sink(gr.sizeof_char, "rx_diffdec.8b"))
if self.pre_diff_code:
self.connect(self.symbol_mapper,
blocks.file_sink(gr.sizeof_char, "rx_symbol_mapper.8b"))
self.connect(self.unpack,
blocks.file_sink(gr.sizeof_char, "rx_unpack.8b"))
@staticmethod
def add_options(parser):
"""
Adds generic demodulation options to the standard parser
"""
# Add options shared with modulator.
add_common_options(parser)
# Add options specific to demodulator.
parser.add_option("", "--freq-bw", type="float", default=_def_freq_bw,
help="set frequency lock loop lock-in bandwidth [default=%default]")
parser.add_option("", "--phase-bw", type="float", default=_def_phase_bw,
help="set phase tracking loop lock-in bandwidth [default=%default]")
parser.add_option("", "--timing-bw", type="float", default=_def_timing_bw,
help="set timing symbol sync loop gain lock-in bandwidth [default=%default]")
def extract_kwargs_from_options(cls, options):
"""
Given command line options, create dictionary suitable for passing to __init__
"""
return extract_kwargs_from_options_for_class(cls, options)
extract_kwargs_from_options=classmethod(extract_kwargs_from_options)
shared_demod_args = """ samples_per_symbol: samples per baud >= 2 (float)
excess_bw: Root-raised cosine filter excess bandwidth (float)
freq_bw: loop filter lock-in bandwidth (float)
timing_bw: timing recovery loop lock-in bandwidth (float)
phase_bw: phase recovery loop bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
shared_mod_args = """ samples_per_symbol: samples per baud >= 2 (float)
excess_bw: Root-raised cosine filter excess bandwidth (float)
verbose: Print information about modulator? (boolean)
log: Log modulation data to files? (boolean)
"""
|
sdague/home-assistant
|
refs/heads/dev
|
homeassistant/components/ipma/config_flow.py
|
20
|
"""Config flow to configure IPMA component."""
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN, HOME_LOCATION_NAME
from .weather import FORECAST_MODE
@config_entries.HANDLERS.register(DOMAIN)
class IpmaFlowHandler(config_entries.ConfigFlow):
"""Config flow for IPMA component."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Init IpmaFlowHandler."""
self._errors = {}
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
self._errors = {}
if user_input is not None:
if user_input[CONF_NAME] not in self.hass.config_entries.async_entries(
DOMAIN
):
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
self._errors[CONF_NAME] = "name_exists"
        # the default location comes from the hass configuration
return await self._show_config_form(
name=HOME_LOCATION_NAME,
latitude=self.hass.config.latitude,
longitude=self.hass.config.longitude,
)
async def _show_config_form(self, name=None, latitude=None, longitude=None):
"""Show the configuration form to edit location data."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=name): str,
vol.Required(CONF_LATITUDE, default=latitude): cv.latitude,
vol.Required(CONF_LONGITUDE, default=longitude): cv.longitude,
vol.Required(CONF_MODE, default="daily"): vol.In(FORECAST_MODE),
}
),
errors=self._errors,
)
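# Editor's note (hedged): Home Assistant invokes async_step_user() when the
# integration is added from the UI; the first call shows the form pre-filled
# with the instance's home coordinates, and a re-entry with `user_input` set
# either creates the config entry or records a "name_exists" error and shows
# the form again.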
|
Gamebasis/3DGamebasisServer
|
refs/heads/master
|
GameData/blender-2.71-windows64/2.71/python/lib/encodings/koi8_r.py
|
272
|
""" Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='koi8-r',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u2500' # 0x80 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u2502' # 0x81 -> BOX DRAWINGS LIGHT VERTICAL
'\u250c' # 0x82 -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2510' # 0x83 -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x84 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2518' # 0x85 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u251c' # 0x86 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2524' # 0x87 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\u252c' # 0x88 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u2534' # 0x89 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u253c' # 0x8A -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\u2580' # 0x8B -> UPPER HALF BLOCK
'\u2584' # 0x8C -> LOWER HALF BLOCK
'\u2588' # 0x8D -> FULL BLOCK
'\u258c' # 0x8E -> LEFT HALF BLOCK
'\u2590' # 0x8F -> RIGHT HALF BLOCK
'\u2591' # 0x90 -> LIGHT SHADE
'\u2592' # 0x91 -> MEDIUM SHADE
'\u2593' # 0x92 -> DARK SHADE
'\u2320' # 0x93 -> TOP HALF INTEGRAL
'\u25a0' # 0x94 -> BLACK SQUARE
'\u2219' # 0x95 -> BULLET OPERATOR
'\u221a' # 0x96 -> SQUARE ROOT
'\u2248' # 0x97 -> ALMOST EQUAL TO
'\u2264' # 0x98 -> LESS-THAN OR EQUAL TO
'\u2265' # 0x99 -> GREATER-THAN OR EQUAL TO
'\xa0' # 0x9A -> NO-BREAK SPACE
'\u2321' # 0x9B -> BOTTOM HALF INTEGRAL
'\xb0' # 0x9C -> DEGREE SIGN
'\xb2' # 0x9D -> SUPERSCRIPT TWO
'\xb7' # 0x9E -> MIDDLE DOT
'\xf7' # 0x9F -> DIVISION SIGN
'\u2550' # 0xA0 -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u2551' # 0xA1 -> BOX DRAWINGS DOUBLE VERTICAL
'\u2552' # 0xA2 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
'\u0451' # 0xA3 -> CYRILLIC SMALL LETTER IO
'\u2553' # 0xA4 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
'\u2554' # 0xA5 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2555' # 0xA6 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
'\u2556' # 0xA7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
'\u2557' # 0xA8 -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u2558' # 0xA9 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
'\u2559' # 0xAA -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
'\u255a' # 0xAB -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u255b' # 0xAC -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
'\u255c' # 0xAD -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
'\u255d' # 0xAE -> BOX DRAWINGS DOUBLE UP AND LEFT
'\u255e' # 0xAF -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
'\u255f' # 0xB0 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
'\u2560' # 0xB1 -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2561' # 0xB2 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
'\u0401' # 0xB3 -> CYRILLIC CAPITAL LETTER IO
'\u2562' # 0xB4 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
'\u2563' # 0xB5 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2564' # 0xB6 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
'\u2565' # 0xB7 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
'\u2566' # 0xB8 -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2567' # 0xB9 -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
'\u2568' # 0xBA -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
'\u2569' # 0xBB -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u256a' # 0xBC -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
'\u256b' # 0xBD -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
'\u256c' # 0xBE -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\xa9' # 0xBF -> COPYRIGHT SIGN
'\u044e' # 0xC0 -> CYRILLIC SMALL LETTER YU
'\u0430' # 0xC1 -> CYRILLIC SMALL LETTER A
'\u0431' # 0xC2 -> CYRILLIC SMALL LETTER BE
'\u0446' # 0xC3 -> CYRILLIC SMALL LETTER TSE
'\u0434' # 0xC4 -> CYRILLIC SMALL LETTER DE
'\u0435' # 0xC5 -> CYRILLIC SMALL LETTER IE
'\u0444' # 0xC6 -> CYRILLIC SMALL LETTER EF
'\u0433' # 0xC7 -> CYRILLIC SMALL LETTER GHE
'\u0445' # 0xC8 -> CYRILLIC SMALL LETTER HA
'\u0438' # 0xC9 -> CYRILLIC SMALL LETTER I
'\u0439' # 0xCA -> CYRILLIC SMALL LETTER SHORT I
'\u043a' # 0xCB -> CYRILLIC SMALL LETTER KA
'\u043b' # 0xCC -> CYRILLIC SMALL LETTER EL
'\u043c' # 0xCD -> CYRILLIC SMALL LETTER EM
'\u043d' # 0xCE -> CYRILLIC SMALL LETTER EN
'\u043e' # 0xCF -> CYRILLIC SMALL LETTER O
'\u043f' # 0xD0 -> CYRILLIC SMALL LETTER PE
'\u044f' # 0xD1 -> CYRILLIC SMALL LETTER YA
'\u0440' # 0xD2 -> CYRILLIC SMALL LETTER ER
'\u0441' # 0xD3 -> CYRILLIC SMALL LETTER ES
'\u0442' # 0xD4 -> CYRILLIC SMALL LETTER TE
'\u0443' # 0xD5 -> CYRILLIC SMALL LETTER U
'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
'\u0432' # 0xD7 -> CYRILLIC SMALL LETTER VE
'\u044c' # 0xD8 -> CYRILLIC SMALL LETTER SOFT SIGN
'\u044b' # 0xD9 -> CYRILLIC SMALL LETTER YERU
'\u0437' # 0xDA -> CYRILLIC SMALL LETTER ZE
'\u0448' # 0xDB -> CYRILLIC SMALL LETTER SHA
'\u044d' # 0xDC -> CYRILLIC SMALL LETTER E
'\u0449' # 0xDD -> CYRILLIC SMALL LETTER SHCHA
'\u0447' # 0xDE -> CYRILLIC SMALL LETTER CHE
'\u044a' # 0xDF -> CYRILLIC SMALL LETTER HARD SIGN
'\u042e' # 0xE0 -> CYRILLIC CAPITAL LETTER YU
'\u0410' # 0xE1 -> CYRILLIC CAPITAL LETTER A
'\u0411' # 0xE2 -> CYRILLIC CAPITAL LETTER BE
'\u0426' # 0xE3 -> CYRILLIC CAPITAL LETTER TSE
'\u0414' # 0xE4 -> CYRILLIC CAPITAL LETTER DE
'\u0415' # 0xE5 -> CYRILLIC CAPITAL LETTER IE
'\u0424' # 0xE6 -> CYRILLIC CAPITAL LETTER EF
'\u0413' # 0xE7 -> CYRILLIC CAPITAL LETTER GHE
'\u0425' # 0xE8 -> CYRILLIC CAPITAL LETTER HA
'\u0418' # 0xE9 -> CYRILLIC CAPITAL LETTER I
'\u0419' # 0xEA -> CYRILLIC CAPITAL LETTER SHORT I
'\u041a' # 0xEB -> CYRILLIC CAPITAL LETTER KA
'\u041b' # 0xEC -> CYRILLIC CAPITAL LETTER EL
'\u041c' # 0xED -> CYRILLIC CAPITAL LETTER EM
'\u041d' # 0xEE -> CYRILLIC CAPITAL LETTER EN
'\u041e' # 0xEF -> CYRILLIC CAPITAL LETTER O
'\u041f' # 0xF0 -> CYRILLIC CAPITAL LETTER PE
'\u042f' # 0xF1 -> CYRILLIC CAPITAL LETTER YA
'\u0420' # 0xF2 -> CYRILLIC CAPITAL LETTER ER
'\u0421' # 0xF3 -> CYRILLIC CAPITAL LETTER ES
'\u0422' # 0xF4 -> CYRILLIC CAPITAL LETTER TE
'\u0423' # 0xF5 -> CYRILLIC CAPITAL LETTER U
'\u0416' # 0xF6 -> CYRILLIC CAPITAL LETTER ZHE
'\u0412' # 0xF7 -> CYRILLIC CAPITAL LETTER VE
'\u042c' # 0xF8 -> CYRILLIC CAPITAL LETTER SOFT SIGN
'\u042b' # 0xF9 -> CYRILLIC CAPITAL LETTER YERU
'\u0417' # 0xFA -> CYRILLIC CAPITAL LETTER ZE
'\u0428' # 0xFB -> CYRILLIC CAPITAL LETTER SHA
'\u042d' # 0xFC -> CYRILLIC CAPITAL LETTER E
'\u0429' # 0xFD -> CYRILLIC CAPITAL LETTER SHCHA
'\u0427' # 0xFE -> CYRILLIC CAPITAL LETTER CHE
'\u042a' # 0xFF -> CYRILLIC CAPITAL LETTER HARD SIGN
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
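if __name__ == "__main__":
    # Editor's sketch (not part of the generated codec): a quick decode using
    # the table above; each byte value maps straight through decoding_table,
    # e.g. 0xF0 -> U+041F CYRILLIC CAPITAL LETTER PE.
    decoded, consumed = codecs.charmap_decode(b'\xf0\xd2\xc9\xd7\xc5\xd4', 'strict', decoding_table)
    assert decoded == '\u041f\u0440\u0438\u0432\u0435\u0442' and consumed == 6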
|
cosanlab/emote
|
refs/heads/master
|
src/gui/image.py
|
1
|
import os
import cv2
from util.constants import FRAME_WIDTH, FRAME_HEIGHT
from face_process import detect_and_align_face
def process_image(expresser, path, out_file=None, grayscale=False):
    """ Detects and aligns the face in the given image, runs the expresser model on it, and prints the result
    :param expresser: Model for recognizing expressions
    :type expresser: FEExpresser
    :param path: Path to image to be processed
    :type path: str
    :param out_file: Path to write the detected/aligned face image to
    :type out_file: str
    :param grayscale: Whether to convert the aligned face to grayscale
    :type grayscale: bool
    """
if path is None or not os.path.isfile(path):
print("Unable to find file")
return False
image = cv2.imread(path, cv2.IMREAD_COLOR)
#Find the face in the provided image
face = detect_and_align_face(image, expresser.get_image_size(), grayscale)
    #If no face was found, there is nothing to do
    if face is not None:
        #Run recognition on the normalized face
        data = expresser.predict(face)
        print(data)
        if out_file is not None:
            write_image_to_file(face, out_file)
def write_image_to_file(img, path):
""" Writes image representation to some path
:param img: Representation of image to write
:type img: OpenCV Mat or numpy ndarray
:param path: Path to write image to
:type path: str
"""
if path is not None:
cv2.imwrite(path, img)
else:
print("Output file path is invalid")
|
alrusdi/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/django/contrib/gis/utils/wkt.py
|
419
|
"""
Utilities for manipulating Geometry WKT.
"""
def precision_wkt(geom, prec):
"""
    Returns the WKT text of the geometry according to the given precision (an
    integer or a string). If the precision is an integer, the coordinates in
    the WKT will be truncated to that number of decimal places:
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
    >>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, basestring):
num_fmt = prec
else:
raise TypeError
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join([coord_fmt % c[:2] for c in coords])
def formatted_poly(poly):
return ','.join(['(%s)' % formatted_coords(r) for r in poly])
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join(['(%s)' % formatted_poly(p) for p in g])
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join([''.join([wkt for wkt in formatted_geom(child)]) for child in g])
else:
raise TypeError
yield ')'
return ''.join([wkt for wkt in formatted_geom(geom)])
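# Editor's note (hedged): the string branch simply reuses the given printf-style
# format for every coordinate, so precision_wkt(pnt, '%20.7f') pads each value
# to 20 characters with 7 decimal places, while any other precision type falls
# through to the bare TypeError above.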
|
JDat/RocketTestStand
|
refs/heads/dev
|
web_framework/modules/__init__.py
|
12133432
| |
devilry/trix2
|
refs/heads/master
|
trix/trix_student/middleware/__init__.py
|
12133432
| |
stuntman723/rap-analyzer
|
refs/heads/master
|
rap_analyzer/lib/python2.7/site-packages/django/http/multipartparser.py
|
105
|
"""
Multi-part parsing for file uploads.
Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to
file upload handlers for processing.
"""
from __future__ import unicode_literals
import base64
import binascii
import cgi
import sys
from django.conf import settings
from django.core.exceptions import SuspiciousMultipartForm
from django.core.files.uploadhandler import (
SkipFile, StopFutureHandlers, StopUpload,
)
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import unquote
from django.utils.text import unescape_entities
__all__ = ('MultiPartParser', 'MultiPartParserError', 'InputStreamExhausted')
class MultiPartParserError(Exception):
pass
class InputStreamExhausted(Exception):
"""
No more reads are allowed from this device.
"""
pass
RAW = "raw"
FILE = "file"
FIELD = "field"
_BASE64_DECODE_ERROR = TypeError if six.PY2 else binascii.Error
class MultiPartParser(object):
"""
    An RFC 2388 multipart/form-data parser.
    ``MultiPartParser.parse()`` reads the input stream in ``chunk_size`` chunks
    and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``.
"""
def __init__(self, META, input_data, upload_handlers, encoding=None):
"""
Initialize the MultiPartParser object.
:META:
The standard ``META`` dictionary in Django request objects.
:input_data:
The raw post data, as a file-like object.
:upload_handlers:
A list of UploadHandler instances that perform operations on the uploaded
data.
:encoding:
The encoding with which to treat the incoming data.
"""
#
# Content-Type should contain multipart and the boundary information.
#
content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', ''))
if not content_type.startswith('multipart/'):
raise MultiPartParserError('Invalid Content-Type: %s' % content_type)
# Parse the header to get the boundary to split the parts.
ctypes, opts = parse_header(content_type.encode('ascii'))
boundary = opts.get('boundary')
if not boundary or not cgi.valid_boundary(boundary):
raise MultiPartParserError('Invalid boundary in multipart: %s' % boundary)
# Content-Length should contain the length of the body we are about
# to receive.
try:
content_length = int(META.get('HTTP_CONTENT_LENGTH', META.get('CONTENT_LENGTH', 0)))
except (ValueError, TypeError):
content_length = 0
if content_length < 0:
# This means we shouldn't continue...raise an error.
raise MultiPartParserError("Invalid content length: %r" % content_length)
if isinstance(boundary, six.text_type):
boundary = boundary.encode('ascii')
self._boundary = boundary
self._input_data = input_data
# For compatibility with low-level network APIs (with 32-bit integers),
# the chunk size should be < 2^31, but still divisible by 4.
possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
self._chunk_size = min([2 ** 31 - 4] + possible_sizes)
self._meta = META
self._encoding = encoding or settings.DEFAULT_CHARSET
self._content_length = content_length
self._upload_handlers = upload_handlers
def parse(self):
"""
Parse the POST data and break it into a FILES MultiValueDict and a POST
MultiValueDict.
Returns a tuple containing the POST and FILES dictionary, respectively.
"""
# We have to import QueryDict down here to avoid a circular import.
from django.http import QueryDict
encoding = self._encoding
handlers = self._upload_handlers
# HTTP spec says that Content-Length >= 0 is valid
# handling content-length == 0 before continuing
if self._content_length == 0:
return QueryDict('', encoding=self._encoding), MultiValueDict()
# See if any of the handlers take care of the parsing.
# This allows overriding everything if need be.
for handler in handlers:
result = handler.handle_raw_input(self._input_data,
self._meta,
self._content_length,
self._boundary,
encoding)
# Check to see if it was handled
if result is not None:
return result[0], result[1]
# Create the data structures to be used later.
self._post = QueryDict('', mutable=True)
self._files = MultiValueDict()
# Instantiate the parser and stream:
stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))
# Whether or not to signal a file-completion at the beginning of the loop.
old_field_name = None
counters = [0] * len(handlers)
try:
for item_type, meta_data, field_stream in Parser(stream, self._boundary):
if old_field_name:
# We run this at the beginning of the next loop
# since we cannot be sure a file is complete until
# we hit the next boundary/part of the multipart content.
self.handle_file_complete(old_field_name, counters)
old_field_name = None
try:
disposition = meta_data['content-disposition'][1]
field_name = disposition['name'].strip()
except (KeyError, IndexError, AttributeError):
continue
transfer_encoding = meta_data.get('content-transfer-encoding')
if transfer_encoding is not None:
transfer_encoding = transfer_encoding[0].strip()
field_name = force_text(field_name, encoding, errors='replace')
if item_type == FIELD:
# This is a post field, we can just set it in the post
if transfer_encoding == 'base64':
raw_data = field_stream.read()
try:
data = base64.b64decode(raw_data)
except _BASE64_DECODE_ERROR:
data = raw_data
else:
data = field_stream.read()
self._post.appendlist(field_name,
force_text(data, encoding, errors='replace'))
elif item_type == FILE:
# This is a file, use the handler...
file_name = disposition.get('filename')
if not file_name:
continue
file_name = force_text(file_name, encoding, errors='replace')
file_name = self.IE_sanitize(unescape_entities(file_name))
content_type, content_type_extra = meta_data.get('content-type', ('', {}))
content_type = content_type.strip()
charset = content_type_extra.get('charset')
try:
content_length = int(meta_data.get('content-length')[0])
except (IndexError, TypeError, ValueError):
content_length = None
counters = [0] * len(handlers)
try:
for handler in handlers:
try:
handler.new_file(field_name, file_name,
content_type, content_length,
charset, content_type_extra)
except StopFutureHandlers:
break
for chunk in field_stream:
if transfer_encoding == 'base64':
# We only special-case base64 transfer encoding
# We should always decode base64 chunks by multiple of 4,
# ignoring whitespace.
stripped_chunk = b"".join(chunk.split())
remaining = len(stripped_chunk) % 4
while remaining != 0:
over_chunk = field_stream.read(4 - remaining)
stripped_chunk += b"".join(over_chunk.split())
remaining = len(stripped_chunk) % 4
try:
chunk = base64.b64decode(stripped_chunk)
except Exception as e:
# Since this is only a chunk, any error is an unfixable error.
msg = "Could not decode base64 data: %r" % e
six.reraise(MultiPartParserError, MultiPartParserError(msg), sys.exc_info()[2])
for i, handler in enumerate(handlers):
chunk_length = len(chunk)
chunk = handler.receive_data_chunk(chunk,
counters[i])
counters[i] += chunk_length
if chunk is None:
# If the chunk received by the handler is None, then don't continue.
break
except SkipFile:
self._close_files()
# Just use up the rest of this file...
exhaust(field_stream)
else:
# Handle file upload completions on next iteration.
old_field_name = field_name
else:
# If this is neither a FIELD or a FILE, just exhaust the stream.
exhaust(stream)
except StopUpload as e:
self._close_files()
if not e.connection_reset:
exhaust(self._input_data)
else:
# Make sure that the request data is all fed
exhaust(self._input_data)
# Signal that the upload has completed.
for handler in handlers:
retval = handler.upload_complete()
if retval:
break
return self._post, self._files
def handle_file_complete(self, old_field_name, counters):
"""
Handle all the signaling that takes place when a file is complete.
"""
for i, handler in enumerate(self._upload_handlers):
file_obj = handler.file_complete(counters[i])
if file_obj:
# If it returns a file object, then set the files dict.
self._files.appendlist(
force_text(old_field_name, self._encoding, errors='replace'),
file_obj)
break
def IE_sanitize(self, filename):
"""Cleanup filename from Internet Explorer full paths."""
return filename and filename[filename.rfind("\\") + 1:].strip()
def _close_files(self):
# Free up all file handles.
# FIXME: this currently assumes that upload handlers store the file as 'file'
# We should document that... (Maybe add handler.free_file to complement new_file)
for handler in self._upload_handlers:
if hasattr(handler, 'file'):
handler.file.close()
class LazyStream(six.Iterator):
"""
The LazyStream wrapper allows one to get and "unget" bytes from a stream.
Given a producer object (an iterator that yields bytestrings), the
LazyStream object will support iteration, reading, and keeping a "look-back"
variable in case you need to "unget" some bytes.
"""
def __init__(self, producer, length=None):
"""
Every LazyStream must have a producer when instantiated.
A producer is an iterable that returns a string each time it
is called.
"""
self._producer = producer
self._empty = False
self._leftover = b''
self.length = length
self.position = 0
self._remaining = length
self._unget_history = []
def tell(self):
return self.position
def read(self, size=None):
def parts():
remaining = self._remaining if size is None else size
# do the whole thing in one shot if no limit was provided.
if remaining is None:
yield b''.join(self)
return
# otherwise do some bookkeeping to return exactly enough
# of the stream and stashing any extra content we get from
# the producer
while remaining != 0:
assert remaining > 0, 'remaining bytes to read should never go negative'
chunk = next(self)
emitting = chunk[:remaining]
self.unget(chunk[remaining:])
remaining -= len(emitting)
yield emitting
out = b''.join(parts())
return out
def __next__(self):
"""
Used when the exact number of bytes to read is unimportant.
        This procedure just returns whatever chunk is conveniently returned
        from the iterator. Useful to avoid unnecessary bookkeeping if
performance is an issue.
"""
if self._leftover:
output = self._leftover
self._leftover = b''
else:
output = next(self._producer)
self._unget_history = []
self.position += len(output)
return output
def close(self):
"""
Used to invalidate/disable this lazy stream.
Replaces the producer with an empty list. Any leftover bytes that have
already been read will still be reported upon read() and/or next().
"""
self._producer = []
def __iter__(self):
return self
def unget(self, bytes):
"""
Places bytes back onto the front of the lazy stream.
Future calls to read() will return those bytes first. The
stream position and thus tell() will be rewound.
"""
if not bytes:
return
self._update_unget_history(len(bytes))
self.position -= len(bytes)
self._leftover = b''.join([bytes, self._leftover])
def _update_unget_history(self, num_bytes):
"""
Updates the unget history as a sanity check to see if we've pushed
back the same number of bytes in one chunk. If we keep ungetting the
        same number of bytes many times (here, 50), we're most likely in an
infinite loop of some sort. This is usually caused by a
maliciously-malformed MIME request.
"""
self._unget_history = [num_bytes] + self._unget_history[:49]
number_equal = len([current_number for current_number in self._unget_history
if current_number == num_bytes])
if number_equal > 40:
raise SuspiciousMultipartForm(
"The multipart parser got stuck, which shouldn't happen with"
" normal uploaded files. Check for malicious upload activity;"
" if there is none, report this to the Django developers."
)
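# --- Editor's sketch (not part of Django) ---
# A hedged illustration of how read() and unget() interact on a LazyStream fed
# by a plain iterator of bytestrings:
#
#     # stream = LazyStream(iter([b'hello', b'world']))
#     # stream.read(3)        # -> b'hel'  (the unread b'lo' is pushed back)
#     # stream.unget(b'hel')
#     # stream.read()         # -> b'helloworld'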
class ChunkIter(six.Iterator):
"""
An iterable that will yield chunks of data. Given a file-like object as the
constructor, this object will yield chunks of read operations from that
object.
"""
def __init__(self, flo, chunk_size=64 * 1024):
self.flo = flo
self.chunk_size = chunk_size
def __next__(self):
try:
data = self.flo.read(self.chunk_size)
except InputStreamExhausted:
raise StopIteration()
if data:
return data
else:
raise StopIteration()
def __iter__(self):
return self
class InterBoundaryIter(six.Iterator):
"""
A Producer that will iterate over boundaries.
"""
def __init__(self, stream, boundary):
self._stream = stream
self._boundary = boundary
def __iter__(self):
return self
def __next__(self):
try:
return LazyStream(BoundaryIter(self._stream, self._boundary))
except InputStreamExhausted:
raise StopIteration()
class BoundaryIter(six.Iterator):
"""
A Producer that is sensitive to boundaries.
Will happily yield bytes until a boundary is found. Will yield the bytes
before the boundary, throw away the boundary bytes themselves, and push the
post-boundary bytes back on the stream.
The future calls to next() after locating the boundary will raise a
StopIteration exception.
"""
def __init__(self, stream, boundary):
self._stream = stream
self._boundary = boundary
self._done = False
# rollback an additional six bytes because the format is like
# this: CRLF<boundary>[--CRLF]
self._rollback = len(boundary) + 6
# Try to use mx fast string search if available. Otherwise
# use Python find. Wrap the latter for consistency.
unused_char = self._stream.read(1)
if not unused_char:
raise InputStreamExhausted()
self._stream.unget(unused_char)
def __iter__(self):
return self
def __next__(self):
if self._done:
raise StopIteration()
stream = self._stream
rollback = self._rollback
bytes_read = 0
chunks = []
for bytes in stream:
bytes_read += len(bytes)
chunks.append(bytes)
if bytes_read > rollback:
break
if not bytes:
break
else:
self._done = True
if not chunks:
raise StopIteration()
chunk = b''.join(chunks)
boundary = self._find_boundary(chunk, len(chunk) < self._rollback)
if boundary:
end, next = boundary
stream.unget(chunk[next:])
self._done = True
return chunk[:end]
else:
# make sure we don't treat a partial boundary (and
# its separators) as data
if not chunk[:-rollback]: # and len(chunk) >= (len(self._boundary) + 6):
# There's nothing left, we should just return and mark as done.
self._done = True
return chunk
else:
stream.unget(chunk[-rollback:])
return chunk[:-rollback]
def _find_boundary(self, data, eof=False):
"""
Finds a multipart boundary in data.
Should no boundary exist in the data None is returned instead. Otherwise
a tuple containing the indices of the following are returned:
* the end of current encapsulation
* the start of the next encapsulation
"""
index = data.find(self._boundary)
if index < 0:
return None
else:
end = index
next = index + len(self._boundary)
# backup over CRLF
last = max(0, end - 1)
if data[last:last + 1] == b'\n':
end -= 1
last = max(0, end - 1)
if data[last:last + 1] == b'\r':
end -= 1
return end, next
def exhaust(stream_or_iterable):
"""
Completely exhausts an iterator or stream.
Raise a MultiPartParserError if the argument is not a stream or an iterable.
"""
iterator = None
try:
iterator = iter(stream_or_iterable)
except TypeError:
iterator = ChunkIter(stream_or_iterable, 16384)
if iterator is None:
raise MultiPartParserError('multipartparser.exhaust() was passed a non-iterable or stream parameter')
for __ in iterator:
pass
def parse_boundary_stream(stream, max_header_size):
"""
Parses one and exactly one stream that encapsulates a boundary.
"""
# Stream at beginning of header, look for end of header
# and parse it if found. The header must fit within one
# chunk.
chunk = stream.read(max_header_size)
    # 'find' returns the index of the start of these four bytes, so we'll
    # need to munch them later to prevent them from polluting
    # the payload.
header_end = chunk.find(b'\r\n\r\n')
def _parse_header(line):
main_value_pair, params = parse_header(line)
try:
name, value = main_value_pair.split(':', 1)
except ValueError:
raise ValueError("Invalid header: %r" % line)
return name, (value, params)
if header_end == -1:
        # we found no header, so we just mark this fact and pass on
# the stream verbatim
stream.unget(chunk)
return (RAW, {}, stream)
header = chunk[:header_end]
# here we place any excess chunk back onto the stream, as
# well as throwing away the CRLFCRLF bytes from above.
stream.unget(chunk[header_end + 4:])
TYPE = RAW
outdict = {}
# Eliminate blank lines
for line in header.split(b'\r\n'):
# This terminology ("main value" and "dictionary of
# parameters") is from the Python docs.
try:
name, (value, params) = _parse_header(line)
except ValueError:
continue
if name == 'content-disposition':
TYPE = FIELD
if params.get('filename'):
TYPE = FILE
outdict[name] = value, params
if TYPE == RAW:
stream.unget(chunk)
return (TYPE, outdict, stream)
class Parser(object):
def __init__(self, stream, boundary):
self._stream = stream
self._separator = b'--' + boundary
def __iter__(self):
boundarystream = InterBoundaryIter(self._stream, self._separator)
for sub_stream in boundarystream:
# Iterate over each part
yield parse_boundary_stream(sub_stream, 1024)
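# Illustrative sketch (not part of the original module): Parser is driven by
# iterating over it, which yields one (item_type, meta_data, stream) tuple per
# multipart section. The boundary and stream below are made-up placeholders.
def _parser_usage_example(lazy_stream, boundary=b'myboundary'):
    parts = []
    for item_type, meta_data, field_stream in Parser(lazy_stream, boundary):
        # item_type is one of RAW, FIELD or FILE; meta_data holds the parsed
        # headers and field_stream exposes the section's payload bytes.
        parts.append((item_type, meta_data, field_stream.read()))
    return parts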
def parse_header(line):
""" Parse the header into a key-value.
Input (line): bytes, output: unicode for key/name, bytes for value which
will be decoded later
"""
plist = _parse_header_params(b';' + line)
key = plist.pop(0).lower().decode('ascii')
pdict = {}
for p in plist:
i = p.find(b'=')
if i >= 0:
has_encoding = False
name = p[:i].strip().lower().decode('ascii')
if name.endswith('*'):
# Lang/encoding embedded in the value (like "filename*=UTF-8''file.ext")
# http://tools.ietf.org/html/rfc2231#section-4
name = name[:-1]
if p.count(b"'") == 2:
has_encoding = True
value = p[i + 1:].strip()
if has_encoding:
encoding, lang, value = value.split(b"'")
if six.PY3:
value = unquote(value.decode(), encoding=encoding.decode())
else:
value = unquote(value).decode(encoding)
if len(value) >= 2 and value[:1] == value[-1:] == b'"':
value = value[1:-1]
value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"')
pdict[name] = value
return key, pdict
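# Illustrative sketch (not part of the original module): parse_header splits a
# raw header line into its main value and a dict of parameters. The header
# line below is a made-up example of a typical multipart Content-Disposition.
def _parse_header_example():
    line = b'Content-Disposition: form-data; name="avatar"; filename="me.png"'
    main_value_pair, params = parse_header(line)
    # main_value_pair is the lowercased text before the first ';',
    # e.g. 'content-disposition: form-data'; params maps parameter names to
    # still-undecoded byte values such as {'name': b'avatar', ...}.
    return main_value_pair, params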
def _parse_header_params(s):
plist = []
while s[:1] == b';':
s = s[1:]
end = s.find(b';')
while end > 0 and s.count(b'"', 0, end) % 2:
end = s.find(b';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
plist.append(f.strip())
s = s[end:]
return plist
|
jamesmarva/ripozo
|
refs/heads/master
|
ripozo/resources/request.py
|
2
|
"""
The RequestContainer.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from ripozo.resources.constants import input_categories
class RequestContainer(object):
"""
An object that represents an incoming request.
This is done primarily to keep the data in one
place and to make a generically accessible object.
It should be assumed that no parameter is required
and no property is guaranteed.
"""
def __init__(self, url_params=None, query_args=None, body_args=None, headers=None, method=None):
"""
Create a new request container. Typically this is constructed
in the dispatcher.
:param dict url_params: The url parameters that are a part of the
request. These are the variable parts of the url. For example,
a request with /resource/<id> would have the id as a url_param
        :param dict query_args: The query args that were in the request. They
            should be a dictionary
:param dict body_args: The arguments in the body.
:param dict headers: A dictionary of the headers and their values
:param unicode method: The method that was used to make
the request.
"""
self._url_params = url_params or {}
self._query_args = query_args or {}
self._body_args = body_args or {}
self._headers = headers or {}
self.method = method
@property
def url_params(self):
"""
:return: A copy of the url_params dictionary
:rtype: dict
"""
return self._url_params.copy()
@url_params.setter
def url_params(self, value):
self._url_params = value
@property
def query_args(self):
"""
:return: A copy of the query_args
:rtype: dict
"""
return self._query_args.copy()
@query_args.setter
def query_args(self, value):
self._query_args = value
@property
def body_args(self):
"""
:return: a copy of the body_args
:rtype: dict
"""
return self._body_args.copy()
@body_args.setter
def body_args(self, value):
self._body_args = value
@property
def headers(self):
"""
:return: A copy of the headers dict
:rtype: dict
"""
return self._headers.copy()
@headers.setter
def headers(self, value):
self._headers = value
@property
def content_type(self):
"""
:return: The Content-Type header or None if it is not available in
the headers property on this request object.
:rtype: unicode
"""
return self._headers.get('Content-Type', None)
@content_type.setter
def content_type(self, value):
self._headers['Content-Type'] = value
def get(self, name, default=None, location=None):
"""
        Attempts to retrieve the parameter with the
        given name from the url_params, query_args and then
        body_args, in that order. Returns the default
        if the parameter is not found.
        :param unicode name: The name of the parameter
            to retrieve from the request
        :param object default: The value to return when the
            parameter cannot be found in any location
        :param unicode location: Optionally restrict the lookup
            to a single location, e.g. QUERY_ARGS
        :return: The requested attribute if found,
            otherwise the default.
        :rtype: object
"""
if not location and name in self._url_params or location == input_categories.URL_PARAMS:
            return self.url_params.get(name, default)
elif not location and name in self._query_args or location == input_categories.QUERY_ARGS:
            return self._query_args.get(name, default)
elif not location and name in self._body_args or location == input_categories.BODY_ARGS:
return self._body_args.get(name, default)
return default
def set(self, name, value, location=None):
"""
        Attempts to set the field with the specified name
        in the location specified. Searches through all
        the locations if location is not specified. Raises
        a KeyError if no location is set and the name is
        not found in any of the locations.
        :param unicode name: The name of the field
        :param object value: The value to set the field to
        :param unicode location: The location of the
            field to set, e.g. QUERY_ARGS.
        :raises: KeyError
"""
if not location and name in self._url_params or location == input_categories.URL_PARAMS:
self._url_params[name] = value
return
elif not location and name in self._query_args or location == input_categories.QUERY_ARGS:
self._query_args[name] = value
return
elif not location and name in self._body_args or location == input_categories.BODY_ARGS:
self._body_args[name] = value
return
raise KeyError('Location was not specified and the parameter {0} '
'could not be found on the request object'.format(name))
def __contains__(self, item):
"""
Checks if the item is available in any of
the url_params, body_args, or query_args
:param unicode item: The key to look for in the
various parameter dictionaries.
:return: Whether the object was actually found.
:rtype: bool
"""
if item in self._url_params or item in self._body_args or item in self._query_args:
return True
return False
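# Illustrative sketch (not part of this module): a RequestContainer is normally
# built by a dispatcher, but constructing one by hand shows how get() resolves
# values across url_params, query_args and body_args. All values are made up.
def _request_container_example():
    request = RequestContainer(
        url_params={'id': '42'},
        query_args={'page': '2'},
        body_args={'name': 'ripozo'},
        headers={'Content-Type': 'application/json'},
        method='POST',
    )
    # Without a location, get() searches url_params, then query_args, then
    # body_args; with a location it only looks in that one place.
    assert request.get('id') == '42'
    assert request.get('page', location=input_categories.QUERY_ARGS) == '2'
    assert request.get('missing', default='fallback') == 'fallback'
    assert 'name' in request
    return request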
|
rdio/translate-toolkit
|
refs/heads/master
|
lang/test_poedit.py
|
3
|
from translate.lang.poedit import isocode
def test_isocode():
"""Test the isocode function"""
# Standard lookup
assert isocode("French") == "fr"
# Dialect lookups: Portuguese
assert isocode("Portuguese") == "pt" # No country we default to 'None'
assert isocode("Portuguese", "BRAZIL") == "pt_BR" # Country with a valid dialect
assert isocode("Portuguese", "PORTUGAL") == "pt"
assert isocode("Portuguese", "MOZAMBIQUE") == "pt" # Country is not a dialect so use default
# Dialect lookups: English
assert isocode("English") == "en"
assert isocode("English", "UNITED KINGDOM") == "en_GB"
assert isocode("English", "UNITED STATES") == "en"
|
nelsonsar/ansible
|
refs/heads/devel
|
lib/ansible/executor/task_queue_manager.py
|
52
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import multiprocessing
import os
import socket
import sys
import tempfile
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.play_iterator import PlayIterator
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.process.result import ResultProcess
from ansible.executor.stats import AggregateStats
from ansible.playbook.play_context import PlayContext
from ansible.plugins import callback_loader, strategy_loader, module_loader
from ansible.template import Templar
__all__ = ['TaskQueueManager']
class TaskQueueManager:
'''
This class handles the multiprocessing requirements of Ansible by
creating a pool of worker forks, a result handler fork, and a
manager object with shared datastructures/queues for coordinating
work between all processes.
The queue manager is responsible for loading the play strategy plugin,
which dispatches the Play's tasks to hosts.
'''
def __init__(self, inventory, variable_manager, loader, display, options, passwords, stdout_callback=None):
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._display = display
self._options = options
self._stats = AggregateStats()
self.passwords = passwords
self._stdout_callback = stdout_callback
self._callbacks_loaded = False
self._callback_plugins = []
# make sure the module path (if specified) is parsed and
# added to the module_loader object
if options.module_path is not None:
for path in options.module_path.split(os.pathsep):
module_loader.add_directory(path)
# a special flag to help us exit cleanly
self._terminated = False
# this dictionary is used to keep track of notified handlers
self._notified_handlers = dict()
# dictionaries to keep track of failed/unreachable hosts
self._failed_hosts = dict()
self._unreachable_hosts = dict()
self._final_q = multiprocessing.Queue()
        # create the pool of worker processes, based on the number of forks specified
try:
fileno = sys.stdin.fileno()
except ValueError:
fileno = None
# A temporary file (opened pre-fork) used by connection
# plugins for inter-process locking.
self._connection_lockfile = tempfile.TemporaryFile()
self._workers = []
for i in range(self._options.forks):
main_q = multiprocessing.Queue()
rslt_q = multiprocessing.Queue()
prc = WorkerProcess(self, main_q, rslt_q, loader)
prc.start()
self._workers.append((prc, main_q, rslt_q))
self._result_prc = ResultProcess(self._final_q, self._workers)
self._result_prc.start()
def _initialize_notified_handlers(self, handlers):
'''
Clears and initializes the shared notified handlers dict with entries
for each handler in the play, which is an empty array that will contain
inventory hostnames for those hosts triggering the handler.
'''
# Zero the dictionary first by removing any entries there.
# Proxied dicts don't support iteritems, so we have to use keys()
for key in self._notified_handlers.keys():
del self._notified_handlers[key]
# FIXME: there is a block compile helper for this...
handler_list = []
for handler_block in handlers:
for handler in handler_block.block:
handler_list.append(handler)
# then initialize it with the handler names from the handler list
for handler in handler_list:
self._notified_handlers[handler.get_name()] = []
def load_callbacks(self):
'''
Loads all available callbacks, with the exception of those which
utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout',
only one such callback plugin will be loaded.
'''
if self._callbacks_loaded:
return
stdout_callback_loaded = False
if self._stdout_callback is None:
self._stdout_callback = C.DEFAULT_STDOUT_CALLBACK
if self._stdout_callback not in callback_loader:
raise AnsibleError("Invalid callback for stdout specified: %s" % self._stdout_callback)
for callback_plugin in callback_loader.all(class_only=True):
if hasattr(callback_plugin, 'CALLBACK_VERSION') and callback_plugin.CALLBACK_VERSION >= 2.0:
# we only allow one callback of type 'stdout' to be loaded, so check
# the name of the current plugin and type to see if we need to skip
# loading this callback plugin
callback_type = getattr(callback_plugin, 'CALLBACK_TYPE', None)
(callback_name, _) = os.path.splitext(os.path.basename(callback_plugin._original_path))
if callback_type == 'stdout':
if callback_name != self._stdout_callback or stdout_callback_loaded:
continue
stdout_callback_loaded = True
elif C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST:
continue
self._callback_plugins.append(callback_plugin(self._display))
else:
self._callback_plugins.append(callback_plugin())
self._callbacks_loaded = True
def run(self, play):
'''
Iterates over the roles/tasks in a play, using the given (or default)
strategy for queueing tasks. The default is the linear strategy, which
operates like classic Ansible by keeping all hosts in lock-step with
a given task (meaning no hosts move on to the next task until all hosts
are done with the current task).
'''
if not self._callbacks_loaded:
self.load_callbacks()
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
play_context = PlayContext(new_play, self._options, self.passwords, self._connection_lockfile.fileno())
for callback_plugin in self._callback_plugins:
if hasattr(callback_plugin, 'set_play_context'):
callback_plugin.set_play_context(play_context)
self.send_callback('v2_playbook_on_play_start', new_play)
# initialize the shared dictionary containing the notified handlers
self._initialize_notified_handlers(new_play.handlers)
# load the specified strategy (or the default linear one)
strategy = strategy_loader.get(new_play.strategy, self)
if strategy is None:
raise AnsibleError("Invalid play strategy specified: %s" % new_play.strategy, obj=play._ds)
# build the iterator
iterator = PlayIterator(
inventory=self._inventory,
play=new_play,
play_context=play_context,
variable_manager=self._variable_manager,
all_vars=all_vars,
)
# and run the play using the strategy
return strategy.run(iterator, play_context)
def cleanup(self):
self._display.debug("RUNNING CLEANUP")
self.terminate()
self._final_q.close()
self._result_prc.terminate()
for (worker_prc, main_q, rslt_q) in self._workers:
rslt_q.close()
main_q.close()
worker_prc.terminate()
def clear_failed_hosts(self):
self._failed_hosts = dict()
def get_inventory(self):
return self._inventory
def get_variable_manager(self):
return self._variable_manager
def get_loader(self):
return self._loader
def get_notified_handlers(self):
return self._notified_handlers
def get_workers(self):
return self._workers[:]
def terminate(self):
self._terminated = True
def send_callback(self, method_name, *args, **kwargs):
for callback_plugin in self._callback_plugins:
# a plugin that set self.disabled to True will not be called
# see osx_say.py example for such a plugin
if getattr(callback_plugin, 'disabled', False):
continue
methods = [
getattr(callback_plugin, method_name, None),
getattr(callback_plugin, 'v2_on_any', None)
]
for method in methods:
if method is not None:
try:
method(*args, **kwargs)
except Exception as e:
self._display.warning('Error when using %s: %s' % (method, str(e)))
|
javachengwc/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/django/core/management/commands/runfcgi.py
|
674
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Runs this project as a FastCGI application. Requires flup."
args = '[various KEY=val options, use `runfcgi help` for help]'
def handle(self, *args, **options):
from django.conf import settings
from django.utils import translation
# Activate the current language, because it won't get activated later.
try:
translation.activate(settings.LANGUAGE_CODE)
except AttributeError:
pass
from django.core.servers.fastcgi import runfastcgi
runfastcgi(args)
def usage(self, subcommand):
from django.core.servers.fastcgi import FASTCGI_HELP
return FASTCGI_HELP
|
xfournet/intellij-community
|
refs/heads/master
|
python/helpers/pydev/pydev_ipython/version.py
|
142
|
# encoding: utf-8
"""
Utilities for version comparison
It is a bit ridiculous that we need these.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from distutils.version import LooseVersion
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def check_version(v, check):
"""check version string v >= check
If dev/prerelease tags result in TypeError for string-number comparison,
it is assumed that the dependency is satisfied.
Users on dev branches are responsible for keeping their own packages up to date.
"""
try:
return LooseVersion(v) >= LooseVersion(check)
except TypeError:
return True
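# Illustrative sketch (not part of this module): check_version just compares
# LooseVersion objects, so plain numeric versions behave as expected while odd
# dev/prerelease tags fall back to "assume satisfied". Inputs are made up.
def _check_version_example():
    assert check_version('1.10.2', '1.9')    # newer than required
    assert not check_version('0.13', '1.0')  # too old
    assert check_version('2.0.0', '2.0.0')   # equal versions pass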
|
johnnyliu27/openmc
|
refs/heads/develop
|
tests/regression_tests/mgxs_library_nuclides/test.py
|
4
|
import hashlib
import openmc
import openmc.mgxs
from openmc.examples import pwr_pin_cell
from tests.testing_harness import PyAPITestHarness
class MGXSTestHarness(PyAPITestHarness):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Initialize a two-group structure
energy_groups = openmc.mgxs.EnergyGroups(group_edges=[0, 0.625, 20.e6])
# Initialize MGXS Library for a few cross section types
self.mgxs_lib = openmc.mgxs.Library(self._model.geometry)
self.mgxs_lib.by_nuclide = True
# Test all MGXS types
self.mgxs_lib.mgxs_types = openmc.mgxs.MGXS_TYPES
self.mgxs_lib.energy_groups = energy_groups
self.mgxs_lib.legendre_order = 3
self.mgxs_lib.domain_type = 'material'
self.mgxs_lib.build_library()
# Add tallies
self.mgxs_lib.add_to_tallies_file(self._model.tallies, merge=False)
def _get_results(self, hash_output=True):
"""Digest info in the statepoint and return as a string."""
# Read the statepoint file.
sp = openmc.StatePoint(self._sp_name)
# Load the MGXS library from the statepoint
self.mgxs_lib.load_from_statepoint(sp)
# Build a string from Pandas Dataframe for each MGXS
outstr = ''
for domain in self.mgxs_lib.domains:
for mgxs_type in self.mgxs_lib.mgxs_types:
mgxs = self.mgxs_lib.get_mgxs(domain, mgxs_type)
df = mgxs.get_pandas_dataframe()
outstr += df.to_string() + '\n'
# Hash the results if necessary
if hash_output:
sha512 = hashlib.sha512()
sha512.update(outstr.encode('utf-8'))
outstr = sha512.hexdigest()
return outstr
def test_mgxs_library_nuclides():
model = pwr_pin_cell()
harness = MGXSTestHarness('statepoint.10.h5', model)
harness.main()
|
iansf/sky_engine
|
refs/heads/master
|
sky/tools/webkitpy/thirdparty/mod_pywebsocket/_stream_hybi.py
|
628
|
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides classes and helper functions for parsing/building frames
of the WebSocket protocol (RFC 6455).
Specification:
http://tools.ietf.org/html/rfc6455
"""
from collections import deque
import logging
import os
import struct
import time
from mod_pywebsocket import common
from mod_pywebsocket import util
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import InvalidUTF8Exception
from mod_pywebsocket._stream_base import StreamBase
from mod_pywebsocket._stream_base import UnsupportedFrameException
_NOOP_MASKER = util.NoopMasker()
class Frame(object):
def __init__(self, fin=1, rsv1=0, rsv2=0, rsv3=0,
opcode=None, payload=''):
self.fin = fin
self.rsv1 = rsv1
self.rsv2 = rsv2
self.rsv3 = rsv3
self.opcode = opcode
self.payload = payload
# Helper functions made public to be used for writing unittests for WebSocket
# clients.
def create_length_header(length, mask):
"""Creates a length header.
Args:
length: Frame length. Must be less than 2^63.
mask: Mask bit. Must be boolean.
Raises:
ValueError: when bad data is given.
"""
if mask:
mask_bit = 1 << 7
else:
mask_bit = 0
if length < 0:
raise ValueError('length must be non negative integer')
elif length <= 125:
return chr(mask_bit | length)
elif length < (1 << 16):
return chr(mask_bit | 126) + struct.pack('!H', length)
elif length < (1 << 63):
return chr(mask_bit | 127) + struct.pack('!Q', length)
else:
raise ValueError('Payload is too big for one frame')
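# Illustrative sketch (not part of the original module): the three length
# encodings produced by create_length_header, shown for made-up payload sizes.
def _length_header_example():
    assert create_length_header(5, mask=False) == chr(5)
    assert create_length_header(300, mask=False) == chr(126) + struct.pack('!H', 300)
    assert create_length_header(70000, mask=False) == chr(127) + struct.pack('!Q', 70000)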
def create_header(opcode, payload_length, fin, rsv1, rsv2, rsv3, mask):
"""Creates a frame header.
Raises:
Exception: when bad data is given.
"""
if opcode < 0 or 0xf < opcode:
raise ValueError('Opcode out of range')
if payload_length < 0 or (1 << 63) <= payload_length:
raise ValueError('payload_length out of range')
if (fin | rsv1 | rsv2 | rsv3) & ~1:
raise ValueError('FIN bit and Reserved bit parameter must be 0 or 1')
header = ''
first_byte = ((fin << 7)
| (rsv1 << 6) | (rsv2 << 5) | (rsv3 << 4)
| opcode)
header += chr(first_byte)
header += create_length_header(payload_length, mask)
return header
def _build_frame(header, body, mask):
if not mask:
return header + body
masking_nonce = os.urandom(4)
masker = util.RepeatedXorMasker(masking_nonce)
return header + masking_nonce + masker.mask(body)
def _filter_and_format_frame_object(frame, mask, frame_filters):
for frame_filter in frame_filters:
frame_filter.filter(frame)
header = create_header(
frame.opcode, len(frame.payload), frame.fin,
frame.rsv1, frame.rsv2, frame.rsv3, mask)
return _build_frame(header, frame.payload, mask)
def create_binary_frame(
message, opcode=common.OPCODE_BINARY, fin=1, mask=False, frame_filters=[]):
"""Creates a simple binary frame with no extension, reserved bit."""
frame = Frame(fin=fin, opcode=opcode, payload=message)
return _filter_and_format_frame_object(frame, mask, frame_filters)
def create_text_frame(
message, opcode=common.OPCODE_TEXT, fin=1, mask=False, frame_filters=[]):
"""Creates a simple text frame with no extension, reserved bit."""
encoded_message = message.encode('utf-8')
return create_binary_frame(encoded_message, opcode, fin, mask,
frame_filters)
def parse_frame(receive_bytes, logger=None,
ws_version=common.VERSION_HYBI_LATEST,
unmask_receive=True):
"""Parses a frame. Returns a tuple containing each header field and
payload.
Args:
receive_bytes: a function that reads frame data from a stream or
something similar. The function takes length of the bytes to be
read. The function must raise ConnectionTerminatedException if
there is not enough data to be read.
logger: a logging object.
ws_version: the version of WebSocket protocol.
        unmask_receive: unmask received frames. When an unmasked frame is
            received while this is set, InvalidFrameException is raised.
Raises:
ConnectionTerminatedException: when receive_bytes raises it.
InvalidFrameException: when the frame contains invalid data.
"""
if not logger:
logger = logging.getLogger()
logger.log(common.LOGLEVEL_FINE, 'Receive the first 2 octets of a frame')
received = receive_bytes(2)
first_byte = ord(received[0])
fin = (first_byte >> 7) & 1
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
second_byte = ord(received[1])
mask = (second_byte >> 7) & 1
payload_length = second_byte & 0x7f
logger.log(common.LOGLEVEL_FINE,
'FIN=%s, RSV1=%s, RSV2=%s, RSV3=%s, opcode=%s, '
'Mask=%s, Payload_length=%s',
fin, rsv1, rsv2, rsv3, opcode, mask, payload_length)
if (mask == 1) != unmask_receive:
raise InvalidFrameException(
            'Mask bit on the received frame didn\'t match masking '
'configuration for received frames')
# The HyBi and later specs disallow putting a value in 0x0-0xFFFF
# into the 8-octet extended payload length field (or 0x0-0xFD in
# 2-octet field).
valid_length_encoding = True
length_encoding_bytes = 1
if payload_length == 127:
logger.log(common.LOGLEVEL_FINE,
'Receive 8-octet extended payload length')
extended_payload_length = receive_bytes(8)
payload_length = struct.unpack(
'!Q', extended_payload_length)[0]
if payload_length > 0x7FFFFFFFFFFFFFFF:
raise InvalidFrameException(
'Extended payload length >= 2^63')
if ws_version >= 13 and payload_length < 0x10000:
valid_length_encoding = False
length_encoding_bytes = 8
logger.log(common.LOGLEVEL_FINE,
'Decoded_payload_length=%s', payload_length)
elif payload_length == 126:
logger.log(common.LOGLEVEL_FINE,
'Receive 2-octet extended payload length')
extended_payload_length = receive_bytes(2)
payload_length = struct.unpack(
'!H', extended_payload_length)[0]
if ws_version >= 13 and payload_length < 126:
valid_length_encoding = False
length_encoding_bytes = 2
logger.log(common.LOGLEVEL_FINE,
'Decoded_payload_length=%s', payload_length)
if not valid_length_encoding:
logger.warning(
'Payload length is not encoded using the minimal number of '
'bytes (%d is encoded using %d bytes)',
payload_length,
length_encoding_bytes)
if mask == 1:
logger.log(common.LOGLEVEL_FINE, 'Receive mask')
masking_nonce = receive_bytes(4)
masker = util.RepeatedXorMasker(masking_nonce)
logger.log(common.LOGLEVEL_FINE, 'Mask=%r', masking_nonce)
else:
masker = _NOOP_MASKER
logger.log(common.LOGLEVEL_FINE, 'Receive payload data')
if logger.isEnabledFor(common.LOGLEVEL_FINE):
receive_start = time.time()
raw_payload_bytes = receive_bytes(payload_length)
if logger.isEnabledFor(common.LOGLEVEL_FINE):
logger.log(
common.LOGLEVEL_FINE,
'Done receiving payload data at %s MB/s',
payload_length / (time.time() - receive_start) / 1000 / 1000)
logger.log(common.LOGLEVEL_FINE, 'Unmask payload data')
if logger.isEnabledFor(common.LOGLEVEL_FINE):
unmask_start = time.time()
unmasked_bytes = masker.mask(raw_payload_bytes)
if logger.isEnabledFor(common.LOGLEVEL_FINE):
logger.log(
common.LOGLEVEL_FINE,
'Done unmasking payload data at %s MB/s',
payload_length / (time.time() - unmask_start) / 1000 / 1000)
return opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3
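# Illustrative sketch (not part of the original module): parse_frame only needs
# a callable returning exactly the requested number of bytes, so a small
# closure over an in-memory buffer is enough for experiments. The frame below
# is built with create_text_frame (unmasked, hence unmask_receive=False).
def _parse_frame_example():
    buf = [create_text_frame(u'hello')]

    def receive_bytes(length):
        if len(buf[0]) < length:
            raise ConnectionTerminatedException('Out of data')
        chunk, buf[0] = buf[0][:length], buf[0][length:]
        return chunk

    opcode, payload, fin, rsv1, rsv2, rsv3 = parse_frame(
        receive_bytes, unmask_receive=False)
    # For this frame: opcode == common.OPCODE_TEXT, fin == 1 and payload is
    # the UTF-8 encoded text 'hello'.
    return opcode, payload, fin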
class FragmentedFrameBuilder(object):
"""A stateful class to send a message as fragments."""
def __init__(self, mask, frame_filters=[], encode_utf8=True):
"""Constructs an instance."""
self._mask = mask
self._frame_filters = frame_filters
# This is for skipping UTF-8 encoding when building text type frames
# from compressed data.
self._encode_utf8 = encode_utf8
self._started = False
# Hold opcode of the first frame in messages to verify types of other
# frames in the message are all the same.
self._opcode = common.OPCODE_TEXT
def build(self, payload_data, end, binary):
if binary:
frame_type = common.OPCODE_BINARY
else:
frame_type = common.OPCODE_TEXT
if self._started:
if self._opcode != frame_type:
raise ValueError('Message types are different in frames for '
'the same message')
opcode = common.OPCODE_CONTINUATION
else:
opcode = frame_type
self._opcode = frame_type
if end:
self._started = False
fin = 1
else:
self._started = True
fin = 0
if binary or not self._encode_utf8:
return create_binary_frame(
payload_data, opcode, fin, self._mask, self._frame_filters)
else:
return create_text_frame(
payload_data, opcode, fin, self._mask, self._frame_filters)
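# Illustrative sketch (not part of the original module): FragmentedFrameBuilder
# remembers the opcode of the first fragment, so a message split across calls
# comes out as a TEXT/BINARY frame followed by CONTINUATION frames. Data is
# made up.
def _fragmented_frame_builder_example():
    builder = FragmentedFrameBuilder(mask=False)
    first = builder.build(u'Hel', end=False, binary=False)  # TEXT frame, fin=0
    last = builder.build(u'lo', end=True, binary=False)     # CONTINUATION, fin=1
    return first, last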
def _create_control_frame(opcode, body, mask, frame_filters):
frame = Frame(opcode=opcode, payload=body)
for frame_filter in frame_filters:
frame_filter.filter(frame)
if len(frame.payload) > 125:
raise BadOperationException(
'Payload data size of control frames must be 125 bytes or less')
header = create_header(
frame.opcode, len(frame.payload), frame.fin,
frame.rsv1, frame.rsv2, frame.rsv3, mask)
return _build_frame(header, frame.payload, mask)
def create_ping_frame(body, mask=False, frame_filters=[]):
return _create_control_frame(common.OPCODE_PING, body, mask, frame_filters)
def create_pong_frame(body, mask=False, frame_filters=[]):
return _create_control_frame(common.OPCODE_PONG, body, mask, frame_filters)
def create_close_frame(body, mask=False, frame_filters=[]):
return _create_control_frame(
common.OPCODE_CLOSE, body, mask, frame_filters)
def create_closing_handshake_body(code, reason):
body = ''
if code is not None:
if (code > common.STATUS_USER_PRIVATE_MAX or
code < common.STATUS_NORMAL_CLOSURE):
raise BadOperationException('Status code is out of range')
if (code == common.STATUS_NO_STATUS_RECEIVED or
code == common.STATUS_ABNORMAL_CLOSURE or
code == common.STATUS_TLS_HANDSHAKE):
raise BadOperationException('Status code is reserved pseudo '
'code')
encoded_reason = reason.encode('utf-8')
body = struct.pack('!H', code) + encoded_reason
return body
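# Illustrative sketch (not part of the original module): the closing handshake
# body is a network-order 2-byte status code followed by a UTF-8 reason. The
# status code and reason below are made-up example values.
def _closing_handshake_body_example():
    body = create_closing_handshake_body(common.STATUS_NORMAL_CLOSURE, u'bye')
    assert body == struct.pack('!H', common.STATUS_NORMAL_CLOSURE) + 'bye'
    return body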
class StreamOptions(object):
"""Holds option values to configure Stream objects."""
def __init__(self):
"""Constructs StreamOptions."""
# Filters applied to frames.
self.outgoing_frame_filters = []
self.incoming_frame_filters = []
# Filters applied to messages. Control frames are not affected by them.
self.outgoing_message_filters = []
self.incoming_message_filters = []
self.encode_text_message_to_utf8 = True
self.mask_send = False
self.unmask_receive = True
class Stream(StreamBase):
"""A class for parsing/building frames of the WebSocket protocol
(RFC 6455).
"""
def __init__(self, request, options):
"""Constructs an instance.
Args:
request: mod_python request.
"""
StreamBase.__init__(self, request)
self._logger = util.get_class_logger(self)
self._options = options
self._request.client_terminated = False
self._request.server_terminated = False
# Holds body of received fragments.
self._received_fragments = []
# Holds the opcode of the first fragment.
self._original_opcode = None
self._writer = FragmentedFrameBuilder(
self._options.mask_send, self._options.outgoing_frame_filters,
self._options.encode_text_message_to_utf8)
self._ping_queue = deque()
def _receive_frame(self):
"""Receives a frame and return data in the frame as a tuple containing
each header field and payload separately.
Raises:
ConnectionTerminatedException: when read returns empty
string.
InvalidFrameException: when the frame contains invalid data.
"""
def _receive_bytes(length):
return self.receive_bytes(length)
return parse_frame(receive_bytes=_receive_bytes,
logger=self._logger,
ws_version=self._request.ws_version,
unmask_receive=self._options.unmask_receive)
def _receive_frame_as_frame_object(self):
opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3 = self._receive_frame()
return Frame(fin=fin, rsv1=rsv1, rsv2=rsv2, rsv3=rsv3,
opcode=opcode, payload=unmasked_bytes)
def receive_filtered_frame(self):
"""Receives a frame and applies frame filters and message filters.
        The frame to be received must satisfy the following conditions:
- The frame is not fragmented.
- The opcode of the frame is TEXT or BINARY.
DO NOT USE this method except for testing purpose.
"""
frame = self._receive_frame_as_frame_object()
if not frame.fin:
raise InvalidFrameException(
'Segmented frames must not be received via '
'receive_filtered_frame()')
if (frame.opcode != common.OPCODE_TEXT and
frame.opcode != common.OPCODE_BINARY):
raise InvalidFrameException(
'Control frames must not be received via '
'receive_filtered_frame()')
for frame_filter in self._options.incoming_frame_filters:
frame_filter.filter(frame)
for message_filter in self._options.incoming_message_filters:
frame.payload = message_filter.filter(frame.payload)
return frame
def send_message(self, message, end=True, binary=False):
"""Send message.
Args:
            message: text in unicode or binary in str to send.
            end: False to send the message as a non-final fragment of a
                fragmented message.
            binary: send message as binary frame.
Raises:
BadOperationException: when called on a server-terminated
connection or called with inconsistent message type or
binary parameter.
"""
if self._request.server_terminated:
raise BadOperationException(
'Requested send_message after sending out a closing handshake')
if binary and isinstance(message, unicode):
raise BadOperationException(
'Message for binary frame must be instance of str')
for message_filter in self._options.outgoing_message_filters:
message = message_filter.filter(message, end, binary)
try:
# Set this to any positive integer to limit maximum size of data in
# payload data of each frame.
MAX_PAYLOAD_DATA_SIZE = -1
if MAX_PAYLOAD_DATA_SIZE <= 0:
self._write(self._writer.build(message, end, binary))
return
bytes_written = 0
while True:
end_for_this_frame = end
bytes_to_write = len(message) - bytes_written
if (MAX_PAYLOAD_DATA_SIZE > 0 and
bytes_to_write > MAX_PAYLOAD_DATA_SIZE):
end_for_this_frame = False
bytes_to_write = MAX_PAYLOAD_DATA_SIZE
frame = self._writer.build(
message[bytes_written:bytes_written + bytes_to_write],
end_for_this_frame,
binary)
self._write(frame)
bytes_written += bytes_to_write
# This if must be placed here (the end of while block) so that
# at least one frame is sent.
if len(message) <= bytes_written:
break
except ValueError, e:
raise BadOperationException(e)
def _get_message_from_frame(self, frame):
"""Gets a message from frame. If the message is composed of fragmented
frames and the frame is not the last fragmented frame, this method
returns None. The whole message will be returned when the last
fragmented frame is passed to this method.
Raises:
InvalidFrameException: when the frame doesn't match defragmentation
context, or the frame contains invalid data.
"""
if frame.opcode == common.OPCODE_CONTINUATION:
if not self._received_fragments:
if frame.fin:
raise InvalidFrameException(
'Received a termination frame but fragmentation '
'not started')
else:
raise InvalidFrameException(
'Received an intermediate frame but '
'fragmentation not started')
if frame.fin:
# End of fragmentation frame
self._received_fragments.append(frame.payload)
message = ''.join(self._received_fragments)
self._received_fragments = []
return message
else:
# Intermediate frame
self._received_fragments.append(frame.payload)
return None
else:
if self._received_fragments:
if frame.fin:
raise InvalidFrameException(
'Received an unfragmented frame without '
'terminating existing fragmentation')
else:
raise InvalidFrameException(
'New fragmentation started without terminating '
'existing fragmentation')
if frame.fin:
# Unfragmented frame
self._original_opcode = frame.opcode
return frame.payload
else:
# Start of fragmentation frame
if common.is_control_opcode(frame.opcode):
raise InvalidFrameException(
'Control frames must not be fragmented')
self._original_opcode = frame.opcode
self._received_fragments.append(frame.payload)
return None
def _process_close_message(self, message):
"""Processes close message.
Args:
message: close message.
Raises:
InvalidFrameException: when the message is invalid.
"""
self._request.client_terminated = True
# Status code is optional. We can have status reason only if we
# have status code. Status reason can be empty string. So,
# allowed cases are
# - no application data: no code no reason
# - 2 octet of application data: has code but no reason
# - 3 or more octet of application data: both code and reason
if len(message) == 0:
self._logger.debug('Received close frame (empty body)')
self._request.ws_close_code = (
common.STATUS_NO_STATUS_RECEIVED)
elif len(message) == 1:
raise InvalidFrameException(
'If a close frame has status code, the length of '
'status code must be 2 octet')
elif len(message) >= 2:
self._request.ws_close_code = struct.unpack(
'!H', message[0:2])[0]
self._request.ws_close_reason = message[2:].decode(
'utf-8', 'replace')
self._logger.debug(
'Received close frame (code=%d, reason=%r)',
self._request.ws_close_code,
self._request.ws_close_reason)
# As we've received a close frame, no more data is coming over the
# socket. We can now safely close the socket without worrying about
# RST sending.
if self._request.server_terminated:
self._logger.debug(
'Received ack for server-initiated closing handshake')
return
self._logger.debug(
'Received client-initiated closing handshake')
code = common.STATUS_NORMAL_CLOSURE
reason = ''
if hasattr(self._request, '_dispatcher'):
dispatcher = self._request._dispatcher
code, reason = dispatcher.passive_closing_handshake(
self._request)
if code is None and reason is not None and len(reason) > 0:
self._logger.warning(
'Handler specified reason despite code being None')
reason = ''
if reason is None:
reason = ''
self._send_closing_handshake(code, reason)
self._logger.debug(
'Acknowledged closing handshake initiated by the peer '
'(code=%r, reason=%r)', code, reason)
def _process_ping_message(self, message):
"""Processes ping message.
Args:
message: ping message.
"""
try:
handler = self._request.on_ping_handler
if handler:
handler(self._request, message)
return
except AttributeError, e:
pass
self._send_pong(message)
def _process_pong_message(self, message):
"""Processes pong message.
Args:
message: pong message.
"""
# TODO(tyoshino): Add ping timeout handling.
inflight_pings = deque()
while True:
try:
expected_body = self._ping_queue.popleft()
if expected_body == message:
# inflight_pings contains pings ignored by the
# other peer. Just forget them.
self._logger.debug(
'Ping %r is acked (%d pings were ignored)',
expected_body, len(inflight_pings))
break
else:
inflight_pings.append(expected_body)
except IndexError, e:
                # The received pong was an unsolicited pong. Keep the
                # ping queue as is.
                self._ping_queue = inflight_pings
                self._logger.debug('Received an unsolicited pong')
break
try:
handler = self._request.on_pong_handler
if handler:
handler(self._request, message)
except AttributeError, e:
pass
def receive_message(self):
"""Receive a WebSocket frame and return its payload as a text in
unicode or a binary in str.
Returns:
payload data of the frame
- as unicode instance if received text frame
- as str instance if received binary frame
or None iff received closing handshake.
Raises:
BadOperationException: when called on a client-terminated
connection.
ConnectionTerminatedException: when read returns empty
string.
InvalidFrameException: when the frame contains invalid
data.
UnsupportedFrameException: when the received frame has
flags, opcode we cannot handle. You can ignore this
exception and continue receiving the next frame.
"""
if self._request.client_terminated:
raise BadOperationException(
'Requested receive_message after receiving a closing '
'handshake')
while True:
# mp_conn.read will block if no bytes are available.
# Timeout is controlled by TimeOut directive of Apache.
frame = self._receive_frame_as_frame_object()
# Check the constraint on the payload size for control frames
# before extension processes the frame.
# See also http://tools.ietf.org/html/rfc6455#section-5.5
if (common.is_control_opcode(frame.opcode) and
len(frame.payload) > 125):
raise InvalidFrameException(
'Payload data size of control frames must be 125 bytes or '
'less')
for frame_filter in self._options.incoming_frame_filters:
frame_filter.filter(frame)
if frame.rsv1 or frame.rsv2 or frame.rsv3:
raise UnsupportedFrameException(
'Unsupported flag is set (rsv = %d%d%d)' %
(frame.rsv1, frame.rsv2, frame.rsv3))
message = self._get_message_from_frame(frame)
if message is None:
continue
for message_filter in self._options.incoming_message_filters:
message = message_filter.filter(message)
if self._original_opcode == common.OPCODE_TEXT:
# The WebSocket protocol section 4.4 specifies that invalid
# characters must be replaced with U+fffd REPLACEMENT
# CHARACTER.
try:
return message.decode('utf-8')
except UnicodeDecodeError, e:
raise InvalidUTF8Exception(e)
elif self._original_opcode == common.OPCODE_BINARY:
return message
elif self._original_opcode == common.OPCODE_CLOSE:
self._process_close_message(message)
return None
elif self._original_opcode == common.OPCODE_PING:
self._process_ping_message(message)
elif self._original_opcode == common.OPCODE_PONG:
self._process_pong_message(message)
else:
raise UnsupportedFrameException(
'Opcode %d is not supported' % self._original_opcode)
def _send_closing_handshake(self, code, reason):
body = create_closing_handshake_body(code, reason)
frame = create_close_frame(
body, mask=self._options.mask_send,
frame_filters=self._options.outgoing_frame_filters)
self._request.server_terminated = True
self._write(frame)
def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason='',
wait_response=True):
"""Closes a WebSocket connection.
Args:
code: Status code for close frame. If code is None, a close
frame with empty body will be sent.
reason: string representing close reason.
            wait_response: True when the caller wants to wait for the response.
Raises:
BadOperationException: when reason is specified with code None
                or reason is not an instance of either str or unicode.
"""
if self._request.server_terminated:
self._logger.debug(
'Requested close_connection but server is already terminated')
return
if code is None:
if reason is not None and len(reason) > 0:
raise BadOperationException(
'close reason must not be specified if code is None')
reason = ''
else:
if not isinstance(reason, str) and not isinstance(reason, unicode):
raise BadOperationException(
'close reason must be an instance of str or unicode')
self._send_closing_handshake(code, reason)
self._logger.debug(
'Initiated closing handshake (code=%r, reason=%r)',
code, reason)
if (code == common.STATUS_GOING_AWAY or
code == common.STATUS_PROTOCOL_ERROR) or not wait_response:
# It doesn't make sense to wait for a close frame if the reason is
            # protocol error or the server is going away. For some other
            # reasons, it might not make sense to wait for a close frame,
# but it's not clear, yet.
return
# TODO(ukai): 2. wait until the /client terminated/ flag has been set,
# or until a server-defined timeout expires.
#
# For now, we expect receiving closing handshake right after sending
# out closing handshake.
message = self.receive_message()
if message is not None:
raise ConnectionTerminatedException(
'Didn\'t receive valid ack for closing handshake')
# TODO: 3. close the WebSocket connection.
# note: mod_python Connection (mp_conn) doesn't have close method.
def send_ping(self, body=''):
frame = create_ping_frame(
body,
self._options.mask_send,
self._options.outgoing_frame_filters)
self._write(frame)
self._ping_queue.append(body)
def _send_pong(self, body):
frame = create_pong_frame(
body,
self._options.mask_send,
self._options.outgoing_frame_filters)
self._write(frame)
def get_last_received_opcode(self):
"""Returns the opcode of the WebSocket message which the last received
        frame belongs to. The return value is only valid immediately after a
        receive_message call.
"""
return self._original_opcode
# vi:sts=4 sw=4 et
|
Theb-1/home-assistant
|
refs/heads/dev
|
homeassistant/helpers/event_decorators.py
|
4
|
""" Event Decorators for custom components """
import functools
from homeassistant.helpers import event
HASS = None
def track_state_change(entity_ids, from_state=None, to_state=None):
""" Decorator factory to track state changes for entity id """
def track_state_change_decorator(action):
""" Decorator to track state changes """
event.track_state_change(HASS, entity_ids,
functools.partial(action, HASS),
from_state, to_state)
return action
return track_state_change_decorator
def track_sunrise(offset=None):
""" Decorator factory to track sunrise events """
def track_sunrise_decorator(action):
""" Decorator to track sunrise events """
event.track_sunrise(HASS,
functools.partial(action, HASS),
offset)
return action
return track_sunrise_decorator
def track_sunset(offset=None):
""" Decorator factory to track sunset events """
def track_sunset_decorator(action):
""" Decorator to track sunset events """
event.track_sunset(HASS,
functools.partial(action, HASS),
offset)
return action
return track_sunset_decorator
# pylint: disable=too-many-arguments
def track_time_change(year=None, month=None, day=None, hour=None, minute=None,
second=None):
""" Decorator factory to track time changes """
def track_time_change_decorator(action):
""" Decorator to track time changes """
event.track_time_change(HASS,
functools.partial(action, HASS),
year, month, day, hour, minute, second)
return action
return track_time_change_decorator
# pylint: disable=too-many-arguments
def track_utc_time_change(year=None, month=None, day=None, hour=None,
minute=None, second=None):
""" Decorator factory to track time changes """
def track_utc_time_change_decorator(action):
""" Decorator to track time changes """
event.track_utc_time_change(HASS,
functools.partial(action, HASS),
year, month, day, hour, minute, second)
return action
return track_utc_time_change_decorator
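# Illustrative sketch (not part of this module): these decorator factories are
# meant to be applied to custom-component callbacks after HASS has been set by
# the component loader. The entity id, callback name and callback signature
# below are assumptions for illustration only.
#
#     @track_state_change('light.kitchen', to_state='on')
#     def kitchen_light_turned_on(hass, entity_id, old_state, new_state):
#         hass.bus.fire('kitchen_light_on')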
|
baishancloud/pykit
|
refs/heads/master
|
proc/__init__.py
|
2
|
from .proc import (
ProcError,
command,
command_ex,
shell_script,
start_process,
)
__all__ = [
'ProcError',
'command',
'command_ex',
'shell_script',
'start_process',
]
|
Proyag/nematus
|
refs/heads/master
|
test/test_translate.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import unittest
import requests
sys.path.append(os.path.abspath('../nematus'))
from translate import main as translate
from settings import TranslationSettings
def load_wmt16_model(src, target):
path = os.path.join('models', '{0}-{1}'.format(src,target))
try:
os.makedirs(path)
except OSError:
pass
for filename in ['model.npz', 'model.npz.json', 'vocab.{0}.json'.format(src), 'vocab.{0}.json'.format(target)]:
if not os.path.exists(os.path.join(path, filename)):
r = requests.get('http://data.statmt.org/rsennrich/wmt16_systems/{0}-{1}/'.format(src,target) + filename, stream=True)
with open(os.path.join(path, filename), 'wb') as f:
for chunk in r.iter_content(1024**2):
f.write(chunk)
class TestTranslate(unittest.TestCase):
"""
Regression tests for translation with WMT16 models
"""
def setUp(self):
"""
Download pre-trained models
"""
load_wmt16_model('en','de')
load_wmt16_model('en','ro')
def outputEqual(self, output1, output2):
"""given two translation outputs, check that output string is identical,
and probabilities are equal within rounding error.
"""
for i, (line, line2) in enumerate(zip(open(output1).readlines(), open(output2).readlines())):
if not i % 2:
self.assertEqual(line, line2)
else:
probs = map(float, line.split())
                probs2 = map(float, line2.split())
for p, p2 in zip(probs, probs2):
self.assertAlmostEqual(p, p2, 5)
def get_settings(self):
"""
Initialize and customize settings.
"""
translation_settings = TranslationSettings()
translation_settings.models = ["model.npz"]
translation_settings.num_processes = 1
translation_settings.beam_width = 12
translation_settings.normalization_alpha = 1.0
translation_settings.suppress_unk = True
translation_settings.get_word_probs = True
return translation_settings
# English-German WMT16 system, no dropout
def test_ende(self):
os.chdir('models/en-de/')
translation_settings = self.get_settings()
translate(
input_file=open('../../en-de/in'),
output_file=open('../../en-de/out','w'),
translation_settings=translation_settings
)
os.chdir('../..')
self.outputEqual('en-de/ref','en-de/out')
# English-Romanian WMT16 system, dropout
def test_enro(self):
os.chdir('models/en-ro/')
translation_settings = self.get_settings()
translate(
input_file=open('../../en-ro/in'),
output_file=open('../../en-ro/out','w'),
translation_settings=translation_settings
)
os.chdir('../..')
self.outputEqual('en-ro/ref','en-ro/out')
if __name__ == '__main__':
unittest.main()
|
390910131/Misago
|
refs/heads/master
|
misago/threads/tests/test_threads_moderation.py
|
8
|
from misago.forums.models import Forum
from misago.users.testutils import AuthenticatedUserTestCase
from misago.threads import moderation, testutils
from misago.threads.models import Label, Thread, Post, Event
class ThreadsModerationTests(AuthenticatedUserTestCase):
def setUp(self):
super(ThreadsModerationTests, self).setUp()
self.forum = Forum.objects.all_forums().filter(role="forum")[:1][0]
self.thread = testutils.post_thread(self.forum)
Label.objects.clear_cache()
def tearDown(self):
super(ThreadsModerationTests, self).tearDown()
Label.objects.clear_cache()
def reload_thread(self):
self.thread = Thread.objects.get(pk=self.thread.pk)
def test_label_thread(self):
"""label_thread makes thread announcement"""
label = Label.objects.create(name="Label", slug="label")
self.assertIsNone(self.thread.label)
self.assertTrue(moderation.label_thread(self.user, self.thread, label))
self.reload_thread()
self.assertEqual(self.thread.label, label)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertEqual(event.icon, "tag")
self.assertIn("set thread label to", event.message)
def test_unlabel_thread(self):
"""unlabel_thread removes thread label"""
label = Label.objects.create(name="Label", slug="label")
self.assertTrue(moderation.label_thread(self.user, self.thread, label))
self.reload_thread()
self.assertTrue(moderation.unlabel_thread(self.user, self.thread))
self.reload_thread()
self.assertIsNone(self.thread.label)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertEqual(event.icon, "tag")
self.assertIn("removed thread label.", event.message)
def test_pin_thread(self):
"""pin_thread makes thread pinned"""
self.assertFalse(self.thread.is_pinned)
self.assertTrue(moderation.pin_thread(self.user, self.thread))
self.reload_thread()
self.assertTrue(self.thread.is_pinned)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertEqual(event.icon, "star")
self.assertIn("pinned thread.", event.message)
def test_pin_invalid_thread(self):
"""pin_thread returns false for already pinned thread"""
self.thread.is_pinned = True
self.assertFalse(moderation.pin_thread(self.user, self.thread))
self.assertTrue(self.thread.is_pinned)
def test_unpin_thread(self):
"""unpin_thread defaults thread weight"""
moderation.pin_thread(self.user, self.thread)
self.assertTrue(self.thread.is_pinned)
self.assertTrue(moderation.unpin_thread(self.user, self.thread))
self.reload_thread()
self.assertFalse(self.thread.is_pinned)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("unpinned thread.", event.message)
self.assertEqual(event.icon, "circle")
def test_unpin_invalid_thread(self):
"""unpin_thread returns false for already pinned thread"""
self.assertFalse(moderation.unpin_thread(self.user, self.thread))
self.assertFalse(self.thread.is_pinned)
def test_approve_thread(self):
"""approve_thread approves moderated thread"""
thread = testutils.post_thread(self.forum, is_moderated=True)
self.assertTrue(thread.is_moderated)
self.assertTrue(thread.first_post.is_moderated)
self.assertTrue(moderation.approve_thread(self.user, thread))
self.reload_thread()
self.assertFalse(thread.is_moderated)
self.assertFalse(thread.first_post.is_moderated)
self.assertTrue(thread.has_events)
event = thread.event_set.last()
self.assertIn("approved thread.", event.message)
self.assertEqual(event.icon, "check")
def test_move_thread(self):
"""moves_thread moves moderated thread to other froum"""
new_forum = Forum.objects.all_forums().filter(role="category")[:1][0]
self.assertEqual(self.thread.forum, self.forum)
self.assertTrue(
moderation.move_thread(self.user, self.thread, new_forum))
self.reload_thread()
self.assertEqual(self.thread.forum, new_forum)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("moved thread", event.message)
self.assertEqual(event.icon, "arrow-right")
def test_move_thread_to_same_forum(self):
"""moves_thread does not move thread to same forum it is in"""
self.assertEqual(self.thread.forum, self.forum)
self.assertFalse(
moderation.move_thread(self.user, self.thread, self.forum))
self.reload_thread()
self.assertEqual(self.thread.forum, self.forum)
self.assertFalse(self.thread.has_events)
def test_close_thread(self):
"""close_thread closes thread"""
self.assertFalse(self.thread.is_closed)
self.assertTrue(moderation.close_thread(self.user, self.thread))
self.reload_thread()
self.assertTrue(self.thread.is_closed)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("closed thread.", event.message)
self.assertEqual(event.icon, "lock")
def test_close_invalid_thread(self):
"""close_thread fails gracefully for opened thread"""
moderation.close_thread(self.user, self.thread)
self.reload_thread()
self.assertTrue(self.thread.is_closed)
self.assertFalse(moderation.close_thread(self.user, self.thread))
def test_open_thread(self):
"""open_thread closes thread"""
moderation.close_thread(self.user, self.thread)
self.reload_thread()
self.assertTrue(self.thread.is_closed)
self.assertTrue(moderation.open_thread(self.user, self.thread))
self.reload_thread()
self.assertFalse(self.thread.is_closed)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("opened thread.", event.message)
self.assertEqual(event.icon, "unlock-alt")
def test_open_invalid_thread(self):
"""open_thread fails gracefully for opened thread"""
self.assertFalse(self.thread.is_closed)
self.assertFalse(moderation.open_thread(self.user, self.thread))
def test_hide_thread(self):
"""hide_thread hides thread"""
self.assertFalse(self.thread.is_hidden)
self.assertTrue(moderation.hide_thread(self.user, self.thread))
self.reload_thread()
self.assertTrue(self.thread.is_hidden)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("hidden thread.", event.message)
self.assertEqual(event.icon, "eye-slash")
def test_hide_hidden_thread(self):
"""hide_thread fails gracefully for hidden thread"""
self.thread.is_hidden = True
self.assertFalse(moderation.hide_thread(self.user, self.thread))
def test_unhide_thread(self):
"""unhide_thread unhides thread"""
moderation.hide_thread(self.user, self.thread)
self.reload_thread()
self.assertTrue(self.thread.is_hidden)
self.assertTrue(moderation.unhide_thread(self.user, self.thread))
self.reload_thread()
self.assertFalse(self.thread.is_hidden)
self.assertTrue(self.thread.has_events)
event = self.thread.event_set.last()
self.assertIn("made thread visible.", event.message)
self.assertEqual(event.icon, "eye")
def test_unhide_visible_thread(self):
"""unhide_thread fails gracefully for visible thread"""
self.assertFalse(moderation.unhide_thread(self.user, self.thread))
def test_delete_thread(self):
"""delete_thread deletes thread"""
self.assertTrue(moderation.delete_thread(self.user, self.thread))
with self.assertRaises(Thread.DoesNotExist):
self.reload_thread()
|
miguelparaiso/PracticaOdoo
|
refs/heads/master
|
addons/l10n_cn/__init__.py
|
339
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2007-2014 Jeff Wang(<http://jeff@osbzr.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
freakboy3742/django
|
refs/heads/main
|
tests/forms_tests/field_tests/test_multivaluefield.py
|
27
|
from datetime import datetime
from django.core.exceptions import ValidationError
from django.forms import (
CharField, Form, MultipleChoiceField, MultiValueField, MultiWidget,
SelectMultiple, SplitDateTimeField, SplitDateTimeWidget, TextInput,
)
from django.test import SimpleTestCase
beatles = (('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))
class ComplexMultiWidget(MultiWidget):
def __init__(self, attrs=None):
widgets = (
TextInput(),
SelectMultiple(choices=beatles),
SplitDateTimeWidget(),
)
super().__init__(widgets, attrs)
def decompress(self, value):
if value:
data = value.split(',')
return [
data[0],
list(data[1]),
datetime.strptime(data[2], "%Y-%m-%d %H:%M:%S"),
]
return [None, None, None]
class ComplexField(MultiValueField):
def __init__(self, **kwargs):
fields = (
CharField(),
MultipleChoiceField(choices=beatles),
SplitDateTimeField(),
)
super().__init__(fields, **kwargs)
def compress(self, data_list):
if data_list:
return '%s,%s,%s' % (data_list[0], ''.join(data_list[1]), data_list[2])
return None
class ComplexFieldForm(Form):
field1 = ComplexField(widget=ComplexMultiWidget())
class MultiValueFieldTest(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.field = ComplexField(widget=ComplexMultiWidget())
super().setUpClass()
def test_clean(self):
self.assertEqual(
self.field.clean(['some text', ['J', 'P'], ['2007-04-25', '6:24:00']]),
'some text,JP,2007-04-25 06:24:00',
)
def test_clean_disabled_multivalue(self):
class ComplexFieldForm(Form):
f = ComplexField(disabled=True, widget=ComplexMultiWidget)
inputs = (
'some text,JP,2007-04-25 06:24:00',
['some text', ['J', 'P'], ['2007-04-25', '6:24:00']],
)
for data in inputs:
with self.subTest(data=data):
form = ComplexFieldForm({}, initial={'f': data})
form.full_clean()
self.assertEqual(form.errors, {})
self.assertEqual(form.cleaned_data, {'f': inputs[0]})
def test_bad_choice(self):
msg = "'Select a valid choice. X is not one of the available choices.'"
with self.assertRaisesMessage(ValidationError, msg):
self.field.clean(['some text', ['X'], ['2007-04-25', '6:24:00']])
def test_no_value(self):
"""
If insufficient data is provided, None is substituted.
"""
msg = "'This field is required.'"
with self.assertRaisesMessage(ValidationError, msg):
self.field.clean(['some text', ['JP']])
def test_has_changed_no_initial(self):
self.assertTrue(self.field.has_changed(None, ['some text', ['J', 'P'], ['2007-04-25', '6:24:00']]))
def test_has_changed_same(self):
self.assertFalse(self.field.has_changed(
'some text,JP,2007-04-25 06:24:00',
['some text', ['J', 'P'], ['2007-04-25', '6:24:00']],
))
def test_has_changed_first_widget(self):
"""
Test when the first widget's data has changed.
"""
self.assertTrue(self.field.has_changed(
'some text,JP,2007-04-25 06:24:00',
['other text', ['J', 'P'], ['2007-04-25', '6:24:00']],
))
def test_has_changed_last_widget(self):
"""
Test when the last widget's data has changed. This ensures that it is
not short circuiting while testing the widgets.
"""
self.assertTrue(self.field.has_changed(
'some text,JP,2007-04-25 06:24:00',
['some text', ['J', 'P'], ['2009-04-25', '11:44:00']],
))
def test_disabled_has_changed(self):
f = MultiValueField(fields=(CharField(), CharField()), disabled=True)
self.assertIs(f.has_changed(['x', 'x'], ['y', 'y']), False)
def test_form_as_table(self):
form = ComplexFieldForm()
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th><label for="id_field1_0">Field1:</label></th>
<td><input type="text" name="field1_0" id="id_field1_0" required>
<select multiple name="field1_1" id="id_field1_1" required>
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
<input type="text" name="field1_2_0" id="id_field1_2_0" required>
<input type="text" name="field1_2_1" id="id_field1_2_1" required></td></tr>
""",
)
def test_form_as_table_data(self):
form = ComplexFieldForm({
'field1_0': 'some text',
'field1_1': ['J', 'P'],
'field1_2_0': '2007-04-25',
'field1_2_1': '06:24:00',
})
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th><label for="id_field1_0">Field1:</label></th>
<td><input type="text" name="field1_0" value="some text" id="id_field1_0" required>
<select multiple name="field1_1" id="id_field1_1" required>
<option value="J" selected>John</option>
<option value="P" selected>Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
<input type="text" name="field1_2_0" value="2007-04-25" id="id_field1_2_0" required>
<input type="text" name="field1_2_1" value="06:24:00" id="id_field1_2_1" required></td></tr>
""",
)
def test_form_cleaned_data(self):
form = ComplexFieldForm({
'field1_0': 'some text',
'field1_1': ['J', 'P'],
'field1_2_0': '2007-04-25',
'field1_2_1': '06:24:00',
})
form.is_valid()
self.assertEqual(form.cleaned_data['field1'], 'some text,JP,2007-04-25 06:24:00')
|
StackStorm/python-mistralclient
|
refs/heads/master
|
mistralclient/tests/unit/test_utils.py
|
1
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os.path
import tempfile
import yaml
from mistralclient import utils
from oslotest import base
ENV_DICT = {'k1': 'abc', 'k2': 123, 'k3': True}
ENV_STR = json.dumps(ENV_DICT)
ENV_YAML = yaml.safe_dump(ENV_DICT, default_flow_style=False)
class UtilityTest(base.BaseTestCase):
def test_load_empty(self):
self.assertDictEqual(dict(), utils.load_content(None))
self.assertDictEqual(dict(), utils.load_content(''))
self.assertDictEqual(dict(), utils.load_content('{}'))
self.assertListEqual(list(), utils.load_content('[]'))
def test_load_json_content(self):
self.assertDictEqual(ENV_DICT, utils.load_content(ENV_STR))
def test_load_json_file(self):
with tempfile.NamedTemporaryFile() as f:
f.write(ENV_STR.encode('utf-8'))
f.flush()
file_path = os.path.abspath(f.name)
self.assertDictEqual(ENV_DICT, utils.load_file(file_path))
def test_load_yaml_content(self):
self.assertDictEqual(ENV_DICT, utils.load_content(ENV_YAML))
def test_load_yaml_file(self):
with tempfile.NamedTemporaryFile() as f:
f.write(ENV_YAML.encode('utf-8'))
f.flush()
file_path = os.path.abspath(f.name)
self.assertDictEqual(ENV_DICT, utils.load_file(file_path))
def test_load_json(self):
with tempfile.NamedTemporaryFile() as f:
f.write(ENV_STR.encode('utf-8'))
f.flush()
self.assertDictEqual(ENV_DICT, utils.load_json(f.name))
self.assertDictEqual(ENV_DICT, utils.load_json(ENV_STR))
|
raycarnes/odoomrp-wip
|
refs/heads/8.0
|
mrp_bom_catch_product_code/__openerp__.py
|
27
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "MRP Bom Catch Product Code",
"version": "1.0",
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"category": "Manufacturing",
"website": "http://www.odoomrp.com",
"depends": [
"mrp",
"product_variant_default_code",
],
"installable": True
}
|
koparasy/faultinjection-gem5
|
refs/heads/master
|
src/arch/x86/isa/insts/general_purpose/__init__.py
|
91
|
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["arithmetic",
"cache_and_memory_management",
"compare_and_test",
"control_transfer",
"data_conversion",
"data_transfer",
"flags",
"input_output",
"load_effective_address",
"load_segment_registers",
"logical",
"no_operation",
"rotate_and_shift",
"semaphores",
"string",
"system_calls"]
microcode = '''
# Microcode for general purpose instructions
'''
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
|
unixhot/opencmdb
|
refs/heads/master
|
django_mongoengine/mongo_admin/apps.py
|
3
|
from django.apps import AppConfig
from django.core import checks
from django.utils.translation import ugettext_lazy as _
def check_admin_app(**kwargs):
from .sites import system_check_errors
return system_check_errors
class SimpleMongoAdminConfig(AppConfig):
"""Simple AppConfig which does not do automatic discovery."""
name = "django_mongoengine.mongo_admin"
verbose_name = _("Administration")
def ready(self):
checks.register(check_admin_app, checks.Tags.admin)
class MongoAdminConfig(SimpleMongoAdminConfig):
def ready(self):
super(MongoAdminConfig, self).ready()
self.module.autodiscover()
|
ryanpitts/source
|
refs/heads/master
|
source/people/management/commands/__init__.py
|
12133432
| |
geolinkedata/api
|
refs/heads/master
|
api_tutorial/api_tutorial/__init__.py
|
12133432
| |
Suninus/erpnext
|
refs/heads/develop
|
erpnext/patches/v4_4/__init__.py
|
12133432
| |
anchit1/mBlog
|
refs/heads/master
|
micro_blog/wsgi.py
|
1
|
"""
WSGI config for micro_blog project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "micro_blog.settings")
application = get_wsgi_application()
|
poderopedia/podermapa
|
refs/heads/master
|
languages/fr.py
|
140
|
# coding: utf8
{
'!langcode!': 'fr',
'!langname!': 'Français',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" est une expression optionnelle comme "champ1=\'nouvellevaleur\'". Vous ne pouvez mettre à jour ou supprimer les résultats d\'un JOIN',
'%s %%{row} deleted': '%s lignes supprimées',
'%s %%{row} updated': '%s lignes mises à jour',
'%s selected': '%s sélectionné',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'About': 'À propos',
'Access Control': "Contrôle d'accès",
'Administrative Interface': "Interface d'administration",
'Administrative interface': "Interface d'administration",
'Ajax Recipes': 'Recettes Ajax',
'appadmin is disabled because insecure channel': "appadmin est désactivée parce que le canal n'est pas sécurisé",
'Are you sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Authentication': 'Authentification',
'Available Databases and Tables': 'Bases de données et tables disponibles',
'Buy this book': 'Acheter ce livre',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Clés de cache',
'Cannot be empty': 'Ne peut pas être vide',
'change password': 'changer le mot de passe',
'Check to delete': 'Cliquez pour supprimer',
'Check to delete:': 'Cliquez pour supprimer:',
'Clear CACHE?': 'Vider le CACHE?',
'Clear DISK': 'Vider le DISQUE',
'Clear RAM': 'Vider la RAM',
'Client IP': 'IP client',
'Community': 'Communauté',
'Components and Plugins': 'Composants et Plugins',
'Controller': 'Contrôleur',
'Copyright': 'Copyright',
'Created By': 'Créé par',
'Created On': 'Créé le',
'Current request': 'Demande actuelle',
'Current response': 'Réponse actuelle',
'Current session': 'Session en cours',
'customize me!': 'personnalisez-moi!',
'data uploaded': 'données téléchargées',
'Database': 'base de données',
'Database %s select': 'base de données %s selectionnée',
'db': 'bdd',
'DB Model': 'Modèle BDD',
'Delete:': 'Supprimer:',
'Demo': 'Démo',
'Deployment Recipes': 'Recettes de déploiement',
'Description': 'Description',
'design': 'design',
'DISK': 'DISQUE',
'Disk Cache Keys': 'Clés de cache du disque',
'Disk Cleared': 'Disque vidé',
'Documentation': 'Documentation',
"Don't know what to do?": 'Vous ne savez pas quoi faire?',
'done!': 'fait!',
'Download': 'Téléchargement',
'E-mail': 'E-mail',
'Edit': 'Éditer',
'Edit current record': "Modifier l'enregistrement courant",
'edit profile': 'modifier le profil',
'Edit This App': 'Modifier cette application',
'Email and SMS': 'Email et SMS',
'enter an integer between %(min)g and %(max)g': 'entrez un entier entre %(min)g et %(max)g',
'Errors': 'Erreurs',
'export as csv file': 'exporter sous forme de fichier csv',
'FAQ': 'FAQ',
'First name': 'Prénom',
'Forms and Validators': 'Formulaires et Validateurs',
'Free Applications': 'Applications gratuites',
'Function disabled': 'Fonction désactivée',
'Group ID': 'Groupe ID',
'Groups': 'Groupes',
'Hello World': 'Bonjour le monde',
'Home': 'Accueil',
'How did you get here?': 'Comment êtes-vous arrivé ici?',
'import': 'import',
'Import/Export': 'Importer/Exporter',
'Index': 'Index',
'insert new': 'insérer un nouveau',
'insert new %s': 'insérer un nouveau %s',
'Internal State': 'État interne',
'Introduction': 'Introduction',
'Invalid email': 'E-mail invalide',
'Invalid Query': 'Requête Invalide',
'invalid request': 'requête invalide',
'Is Active': 'Est actif',
'Key': 'Clé',
'Last name': 'Nom',
'Layout': 'Mise en page',
'Layout Plugins': 'Plugins de mise en page',
'Layouts': 'Mises en page',
'Live chat': 'Chat en direct',
'Live Chat': 'Chat en direct',
'login': 'connectez-vous',
'Login': 'Connectez-vous',
'logout': 'déconnectez-vous',
'lost password': 'mot de passe perdu',
'Lost Password': 'Mot de passe perdu',
'Lost password?': 'Mot de passe perdu?',
'lost password?': 'mot de passe perdu?',
'Main Menu': 'Menu principal',
'Manage Cache': 'Gérer le Cache',
'Menu Model': 'Menu modèle',
'Modified By': 'Modifié par',
'Modified On': 'Modifié le',
'My Sites': 'Mes sites',
'Name': 'Nom',
'New Record': 'Nouvel enregistrement',
'new record inserted': 'nouvel enregistrement inséré',
'next 100 rows': '100 prochaines lignes',
'No databases in this application': "Cette application n'a pas de bases de données",
'Object or table name': 'Objet ou nom de table',
'Online examples': 'Exemples en ligne',
'or import from csv file': "ou importer d'un fichier CSV",
'Origin': 'Origine',
'Other Plugins': 'Autres Plugins',
'Other Recipes': 'Autres recettes',
'Overview': 'Présentation',
'Password': 'Mot de passe',
"Password fields don't match": 'Les mots de passe ne correspondent pas',
'Plugins': 'Plugins',
'Powered by': 'Alimenté par',
'Preface': 'Préface',
'previous 100 rows': '100 lignes précédentes',
'Python': 'Python',
'Query:': 'Requête:',
'Quick Examples': 'Exemples Rapides',
'RAM': 'RAM',
'RAM Cache Keys': 'Clés de cache de la RAM',
'Ram Cleared': 'Ram vidée',
'Readme': 'Lisez-moi',
'Recipes': 'Recettes',
'Record': 'enregistrement',
'record does not exist': "l'archive n'existe pas",
'Record ID': "ID d'enregistrement",
'Record id': "id d'enregistrement",
'Register': "S'inscrire",
'register': "s'inscrire",
'Registration identifier': "Identifiant d'enregistrement",
'Registration key': "Clé d'enregistrement",
'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)',
'Request reset password': 'Demande de réinitialiser le mot clé',
'Reset Password key': 'Réinitialiser le mot clé',
'Resources': 'Ressources',
'Role': 'Rôle',
'Rows in Table': 'Lignes du tableau',
'Rows selected': 'Lignes sélectionnées',
'Semantic': 'Sémantique',
'Services': 'Services',
'Size of cache:': 'Taille du cache:',
'state': 'état',
'Statistics': 'Statistiques',
'Stylesheet': 'Feuille de style',
'submit': 'soumettre',
'Submit': 'Soumettre',
'Support': 'Support',
'Sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Table': 'tableau',
'Table name': 'Nom du tableau',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "requête" est une condition comme "db.table1.champ1==\'valeur\'". Quelque chose comme "db.table1.champ1==db.table2.champ2" résulte en un JOIN SQL.',
'The Core': 'Le noyau',
'The output of the file is a dictionary that was rendered by the view %s': 'La sortie de ce fichier est un dictionnaire qui été restitué par la vue %s',
'The Views': 'Les Vues',
'This App': 'Cette Appli',
'This is a copy of the scaffolding application': "Ceci est une copie de l'application échafaudage",
'Time in Cache (h:m:s)': 'Temps en Cache (h:m:s)',
'Timestamp': 'Horodatage',
'Twitter': 'Twitter',
'unable to parse csv file': "incapable d'analyser le fichier csv",
'Update:': 'Mise à jour:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Employez (...)&(...) pour AND, (...)|(...) pour OR, and ~(...) pour NOT afin de construire des requêtes plus complexes.',
'User %(id)s Logged-in': 'Utilisateur %(id)s connecté',
'User %(id)s Registered': 'Utilisateur %(id)s enregistré',
'User ID': 'ID utilisateur',
'User Voice': "Voix de l'utilisateur",
'Verify Password': 'Vérifiez le mot de passe',
'Videos': 'Vidéos',
'View': 'Présentation',
'Web2py': 'Web2py',
'Welcome': 'Bienvenue',
'Welcome %s': 'Bienvenue %s',
'Welcome to web2py': 'Bienvenue à web2py',
'Welcome to web2py!': 'Bienvenue à web2py!',
'Which called the function %s located in the file %s': 'Qui a appelé la fonction %s se trouvant dans le fichier %s',
'You are successfully running web2py': 'Vous exécutez avec succès web2py',
'You can modify this application and adapt it to your needs': "Vous pouvez modifier cette application et l'adapter à vos besoins",
'You visited the url %s': "Vous avez visité l'URL %s",
}
|
weave-lab/nw.js
|
refs/heads/nw13
|
tools/file_util.py
|
77
|
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.
from glob import iglob
import os
import shutil
import sys
import time
def read_file(name, normalize = True):
""" Read a file. """
try:
f = open(name, 'r')
# read the data
data = f.read()
if normalize:
# normalize line endings
data = data.replace("\r\n", "\n")
return data
except IOError, (errno, strerror):
sys.stderr.write('Failed to read file '+name+': '+strerror)
raise
else:
f.close()
def write_file(name, data):
""" Write a file. """
try:
f = open(name, 'w')
# write the data
f.write(data)
except IOError, (errno, strerror):
sys.stderr.write('Failed to write file '+name+': '+strerror)
raise
else:
f.close()
def path_exists(name):
""" Returns true if the path currently exists. """
return os.path.exists(name)
def backup_file(name):
""" Rename the file to a name that includes the current time stamp. """
move_file(name, name+'.'+time.strftime('%Y-%m-%d-%H-%M-%S'))
def copy_file(src, dst, quiet = True):
""" Copy a file. """
try:
shutil.copy(src, dst)
if not quiet:
sys.stdout.write('Transferring '+src+' file.\n')
except IOError, (errno, strerror):
sys.stderr.write('Failed to copy file from '+src+' to '+dst+': '+strerror)
raise
def move_file(src, dst, quiet = True):
""" Move a file. """
try:
shutil.move(src, dst)
if not quiet:
sys.stdout.write('Moving '+src+' file.\n')
except IOError, (errno, strerror):
sys.stderr.write('Failed to move file from '+src+' to '+dst+': '+strerror)
raise
def copy_files(src_glob, dst_folder, quiet = True):
""" Copy multiple files. """
for fname in iglob(src_glob):
dst = os.path.join(dst_folder, os.path.basename(fname))
if os.path.isdir(fname):
copy_dir(fname, dst, quiet)
else:
copy_file(fname, dst, quiet)
def remove_file(name, quiet = True):
""" Remove the specified file. """
try:
if path_exists(name):
os.remove(name)
if not quiet:
sys.stdout.write('Removing '+name+' file.\n')
except IOError, (errno, strerror):
sys.stderr.write('Failed to remove file '+name+': '+strerror)
raise
def copy_dir(src, dst, quiet = True):
""" Copy a directory tree. """
try:
remove_dir(dst, quiet)
shutil.copytree(src, dst)
if not quiet:
sys.stdout.write('Transferring '+src+' directory.\n')
except IOError, (errno, strerror):
sys.stderr.write('Failed to copy directory from '+src+' to '+dst+': '+strerror)
raise
def remove_dir(name, quiet = True):
""" Remove the specified directory. """
try:
if path_exists(name):
shutil.rmtree(name)
if not quiet:
sys.stdout.write('Removing '+name+' directory.\n')
except IOError, (errno, strerror):
sys.stderr.write('Failed to remove directory '+name+': '+strerror)
raise
def make_dir(name, quiet = True):
""" Create the specified directory. """
try:
if not path_exists(name):
if not quiet:
sys.stdout.write('Creating '+name+' directory.\n')
os.makedirs(name)
except IOError, (errno, strerror):
sys.stderr.write('Failed to create directory '+name+': '+strerror)
raise
def get_files(search_glob):
""" Returns all files matching the search glob. """
# Sort the result for consistency across platforms.
return sorted(iglob(search_glob))
def read_version_file(file, args):
""" Read and parse a version file (key=value pairs, one per line). """
lines = read_file(file).split("\n")
for line in lines:
parts = line.split('=', 1)
if len(parts) == 2:
args[parts[0]] = parts[1]
def eval_file(src):
""" Loads and evaluates the contents of the specified file. """
return eval(read_file(src), {'__builtins__': None}, None)
def normalize_path(path):
""" Normalizes the path separator to match the Unix standard. """
if sys.platform == 'win32':
return path.replace('\\', '/')
return path
|
ekhdkv/vboxweb
|
refs/heads/master
|
cherrypy/_cptools.py
|
17
|
"""CherryPy tools. A "tool" is any helper, adapted to CP.
Tools are usually designed to be used in a variety of ways (although some
may only offer one if they choose):
Library calls:
All tools are callables that can be used wherever needed.
The arguments are straightforward and should be detailed within the
docstring.
Function decorators:
All tools, when called, may be used as decorators which configure
individual CherryPy page handlers (methods on the CherryPy tree).
That is, "@tools.anytool()" should "turn on" the tool via the
decorated function's _cp_config attribute.
CherryPy config:
If a tool exposes a "_setup" callable, it will be called
once per Request (if the feature is "turned on" via config).
Tools may be implemented as any object with a namespace. The builtins
are generally either modules or instances of the tools.Tool class.
"""
import cherrypy
def _getargs(func):
"""Return the names of all static arguments to the given function."""
# Use this instead of importing inspect for less mem overhead.
import types
if isinstance(func, types.MethodType):
func = func.im_func
co = func.func_code
return co.co_varnames[:co.co_argcount]
class Tool(object):
"""A registered function for use with CherryPy request-processing hooks.
help(tool.callable) should give you more information about this Tool.
"""
namespace = "tools"
def __init__(self, point, callable, name=None, priority=50):
self._point = point
self.callable = callable
self._name = name
self._priority = priority
self.__doc__ = self.callable.__doc__
self._setargs()
def _setargs(self):
"""Copy func parameter names to obj attributes."""
try:
for arg in _getargs(self.callable):
setattr(self, arg, None)
except (TypeError, AttributeError):
if hasattr(self.callable, "__call__"):
for arg in _getargs(self.callable.__call__):
setattr(self, arg, None)
# IronPython 1.0 raises NotImplementedError because
# inspect.getargspec tries to access Python bytecode
# in co_code attribute.
except NotImplementedError:
pass
# IronPython 1B1 may raise IndexError in some cases,
# but if we trap it here it doesn't prevent CP from working.
except IndexError:
pass
def _merged_args(self, d=None):
"""Return a dict of configuration entries for this Tool."""
if d:
conf = d.copy()
else:
conf = {}
tm = cherrypy.request.toolmaps[self.namespace]
if self._name in tm:
conf.update(tm[self._name])
if "on" in conf:
del conf["on"]
return conf
def __call__(self, *args, **kwargs):
"""Compile-time decorator (turn on the tool in config).
For example:
@tools.proxy()
def whats_my_base(self):
return cherrypy.request.base
whats_my_base.exposed = True
"""
if args:
raise TypeError("The %r Tool does not accept positional "
"arguments; you must use keyword arguments."
% self._name)
def tool_decorator(f):
if not hasattr(f, "_cp_config"):
f._cp_config = {}
subspace = self.namespace + "." + self._name + "."
f._cp_config[subspace + "on"] = True
for k, v in kwargs.iteritems():
f._cp_config[subspace + k] = v
return f
return tool_decorator
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
cherrypy.request.hooks.attach(self._point, self.callable,
priority=p, **conf)
class HandlerTool(Tool):
"""Tool which is called 'before main', that may skip normal handlers.
If the tool successfully handles the request (by setting response.body),
it should return True. This will cause CherryPy to skip any 'normal' page
handler. If the tool did not handle the request, it should return False
to tell CherryPy to continue on and call the normal page handler. If the
tool is declared AS a page handler (see the 'handler' method), returning
False will raise NotFound.
"""
def __init__(self, callable, name=None):
Tool.__init__(self, 'before_handler', callable, name)
def handler(self, *args, **kwargs):
"""Use this tool as a CherryPy page handler.
For example:
class Root:
nav = tools.staticdir.handler(section="/nav", dir="nav",
root=absDir)
"""
def handle_func(*a, **kw):
handled = self.callable(*args, **self._merged_args(kwargs))
if not handled:
raise cherrypy.NotFound()
return cherrypy.response.body
handle_func.exposed = True
return handle_func
def _wrapper(self, **kwargs):
if self.callable(**kwargs):
cherrypy.request.handler = None
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
cherrypy.request.hooks.attach(self._point, self._wrapper,
priority=p, **conf)
class HandlerWrapperTool(Tool):
"""Tool which wraps request.handler in a provided wrapper function.
The 'newhandler' arg must be a handler wrapper function that takes a
'next_handler' argument, plus *args and **kwargs. Like all page handler
functions, it must return an iterable for use as cherrypy.response.body.
For example, to allow your 'inner' page handlers to return dicts
which then get interpolated into a template:
def interpolator(next_handler, *args, **kwargs):
filename = cherrypy.request.config.get('template')
cherrypy.response.template = env.get_template(filename)
response_dict = next_handler(*args, **kwargs)
return cherrypy.response.template.render(**response_dict)
cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
"""
def __init__(self, newhandler, point='before_handler', name=None, priority=50):
self.newhandler = newhandler
self._point = point
self._name = name
self._priority = priority
def callable(self):
innerfunc = cherrypy.request.handler
def wrap(*args, **kwargs):
return self.newhandler(innerfunc, *args, **kwargs)
cherrypy.request.handler = wrap
class ErrorTool(Tool):
"""Tool which is used to replace the default request.error_response."""
def __init__(self, callable, name=None):
Tool.__init__(self, None, callable, name)
def _wrapper(self):
self.callable(**self._merged_args())
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
cherrypy.request.error_response = self._wrapper
# Builtin tools #
from cherrypy.lib import cptools, encoding, auth, static, tidy
from cherrypy.lib import sessions as _sessions, xmlrpc as _xmlrpc
from cherrypy.lib import caching as _caching, wsgiapp as _wsgiapp
class SessionTool(Tool):
"""Session Tool for CherryPy.
sessions.locking:
When 'implicit' (the default), the session will be locked for you,
just before running the page handler.
When 'early', the session will be locked before reading the request
body. This is off by default for safety reasons; for example,
a large upload would block the session, denying an AJAX
progress meter (see http://www.cherrypy.org/ticket/630).
When 'explicit' (or any other value), you need to call
cherrypy.session.acquire_lock() yourself before using
session data.
"""
def __init__(self):
# _sessions.init must be bound after headers are read
Tool.__init__(self, 'before_request_body', _sessions.init)
def _lock_session(self):
cherrypy.serving.session.acquire_lock()
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
hooks = cherrypy.request.hooks
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
hooks.attach(self._point, self.callable, priority=p, **conf)
locking = conf.pop('locking', 'implicit')
if locking == 'implicit':
hooks.attach('before_handler', self._lock_session)
elif locking == 'early':
# Lock before the request body (but after _sessions.init runs!)
hooks.attach('before_request_body', self._lock_session,
priority=60)
else:
# Don't lock
pass
hooks.attach('before_finalize', _sessions.save)
hooks.attach('on_end_request', _sessions.close)
def regenerate(self):
"""Drop the current session and make a new one (with a new id)."""
sess = cherrypy.serving.session
sess.regenerate()
# Grab cookie-relevant tool args
conf = dict([(k, v) for k, v in self._merged_args().iteritems()
if k in ('path', 'path_header', 'name', 'timeout',
'domain', 'secure')])
_sessions.set_response_cookie(**conf)
class XMLRPCController(object):
"""A Controller (page handler collection) for XML-RPC.
To use it, have your controllers subclass this base class (it will
turn on the tool for you).
You can also supply the following optional config entries:
tools.xmlrpc.encoding: 'utf-8'
tools.xmlrpc.allow_none: 0
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
appropriate handler must first be performed according to the URL, and
then a second dispatch step must take place according to the RPC method
specified in the request body. It also allows a superfluous "/RPC2"
prefix in the URL, supplies its own handler args in the body, and
requires a 200 OK "Fault" response instead of 404 when the desired
method is not found.
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
This Controller acts as the dispatch target for the first half (based
on the URL); it then reads the RPC method from the request body and
does its own second dispatch step based on that method. It also reads
body params, and returns a Fault on error.
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
in your URLs, you can safely skip turning on the XMLRPCDispatcher.
Otherwise, you need to declare it in config:
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
"""
# Note we're hard-coding this into the 'tools' namespace. We could do
# a huge amount of work to make it relocatable, but the only reason why
# would be if someone actually disabled the default_toolbox. Meh.
_cp_config = {'tools.xmlrpc.on': True}
def default(self, *vpath, **params):
rpcparams, rpcmethod = _xmlrpc.process_body()
subhandler = self
for attr in str(rpcmethod).split('.'):
subhandler = getattr(subhandler, attr, None)
if subhandler and getattr(subhandler, "exposed", False):
body = subhandler(*(vpath + rpcparams), **params)
else:
# http://www.cherrypy.org/ticket/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpc.on_error
raise Exception, 'method "%s" is not supported' % attr
conf = cherrypy.request.toolmaps['tools'].get("xmlrpc", {})
_xmlrpc.respond(body,
conf.get('encoding', 'utf-8'),
conf.get('allow_none', 0))
return cherrypy.response.body
default.exposed = True
class WSGIAppTool(HandlerTool):
"""A tool for running any WSGI middleware/application within CP.
Here are the parameters:
wsgi_app - any wsgi application callable
env_update - a dictionary with arbitrary keys and values to be
merged with the WSGI environ dictionary.
Example:
class Whatever:
_cp_config = {'tools.wsgiapp.on': True,
'tools.wsgiapp.app': some_app,
'tools.wsgiapp.env': app_environ,
}
"""
def _setup(self):
# Keep request body intact so the wsgi app can have its way with it.
cherrypy.request.process_request_body = False
HandlerTool._setup(self)
class SessionAuthTool(HandlerTool):
def _setargs(self):
for name in dir(cptools.SessionAuth):
if not name.startswith("__"):
setattr(self, name, None)
class CachingTool(Tool):
"""Caching Tool for CherryPy."""
def _wrapper(self, invalid_methods=("POST", "PUT", "DELETE"), **kwargs):
request = cherrypy.request
if not hasattr(cherrypy, "_cache"):
# Make a process-wide Cache object.
cherrypy._cache = kwargs.pop("cache_class", _caching.MemoryCache)()
# Take all remaining kwargs and set them on the Cache object.
for k, v in kwargs.iteritems():
setattr(cherrypy._cache, k, v)
if _caching.get(invalid_methods=invalid_methods):
request.handler = None
else:
if request.cacheable:
# Note the devious technique here of adding hooks on the fly
request.hooks.attach('before_finalize', _caching.tee_output,
priority = 90)
_wrapper.priority = 20
def _setup(self):
"""Hook caching into cherrypy.request."""
conf = self._merged_args()
p = conf.pop("priority", None)
cherrypy.request.hooks.attach('before_handler', self._wrapper,
priority=p, **conf)
class Toolbox(object):
"""A collection of Tools.
This object also functions as a config namespace handler for itself.
Custom toolboxes should be added to each Application's toolboxes dict.
"""
def __init__(self, namespace):
self.namespace = namespace
def __setattr__(self, name, value):
# If the Tool._name is None, supply it from the attribute name.
if isinstance(value, Tool):
if value._name is None:
value._name = name
value.namespace = self.namespace
object.__setattr__(self, name, value)
def __enter__(self):
"""Populate request.toolmaps from tools specified in config."""
cherrypy.request.toolmaps[self.namespace] = map = {}
def populate(k, v):
toolname, arg = k.split(".", 1)
bucket = map.setdefault(toolname, {})
bucket[arg] = v
return populate
def __exit__(self, exc_type, exc_val, exc_tb):
"""Run tool._setup() for each tool in our toolmap."""
map = cherrypy.request.toolmaps.get(self.namespace)
if map:
for name, settings in map.items():
if settings.get("on", False):
tool = getattr(self, name)
tool._setup()
default_toolbox = _d = Toolbox("tools")
_d.session_auth = SessionAuthTool(cptools.session_auth)
_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
_d.response_headers = Tool('on_start_resource', cptools.response_headers)
_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
_d.err_redirect = ErrorTool(cptools.redirect)
_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
_d.decode = Tool('before_handler', encoding.decode)
# the order of encoding, gzip, caching is important
_d.encode = Tool('before_finalize', encoding.encode, priority=70)
_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
_d.staticdir = HandlerTool(static.staticdir)
_d.staticfile = HandlerTool(static.staticfile)
_d.sessions = SessionTool()
_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
_d.wsgiapp = WSGIAppTool(_wsgiapp.run)
_d.caching = CachingTool('before_handler', _caching.get, 'caching')
_d.expires = Tool('before_finalize', _caching.expires)
_d.tidy = Tool('before_finalize', tidy.tidy)
_d.nsgmls = Tool('before_finalize', tidy.nsgmls)
_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
_d.referer = Tool('before_request_body', cptools.referer)
_d.basic_auth = Tool('on_start_resource', auth.basic_auth)
_d.digest_auth = Tool('on_start_resource', auth.digest_auth)
_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
_d.flatten = Tool('before_finalize', cptools.flatten)
_d.accept = Tool('on_start_resource', cptools.accept)
_d.redirect = Tool('on_start_resource', cptools.redirect)
del _d, cptools, encoding, auth, static, tidy
|